1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 92, 93, 94, 95, 1996 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
29 #include "hard-reg-set.h"
31 #include "insn-flags.h"
32 #include "insn-codes.h"
34 #include "insn-config.h"
37 #include "typeclass.h"
40 #include "bc-opcode.h"
41 #include "bc-typecd.h"
46 #define CEIL(x,y) (((x) + (y) - 1) / (y))
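/* Added note (not in the original source): CEIL rounds the quotient up,
   e.g. CEIL (10, 4) == 3.  convert_move uses it below to count how many
   words are needed to hold a value:

       int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);  */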
48 /* Decide whether a function's arguments should be processed
49 from first to last or from last to first.
51 They should if the stack and args grow in opposite directions, but
52 only if we have push insns. */
56 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
57 #define PUSH_ARGS_REVERSED /* If it's last to first */
62 #ifndef STACK_PUSH_CODE
63 #ifdef STACK_GROWS_DOWNWARD
64 #define STACK_PUSH_CODE PRE_DEC
66 #define STACK_PUSH_CODE PRE_INC
70 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
71 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
73 /* If this is nonzero, we do not bother generating VOLATILE
74 around volatile memory references, and we are willing to
75 output indirect addresses. If cse is to follow, we reject
76 indirect addresses so a useful potential cse is generated;
77 if it is used only once, instruction combination will produce
78 the same indirect address eventually. */
81 /* Nonzero to generate code for all the subroutines within an
82 expression before generating the upper levels of the expression.
83 Nowadays this is never zero. */
84 int do_preexpand_calls = 1;
86 /* Number of units that we should eventually pop off the stack.
87 These are the arguments to function calls that have already returned. */
88 int pending_stack_adjust;
90 /* Nonzero means stack pops must not be deferred, and deferred stack
91 pops must not be output. It is nonzero inside a function call,
92 inside a conditional expression, inside a statement expression,
93 and in other cases as well. */
94 int inhibit_defer_pop;
96 /* A list of all cleanups which belong to the arguments of
97 function calls being expanded by expand_call. */
98 tree cleanups_this_call;
100 /* When temporaries are created by TARGET_EXPRs, they are created at
101 this level of temp_slot_level, so that they can remain allocated
102 until no longer needed. CLEANUP_POINT_EXPRs define the lifetime of TARGET_EXPRs. */
104 int target_temp_slot_level;
106 /* Nonzero means __builtin_saveregs has already been done in this function.
107 The value is the pseudoreg containing the value __builtin_saveregs returned. */
109 static rtx saveregs_value;
111 /* Similarly for __builtin_apply_args. */
112 static rtx apply_args_value;
114 /* This structure is used by move_by_pieces to describe the move to be performed. */
117 struct move_by_pieces
127 int explicit_inc_from;
134 /* This structure is used by clear_by_pieces to describe the clear to be performed. */
137 struct clear_by_pieces
149 /* Used to generate bytecodes: keep track of size of local variables,
150 as well as depth of arithmetic stack. (Notice that variables are
151 stored on the machine's stack, not the arithmetic stack.) */
153 extern int local_vars_size;
154 extern int stack_depth;
155 extern int max_stack_depth;
156 extern struct obstack permanent_obstack;
157 extern rtx arg_pointer_save_area;
159 static rtx enqueue_insn PROTO((rtx, rtx));
160 static int queued_subexp_p PROTO((rtx));
161 static void init_queue PROTO((void));
162 static void move_by_pieces PROTO((rtx, rtx, int, int));
163 static int move_by_pieces_ninsns PROTO((unsigned int, int));
164 static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
165 struct move_by_pieces *));
166 static void clear_by_pieces PROTO((rtx, int, int));
167 static void clear_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
168 struct clear_by_pieces *));
169 static int is_zeros_p PROTO((tree));
170 static int mostly_zeros_p PROTO((tree));
171 static void store_constructor PROTO((tree, rtx, int));
172 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
173 enum machine_mode, int, int, int));
174 static int get_inner_unaligned_p PROTO((tree));
175 static tree save_noncopied_parts PROTO((tree, tree));
176 static tree init_noncopied_parts PROTO((tree, tree));
177 static int safe_from_p PROTO((rtx, tree));
178 static int fixed_type_p PROTO((tree));
179 static int get_pointer_alignment PROTO((tree, unsigned));
180 static tree string_constant PROTO((tree, tree *));
181 static tree c_strlen PROTO((tree));
182 static rtx expand_builtin PROTO((tree, rtx, rtx,
183 enum machine_mode, int));
184 static int apply_args_size PROTO((void));
185 static int apply_result_size PROTO((void));
186 static rtx result_vector PROTO((int, rtx));
187 static rtx expand_builtin_apply_args PROTO((void));
188 static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
189 static void expand_builtin_return PROTO((rtx));
190 static rtx expand_increment PROTO((tree, int, int));
191 void bc_expand_increment PROTO((struct increment_operator *, tree));
192 rtx bc_allocate_local PROTO((int, int));
193 void bc_store_memory PROTO((tree, tree));
194 tree bc_expand_component_address PROTO((tree));
195 tree bc_expand_address PROTO((tree));
196 void bc_expand_constructor PROTO((tree));
197 void bc_adjust_stack PROTO((int));
198 tree bc_canonicalize_array_ref PROTO((tree));
199 void bc_load_memory PROTO((tree, tree));
200 void bc_load_externaddr PROTO((rtx));
201 void bc_load_externaddr_id PROTO((tree, int));
202 void bc_load_localaddr PROTO((rtx));
203 void bc_load_parmaddr PROTO((rtx));
204 static void preexpand_calls PROTO((tree));
205 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
206 void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
207 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
208 static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
209 static void do_jump_for_compare PROTO((rtx, rtx, rtx));
210 static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
211 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
212 static tree defer_cleanups_to PROTO((tree));
213 extern void (*interim_eh_hook) PROTO((tree));
214 extern tree truthvalue_conversion PROTO((tree));
216 /* Record for each mode whether we can move a register directly to or
217 from an object of that mode in memory. If we can't, we won't try
218 to use that mode directly when accessing a field of that mode. */
220 static char direct_load[NUM_MACHINE_MODES];
221 static char direct_store[NUM_MACHINE_MODES];
223 /* MOVE_RATIO is the number of move instructions that is better than a block move. */
227 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
230 /* A value of around 6 would minimize code size; infinity would minimize execution time. */
232 #define MOVE_RATIO 15
236 /* This array records the insn_code of insns to perform block moves. */
237 enum insn_code movstr_optab[NUM_MACHINE_MODES];
239 /* This array records the insn_code of insns to perform block clears. */
240 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
242 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
244 #ifndef SLOW_UNALIGNED_ACCESS
245 #define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
248 /* Register mappings for target machines without register windows. */
249 #ifndef INCOMING_REGNO
250 #define INCOMING_REGNO(OUT) (OUT)
252 #ifndef OUTGOING_REGNO
253 #define OUTGOING_REGNO(IN) (IN)
256 /* Maps used to convert modes to const, load, and store bytecodes. */
257 enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
258 enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
259 enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
261 /* Initialize maps used to convert modes to const, load, and store bytecodes. */
264 bc_init_mode_to_opcode_maps ()
268 for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
269 mode_to_const_map[mode] =
270 mode_to_load_map[mode] =
271 mode_to_store_map[mode] = neverneverland;
273 #define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
274 mode_to_const_map[(int) SYM] = CONST; \
275 mode_to_load_map[(int) SYM] = LOAD; \
276 mode_to_store_map[(int) SYM] = STORE;
278 #include "modemap.def"
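/* Added illustration: modemap.def is a list of DEF_MODEMAP entries, so
   including it with the definition above fills all three maps at once.
   A hypothetical entry (the opcode names are placeholders, and the
   CODE/UCODE arguments are not used by this particular expansion) would
   expand like this:  */
#if 0
DEF_MODEMAP (SImode, SIcode, SIucode, constSI, loadSI, storeSI)
  /* ==>  mode_to_const_map[(int) SImode] = constSI;
	  mode_to_load_map[(int) SImode]  = loadSI;
	  mode_to_store_map[(int) SImode] = storeSI;  */
#endif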
282 /* This is run once per compilation to set up which modes can be used
283 directly in memory and to initialize the block move optab. */
289 enum machine_mode mode;
290 /* Try indexing by frame ptr and try by stack ptr.
291 It is known that on the Convex the stack ptr isn't a valid index.
292 With luck, one or the other is valid on any machine. */
293 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
294 rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
297 insn = emit_insn (gen_rtx (SET, 0, 0));
298 pat = PATTERN (insn);
300 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
301 mode = (enum machine_mode) ((int) mode + 1))
307 direct_load[(int) mode] = direct_store[(int) mode] = 0;
308 PUT_MODE (mem, mode);
309 PUT_MODE (mem1, mode);
311 /* See if there is some register that can be used in this mode and
312 directly loaded or stored from memory. */
314 if (mode != VOIDmode && mode != BLKmode)
315 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
316 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
319 if (! HARD_REGNO_MODE_OK (regno, mode))
322 reg = gen_rtx (REG, mode, regno);
325 SET_DEST (pat) = reg;
326 if (recog (pat, insn, &num_clobbers) >= 0)
327 direct_load[(int) mode] = 1;
329 SET_SRC (pat) = mem1;
330 SET_DEST (pat) = reg;
331 if (recog (pat, insn, &num_clobbers) >= 0)
332 direct_load[(int) mode] = 1;
335 SET_DEST (pat) = mem;
336 if (recog (pat, insn, &num_clobbers) >= 0)
337 direct_store[(int) mode] = 1;
340 SET_DEST (pat) = mem1;
341 if (recog (pat, insn, &num_clobbers) >= 0)
342 direct_store[(int) mode] = 1;
349 /* This is run at the start of compiling a function. */
356 pending_stack_adjust = 0;
357 inhibit_defer_pop = 0;
358 cleanups_this_call = 0;
360 apply_args_value = 0;
364 /* Save all variables describing the current status into the structure *P.
365 This is used before starting a nested function. */
371 /* Instead of saving the postincrement queue, empty it. */
374 p->pending_stack_adjust = pending_stack_adjust;
375 p->inhibit_defer_pop = inhibit_defer_pop;
376 p->cleanups_this_call = cleanups_this_call;
377 p->saveregs_value = saveregs_value;
378 p->apply_args_value = apply_args_value;
379 p->forced_labels = forced_labels;
381 pending_stack_adjust = 0;
382 inhibit_defer_pop = 0;
383 cleanups_this_call = 0;
385 apply_args_value = 0;
389 /* Restore all variables describing the current status from the structure *P.
390 This is used after a nested function. */
393 restore_expr_status (p)
396 pending_stack_adjust = p->pending_stack_adjust;
397 inhibit_defer_pop = p->inhibit_defer_pop;
398 cleanups_this_call = p->cleanups_this_call;
399 saveregs_value = p->saveregs_value;
400 apply_args_value = p->apply_args_value;
401 forced_labels = p->forced_labels;
404 /* Manage the queue of increment instructions to be output
405 for POSTINCREMENT_EXPR expressions, etc. */
407 static rtx pending_chain;
409 /* Queue up to increment (or change) VAR later. BODY says how:
410 BODY should be the same thing you would pass to emit_insn
411 to increment right away. It will go to emit_insn later on.
413 The value is a QUEUED expression to be used in place of VAR
414 where you want to guarantee the pre-incrementation value of VAR. */
417 enqueue_insn (var, body)
420 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
421 var, NULL_RTX, NULL_RTX, body, pending_chain);
422 return pending_chain;
425 /* Use protect_from_queue to convert a QUEUED expression
426 into something that you can put immediately into an instruction.
427 If the queued incrementation has not happened yet,
428 protect_from_queue returns the variable itself.
429 If the incrementation has happened, protect_from_queue returns a temp
430 that contains a copy of the old value of the variable.
432 Any time an rtx which might possibly be a QUEUED is to be put
433 into an instruction, it must be passed through protect_from_queue first.
434 QUEUED expressions are not meaningful in instructions.
436 Do not pass a value through protect_from_queue and then hold
437 on to it for a while before putting it in an instruction!
438 If the queue is flushed in between, incorrect code will result. */
441 protect_from_queue (x, modify)
445 register RTX_CODE code = GET_CODE (x);
447 #if 0 /* A QUEUED can hang around after the queue is forced out. */
448 /* Shortcut for most common case. */
449 if (pending_chain == 0)
455 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
456 use of autoincrement. Make a copy of the contents of the memory
457 location rather than a copy of the address, but not if the value is
458 of mode BLKmode. Don't modify X in place since it might be shared. */
460 if (code == MEM && GET_MODE (x) != BLKmode
461 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
463 register rtx y = XEXP (x, 0);
464 register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));
466 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
467 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
468 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
472 register rtx temp = gen_reg_rtx (GET_MODE (new));
473 emit_insn_before (gen_move_insn (temp, new),
479 /* Otherwise, recursively protect the subexpressions of all
480 the kinds of rtx's that can contain a QUEUED. */
483 rtx tem = protect_from_queue (XEXP (x, 0), 0);
484 if (tem != XEXP (x, 0))
490 else if (code == PLUS || code == MULT)
492 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
493 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
494 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
503 /* If the increment has not happened, use the variable itself. */
504 if (QUEUED_INSN (x) == 0)
505 return QUEUED_VAR (x);
506 /* If the increment has happened and a pre-increment copy exists, use that copy. */
508 if (QUEUED_COPY (x) != 0)
509 return QUEUED_COPY (x);
510 /* The increment has happened but we haven't set up a pre-increment copy.
511 Set one up now, and use it. */
512 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
513 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
515 return QUEUED_COPY (x);
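/* Added usage sketch (not part of the original file): every rtx that
   might contain a QUEUED must be filtered through protect_from_queue
   before it is placed in an insn, as emit_move_insn does further down.
   X and Y here stand for arbitrary operand rtx's.  */
#if 0
  x = protect_from_queue (x, 1);	/* 1: X will be written.  */
  y = protect_from_queue (y, 0);	/* 0: Y is only read.  */
  emit_insn (gen_move_insn (x, y));
#endif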
518 /* Return nonzero if X contains a QUEUED expression:
519 if it contains anything that will be altered by a queued increment.
520 We handle only combinations of MEM, PLUS, MINUS and MULT operators
521 since memory addresses generally contain only those. */
527 register enum rtx_code code = GET_CODE (x);
533 return queued_subexp_p (XEXP (x, 0));
537 return queued_subexp_p (XEXP (x, 0))
538 || queued_subexp_p (XEXP (x, 1));
543 /* Perform all the pending incrementations. */
549 while (p = pending_chain)
551 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
552 pending_chain = QUEUED_NEXT (p);
563 /* Copy data from FROM to TO, where the machine modes are not the same.
564 Both modes may be integer, or both may be floating.
565 UNSIGNEDP should be nonzero if FROM is an unsigned type.
566 This causes zero-extension instead of sign-extension. */
569 convert_move (to, from, unsignedp)
570 register rtx to, from;
573 enum machine_mode to_mode = GET_MODE (to);
574 enum machine_mode from_mode = GET_MODE (from);
575 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
576 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
580 /* rtx code for making an equivalent value. */
581 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
583 to = protect_from_queue (to, 1);
584 from = protect_from_queue (from, 0);
586 if (to_real != from_real)
589 /* If FROM is a SUBREG that indicates that we have already done at least
590 the required extension, strip it. We don't handle such SUBREGs as TO here. */
593 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
594 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
595 >= GET_MODE_SIZE (to_mode))
596 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
597 from = gen_lowpart (to_mode, from), from_mode = to_mode;
599 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
602 if (to_mode == from_mode
603 || (from_mode == VOIDmode && CONSTANT_P (from)))
605 emit_move_insn (to, from);
613 #ifdef HAVE_extendqfhf2
614 if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
616 emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
620 #ifdef HAVE_extendqfsf2
621 if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
623 emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
627 #ifdef HAVE_extendqfdf2
628 if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
630 emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
634 #ifdef HAVE_extendqfxf2
635 if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
637 emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
641 #ifdef HAVE_extendqftf2
642 if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
644 emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
649 #ifdef HAVE_extendhftqf2
650 if (HAVE_extendhftqf2 && from_mode == HFmode && to_mode == TQFmode)
652 emit_unop_insn (CODE_FOR_extendhftqf2, to, from, UNKNOWN);
657 #ifdef HAVE_extendhfsf2
658 if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
660 emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
664 #ifdef HAVE_extendhfdf2
665 if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
667 emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
671 #ifdef HAVE_extendhfxf2
672 if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
674 emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
678 #ifdef HAVE_extendhftf2
679 if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
681 emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
686 #ifdef HAVE_extendsfdf2
687 if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
689 emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
693 #ifdef HAVE_extendsfxf2
694 if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
696 emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
700 #ifdef HAVE_extendsftf2
701 if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
703 emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
707 #ifdef HAVE_extenddfxf2
708 if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
710 emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
714 #ifdef HAVE_extenddftf2
715 if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
717 emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
722 #ifdef HAVE_trunchfqf2
723 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
725 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
729 #ifdef HAVE_truncsfqf2
730 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
732 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
736 #ifdef HAVE_truncdfqf2
737 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
739 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
743 #ifdef HAVE_truncxfqf2
744 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
746 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
750 #ifdef HAVE_trunctfqf2
751 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
753 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
758 #ifdef HAVE_trunctqfhf2
759 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
761 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
765 #ifdef HAVE_truncsfhf2
766 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
768 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
772 #ifdef HAVE_truncdfhf2
773 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
775 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
779 #ifdef HAVE_truncxfhf2
780 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
782 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
786 #ifdef HAVE_trunctfhf2
787 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
789 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
793 #ifdef HAVE_truncdfsf2
794 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
796 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
800 #ifdef HAVE_truncxfsf2
801 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
803 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
807 #ifdef HAVE_trunctfsf2
808 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
810 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
814 #ifdef HAVE_truncxfdf2
815 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
817 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
821 #ifdef HAVE_trunctfdf2
822 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
824 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
836 libcall = extendsfdf2_libfunc;
840 libcall = extendsfxf2_libfunc;
844 libcall = extendsftf2_libfunc;
853 libcall = truncdfsf2_libfunc;
857 libcall = extenddfxf2_libfunc;
861 libcall = extenddftf2_libfunc;
870 libcall = truncxfsf2_libfunc;
874 libcall = truncxfdf2_libfunc;
883 libcall = trunctfsf2_libfunc;
887 libcall = trunctfdf2_libfunc;
893 if (libcall == (rtx) 0)
894 /* This conversion is not implemented yet. */
897 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
899 emit_move_insn (to, value);
903 /* Now both modes are integers. */
905 /* Handle expanding beyond a word. */
906 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
907 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
914 enum machine_mode lowpart_mode;
915 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
917 /* Try converting directly if the insn is supported. */
918 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
921 /* If FROM is a SUBREG, put it into a register. Do this
922 so that we always generate the same set of insns for
923 better cse'ing; if an intermediate assignment occurred,
924 we won't be doing the operation directly on the SUBREG. */
925 if (optimize > 0 && GET_CODE (from) == SUBREG)
926 from = force_reg (from_mode, from);
927 emit_unop_insn (code, to, from, equiv_code);
930 /* Next, try converting via full word. */
931 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
932 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
933 != CODE_FOR_nothing))
935 if (GET_CODE (to) == REG)
936 emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
937 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
938 emit_unop_insn (code, to,
939 gen_lowpart (word_mode, to), equiv_code);
943 /* No special multiword conversion insn; do it by hand. */
946 /* Since we will turn this into a no conflict block, we must ensure
947 that the source does not overlap the target. */
949 if (reg_overlap_mentioned_p (to, from))
950 from = force_reg (from_mode, from);
952 /* Get a copy of FROM widened to a word, if necessary. */
953 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
954 lowpart_mode = word_mode;
956 lowpart_mode = from_mode;
958 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
960 lowpart = gen_lowpart (lowpart_mode, to);
961 emit_move_insn (lowpart, lowfrom);
963 /* Compute the value to put in each remaining word. */
965 fill_value = const0_rtx;
970 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
971 && STORE_FLAG_VALUE == -1)
973 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
975 fill_value = gen_reg_rtx (word_mode);
976 emit_insn (gen_slt (fill_value));
982 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
983 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
985 fill_value = convert_to_mode (word_mode, fill_value, 1);
989 /* Fill the remaining words. */
990 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
992 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
993 rtx subword = operand_subword (to, index, 1, to_mode);
998 if (fill_value != subword)
999 emit_move_insn (subword, fill_value);
1002 insns = get_insns ();
1005 emit_no_conflict_block (insns, to, from, NULL_RTX,
1006 gen_rtx (equiv_code, to_mode, copy_rtx (from)));
1010 /* Truncating multi-word to a word or less. */
1011 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
1012 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
1014 if (!((GET_CODE (from) == MEM
1015 && ! MEM_VOLATILE_P (from)
1016 && direct_load[(int) to_mode]
1017 && ! mode_dependent_address_p (XEXP (from, 0)))
1018 || GET_CODE (from) == REG
1019 || GET_CODE (from) == SUBREG))
1020 from = force_reg (from_mode, from);
1021 convert_move (to, gen_lowpart (word_mode, from), 0);
1025 /* Handle pointer conversion */ /* SPEE 900220 */
1026 if (to_mode == PSImode)
1028 if (from_mode != SImode)
1029 from = convert_to_mode (SImode, from, unsignedp);
1031 #ifdef HAVE_truncsipsi2
1032 if (HAVE_truncsipsi2)
1034 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
1037 #endif /* HAVE_truncsipsi2 */
1041 if (from_mode == PSImode)
1043 if (to_mode != SImode)
1045 from = convert_to_mode (SImode, from, unsignedp);
1050 #ifdef HAVE_extendpsisi2
1051 if (HAVE_extendpsisi2)
1053 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1056 #endif /* HAVE_extendpsisi2 */
1061 if (to_mode == PDImode)
1063 if (from_mode != DImode)
1064 from = convert_to_mode (DImode, from, unsignedp);
1066 #ifdef HAVE_truncdipdi2
1067 if (HAVE_truncdipdi2)
1069 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1072 #endif /* HAVE_truncdipdi2 */
1076 if (from_mode == PDImode)
1078 if (to_mode != DImode)
1080 from = convert_to_mode (DImode, from, unsignedp);
1085 #ifdef HAVE_extendpdidi2
1086 if (HAVE_extendpdidi2)
1088 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1091 #endif /* HAVE_extendpdidi2 */
1096 /* Now follow all the conversions between integers
1097 no more than a word long. */
1099 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1100 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1101 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1102 GET_MODE_BITSIZE (from_mode)))
1104 if (!((GET_CODE (from) == MEM
1105 && ! MEM_VOLATILE_P (from)
1106 && direct_load[(int) to_mode]
1107 && ! mode_dependent_address_p (XEXP (from, 0)))
1108 || GET_CODE (from) == REG
1109 || GET_CODE (from) == SUBREG))
1110 from = force_reg (from_mode, from);
1111 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1112 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1113 from = copy_to_reg (from);
1114 emit_move_insn (to, gen_lowpart (to_mode, from));
1118 /* Handle extension. */
1119 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1121 /* Convert directly if that works. */
1122 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1123 != CODE_FOR_nothing)
1125 emit_unop_insn (code, to, from, equiv_code);
1130 enum machine_mode intermediate;
1132 /* Search for a mode to convert via. */
1133 for (intermediate = from_mode; intermediate != VOIDmode;
1134 intermediate = GET_MODE_WIDER_MODE (intermediate))
1135 if (((can_extend_p (to_mode, intermediate, unsignedp)
1136 != CODE_FOR_nothing)
1137 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1138 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode), GET_MODE_BITSIZE (intermediate))))
1139 && (can_extend_p (intermediate, from_mode, unsignedp)
1140 != CODE_FOR_nothing))
1142 convert_move (to, convert_to_mode (intermediate, from,
1143 unsignedp), unsignedp);
1147 /* No suitable intermediate mode. */
1152 /* Support special truncate insns for certain modes. */
1154 if (from_mode == DImode && to_mode == SImode)
1156 #ifdef HAVE_truncdisi2
1157 if (HAVE_truncdisi2)
1159 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1163 convert_move (to, force_reg (from_mode, from), unsignedp);
1167 if (from_mode == DImode && to_mode == HImode)
1169 #ifdef HAVE_truncdihi2
1170 if (HAVE_truncdihi2)
1172 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1176 convert_move (to, force_reg (from_mode, from), unsignedp);
1180 if (from_mode == DImode && to_mode == QImode)
1182 #ifdef HAVE_truncdiqi2
1183 if (HAVE_truncdiqi2)
1185 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1189 convert_move (to, force_reg (from_mode, from), unsignedp);
1193 if (from_mode == SImode && to_mode == HImode)
1195 #ifdef HAVE_truncsihi2
1196 if (HAVE_truncsihi2)
1198 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1202 convert_move (to, force_reg (from_mode, from), unsignedp);
1206 if (from_mode == SImode && to_mode == QImode)
1208 #ifdef HAVE_truncsiqi2
1209 if (HAVE_truncsiqi2)
1211 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1215 convert_move (to, force_reg (from_mode, from), unsignedp);
1219 if (from_mode == HImode && to_mode == QImode)
1221 #ifdef HAVE_trunchiqi2
1222 if (HAVE_trunchiqi2)
1224 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1228 convert_move (to, force_reg (from_mode, from), unsignedp);
1232 if (from_mode == TImode && to_mode == DImode)
1234 #ifdef HAVE_trunctidi2
1235 if (HAVE_trunctidi2)
1237 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1241 convert_move (to, force_reg (from_mode, from), unsignedp);
1245 if (from_mode == TImode && to_mode == SImode)
1247 #ifdef HAVE_trunctisi2
1248 if (HAVE_trunctisi2)
1250 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1254 convert_move (to, force_reg (from_mode, from), unsignedp);
1258 if (from_mode == TImode && to_mode == HImode)
1260 #ifdef HAVE_trunctihi2
1261 if (HAVE_trunctihi2)
1263 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1267 convert_move (to, force_reg (from_mode, from), unsignedp);
1271 if (from_mode == TImode && to_mode == QImode)
1273 #ifdef HAVE_trunctiqi2
1274 if (HAVE_trunctiqi2)
1276 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1280 convert_move (to, force_reg (from_mode, from), unsignedp);
1284 /* Handle truncation of volatile memrefs, and so on;
1285 the things that couldn't be truncated directly,
1286 and for which there was no special instruction. */
1287 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1289 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1290 emit_move_insn (to, temp);
1294 /* Mode combination is not recognized. */
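/* Added usage sketch (assumption: FROM is an SImode register rtx):
   widen its value into a fresh DImode pseudo, zero-extending because
   UNSIGNEDP is nonzero.  */
#if 0
  rtx to = gen_reg_rtx (DImode);
  convert_move (to, from, 1);
#endif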
1298 /* Return an rtx for a value that would result
1299 from converting X to mode MODE.
1300 Both X and MODE may be floating, or both integer.
1301 UNSIGNEDP is nonzero if X is an unsigned value.
1302 This can be done by referring to a part of X in place
1303 or by copying to a new temporary with conversion.
1305 This function *must not* call protect_from_queue
1306 except when putting X into an insn (in which case convert_move does it). */
1309 convert_to_mode (mode, x, unsignedp)
1310 enum machine_mode mode;
1314 return convert_modes (mode, VOIDmode, x, unsignedp);
1317 /* Return an rtx for a value that would result
1318 from converting X from mode OLDMODE to mode MODE.
1319 Both modes may be floating, or both integer.
1320 UNSIGNEDP is nonzero if X is an unsigned value.
1322 This can be done by referring to a part of X in place
1323 or by copying to a new temporary with conversion.
1325 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1327 This function *must not* call protect_from_queue
1328 except when putting X into an insn (in which case convert_move does it). */
1331 convert_modes (mode, oldmode, x, unsignedp)
1332 enum machine_mode mode, oldmode;
1338 /* If FROM is a SUBREG that indicates that we have already done at least
1339 the required extension, strip it. */
1341 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1342 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1343 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1344 x = gen_lowpart (mode, x);
1346 if (GET_MODE (x) != VOIDmode)
1347 oldmode = GET_MODE (x);
1349 if (mode == oldmode)
1352 /* There is one case that we must handle specially: If we are converting
1353 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1354 we are to interpret the constant as unsigned, gen_lowpart will do
1355 the wrong thing if the constant appears negative. What we want to do is
1356 make the high-order word of the constant zero, not all ones. */
1358 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1359 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1360 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1361 return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
1363 /* We can do this with a gen_lowpart if both desired and current modes
1364 are integer, and this is either a constant integer, a register, or a
1365 non-volatile MEM. Except for the constant case where MODE is no
1366 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1368 if ((GET_CODE (x) == CONST_INT
1369 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1370 || (GET_MODE_CLASS (mode) == MODE_INT
1371 && GET_MODE_CLASS (oldmode) == MODE_INT
1372 && (GET_CODE (x) == CONST_DOUBLE
1373 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1374 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1375 && direct_load[(int) mode])
1376 || (GET_CODE (x) == REG
1377 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1378 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1380 /* ?? If we don't know OLDMODE, we have to assume here that
1381 X does not need sign- or zero-extension. This may not be
1382 the case, but it's the best we can do. */
1383 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1384 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1386 HOST_WIDE_INT val = INTVAL (x);
1387 int width = GET_MODE_BITSIZE (oldmode);
1389 /* We must sign or zero-extend in this case. Start by
1390 zero-extending, then sign extend if we need to. */
1391 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1393 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1394 val |= (HOST_WIDE_INT) (-1) << width;
1396 return GEN_INT (val);
1399 return gen_lowpart (mode, x);
1402 temp = gen_reg_rtx (mode);
1403 convert_move (temp, x, unsignedp);
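/* Added usage sketch: unlike convert_move, convert_to_mode returns an
   rtx (possibly a new pseudo) holding X reinterpreted in MODE.  Here X
   is assumed to be a narrower integer value.  */
#if 0
  rtx wide = convert_to_mode (SImode, x, 1);	/* zero-extend X to SImode */
#endif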
1407 /* Generate several move instructions to copy LEN bytes
1408 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1409 The caller must pass FROM and TO
1410 through protect_from_queue before calling.
1411 ALIGN (in bytes) is maximum alignment we can assume. */
1414 move_by_pieces (to, from, len, align)
1418 struct move_by_pieces data;
1419 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1420 int max_size = MOVE_MAX + 1;
1423 data.to_addr = to_addr;
1424 data.from_addr = from_addr;
1428 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1429 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1431 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1432 || GET_CODE (from_addr) == POST_INC
1433 || GET_CODE (from_addr) == POST_DEC);
1435 data.explicit_inc_from = 0;
1436 data.explicit_inc_to = 0;
1438 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1439 if (data.reverse) data.offset = len;
1442 data.to_struct = MEM_IN_STRUCT_P (to);
1443 data.from_struct = MEM_IN_STRUCT_P (from);
1445 /* If copying requires more than two move insns,
1446 copy addresses to registers (to make displacements shorter)
1447 and use post-increment if available. */
1448 if (!(data.autinc_from && data.autinc_to)
1449 && move_by_pieces_ninsns (len, align) > 2)
1451 #ifdef HAVE_PRE_DECREMENT
1452 if (data.reverse && ! data.autinc_from)
1454 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1455 data.autinc_from = 1;
1456 data.explicit_inc_from = -1;
1459 #ifdef HAVE_POST_INCREMENT
1460 if (! data.autinc_from)
1462 data.from_addr = copy_addr_to_reg (from_addr);
1463 data.autinc_from = 1;
1464 data.explicit_inc_from = 1;
1467 if (!data.autinc_from && CONSTANT_P (from_addr))
1468 data.from_addr = copy_addr_to_reg (from_addr);
1469 #ifdef HAVE_PRE_DECREMENT
1470 if (data.reverse && ! data.autinc_to)
1472 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1474 data.explicit_inc_to = -1;
1477 #ifdef HAVE_POST_INCREMENT
1478 if (! data.reverse && ! data.autinc_to)
1480 data.to_addr = copy_addr_to_reg (to_addr);
1482 data.explicit_inc_to = 1;
1485 if (!data.autinc_to && CONSTANT_P (to_addr))
1486 data.to_addr = copy_addr_to_reg (to_addr);
1489 if (! SLOW_UNALIGNED_ACCESS
1490 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1493 /* First move what we can in the largest integer mode, then go to
1494 successively smaller modes. */
1496 while (max_size > 1)
1498 enum machine_mode mode = VOIDmode, tmode;
1499 enum insn_code icode;
1501 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1502 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1503 if (GET_MODE_SIZE (tmode) < max_size)
1506 if (mode == VOIDmode)
1509 icode = mov_optab->handlers[(int) mode].insn_code;
1510 if (icode != CODE_FOR_nothing
1511 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1512 GET_MODE_SIZE (mode)))
1513 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1515 max_size = GET_MODE_SIZE (mode);
1518 /* The code above should have handled everything. */
1523 /* Return number of insns required to move L bytes by pieces.
1524 ALIGN (in bytes) is maximum alignment we can assume. */
1527 move_by_pieces_ninsns (l, align)
1531 register int n_insns = 0;
1532 int max_size = MOVE_MAX + 1;
1534 if (! SLOW_UNALIGNED_ACCESS
1535 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1538 while (max_size > 1)
1540 enum machine_mode mode = VOIDmode, tmode;
1541 enum insn_code icode;
1543 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1544 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1545 if (GET_MODE_SIZE (tmode) < max_size)
1548 if (mode == VOIDmode)
1551 icode = mov_optab->handlers[(int) mode].insn_code;
1552 if (icode != CODE_FOR_nothing
1553 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1554 GET_MODE_SIZE (mode)))
1555 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1557 max_size = GET_MODE_SIZE (mode);
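/* Added worked example: with MOVE_MAX == 4, full alignment, L == 11, and
   mov patterns for QImode, HImode and SImode, the loop above counts
   11/4 = 2 SImode moves (leaving 3 bytes), then 3/2 = 1 HImode move
   (leaving 1 byte), then 1 QImode move, i.e. 4 insns in total.  */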
1563 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1564 with move instructions for mode MODE. GENFUN is the gen_... function
1565 to make a move insn for that mode. DATA has all the other info. */
1568 move_by_pieces_1 (genfun, mode, data)
1570 enum machine_mode mode;
1571 struct move_by_pieces *data;
1573 register int size = GET_MODE_SIZE (mode);
1574 register rtx to1, from1;
1576 while (data->len >= size)
1578 if (data->reverse) data->offset -= size;
1580 to1 = (data->autinc_to
1581 ? gen_rtx (MEM, mode, data->to_addr)
1582 : change_address (data->to, mode,
1583 plus_constant (data->to_addr, data->offset)));
1584 MEM_IN_STRUCT_P (to1) = data->to_struct;
1587 ? gen_rtx (MEM, mode, data->from_addr)
1588 : change_address (data->from, mode,
1589 plus_constant (data->from_addr, data->offset)));
1590 MEM_IN_STRUCT_P (from1) = data->from_struct;
1592 #ifdef HAVE_PRE_DECREMENT
1593 if (data->explicit_inc_to < 0)
1594 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1595 if (data->explicit_inc_from < 0)
1596 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1599 emit_insn ((*genfun) (to1, from1));
1600 #ifdef HAVE_POST_INCREMENT
1601 if (data->explicit_inc_to > 0)
1602 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1603 if (data->explicit_inc_from > 0)
1604 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1607 if (! data->reverse) data->offset += size;
1613 /* Emit code to move a block Y to a block X.
1614 This may be done with string-move instructions,
1615 with multiple scalar move instructions, or with a library call.
1617 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1619 SIZE is an rtx that says how long they are.
1620 ALIGN is the maximum alignment we can assume they have,
1621 measured in bytes. */
1624 emit_block_move (x, y, size, align)
1629 if (GET_MODE (x) != BLKmode)
1632 if (GET_MODE (y) != BLKmode)
1635 x = protect_from_queue (x, 1);
1636 y = protect_from_queue (y, 0);
1637 size = protect_from_queue (size, 0);
1639 if (GET_CODE (x) != MEM)
1641 if (GET_CODE (y) != MEM)
1646 if (GET_CODE (size) == CONST_INT
1647 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1648 move_by_pieces (x, y, INTVAL (size), align);
1651 /* Try the most limited insn first, because there's no point
1652 including more than one in the machine description unless
1653 the more limited one has some advantage. */
1655 rtx opalign = GEN_INT (align);
1656 enum machine_mode mode;
1658 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1659 mode = GET_MODE_WIDER_MODE (mode))
1661 enum insn_code code = movstr_optab[(int) mode];
1663 if (code != CODE_FOR_nothing
1664 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1665 here because if SIZE is less than the mode mask, as it is
1666 returned by the macro, it will definitely be less than the
1667 actual mode mask. */
1668 && ((GET_CODE (size) == CONST_INT
1669 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1670 <= GET_MODE_MASK (mode)))
1671 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1672 && (insn_operand_predicate[(int) code][0] == 0
1673 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1674 && (insn_operand_predicate[(int) code][1] == 0
1675 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1676 && (insn_operand_predicate[(int) code][3] == 0
1677 || (*insn_operand_predicate[(int) code][3]) (opalign,
1681 rtx last = get_last_insn ();
1684 op2 = convert_to_mode (mode, size, 1);
1685 if (insn_operand_predicate[(int) code][2] != 0
1686 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1687 op2 = copy_to_mode_reg (mode, op2);
1689 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1696 delete_insns_since (last);
1700 #ifdef TARGET_MEM_FUNCTIONS
1701 emit_library_call (memcpy_libfunc, 0,
1702 VOIDmode, 3, XEXP (x, 0), Pmode,
1704 convert_to_mode (TYPE_MODE (sizetype), size,
1705 TREE_UNSIGNED (sizetype)),
1706 TYPE_MODE (sizetype));
1708 emit_library_call (bcopy_libfunc, 0,
1709 VOIDmode, 3, XEXP (y, 0), Pmode,
1711 convert_to_mode (TYPE_MODE (integer_type_node), size,
1712 TREE_UNSIGNED (integer_type_node)),
1713 TYPE_MODE (integer_type_node));
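/* Added usage sketch (X and Y are BLKmode MEMs; the byte count and the
   word alignment are example values, not taken from the original):  */
#if 0
  emit_block_move (x, y, GEN_INT (64), UNITS_PER_WORD);
#endif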
1718 /* Copy all or part of a value X into registers starting at REGNO.
1719 The number of registers to be filled is NREGS. */
1722 move_block_to_reg (regno, x, nregs, mode)
1726 enum machine_mode mode;
1734 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1735 x = validize_mem (force_const_mem (mode, x));
1737 /* See if the machine can do this with a load multiple insn. */
1738 #ifdef HAVE_load_multiple
1739 if (HAVE_load_multiple)
1741 last = get_last_insn ();
1742 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1750 delete_insns_since (last);
1754 for (i = 0; i < nregs; i++)
1755 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1756 operand_subword_force (x, i, mode));
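/* Added usage sketch (REGNO is a hypothetical hard register number):
   copy both words of the DImode value X into hard registers REGNO and
   REGNO + 1.  */
#if 0
  move_block_to_reg (regno, x, 2, DImode);
#endif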
1759 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1760 The number of registers to be filled is NREGS. SIZE indicates the number
1761 of bytes in the object X. */
1765 move_block_from_reg (regno, x, nregs, size)
1774 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1775 to the left before storing to memory. */
1776 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1778 rtx tem = operand_subword (x, 0, 1, BLKmode);
1784 shift = expand_shift (LSHIFT_EXPR, word_mode,
1785 gen_rtx (REG, word_mode, regno),
1786 build_int_2 ((UNITS_PER_WORD - size)
1787 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1788 emit_move_insn (tem, shift);
1792 /* See if the machine can do this with a store multiple insn. */
1793 #ifdef HAVE_store_multiple
1794 if (HAVE_store_multiple)
1796 last = get_last_insn ();
1797 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
1805 delete_insns_since (last);
1809 for (i = 0; i < nregs; i++)
1811 rtx tem = operand_subword (x, i, 1, BLKmode);
1816 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1820 /* Add a USE expression for REG to the (possibly empty) list pointed
1821 to by CALL_FUSAGE. REG must denote a hard register. */
1824 use_reg (call_fusage, reg)
1825 rtx *call_fusage, reg;
1827 if (GET_CODE (reg) != REG
1828 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
1832 = gen_rtx (EXPR_LIST, VOIDmode,
1833 gen_rtx (USE, VOIDmode, reg), *call_fusage);
1836 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1837 starting at REGNO. All of these registers must be hard registers. */
1840 use_regs (call_fusage, regno, nregs)
1847 if (regno + nregs > FIRST_PSEUDO_REGISTER)
1850 for (i = 0; i < nregs; i++)
1851 use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
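/* Added usage sketch: build up the USE list that will be attached to a
   call insn so flow keeps the register live across the call.  REGNO is
   a hypothetical hard register number.  */
#if 0
  rtx call_fusage = NULL_RTX;
  use_reg (&call_fusage, gen_rtx (REG, word_mode, regno));
#endif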
1854 /* Generate several move instructions to clear LEN bytes of block TO.
1855 (A MEM rtx with BLKmode). The caller must pass TO through
1856 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment we can assume. */
1860 clear_by_pieces (to, len, align)
1864 struct clear_by_pieces data;
1865 rtx to_addr = XEXP (to, 0);
1866 int max_size = MOVE_MAX + 1;
1869 data.to_addr = to_addr;
1872 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1873 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1875 data.explicit_inc_to = 0;
1877 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1878 if (data.reverse) data.offset = len;
1881 data.to_struct = MEM_IN_STRUCT_P (to);
1883 /* If copying requires more than two move insns,
1884 copy addresses to registers (to make displacements shorter)
1885 and use post-increment if available. */
1887 && move_by_pieces_ninsns (len, align) > 2)
1889 #ifdef HAVE_PRE_DECREMENT
1890 if (data.reverse && ! data.autinc_to)
1892 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1894 data.explicit_inc_to = -1;
1897 #ifdef HAVE_POST_INCREMENT
1898 if (! data.reverse && ! data.autinc_to)
1900 data.to_addr = copy_addr_to_reg (to_addr);
1902 data.explicit_inc_to = 1;
1905 if (!data.autinc_to && CONSTANT_P (to_addr))
1906 data.to_addr = copy_addr_to_reg (to_addr);
1909 if (! SLOW_UNALIGNED_ACCESS
1910 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1913 /* First move what we can in the largest integer mode, then go to
1914 successively smaller modes. */
1916 while (max_size > 1)
1918 enum machine_mode mode = VOIDmode, tmode;
1919 enum insn_code icode;
1921 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1922 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1923 if (GET_MODE_SIZE (tmode) < max_size)
1926 if (mode == VOIDmode)
1929 icode = mov_optab->handlers[(int) mode].insn_code;
1930 if (icode != CODE_FOR_nothing
1931 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1932 GET_MODE_SIZE (mode)))
1933 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
1935 max_size = GET_MODE_SIZE (mode);
1938 /* The code above should have handled everything. */
1943 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
1944 with move instructions for mode MODE. GENFUN is the gen_... function
1945 to make a move insn for that mode. DATA has all the other info. */
1948 clear_by_pieces_1 (genfun, mode, data)
1950 enum machine_mode mode;
1951 struct clear_by_pieces *data;
1953 register int size = GET_MODE_SIZE (mode);
1956 while (data->len >= size)
1958 if (data->reverse) data->offset -= size;
1960 to1 = (data->autinc_to
1961 ? gen_rtx (MEM, mode, data->to_addr)
1962 : change_address (data->to, mode,
1963 plus_constant (data->to_addr, data->offset)));
1964 MEM_IN_STRUCT_P (to1) = data->to_struct;
1966 #ifdef HAVE_PRE_DECREMENT
1967 if (data->explicit_inc_to < 0)
1968 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1971 emit_insn ((*genfun) (to1, const0_rtx));
1972 #ifdef HAVE_POST_INCREMENT
1973 if (data->explicit_inc_to > 0)
1974 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1977 if (! data->reverse) data->offset += size;
1983 /* Write zeros through the storage of OBJECT.
1984 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
1985 the maximum alignment we can assume it has, measured in bytes. */
1988 clear_storage (object, size, align)
1993 if (GET_MODE (object) == BLKmode)
1995 object = protect_from_queue (object, 1);
1996 size = protect_from_queue (size, 0);
1998 if (GET_CODE (size) == CONST_INT
1999 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
2000 clear_by_pieces (object, INTVAL (size), align);
2004 /* Try the most limited insn first, because there's no point
2005 including more than one in the machine description unless
2006 the more limited one has some advantage. */
2008 rtx opalign = GEN_INT (align);
2009 enum machine_mode mode;
2011 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2012 mode = GET_MODE_WIDER_MODE (mode))
2014 enum insn_code code = clrstr_optab[(int) mode];
2016 if (code != CODE_FOR_nothing
2017 /* We don't need MODE to be narrower than
2018 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2019 the mode mask, as it is returned by the macro, it will
2020 definitely be less than the actual mode mask. */
2021 && ((GET_CODE (size) == CONST_INT
2022 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2023 <= GET_MODE_MASK (mode)))
2024 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2025 && (insn_operand_predicate[(int) code][0] == 0
2026 || (*insn_operand_predicate[(int) code][0]) (object,
2028 && (insn_operand_predicate[(int) code][2] == 0
2029 || (*insn_operand_predicate[(int) code][2]) (opalign,
2033 rtx last = get_last_insn ();
2036 op1 = convert_to_mode (mode, size, 1);
2037 if (insn_operand_predicate[(int) code][1] != 0
2038 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2040 op1 = copy_to_mode_reg (mode, op1);
2042 pat = GEN_FCN ((int) code) (object, op1, opalign);
2049 delete_insns_since (last);
2054 #ifdef TARGET_MEM_FUNCTIONS
2055 emit_library_call (memset_libfunc, 0,
2057 XEXP (object, 0), Pmode,
2058 const0_rtx, TYPE_MODE (integer_type_node),
2059 convert_to_mode (TYPE_MODE (sizetype),
2060 size, TREE_UNSIGNED (sizetype)),
2061 TYPE_MODE (sizetype));
2063 emit_library_call (bzero_libfunc, 0,
2065 XEXP (object, 0), Pmode,
2066 convert_to_mode (TYPE_MODE (integer_type_node),
2068 TREE_UNSIGNED (integer_type_node)),
2069 TYPE_MODE (integer_type_node));
2074 emit_move_insn (object, const0_rtx);
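/* Added usage sketch: zero 64 bytes of the BLKmode MEM OBJECT, assuming
   only byte alignment (the constants are example values).  */
#if 0
  clear_storage (object, GEN_INT (64), 1);
#endif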
2077 /* Generate code to copy Y into X.
2078 Both Y and X must have the same mode, except that
2079 Y can be a constant with VOIDmode.
2080 This mode cannot be BLKmode; use emit_block_move for that.
2082 Return the last instruction emitted. */
2085 emit_move_insn (x, y)
2088 enum machine_mode mode = GET_MODE (x);
2090 x = protect_from_queue (x, 1);
2091 y = protect_from_queue (y, 0);
2093 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2096 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2097 y = force_const_mem (mode, y);
2099 /* If X or Y are memory references, verify that their addresses are valid for the machine. */
2101 if (GET_CODE (x) == MEM
2102 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2103 && ! push_operand (x, GET_MODE (x)))
2105 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2106 x = change_address (x, VOIDmode, XEXP (x, 0));
2108 if (GET_CODE (y) == MEM
2109 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2111 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2112 y = change_address (y, VOIDmode, XEXP (y, 0));
2114 if (mode == BLKmode)
2117 return emit_move_insn_1 (x, y);
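/* Added usage sketch: load the constant 42 into a fresh SImode pseudo.
   The constant's VOIDmode is accepted because X supplies the mode.  */
#if 0
  rtx reg = gen_reg_rtx (SImode);
  emit_move_insn (reg, GEN_INT (42));
#endif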
2120 /* Low level part of emit_move_insn.
2121 Called just like emit_move_insn, but assumes X and Y
2122 are basically valid. */
2125 emit_move_insn_1 (x, y)
2128 enum machine_mode mode = GET_MODE (x);
2129 enum machine_mode submode;
2130 enum mode_class class = GET_MODE_CLASS (mode);
2133 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2135 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2137 /* Expand complex moves by moving real part and imag part, if possible. */
2138 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2139 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2141 (class == MODE_COMPLEX_INT
2142 ? MODE_INT : MODE_FLOAT),
2144 && (mov_optab->handlers[(int) submode].insn_code
2145 != CODE_FOR_nothing))
2147 /* Don't split destination if it is a stack push. */
2148 int stack = push_operand (x, GET_MODE (x));
2151 /* If this is a stack, push the highpart first, so it
2152 will be in the argument order.
2154 In that case, change_address is used only to convert
2155 the mode, not to change the address. */
2158 /* Note that the real part always precedes the imag part in memory
2159 regardless of machine's endianness. */
2160 #ifdef STACK_GROWS_DOWNWARD
2161 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2162 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2163 gen_imagpart (submode, y)));
2164 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2165 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2166 gen_realpart (submode, y)));
2168 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2169 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2170 gen_realpart (submode, y)));
2171 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2172 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2173 gen_imagpart (submode, y)));
2178 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2179 (gen_realpart (submode, x), gen_realpart (submode, y)));
2180 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2181 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2184 return get_last_insn ();
2187 /* This will handle any multi-word mode that lacks a move_insn pattern.
2188 However, you will get better code if you define such patterns,
2189 even if they must turn into multiple assembler instructions. */
2190 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2195 #ifdef PUSH_ROUNDING
2197 /* If X is a push on the stack, do the push now and replace
2198 X with a reference to the stack pointer. */
2199 if (push_operand (x, GET_MODE (x)))
2201 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2202 x = change_address (x, VOIDmode, stack_pointer_rtx);
2206 /* Show the output dies here. */
2208 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
2211 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2214 rtx xpart = operand_subword (x, i, 1, mode);
2215 rtx ypart = operand_subword (y, i, 1, mode);
2217 /* If we can't get a part of Y, put Y into memory if it is a
2218 constant. Otherwise, force it into a register. If we still
2219 can't get a part of Y, abort. */
2220 if (ypart == 0 && CONSTANT_P (y))
2222 y = force_const_mem (mode, y);
2223 ypart = operand_subword (y, i, 1, mode);
2225 else if (ypart == 0)
2226 ypart = operand_subword_force (y, i, mode);
2228 if (xpart == 0 || ypart == 0)
2231 last_insn = emit_move_insn (xpart, ypart);
2240 /* Pushing data onto the stack. */
2242 /* Push a block of length SIZE (perhaps variable)
2243 and return an rtx to address the beginning of the block.
2244 Note that it is not possible for the value returned to be a QUEUED.
2245 The value may be virtual_outgoing_args_rtx.
2247 EXTRA is the number of bytes of padding to push in addition to SIZE.
2248 BELOW nonzero means this padding comes at low addresses;
2249 otherwise, the padding comes at high addresses. */
2252 push_block (size, extra, below)
2258 size = convert_modes (Pmode, ptr_mode, size, 1);
2259 if (CONSTANT_P (size))
2260 anti_adjust_stack (plus_constant (size, extra));
2261 else if (GET_CODE (size) == REG && extra == 0)
2262 anti_adjust_stack (size);
2265 rtx temp = copy_to_mode_reg (Pmode, size);
2267 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2268 temp, 0, OPTAB_LIB_WIDEN);
2269 anti_adjust_stack (temp);
2272 #ifdef STACK_GROWS_DOWNWARD
2273 temp = virtual_outgoing_args_rtx;
2274 if (extra != 0 && below)
2275 temp = plus_constant (temp, extra);
2277 if (GET_CODE (size) == CONST_INT)
2278 temp = plus_constant (virtual_outgoing_args_rtx,
2279 - INTVAL (size) - (below ? 0 : extra));
2280 else if (extra != 0 && !below)
2281 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2282 negate_rtx (Pmode, plus_constant (size, extra)));
2284 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2285 negate_rtx (Pmode, size));
2288 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2294 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
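/* An illustrative sketch (not part of the compiler itself) of the address
   arithmetic push_block performs after adjusting the stack.  P stands for
   the outgoing-args pointer; the helper name and the plain-integer model
   are hypothetical.  */
#if 0 /* Illustrative sketch only.  */
static long
pushed_block_start (p, size, extra, below, grows_down)
     long p, size, extra;
     int below, grows_down;
{
  if (grows_down)
    /* The new block begins at the outgoing-args pointer, past the
       padding when the padding sits below the data.  */
    return below ? p + extra : p;
  /* Stack grows upward: the block sits just under the pointer, with the
     padding above the data when BELOW is zero.  */
  return below ? p - size : p - size - extra;
}
#endif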
2297 /* Generate code to push X onto the stack, assuming it has mode MODE and
2299 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
2301 SIZE is an rtx for the size of data to be copied (in bytes),
2302 needed only if X is BLKmode.
2304 ALIGN (in bytes) is the maximum alignment we can assume.
2306 If PARTIAL and REG are both nonzero, then copy that many of the first
2307 words of X into registers starting with REG, and push the rest of X.
2308 The amount of space pushed is decreased by PARTIAL words,
2309 rounded *down* to a multiple of PARM_BOUNDARY.
2310 REG must be a hard register in this case.
2311 If REG is zero but PARTIAL is not, take all other actions for an
2312 argument partially in registers, but do not actually load any registers.
2315 EXTRA is the amount in bytes of extra space to leave next to this arg.
2316 This is ignored if an argument block has already been allocated.
2318 On a machine that lacks real push insns, ARGS_ADDR is the address of
2319 the bottom of the argument block for this call. We use indexing off there
2320 to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
2321 argument block has not been preallocated.
2323 ARGS_SO_FAR is the size of args previously pushed for this call. */
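/* An illustrative sketch (not part of the compiler itself) of how PARTIAL
   interacts with the stack copy in emit_push_insn: PARTIAL words go to
   registers, so the stack copy starts USED bytes into the value.  The
   numbers below are hypothetical examples.  */
#if 0 /* Illustrative sketch only.  */
static void
partial_arg_example ()
{
  int units_per_word = 4;        /* hypothetical UNITS_PER_WORD */
  int parm_boundary_bytes = 8;   /* hypothetical PARM_BOUNDARY / BITS_PER_UNIT */
  int partial = 3;               /* three words already in registers */
  int used = partial * units_per_word;      /* 12 bytes handled by registers */
  int offset = used % parm_boundary_bytes;  /* 4 of them still counted in stack space */
}
#endif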
2326 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2327 args_addr, args_so_far)
2329 enum machine_mode mode;
2340 enum direction stack_direction
2341 #ifdef STACK_GROWS_DOWNWARD
2347 /* Decide where to pad the argument: `downward' for below,
2348 `upward' for above, or `none' for don't pad it.
2349 Default is below for small data on big-endian machines; else above. */
2350 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2352 /* If we're placing part of X into a register and part of X onto
2353 the stack, indicate that the entire register is clobbered to
2354 keep flow from thinking the unused part of the register is live. */
2355 if (partial > 0 && reg != 0)
2356 emit_insn (gen_rtx (CLOBBER, VOIDmode, reg));
2358 /* Invert direction if stack is post-update. */
2359 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2360 if (where_pad != none)
2361 where_pad = (where_pad == downward ? upward : downward);
2363 xinner = x = protect_from_queue (x, 0);
2365 if (mode == BLKmode)
2367 /* Copy a block into the stack, entirely or partially. */
2370 int used = partial * UNITS_PER_WORD;
2371 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2379 /* USED is now the # of bytes we need not copy to the stack
2380 because registers will take care of them. */
2383 xinner = change_address (xinner, BLKmode,
2384 plus_constant (XEXP (xinner, 0), used));
2386 /* If the partial register-part of the arg counts in its stack size,
2387 skip the part of stack space corresponding to the registers.
2388 Otherwise, start copying to the beginning of the stack space,
2389 by setting SKIP to 0. */
2390 #ifndef REG_PARM_STACK_SPACE
2396 #ifdef PUSH_ROUNDING
2397 /* Do it with several push insns if that doesn't take lots of insns
2398 and if there is no difficulty with push insns that skip bytes
2399 on the stack for alignment purposes. */
2401 && GET_CODE (size) == CONST_INT
2403 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2405 /* Here we avoid the case of a structure whose weak alignment
2406 forces many pushes of a small amount of data,
2407 and such small pushes do rounding that causes trouble. */
2408 && ((! SLOW_UNALIGNED_ACCESS)
2409 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2410 || PUSH_ROUNDING (align) == align)
2411 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2413 /* Push padding now if padding above and stack grows down,
2414 or if padding below and stack grows up.
2415 But if space already allocated, this has already been done. */
2416 if (extra && args_addr == 0
2417 && where_pad != none && where_pad != stack_direction)
2418 anti_adjust_stack (GEN_INT (extra));
2420 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2421 INTVAL (size) - used, align);
2424 #endif /* PUSH_ROUNDING */
2426 /* Otherwise make space on the stack and copy the data
2427 to the address of that space. */
2429 /* Deduct words put into registers from the size we must copy. */
2432 if (GET_CODE (size) == CONST_INT)
2433 size = GEN_INT (INTVAL (size) - used);
2435 size = expand_binop (GET_MODE (size), sub_optab, size,
2436 GEN_INT (used), NULL_RTX, 0,
2440 /* Get the address of the stack space.
2441 In this case, we do not deal with EXTRA separately.
2442 A single stack adjust will do. */
2445 temp = push_block (size, extra, where_pad == downward);
2448 else if (GET_CODE (args_so_far) == CONST_INT)
2449 temp = memory_address (BLKmode,
2450 plus_constant (args_addr,
2451 skip + INTVAL (args_so_far)));
2453 temp = memory_address (BLKmode,
2454 plus_constant (gen_rtx (PLUS, Pmode,
2455 args_addr, args_so_far),
2458 /* TEMP is the address of the block. Copy the data there. */
2459 if (GET_CODE (size) == CONST_INT
2460 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2463 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2464 INTVAL (size), align);
2467 /* Try the most limited insn first, because there's no point
2468 including more than one in the machine description unless
2469 the more limited one has some advantage. */
2470 #ifdef HAVE_movstrqi
2472 && GET_CODE (size) == CONST_INT
2473 && ((unsigned) INTVAL (size)
2474 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2476 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2477 xinner, size, GEN_INT (align));
2485 #ifdef HAVE_movstrhi
2487 && GET_CODE (size) == CONST_INT
2488 && ((unsigned) INTVAL (size)
2489 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2491 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2492 xinner, size, GEN_INT (align));
2500 #ifdef HAVE_movstrsi
2503 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2504 xinner, size, GEN_INT (align));
2512 #ifdef HAVE_movstrdi
2515 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2516 xinner, size, GEN_INT (align));
2525 #ifndef ACCUMULATE_OUTGOING_ARGS
2526 /* If the source is referenced relative to the stack pointer,
2527 copy it to another register to stabilize it. We do not need
2528 to do this if we know that we won't be changing sp. */
2530 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2531 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2532 temp = copy_to_reg (temp);
2535 /* Make inhibit_defer_pop nonzero around the library call
2536 to force it to pop the bcopy-arguments right away. */
2538 #ifdef TARGET_MEM_FUNCTIONS
2539 emit_library_call (memcpy_libfunc, 0,
2540 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2541 convert_to_mode (TYPE_MODE (sizetype),
2542 size, TREE_UNSIGNED (sizetype)),
2543 TYPE_MODE (sizetype));
2545 emit_library_call (bcopy_libfunc, 0,
2546 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2547 convert_to_mode (TYPE_MODE (integer_type_node),
2549 TREE_UNSIGNED (integer_type_node)),
2550 TYPE_MODE (integer_type_node));
2555 else if (partial > 0)
2557 /* Scalar partly in registers. */
2559 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2562 /* # words of start of argument
2563 that we must make space for but need not store. */
2564 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2565 int args_offset = INTVAL (args_so_far);
2568 /* Push padding now if padding above and stack grows down,
2569 or if padding below and stack grows up.
2570 But if space already allocated, this has already been done. */
2571 if (extra && args_addr == 0
2572 && where_pad != none && where_pad != stack_direction)
2573 anti_adjust_stack (GEN_INT (extra));
2575 /* If we make space by pushing it, we might as well push
2576 the real data. Otherwise, we can leave OFFSET nonzero
2577 and leave the space uninitialized. */
2581 /* Now NOT_STACK gets the number of words that we don't need to
2582 allocate on the stack. */
2583 not_stack = partial - offset;
2585 /* If the partial register-part of the arg counts in its stack size,
2586 skip the part of stack space corresponding to the registers.
2587 Otherwise, start copying to the beginning of the stack space,
2588 by setting SKIP to 0. */
2589 #ifndef REG_PARM_STACK_SPACE
2595 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2596 x = validize_mem (force_const_mem (mode, x));
2598 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2599 SUBREGs of such registers are not allowed. */
2600 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2601 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2602 x = copy_to_reg (x);
2604 /* Loop over all the words allocated on the stack for this arg. */
2605 /* We can do it by words, because any scalar bigger than a word
2606 has a size that is a multiple of a word.  */
2607 #ifndef PUSH_ARGS_REVERSED
2608 for (i = not_stack; i < size; i++)
2610 for (i = size - 1; i >= not_stack; i--)
2612 if (i >= not_stack + offset)
2613 emit_push_insn (operand_subword_force (x, i, mode),
2614 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2616 GEN_INT (args_offset + ((i - not_stack + skip)
2617 * UNITS_PER_WORD)));
2623 /* Push padding now if padding above and stack grows down,
2624 or if padding below and stack grows up.
2625 But if space already allocated, this has already been done. */
2626 if (extra && args_addr == 0
2627 && where_pad != none && where_pad != stack_direction)
2628 anti_adjust_stack (GEN_INT (extra));
2630 #ifdef PUSH_ROUNDING
2632 addr = gen_push_operand ();
2635 if (GET_CODE (args_so_far) == CONST_INT)
2637 = memory_address (mode,
2638 plus_constant (args_addr, INTVAL (args_so_far)));
2640 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2643 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2647 /* If part should go in registers, copy that part
2648 into the appropriate registers. Do this now, at the end,
2649 since mem-to-mem copies above may do function calls. */
2650 if (partial > 0 && reg != 0)
2651 move_block_to_reg (REGNO (reg), x, partial, mode);
2653 if (extra && args_addr == 0 && where_pad == stack_direction)
2654 anti_adjust_stack (GEN_INT (extra));
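/* An illustrative sketch (not part of the compiler itself) of when the
   EXTRA padding is pushed before rather than after the data above: before
   when the padding direction disagrees with the stack direction, after
   when they agree, and never when an argument block was preallocated.
   The predicate below is hypothetical.  */
#if 0 /* Illustrative sketch only.  */
static int
pad_pushed_before_data (pad_is_downward, stack_grows_downward)
     int pad_is_downward, stack_grows_downward;
{
  /* "Downward" padding means the padding sits at lower addresses than
     the data itself.  */
  return pad_is_downward != stack_grows_downward;
}
#endif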
2657 /* Expand an assignment that stores the value of FROM into TO.
2658 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2659 (This may contain a QUEUED rtx;
2660 if the value is constant, this rtx is a constant.)
2661 Otherwise, the returned value is NULL_RTX.
2663 SUGGEST_REG is no longer actually used.
2664 It used to mean, copy the value through a register
2665 and return that register, if that is possible.
2666 We now use WANT_VALUE to decide whether to do this. */
2669 expand_assignment (to, from, want_value, suggest_reg)
2674 register rtx to_rtx = 0;
2677 /* Don't crash if the lhs of the assignment was erroneous. */
2679 if (TREE_CODE (to) == ERROR_MARK)
2681 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2682 return want_value ? result : NULL_RTX;
2685 if (output_bytecode)
2687 tree dest_innermost;
2689 bc_expand_expr (from);
2690 bc_emit_instruction (duplicate);
2692 dest_innermost = bc_expand_address (to);
2694 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2695 take care of it here. */
2697 bc_store_memory (TREE_TYPE (to), dest_innermost);
2701 /* Assignment of a structure component needs special treatment
2702 if the structure component's rtx is not simply a MEM.
2703 Assignment of an array element at a constant index, and assignment of
2704 an array element in an unaligned packed structure field, have the same problem.  */
2707 if (TREE_CODE (to) == COMPONENT_REF
2708 || TREE_CODE (to) == BIT_FIELD_REF
2709 || (TREE_CODE (to) == ARRAY_REF
2710 && ((TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2711 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST)
2712 || (SLOW_UNALIGNED_ACCESS && get_inner_unaligned_p (to)))))
2714 enum machine_mode mode1;
2724 tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2725 &mode1, &unsignedp, &volatilep);
2727 /* If we are going to use store_bit_field and extract_bit_field,
2728 make sure to_rtx will be safe for multiple use. */
2730 if (mode1 == VOIDmode && want_value)
2731 tem = stabilize_reference (tem);
2733 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
2734 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2737 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2739 if (GET_CODE (to_rtx) != MEM)
2741 to_rtx = change_address (to_rtx, VOIDmode,
2742 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
2743 force_reg (ptr_mode, offset_rtx)));
2744 /* If we have a variable offset, the known alignment
2745 is only that of the innermost structure containing the field.
2746 (Actually, we could sometimes do better by using the
2747 align of an element of the innermost array, but no need.) */
2748 if (TREE_CODE (to) == COMPONENT_REF
2749 || TREE_CODE (to) == BIT_FIELD_REF)
2751 = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (to, 0))) / BITS_PER_UNIT;
2755 if (GET_CODE (to_rtx) == MEM)
2757 /* When the offset is zero, to_rtx is the address of the
2758 structure we are storing into, and hence may be shared.
2759 We must make a new MEM before setting the volatile bit. */
2761 to_rtx = change_address (to_rtx, VOIDmode, XEXP (to_rtx, 0));
2762 MEM_VOLATILE_P (to_rtx) = 1;
2764 #if 0 /* This was turned off because, when a field is volatile
2765 in an object which is not volatile, the object may be in a register,
2766 and then we would abort over here. */
2772 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2774 /* Spurious cast makes HPUX compiler happy. */
2775 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2778 /* Required alignment of containing datum. */
2780 int_size_in_bytes (TREE_TYPE (tem)));
2781 preserve_temp_slots (result);
2785 /* If the value is meaningful, convert RESULT to the proper mode.
2786 Otherwise, return nothing. */
2787 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2788 TYPE_MODE (TREE_TYPE (from)),
2790 TREE_UNSIGNED (TREE_TYPE (to)))
2794 /* If the rhs is a function call and its value is not an aggregate,
2795 call the function before we start to compute the lhs.
2796 This is needed for correct code for cases such as
2797 val = setjmp (buf) on machines where reference to val
2798 requires loading up part of an address in a separate insn.
2800 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2801 a promoted variable where the zero- or sign-extension needs to be done.
2802 Handling this in the normal way is safe because no computation is done before the call.  */
2804 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2805 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
2806 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
2811 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2813 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2815 if (GET_MODE (to_rtx) == BLKmode)
2816 emit_block_move (to_rtx, value, expr_size (from),
2817 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
2819 emit_move_insn (to_rtx, value);
2820 preserve_temp_slots (to_rtx);
2823 return want_value ? to_rtx : NULL_RTX;
2826 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2827 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2830 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2832 /* Don't move directly into a return register. */
2833 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2838 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2839 emit_move_insn (to_rtx, temp);
2840 preserve_temp_slots (to_rtx);
2843 return want_value ? to_rtx : NULL_RTX;
2846 /* In case we are returning the contents of an object which overlaps
2847 the place the value is being stored, use a safe function when copying
2848 a value through a pointer into a structure value return block. */
2849 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2850 && current_function_returns_struct
2851 && !current_function_returns_pcc_struct)
2856 size = expr_size (from);
2857 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2859 #ifdef TARGET_MEM_FUNCTIONS
2860 emit_library_call (memcpy_libfunc, 0,
2861 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2862 XEXP (from_rtx, 0), Pmode,
2863 convert_to_mode (TYPE_MODE (sizetype),
2864 size, TREE_UNSIGNED (sizetype)),
2865 TYPE_MODE (sizetype));
2867 emit_library_call (bcopy_libfunc, 0,
2868 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2869 XEXP (to_rtx, 0), Pmode,
2870 convert_to_mode (TYPE_MODE (integer_type_node),
2871 size, TREE_UNSIGNED (integer_type_node)),
2872 TYPE_MODE (integer_type_node));
2875 preserve_temp_slots (to_rtx);
2878 return want_value ? to_rtx : NULL_RTX;
2881 /* Compute FROM and store the value in the rtx we got. */
2884 result = store_expr (from, to_rtx, want_value);
2885 preserve_temp_slots (result);
2888 return want_value ? result : NULL_RTX;
2891 /* Generate code for computing expression EXP,
2892 and storing the value into TARGET.
2893 TARGET may contain a QUEUED rtx.
2895 If WANT_VALUE is nonzero, return a copy of the value
2896 not in TARGET, so that we can be sure to use the proper
2897 value in a containing expression even if TARGET has something
2898 else stored in it. If possible, we copy the value through a pseudo
2899 and return that pseudo. Or, if the value is constant, we try to
2900 return the constant. In some cases, we return a pseudo
2901 copied *from* TARGET.
2903 If the mode is BLKmode then we may return TARGET itself.
2904 It turns out that in BLKmode it doesn't cause a problem,
2905 because C has no operators that could combine two different
2906 assignments into the same BLKmode object with different values
2907 with no sequence point.  Will other languages need this to be true?
2910 If WANT_VALUE is 0, we return NULL, to make sure
2911 to catch quickly any cases where the caller uses the value
2912 and fails to set WANT_VALUE. */
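/* An illustrative sketch (not part of the compiler itself) of the two ways
   store_expr is called, matching the WANT_VALUE contract described above.
   EXP and TARGET are assumed to be a valid tree and rtx; the wrapper name
   is hypothetical.  */
#if 0 /* Illustrative sketch only.  */
static rtx
store_expr_usage (exp, target)
     tree exp;
     rtx target;
{
  /* Side effect only: perform the store and ignore the (null) result.  */
  store_expr (exp, target, 0);

  /* Value wanted: the result may be TARGET, a pseudo copied from it, or
     a constant, and is safe to use in a containing expression.  */
  return store_expr (exp, target, 1);
}
#endif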
2915 store_expr (exp, target, want_value)
2917 register rtx target;
2921 int dont_return_target = 0;
2923 if (TREE_CODE (exp) == COMPOUND_EXPR)
2925 /* Perform first part of compound expression, then assign from second part.  */
2927 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2929 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
2931 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2933 /* For conditional expression, get safe form of the target. Then
2934 test the condition, doing the appropriate assignment on either
2935 side. This avoids the creation of unnecessary temporaries.
2936 For non-BLKmode, it is more efficient not to do this. */
2938 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2941 target = protect_from_queue (target, 1);
2943 do_pending_stack_adjust ();
2945 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2946 store_expr (TREE_OPERAND (exp, 1), target, 0);
2948 emit_jump_insn (gen_jump (lab2));
2951 store_expr (TREE_OPERAND (exp, 2), target, 0);
2955 return want_value ? target : NULL_RTX;
2957 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
2958 && GET_MODE (target) != BLKmode)
2959 /* If target is in memory and caller wants value in a register instead,
2960 arrange that. Pass TARGET as target for expand_expr so that,
2961 if EXP is another assignment, WANT_VALUE will be nonzero for it.
2962 We know expand_expr will not use the target in that case.
2963 Don't do this if TARGET is volatile because we are supposed
2964 to write it and then read it. */
2966 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2967 GET_MODE (target), 0);
2968 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2969 temp = copy_to_reg (temp);
2970 dont_return_target = 1;
2972 else if (queued_subexp_p (target))
2973 /* If target contains a postincrement, let's not risk
2974 using it as the place to generate the rhs. */
2976 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2978 /* Expand EXP into a new pseudo. */
2979 temp = gen_reg_rtx (GET_MODE (target));
2980 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2983 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2985 /* If target is volatile, ANSI requires accessing the value
2986 *from* the target, if it is accessed. So make that happen.
2987 In no case return the target itself. */
2988 if (! MEM_VOLATILE_P (target) && want_value)
2989 dont_return_target = 1;
2991 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2992 /* If this is a scalar in a register that is stored in a wider mode
2993 than the declared mode, compute the result into its declared mode
2994 and then convert to the wider mode.  Our value is the computed expression.  */
2997 /* If we don't want a value, we can do the conversion inside EXP,
2998 which will often result in some optimizations. Do the conversion
2999 in two steps: first change the signedness, if needed, then
3003 if (TREE_UNSIGNED (TREE_TYPE (exp))
3004 != SUBREG_PROMOTED_UNSIGNED_P (target))
3007 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3011 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3012 SUBREG_PROMOTED_UNSIGNED_P (target)),
3016 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3018 /* If TEMP is a volatile MEM and we want a result value, make
3019 the access now so it gets done only once. Likewise if
3020 it contains TARGET. */
3021 if (GET_CODE (temp) == MEM && want_value
3022 && (MEM_VOLATILE_P (temp)
3023 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3024 temp = copy_to_reg (temp);
3026 /* If TEMP is a VOIDmode constant, use convert_modes to make
3027 sure that we properly convert it. */
3028 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3029 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3030 TYPE_MODE (TREE_TYPE (exp)), temp,
3031 SUBREG_PROMOTED_UNSIGNED_P (target));
3033 convert_move (SUBREG_REG (target), temp,
3034 SUBREG_PROMOTED_UNSIGNED_P (target));
3035 return want_value ? temp : NULL_RTX;
3039 temp = expand_expr (exp, target, GET_MODE (target), 0);
3040 /* Return TARGET if it's a specified hardware register.
3041 If TARGET is a volatile mem ref, either return TARGET
3042 or return a reg copied *from* TARGET; ANSI requires this.
3044 Otherwise, if TEMP is not TARGET, return TEMP
3045 if it is constant (for efficiency),
3046 or if we really want the correct value. */
3047 if (!(target && GET_CODE (target) == REG
3048 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3049 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3051 && (CONSTANT_P (temp) || want_value))
3052 dont_return_target = 1;
3055 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3056 the same as that of TARGET, adjust the constant. This is needed, for
3057 example, in case it is a CONST_DOUBLE and we want only a word-sized value.  */
3059 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3060 && TREE_CODE (exp) != ERROR_MARK
3061 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3062 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3063 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3065 /* If value was not generated in the target, store it there.
3066 Convert the value to TARGET's type first if necessary.  */
3068 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
3070 target = protect_from_queue (target, 1);
3071 if (GET_MODE (temp) != GET_MODE (target)
3072 && GET_MODE (temp) != VOIDmode)
3074 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3075 if (dont_return_target)
3077 /* In this case, we will return TEMP,
3078 so make sure it has the proper mode.
3079 But don't forget to store the value into TARGET. */
3080 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3081 emit_move_insn (target, temp);
3084 convert_move (target, temp, unsignedp);
3087 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3089 /* Handle copying a string constant into an array.
3090 The string constant may be shorter than the array.
3091 So copy just the string's actual length, and clear the rest. */
3095 /* Get the size of the data type of the string,
3096 which is actually the size of the target. */
3097 size = expr_size (exp);
3098 if (GET_CODE (size) == CONST_INT
3099 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3100 emit_block_move (target, temp, size,
3101 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3104 /* Compute the size of the data to copy from the string. */
3106 = size_binop (MIN_EXPR,
3107 make_tree (sizetype, size),
3109 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3110 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3114 /* Copy that much. */
3115 emit_block_move (target, temp, copy_size_rtx,
3116 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3118 /* Figure out how much is left in TARGET that we have to clear.
3119 Do all calculations in ptr_mode. */
3121 addr = XEXP (target, 0);
3122 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3124 if (GET_CODE (copy_size_rtx) == CONST_INT)
3126 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3127 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3131 addr = force_reg (ptr_mode, addr);
3132 addr = expand_binop (ptr_mode, add_optab, addr,
3133 copy_size_rtx, NULL_RTX, 0,
3136 size = expand_binop (ptr_mode, sub_optab, size,
3137 copy_size_rtx, NULL_RTX, 0,
3140 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
3141 GET_MODE (size), 0, 0);
3142 label = gen_label_rtx ();
3143 emit_jump_insn (gen_blt (label));
3146 if (size != const0_rtx)
3148 #ifdef TARGET_MEM_FUNCTIONS
3149 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3151 const0_rtx, TYPE_MODE (integer_type_node),
3152 convert_to_mode (TYPE_MODE (sizetype),
3154 TREE_UNSIGNED (sizetype)),
3155 TYPE_MODE (sizetype));
3157 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3159 convert_to_mode (TYPE_MODE (integer_type_node),
3161 TREE_UNSIGNED (integer_type_node)),
3162 TYPE_MODE (integer_type_node));
3170 else if (GET_MODE (temp) == BLKmode)
3171 emit_block_move (target, temp, expr_size (exp),
3172 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3174 emit_move_insn (target, temp);
3177 /* If we don't want a value, return NULL_RTX. */
3181 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3182 ??? The latter test doesn't seem to make sense. */
3183 else if (dont_return_target && GET_CODE (temp) != MEM)
3186 /* Otherwise, if a value is wanted, copy TARGET into a pseudo, unless it is BLKmode or a hard register; in those cases TARGET itself is returned below.  */
3187 else if (want_value && GET_MODE (target) != BLKmode
3188 && ! (GET_CODE (target) == REG
3189 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3190 return copy_to_reg (target);
3196 /* Return 1 if EXP just contains zeros. */
3204 switch (TREE_CODE (exp))
3208 case NON_LVALUE_EXPR:
3209 return is_zeros_p (TREE_OPERAND (exp, 0));
3212 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3216 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3219 return REAL_VALUES_EQUAL (TREE_REAL_CST (exp), dconst0);
3222 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3223 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3224 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3225 if (! is_zeros_p (TREE_VALUE (elt)))
3234 /* Return 1 if EXP contains mostly (at least 3/4) zeros.  */
3237 mostly_zeros_p (exp)
3240 if (TREE_CODE (exp) == CONSTRUCTOR)
3242 int elts = 0, zeros = 0;
3243 tree elt = CONSTRUCTOR_ELTS (exp);
3244 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3246 /* If there are no ranges of true bits, it is all zero. */
3247 return elt == NULL_TREE;
3249 for (; elt; elt = TREE_CHAIN (elt))
3251 /* We do not handle the case where the index is a RANGE_EXPR,
3252 so the statistic will be somewhat inaccurate.
3253 We do make a more accurate count in store_constructor itself,
3254 and since this function is only used for nested array elements,
3255 this should be close enough. */
3256 if (mostly_zeros_p (TREE_VALUE (elt)))
3261 return 4 * zeros >= 3 * elts;
3264 return is_zeros_p (exp);
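/* An illustrative sketch (not part of the compiler itself) of the integer
   form of the 3/4 test used above: zeros / elts >= 3/4 is rewritten as
   4 * zeros >= 3 * elts, which needs no division.  */
#if 0 /* Illustrative sketch only.  */
static int
at_least_three_quarters (zeros, elts)
     int zeros, elts;
{
  /* 3 of 4 passes (12 >= 12); 2 of 3 does not (8 < 9).  */
  return 4 * zeros >= 3 * elts;
}
#endif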
3267 /* Helper function for store_constructor.
3268 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3269 TYPE is the type of the CONSTRUCTOR, not the element type.
3270 CLEARED is as for store_constructor.
3272 This provides a recursive shortcut back to store_constructor when it isn't
3273 necessary to go through store_field. This is so that we can pass through
3274 the cleared field to let store_constructor know that we may not have to
3275 clear a substructure if the outer structure has already been cleared. */
3278 store_constructor_field (target, bitsize, bitpos,
3279 mode, exp, type, cleared)
3281 int bitsize, bitpos;
3282 enum machine_mode mode;
3286 if (TREE_CODE (exp) == CONSTRUCTOR
3287 && bitpos % BITS_PER_UNIT == 0
3288 /* If we have a non-zero bitpos for a register target, then we just
3289 let store_field do the bitfield handling. This is unlikely to
3290 generate unnecessary clear instructions anyway.  */
3291 && (bitpos == 0 || GET_CODE (target) == MEM))
3294 target = change_address (target, VOIDmode,
3295 plus_constant (XEXP (target, 0),
3296 bitpos / BITS_PER_UNIT));
3297 store_constructor (exp, target, cleared);
3300 store_field (target, bitsize, bitpos, mode, exp,
3301 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3302 int_size_in_bytes (type));
3305 /* Store the value of constructor EXP into the rtx TARGET.
3306 TARGET is either a REG or a MEM.
3307 CLEARED is true if TARGET is known to have been zero'd. */
3310 store_constructor (exp, target, cleared)
3315 tree type = TREE_TYPE (exp);
3317 /* We know our target cannot conflict, since safe_from_p has been called. */
3319 /* Don't try copying piece by piece into a hard register
3320 since that is vulnerable to being clobbered by EXP.
3321 Instead, construct in a pseudo register and then copy it all. */
3322 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3324 rtx temp = gen_reg_rtx (GET_MODE (target));
3325 store_constructor (exp, temp, 0);
3326 emit_move_insn (target, temp);
3331 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3332 || TREE_CODE (type) == QUAL_UNION_TYPE)
3336 /* Inform later passes that the whole union value is dead. */
3337 if (TREE_CODE (type) == UNION_TYPE
3338 || TREE_CODE (type) == QUAL_UNION_TYPE)
3339 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3341 /* If we are building a static constructor into a register,
3342 set the initial value as zero so we can fold the value into
3343 a constant. But if more than one register is involved,
3344 this probably loses. */
3345 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3346 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
3349 emit_move_insn (target, const0_rtx);
3354 /* If the constructor has fewer fields than the structure
3355 or if we are initializing the structure to mostly zeros,
3356 clear the whole structure first. */
3357 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3358 != list_length (TYPE_FIELDS (type)))
3359 || mostly_zeros_p (exp))
3362 clear_storage (target, expr_size (exp),
3363 TYPE_ALIGN (type) / BITS_PER_UNIT);
3368 /* Inform later passes that the old value is dead. */
3369 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3371 /* Store each element of the constructor into
3372 the corresponding field of TARGET. */
3374 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3376 register tree field = TREE_PURPOSE (elt);
3377 register enum machine_mode mode;
3381 tree pos, constant = 0, offset = 0;
3382 rtx to_rtx = target;
3384 /* Just ignore missing fields.
3385 We cleared the whole structure, above,
3386 if any fields are missing. */
3390 if (cleared && is_zeros_p (TREE_VALUE (elt)))
3393 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3394 unsignedp = TREE_UNSIGNED (field);
3395 mode = DECL_MODE (field);
3396 if (DECL_BIT_FIELD (field))
3399 pos = DECL_FIELD_BITPOS (field);
3400 if (TREE_CODE (pos) == INTEGER_CST)
3402 else if (TREE_CODE (pos) == PLUS_EXPR
3403 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3404 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3409 bitpos = TREE_INT_CST_LOW (constant);
3415 if (contains_placeholder_p (offset))
3416 offset = build (WITH_RECORD_EXPR, sizetype,
3419 offset = size_binop (FLOOR_DIV_EXPR, offset,
3420 size_int (BITS_PER_UNIT));
3422 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3423 if (GET_CODE (to_rtx) != MEM)
3427 = change_address (to_rtx, VOIDmode,
3428 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
3429 force_reg (ptr_mode, offset_rtx)));
3431 if (TREE_READONLY (field))
3433 if (GET_CODE (to_rtx) == MEM)
3434 to_rtx = change_address (to_rtx, GET_MODE (to_rtx),
3436 RTX_UNCHANGING_P (to_rtx) = 1;
3439 store_constructor_field (to_rtx, bitsize, bitpos,
3440 mode, TREE_VALUE (elt), type, cleared);
3443 else if (TREE_CODE (type) == ARRAY_TYPE)
3448 tree domain = TYPE_DOMAIN (type);
3449 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3450 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3451 tree elttype = TREE_TYPE (type);
3453 /* If the constructor has fewer elements than the array,
3454 clear the whole array first.  Similarly if this is a
3455 static constructor of a non-BLKmode object.  */
3456 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3460 HOST_WIDE_INT count = 0, zero_count = 0;
3462 /* This loop is a more accurate version of the loop in
3463 mostly_zeros_p (it handles RANGE_EXPR in an index).
3464 It is also needed to check for missing elements. */
3465 for (elt = CONSTRUCTOR_ELTS (exp);
3467 elt = TREE_CHAIN (elt), i++)
3469 tree index = TREE_PURPOSE (elt);
3470 HOST_WIDE_INT this_node_count;
3471 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3473 tree lo_index = TREE_OPERAND (index, 0);
3474 tree hi_index = TREE_OPERAND (index, 1);
3475 if (TREE_CODE (lo_index) != INTEGER_CST
3476 || TREE_CODE (hi_index) != INTEGER_CST)
3481 this_node_count = TREE_INT_CST_LOW (hi_index)
3482 - TREE_INT_CST_LOW (lo_index) + 1;
3485 this_node_count = 1;
3486 count += this_node_count;
3487 if (mostly_zeros_p (TREE_VALUE (elt)))
3488 zero_count += this_node_count;
3490 /* Clear the entire array first if there are any missing elements,
3491 or if the incidence of zero elements is >= 75%. */
3492 if (count < maxelt - minelt + 1
3493 || 4 * zero_count >= 3 * count)
3499 clear_storage (target, expr_size (exp),
3500 TYPE_ALIGN (type) / BITS_PER_UNIT);
3504 /* Inform later passes that the old value is dead. */
3505 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3507 /* Store each element of the constructor into
3508 the corresponding element of TARGET, determined
3509 by counting the elements. */
3510 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3512 elt = TREE_CHAIN (elt), i++)
3514 register enum machine_mode mode;
3518 tree value = TREE_VALUE (elt);
3519 tree index = TREE_PURPOSE (elt);
3520 rtx xtarget = target;
3522 if (cleared && is_zeros_p (value))
3525 mode = TYPE_MODE (elttype);
3526 bitsize = GET_MODE_BITSIZE (mode);
3527 unsignedp = TREE_UNSIGNED (elttype);
3529 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3531 tree lo_index = TREE_OPERAND (index, 0);
3532 tree hi_index = TREE_OPERAND (index, 1);
3533 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
3534 struct nesting *loop;
3535 HOST_WIDE_INT lo, hi, count;
3538 /* If the range is constant and "small", unroll the loop. */
3539 if (TREE_CODE (lo_index) == INTEGER_CST
3540 && TREE_CODE (hi_index) == INTEGER_CST
3541 && (lo = TREE_INT_CST_LOW (lo_index),
3542 hi = TREE_INT_CST_LOW (hi_index),
3543 count = hi - lo + 1,
3544 (GET_CODE (target) != MEM
3546 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
3547 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
3550 lo -= minelt; hi -= minelt;
3551 for (; lo <= hi; lo++)
3553 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
3554 store_constructor_field (target, bitsize, bitpos,
3555 mode, value, type, cleared);
3560 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
3561 loop_top = gen_label_rtx ();
3562 loop_end = gen_label_rtx ();
3564 unsignedp = TREE_UNSIGNED (domain);
3566 index = build_decl (VAR_DECL, NULL_TREE, domain);
3568 DECL_RTL (index) = index_r
3569 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
3572 if (TREE_CODE (value) == SAVE_EXPR
3573 && SAVE_EXPR_RTL (value) == 0)
3575 /* Make sure value gets expanded once before the loop. */
3576 expand_expr (value, const0_rtx, VOIDmode, 0);
3579 store_expr (lo_index, index_r, 0);
3580 loop = expand_start_loop (0);
3582 /* Assign value to element index. */
3583 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3584 size_int (BITS_PER_UNIT));
3585 position = size_binop (MULT_EXPR,
3586 size_binop (MINUS_EXPR, index,
3587 TYPE_MIN_VALUE (domain)),
3589 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3590 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3591 xtarget = change_address (target, mode, addr);
3592 if (TREE_CODE (value) == CONSTRUCTOR)
3593 store_constructor (value, xtarget, cleared);
3595 store_expr (value, xtarget, 0);
3597 expand_exit_loop_if_false (loop,
3598 build (LT_EXPR, integer_type_node,
3601 expand_increment (build (PREINCREMENT_EXPR,
3603 index, integer_one_node), 0, 0);
3605 emit_label (loop_end);
3607 /* Needed by stupid register allocation, to extend the
3608 lifetime of pseudo-regs used by target past the end of the loop.  */
3610 emit_insn (gen_rtx (USE, GET_MODE (target), target));
3613 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
3614 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
3620 index = size_int (i);
3623 index = size_binop (MINUS_EXPR, index,
3624 TYPE_MIN_VALUE (domain));
3625 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3626 size_int (BITS_PER_UNIT));
3627 position = size_binop (MULT_EXPR, index, position);
3628 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3629 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3630 xtarget = change_address (target, mode, addr);
3631 store_expr (value, xtarget, 0);
3636 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
3637 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3639 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3640 store_constructor_field (target, bitsize, bitpos,
3641 mode, value, type, cleared);
3645 /* set constructor assignments */
3646 else if (TREE_CODE (type) == SET_TYPE)
3648 tree elt = CONSTRUCTOR_ELTS (exp);
3649 rtx xtarget = XEXP (target, 0);
3650 int set_word_size = TYPE_ALIGN (type);
3651 int nbytes = int_size_in_bytes (type), nbits;
3652 tree domain = TYPE_DOMAIN (type);
3653 tree domain_min, domain_max, bitlength;
3655 /* The default implementation strategy is to extract the constant
3656 parts of the constructor, use that to initialize the target,
3657 and then "or" in whatever non-constant ranges we need in addition.
3659 If a large set is all zero or all ones, it is
3660 probably better to set it using memset (if available) or bzero.
3661 Also, if a large set has just a single range, it may also be
3662 better to first clear the whole set (using
3663 bzero/memset), and then set the bits we want.  */
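/* An illustrative sketch (not part of the compiler itself) of the bit
   packing used below for the constant part of a set: on a big-endian
   target bit 0 of the set lands in the most significant position of the
   word, on a little-endian target in the least significant one.  The
   helper and its BIG_ENDIAN flag are hypothetical stand-ins for the
   BYTES_BIG_ENDIAN logic.  */
#if 0 /* Illustrative sketch only.  */
static unsigned long
pack_set_word (bits, word_size, big_endian)
     char *bits;
     int word_size, big_endian;
{
  unsigned long word = 0;
  int i;

  for (i = 0; i < word_size; i++)
    if (bits[i])
      word |= 1UL << (big_endian ? word_size - 1 - i : i);
  return word;
}
#endif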
3665 /* Check for all zeros. */
3666 if (elt == NULL_TREE)
3669 clear_storage (target, expr_size (exp),
3670 TYPE_ALIGN (type) / BITS_PER_UNIT);
3674 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3675 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3676 bitlength = size_binop (PLUS_EXPR,
3677 size_binop (MINUS_EXPR, domain_max, domain_min),
3680 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
3682 nbits = TREE_INT_CST_LOW (bitlength);
3684 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
3685 are "complicated" (more than one range), initialize (the
3686 constant parts) by copying from a constant. */
3687 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
3688 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
3690 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3691 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
3692 char *bit_buffer = (char*) alloca (nbits);
3693 HOST_WIDE_INT word = 0;
3696 int offset = 0; /* In bytes from beginning of set. */
3697 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
3700 if (bit_buffer[ibit])
3702 if (BYTES_BIG_ENDIAN)
3703 word |= (1 << (set_word_size - 1 - bit_pos));
3705 word |= 1 << bit_pos;
3708 if (bit_pos >= set_word_size || ibit == nbits)
3710 if (word != 0 || ! cleared)
3712 rtx datum = GEN_INT (word);
3714 /* The assumption here is that it is safe to use XEXP if
3715 the set is multi-word, but not if it's single-word. */
3716 if (GET_CODE (target) == MEM)
3718 to_rtx = plus_constant (XEXP (target, 0), offset);
3719 to_rtx = change_address (target, mode, to_rtx);
3721 else if (offset == 0)
3725 emit_move_insn (to_rtx, datum);
3731 offset += set_word_size / BITS_PER_UNIT;
3737 /* Don't bother clearing storage if the set is all ones. */
3738 if (TREE_CHAIN (elt) != NULL_TREE
3739 || (TREE_PURPOSE (elt) == NULL_TREE
3741 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
3742 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
3743 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
3744 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
3746 clear_storage (target, expr_size (exp),
3747 TYPE_ALIGN (type) / BITS_PER_UNIT);
3750 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
3752 /* start of range of element or NULL */
3753 tree startbit = TREE_PURPOSE (elt);
3754 /* end of range of element, or element value */
3755 tree endbit = TREE_VALUE (elt);
3756 HOST_WIDE_INT startb, endb;
3757 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
3759 bitlength_rtx = expand_expr (bitlength,
3760 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
3762 /* handle non-range tuple element like [ expr ] */
3763 if (startbit == NULL_TREE)
3765 startbit = save_expr (endbit);
3768 startbit = convert (sizetype, startbit);
3769 endbit = convert (sizetype, endbit);
3770 if (! integer_zerop (domain_min))
3772 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
3773 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
3775 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
3776 EXPAND_CONST_ADDRESS);
3777 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
3778 EXPAND_CONST_ADDRESS);
3782 targetx = assign_stack_temp (GET_MODE (target),
3783 GET_MODE_SIZE (GET_MODE (target)),
3785 emit_move_insn (targetx, target);
3787 else if (GET_CODE (target) == MEM)
3792 #ifdef TARGET_MEM_FUNCTIONS
3793 /* Optimization: If startbit and endbit are
3794 constants divisible by BITS_PER_UNIT,
3795 call memset instead. */
3796 if (TREE_CODE (startbit) == INTEGER_CST
3797 && TREE_CODE (endbit) == INTEGER_CST
3798 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
3799 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
3801 emit_library_call (memset_libfunc, 0,
3803 plus_constant (XEXP (targetx, 0),
3804 startb / BITS_PER_UNIT),
3806 constm1_rtx, TYPE_MODE (integer_type_node),
3807 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3808 TYPE_MODE (sizetype));
3813 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
3814 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
3815 bitlength_rtx, TYPE_MODE (sizetype),
3816 startbit_rtx, TYPE_MODE (sizetype),
3817 endbit_rtx, TYPE_MODE (sizetype));
3820 emit_move_insn (target, targetx);
3828 /* Store the value of EXP (an expression tree)
3829 into a subfield of TARGET which has mode MODE and occupies
3830 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3831 If MODE is VOIDmode, it means that we are storing into a bit-field.
3833 If VALUE_MODE is VOIDmode, return nothing in particular.
3834 UNSIGNEDP is not used in this case.
3836 Otherwise, return an rtx for the value stored. This rtx
3837 has mode VALUE_MODE if that is convenient to do.
3838 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3840 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3841 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
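/* An illustrative sketch (not part of the compiler itself) of the two
   idioms store_field uses below when the caller wants the stored value
   back: mask with (1 << bitsize) - 1, or shift left and back right by
   (width - bitsize).  Which one is chosen depends on conditions not
   repeated here; the helpers are hypothetical.  */
#if 0 /* Illustrative sketch only.  */
static unsigned long
low_bits_by_mask (x, bitsize)
     unsigned long x;
     int bitsize;
{
  /* Assumes BITSIZE is smaller than the width of an unsigned long.  */
  return x & ((1UL << bitsize) - 1);
}

static unsigned long
low_bits_by_shift (x, bitsize, width)
     unsigned long x;
     int bitsize, width;
{
  /* Push the field to the top of the word and back down, discarding
     everything above BITSIZE bits; WIDTH is the word width in bits.  */
  return (x << (width - bitsize)) >> (width - bitsize);
}
#endif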
3844 store_field (target, bitsize, bitpos, mode, exp, value_mode,
3845 unsignedp, align, total_size)
3847 int bitsize, bitpos;
3848 enum machine_mode mode;
3850 enum machine_mode value_mode;
3855 HOST_WIDE_INT width_mask = 0;
3857 if (bitsize < HOST_BITS_PER_WIDE_INT)
3858 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
3860 /* If we are storing into an unaligned field of an aligned union that is
3861 in a register, we may have the mode of TARGET being an integer mode but
3862 MODE == BLKmode. In that case, get an aligned object whose size and
3863 alignment are the same as TARGET and store TARGET into it (we can avoid
3864 the store if the field being stored is the entire width of TARGET). Then
3865 call ourselves recursively to store the field into a BLKmode version of
3866 that object. Finally, load from the object into TARGET. This is not
3867 very efficient in general, but should only be slightly more expensive
3868 than the otherwise-required unaligned accesses. Perhaps this can be
3869 cleaned up later. */
3872 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
3874 rtx object = assign_stack_temp (GET_MODE (target),
3875 GET_MODE_SIZE (GET_MODE (target)), 0);
3876 rtx blk_object = copy_rtx (object);
3878 MEM_IN_STRUCT_P (object) = 1;
3879 MEM_IN_STRUCT_P (blk_object) = 1;
3880 PUT_MODE (blk_object, BLKmode);
3882 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
3883 emit_move_insn (object, target);
3885 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
3888 /* Even though we aren't returning target, we need to
3889 give it the updated value. */
3890 emit_move_insn (target, object);
3895 /* If the structure is in a register or if the component
3896 is a bit field, we cannot use addressing to access it.
3897 Use bit-field techniques or SUBREG to store in it. */
3899 if (mode == VOIDmode
3900 || (mode != BLKmode && ! direct_store[(int) mode])
3901 || GET_CODE (target) == REG
3902 || GET_CODE (target) == SUBREG
3903 /* If the field isn't aligned enough to store as an ordinary memref,
3904 store it as a bit field. */
3905 || (SLOW_UNALIGNED_ACCESS
3906 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
3907 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
3909 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3911 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE.  */
3913 if (mode != VOIDmode && mode != BLKmode
3914 && mode != TYPE_MODE (TREE_TYPE (exp)))
3915 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
3917 /* If the modes of TARGET and TEMP are both BLKmode, both
3918 must be in memory and BITPOS must be aligned on a byte
3919 boundary. If so, we simply do a block copy. */
3920 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
3922 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
3923 || bitpos % BITS_PER_UNIT != 0)
3926 target = change_address (target, VOIDmode,
3927 plus_constant (XEXP (target, 0),
3928 bitpos / BITS_PER_UNIT));
3930 emit_block_move (target, temp,
3931 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
3935 return value_mode == VOIDmode ? const0_rtx : target;
3938 /* Store the value in the bitfield. */
3939 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
3940 if (value_mode != VOIDmode)
3942 /* The caller wants an rtx for the value. */
3943 /* If possible, avoid refetching from the bitfield itself. */
3945 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
3948 enum machine_mode tmode;
3951 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
3952 tmode = GET_MODE (temp);
3953 if (tmode == VOIDmode)
3955 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
3956 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
3957 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
3959 return extract_bit_field (target, bitsize, bitpos, unsignedp,
3960 NULL_RTX, value_mode, 0, align,
3967 rtx addr = XEXP (target, 0);
3970 /* If a value is wanted, it must be the lhs;
3971 so make the address stable for multiple use. */
3973 if (value_mode != VOIDmode && GET_CODE (addr) != REG
3974 && ! CONSTANT_ADDRESS_P (addr)
3975 /* A frame-pointer reference is already stable. */
3976 && ! (GET_CODE (addr) == PLUS
3977 && GET_CODE (XEXP (addr, 1)) == CONST_INT
3978 && (XEXP (addr, 0) == virtual_incoming_args_rtx
3979 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
3980 addr = copy_to_reg (addr);
3982 /* Now build a reference to just the desired component. */
3984 to_rtx = change_address (target, mode,
3985 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
3986 MEM_IN_STRUCT_P (to_rtx) = 1;
3988 return store_expr (exp, to_rtx, value_mode != VOIDmode);
3992 /* Return true if any object containing the innermost array is an unaligned
3993 packed structure field. */
3996 get_inner_unaligned_p (exp)
3999 int needed_alignment = TYPE_ALIGN (TREE_TYPE (exp));
4003 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4005 if (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
4009 else if (TREE_CODE (exp) != ARRAY_REF
4010 && TREE_CODE (exp) != NON_LVALUE_EXPR
4011 && ! ((TREE_CODE (exp) == NOP_EXPR
4012 || TREE_CODE (exp) == CONVERT_EXPR)
4013 && (TYPE_MODE (TREE_TYPE (exp))
4014 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4017 exp = TREE_OPERAND (exp, 0);
4023 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4024 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4025 ARRAY_REFs and find the ultimate containing object, which we return.
4027 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4028 bit position, and *PUNSIGNEDP to the signedness of the field.
4029 If the position of the field is variable, we store a tree
4030 giving the variable offset (in units) in *POFFSET.
4031 This offset is in addition to the bit position.
4032 If the position is not variable, we store 0 in *POFFSET.
4034 If any of the extraction expressions is volatile,
4035 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4037 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4038 is a mode that can be used to access the field.  In that case, *PBITSIZE is redundant.
4041 If the field describes a variable-sized object, *PMODE is set to
4042 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4043 this case, but the address of the object can be found. */
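/* An illustrative sketch (not part of the compiler itself) of a typical
   call to get_inner_reference, mirroring its use in expand_assignment
   above.  EXP is assumed to be a COMPONENT_REF, BIT_FIELD_REF or
   ARRAY_REF; the wrapper name is hypothetical.  */
#if 0 /* Illustrative sketch only.  */
static tree
inner_reference_usage (exp)
     tree exp;
{
  int bitsize, bitpos, unsignedp;
  int volatilep = 0;   /* only ever set, never cleared, by the callee */
  tree offset;
  enum machine_mode mode1;
  tree inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				    &mode1, &unsignedp, &volatilep);

  /* MODE1 == VOIDmode means a bit-field access is needed; OFFSET, if
     nonzero, is a variable byte offset to add on top of BITPOS.  */
  return inner;
}
#endif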
4046 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4047 punsignedp, pvolatilep)
4052 enum machine_mode *pmode;
4056 tree orig_exp = exp;
4058 enum machine_mode mode = VOIDmode;
4059 tree offset = integer_zero_node;
4061 if (TREE_CODE (exp) == COMPONENT_REF)
4063 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4064 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4065 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4066 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4068 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4070 size_tree = TREE_OPERAND (exp, 1);
4071 *punsignedp = TREE_UNSIGNED (exp);
4075 mode = TYPE_MODE (TREE_TYPE (exp));
4076 *pbitsize = GET_MODE_BITSIZE (mode);
4077 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4082 if (TREE_CODE (size_tree) != INTEGER_CST)
4083 mode = BLKmode, *pbitsize = -1;
4085 *pbitsize = TREE_INT_CST_LOW (size_tree);
4088 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4089 and find the ultimate containing object. */
4095 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4097 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4098 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4099 : TREE_OPERAND (exp, 2));
4100 tree constant = integer_zero_node, var = pos;
4102 /* If this field hasn't been filled in yet, don't go
4103 past it. This should only happen when folding expressions
4104 made during type construction. */
4108 /* Assume here that the offset is a multiple of a unit.
4109 If not, there should be an explicitly added constant. */
4110 if (TREE_CODE (pos) == PLUS_EXPR
4111 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4112 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4113 else if (TREE_CODE (pos) == INTEGER_CST)
4114 constant = pos, var = integer_zero_node;
4116 *pbitpos += TREE_INT_CST_LOW (constant);
4119 offset = size_binop (PLUS_EXPR, offset,
4120 size_binop (EXACT_DIV_EXPR, var,
4121 size_int (BITS_PER_UNIT)));
4124 else if (TREE_CODE (exp) == ARRAY_REF)
4126 /* This code is based on the code in case ARRAY_REF in expand_expr
4127 below. We assume here that the size of an array element is
4128 always an integral multiple of BITS_PER_UNIT. */
4130 tree index = TREE_OPERAND (exp, 1);
4131 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4133 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4134 tree index_type = TREE_TYPE (index);
4136 if (! integer_zerop (low_bound))
4137 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
4139 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4141 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4143 index_type = TREE_TYPE (index);
4146 index = fold (build (MULT_EXPR, index_type, index,
4147 TYPE_SIZE (TREE_TYPE (exp))));
4149 if (TREE_CODE (index) == INTEGER_CST
4150 && TREE_INT_CST_HIGH (index) == 0)
4151 *pbitpos += TREE_INT_CST_LOW (index);
4153 offset = size_binop (PLUS_EXPR, offset,
4154 size_binop (FLOOR_DIV_EXPR, index,
4155 size_int (BITS_PER_UNIT)));
4157 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4158 && ! ((TREE_CODE (exp) == NOP_EXPR
4159 || TREE_CODE (exp) == CONVERT_EXPR)
4160 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4161 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4163 && (TYPE_MODE (TREE_TYPE (exp))
4164 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4167 /* If any reference in the chain is volatile, the effect is volatile. */
4168 if (TREE_THIS_VOLATILE (exp))
4170 exp = TREE_OPERAND (exp, 0);
4173 /* If this was a bit-field, see if there is a mode that allows direct
4174 access in case EXP is in memory. */
4175 if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
4177 mode = mode_for_size (*pbitsize,
4178 (TYPE_MODE (TREE_TYPE (orig_exp)) == BLKmode
4180 : GET_MODE_CLASS (TYPE_MODE
4181 (TREE_TYPE (orig_exp)))),
4183 if (mode == BLKmode)
4187 if (integer_zerop (offset))
4190 if (offset != 0 && contains_placeholder_p (offset))
4191 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4198 /* Given an rtx VALUE that may contain additions and multiplications,
4199 return an equivalent value that just refers to a register or memory.
4200 This is done by generating instructions to perform the arithmetic
4201 and returning a pseudo-register containing the value.
4203 The returned value may be a REG, SUBREG, MEM or constant. */
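/* An illustrative sketch (not part of the compiler itself) of how
   force_operand is typically used: collapse an address-like expression
   such as (plus (mult a b) c) into something a later pass can use
   directly.  The wrapper name is hypothetical.  */
#if 0 /* Illustrative sketch only.  */
static rtx
force_operand_usage (value)
     rtx value;
{
  /* Emits any needed arithmetic insns and returns a REG, SUBREG, MEM or
     constant holding the result; simple operands come back unchanged.  */
  return force_operand (value, NULL_RTX);
}
#endif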
4206 force_operand (value, target)
4209 register optab binoptab = 0;
4210 /* Use a temporary to force order of execution of calls to `force_operand'.  */
4214 /* Use subtarget as the target for operand 0 of a binary operation. */
4215 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4217 if (GET_CODE (value) == PLUS)
4218 binoptab = add_optab;
4219 else if (GET_CODE (value) == MINUS)
4220 binoptab = sub_optab;
4221 else if (GET_CODE (value) == MULT)
4223 op2 = XEXP (value, 1);
4224 if (!CONSTANT_P (op2)
4225 && !(GET_CODE (op2) == REG && op2 != subtarget))
4227 tmp = force_operand (XEXP (value, 0), subtarget);
4228 return expand_mult (GET_MODE (value), tmp,
4229 force_operand (op2, NULL_RTX),
4235 op2 = XEXP (value, 1);
4236 if (!CONSTANT_P (op2)
4237 && !(GET_CODE (op2) == REG && op2 != subtarget))
4239 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4241 binoptab = add_optab;
4242 op2 = negate_rtx (GET_MODE (value), op2);
4245 /* Check for an addition with OP2 a constant integer and our first
4246 operand a PLUS of a virtual register and something else. In that
4247 case, we want to emit the sum of the virtual register and the
4248 constant first and then add the other value. This allows virtual
4249 register instantiation to simply modify the constant rather than
4250 creating another one around this addition. */
4251 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4252 && GET_CODE (XEXP (value, 0)) == PLUS
4253 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4254 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4255 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4257 rtx temp = expand_binop (GET_MODE (value), binoptab,
4258 XEXP (XEXP (value, 0), 0), op2,
4259 subtarget, 0, OPTAB_LIB_WIDEN);
4260 return expand_binop (GET_MODE (value), binoptab, temp,
4261 force_operand (XEXP (XEXP (value, 0), 1), 0),
4262 target, 0, OPTAB_LIB_WIDEN);
4265 tmp = force_operand (XEXP (value, 0), subtarget);
4266 return expand_binop (GET_MODE (value), binoptab, tmp,
4267 force_operand (op2, NULL_RTX),
4268 target, 0, OPTAB_LIB_WIDEN);
4269 /* We give UNSIGNEDP = 0 to expand_binop
4270 because the only operations we are expanding here are signed ones. */
4275 /* Subroutine of expand_expr:
4276 save the non-copied parts (LIST) of an expr (LHS), and return a list
4277 which can restore these values to their previous values,
4278 should something modify their storage. */
4281 save_noncopied_parts (lhs, list)
4288 for (tail = list; tail; tail = TREE_CHAIN (tail))
4289 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4290 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
4293 tree part = TREE_VALUE (tail);
4294 tree part_type = TREE_TYPE (part);
4295 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
4296 rtx target = assign_temp (part_type, 0, 1, 1);
4297 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
4298 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
4299 parts = tree_cons (to_be_saved,
4300 build (RTL_EXPR, part_type, NULL_TREE,
4303 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
4308 /* Subroutine of expand_expr:
4309 record the non-copied parts (LIST) of an expr (LHS), and return a list
4310 which specifies the initial values of these parts. */
4313 init_noncopied_parts (lhs, list)
4320 for (tail = list; tail; tail = TREE_CHAIN (tail))
4321 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4322 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
4325 tree part = TREE_VALUE (tail);
4326 tree part_type = TREE_TYPE (part);
4327 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
4328 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
4333 /* Subroutine of expand_expr: return nonzero iff there is no way that
4334 EXP can reference X, which is being modified. */
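/* For instance (illustrative), if X is the pseudo register that is
   DECL_RTL for the variable `i', then safe_from_p (x, exp) is 0 for
   EXP `i + 1', since evaluating EXP would read the register being
   modified; it is nonzero for EXP `j + 1' when `j' lives elsewhere.  */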
4337 safe_from_p (x, exp)
4345 /* If EXP has varying size, we MUST use a target since we currently
4346 have no way of allocating temporaries of variable size. So we
4347 assume here that something at a higher level has prevented a
4348 clash. This is somewhat bogus, but the best we can do. Only
4349 do this when X is BLKmode. */
4350 || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4351 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
4352 && GET_MODE (x) == BLKmode))
4355 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
4356 find the underlying pseudo. */
4357 if (GET_CODE (x) == SUBREG)
4360 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4364 /* If X is a location in the outgoing argument area, it is always safe. */
4365 if (GET_CODE (x) == MEM
4366 && (XEXP (x, 0) == virtual_outgoing_args_rtx
4367 || (GET_CODE (XEXP (x, 0)) == PLUS
4368 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
4371 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4374 exp_rtl = DECL_RTL (exp);
4381 if (TREE_CODE (exp) == TREE_LIST)
4382 return ((TREE_VALUE (exp) == 0
4383 || safe_from_p (x, TREE_VALUE (exp)))
4384 && (TREE_CHAIN (exp) == 0
4385 || safe_from_p (x, TREE_CHAIN (exp))));
4390 return safe_from_p (x, TREE_OPERAND (exp, 0));
4394 return (safe_from_p (x, TREE_OPERAND (exp, 0))
4395 && safe_from_p (x, TREE_OPERAND (exp, 1)));
4399 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
4400 the expression. If it is set, we conflict iff we are that rtx or
4401 both are in memory. Otherwise, we check all operands of the
4402 expression recursively. */
4404 switch (TREE_CODE (exp))
4407 return (staticp (TREE_OPERAND (exp, 0))
4408 || safe_from_p (x, TREE_OPERAND (exp, 0)));
4411 if (GET_CODE (x) == MEM)
4416 exp_rtl = CALL_EXPR_RTL (exp);
4419 /* Assume that the call will clobber all hard registers and
4421 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4422 || GET_CODE (x) == MEM)
4429 /* If a sequence exists, we would have to scan every instruction
4430 in the sequence to see if it was safe. This is probably not
4432 if (RTL_EXPR_SEQUENCE (exp))
4435 exp_rtl = RTL_EXPR_RTL (exp);
4438 case WITH_CLEANUP_EXPR:
4439 exp_rtl = RTL_EXPR_RTL (exp);
4442 case CLEANUP_POINT_EXPR:
4443 return safe_from_p (x, TREE_OPERAND (exp, 0));
4446 exp_rtl = SAVE_EXPR_RTL (exp);
4450 /* The only operand we look at is operand 1. The rest aren't
4451 part of the expression. */
4452 return safe_from_p (x, TREE_OPERAND (exp, 1));
4454 case METHOD_CALL_EXPR:
4455 /* This takes an rtx argument, but shouldn't appear here. */
4459 /* If we have an rtx, we do not need to scan our operands. */
4463 nops = tree_code_length[(int) TREE_CODE (exp)];
4464 for (i = 0; i < nops; i++)
4465 if (TREE_OPERAND (exp, i) != 0
4466 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
4470 /* If we have an rtl, find any enclosed object. Then see if we conflict
4474 if (GET_CODE (exp_rtl) == SUBREG)
4476 exp_rtl = SUBREG_REG (exp_rtl);
4477 if (GET_CODE (exp_rtl) == REG
4478 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
4482 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
4483 are memory and EXP is not readonly. */
4484 return ! (rtx_equal_p (x, exp_rtl)
4485 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
4486 && ! TREE_READONLY (exp)));
4489 /* If we reach here, it is safe. */
4493 /* Subroutine of expand_expr: return nonzero iff EXP is an
4494 expression whose type is statically determinable. */
4500 if (TREE_CODE (exp) == PARM_DECL
4501 || TREE_CODE (exp) == VAR_DECL
4502 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
4503 || TREE_CODE (exp) == COMPONENT_REF
4504 || TREE_CODE (exp) == ARRAY_REF)
4509 /* expand_expr: generate code for computing expression EXP.
4510 An rtx for the computed value is returned. The value is never null.
4511 In the case of a void EXP, const0_rtx is returned.
4513 The value may be stored in TARGET if TARGET is nonzero.
4514 TARGET is just a suggestion; callers must assume that
4515 the rtx returned may not be the same as TARGET.
4517 If TARGET is CONST0_RTX, it means that the value will be ignored.
4519 If TMODE is not VOIDmode, it suggests generating the
4520 result in mode TMODE. But this is done only when convenient.
4521 Otherwise, TMODE is ignored and the value generated in its natural mode.
4522 TMODE is just a suggestion; callers must assume that
4523 the rtx returned may not have mode TMODE.
4525 Note that TARGET may have neither TMODE nor MODE. In that case, it
4526 probably will not be used.
4528 If MODIFIER is EXPAND_SUM then when EXP is an addition
4529 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4530 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4531 products as above, or REG or MEM, or constant.
4532 Ordinarily in such cases we would output mul or add instructions
4533 and then return a pseudo reg containing the sum.
4535 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4536 it also marks a label as absolutely required (it can't be dead).
4537 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
4538 This is used for outputting expressions used in initializers.
4540 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4541 with a constant address even if that address is not normally legitimate.
4542 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
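/* For example (illustrative; the modes and the constant 4 depend on the
   target and element type), expanding `&a[i]' with EXPAND_SUM may return
     (plus:SI (mult:SI (reg:SI 68) (const_int 4)) (symbol_ref:SI ("a")))
   rather than emitting the arithmetic and returning a single pseudo,
   so the caller can fold the whole thing into an address.  */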
4545 expand_expr (exp, target, tmode, modifier)
4548 enum machine_mode tmode;
4549 enum expand_modifier modifier;
4551 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4552 This is static so it will be accessible to our recursive callees. */
4553 static tree placeholder_list = 0;
4554 register rtx op0, op1, temp;
4555 tree type = TREE_TYPE (exp);
4556 int unsignedp = TREE_UNSIGNED (type);
4557 register enum machine_mode mode = TYPE_MODE (type);
4558 register enum tree_code code = TREE_CODE (exp);
4560 /* Use subtarget as the target for operand 0 of a binary operation. */
4561 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4562 rtx original_target = target;
4563 /* Maybe defer this until sure not doing bytecode? */
4564 int ignore = (target == const0_rtx
4565 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4566 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4567 || code == COND_EXPR)
4568 && TREE_CODE (type) == VOID_TYPE));
4572 if (output_bytecode && modifier != EXPAND_INITIALIZER)
4574 bc_expand_expr (exp);
4578 /* Don't use hard regs as subtargets, because the combiner
4579 can only handle pseudo regs. */
4580 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4582 /* Avoid subtargets inside loops,
4583 since they hide some invariant expressions. */
4584 if (preserve_subexpressions_p ())
4587 /* If we are going to ignore this result, we need only do something
4588 if there is a side-effect somewhere in the expression. If there
4589 is, short-circuit the most common cases here. Note that we must
4590 not call expand_expr with anything but const0_rtx in case this
4591 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
4595 if (! TREE_SIDE_EFFECTS (exp))
4598 /* Ensure we reference a volatile object even if value is ignored. */
4599 if (TREE_THIS_VOLATILE (exp)
4600 && TREE_CODE (exp) != FUNCTION_DECL
4601 && mode != VOIDmode && mode != BLKmode)
4603 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
4604 if (GET_CODE (temp) == MEM)
4605 temp = copy_to_reg (temp);
4609 if (TREE_CODE_CLASS (code) == '1')
4610 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4611 VOIDmode, modifier);
4612 else if (TREE_CODE_CLASS (code) == '2'
4613 || TREE_CODE_CLASS (code) == '<')
4615 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4616 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
4619 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4620 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4621 /* If the second operand has no side effects, just evaluate
4623 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4624 VOIDmode, modifier);
4629 /* If we will do cse, generate all results into pseudo registers
4630 since 1) that allows cse to find more things
4631 and 2) otherwise cse could produce an insn the machine
4634 if (! cse_not_expected && mode != BLKmode && target
4635 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4642 tree function = decl_function_context (exp);
4643 /* Handle using a label in a containing function. */
4644 if (function != current_function_decl && function != 0)
4646 struct function *p = find_function_data (function);
4647 /* Allocate in the memory associated with the function
4648 that the label is in. */
4649 push_obstacks (p->function_obstack,
4650 p->function_maybepermanent_obstack);
4652 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4653 label_rtx (exp), p->forced_labels);
4656 else if (modifier == EXPAND_INITIALIZER)
4657 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4658 label_rtx (exp), forced_labels);
4659 temp = gen_rtx (MEM, FUNCTION_MODE,
4660 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
4661 if (function != current_function_decl && function != 0)
4662 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
4667 if (DECL_RTL (exp) == 0)
4669 error_with_decl (exp, "prior parameter's size depends on `%s'");
4670 return CONST0_RTX (mode);
4673 /* ... fall through ... */
4676 /* If a static var's type was incomplete when the decl was written,
4677 but the type is complete now, lay out the decl now. */
4678 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4679 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
4681 push_obstacks_nochange ();
4682 end_temporary_allocation ();
4683 layout_decl (exp, 0);
4684 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
4688 /* ... fall through ... */
4692 if (DECL_RTL (exp) == 0)
4695 /* Ensure variable marked as used even if it doesn't go through
4696 a parser. If it hasn't been used yet, write out an external
4698 if (! TREE_USED (exp))
4700 assemble_external (exp);
4701 TREE_USED (exp) = 1;
4704 /* Show we haven't gotten RTL for this yet. */
4707 /* Handle variables inherited from containing functions. */
4708 context = decl_function_context (exp);
4710 /* We treat inline_function_decl as an alias for the current function
4711 because that is the inline function whose vars, types, etc.
4712 are being merged into the current function.
4713 See expand_inline_function. */
4715 if (context != 0 && context != current_function_decl
4716 && context != inline_function_decl
4717 /* If var is static, we don't need a static chain to access it. */
4718 && ! (GET_CODE (DECL_RTL (exp)) == MEM
4719 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
4723 /* Mark as non-local and addressable. */
4724 DECL_NONLOCAL (exp) = 1;
4725 if (DECL_NO_STATIC_CHAIN (current_function_decl))
4727 mark_addressable (exp);
4728 if (GET_CODE (DECL_RTL (exp)) != MEM)
4730 addr = XEXP (DECL_RTL (exp), 0);
4731 if (GET_CODE (addr) == MEM)
4732 addr = gen_rtx (MEM, Pmode,
4733 fix_lexical_addr (XEXP (addr, 0), exp));
4735 addr = fix_lexical_addr (addr, exp);
4736 temp = change_address (DECL_RTL (exp), mode, addr);
4739 /* This is the case of an array whose size is to be determined
4740 from its initializer, while the initializer is still being parsed.
4743 else if (GET_CODE (DECL_RTL (exp)) == MEM
4744 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
4745 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
4746 XEXP (DECL_RTL (exp), 0));
4748 /* If DECL_RTL is memory, we are in the normal case; if the address
4749 is not valid, or if it is not a register and -fforce-addr is
4750 specified, get the address into a register. */
4752 else if (GET_CODE (DECL_RTL (exp)) == MEM
4753 && modifier != EXPAND_CONST_ADDRESS
4754 && modifier != EXPAND_SUM
4755 && modifier != EXPAND_INITIALIZER
4756 && (! memory_address_p (DECL_MODE (exp),
4757 XEXP (DECL_RTL (exp), 0))
4759 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
4760 temp = change_address (DECL_RTL (exp), VOIDmode,
4761 copy_rtx (XEXP (DECL_RTL (exp), 0)));
4763 /* If we got something, return it. But first, set the alignment if
4764 the address is a register. */
4767 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
4768 mark_reg_pointer (XEXP (temp, 0),
4769 DECL_ALIGN (exp) / BITS_PER_UNIT);
4774 /* If the mode of DECL_RTL does not match that of the decl, it
4775 must be a promoted value. We return a SUBREG of the wanted mode,
4776 but mark it so that we know that it was already extended. */
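/* For example (illustrative), on a target whose PROMOTE_MODE widens
   QImode values to SImode, a `char' variable may have an SImode
   DECL_RTL; here we hand back (subreg:QI (reg:SI n) 0) with
   SUBREG_PROMOTED_VAR_P set, so callers know the value has already
   been extended.  */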
4778 if (GET_CODE (DECL_RTL (exp)) == REG
4779 && GET_MODE (DECL_RTL (exp)) != mode)
4781 /* Get the signedness used for this variable. Ensure we get the
4782 same mode we got when the variable was declared. */
4783 if (GET_MODE (DECL_RTL (exp))
4784 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
4787 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
4788 SUBREG_PROMOTED_VAR_P (temp) = 1;
4789 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4793 return DECL_RTL (exp);
4796 return immed_double_const (TREE_INT_CST_LOW (exp),
4797 TREE_INT_CST_HIGH (exp),
4801 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
4804 /* If optimized, generate immediate CONST_DOUBLE
4805 which will be turned into memory by reload if necessary.
4807 We used to force a register so that loop.c could see it. But
4808 this does not allow gen_* patterns to perform optimizations with
4809 the constants. It also produces two insns in cases like "x = 1.0;".
4810 On most machines, floating-point constants are not permitted in
4811 many insns, so we'd end up copying it to a register in any case.
4813 Now, we do the copying in expand_binop, if appropriate. */
4814 return immed_real_const (exp);
4818 if (! TREE_CST_RTL (exp))
4819 output_constant_def (exp);
4821 /* TREE_CST_RTL probably contains a constant address.
4822 On RISC machines where a constant address isn't valid,
4823 make some insns to get that address into a register. */
4824 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
4825 && modifier != EXPAND_CONST_ADDRESS
4826 && modifier != EXPAND_INITIALIZER
4827 && modifier != EXPAND_SUM
4828 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
4830 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
4831 return change_address (TREE_CST_RTL (exp), VOIDmode,
4832 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
4833 return TREE_CST_RTL (exp);
4836 context = decl_function_context (exp);
4838 /* We treat inline_function_decl as an alias for the current function
4839 because that is the inline function whose vars, types, etc.
4840 are being merged into the current function.
4841 See expand_inline_function. */
4842 if (context == current_function_decl || context == inline_function_decl)
4845 /* If this is non-local, handle it. */
4848 temp = SAVE_EXPR_RTL (exp);
4849 if (temp && GET_CODE (temp) == REG)
4851 put_var_into_stack (exp);
4852 temp = SAVE_EXPR_RTL (exp);
4854 if (temp == 0 || GET_CODE (temp) != MEM)
4856 return change_address (temp, mode,
4857 fix_lexical_addr (XEXP (temp, 0), exp));
4859 if (SAVE_EXPR_RTL (exp) == 0)
4861 if (mode == VOIDmode)
4864 temp = assign_temp (type, 0, 0, 0);
4866 SAVE_EXPR_RTL (exp) = temp;
4867 if (!optimize && GET_CODE (temp) == REG)
4868 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
4871 /* If the mode of TEMP does not match that of the expression, it
4872 must be a promoted value. We pass store_expr a SUBREG of the
4873 wanted mode but mark it so that we know that it was already
4874 extended. Note that `unsignedp' was modified above in
4877 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
4879 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
4880 SUBREG_PROMOTED_VAR_P (temp) = 1;
4881 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4884 if (temp == const0_rtx)
4885 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4887 store_expr (TREE_OPERAND (exp, 0), temp, 0);
4890 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
4891 must be a promoted value. We return a SUBREG of the wanted mode,
4892 but mark it so that we know that it was already extended. */
4894 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
4895 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
4897 /* Compute the signedness and make the proper SUBREG. */
4898 promote_mode (type, mode, &unsignedp, 0);
4899 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
4900 SUBREG_PROMOTED_VAR_P (temp) = 1;
4901 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4905 return SAVE_EXPR_RTL (exp);
4910 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
4911 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
4915 case PLACEHOLDER_EXPR:
4916 /* If there is an object on the head of the placeholder list,
4917 see if some object in its references is of type TYPE. For
4918 further information, see tree.def. */
4919 if (placeholder_list)
4922 tree old_list = placeholder_list;
4924 for (object = TREE_PURPOSE (placeholder_list);
4925 (TYPE_MAIN_VARIANT (TREE_TYPE (object))
4926 != TYPE_MAIN_VARIANT (type))
4927 && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
4928 || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
4929 || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
4930 || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
4931 object = TREE_OPERAND (object, 0))
4935 && (TYPE_MAIN_VARIANT (TREE_TYPE (object))
4936 == TYPE_MAIN_VARIANT (type)))
4938 /* Expand this object skipping the list entries before
4939 it was found in case it is also a PLACEHOLDER_EXPR.
4940 In that case, we want to translate it using subsequent
4942 placeholder_list = TREE_CHAIN (placeholder_list);
4943 temp = expand_expr (object, original_target, tmode, modifier);
4944 placeholder_list = old_list;
4949 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
4952 case WITH_RECORD_EXPR:
4953 /* Put the object on the placeholder list, expand our first operand,
4954 and pop the list. */
4955 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
4957 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
4959 placeholder_list = TREE_CHAIN (placeholder_list);
4963 expand_exit_loop_if_false (NULL_PTR,
4964 invert_truthvalue (TREE_OPERAND (exp, 0)));
4969 expand_start_loop (1);
4970 expand_expr_stmt (TREE_OPERAND (exp, 0));
4978 tree vars = TREE_OPERAND (exp, 0);
4979 int vars_need_expansion = 0;
4981 /* Need to open a binding contour here because
4982 if there are any cleanups they must be contained here. */
4983 expand_start_bindings (0);
4985 /* Mark the corresponding BLOCK for output in its proper place. */
4986 if (TREE_OPERAND (exp, 2) != 0
4987 && ! TREE_USED (TREE_OPERAND (exp, 2)))
4988 insert_block (TREE_OPERAND (exp, 2));
4990 /* If VARS have not yet been expanded, expand them now. */
4993 if (DECL_RTL (vars) == 0)
4995 vars_need_expansion = 1;
4998 expand_decl_init (vars);
4999 vars = TREE_CHAIN (vars);
5002 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
5004 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
5010 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
5012 emit_insns (RTL_EXPR_SEQUENCE (exp));
5013 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
5014 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
5015 free_temps_for_rtl_expr (exp);
5016 return RTL_EXPR_RTL (exp);
5019 /* If we don't need the result, just ensure we evaluate any
5024 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
5025 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
5029 /* All elts simple constants => refer to a constant in memory. But
5030 if this is a non-BLKmode mode, let it store a field at a time
5031 since that should make a CONST_INT or CONST_DOUBLE when we
5032 fold. Likewise, if we have a target we can use, it is best to
5033 store directly into the target unless the type is large enough
5034 that memcpy will be used. If we are making an initializer and
5035 all operands are constant, put it in memory as well. */
5036 else if ((TREE_STATIC (exp)
5037 && ((mode == BLKmode
5038 && ! (target != 0 && safe_from_p (target, exp)))
5039 || TREE_ADDRESSABLE (exp)
5040 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5041 && (move_by_pieces_ninsns
5042 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
5043 TYPE_ALIGN (type) / BITS_PER_UNIT)
5045 && ! mostly_zeros_p (exp))))
5046 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
5048 rtx constructor = output_constant_def (exp);
5049 if (modifier != EXPAND_CONST_ADDRESS
5050 && modifier != EXPAND_INITIALIZER
5051 && modifier != EXPAND_SUM
5052 && (! memory_address_p (GET_MODE (constructor),
5053 XEXP (constructor, 0))
5055 && GET_CODE (XEXP (constructor, 0)) != REG)))
5056 constructor = change_address (constructor, VOIDmode,
5057 XEXP (constructor, 0));
5063 if (target == 0 || ! safe_from_p (target, exp))
5065 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
5066 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5068 target = assign_temp (type, 0, 1, 1);
5071 if (TREE_READONLY (exp))
5073 if (GET_CODE (target) == MEM)
5074 target = change_address (target, GET_MODE (target),
5076 RTX_UNCHANGING_P (target) = 1;
5079 store_constructor (exp, target, 0);
5085 tree exp1 = TREE_OPERAND (exp, 0);
5088 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
5089 op0 = memory_address (mode, op0);
5091 temp = gen_rtx (MEM, mode, op0);
5092 /* If address was computed by addition,
5093 mark this as an element of an aggregate. */
5094 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5095 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
5096 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
5097 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
5098 || (TREE_CODE (exp1) == ADDR_EXPR
5099 && (exp2 = TREE_OPERAND (exp1, 0))
5100 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
5101 MEM_IN_STRUCT_P (temp) = 1;
5102 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
5104 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5105 here, because, in C and C++, the fact that a location is accessed
5106 through a pointer to const does not mean that the value there can
5107 never change. Languages where it can never change should
5108 also set TREE_STATIC. */
5109 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
5114 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
5118 tree array = TREE_OPERAND (exp, 0);
5119 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5120 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5121 tree index = TREE_OPERAND (exp, 1);
5122 tree index_type = TREE_TYPE (index);
5125 if (TREE_CODE (low_bound) != INTEGER_CST
5126 && contains_placeholder_p (low_bound))
5127 low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
5129 /* Optimize the special-case of a zero lower bound.
5131 We convert the low_bound to sizetype to avoid some problems
5132 with constant folding. (E.g. suppose the lower bound is 1,
5133 and its mode is QI. Without the conversion, (ARRAY
5134 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5135 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5137 But sizetype isn't quite right either (especially if
5138 the lowbound is negative). FIXME */
5140 if (! integer_zerop (low_bound))
5141 index = fold (build (MINUS_EXPR, index_type, index,
5142 convert (sizetype, low_bound)));
5144 if ((TREE_CODE (index) != INTEGER_CST
5145 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5146 && (! SLOW_UNALIGNED_ACCESS || ! get_inner_unaligned_p (exp)))
5148 /* Nonconstant array index or nonconstant element size, and
5149 not an array in an unaligned (packed) structure field.
5150 Generate the tree for *(&array+index) and expand that,
5151 except do it in a language-independent way
5152 and don't complain about non-lvalue arrays.
5153 `mark_addressable' should already have been called
5154 for any array for which this case will be reached. */
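/* As an illustrative sketch: for `int a[10]; ... a[i]' (assuming a
   4-byte int) the tree built below is equivalent to

     *(int *) ((char *) &a + i * 4)

   i.e. the index is scaled by the element size in bytes, added to the
   address of the array, and the result is dereferenced.  */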
5156 /* Don't forget the const or volatile flag from the array
5158 tree variant_type = build_type_variant (type,
5159 TREE_READONLY (exp),
5160 TREE_THIS_VOLATILE (exp));
5161 tree array_adr = build1 (ADDR_EXPR,
5162 build_pointer_type (variant_type), array);
5164 tree size = size_in_bytes (type);
5166 /* Convert the integer argument to a type the same size as sizetype
5167 so the multiply won't overflow spuriously. */
5168 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
5169 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
5172 if (TREE_CODE (size) != INTEGER_CST
5173 && contains_placeholder_p (size))
5174 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
5176 /* Don't think the address has side effects
5177 just because the array does.
5178 (In some cases the address might have side effects,
5179 and we fail to record that fact here. However, it should not
5180 matter, since expand_expr should not care.) */
5181 TREE_SIDE_EFFECTS (array_adr) = 0;
5185 (INDIRECT_REF, type,
5186 fold (build (PLUS_EXPR,
5187 TYPE_POINTER_TO (variant_type),
5192 TYPE_POINTER_TO (variant_type),
5193 fold (build (MULT_EXPR, TREE_TYPE (index),
5195 convert (TREE_TYPE (index),
5198 /* Volatility, etc., of new expression is same as old
5200 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
5201 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
5202 TREE_READONLY (elt) = TREE_READONLY (exp);
5204 return expand_expr (elt, target, tmode, modifier);
5207 /* Fold an expression like: "foo"[2].
5208 This is not done in fold so it won't happen inside &.
5209 Don't fold if this is for wide characters since it's too
5210 difficult to do correctly and this is a very rare case. */
5212 if (TREE_CODE (array) == STRING_CST
5213 && TREE_CODE (index) == INTEGER_CST
5214 && !TREE_INT_CST_HIGH (index)
5215 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
5216 && GET_MODE_CLASS (mode) == MODE_INT
5217 && GET_MODE_SIZE (mode) == 1)
5218 return GEN_INT (TREE_STRING_POINTER (array)[i]);
5220 /* If this is a constant index into a constant array,
5221 just get the value from the array. Handle both the cases when
5222 we have an explicit constructor and when our operand is a variable
5223 that was declared const. */
5225 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
5227 if (TREE_CODE (index) == INTEGER_CST
5228 && TREE_INT_CST_HIGH (index) == 0)
5230 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
5232 i = TREE_INT_CST_LOW (index);
5234 elem = TREE_CHAIN (elem);
5236 return expand_expr (fold (TREE_VALUE (elem)), target,
5241 else if (optimize >= 1
5242 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
5243 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
5244 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
5246 if (TREE_CODE (index) == INTEGER_CST
5247 && TREE_INT_CST_HIGH (index) == 0)
5249 tree init = DECL_INITIAL (array);
5251 i = TREE_INT_CST_LOW (index);
5252 if (TREE_CODE (init) == CONSTRUCTOR)
5254 tree elem = CONSTRUCTOR_ELTS (init);
5257 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
5258 elem = TREE_CHAIN (elem);
5260 return expand_expr (fold (TREE_VALUE (elem)), target,
5263 else if (TREE_CODE (init) == STRING_CST
5264 && i < TREE_STRING_LENGTH (init))
5265 return GEN_INT (TREE_STRING_POINTER (init)[i]);
5270 /* Treat array-ref with constant index as a component-ref. */
5274 /* If the operand is a CONSTRUCTOR, we can just extract the
5275 appropriate field if it is present. Don't do this if we have
5276 already written the data since we want to refer to that copy
5277 and varasm.c assumes that's what we'll do. */
5278 if (code != ARRAY_REF
5279 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
5280 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
5284 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
5285 elt = TREE_CHAIN (elt))
5286 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
5287 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
5291 enum machine_mode mode1;
5296 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
5297 &mode1, &unsignedp, &volatilep);
5300 /* If we got back the original object, something is wrong. Perhaps
5301 we are evaluating an expression too early. In any event, don't
5302 infinitely recurse. */
5306 /* If TEM's type is a union of variable size, pass TARGET to the inner
5307 computation, since it will need a temporary and TARGET is known
5308 to be usable. This occurs in unchecked conversion in Ada. */
5310 op0 = expand_expr (tem,
5311 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
5312 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
5314 ? target : NULL_RTX),
5316 modifier == EXPAND_INITIALIZER ? modifier : 0);
5318 /* If this is a constant, put it into a register if it is a
5319 legitimate constant and memory if it isn't. */
5320 if (CONSTANT_P (op0))
5322 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
5323 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
5324 op0 = force_reg (mode, op0);
5326 op0 = validize_mem (force_const_mem (mode, op0));
5329 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
5332 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5334 if (GET_CODE (op0) != MEM)
5336 op0 = change_address (op0, VOIDmode,
5337 gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
5338 force_reg (ptr_mode, offset_rtx)));
5339 /* If we have a variable offset, the known alignment
5340 is only that of the innermost structure containing the field.
5341 (Actually, we could sometimes do better by using the
5342 size of an element of the innermost array, but no need.) */
5343 if (TREE_CODE (exp) == COMPONENT_REF
5344 || TREE_CODE (exp) == BIT_FIELD_REF)
5345 alignment = (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5349 /* Don't forget about volatility even if this is a bitfield. */
5350 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
5352 op0 = copy_rtx (op0);
5353 MEM_VOLATILE_P (op0) = 1;
5356 /* In cases where an aligned union has an unaligned object
5357 as a field, we might be extracting a BLKmode value from
5358 an integer-mode (e.g., SImode) object. Handle this case
5359 by doing the extract into an object as wide as the field
5360 (which we know to be the width of a basic mode), then
5361 storing into memory, and changing the mode to BLKmode.
5362 If we ultimately want the address (EXPAND_CONST_ADDRESS or
5363 EXPAND_INITIALIZER), then we must not copy to a temporary. */
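/* Illustrative example (assuming 8-bit units and a 16-bit HImode): a
   2-byte BLKmode struct field packed inside an SImode union member is
   fetched here as an HImode bit-field value, spilled to a 2-byte stack
   temporary, and that MEM is then handed back with its mode changed to
   BLKmode.  */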
5364 if (mode1 == VOIDmode
5365 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5366 || (modifier != EXPAND_CONST_ADDRESS
5367 && modifier != EXPAND_INITIALIZER
5368 && ((mode1 != BLKmode && ! direct_load[(int) mode1])
5369 /* If the field isn't aligned enough to fetch as a memref,
5370 fetch it as a bit field. */
5371 || (SLOW_UNALIGNED_ACCESS
5372 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
5373 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
5375 enum machine_mode ext_mode = mode;
5377 if (ext_mode == BLKmode)
5378 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
5380 if (ext_mode == BLKmode)
5382 /* In this case, BITPOS must start at a byte boundary and
5383 TARGET, if specified, must be a MEM. */
5384 if (GET_CODE (op0) != MEM
5385 || (target != 0 && GET_CODE (target) != MEM)
5386 || bitpos % BITS_PER_UNIT != 0)
5389 op0 = change_address (op0, VOIDmode,
5390 plus_constant (XEXP (op0, 0),
5391 bitpos / BITS_PER_UNIT));
5393 target = assign_temp (type, 0, 1, 1);
5395 emit_block_move (target, op0,
5396 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5403 op0 = validize_mem (op0);
5405 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
5406 mark_reg_pointer (XEXP (op0, 0), alignment);
5408 op0 = extract_bit_field (op0, bitsize, bitpos,
5409 unsignedp, target, ext_mode, ext_mode,
5411 int_size_in_bytes (TREE_TYPE (tem)));
5412 if (mode == BLKmode)
5414 rtx new = assign_stack_temp (ext_mode,
5415 bitsize / BITS_PER_UNIT, 0);
5417 emit_move_insn (new, op0);
5418 op0 = copy_rtx (new);
5419 PUT_MODE (op0, BLKmode);
5420 MEM_IN_STRUCT_P (op0) = 1;
5426 /* If the result is BLKmode, use that to access the object
5428 if (mode == BLKmode)
5431 /* Get a reference to just this component. */
5432 if (modifier == EXPAND_CONST_ADDRESS
5433 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5434 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
5435 (bitpos / BITS_PER_UNIT)));
5437 op0 = change_address (op0, mode1,
5438 plus_constant (XEXP (op0, 0),
5439 (bitpos / BITS_PER_UNIT)));
5440 if (GET_CODE (XEXP (op0, 0)) == REG)
5441 mark_reg_pointer (XEXP (op0, 0), alignment);
5443 MEM_IN_STRUCT_P (op0) = 1;
5444 MEM_VOLATILE_P (op0) |= volatilep;
5445 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
5448 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5449 convert_move (target, op0, unsignedp);
5453 /* Intended for a reference to a buffer of a file-object in Pascal.
5454 But it's not certain that a special tree code will really be
5455 necessary for these. INDIRECT_REF might work for them. */
5461 /* Pascal set IN expression.
5464 rlo = set_low - (set_low%bits_per_word);
5465 the_word = set [ (index - rlo)/bits_per_word ];
5466 bit_index = index % bits_per_word;
5467 bitmask = 1 << bit_index;
5468 return !!(the_word & bitmask); */
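/* Worked example of the pseudo-code above (illustrative, with 8-bit
   units): set_low = 3 and index = 13 give rlo = 0, the_word = set[1],
   bit_index = 5 and bitmask = 32; the result is whether bit 5 of the
   second byte of the set is on.  */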
5470 tree set = TREE_OPERAND (exp, 0);
5471 tree index = TREE_OPERAND (exp, 1);
5472 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
5473 tree set_type = TREE_TYPE (set);
5474 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
5475 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
5476 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
5477 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
5478 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
5479 rtx setval = expand_expr (set, 0, VOIDmode, 0);
5480 rtx setaddr = XEXP (setval, 0);
5481 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
5483 rtx diff, quo, rem, addr, bit, result;
5485 preexpand_calls (exp);
5487 /* If domain is empty, answer is no. Likewise if index is constant
5488 and out of bounds. */
5489 if ((TREE_CODE (set_high_bound) == INTEGER_CST
5490 && TREE_CODE (set_low_bound) == INTEGER_CST
5491 && tree_int_cst_lt (set_high_bound, set_low_bound)
5492 || (TREE_CODE (index) == INTEGER_CST
5493 && TREE_CODE (set_low_bound) == INTEGER_CST
5494 && tree_int_cst_lt (index, set_low_bound))
5495 || (TREE_CODE (set_high_bound) == INTEGER_CST
5496 && TREE_CODE (index) == INTEGER_CST
5497 && tree_int_cst_lt (set_high_bound, index))))
5501 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5503 /* If we get here, we have to generate the code for both cases
5504 (in range and out of range). */
5506 op0 = gen_label_rtx ();
5507 op1 = gen_label_rtx ();
5509 if (! (GET_CODE (index_val) == CONST_INT
5510 && GET_CODE (lo_r) == CONST_INT))
5512 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
5513 GET_MODE (index_val), iunsignedp, 0);
5514 emit_jump_insn (gen_blt (op1));
5517 if (! (GET_CODE (index_val) == CONST_INT
5518 && GET_CODE (hi_r) == CONST_INT))
5520 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
5521 GET_MODE (index_val), iunsignedp, 0);
5522 emit_jump_insn (gen_bgt (op1));
5525 /* Calculate the element number of bit zero in the first word
5527 if (GET_CODE (lo_r) == CONST_INT)
5528 rlow = GEN_INT (INTVAL (lo_r)
5529 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
5531 rlow = expand_binop (index_mode, and_optab, lo_r,
5532 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
5533 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5535 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
5536 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5538 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
5539 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5540 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
5541 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5543 addr = memory_address (byte_mode,
5544 expand_binop (index_mode, add_optab, diff,
5545 setaddr, NULL_RTX, iunsignedp,
5548 /* Extract the bit we want to examine */
5549 bit = expand_shift (RSHIFT_EXPR, byte_mode,
5550 gen_rtx (MEM, byte_mode, addr),
5551 make_tree (TREE_TYPE (index), rem),
5553 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
5554 GET_MODE (target) == byte_mode ? target : 0,
5555 1, OPTAB_LIB_WIDEN);
5557 if (result != target)
5558 convert_move (target, result, 1);
5560 /* Output the code to handle the out-of-range case. */
5563 emit_move_insn (target, const0_rtx);
5568 case WITH_CLEANUP_EXPR:
5569 if (RTL_EXPR_RTL (exp) == 0)
5572 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5574 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
5575 /* That's it for this cleanup. */
5576 TREE_OPERAND (exp, 2) = 0;
5577 (*interim_eh_hook) (NULL_TREE);
5579 return RTL_EXPR_RTL (exp);
5581 case CLEANUP_POINT_EXPR:
5583 extern int temp_slot_level;
5584 tree old_cleanups = cleanups_this_call;
5585 int old_temp_level = target_temp_slot_level;
5587 target_temp_slot_level = temp_slot_level;
5588 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5589 /* If we're going to use this value, load it up now. */
5591 op0 = force_not_mem (op0);
5592 expand_cleanups_to (old_cleanups);
5593 preserve_temp_slots (op0);
5596 target_temp_slot_level = old_temp_level;
5601 /* Check for a built-in function. */
5602 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5603 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5605 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5606 return expand_builtin (exp, target, subtarget, tmode, ignore);
5608 /* If this call was expanded already by preexpand_calls,
5609 just return the result we got. */
5610 if (CALL_EXPR_RTL (exp) != 0)
5611 return CALL_EXPR_RTL (exp);
5613 return expand_call (exp, target, ignore);
5615 case NON_LVALUE_EXPR:
5618 case REFERENCE_EXPR:
5619 if (TREE_CODE (type) == UNION_TYPE)
5621 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
5624 if (mode != BLKmode)
5625 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5627 target = assign_temp (type, 0, 1, 1);
5630 if (GET_CODE (target) == MEM)
5631 /* Store data into beginning of memory target. */
5632 store_expr (TREE_OPERAND (exp, 0),
5633 change_address (target, TYPE_MODE (valtype), 0), 0);
5635 else if (GET_CODE (target) == REG)
5636 /* Store this field into a union of the proper type. */
5637 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
5638 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
5640 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
5644 /* Return the entire union. */
5648 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5650 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
5653 /* If the signedness of the conversion differs and OP0 is
5654 a promoted SUBREG, clear that indication since we now
5655 have to do the proper extension. */
5656 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
5657 && GET_CODE (op0) == SUBREG)
5658 SUBREG_PROMOTED_VAR_P (op0) = 0;
5663 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
5664 if (GET_MODE (op0) == mode)
5667 /* If OP0 is a constant, just convert it into the proper mode. */
5668 if (CONSTANT_P (op0))
5670 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5671 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5673 if (modifier == EXPAND_INITIALIZER)
5674 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
5678 convert_to_mode (mode, op0,
5679 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5681 convert_move (target, op0,
5682 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5686 /* We come here from MINUS_EXPR when the second operand is a constant. */
5688 this_optab = add_optab;
5690 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
5691 something else, make sure we add the register to the constant and
5692 then to the other thing. This case can occur during strength
5693 reduction and doing it this way will produce better code if the
5694 frame pointer or argument pointer is eliminated.
5696 fold-const.c will ensure that the constant is always in the inner
5697 PLUS_EXPR, so the only case we need to do anything about is if
5698 sp, ap, or fp is our second argument, in which case we must swap
5699 the innermost first argument and our second argument. */
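/* For example (illustrative): (X + 4) + FP is rearranged here into
   (FP + 4) + X, so that after frame-pointer elimination FP + 4 remains
   a single constant offset instead of forcing a new addition.  */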
5701 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5702 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
5703 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
5704 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
5705 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
5706 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
5708 tree t = TREE_OPERAND (exp, 1);
5710 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5711 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
5714 /* If the result is to be ptr_mode and we are adding an integer to
5715 something, we might be forming a constant. So try to use
5716 plus_constant. If it produces a sum and we can't accept it,
5717 use force_operand. This allows P = &ARR[const] to generate
5718 efficient code on machines where a SYMBOL_REF is not a valid
5721 If this is an EXPAND_SUM call, always return the sum. */
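/* For example (illustrative, assuming 4-byte elements): for
   `p = &arr[8]' the symbol expands to (symbol_ref ("arr")) and
   plus_constant folds in the 32-byte offset, giving
   (const (plus (symbol_ref ("arr")) (const_int 32))); only if that
   form is not acceptable do we fall back to force_operand.  */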
5722 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
5723 || mode == ptr_mode)
5725 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
5726 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5727 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
5729 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
5731 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
5732 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5733 op1 = force_operand (op1, target);
5737 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5738 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
5739 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
5741 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
5743 if (! CONSTANT_P (op0))
5745 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5746 VOIDmode, modifier);
5747 /* Don't go to both_summands if modifier
5748 says it's not right to return a PLUS. */
5749 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5753 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
5754 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5755 op0 = force_operand (op0, target);
5760 /* No sense saving up arithmetic to be done
5761 if it's all in the wrong mode to form part of an address.
5762 And force_operand won't know whether to sign-extend or
5764 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5765 || mode != ptr_mode)
5768 preexpand_calls (exp);
5769 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5772 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
5773 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
5776 /* Make sure any term that's a sum with a constant comes last. */
5777 if (GET_CODE (op0) == PLUS
5778 && CONSTANT_P (XEXP (op0, 1)))
5784 /* If adding to a sum including a constant,
5785 associate it to put the constant outside. */
5786 if (GET_CODE (op1) == PLUS
5787 && CONSTANT_P (XEXP (op1, 1)))
5789 rtx constant_term = const0_rtx;
5791 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
5794 /* Ensure that MULT comes first if there is one. */
5795 else if (GET_CODE (op0) == MULT)
5796 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
5798 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
5800 /* Let's also eliminate constants from op0 if possible. */
5801 op0 = eliminate_constant_term (op0, &constant_term);
5803 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
5804 their sum should be a constant. Form it into OP1, since the
5805 result we want will then be OP0 + OP1. */
5807 temp = simplify_binary_operation (PLUS, mode, constant_term,
5812 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
5815 /* Put a constant term last and put a multiplication first. */
5816 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
5817 temp = op1, op1 = op0, op0 = temp;
5819 temp = simplify_binary_operation (PLUS, mode, op0, op1);
5820 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
5823 /* For initializers, we are allowed to return a MINUS of two
5824 symbolic constants. Here we handle all cases when both operands
5826 /* Handle difference of two symbolic constants,
5827 for the sake of an initializer. */
5828 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5829 && really_constant_p (TREE_OPERAND (exp, 0))
5830 && really_constant_p (TREE_OPERAND (exp, 1)))
5832 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
5833 VOIDmode, modifier);
5834 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5835 VOIDmode, modifier);
5837 /* If the last operand is a CONST_INT, use plus_constant of
5838 the negated constant. Else make the MINUS. */
5839 if (GET_CODE (op1) == CONST_INT)
5840 return plus_constant (op0, - INTVAL (op1));
5842 return gen_rtx (MINUS, mode, op0, op1);
5844 /* Convert A - const to A + (-const). */
5845 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
5847 tree negated = fold (build1 (NEGATE_EXPR, type,
5848 TREE_OPERAND (exp, 1)));
5850 /* Deal with the case where we can't negate the constant
5852 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
5854 tree newtype = signed_type (type);
5855 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
5856 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
5857 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
5859 if (! TREE_OVERFLOW (newneg))
5860 return expand_expr (convert (type,
5861 build (PLUS_EXPR, newtype,
5863 target, tmode, modifier);
5867 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
5871 this_optab = sub_optab;
5875 preexpand_calls (exp);
5876 /* If first operand is constant, swap them.
5877 Thus the following special case checks need only
5878 check the second operand. */
5879 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5881 register tree t1 = TREE_OPERAND (exp, 0);
5882 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
5883 TREE_OPERAND (exp, 1) = t1;
5886 /* Attempt to return something suitable for generating an
5887 indexed address, for machines that support that. */
5889 if (modifier == EXPAND_SUM && mode == ptr_mode
5890 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5891 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
5893 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
5895 /* Apply distributive law if OP0 is x+c. */
5896 if (GET_CODE (op0) == PLUS
5897 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
5898 return gen_rtx (PLUS, mode,
5899 gen_rtx (MULT, mode, XEXP (op0, 0),
5900 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
5901 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
5902 * INTVAL (XEXP (op0, 1))));
5904 if (GET_CODE (op0) != REG)
5905 op0 = force_operand (op0, NULL_RTX);
5906 if (GET_CODE (op0) != REG)
5907 op0 = copy_to_mode_reg (mode, op0);
5909 return gen_rtx (MULT, mode, op0,
5910 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
5913 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5916 /* Check for multiplying things that have been extended
5917 from a narrower type. If this machine supports multiplying
5918 in that narrower type with a result in the desired type,
5919 do it that way, and avoid the explicit type-conversion. */
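/* For example (illustrative; pattern availability depends on the
   target): with 16-bit `short' operands and a 32-bit result,
   `(int) s1 * (int) s2' can use a mulhisi3-style widening multiply
   directly, instead of sign-extending both operands and doing a full
   SImode multiply.  */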
5920 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
5921 && TREE_CODE (type) == INTEGER_TYPE
5922 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5923 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
5924 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5925 && int_fits_type_p (TREE_OPERAND (exp, 1),
5926 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5927 /* Don't use a widening multiply if a shift will do. */
5928 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
5929 > HOST_BITS_PER_WIDE_INT)
5930 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
5932 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
5933 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
5935 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
5936 /* If both operands are extended, they must either both
5937 be zero-extended or both be sign-extended. */
5938 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
5940 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
5942 enum machine_mode innermode
5943 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
5944 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5945 ? smul_widen_optab : umul_widen_optab);
5946 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5947 ? umul_widen_optab : smul_widen_optab);
5948 if (mode == GET_MODE_WIDER_MODE (innermode))
5950 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
5952 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5953 NULL_RTX, VOIDmode, 0);
5954 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
5955 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5958 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
5959 NULL_RTX, VOIDmode, 0);
5962 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
5963 && innermode == word_mode)
5966 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5967 NULL_RTX, VOIDmode, 0);
5968 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
5969 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5972 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
5973 NULL_RTX, VOIDmode, 0);
5974 temp = expand_binop (mode, other_optab, op0, op1, target,
5975 unsignedp, OPTAB_LIB_WIDEN);
5976 htem = expand_mult_highpart_adjust (innermode,
5977 gen_highpart (innermode, temp),
5979 gen_highpart (innermode, temp),
5981 emit_move_insn (gen_highpart (innermode, temp), htem);
5986 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5987 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5988 return expand_mult (mode, op0, op1, target, unsignedp);
5990 case TRUNC_DIV_EXPR:
5991 case FLOOR_DIV_EXPR:
5993 case ROUND_DIV_EXPR:
5994 case EXACT_DIV_EXPR:
5995 preexpand_calls (exp);
5996 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5998 /* Possible optimization: compute the dividend with EXPAND_SUM;
5999 then, if the divisor is constant, we can optimize the case
6000 where some terms of the dividend have coefficients divisible by it. */
6001 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6002 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6003 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
6006 this_optab = flodiv_optab;
6009 case TRUNC_MOD_EXPR:
6010 case FLOOR_MOD_EXPR:
6012 case ROUND_MOD_EXPR:
6013 preexpand_calls (exp);
6014 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6016 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6017 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6018 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
6020 case FIX_ROUND_EXPR:
6021 case FIX_FLOOR_EXPR:
6023 abort (); /* Not used for C. */
6025 case FIX_TRUNC_EXPR:
6026 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6028 target = gen_reg_rtx (mode);
6029 expand_fix (target, op0, unsignedp);
6033 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6035 target = gen_reg_rtx (mode);
6036 /* expand_float can't figure out what to do if FROM has VOIDmode.
6037 So give it the correct mode. With -O, cse will optimize this. */
6038 if (GET_MODE (op0) == VOIDmode)
6039 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6041 expand_float (target, op0,
6042 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6046 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6047 temp = expand_unop (mode, neg_optab, op0, target, 0);
6053 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6055 /* Handle complex values specially. */
6056 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
6057 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6058 return expand_complex_abs (mode, op0, target, unsignedp);
6060 /* Unsigned abs is simply the operand. Testing here means we don't
6061 risk generating incorrect code below. */
6062 if (TREE_UNSIGNED (type))
6065 return expand_abs (mode, op0, target, unsignedp,
6066 safe_from_p (target, TREE_OPERAND (exp, 0)));
6070 target = original_target;
6071 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
6072 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
6073 || GET_MODE (target) != mode
6074 || (GET_CODE (target) == REG
6075 && REGNO (target) < FIRST_PSEUDO_REGISTER))
6076 target = gen_reg_rtx (mode);
6077 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6078 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6080 /* First try to do it with a special MIN or MAX instruction.
6081 If that does not win, use a conditional jump to select the proper
6083 this_optab = (TREE_UNSIGNED (type)
6084 ? (code == MIN_EXPR ? umin_optab : umax_optab)
6085 : (code == MIN_EXPR ? smin_optab : smax_optab));
6087 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
6092 /* At this point, a MEM target is no longer useful; we will get better
6095 if (GET_CODE (target) == MEM)
6096 target = gen_reg_rtx (mode);
6099 emit_move_insn (target, op0);
6101 op0 = gen_label_rtx ();
6103 /* If this mode is an integer too wide to compare properly,
6104 compare word by word. Rely on cse to optimize constant cases. */
6105 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
6107 if (code == MAX_EXPR)
6108 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6109 target, op1, NULL_RTX, op0);
6111 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6112 op1, target, NULL_RTX, op0);
6113 emit_move_insn (target, op1);
6117 if (code == MAX_EXPR)
6118 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6119 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
6120 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
6122 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6123 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
6124 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
6125 if (temp == const0_rtx)
6126 emit_move_insn (target, op1);
6127 else if (temp != const_true_rtx)
6129 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
6130 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
6133 emit_move_insn (target, op1);
6140 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6141 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
6147 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6148 temp = expand_unop (mode, ffs_optab, op0, target, 1);
6153 /* ??? Can optimize bitwise operations with one arg constant.
6154 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6155 and (a bitwise1 b) bitwise2 b (etc)
6156 but that is probably not worth while. */
6158 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6159 boolean values when we want in all cases to compute both of them. In
6160 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6161 as actual zero-or-1 values and then bitwise anding. In cases where
6162 there cannot be any side effects, better code would be made by
6163 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6164 how to recognize those cases. */
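/* Concretely, `p && q' written as a TRUTH_AND_EXPR reduces both `p' and
   `q' to 0-or-1 values and combines them with an AND instruction,
   whereas a TRUTH_ANDIF_EXPR would branch around the evaluation of `q'
   whenever `p' is false.  */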
6166 case TRUTH_AND_EXPR:
6168 this_optab = and_optab;
6173 this_optab = ior_optab;
6176 case TRUTH_XOR_EXPR:
6178 this_optab = xor_optab;
6185 preexpand_calls (exp);
6186 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6188 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6189 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
6192 /* Could determine the answer when only additive constants differ. Also,
6193 the addition of one can be handled by changing the condition. */
6200 preexpand_calls (exp);
6201 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
6205 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
6206 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
6208 && GET_CODE (original_target) == REG
6209 && (GET_MODE (original_target)
6210 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6212 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
6215 if (temp != original_target)
6216 temp = copy_to_reg (temp);
6218 op1 = gen_label_rtx ();
6219 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
6220 GET_MODE (temp), unsignedp, 0);
6221 emit_jump_insn (gen_beq (op1));
6222 emit_move_insn (temp, const1_rtx);
6227 /* If no set-flag instruction, must generate a conditional
6228 store into a temporary variable. Drop through
6229 and handle this like && and ||. */
6231 case TRUTH_ANDIF_EXPR:
6232 case TRUTH_ORIF_EXPR:
6234 && (target == 0 || ! safe_from_p (target, exp)
6235 /* Make sure we don't have a hard reg (such as function's return
6236 value) live across basic blocks, if not optimizing. */
6237 || (!optimize && GET_CODE (target) == REG
6238 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
6239 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6242 emit_clr_insn (target);
6244 op1 = gen_label_rtx ();
6245 jumpifnot (exp, op1);
6248 emit_0_to_1_insn (target);
6251 return ignore ? const0_rtx : target;
6253 case TRUTH_NOT_EXPR:
6254 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6255 /* The parser is careful to generate TRUTH_NOT_EXPR
6256 only with operands that are always zero or one. */
6257 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
6258 target, 1, OPTAB_LIB_WIDEN);
6264 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6266 return expand_expr (TREE_OPERAND (exp, 1),
6267 (ignore ? const0_rtx : target),
6272 rtx flag = NULL_RTX;
6273 tree left_cleanups = NULL_TREE;
6274 tree right_cleanups = NULL_TREE;
6276 /* Used to save a pointer to the place to put the setting of
6277 the flag that indicates if this side of the conditional was
6278 taken. We backpatch the code, if we find out later that we
6279 have any conditional cleanups that need to be performed. */
6280 rtx dest_right_flag = NULL_RTX;
6281 rtx dest_left_flag = NULL_RTX;
6283 /* Note that COND_EXPRs whose type is a structure or union
6284 are required to be constructed to contain assignments of
6285 a temporary variable, so that we can evaluate them here
6286 for side effect only. If type is void, we must do likewise. */
6288 /* If an arm of the branch requires a cleanup,
6289 only that cleanup is performed. */
6292 tree binary_op = 0, unary_op = 0;
6293 tree old_cleanups = cleanups_this_call;
6295 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
6296 convert it to our mode, if necessary. */
6297 if (integer_onep (TREE_OPERAND (exp, 1))
6298 && integer_zerop (TREE_OPERAND (exp, 2))
6299 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6303 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6308 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
6309 if (GET_MODE (op0) == mode)
6313 target = gen_reg_rtx (mode);
6314 convert_move (target, op0, unsignedp);
6318 /* If we are not to produce a result, we have no target. Otherwise,
6319 if a target was specified use it; it will not be used as an
6320 intermediate target unless it is safe. If no target, use a
6325 else if (original_target
6326 && safe_from_p (original_target, TREE_OPERAND (exp, 0))
6327 && GET_MODE (original_target) == mode
6328 && ! (GET_CODE (original_target) == MEM
6329 && MEM_VOLATILE_P (original_target)))
6330 temp = original_target;
6332 temp = assign_temp (type, 0, 0, 1);
6334 /* Check for X ? A + B : A. If we have this, we can copy
6335 A to the output and conditionally add B. Similarly for unary
6336 operations. Don't do this if X has side-effects because
6337 those side effects might affect A or B and the "?" operation is
6338 a sequence point in ANSI. (We test for side effects later.) */
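/* For instance, `cond ? total + n : total' can be handled by copying
   `total' into the result unconditionally and emitting the addition of
   `n' only on the arm where `cond' is true.  */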
6340 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
6341 && operand_equal_p (TREE_OPERAND (exp, 2),
6342 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6343 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
6344 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
6345 && operand_equal_p (TREE_OPERAND (exp, 1),
6346 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6347 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
6348 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
6349 && operand_equal_p (TREE_OPERAND (exp, 2),
6350 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6351 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
6352 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
6353 && operand_equal_p (TREE_OPERAND (exp, 1),
6354 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6355 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
6357 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
6358 operation, do this as A + (X != 0). Similarly for other simple
6359 binary operators. */
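/* So `cond ? n + 1 : n' becomes `n + (cond != 0)' when the comparison
   can be done with a store-flag instruction, avoiding a branch
   entirely.  */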
6360 if (temp && singleton && binary_op
6361 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6362 && (TREE_CODE (binary_op) == PLUS_EXPR
6363 || TREE_CODE (binary_op) == MINUS_EXPR
6364 || TREE_CODE (binary_op) == BIT_IOR_EXPR
6365 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
6366 && integer_onep (TREE_OPERAND (binary_op, 1))
6367 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6370 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
6371 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
6372 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
6375 /* If we had X ? A : A + 1, do this as A + (X == 0).
6377 We have to invert the truth value here and then put it
6378 back later if do_store_flag fails. We cannot simply copy
6379 TREE_OPERAND (exp, 0) to another variable and modify that
6380 because invert_truthvalue can modify the tree pointed to
6382 if (singleton == TREE_OPERAND (exp, 1))
6383 TREE_OPERAND (exp, 0)
6384 = invert_truthvalue (TREE_OPERAND (exp, 0));
6386 result = do_store_flag (TREE_OPERAND (exp, 0),
6387 (safe_from_p (temp, singleton)
6389 mode, BRANCH_COST <= 1);
6393 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
6394 return expand_binop (mode, boptab, op1, result, temp,
6395 unsignedp, OPTAB_LIB_WIDEN);
6397 else if (singleton == TREE_OPERAND (exp, 1))
6398 TREE_OPERAND (exp, 0)
6399 = invert_truthvalue (TREE_OPERAND (exp, 0));
6402 do_pending_stack_adjust ();
6404 op0 = gen_label_rtx ();
6406 flag = gen_reg_rtx (word_mode);
6407 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
6411 /* If the target conflicts with the other operand of the
6412 binary op, we can't use it. Also, we can't use the target
6413 if it is a hard register, because evaluating the condition
6414 might clobber it. */
6416 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
6417 || (GET_CODE (temp) == REG
6418 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
6419 temp = gen_reg_rtx (mode);
6420 store_expr (singleton, temp, 0);
6423 expand_expr (singleton,
6424 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6425 dest_left_flag = get_last_insn ();
6426 if (singleton == TREE_OPERAND (exp, 1))
6427 jumpif (TREE_OPERAND (exp, 0), op0);
6429 jumpifnot (TREE_OPERAND (exp, 0), op0);
6431 /* Allows cleanups up to here. */
6432 old_cleanups = cleanups_this_call;
6433 if (binary_op && temp == 0)
6434 /* Just touch the other operand. */
6435 expand_expr (TREE_OPERAND (binary_op, 1),
6436 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6438 store_expr (build (TREE_CODE (binary_op), type,
6439 make_tree (type, temp),
6440 TREE_OPERAND (binary_op, 1)),
6443 store_expr (build1 (TREE_CODE (unary_op), type,
6444 make_tree (type, temp)),
6447 dest_right_flag = get_last_insn ();
6450 /* This is now done in jump.c and is better done there because it
6451 produces shorter register lifetimes. */
6453 /* Check for both possibilities, either constants or variables
6454 in registers (but not the same as the target!). If so, can
6455 save branches by assigning one, branching, and assigning the
6457 else if (temp && GET_MODE (temp) != BLKmode
6458 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
6459 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
6460 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
6461 && DECL_RTL (TREE_OPERAND (exp, 1))
6462 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
6463 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
6464 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
6465 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
6466 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
6467 && DECL_RTL (TREE_OPERAND (exp, 2))
6468 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
6469 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
6471 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6472 temp = gen_reg_rtx (mode);
6473 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6474 dest_left_flag = get_last_insn ();
6475 jumpifnot (TREE_OPERAND (exp, 0), op0);
6477 /* Allows cleanups up to here. */
6478 old_cleanups = cleanups_this_call;
6479 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6481 dest_right_flag = get_last_insn ();
6484 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
6485 comparison operator. If we have one of these cases, set the
6486 output to A, branch on A (cse will merge these two references),
6487 then set the output to FOO. */
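/* A typical instance is `a != 0 ? a : b': `a' is stored into the
   result, the branch tests that same `a' (cse merges the two
   references), and `b' is stored on the fall-through path.  */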
6489 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6490 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6491 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6492 TREE_OPERAND (exp, 1), 0)
6493 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6494 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
6496 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6497 temp = gen_reg_rtx (mode);
6498 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6499 dest_left_flag = get_last_insn ();
6500 jumpif (TREE_OPERAND (exp, 0), op0);
6502 /* Allows cleanups up to here. */
6503 old_cleanups = cleanups_this_call;
6504 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6506 dest_right_flag = get_last_insn ();
6509 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6510 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6511 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6512 TREE_OPERAND (exp, 2), 0)
6513 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6514 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
6516 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6517 temp = gen_reg_rtx (mode);
6518 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6519 dest_left_flag = get_last_insn ();
6520 jumpifnot (TREE_OPERAND (exp, 0), op0);
6522 /* Allows cleanups up to here. */
6523 old_cleanups = cleanups_this_call;
6524 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6526 dest_right_flag = get_last_insn ();
6530 op1 = gen_label_rtx ();
6531 jumpifnot (TREE_OPERAND (exp, 0), op0);
6533 /* Allows cleanups up to here. */
6534 old_cleanups = cleanups_this_call;
6536 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6538 expand_expr (TREE_OPERAND (exp, 1),
6539 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6540 dest_left_flag = get_last_insn ();
6542 /* Handle conditional cleanups, if any. */
6543 left_cleanups = defer_cleanups_to (old_cleanups);
6546 emit_jump_insn (gen_jump (op1));
6550 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6552 expand_expr (TREE_OPERAND (exp, 2),
6553 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6554 dest_right_flag = get_last_insn ();
6557 /* Handle conditional cleanups, if any. */
6558 right_cleanups = defer_cleanups_to (old_cleanups);
6564 /* Add back in, any conditional cleanups. */
6565 if (left_cleanups || right_cleanups)
6571 /* Now that we know that a flag is needed, go back and add in the
6572 setting of the flag. */
6574 /* Do the left side flag. */
6575 last = get_last_insn ();
6576 /* Flag left cleanups as needed. */
6577 emit_move_insn (flag, const1_rtx);
6578 /* ??? deprecated, use sequences instead. */
6579 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
6581 /* Do the right side flag. */
6582 last = get_last_insn ();
6583 /* Flag right cleanups as needed. */
6584 emit_move_insn (flag, const0_rtx);
6585 /* ??? deprecated, use sequences instead. */
6586 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
6588 /* All cleanups must be on the function_obstack. */
6589 push_obstacks_nochange ();
6590 resume_temporary_allocation ();
6592 /* convert flag, which is an rtx, into a tree. */
6593 cond = make_node (RTL_EXPR);
6594 TREE_TYPE (cond) = integer_type_node;
6595 RTL_EXPR_RTL (cond) = flag;
6596 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
6597 cond = save_expr (cond);
6599 if (! left_cleanups)
6600 left_cleanups = integer_zero_node;
6601 if (! right_cleanups)
6602 right_cleanups = integer_zero_node;
6603 new_cleanups = build (COND_EXPR, void_type_node,
6604 truthvalue_conversion (cond),
6605 left_cleanups, right_cleanups);
6606 new_cleanups = fold (new_cleanups);
6610 /* Now add in the conditionalized cleanups. */
6612 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
6613 (*interim_eh_hook) (NULL_TREE);
6620 /* Something needs to be initialized, but we didn't know
6621 where that thing was when building the tree. For example,
6622 it could be the return value of a function, or a parameter
6623 to a function which lays down in the stack, or a temporary
6624 variable which must be passed by reference.
6626 We guarantee that the expression will either be constructed
6627 or copied into our original target. */
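/* For example, the slot may be the place where a function's aggregate
   return value must be constructed, or a temporary that has to be
   passed by reference; the TARGET_EXPR names that slot and the
   initializer expanded below fills it in place.  */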
6629 tree slot = TREE_OPERAND (exp, 0);
6630 tree cleanups = NULL_TREE;
6634 if (TREE_CODE (slot) != VAR_DECL)
6638 target = original_target;
6642 if (DECL_RTL (slot) != 0)
6644 target = DECL_RTL (slot);
6645 /* If we have already expanded the slot, don't do
6647 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6652 target = assign_temp (type, 2, 1, 1);
6653 /* All temp slots at this level must not conflict. */
6654 preserve_temp_slots (target);
6655 DECL_RTL (slot) = target;
6657 /* Since SLOT is not known to the called function
6658 to belong to its stack frame, we must build an explicit
6659 cleanup. This case occurs when we must build up a reference
6660 to pass the reference as an argument. In this case,
6661 it is very likely that such a reference need not be
6664 if (TREE_OPERAND (exp, 2) == 0)
6665 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
6666 cleanups = TREE_OPERAND (exp, 2);
6671 /* This case does occur, when expanding a parameter which
6672 needs to be constructed on the stack. The target
6673 is the actual stack address that we want to initialize.
6674 The function we call will perform the cleanup in this case. */
6676 /* If we have already assigned it space, use that space,
6677 not target that we were passed in, as our target
6678 parameter is only a hint. */
6679 if (DECL_RTL (slot) != 0)
6681 target = DECL_RTL (slot);
6682 /* If we have already expanded the slot, don't do
6684 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6688 DECL_RTL (slot) = target;
6691 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
6692 /* Mark it as expanded. */
6693 TREE_OPERAND (exp, 1) = NULL_TREE;
6695 store_expr (exp1, target, 0);
6699 cleanups_this_call = tree_cons (NULL_TREE,
6701 cleanups_this_call);
6702 (*interim_eh_hook) (NULL_TREE);
6710 tree lhs = TREE_OPERAND (exp, 0);
6711 tree rhs = TREE_OPERAND (exp, 1);
6712 tree noncopied_parts = 0;
6713 tree lhs_type = TREE_TYPE (lhs);
6715 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6716 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
6717 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
6718 TYPE_NONCOPIED_PARTS (lhs_type));
6719 while (noncopied_parts != 0)
6721 expand_assignment (TREE_VALUE (noncopied_parts),
6722 TREE_PURPOSE (noncopied_parts), 0, 0);
6723 noncopied_parts = TREE_CHAIN (noncopied_parts);
6730 /* If lhs is complex, expand calls in rhs before computing it.
6731 That's so we don't compute a pointer and save it over a call.
6732 If lhs is simple, compute it first so we can give it as a
6733 target if the rhs is just a call. This avoids an extra temp and copy
6734 and that prevents a partial-subsumption which makes bad code.
6735 Actually we could treat component_ref's of vars like vars. */
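/* So for a simple `x = f ()' the lhs is computed first and handed to
   the call as its target, letting the return value land directly in
   `x'; for something like `a[i] = f ()' the calls in the rhs are
   pre-expanded so that no pointer into `a' has to stay live across
   them.  */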
6737 tree lhs = TREE_OPERAND (exp, 0);
6738 tree rhs = TREE_OPERAND (exp, 1);
6739 tree noncopied_parts = 0;
6740 tree lhs_type = TREE_TYPE (lhs);
6744 if (TREE_CODE (lhs) != VAR_DECL
6745 && TREE_CODE (lhs) != RESULT_DECL
6746 && TREE_CODE (lhs) != PARM_DECL)
6747 preexpand_calls (exp);
6749 /* Check for |= or &= of a bitfield of size one into another bitfield
6750 of size 1. In this case, (unless we need the result of the
6751 assignment) we can do this more efficiently with a
6752 test followed by an assignment, if necessary.
6754 ??? At this point, we can't get a BIT_FIELD_REF here. But if
6755 things change so we do, this code should be enhanced to
6758 && TREE_CODE (lhs) == COMPONENT_REF
6759 && (TREE_CODE (rhs) == BIT_IOR_EXPR
6760 || TREE_CODE (rhs) == BIT_AND_EXPR)
6761 && TREE_OPERAND (rhs, 0) == lhs
6762 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
6763 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
6764 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
6766 rtx label = gen_label_rtx ();
6768 do_jump (TREE_OPERAND (rhs, 1),
6769 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
6770 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
6771 expand_assignment (lhs, convert (TREE_TYPE (rhs),
6772 (TREE_CODE (rhs) == BIT_IOR_EXPR
6774 : integer_zero_node)),
6776 do_pending_stack_adjust ();
6781 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
6782 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
6783 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
6784 TYPE_NONCOPIED_PARTS (lhs_type));
6786 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6787 while (noncopied_parts != 0)
6789 expand_assignment (TREE_PURPOSE (noncopied_parts),
6790 TREE_VALUE (noncopied_parts), 0, 0);
6791 noncopied_parts = TREE_CHAIN (noncopied_parts);
6796 case PREINCREMENT_EXPR:
6797 case PREDECREMENT_EXPR:
6798 return expand_increment (exp, 0, ignore);
6800 case POSTINCREMENT_EXPR:
6801 case POSTDECREMENT_EXPR:
6802 /* Faster to treat as pre-increment if result is not used. */
6803 return expand_increment (exp, ! ignore, ignore);
6806 /* If nonzero, TEMP will be set to the address of something that might
6807 be a MEM corresponding to a stack slot. */
6810 /* Are we taking the address of a nested function? */
6811 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
6812 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
6813 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0)))
6815 op0 = trampoline_address (TREE_OPERAND (exp, 0));
6816 op0 = force_operand (op0, target);
6818 /* If we are taking the address of something erroneous, just
6820 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
6824 /* We make sure to pass const0_rtx down if we came in with
6825 ignore set, to avoid doing the cleanups twice. */
6826 op0 = expand_expr (TREE_OPERAND (exp, 0),
6827 ignore ? const0_rtx : NULL_RTX, VOIDmode,
6828 (modifier == EXPAND_INITIALIZER
6829 ? modifier : EXPAND_CONST_ADDRESS));
6831 /* If we are going to ignore the result, OP0 will have been set
6832 to const0_rtx, so just return it. Don't get confused and
6833 think we are taking the address of the constant. */
6837 op0 = protect_from_queue (op0, 0);
6839 /* We would like the object in memory. If it is a constant,
6840 we can have it be statically allocated into memory. For
6841 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
6842 memory and store the value into it. */
6844 if (CONSTANT_P (op0))
6845 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6847 else if (GET_CODE (op0) == MEM)
6849 mark_temp_addr_taken (op0);
6850 temp = XEXP (op0, 0);
6853 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6854 || GET_CODE (op0) == CONCAT)
6856 /* If this object is in a register, it must not
6858 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
6859 rtx memloc = assign_temp (inner_type, 1, 1, 1);
6861 mark_temp_addr_taken (memloc);
6862 emit_move_insn (memloc, op0);
6866 if (GET_CODE (op0) != MEM)
6869 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6871 temp = XEXP (op0, 0);
6872 #ifdef POINTERS_EXTEND_UNSIGNED
6873 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
6874 && mode == ptr_mode)
6875 temp = convert_memory_address (ptr_mode, temp);
6880 op0 = force_operand (XEXP (op0, 0), target);
6883 if (flag_force_addr && GET_CODE (op0) != REG)
6884 op0 = force_reg (Pmode, op0);
6886 if (GET_CODE (op0) == REG
6887 && ! REG_USERVAR_P (op0))
6888 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
6890 /* If we might have had a temp slot, add an equivalent address
6893 update_temp_slot_address (temp, op0);
6895 #ifdef POINTERS_EXTEND_UNSIGNED
6896 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
6897 && mode == ptr_mode)
6898 op0 = convert_memory_address (ptr_mode, op0);
6903 case ENTRY_VALUE_EXPR:
6906 /* COMPLEX type for Extended Pascal & Fortran */
6909 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6912 /* Get the rtx code of the operands. */
6913 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6914 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
6917 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6921 /* Move the real (op0) and imaginary (op1) parts to their location. */
6922 emit_move_insn (gen_realpart (mode, target), op0);
6923 emit_move_insn (gen_imagpart (mode, target), op1);
6925 insns = get_insns ();
6928 /* Complex construction should appear as a single unit. */
6929 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
6930 each with a separate pseudo as destination.
6931 It's not correct for flow to treat them as a unit. */
6932 if (GET_CODE (target) != CONCAT)
6933 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
6941 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6942 return gen_realpart (mode, op0);
6945 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6946 return gen_imagpart (mode, op0);
6950 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6954 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6957 target = gen_reg_rtx (mode);
6961 /* Store the realpart and the negated imagpart to target. */
6962 emit_move_insn (gen_realpart (partmode, target),
6963 gen_realpart (partmode, op0));
6965 imag_t = gen_imagpart (partmode, target);
6966 temp = expand_unop (partmode, neg_optab,
6967 gen_imagpart (partmode, op0), imag_t, 0);
6969 emit_move_insn (imag_t, temp);
6971 insns = get_insns ();
6974 /* Conjugate should appear as a single unit
6975 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
6976 each with a separate pseudo as destination.
6977 It's not correct for flow to treat them as a unit. */
6978 if (GET_CODE (target) != CONCAT)
6979 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
6987 op0 = CONST0_RTX (tmode);
6993 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
6996 /* Here to do an ordinary binary operator, generating an instruction
6997 from the optab already placed in `this_optab'. */
6999 preexpand_calls (exp);
7000 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
7002 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7003 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7005 temp = expand_binop (mode, this_optab, op0, op1, target,
7006 unsignedp, OPTAB_LIB_WIDEN);
7013 /* Emit bytecode to evaluate the given expression EXP to the stack. */
7015 bc_expand_expr (exp)
7018 enum tree_code code;
7021 struct binary_operator *binoptab;
7022 struct unary_operator *unoptab;
7023 struct increment_operator *incroptab;
7024 struct bc_label *lab, *lab1;
7025 enum bytecode_opcode opcode;
7028 code = TREE_CODE (exp);
7034 if (DECL_RTL (exp) == 0)
7036 error_with_decl (exp, "prior parameter's size depends on `%s'");
7040 bc_load_parmaddr (DECL_RTL (exp));
7041 bc_load_memory (TREE_TYPE (exp), exp);
7047 if (DECL_RTL (exp) == 0)
7051 if (BYTECODE_LABEL (DECL_RTL (exp)))
7052 bc_load_externaddr (DECL_RTL (exp));
7054 bc_load_localaddr (DECL_RTL (exp));
7056 if (TREE_PUBLIC (exp))
7057 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
7058 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
7060 bc_load_localaddr (DECL_RTL (exp));
7062 bc_load_memory (TREE_TYPE (exp), exp);
7067 #ifdef DEBUG_PRINT_CODE
7068 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
7070 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
7072 : TYPE_MODE (TREE_TYPE (exp)))],
7073 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
7079 #ifdef DEBUG_PRINT_CODE
7080 fprintf (stderr, " [%g]\n", (double) TREE_REAL_CST (exp));
7082 /* FIX THIS: find a better way to pass real_cst's. -bson */
7083 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
7084 (double) TREE_REAL_CST (exp));
7093 /* We build a call description vector describing the type of
7094 the return value and of the arguments; this call vector,
7095 together with a pointer to a location for the return value
7096 and the base of the argument list, is passed to the low
7097 level machine dependent call subroutine, which is responsible
7098 for putting the arguments wherever real functions expect
7099 them, as well as getting the return value back. */
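/* Roughly, the vector built here comes out as
       { argument count,
         return-value type code, return-value size,
         a (type code, size) pair for each argument }
   and is emitted as a static constant; its address is pushed along with
   the evaluated arguments and the address of the callee.  */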
7101 tree calldesc = 0, arg;
7105 /* Push the evaluated args on the evaluation stack in reverse
7106 order. Also make an entry for each arg in the calldesc
7107 vector while we're at it. */
7109 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7111 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
7114 bc_expand_expr (TREE_VALUE (arg));
7116 calldesc = tree_cons ((tree) 0,
7117 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
7119 calldesc = tree_cons ((tree) 0,
7120 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
7124 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7126 /* Allocate a location for the return value and push its
7127 address on the evaluation stack. Also make an entry
7128 at the front of the calldesc for the return value type. */
7130 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7131 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
7132 bc_load_localaddr (retval);
7134 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
7135 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
7137 /* Prepend the argument count. */
7138 calldesc = tree_cons ((tree) 0,
7139 build_int_2 (nargs, 0),
7142 /* Push the address of the call description vector on the stack. */
7143 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
7144 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
7145 build_index_type (build_int_2 (nargs * 2, 0)));
7146 r = output_constant_def (calldesc);
7147 bc_load_externaddr (r);
7149 /* Push the address of the function to be called. */
7150 bc_expand_expr (TREE_OPERAND (exp, 0));
7152 /* Call the function, popping its address and the calldesc vector
7153 address off the evaluation stack in the process. */
7154 bc_emit_instruction (call);
7156 /* Pop the arguments off the stack. */
7157 bc_adjust_stack (nargs);
7159 /* Load the return value onto the stack. */
7160 bc_load_localaddr (retval);
7161 bc_load_memory (type, TREE_OPERAND (exp, 0));
7167 if (!SAVE_EXPR_RTL (exp))
7169 /* First time around: copy to local variable */
7170 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
7171 TYPE_ALIGN (TREE_TYPE(exp)));
7172 bc_expand_expr (TREE_OPERAND (exp, 0));
7173 bc_emit_instruction (duplicate);
7175 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7176 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7180 /* Consecutive reference: use saved copy */
7181 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7182 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7187 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
7188 how are they handled instead? */
7191 TREE_USED (exp) = 1;
7192 bc_expand_expr (STMT_BODY (exp));
7199 bc_expand_expr (TREE_OPERAND (exp, 0));
7200 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
7205 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
7210 bc_expand_address (TREE_OPERAND (exp, 0));
7215 bc_expand_expr (TREE_OPERAND (exp, 0));
7216 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7221 bc_expand_expr (bc_canonicalize_array_ref (exp));
7226 bc_expand_component_address (exp);
7228 /* If we have a bitfield, generate a proper load */
7229 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
7234 bc_expand_expr (TREE_OPERAND (exp, 0));
7235 bc_emit_instruction (drop);
7236 bc_expand_expr (TREE_OPERAND (exp, 1));
7241 bc_expand_expr (TREE_OPERAND (exp, 0));
7242 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7243 lab = bc_get_bytecode_label ();
7244 bc_emit_bytecode (xjumpifnot);
7245 bc_emit_bytecode_labelref (lab);
7247 #ifdef DEBUG_PRINT_CODE
7248 fputc ('\n', stderr);
7250 bc_expand_expr (TREE_OPERAND (exp, 1));
7251 lab1 = bc_get_bytecode_label ();
7252 bc_emit_bytecode (jump);
7253 bc_emit_bytecode_labelref (lab1);
7255 #ifdef DEBUG_PRINT_CODE
7256 fputc ('\n', stderr);
7259 bc_emit_bytecode_labeldef (lab);
7260 bc_expand_expr (TREE_OPERAND (exp, 2));
7261 bc_emit_bytecode_labeldef (lab1);
7264 case TRUTH_ANDIF_EXPR:
7266 opcode = xjumpifnot;
7269 case TRUTH_ORIF_EXPR:
7276 binoptab = optab_plus_expr;
7281 binoptab = optab_minus_expr;
7286 binoptab = optab_mult_expr;
7289 case TRUNC_DIV_EXPR:
7290 case FLOOR_DIV_EXPR:
7292 case ROUND_DIV_EXPR:
7293 case EXACT_DIV_EXPR:
7295 binoptab = optab_trunc_div_expr;
7298 case TRUNC_MOD_EXPR:
7299 case FLOOR_MOD_EXPR:
7301 case ROUND_MOD_EXPR:
7303 binoptab = optab_trunc_mod_expr;
7306 case FIX_ROUND_EXPR:
7307 case FIX_FLOOR_EXPR:
7309 abort (); /* Not used for C. */
7311 case FIX_TRUNC_EXPR:
7318 abort (); /* FIXME */
7322 binoptab = optab_rdiv_expr;
7327 binoptab = optab_bit_and_expr;
7332 binoptab = optab_bit_ior_expr;
7337 binoptab = optab_bit_xor_expr;
7342 binoptab = optab_lshift_expr;
7347 binoptab = optab_rshift_expr;
7350 case TRUTH_AND_EXPR:
7352 binoptab = optab_truth_and_expr;
7357 binoptab = optab_truth_or_expr;
7362 binoptab = optab_lt_expr;
7367 binoptab = optab_le_expr;
7372 binoptab = optab_ge_expr;
7377 binoptab = optab_gt_expr;
7382 binoptab = optab_eq_expr;
7387 binoptab = optab_ne_expr;
7392 unoptab = optab_negate_expr;
7397 unoptab = optab_bit_not_expr;
7400 case TRUTH_NOT_EXPR:
7402 unoptab = optab_truth_not_expr;
7405 case PREDECREMENT_EXPR:
7407 incroptab = optab_predecrement_expr;
7410 case PREINCREMENT_EXPR:
7412 incroptab = optab_preincrement_expr;
7415 case POSTDECREMENT_EXPR:
7417 incroptab = optab_postdecrement_expr;
7420 case POSTINCREMENT_EXPR:
7422 incroptab = optab_postincrement_expr;
7427 bc_expand_constructor (exp);
7437 tree vars = TREE_OPERAND (exp, 0);
7438 int vars_need_expansion = 0;
7440 /* Need to open a binding contour here because
7441 if there are any cleanups they must be contained here. */
7442 expand_start_bindings (0);
7444 /* Mark the corresponding BLOCK for output. */
7445 if (TREE_OPERAND (exp, 2) != 0)
7446 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
7448 /* If VARS have not yet been expanded, expand them now. */
7451 if (DECL_RTL (vars) == 0)
7453 vars_need_expansion = 1;
7456 expand_decl_init (vars);
7457 vars = TREE_CHAIN (vars);
7460 bc_expand_expr (TREE_OPERAND (exp, 1));
7462 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7472 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
7473 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
7479 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7485 bc_expand_expr (TREE_OPERAND (exp, 0));
7486 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7487 lab = bc_get_bytecode_label ();
7489 bc_emit_instruction (duplicate);
7490 bc_emit_bytecode (opcode);
7491 bc_emit_bytecode_labelref (lab);
7493 #ifdef DEBUG_PRINT_CODE
7494 fputc ('\n', stderr);
7497 bc_emit_instruction (drop);
7499 bc_expand_expr (TREE_OPERAND (exp, 1));
7500 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
7501 bc_emit_bytecode_labeldef (lab);
7507 type = TREE_TYPE (TREE_OPERAND (exp, 0));
7509 /* Push the quantum. */
7510 bc_expand_expr (TREE_OPERAND (exp, 1));
7512 /* Convert it to the lvalue's type. */
7513 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
7515 /* Push the address of the lvalue */
7516 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
7518 /* Perform actual increment */
7519 bc_expand_increment (incroptab, type);
7523 /* Return the alignment in bits of EXP, a pointer valued expression.
7524 But don't return more than MAX_ALIGN no matter what.
7525 The alignment returned is, by default, the alignment of the thing that
7526 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7528 Otherwise, look at the expression to see if we can do better, i.e., if the
7529 expression is actually pointing at an object whose alignment is tighter. */
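/* For example, for `&buf' where `buf' is a suitably aligned static
   array, the ADDR_EXPR case below reaches the decl and reports
   DECL_ALIGN (buf), capped at MAX_ALIGN; for a plain `char *' value
   there is usually nothing better to report than the alignment of
   `char' itself.  */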
7532 get_pointer_alignment (exp, max_align)
7536 unsigned align, inner;
7538 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7541 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7542 align = MIN (align, max_align);
7546 switch (TREE_CODE (exp))
7550 case NON_LVALUE_EXPR:
7551 exp = TREE_OPERAND (exp, 0);
7552 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7554 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7555 align = MIN (inner, max_align);
7559 /* If sum of pointer + int, restrict our maximum alignment to that
7560 imposed by the integer. If not, we can't do any better than
7562 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
7565 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
7570 exp = TREE_OPERAND (exp, 0);
7574 /* See what we are pointing at and look at its alignment. */
7575 exp = TREE_OPERAND (exp, 0);
7576 if (TREE_CODE (exp) == FUNCTION_DECL)
7577 align = FUNCTION_BOUNDARY;
7578 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
7579 align = DECL_ALIGN (exp);
7580 #ifdef CONSTANT_ALIGNMENT
7581 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7582 align = CONSTANT_ALIGNMENT (exp, align);
7584 return MIN (align, max_align);
7592 /* Return the tree node and offset if a given argument corresponds to
7593 a string constant. */
7596 string_constant (arg, ptr_offset)
7602 if (TREE_CODE (arg) == ADDR_EXPR
7603 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7605 *ptr_offset = integer_zero_node;
7606 return TREE_OPERAND (arg, 0);
7608 else if (TREE_CODE (arg) == PLUS_EXPR)
7610 tree arg0 = TREE_OPERAND (arg, 0);
7611 tree arg1 = TREE_OPERAND (arg, 1);
7616 if (TREE_CODE (arg0) == ADDR_EXPR
7617 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
7620 return TREE_OPERAND (arg0, 0);
7622 else if (TREE_CODE (arg1) == ADDR_EXPR
7623 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
7626 return TREE_OPERAND (arg1, 0);
7633 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
7634 way, because it could contain a zero byte in the middle.
7635 TREE_STRING_LENGTH is the size of the character array, not the string.
7637 Unfortunately, string_constant can't access the values of const char
7638 arrays with initializers, so neither can we do so here. */
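/* So, for example, c_strlen of `"hello" + 2' folds to 3, while an
   unknown or out-of-range offset makes it give up (return 0) and the
   caller computes the length at run time instead.  */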
7648 src = string_constant (src, &offset_node);
7651 max = TREE_STRING_LENGTH (src);
7652 ptr = TREE_STRING_POINTER (src);
7653 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
7655 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7656 compute the offset to the following null if we don't know where to
7657 start searching for it. */
7659 for (i = 0; i < max; i++)
7662 /* We don't know the starting offset, but we do know that the string
7663 has no internal zero bytes. We can assume that the offset falls
7664 within the bounds of the string; otherwise, the programmer deserves
7665 what he gets. Subtract the offset from the length of the string,
7667 /* This would perhaps not be valid if we were dealing with named
7668 arrays in addition to literal string constants. */
7669 return size_binop (MINUS_EXPR, size_int (max), offset_node);
7672 /* We have a known offset into the string. Start searching there for
7673 a null character. */
7674 if (offset_node == 0)
7678 /* Did we get a long long offset? If so, punt. */
7679 if (TREE_INT_CST_HIGH (offset_node) != 0)
7681 offset = TREE_INT_CST_LOW (offset_node);
7683 /* If the offset is known to be out of bounds, warn, and call strlen at
7685 if (offset < 0 || offset > max)
7687 warning ("offset outside bounds of constant string");
7690 /* Use strlen to search for the first zero byte. Since any strings
7691 constructed with build_string will have nulls appended, we win even
7692 if we get handed something like (char[4])"abcd".
7694 Since OFFSET is our starting index into the string, no further
7695 calculation is needed. */
7696 return size_int (strlen (ptr + offset));
7700 expand_builtin_return_addr (fndecl_code, count, tem)
7701 enum built_in_function fndecl_code;
7707 /* Some machines need special handling before we can access
7708 arbitrary frames. For example, on the sparc, we must first flush
7709 all register windows to the stack. */
7710 #ifdef SETUP_FRAME_ADDRESSES
7711 SETUP_FRAME_ADDRESSES ();
7714 /* On the sparc, the return address is not in the frame, it is in a
7715 register. There is no way to access it off of the current frame
7716 pointer, but it can be accessed off the previous frame pointer by
7717 reading the value from the register window save area. */
7718 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7719 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
7723 /* Scan back COUNT frames to the specified frame. */
7724 for (i = 0; i < count; i++)
7726 /* Assume the dynamic chain pointer is in the word that the
7727 frame address points to, unless otherwise specified. */
7728 #ifdef DYNAMIC_CHAIN_ADDRESS
7729 tem = DYNAMIC_CHAIN_ADDRESS (tem);
7731 tem = memory_address (Pmode, tem);
7732 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
7735 /* For __builtin_frame_address, return what we've got. */
7736 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
7739 /* For __builtin_return_address, Get the return address from that
7741 #ifdef RETURN_ADDR_RTX
7742 tem = RETURN_ADDR_RTX (count, tem);
7744 tem = memory_address (Pmode,
7745 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7746 tem = gen_rtx (MEM, Pmode, tem);
7751 /* Expand an expression EXP that calls a built-in function,
7752 with result going to TARGET if that's convenient
7753 (and in mode MODE if that's convenient).
7754 SUBTARGET may be used as the target for computing one of EXP's operands.
7755 IGNORE is nonzero if the value is to be ignored. */
7757 #define CALLED_AS_BUILT_IN(NODE) \
7758 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
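/* The name test above lets the cases below tell an explicit
   `__builtin_strlen (s)' apart from a plain `strlen (s)' that was
   merely mapped onto the builtin: when not optimizing, the plain form
   is handed to the library while the explicit form is still expanded
   inline.  */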
7761 expand_builtin (exp, target, subtarget, mode, ignore)
7765 enum machine_mode mode;
7768 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7769 tree arglist = TREE_OPERAND (exp, 1);
7772 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
7773 optab builtin_optab;
7775 switch (DECL_FUNCTION_CODE (fndecl))
7780 /* build_function_call changes these into ABS_EXPR. */
7785 /* Treat these like sqrt, but only if the user asks for them. */
7786 if (! flag_fast_math)
7788 case BUILT_IN_FSQRT:
7789 /* If not optimizing, call the library function. */
7794 /* Arg could be wrong type if user redeclared this fcn wrong. */
7795 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
7798 /* Stabilize and compute the argument. */
7799 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
7800 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
7802 exp = copy_node (exp);
7803 arglist = copy_node (arglist);
7804 TREE_OPERAND (exp, 1) = arglist;
7805 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
7807 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7809 /* Make a suitable register to place result in. */
7810 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7815 switch (DECL_FUNCTION_CODE (fndecl))
7818 builtin_optab = sin_optab; break;
7820 builtin_optab = cos_optab; break;
7821 case BUILT_IN_FSQRT:
7822 builtin_optab = sqrt_optab; break;
7827 /* Compute into TARGET.
7828 Set TARGET to wherever the result comes back. */
7829 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7830 builtin_optab, op0, target, 0);
7832 /* If we were unable to expand via the builtin, stop the
7833 sequence (without outputting the insns) and break, causing
7834 a call to the library function. */
7841 /* Check the results by default. But if flag_fast_math is turned on,
7842 then assume sqrt will always be called with valid arguments. */
7844 if (! flag_fast_math)
7846 /* Don't define the builtin FP instructions
7847 if your machine is not IEEE. */
7848 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
7851 lab1 = gen_label_rtx ();
7853 /* Test the result; if it is NaN, set errno=EDOM because
7854 the argument was not in the domain. */
7855 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
7856 emit_jump_insn (gen_beq (lab1));
7860 #ifdef GEN_ERRNO_RTX
7861 rtx errno_rtx = GEN_ERRNO_RTX;
7864 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
7867 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
7870 /* We can't set errno=EDOM directly; let the library call do it.
7871 Pop the arguments right away in case the call gets deleted. */
7873 expand_call (exp, target, 0);
7880 /* Output the entire sequence. */
7881 insns = get_insns ();
7887 /* __builtin_apply_args returns a block of memory allocated on
7888 the stack into which is stored the arg pointer, structure
7889 value address, static chain, and all the registers that might
7890 possibly be used in performing a function call. The code is
7891 moved to the start of the function so the incoming values are
7893 case BUILT_IN_APPLY_ARGS:
7894 /* Don't do __builtin_apply_args more than once in a function.
7895 Save the result of the first call and reuse it. */
7896 if (apply_args_value != 0)
7897 return apply_args_value;
7899 /* When this function is called, it means that registers must be
7900 saved on entry to this function. So we migrate the
7901 call to the first insn of this function. */
7906 temp = expand_builtin_apply_args ();
7910 apply_args_value = temp;
7912 /* Put the sequence after the NOTE that starts the function.
7913 If this is inside a SEQUENCE, make the outer-level insn
7914 chain current, so the code is placed at the start of the
7916 push_topmost_sequence ();
7917 emit_insns_before (seq, NEXT_INSN (get_insns ()));
7918 pop_topmost_sequence ();
7922 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7923 FUNCTION with a copy of the parameters described by
7924 ARGUMENTS, and ARGSIZE. It returns a block of memory
7925 allocated on the stack into which is stored all the registers
7926 that might possibly be used for returning the result of a
7927 function. ARGUMENTS is the value returned by
7928 __builtin_apply_args. ARGSIZE is the number of bytes of
7929 arguments that must be copied. ??? How should this value be
7930 computed? We'll also need a safe worst case value for varargs
7932 case BUILT_IN_APPLY:
7934 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7935 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7936 || TREE_CHAIN (arglist) == 0
7937 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7938 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7939 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7947 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
7948 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
7950 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7953 /* __builtin_return (RESULT) causes the function to return the
7954 value described by RESULT. RESULT is address of the block of
7955 memory returned by __builtin_apply. */
7956 case BUILT_IN_RETURN:
7958 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7959 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
7960 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
7961 NULL_RTX, VOIDmode, 0));
7964 case BUILT_IN_SAVEREGS:
7965 /* Don't do __builtin_saveregs more than once in a function.
7966 Save the result of the first call and reuse it. */
7967 if (saveregs_value != 0)
7968 return saveregs_value;
7970 /* When this function is called, it means that registers must be
7971 saved on entry to this function. So we migrate the
7972 call to the first insn of this function. */
7976 /* Now really call the function. `expand_call' does not call
7977 expand_builtin, so there is no danger of infinite recursion here. */
7980 #ifdef EXPAND_BUILTIN_SAVEREGS
7981 /* Do whatever the machine needs done in this case. */
7982 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
7984 /* The register where the function returns its value
7985 is likely to have something else in it, such as an argument.
7986 So preserve that register around the call. */
7988 if (value_mode != VOIDmode)
7990 rtx valreg = hard_libcall_value (value_mode);
7991 rtx saved_valreg = gen_reg_rtx (value_mode);
7993 emit_move_insn (saved_valreg, valreg);
7994 temp = expand_call (exp, target, ignore);
7995 emit_move_insn (valreg, saved_valreg);
7998 /* Generate the call, putting the value in a pseudo. */
7999 temp = expand_call (exp, target, ignore);
8005 saveregs_value = temp;
8007 /* Put the sequence after the NOTE that starts the function.
8008 If this is inside a SEQUENCE, make the outer-level insn
8009 chain current, so the code is placed at the start of the
8011 push_topmost_sequence ();
8012 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8013 pop_topmost_sequence ();
8017 /* __builtin_args_info (N) returns word N of the arg space info
8018 for the current function. The number and meanings of words
8019 is controlled by the definition of CUMULATIVE_ARGS. */
8020 case BUILT_IN_ARGS_INFO:
8022 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
8024 int *word_ptr = (int *) &current_function_args_info;
8025 tree type, elts, result;
8027 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8028 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8029 __FILE__, __LINE__);
8033 tree arg = TREE_VALUE (arglist);
8034 if (TREE_CODE (arg) != INTEGER_CST)
8035 error ("argument of `__builtin_args_info' must be constant");
8038 int wordnum = TREE_INT_CST_LOW (arg);
8040 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8041 error ("argument of `__builtin_args_info' out of range");
8043 return GEN_INT (word_ptr[wordnum]);
8047 error ("missing argument in `__builtin_args_info'");
8052 for (i = 0; i < nwords; i++)
8053 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0), elts);
8055 type = build_array_type (integer_type_node,
8056 build_index_type (build_int_2 (nwords, 0)));
8057 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8058 TREE_CONSTANT (result) = 1;
8059 TREE_STATIC (result) = 1;
8060 result = build (INDIRECT_REF, build_pointer_type (type), result);
8061 TREE_CONSTANT (result) = 1;
8062 return expand_expr (result, NULL_RTX, VOIDmode, 0);
8066 /* Return the address of the first anonymous stack arg. */
8067 case BUILT_IN_NEXT_ARG:
8069 tree fntype = TREE_TYPE (current_function_decl);
8071 if ((TYPE_ARG_TYPES (fntype) == 0
8072 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8074 && ! current_function_varargs)
8076 error ("`va_start' used in function with fixed args");
8082 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8083 tree arg = TREE_VALUE (arglist);
8085 /* Strip off all nops for the sake of the comparison. This
8086 is not quite the same as STRIP_NOPS. It does more.
8087 We must also strip off INDIRECT_EXPR for C++ reference
8089 while (TREE_CODE (arg) == NOP_EXPR
8090 || TREE_CODE (arg) == CONVERT_EXPR
8091 || TREE_CODE (arg) == NON_LVALUE_EXPR
8092 || TREE_CODE (arg) == INDIRECT_REF)
8093 arg = TREE_OPERAND (arg, 0);
8094 if (arg != last_parm)
8095 warning ("second parameter of `va_start' not last named argument");
8097 else if (! current_function_varargs)
8098 /* Evidently an out of date version of <stdarg.h>; can't validate
8099 va_start's second argument, but can still work as intended. */
8100 warning ("`__builtin_next_arg' called without an argument");
8103 return expand_binop (Pmode, add_optab,
8104 current_function_internal_arg_pointer,
8105 current_function_arg_offset_rtx,
8106 NULL_RTX, 0, OPTAB_LIB_WIDEN);
8108 case BUILT_IN_CLASSIFY_TYPE:
8111 tree type = TREE_TYPE (TREE_VALUE (arglist));
8112 enum tree_code code = TREE_CODE (type);
8113 if (code == VOID_TYPE)
8114 return GEN_INT (void_type_class);
8115 if (code == INTEGER_TYPE)
8116 return GEN_INT (integer_type_class);
8117 if (code == CHAR_TYPE)
8118 return GEN_INT (char_type_class);
8119 if (code == ENUMERAL_TYPE)
8120 return GEN_INT (enumeral_type_class);
8121 if (code == BOOLEAN_TYPE)
8122 return GEN_INT (boolean_type_class);
8123 if (code == POINTER_TYPE)
8124 return GEN_INT (pointer_type_class);
8125 if (code == REFERENCE_TYPE)
8126 return GEN_INT (reference_type_class);
8127 if (code == OFFSET_TYPE)
8128 return GEN_INT (offset_type_class);
8129 if (code == REAL_TYPE)
8130 return GEN_INT (real_type_class);
8131 if (code == COMPLEX_TYPE)
8132 return GEN_INT (complex_type_class);
8133 if (code == FUNCTION_TYPE)
8134 return GEN_INT (function_type_class);
8135 if (code == METHOD_TYPE)
8136 return GEN_INT (method_type_class);
8137 if (code == RECORD_TYPE)
8138 return GEN_INT (record_type_class);
8139 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8140 return GEN_INT (union_type_class);
8141 if (code == ARRAY_TYPE)
8143 if (TYPE_STRING_FLAG (type))
8144 return GEN_INT (string_type_class);
8146 return GEN_INT (array_type_class);
8148 if (code == SET_TYPE)
8149 return GEN_INT (set_type_class);
8150 if (code == FILE_TYPE)
8151 return GEN_INT (file_type_class);
8152 if (code == LANG_TYPE)
8153 return GEN_INT (lang_type_class);
8155 return GEN_INT (no_type_class);
8157 case BUILT_IN_CONSTANT_P:
8162 tree arg = TREE_VALUE (arglist);
8165 return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
8166 || (TREE_CODE (arg) == ADDR_EXPR
8167 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8168 ? const1_rtx : const0_rtx);
8171 case BUILT_IN_FRAME_ADDRESS:
8172 /* The argument must be a nonnegative integer constant.
8173 It counts the number of frames to scan up the stack.
8174 The value is the address of that frame. */
8175 case BUILT_IN_RETURN_ADDRESS:
8176 /* The argument must be a nonnegative integer constant.
8177 It counts the number of frames to scan up the stack.
8178 The value is the return address saved in that frame. */
8180 /* Warning about missing arg was already issued. */
8182 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
8184 error ("invalid arg to `__builtin_return_address'");
8187 else if (tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
8189 error ("invalid arg to `__builtin_return_address'");
8194 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
8195 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
8196 hard_frame_pointer_rtx);
8198 /* For __builtin_frame_address, return what we've got. */
8199 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8202 if (GET_CODE (tem) != REG)
8203 tem = copy_to_reg (tem);
8207 case BUILT_IN_ALLOCA:
8209 /* Arg could be non-integer if user redeclared this fcn wrong. */
8210 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8213 /* Compute the argument. */
8214 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
8216 /* Allocate the desired space. */
8217 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
8220 /* If not optimizing, call the library function. */
8221 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8225 /* Arg could be non-integer if user redeclared this fcn wrong. */
8226 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8229 /* Compute the argument. */
8230 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8231 /* Compute ffs, into TARGET if possible.
8232 Set TARGET to wherever the result comes back. */
8233 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8234 ffs_optab, op0, target, 1);
8239 case BUILT_IN_STRLEN:
8240 /* If not optimizing, call the library function. */
8241 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8245 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8246 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8250 tree src = TREE_VALUE (arglist);
8251 tree len = c_strlen (src);
8254 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8256 rtx result, src_rtx, char_rtx;
8257 enum machine_mode insn_mode = value_mode, char_mode;
8258 enum insn_code icode;
8260 /* If the length is known, just return it. */
8262 return expand_expr (len, target, mode, 0);
8264 /* If SRC is not a pointer type, don't do this operation inline. */
8268 /* Call a function if we can't compute strlen in the right mode. */
8270 while (insn_mode != VOIDmode)
8272 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
8273 if (icode != CODE_FOR_nothing)
8276 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
8278 if (insn_mode == VOIDmode)
8281 /* Make a place to write the result of the instruction. */
8284 && GET_CODE (result) == REG
8285 && GET_MODE (result) == insn_mode
8286 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8287 result = gen_reg_rtx (insn_mode);
8289 /* Make sure the operands are acceptable to the predicates. */
8291 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
8292 result = gen_reg_rtx (insn_mode);
8294 src_rtx = memory_address (BLKmode,
8295 expand_expr (src, NULL_RTX, ptr_mode,
8297 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
8298 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
8300 char_rtx = const0_rtx;
8301 char_mode = insn_operand_mode[(int)icode][2];
8302 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
8303 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
8305 emit_insn (GEN_FCN (icode) (result,
8306 gen_rtx (MEM, BLKmode, src_rtx),
8307 char_rtx, GEN_INT (align)));
8309 /* Return the value in the proper mode for this function. */
8310 if (GET_MODE (result) == value_mode)
8312 else if (target != 0)
8314 convert_move (target, result, 0);
8318 return convert_to_mode (value_mode, result, 0);
8321 case BUILT_IN_STRCPY:
8322 /* If not optimizing, call the library function. */
8323 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8327 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8328 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8329 || TREE_CHAIN (arglist) == 0
8330 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8334 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
8339 len = size_binop (PLUS_EXPR, len, integer_one_node);
8341 chainon (arglist, build_tree_list (NULL_TREE, len));
8345 case BUILT_IN_MEMCPY:
8346 /* If not optimizing, call the library function. */
8347 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8351 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8352 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8353 || TREE_CHAIN (arglist) == 0
8354 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8355 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8356 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8360 tree dest = TREE_VALUE (arglist);
8361 tree src = TREE_VALUE (TREE_CHAIN (arglist));
8362 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8366 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8368 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8369 rtx dest_rtx, dest_mem, src_mem;
8371 /* If either SRC or DEST is not a pointer type, don't do
8372 this operation in-line. */
8373 if (src_align == 0 || dest_align == 0)
8375 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
8376 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8380 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8381 dest_mem = gen_rtx (MEM, BLKmode,
8382 memory_address (BLKmode, dest_rtx));
8383 /* There could be a void* cast on top of the object. */
8384 while (TREE_CODE (dest) == NOP_EXPR)
8385 dest = TREE_OPERAND (dest, 0);
8386 type = TREE_TYPE (TREE_TYPE (dest));
8387 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8388 src_mem = gen_rtx (MEM, BLKmode,
8389 memory_address (BLKmode,
8390 expand_expr (src, NULL_RTX,
8393 /* There could be a void* cast on top of the object. */
8394 while (TREE_CODE (src) == NOP_EXPR)
8395 src = TREE_OPERAND (src, 0);
8396 type = TREE_TYPE (TREE_TYPE (src));
8397 MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
8399 /* Copy word part most expediently. */
8400 emit_block_move (dest_mem, src_mem,
8401 expand_expr (len, NULL_RTX, VOIDmode, 0),
8402 MIN (src_align, dest_align));
8403 return force_operand (dest_rtx, NULL_RTX);
8406 /* These comparison functions need an instruction that returns an actual
8407 index. An ordinary compare that just sets the condition codes is not enough. */
8409 #ifdef HAVE_cmpstrsi
8410 case BUILT_IN_STRCMP:
8411 /* If not optimizing, call the library function. */
8412 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8416 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8417 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8418 || TREE_CHAIN (arglist) == 0
8419 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8421 else if (!HAVE_cmpstrsi)
8424 tree arg1 = TREE_VALUE (arglist);
8425 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8429 len = c_strlen (arg1);
8431 len = size_binop (PLUS_EXPR, integer_one_node, len);
8432 len2 = c_strlen (arg2);
8434 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
8436 /* If we don't have a constant length for the first, use the length
8437 of the second, if we know it. We don't require a constant for
8438 this case; some cost analysis could be done if both are available
8439 but neither is constant. For now, assume they're equally cheap.
8441 If both strings have constant lengths, use the smaller. This
8442 could arise if optimization results in strcpy being called with
8443 two fixed strings, or if the code was machine-generated. We should
8444 add some code to the `memcmp' handler below to deal with such
8445 situations, someday. */
8446 if (!len || TREE_CODE (len) != INTEGER_CST)
8453 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
8455 if (tree_int_cst_lt (len2, len))
8459 chainon (arglist, build_tree_list (NULL_TREE, len));
8463 case BUILT_IN_MEMCMP:
8464 /* If not optimizing, call the library function. */
8465 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8469 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8470 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8471 || TREE_CHAIN (arglist) == 0
8472 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8473 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8474 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8476 else if (!HAVE_cmpstrsi)
8479 tree arg1 = TREE_VALUE (arglist);
8480 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8481 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8485 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8487 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8488 enum machine_mode insn_mode
8489 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
8491 /* If we don't have POINTER_TYPE, call the function. */
8492 if (arg1_align == 0 || arg2_align == 0)
8494 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
8495 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8499 /* Make a place to write the result of the instruction. */
8502 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
8503 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8504 result = gen_reg_rtx (insn_mode);
8506 emit_insn (gen_cmpstrsi (result,
8507 gen_rtx (MEM, BLKmode,
8508 expand_expr (arg1, NULL_RTX,
8511 gen_rtx (MEM, BLKmode,
8512 expand_expr (arg2, NULL_RTX,
8515 expand_expr (len, NULL_RTX, VOIDmode, 0),
8516 GEN_INT (MIN (arg1_align, arg2_align))));
8518 /* Return the value in the proper mode for this function. */
8519 mode = TYPE_MODE (TREE_TYPE (exp));
8520 if (GET_MODE (result) == mode)
8522 else if (target != 0)
8524 convert_move (target, result, 0);
8528 return convert_to_mode (mode, result, 0);
8531 case BUILT_IN_STRCMP:
8532 case BUILT_IN_MEMCMP:
8536 /* __builtin_setjmp is passed a pointer to an array of five words
8537 (not all will be used on all machines). It operates similarly to
8538 the C library function of the same name, but is more efficient.
8539 Much of the code below (and for longjmp) is copied from the handling of nonlocal gotos.
8542 NOTE: This is intended for use by GNAT and will only work when
8543 called in the way GNAT uses it; an illustrative usage sketch follows
8544 this function. This code will likely NOT survive to the GCC 2.8.0 release. */
8545 case BUILT_IN_SETJMP:
8547 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8553 convert_modes (Pmode, ptr_mode,
8554 expand_expr (TREE_VALUE (arglist),
8558 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
8559 enum machine_mode sa_mode = Pmode;
8561 int old_inhibit_defer_pop = inhibit_defer_pop;
8562 int return_pops = RETURN_POPS_ARGS (get_identifier ("__dummy"),
8563 get_identifier ("__dummy"), 0);
8565 CUMULATIVE_ARGS args_so_far;
8568 if (target == 0 || GET_CODE (target) != REG
8569 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8570 target = gen_reg_rtx (value_mode);
8574 CONST_CALL_P (emit_note (NULL_PTR, NOTE_INSN_SETJMP)) = 1;
8575 current_function_calls_setjmp = 1;
8577 /* We store the frame pointer and the address of lab1 in the buffer
8578 and use the rest of it for the stack save area, which is
8579 machine-dependent. */
8580 emit_move_insn (gen_rtx (MEM, Pmode, buf_addr),
8581 virtual_stack_vars_rtx);
8583 (validize_mem (gen_rtx (MEM, Pmode,
8584 plus_constant (buf_addr,
8585 GET_MODE_SIZE (Pmode)))),
8586 gen_rtx (LABEL_REF, Pmode, lab1));
8588 #ifdef HAVE_save_stack_nonlocal
8589 if (HAVE_save_stack_nonlocal)
8590 sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
8593 stack_save = gen_rtx (MEM, sa_mode,
8594 plus_constant (buf_addr,
8595 2 * GET_MODE_SIZE (Pmode)));
8596 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
8600 emit_insn (gen_setjmp ());
8603 /* Set TARGET to zero and branch around the other case. */
8604 emit_move_insn (target, const0_rtx);
8605 emit_jump_insn (gen_jump (lab2));
8609 /* Note that setjmp clobbers FP when we get here, so we have to
8610 make sure it's marked as used by this function. */
8611 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
8613 /* Mark the static chain as clobbered here so life information
8614 doesn't get messed up for it. */
8615 emit_insn (gen_rtx (CLOBBER, VOIDmode, static_chain_rtx));
8617 /* Now put in the code to restore the frame pointer, and argument
8618 pointer, if needed. The code below is from expand_end_bindings
8619 in stmt.c; see detailed documentation there. */
8620 #ifdef HAVE_nonlocal_goto
8621 if (! HAVE_nonlocal_goto)
8623 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8625 current_function_has_nonlocal_goto = 1;
8627 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8628 if (fixed_regs[ARG_POINTER_REGNUM])
8630 #ifdef ELIMINABLE_REGS
8631 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
8633 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8634 if (elim_regs[i].from == ARG_POINTER_REGNUM
8635 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8638 if (i == sizeof elim_regs / sizeof elim_regs [0])
8641 /* Now restore our arg pointer from the address at which it
8642 was saved in our stack frame.
8643 If space hasn't been allocated for it yet, make some now. */
8645 if (arg_pointer_save_area == 0)
8646 arg_pointer_save_area
8647 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8648 emit_move_insn (virtual_incoming_args_rtx,
8649 copy_to_reg (arg_pointer_save_area));
8654 /* The static chain pointer contains the address of the dummy function.
8655 We need to call it here to handle some PIC cases of restoring
8656 a global pointer. Then return 1. */
8657 op0 = copy_to_mode_reg (Pmode, static_chain_rtx);
8659 /* We can't actually call emit_library_call here, so do everything
8660 it does, which isn't much for a libfunc with no args. */
8661 op0 = memory_address (FUNCTION_MODE, op0);
8663 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE,
8664 gen_rtx (SYMBOL_REF, Pmode, "__dummy"), 1);
8665 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1);
8667 #ifndef ACCUMULATE_OUTGOING_ARGS
8668 #ifdef HAVE_call_pop
8670 emit_call_insn (gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, op0),
8671 const0_rtx, next_arg_reg,
8672 GEN_INT (return_pops)));
8679 emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, op0),
8680 const0_rtx, next_arg_reg, const0_rtx));
8685 emit_move_insn (target, const1_rtx);
8690 /* __builtin_longjmp is passed a pointer to an array of five words
8691 and a value, which is a dummy. It's similar to the C library longjmp
8692 function but works with __builtin_setjmp above. */
8693 case BUILT_IN_LONGJMP:
8694 if (arglist == 0 || TREE_CHAIN (arglist) == 0
8695 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8699 tree dummy_id = get_identifier ("__dummy");
8700 tree dummy_type = build_function_type (void_type_node, NULL_TREE);
8701 tree dummy_decl = build_decl (FUNCTION_DECL, dummy_id, dummy_type);
8704 convert_modes (Pmode, ptr_mode,
8705 expand_expr (TREE_VALUE (arglist),
8709 rtx fp = gen_rtx (MEM, Pmode, buf_addr);
8710 rtx lab = gen_rtx (MEM, Pmode,
8711 plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
8712 enum machine_mode sa_mode
8713 #ifdef HAVE_save_stack_nonlocal
8714 = (HAVE_save_stack_nonlocal
8715 ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
8720 rtx stack = gen_rtx (MEM, sa_mode,
8721 plus_constant (buf_addr,
8722 2 * GET_MODE_SIZE (Pmode)));
8724 DECL_EXTERNAL (dummy_decl) = 1;
8725 TREE_PUBLIC (dummy_decl) = 1;
8726 make_decl_rtl (dummy_decl, NULL_PTR, 1);
8728 /* Expand the second expression just for side-effects. */
8729 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
8730 const0_rtx, VOIDmode, 0);
8732 assemble_external (dummy_decl);
8734 /* Pick up FP, label, and SP from the block and jump. This code is
8735 from expand_goto in stmt.c; see there for detailed comments. */
8736 #if HAVE_nonlocal_goto
8737 if (HAVE_nonlocal_goto)
8738 emit_insn (gen_nonlocal_goto (fp, lab, stack,
8739 XEXP (DECL_RTL (dummy_decl), 0)));
8743 lab = copy_to_reg (lab);
8744 emit_move_insn (hard_frame_pointer_rtx, fp);
8745 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
8747 /* Put in the static chain register the address of the dummy
8749 emit_move_insn (static_chain_rtx, XEXP (DECL_RTL (dummy_decl), 0));
8750 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
8751 emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
8752 emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
8753 emit_indirect_jump (lab);
8759 default: /* just do library call, if unknown builtin */
8760 error ("built-in function `%s' not currently supported",
8761 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
8764 /* The switch statement above can drop through to cause the function
8765 to be called normally. */
8767 return expand_call (exp, target, ignore);
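/* Illustrative usage sketch, not part of the original source: how the
   __builtin_setjmp/__builtin_longjmp cases above are meant to be used.
   The buffer is an array of five words, as described before those cases;
   the driver functions and their names are assumptions for the example.  */
#if 0
static void *example_jmp_buffer[5];	/* frame pointer, label, stack save area */

static void
example_bail_out ()
{
  __builtin_longjmp (example_jmp_buffer, 1);	/* second argument is a dummy */
}

static int
example_guarded_run (fn)
     void (*fn) ();
{
  if (__builtin_setjmp (example_jmp_buffer))
    return 1;			/* reentered via __builtin_longjmp */
  (*fn) ();			/* normal path falls through and returns 0 */
  return 0;
}
#endif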
8770 /* Built-in functions to perform an untyped call and return. */
8772 /* For each register that may be used for calling a function, this
8773 gives a mode used to copy the register's value. VOIDmode indicates
8774 the register is not used for calling a function. If the machine
8775 has register windows, this gives only the outbound registers.
8776 INCOMING_REGNO gives the corresponding inbound register. */
8777 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
8779 /* For each register that may be used for returning values, this gives
8780 a mode used to copy the register's value. VOIDmode indicates the
8781 register is not used for returning values. If the machine has
8782 register windows, this gives only the outbound registers.
8783 INCOMING_REGNO gives the corresponding inbound register. */
8784 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
8786 /* For each register that may be used for calling a function, this
8787 gives the offset of that register into the block returned by
8788 __builtin_apply_args. 0 indicates that the register is not
8789 used for calling a function. */
8790 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
8792 /* Return the offset of register REGNO into the block returned by
8793 __builtin_apply_args. This is not declared static, since it is
8794 needed in objc-act.c. */
8797 apply_args_register_offset (regno)
8802 /* Arguments are always put in outgoing registers (in the argument
8803 block) when that makes sense. */
8804 #ifdef OUTGOING_REGNO
8805 regno = OUTGOING_REGNO(regno);
8807 return apply_args_reg_offset[regno];
8810 /* Return the size required for the block returned by __builtin_apply_args,
8811 and initialize apply_args_mode. */
8816 static int size = -1;
8818 enum machine_mode mode;
8820 /* The values computed by this function never change. */
8823 /* The first value is the incoming arg-pointer. */
8824 size = GET_MODE_SIZE (Pmode);
8826 /* The second value is the structure value address unless this is
8827 passed as an "invisible" first argument. */
8828 if (struct_value_rtx)
8829 size += GET_MODE_SIZE (Pmode);
8831 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8832 if (FUNCTION_ARG_REGNO_P (regno))
8834 /* Search for the proper mode for copying this register's
8835 value. I'm not sure this is right, but it works so far. */
8836 enum machine_mode best_mode = VOIDmode;
8838 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
8840 mode = GET_MODE_WIDER_MODE (mode))
8841 if (HARD_REGNO_MODE_OK (regno, mode)
8842 && HARD_REGNO_NREGS (regno, mode) == 1)
8845 if (best_mode == VOIDmode)
8846 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
8848 mode = GET_MODE_WIDER_MODE (mode))
8849 if (HARD_REGNO_MODE_OK (regno, mode)
8850 && (mov_optab->handlers[(int) mode].insn_code
8851 != CODE_FOR_nothing))
8855 if (mode == VOIDmode)
8858 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8859 if (size % align != 0)
8860 size = CEIL (size, align) * align;
8861 apply_args_reg_offset[regno] = size;
8862 size += GET_MODE_SIZE (mode);
8863 apply_args_mode[regno] = mode;
8867 apply_args_mode[regno] = VOIDmode;
8868 apply_args_reg_offset[regno] = 0;
8874 /* Return the size required for the block returned by __builtin_apply,
8875 and initialize apply_result_mode. */
8878 apply_result_size ()
8880 static int size = -1;
8882 enum machine_mode mode;
8884 /* The values computed by this function never change. */
8889 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8890 if (FUNCTION_VALUE_REGNO_P (regno))
8892 /* Search for the proper mode for copying this register's
8893 value. I'm not sure this is right, but it works so far. */
8894 enum machine_mode best_mode = VOIDmode;
8896 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
8898 mode = GET_MODE_WIDER_MODE (mode))
8899 if (HARD_REGNO_MODE_OK (regno, mode))
8902 if (best_mode == VOIDmode)
8903 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
8905 mode = GET_MODE_WIDER_MODE (mode))
8906 if (HARD_REGNO_MODE_OK (regno, mode)
8907 && (mov_optab->handlers[(int) mode].insn_code
8908 != CODE_FOR_nothing))
8912 if (mode == VOIDmode)
8915 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8916 if (size % align != 0)
8917 size = CEIL (size, align) * align;
8918 size += GET_MODE_SIZE (mode);
8919 apply_result_mode[regno] = mode;
8922 apply_result_mode[regno] = VOIDmode;
8924 /* Allow targets that use untyped_call and untyped_return to override
8925 the size so that machine-specific information can be stored here. */
8926 #ifdef APPLY_RESULT_SIZE
8927 size = APPLY_RESULT_SIZE;
8933 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
8934 /* Create a vector describing the result block RESULT. If SAVEP is true,
8935 the result block is used to save the values; otherwise it is used to
8936 restore the values. */
8939 result_vector (savep, result)
8943 int regno, size, align, nelts;
8944 enum machine_mode mode;
8946 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
8949 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8950 if ((mode = apply_result_mode[regno]) != VOIDmode)
8952 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8953 if (size % align != 0)
8954 size = CEIL (size, align) * align;
8955 reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
8956 mem = change_address (result, mode,
8957 plus_constant (XEXP (result, 0), size));
8958 savevec[nelts++] = (savep
8959 ? gen_rtx (SET, VOIDmode, mem, reg)
8960 : gen_rtx (SET, VOIDmode, reg, mem));
8961 size += GET_MODE_SIZE (mode);
8963 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
8965 #endif /* HAVE_untyped_call or HAVE_untyped_return */
8967 /* Save the state required to perform an untyped call with the same
8968 arguments as were passed to the current function. */
8971 expand_builtin_apply_args ()
8974 int size, align, regno;
8975 enum machine_mode mode;
8977 /* Create a block where the arg-pointer, structure value address,
8978 and argument registers can be saved. */
8979 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
8981 /* Walk past the arg-pointer and structure value address. */
8982 size = GET_MODE_SIZE (Pmode);
8983 if (struct_value_rtx)
8984 size += GET_MODE_SIZE (Pmode);
8986 /* Save each register used in calling a function to the block. */
8987 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8988 if ((mode = apply_args_mode[regno]) != VOIDmode)
8992 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8993 if (size % align != 0)
8994 size = CEIL (size, align) * align;
8996 tem = gen_rtx (REG, mode, INCOMING_REGNO (regno));
8999 /* For reg-stack.c's stack register housekeeping.
9000 Compare with a similar piece of code in function.c. */
9002 emit_insn (gen_rtx (USE, mode, tem));
9005 emit_move_insn (change_address (registers, mode,
9006 plus_constant (XEXP (registers, 0),
9009 size += GET_MODE_SIZE (mode);
9012 /* Save the arg pointer to the block. */
9013 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9014 copy_to_reg (virtual_incoming_args_rtx));
9015 size = GET_MODE_SIZE (Pmode);
9017 /* Save the structure value address unless this is passed as an
9018 "invisible" first argument. */
9019 if (struct_value_incoming_rtx)
9021 emit_move_insn (change_address (registers, Pmode,
9022 plus_constant (XEXP (registers, 0),
9024 copy_to_reg (struct_value_incoming_rtx));
9025 size += GET_MODE_SIZE (Pmode);
9028 /* Return the address of the block. */
9029 return copy_addr_to_reg (XEXP (registers, 0));
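/* Illustrative sketch, not part of the original source: the conceptual layout
   of the block whose address expand_builtin_apply_args returns, as built by
   the code above.  The concrete field types and the two-element register
   area are assumptions for the example; the real block holds one properly
   aligned slot for every register satisfying FUNCTION_ARG_REGNO_P, at the
   offsets recorded in apply_args_reg_offset.  */
#if 0
struct example_apply_args_block
{
  char *incoming_arg_pointer;		/* saved first */
  char *structure_value_address;	/* only if not passed invisibly */
  long arg_register_area[2];		/* argument register save area */
};
#endif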
9032 /* Perform an untyped call and save the state required to perform an
9033 untyped return of whatever value was returned by the given function. */
9036 expand_builtin_apply (function, arguments, argsize)
9037 rtx function, arguments, argsize;
9039 int size, align, regno;
9040 enum machine_mode mode;
9041 rtx incoming_args, result, reg, dest, call_insn;
9042 rtx old_stack_level = 0;
9043 rtx call_fusage = 0;
9045 /* Create a block where the return registers can be saved. */
9046 result = assign_stack_local (BLKmode, apply_result_size (), -1);
9048 /* ??? The argsize value should be adjusted here. */
9050 /* Fetch the arg pointer from the ARGUMENTS block. */
9051 incoming_args = gen_reg_rtx (Pmode);
9052 emit_move_insn (incoming_args,
9053 gen_rtx (MEM, Pmode, arguments));
9054 #ifndef STACK_GROWS_DOWNWARD
9055 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9056 incoming_args, 0, OPTAB_LIB_WIDEN);
9059 /* Perform postincrements before actually calling the function. */
9062 /* Push a new argument block and copy the arguments. */
9063 do_pending_stack_adjust ();
9064 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
9066 /* Push a block of memory onto the stack to store the memory arguments.
9067 Save the address in a register, and copy the memory arguments. ??? I
9068 haven't figured out how the calling convention macros affect this,
9069 but it's likely that the source and/or destination addresses in
9070 the block copy will need updating in machine-specific ways. */
9071 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
9072 emit_block_move (gen_rtx (MEM, BLKmode, dest),
9073 gen_rtx (MEM, BLKmode, incoming_args),
9075 PARM_BOUNDARY / BITS_PER_UNIT);
9077 /* Refer to the argument block. */
9079 arguments = gen_rtx (MEM, BLKmode, arguments);
9081 /* Walk past the arg-pointer and structure value address. */
9082 size = GET_MODE_SIZE (Pmode);
9083 if (struct_value_rtx)
9084 size += GET_MODE_SIZE (Pmode);
9086 /* Restore each of the registers previously saved. Make USE insns
9087 for each of these registers for use in making the call. */
9088 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9089 if ((mode = apply_args_mode[regno]) != VOIDmode)
9091 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9092 if (size % align != 0)
9093 size = CEIL (size, align) * align;
9094 reg = gen_rtx (REG, mode, regno);
9095 emit_move_insn (reg,
9096 change_address (arguments, mode,
9097 plus_constant (XEXP (arguments, 0),
9100 use_reg (&call_fusage, reg);
9101 size += GET_MODE_SIZE (mode);
9104 /* Restore the structure value address unless this is passed as an
9105 "invisible" first argument. */
9106 size = GET_MODE_SIZE (Pmode);
9107 if (struct_value_rtx)
9109 rtx value = gen_reg_rtx (Pmode);
9110 emit_move_insn (value,
9111 change_address (arguments, Pmode,
9112 plus_constant (XEXP (arguments, 0),
9114 emit_move_insn (struct_value_rtx, value);
9115 if (GET_CODE (struct_value_rtx) == REG)
9116 use_reg (&call_fusage, struct_value_rtx);
9117 size += GET_MODE_SIZE (Pmode);
9120 /* All arguments and registers used for the call are set up by now! */
9121 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
9123 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
9124 and we don't want to load it into a register as an optimization,
9125 because prepare_call_address already did it if it should be done. */
9126 if (GET_CODE (function) != SYMBOL_REF)
9127 function = memory_address (FUNCTION_MODE, function);
9129 /* Generate the actual call instruction and save the return value. */
9130 #ifdef HAVE_untyped_call
9131 if (HAVE_untyped_call)
9132 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
9133 result, result_vector (1, result)));
9136 #ifdef HAVE_call_value
9137 if (HAVE_call_value)
9141 /* Locate the unique return register. It is not possible to
9142 express a call that sets more than one return register using
9143 call_value; use untyped_call for that. In fact, untyped_call
9144 only needs to save the return registers in the given block. */
9145 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9146 if ((mode = apply_result_mode[regno]) != VOIDmode)
9149 abort (); /* HAVE_untyped_call required. */
9150 valreg = gen_rtx (REG, mode, regno);
9153 emit_call_insn (gen_call_value (valreg,
9154 gen_rtx (MEM, FUNCTION_MODE, function),
9155 const0_rtx, NULL_RTX, const0_rtx));
9157 emit_move_insn (change_address (result, GET_MODE (valreg),
9165 /* Find the CALL insn we just emitted. */
9166 for (call_insn = get_last_insn ();
9167 call_insn && GET_CODE (call_insn) != CALL_INSN;
9168 call_insn = PREV_INSN (call_insn))
9174 /* Put the register usage information on the CALL. If there is already
9175 some usage information, put ours at the end. */
9176 if (CALL_INSN_FUNCTION_USAGE (call_insn))
9180 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
9181 link = XEXP (link, 1))
9184 XEXP (link, 1) = call_fusage;
9187 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
9189 /* Restore the stack. */
9190 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
9192 /* Return the address of the result block. */
9193 return copy_addr_to_reg (XEXP (result, 0));
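/* Illustrative usage sketch, not part of the original source: the untyped
   call builtins expanded above are designed to be used together to forward
   a call with whatever arguments the current function received.  The callee
   and the argument-block size below are assumptions for the example only.  */
#if 0
extern double example_target_function ();

static double
example_forwarding_wrapper ()
{
  void *args = __builtin_apply_args ();		/* save incoming registers */
  void *result
    = __builtin_apply ((void (*) ()) example_target_function, args, 64);
  __builtin_return (result);			/* untyped return; does not fall through */
}
#endif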
9196 /* Perform an untyped return. */
9199 expand_builtin_return (result)
9202 int size, align, regno;
9203 enum machine_mode mode;
9205 rtx call_fusage = 0;
9207 apply_result_size ();
9208 result = gen_rtx (MEM, BLKmode, result);
9210 #ifdef HAVE_untyped_return
9211 if (HAVE_untyped_return)
9213 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
9219 /* Restore the return value and note that each value is used. */
9221 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9222 if ((mode = apply_result_mode[regno]) != VOIDmode)
9224 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9225 if (size % align != 0)
9226 size = CEIL (size, align) * align;
9227 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9228 emit_move_insn (reg,
9229 change_address (result, mode,
9230 plus_constant (XEXP (result, 0),
9233 push_to_sequence (call_fusage);
9234 emit_insn (gen_rtx (USE, VOIDmode, reg));
9235 call_fusage = get_insns ();
9237 size += GET_MODE_SIZE (mode);
9240 /* Put the USE insns before the return. */
9241 emit_insns (call_fusage);
9243 /* Return whatever values were restored by jumping directly to the end of the function. */
9245 expand_null_return ();
9248 /* Expand code for a post- or pre- increment or decrement
9249 and return the RTX for the result.
9250 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9253 expand_increment (exp, post, ignore)
9257 register rtx op0, op1;
9258 register rtx temp, value;
9259 register tree incremented = TREE_OPERAND (exp, 0);
9260 optab this_optab = add_optab;
9262 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9263 int op0_is_copy = 0;
9264 int single_insn = 0;
9265 /* 1 means we can't store into OP0 directly,
9266 because it is a subreg narrower than a word,
9267 and we don't dare clobber the rest of the word. */
9270 if (output_bytecode)
9272 bc_expand_expr (exp);
9276 /* Stabilize any component ref that might need to be
9277 evaluated more than once below. */
9279 || TREE_CODE (incremented) == BIT_FIELD_REF
9280 || (TREE_CODE (incremented) == COMPONENT_REF
9281 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9282 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9283 incremented = stabilize_reference (incremented);
9284 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9285 ones into save exprs so that they don't accidentally get evaluated
9286 more than once by the code below. */
9287 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9288 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9289 incremented = save_expr (incremented);
9291 /* Compute the operands as RTX.
9292 Note whether OP0 is the actual lvalue or a copy of it:
9293 I believe it is a copy iff it is a register or subreg
9294 and insns were generated in computing it. */
9296 temp = get_last_insn ();
9297 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9299 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9300 in place but instead must do sign- or zero-extension during assignment,
9301 so we copy it into a new register and let the code below use it as
9304 Note that we can safely modify this SUBREG since it is known not to be
9305 shared (it was made by the expand_expr call above). */
9307 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9310 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9314 else if (GET_CODE (op0) == SUBREG
9315 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9317 /* We cannot increment this SUBREG in place. If we are
9318 post-incrementing, get a copy of the old value. Otherwise,
9319 just mark that we cannot increment in place. */
9321 op0 = copy_to_reg (op0);
9326 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9327 && temp != get_last_insn ());
9328 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9330 /* Decide whether incrementing or decrementing. */
9331 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9332 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9333 this_optab = sub_optab;
9335 /* Convert decrement by a constant into a negative increment. */
9336 if (this_optab == sub_optab
9337 && GET_CODE (op1) == CONST_INT)
9339 op1 = GEN_INT (- INTVAL (op1));
9340 this_optab = add_optab;
9343 /* For a preincrement, see if we can do this with a single instruction. */
9346 icode = (int) this_optab->handlers[(int) mode].insn_code;
9347 if (icode != (int) CODE_FOR_nothing
9348 /* Make sure that OP0 is valid for operands 0 and 1
9349 of the insn we want to queue. */
9350 && (*insn_operand_predicate[icode][0]) (op0, mode)
9351 && (*insn_operand_predicate[icode][1]) (op0, mode)
9352 && (*insn_operand_predicate[icode][2]) (op1, mode))
9356 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9357 then we cannot just increment OP0. We must therefore contrive to
9358 increment the original value. Then, for postincrement, we can return
9359 OP0 since it is a copy of the old value. For preincrement, expand here
9360 unless we can do it with a single insn.
9362 Likewise if storing directly into OP0 would clobber high bits
9363 we need to preserve (bad_subreg). */
9364 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9366 /* This is the easiest way to increment the value wherever it is.
9367 Problems with multiple evaluation of INCREMENTED are prevented
9368 because either (1) it is a component_ref or preincrement,
9369 in which case it was stabilized above, or (2) it is an array_ref
9370 with constant index in an array in a register, which is
9371 safe to reevaluate. */
9372 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9373 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9374 ? MINUS_EXPR : PLUS_EXPR),
9377 TREE_OPERAND (exp, 1));
9379 while (TREE_CODE (incremented) == NOP_EXPR
9380 || TREE_CODE (incremented) == CONVERT_EXPR)
9382 newexp = convert (TREE_TYPE (incremented), newexp);
9383 incremented = TREE_OPERAND (incremented, 0);
9386 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9387 return post ? op0 : temp;
9392 /* We have a true reference to the value in OP0.
9393 If there is an insn to add or subtract in this mode, queue it.
9394 Queueing the increment insn avoids the register shuffling
9395 that often results if we must increment now and first save
9396 the old value for subsequent use. */
9398 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9399 op0 = stabilize (op0);
9402 icode = (int) this_optab->handlers[(int) mode].insn_code;
9403 if (icode != (int) CODE_FOR_nothing
9404 /* Make sure that OP0 is valid for operands 0 and 1
9405 of the insn we want to queue. */
9406 && (*insn_operand_predicate[icode][0]) (op0, mode)
9407 && (*insn_operand_predicate[icode][1]) (op0, mode))
9409 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9410 op1 = force_reg (mode, op1);
9412 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9416 /* Preincrement, or we can't increment with one simple insn. */
9418 /* Save a copy of the value before inc or dec, to return it later. */
9419 temp = value = copy_to_reg (op0);
9421 /* Arrange to return the incremented value. */
9422 /* Copy the rtx because expand_binop will protect from the queue,
9423 and the results of that would be invalid for us to return
9424 if our caller does emit_queue before using our result. */
9425 temp = copy_rtx (value = op0);
9427 /* Increment however we can. */
9428 op1 = expand_binop (mode, this_optab, value, op1, op0,
9429 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9430 /* Make sure the value is stored into OP0. */
9432 emit_move_insn (op0, op1);
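/* Illustrative sketch, not part of the original source: the values produced
   for pre- and post-increment, which is what the copy made above
   (`temp = value = copy_to_reg (op0)') arranges at the rtl level.  Plain C
   restatement; the function names are assumptions for the example only.  */
#if 0
static int
example_post_increment (lvalue, amount)
     int *lvalue;
     int amount;
{
  int old = *lvalue;		/* save a copy of the value before the inc */
  *lvalue = old + amount;
  return old;			/* postincrement yields the original value */
}

static int
example_pre_increment (lvalue, amount)
     int *lvalue;
     int amount;
{
  *lvalue += amount;
  return *lvalue;		/* preincrement yields the updated value */
}
#endif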
9437 /* Expand all function calls contained within EXP, innermost ones first.
9438 But don't look within expressions that have sequence points.
9439 For each CALL_EXPR, record the rtx for its value
9440 in the CALL_EXPR_RTL field. */
9443 preexpand_calls (exp)
9446 register int nops, i;
9447 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9449 if (! do_preexpand_calls)
9452 /* Only expressions and references can contain calls. */
9454 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9457 switch (TREE_CODE (exp))
9460 /* Do nothing if already expanded. */
9461 if (CALL_EXPR_RTL (exp) != 0
9462 /* Do nothing if the call returns a variable-sized object. */
9463 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
9464 /* Do nothing to built-in functions. */
9465 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9466 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9468 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9471 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9476 case TRUTH_ANDIF_EXPR:
9477 case TRUTH_ORIF_EXPR:
9478 /* If we find one of these, then we can be sure
9479 the adjust will be done for it (since it makes jumps).
9480 Do it now, so that if this is inside an argument
9481 of a function, we don't get the stack adjustment
9482 after some other args have already been pushed. */
9483 do_pending_stack_adjust ();
9488 case WITH_CLEANUP_EXPR:
9489 case CLEANUP_POINT_EXPR:
9493 if (SAVE_EXPR_RTL (exp) != 0)
9497 nops = tree_code_length[(int) TREE_CODE (exp)];
9498 for (i = 0; i < nops; i++)
9499 if (TREE_OPERAND (exp, i) != 0)
9501 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9502 if (type == 'e' || type == '<' || type == '1' || type == '2'
9504 preexpand_calls (TREE_OPERAND (exp, i));
9508 /* At the start of a function, record that we have no previously-pushed
9509 arguments waiting to be popped. */
9512 init_pending_stack_adjust ()
9514 pending_stack_adjust = 0;
9517 /* When exiting from a function, if safe, clear out any pending stack adjust
9518 so the adjustment won't get done. */
9521 clear_pending_stack_adjust ()
9523 #ifdef EXIT_IGNORE_STACK
9525 && ! flag_omit_frame_pointer && EXIT_IGNORE_STACK
9526 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9527 && ! flag_inline_functions)
9528 pending_stack_adjust = 0;
9532 /* Pop any previously-pushed arguments that have not been popped yet. */
9535 do_pending_stack_adjust ()
9537 if (inhibit_defer_pop == 0)
9539 if (pending_stack_adjust != 0)
9540 adjust_stack (GEN_INT (pending_stack_adjust));
9541 pending_stack_adjust = 0;
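/* Illustrative sketch, not part of the original source: how deferred argument
   pops combine.  Rather than emitting one stack adjustment per call, the
   byte counts accumulate in pending_stack_adjust and are flushed in a single
   adjustment by do_pending_stack_adjust above.  The names below are
   assumptions for the example only.  */
#if 0
static int example_pending_bytes;

static void
example_note_call_returned (bytes_pushed)
     int bytes_pushed;
{
  example_pending_bytes += bytes_pushed;	/* defer the pop */
}

static int
example_flush_pending_pops ()
{
  int adjust = example_pending_bytes;		/* one combined adjustment */
  example_pending_bytes = 0;
  return adjust;
}
#endif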
9545 /* Defer the expansion of all cleanups up to OLD_CLEANUPS.
9546 Returns the cleanups to be performed. */
9549 defer_cleanups_to (old_cleanups)
9552 tree new_cleanups = NULL_TREE;
9553 tree cleanups = cleanups_this_call;
9554 tree last = NULL_TREE;
9556 while (cleanups_this_call != old_cleanups)
9558 (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
9559 last = cleanups_this_call;
9560 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
9565 /* Remove the list from the chain of cleanups. */
9566 TREE_CHAIN (last) = NULL_TREE;
9568 /* Reverse them so that we can build them in the right order. */
9569 cleanups = nreverse (cleanups);
9571 /* All cleanups must be on the function_obstack. */
9572 push_obstacks_nochange ();
9573 resume_temporary_allocation ();
9578 new_cleanups = build (COMPOUND_EXPR, TREE_TYPE (new_cleanups),
9579 TREE_VALUE (cleanups), new_cleanups);
9581 new_cleanups = TREE_VALUE (cleanups);
9583 cleanups = TREE_CHAIN (cleanups);
9589 return new_cleanups;
9592 /* Expand all cleanups up to OLD_CLEANUPS.
9593 Needed here, and also for language-dependent calls. */
9596 expand_cleanups_to (old_cleanups)
9599 while (cleanups_this_call != old_cleanups)
9601 (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
9602 expand_expr (TREE_VALUE (cleanups_this_call), const0_rtx, VOIDmode, 0);
9603 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
9607 /* Expand conditional expressions. */
9609 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9610 LABEL is an rtx of code CODE_LABEL, in this function and all the
9614 jumpifnot (exp, label)
9618 do_jump (exp, label, NULL_RTX);
9621 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9628 do_jump (exp, NULL_RTX, label);
9631 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9632 the result is zero, or IF_TRUE_LABEL if the result is one.
9633 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9634 meaning fall through in that case.
9636 do_jump always does any pending stack adjust except when it does not
9637 actually perform a jump. An example where there is no jump
9638 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9640 This function is responsible for optimizing cases such as
9641 &&, || and comparison operators in EXP. */
9644 do_jump (exp, if_false_label, if_true_label)
9646 rtx if_false_label, if_true_label;
9648 register enum tree_code code = TREE_CODE (exp);
9649 /* Some cases need to create a label to jump to
9650 in order to properly fall through.
9651 These cases set DROP_THROUGH_LABEL nonzero. */
9652 rtx drop_through_label = 0;
9657 enum machine_mode mode;
9667 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9673 /* This is not true with #pragma weak */
9675 /* The address of something can never be zero. */
9677 emit_jump (if_true_label);
9682 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9683 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9684 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9687 /* If we are narrowing the operand, we have to do the compare in the
9689 if ((TYPE_PRECISION (TREE_TYPE (exp))
9690 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9692 case NON_LVALUE_EXPR:
9693 case REFERENCE_EXPR:
9698 /* These cannot change zero->non-zero or vice versa. */
9699 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9703 /* This never takes fewer insns than evaluating the PLUS_EXPR followed by
9704 a test, and can take more if the test is eliminated. */
9706 /* Reduce to minus. */
9707 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9708 TREE_OPERAND (exp, 0),
9709 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9710 TREE_OPERAND (exp, 1))));
9711 /* Process as MINUS. */
9715 /* Non-zero iff operands of minus differ. */
9716 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
9717 TREE_OPERAND (exp, 0),
9718 TREE_OPERAND (exp, 1)),
9723 /* If we are AND'ing with a small constant, do this comparison in the
9724 smallest type that fits. If the machine doesn't have comparisons
9725 that small, it will be converted back to the wider comparison.
9726 This helps if we are testing the sign bit of a narrower object.
9727 combine can't do this for us because it can't know whether a
9728 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9730 if (! SLOW_BYTE_ACCESS
9731 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9732 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9733 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
9734 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9735 && (type = type_for_mode (mode, 1)) != 0
9736 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9737 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9738 != CODE_FOR_nothing))
9740 do_jump (convert (type, exp), if_false_label, if_true_label);
9745 case TRUTH_NOT_EXPR:
9746 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9749 case TRUTH_ANDIF_EXPR:
9752 tree cleanups, old_cleanups;
9754 if (if_false_label == 0)
9755 if_false_label = drop_through_label = gen_label_rtx ();
9757 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9758 seq1 = get_insns ();
9761 old_cleanups = cleanups_this_call;
9763 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9764 seq2 = get_insns ();
9767 cleanups = defer_cleanups_to (old_cleanups);
9770 rtx flag = gen_reg_rtx (word_mode);
9774 /* Flag cleanups as not needed. */
9775 emit_move_insn (flag, const0_rtx);
9778 /* Flag cleanups as needed. */
9779 emit_move_insn (flag, const1_rtx);
9782 /* All cleanups must be on the function_obstack. */
9783 push_obstacks_nochange ();
9784 resume_temporary_allocation ();
9787 /* Convert flag, which is an rtx, into a tree. */
9787 cond = make_node (RTL_EXPR);
9788 TREE_TYPE (cond) = integer_type_node;
9789 RTL_EXPR_RTL (cond) = flag;
9790 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
9791 cond = save_expr (cond);
9793 new_cleanups = build (COND_EXPR, void_type_node,
9794 truthvalue_conversion (cond),
9795 cleanups, integer_zero_node);
9796 new_cleanups = fold (new_cleanups);
9800 /* Now add in the conditionalized cleanups. */
9802 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
9803 (*interim_eh_hook) (NULL_TREE);
9813 case TRUTH_ORIF_EXPR:
9816 tree cleanups, old_cleanups;
9818 if (if_true_label == 0)
9819 if_true_label = drop_through_label = gen_label_rtx ();
9821 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9822 seq1 = get_insns ();
9825 old_cleanups = cleanups_this_call;
9827 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9828 seq2 = get_insns ();
9831 cleanups = defer_cleanups_to (old_cleanups);
9834 rtx flag = gen_reg_rtx (word_mode);
9838 /* Flag cleanups as not needed. */
9839 emit_move_insn (flag, const0_rtx);
9842 /* Flag cleanups as needed. */
9843 emit_move_insn (flag, const1_rtx);
9846 /* All cleanups must be on the function_obstack. */
9847 push_obstacks_nochange ();
9848 resume_temporary_allocation ();
9850 /* Convert flag, which is an rtx, into a tree. */
9851 cond = make_node (RTL_EXPR);
9852 TREE_TYPE (cond) = integer_type_node;
9853 RTL_EXPR_RTL (cond) = flag;
9854 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
9855 cond = save_expr (cond);
9857 new_cleanups = build (COND_EXPR, void_type_node,
9858 truthvalue_conversion (cond),
9859 cleanups, integer_zero_node);
9860 new_cleanups = fold (new_cleanups);
9864 /* Now add in the conditionalized cleanups. */
9866 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
9867 (*interim_eh_hook) (NULL_TREE);
9879 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9880 preserve_temp_slots (NULL_RTX);
9884 do_pending_stack_adjust ();
9885 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9892 int bitsize, bitpos, unsignedp;
9893 enum machine_mode mode;
9898 /* Get description of this reference. We don't actually care
9899 about the underlying object here. */
9900 get_inner_reference (exp, &bitsize, &bitpos, &offset,
9901 &mode, &unsignedp, &volatilep);
9903 type = type_for_size (bitsize, unsignedp);
9904 if (! SLOW_BYTE_ACCESS
9905 && type != 0 && bitsize >= 0
9906 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9907 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9908 != CODE_FOR_nothing))
9910 do_jump (convert (type, exp), if_false_label, if_true_label);
9917 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9918 if (integer_onep (TREE_OPERAND (exp, 1))
9919 && integer_zerop (TREE_OPERAND (exp, 2)))
9920 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9922 else if (integer_zerop (TREE_OPERAND (exp, 1))
9923 && integer_onep (TREE_OPERAND (exp, 2)))
9924 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9928 register rtx label1 = gen_label_rtx ();
9929 drop_through_label = gen_label_rtx ();
9930 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9931 /* Now the THEN-expression. */
9932 do_jump (TREE_OPERAND (exp, 1),
9933 if_false_label ? if_false_label : drop_through_label,
9934 if_true_label ? if_true_label : drop_through_label);
9935 /* In case the do_jump just above never jumps. */
9936 do_pending_stack_adjust ();
9937 emit_label (label1);
9938 /* Now the ELSE-expression. */
9939 do_jump (TREE_OPERAND (exp, 2),
9940 if_false_label ? if_false_label : drop_through_label,
9941 if_true_label ? if_true_label : drop_through_label);
9947 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9949 if (integer_zerop (TREE_OPERAND (exp, 1)))
9950 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9951 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9952 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9955 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9956 fold (build (EQ_EXPR, TREE_TYPE (exp),
9957 fold (build1 (REALPART_EXPR,
9958 TREE_TYPE (inner_type),
9959 TREE_OPERAND (exp, 0))),
9960 fold (build1 (REALPART_EXPR,
9961 TREE_TYPE (inner_type),
9962 TREE_OPERAND (exp, 1))))),
9963 fold (build (EQ_EXPR, TREE_TYPE (exp),
9964 fold (build1 (IMAGPART_EXPR,
9965 TREE_TYPE (inner_type),
9966 TREE_OPERAND (exp, 0))),
9967 fold (build1 (IMAGPART_EXPR,
9968 TREE_TYPE (inner_type),
9969 TREE_OPERAND (exp, 1))))))),
9970 if_false_label, if_true_label);
9971 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9972 && !can_compare_p (TYPE_MODE (inner_type)))
9973 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9975 comparison = compare (exp, EQ, EQ);
9981 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9983 if (integer_zerop (TREE_OPERAND (exp, 1)))
9984 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9985 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9986 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9989 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9990 fold (build (NE_EXPR, TREE_TYPE (exp),
9991 fold (build1 (REALPART_EXPR,
9992 TREE_TYPE (inner_type),
9993 TREE_OPERAND (exp, 0))),
9994 fold (build1 (REALPART_EXPR,
9995 TREE_TYPE (inner_type),
9996 TREE_OPERAND (exp, 1))))),
9997 fold (build (NE_EXPR, TREE_TYPE (exp),
9998 fold (build1 (IMAGPART_EXPR,
9999 TREE_TYPE (inner_type),
10000 TREE_OPERAND (exp, 0))),
10001 fold (build1 (IMAGPART_EXPR,
10002 TREE_TYPE (inner_type),
10003 TREE_OPERAND (exp, 1))))))),
10004 if_false_label, if_true_label);
10005 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10006 && !can_compare_p (TYPE_MODE (inner_type)))
10007 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10009 comparison = compare (exp, NE, NE);
10014 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10016 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10017 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10019 comparison = compare (exp, LT, LTU);
10023 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10025 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10026 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10028 comparison = compare (exp, LE, LEU);
10032 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10034 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10035 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10037 comparison = compare (exp, GT, GTU);
10041 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10043 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10044 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10046 comparison = compare (exp, GE, GEU);
10051 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10053 /* This is not needed any more and causes poor code since it causes
10054 comparisons and tests from non-SI objects to have different code
10056 /* Copy to register to avoid generating bad insns by cse
10057 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10058 if (!cse_not_expected && GET_CODE (temp) == MEM)
10059 temp = copy_to_reg (temp);
10061 do_pending_stack_adjust ();
10062 if (GET_CODE (temp) == CONST_INT)
10063 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10064 else if (GET_CODE (temp) == LABEL_REF)
10065 comparison = const_true_rtx;
10066 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10067 && !can_compare_p (GET_MODE (temp)))
10068 /* Note swapping the labels gives us not-equal. */
10069 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10070 else if (GET_MODE (temp) != VOIDmode)
10071 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
10072 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10073 GET_MODE (temp), NULL_RTX, 0);
10078 /* Do any postincrements in the expression that was tested. */
10081 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10082 straight into a conditional jump instruction as the jump condition.
10083 Otherwise, all the work has been done already. */
10085 if (comparison == const_true_rtx)
10088 emit_jump (if_true_label);
10090 else if (comparison == const0_rtx)
10092 if (if_false_label)
10093 emit_jump (if_false_label);
10095 else if (comparison)
10096 do_jump_for_compare (comparison, if_false_label, if_true_label);
10098 if (drop_through_label)
10100 /* If do_jump produces code that might be jumped around,
10101 do any stack adjusts from that code, before the place
10102 where control merges in. */
10103 do_pending_stack_adjust ();
10104 emit_label (drop_through_label);
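/* Illustrative sketch, not part of the original source: the control flow
   do_jump produces for `a && b' when both labels are supplied, written as
   explicit gotos.  A drop-through label is created only when one of the
   targets is missing, as in the TRUTH_ANDIF_EXPR case above.  The labels
   and the function name are assumptions for the example only.  */
#if 0
static int
example_andif (a, b)
     int a, b;
{
  if (! a)
    goto if_false;		/* do_jump (op0, if_false_label, NULL_RTX) */
  if (! b)
    goto if_false;		/* do_jump (op1, if_false_label, if_true_label) */
  return 1;			/* if_true_label */
 if_false:
  return 0;
}
#endif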
10108 /* Given a comparison expression EXP for values too wide to be compared
10109 with one insn, test the comparison and jump to the appropriate label.
10110 The code of EXP is ignored; we always test GT if SWAP is 0,
10111 and LT if SWAP is 1. */
10114 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10117 rtx if_false_label, if_true_label;
10119 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10120 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10121 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10122 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10123 rtx drop_through_label = 0;
10124 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10127 if (! if_true_label || ! if_false_label)
10128 drop_through_label = gen_label_rtx ();
10129 if (! if_true_label)
10130 if_true_label = drop_through_label;
10131 if (! if_false_label)
10132 if_false_label = drop_through_label;
10134 /* Compare a word at a time, high order first. */
10135 for (i = 0; i < nwords; i++)
10138 rtx op0_word, op1_word;
10140 if (WORDS_BIG_ENDIAN)
10142 op0_word = operand_subword_force (op0, i, mode);
10143 op1_word = operand_subword_force (op1, i, mode);
10147 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10148 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10151 /* All but high-order word must be compared as unsigned. */
10152 comp = compare_from_rtx (op0_word, op1_word,
10153 (unsignedp || i > 0) ? GTU : GT,
10154 unsignedp, word_mode, NULL_RTX, 0);
10155 if (comp == const_true_rtx)
10156 emit_jump (if_true_label);
10157 else if (comp != const0_rtx)
10158 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10160 /* Consider lower words only if these are equal. */
10161 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10163 if (comp == const_true_rtx)
10164 emit_jump (if_false_label);
10165 else if (comp != const0_rtx)
10166 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10169 if (if_false_label)
10170 emit_jump (if_false_label);
10171 if (drop_through_label)
10172 emit_label (drop_through_label);
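/* Illustrative sketch, not part of the original source: the word-at-a-time
   unsigned comparison implemented by the loop above, restated over an array
   of words with the most significant word first.  The array layout is an
   assumption for the example; the real code walks subwords of a wide
   integer mode and treats only the high-order word as possibly signed.  */
#if 0
static int
example_multiword_greater (op0, op1, nwords)
     unsigned long *op0, *op1;
     int nwords;
{
  int i;

  for (i = 0; i < nwords; i++)		/* high-order word first */
    {
      if (op0[i] > op1[i])
	return 1;			/* strictly greater: decided */
      if (op0[i] != op1[i])
	return 0;			/* strictly less: decided */
      /* Words equal so far; consider lower words.  */
    }
  return 0;				/* all words equal, so not greater */
}
#endif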
10175 /* Compare OP0 with OP1, word at a time, in mode MODE.
10176 UNSIGNEDP says to do unsigned comparison.
10177 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10180 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10181 enum machine_mode mode;
10184 rtx if_false_label, if_true_label;
10186 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10187 rtx drop_through_label = 0;
10190 if (! if_true_label || ! if_false_label)
10191 drop_through_label = gen_label_rtx ();
10192 if (! if_true_label)
10193 if_true_label = drop_through_label;
10194 if (! if_false_label)
10195 if_false_label = drop_through_label;
10197 /* Compare a word at a time, high order first. */
10198 for (i = 0; i < nwords; i++)
10201 rtx op0_word, op1_word;
10203 if (WORDS_BIG_ENDIAN)
10205 op0_word = operand_subword_force (op0, i, mode);
10206 op1_word = operand_subword_force (op1, i, mode);
10210 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10211 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10214 /* All but high-order word must be compared as unsigned. */
10215 comp = compare_from_rtx (op0_word, op1_word,
10216 (unsignedp || i > 0) ? GTU : GT,
10217 unsignedp, word_mode, NULL_RTX, 0);
10218 if (comp == const_true_rtx)
10219 emit_jump (if_true_label);
10220 else if (comp != const0_rtx)
10221 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10223 /* Consider lower words only if these are equal. */
10224 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10226 if (comp == const_true_rtx)
10227 emit_jump (if_false_label);
10228 else if (comp != const0_rtx)
10229 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10232 if (if_false_label)
10233 emit_jump (if_false_label);
10234 if (drop_through_label)
10235 emit_label (drop_through_label);
10238 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10239 with one insn, test the comparison and jump to the appropriate label. */
10242 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10244 rtx if_false_label, if_true_label;
10246 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10247 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10248 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10249 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10251 rtx drop_through_label = 0;
10253 if (! if_false_label)
10254 drop_through_label = if_false_label = gen_label_rtx ();
10256 for (i = 0; i < nwords; i++)
10258 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
10259 operand_subword_force (op1, i, mode),
10260 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10261 word_mode, NULL_RTX, 0);
10262 if (comp == const_true_rtx)
10263 emit_jump (if_false_label);
10264 else if (comp != const0_rtx)
10265 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10269 emit_jump (if_true_label);
10270 if (drop_through_label)
10271 emit_label (drop_through_label);
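/* Illustrative sketch, not part of the original source: the word-at-a-time
   equality test used by do_jump_by_parts_equality above, restated in plain
   C.  Any mismatching word settles the answer; the array layout is an
   assumption for the example only.  */
#if 0
static int
example_multiword_equal (op0, op1, nwords)
     unsigned long *op0, *op1;
     int nwords;
{
  int i;

  for (i = 0; i < nwords; i++)
    if (op0[i] != op1[i])
      return 0;				/* jump to if_false_label */
  return 1;				/* every word matched */
}
#endif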
10274 /* Jump according to whether OP0 is 0.
10275 We assume that OP0 has an integer mode that is too wide
10276 for the available compare insns. */
10279 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10281 rtx if_false_label, if_true_label;
10283 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10285 rtx drop_through_label = 0;
10287 if (! if_false_label)
10288 drop_through_label = if_false_label = gen_label_rtx ();
10290 for (i = 0; i < nwords; i++)
10292 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
10294 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
10295 if (comp == const_true_rtx)
10296 emit_jump (if_false_label);
10297 else if (comp != const0_rtx)
10298 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10302 emit_jump (if_true_label);
10303 if (drop_through_label)
10304 emit_label (drop_through_label);
10307 /* Given a comparison expression in rtl form, output conditional branches to
10308 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
10311 do_jump_for_compare (comparison, if_false_label, if_true_label)
10312 rtx comparison, if_false_label, if_true_label;
10316 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10317 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
10321 if (if_false_label)
10322 emit_jump (if_false_label);
10324 else if (if_false_label)
10327 rtx prev = get_last_insn ();
10330 /* Output the branch with the opposite condition. Then try to invert
10331 what is generated. If more than one insn is a branch, or if the
10332 branch is not the last insn written, abort. If we can't invert
10333 the branch, make a true label, redirect this jump to that,
10334 emit a jump to the false label and define the true label. */
10336 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10337 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
10341 /* Here we get the first insn that was just emitted. It used to be the
10342 case that, on some machines, emitting the branch would discard
10343 the previous compare insn and emit a replacement. This isn't
10344 done anymore, but abort if we see that PREV is deleted. */
10347 insn = get_insns ();
10348 else if (INSN_DELETED_P (prev))
10351 insn = NEXT_INSN (prev);
10353 for (; insn; insn = NEXT_INSN (insn))
10354 if (GET_CODE (insn) == JUMP_INSN)
10361 if (branch != get_last_insn ())
10364 JUMP_LABEL (branch) = if_false_label;
10365 if (! invert_jump (branch, if_false_label))
10367 if_true_label = gen_label_rtx ();
10368 redirect_jump (branch, if_true_label);
10369 emit_jump (if_false_label);
10370 emit_label (if_true_label);
10375 /* Generate code for a comparison expression EXP
10376 (including code to compute the values to be compared)
10377 and set (CC0) according to the result.
10378 SIGNED_CODE should be the rtx operation for this comparison for
10379 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10381 We force a stack adjustment unless there are currently
10382 things pushed on the stack that aren't yet used. */
10385 compare (exp, signed_code, unsigned_code)
10387 enum rtx_code signed_code, unsigned_code;
10390 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10392 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10393 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
10394 register enum machine_mode mode = TYPE_MODE (type);
10395 int unsignedp = TREE_UNSIGNED (type);
10396 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
10398 return compare_from_rtx (op0, op1, code, unsignedp, mode,
10400 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10401 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
10404 /* Like compare but expects the values to compare as two rtx's.
10405 The decision as to signed or unsigned comparison must be made by the caller.
10407 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10410 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10411 size of MODE should be used. */
10414 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10415 register rtx op0, op1;
10416 enum rtx_code code;
10418 enum machine_mode mode;
10424 /* If one operand is constant, make it the second one. Only do this
10425 if the other operand is not constant as well. */
10427 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10428 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10433 code = swap_condition (code);
10436 if (flag_force_mem)
10438 op0 = force_not_mem (op0);
10439 op1 = force_not_mem (op1);
10442 do_pending_stack_adjust ();
10444 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10445 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10449 /* There's no need to do this now that combine.c can eliminate lots of
10450 sign extensions. This can be less efficient in certain cases on other machines. */
10453 /* If this is a signed equality comparison, we can do it as an
10454 unsigned comparison since zero-extension is cheaper than sign
10455 extension and comparisons with zero are done as unsigned. This is
10456 the case even on machines that can do fast sign extension, since
10457 zero-extension is easier to combine with other operations than
10458 sign-extension is. If we are comparing against a constant, we must
10459 convert it to what it would look like unsigned. */
10460 if ((code == EQ || code == NE) && ! unsignedp
10461 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10463 if (GET_CODE (op1) == CONST_INT
10464 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10465 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
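/* A minimal sketch, using a hypothetical QImode-sized example, of why the
   constant is re-masked above: once the equality test is done unsigned,
   the sign-extended host constant -1 has to appear in its zero-extended
   form 0xff, which is exactly what masking with GET_MODE_MASK yields.  */

static int
eq_as_unsigned_sketch (x)
     signed char x;
{
  return (unsigned char) x == 0xff;	/* same result as x == -1 */
}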
10470 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10472 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
10475 /* Generate code to calculate EXP using a store-flag instruction
10476 and return an rtx for the result. EXP is either a comparison
10477 or a TRUTH_NOT_EXPR whose operand is a comparison.
10479 If TARGET is nonzero, store the result there if convenient.
10481 If ONLY_CHEAP is non-zero, only do this if it is likely to be very cheap.
10484 Return zero if there is no suitable set-flag instruction
10485 available on this machine.
10487 Once expand_expr has been called on the arguments of the comparison,
10488 we are committed to doing the store flag, since it is not safe to
10489 re-evaluate the expression. We emit the store-flag insn by calling
10490 emit_store_flag, but only expand the arguments if we have a reason
10491 to believe that emit_store_flag will be successful. If we think that
10492 it will, but it isn't, we have to simulate the store-flag with a
10493 set/jump/set sequence. */
10496 do_store_flag (exp, target, mode, only_cheap)
10499 enum machine_mode mode;
10502 enum rtx_code code;
10503 tree arg0, arg1, type;
10505 enum machine_mode operand_mode;
10509 enum insn_code icode;
10510 rtx subtarget = target;
10511 rtx result, label, pattern, jump_pat;
10513 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10514 result at the end. We can't simply invert the test since it would
10515 have already been inverted if it were valid. This case occurs for
10516 some floating-point comparisons. */
10518 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10519 invert = 1, exp = TREE_OPERAND (exp, 0);
10521 arg0 = TREE_OPERAND (exp, 0);
10522 arg1 = TREE_OPERAND (exp, 1);
10523 type = TREE_TYPE (arg0);
10524 operand_mode = TYPE_MODE (type);
10525 unsignedp = TREE_UNSIGNED (type);
10527 /* We won't bother with BLKmode store-flag operations because it would mean
10528 passing a lot of information to emit_store_flag. */
10529 if (operand_mode == BLKmode)
10535 /* Get the rtx comparison code to use. We know that EXP is a comparison
10536 operation of some type. Some comparisons against 1 and -1 can be
10537 converted to comparisons with zero. Do so here so that the tests
10538 below will be aware that we have a comparison with zero. These
10539 tests will not catch constants in the first operand, but constants
10540 are rarely passed as the first operand. */
10542 switch (TREE_CODE (exp))
10551 if (integer_onep (arg1))
10552 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10554 code = unsignedp ? LTU : LT;
10557 if (! unsignedp && integer_all_onesp (arg1))
10558 arg1 = integer_zero_node, code = LT;
10560 code = unsignedp ? LEU : LE;
10563 if (! unsignedp && integer_all_onesp (arg1))
10564 arg1 = integer_zero_node, code = GE;
10566 code = unsignedp ? GTU : GT;
10569 if (integer_onep (arg1))
10570 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10572 code = unsignedp ? GEU : GE;
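/* A minimal sketch, not part of this file, of the rewrites the switch
   above performs, shown on plain signed integers.  Comparisons against 1
   and -1 become comparisons against zero, so the code further down only
   has to recognize comparisons with zero.  */

static int
lt_one_sketch (x)
     int x;
{
  return x <= 0;		/* same truth value as x < 1 */
}

static int
gt_minus_one_sketch (x)
     int x;
{
  return x >= 0;		/* same truth value as x > -1 */
}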
10578 /* Put a constant second. */
10579 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10581 tem = arg0; arg0 = arg1; arg1 = tem;
10582 code = swap_condition (code);
10585 /* If this is an equality or inequality test of a single bit, we can
10586 do this by shifting the bit being tested to the low-order bit and
10587 masking the result with the constant 1. If the condition was EQ,
10588 we xor it with 1. This does not require an scc insn and is faster
10589 than an scc insn even if we have it. */
10591 if ((code == NE || code == EQ)
10592 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10593 && integer_pow2p (TREE_OPERAND (arg0, 1))
10594 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
10596 tree inner = TREE_OPERAND (arg0, 0);
10601 tem = INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
10602 NULL_RTX, VOIDmode, 0));
10603 /* In this case, immed_double_const will sign extend the value to make
10604 it look the same on the host and target. We must remove the
10605 sign-extension before calling exact_log2, since exact_log2 will
10606 fail for negative values. */
10607 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT
10608 && BITS_PER_WORD == GET_MODE_BITSIZE (TYPE_MODE (type)))
10609 /* We don't use the obvious constant shift to generate the mask,
10610 because that generates compiler warnings when BITS_PER_WORD is
10611 greater than or equal to HOST_BITS_PER_WIDE_INT, even though this
10612 code is unreachable in that case. */
10613 tem = tem & GET_MODE_MASK (word_mode);
10614 bitnum = exact_log2 (tem);
10616 /* If INNER is a right shift of a constant and it plus BITNUM does
10617 not overflow, adjust BITNUM and INNER. */
10619 if (TREE_CODE (inner) == RSHIFT_EXPR
10620 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10621 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10622 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
10623 < TYPE_PRECISION (type)))
10625 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10626 inner = TREE_OPERAND (inner, 0);
10629 /* If we are going to be able to omit the AND below, we must do our
10630 operations as unsigned. If we must use the AND, we have a choice.
10631 Normally unsigned is faster, but for some machines signed is. */
10632 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10633 #ifdef LOAD_EXTEND_OP
10634 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10640 if (subtarget == 0 || GET_CODE (subtarget) != REG
10641 || GET_MODE (subtarget) != operand_mode
10642 || ! safe_from_p (subtarget, inner))
10645 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10648 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10649 size_int (bitnum), subtarget, ops_unsignedp);
10651 if (GET_MODE (op0) != mode)
10652 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10654 if ((code == EQ && ! invert) || (code == NE && invert))
10655 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10656 ops_unsignedp, OPTAB_LIB_WIDEN);
10658 /* Put the AND last so it can combine with more things. */
10659 if (bitnum != TYPE_PRECISION (type) - 1)
10660 op0 = expand_and (op0, const1_rtx, subtarget);
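/* A minimal sketch, not part of this file, of the transformation above on
   ordinary unsigned integers: testing a single bit, (x & (1 << n)) != 0,
   becomes a shift, an optional XOR with 1 for the EQ case, and a final
   mask with 1.  */

static unsigned int
single_bit_test_sketch (x, n, test_eq)
     unsigned int x;
     int n, test_eq;
{
  unsigned int r = x >> n;	/* move the tested bit into bit 0 */
  if (test_eq)
    r ^= 1;			/* EQ: invert the tested bit */
  return r & 1;			/* mask with 1 */
}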
10665 /* Now see if we are likely to be able to do this. Return if not. */
10666 if (! can_compare_p (operand_mode))
10668 icode = setcc_gen_code[(int) code];
10669 if (icode == CODE_FOR_nothing
10670 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
10672 /* We can only do this if it is one of the special cases that
10673 can be handled without an scc insn. */
10674 if ((code == LT && integer_zerop (arg1))
10675 || (! only_cheap && code == GE && integer_zerop (arg1)))
10677 else if (BRANCH_COST >= 0
10678 && ! only_cheap && (code == NE || code == EQ)
10679 && TREE_CODE (type) != REAL_TYPE
10680 && ((abs_optab->handlers[(int) operand_mode].insn_code
10681 != CODE_FOR_nothing)
10682 || (ffs_optab->handlers[(int) operand_mode].insn_code
10683 != CODE_FOR_nothing)))
10689 preexpand_calls (exp);
10690 if (subtarget == 0 || GET_CODE (subtarget) != REG
10691 || GET_MODE (subtarget) != operand_mode
10692 || ! safe_from_p (subtarget, arg1))
10695 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10696 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10699 target = gen_reg_rtx (mode);
10701 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10702 because, if emit_store_flag does anything at all, it will succeed and
10703 OP0 and OP1 will not be used subsequently. */
10705 result = emit_store_flag (target, code,
10706 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10707 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10708 operand_mode, unsignedp, 1);
10713 result = expand_binop (mode, xor_optab, result, const1_rtx,
10714 result, 0, OPTAB_LIB_WIDEN);
10718 /* If this failed, we have to do this with set/compare/jump/set code. */
10719 if (target == 0 || GET_CODE (target) != REG
10720 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10721 target = gen_reg_rtx (GET_MODE (target));
10723 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10724 result = compare_from_rtx (op0, op1, code, unsignedp,
10725 operand_mode, NULL_RTX, 0);
10726 if (GET_CODE (result) == CONST_INT)
10727 return (((result == const0_rtx && ! invert)
10728 || (result != const0_rtx && invert))
10729 ? const0_rtx : const1_rtx);
10731 label = gen_label_rtx ();
10732 if (bcc_gen_fctn[(int) code] == 0)
10735 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10736 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10737 emit_label (label);
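/* A minimal sketch, not part of this file, of the set/compare/jump/set
   sequence generated above when no store-flag instruction is available,
   written in source form for a signed "less than" test.  */

static int
store_flag_fallback_sketch (a, b, invert)
     int a, b, invert;
{
  int target = invert ? 0 : 1;	/* first emit_move_insn */
  if (a < b)			/* conditional branch over the second store */
    return target;
  return invert ? 1 : 0;	/* second emit_move_insn, then the label */
}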
10742 /* Generate a tablejump instruction (used for switch statements). */
10744 #ifdef HAVE_tablejump
10746 /* INDEX is the value being switched on, with the lowest value
10747 in the table already subtracted.
10748 MODE is its expected mode (needed if INDEX is constant).
10749 RANGE is the length of the jump table.
10750 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10752 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10753 index value is out of range. */
10756 do_tablejump (index, mode, range, table_label, default_label)
10757 rtx index, range, table_label, default_label;
10758 enum machine_mode mode;
10760 register rtx temp, vector;
10762 /* Do an unsigned comparison (in the proper mode) between the index
10763 expression and the value which represents the length of the range.
10764 Since we just finished subtracting the lower bound of the range
10765 from the index expression, this comparison allows us to simultaneously
10766 check that the original index expression value is both greater than
10767 or equal to the minimum value of the range and less than or equal to
10768 the maximum value of the range. */
10770 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
10771 emit_jump_insn (gen_bgtu (default_label));
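/* A minimal sketch, not part of this file, of the range check the single
   unsigned comparison above performs.  Once the lower bound has been
   subtracted from the index, one unsigned "greater than" against the
   table length rejects both values below the old lower bound and values
   above the old upper bound.  */

static int
in_switch_range_sketch (val, low, high)
     long val, low, high;
{
  unsigned long biased = (unsigned long) (val - low);
  return biased <= (unsigned long) (high - low);	/* in range?  */
}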
10773 /* If index is in range, it must fit in Pmode.
10774 Convert to Pmode so we can index with it. */
10776 index = convert_to_mode (Pmode, index, 1);
10778 /* Don't let a MEM slip through, because then INDEX that comes
10779 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10780 and break_out_memory_refs will go to work on it and mess it up. */
10781 #ifdef PIC_CASE_VECTOR_ADDRESS
10782 if (flag_pic && GET_CODE (index) != REG)
10783 index = copy_to_mode_reg (Pmode, index);
10786 /* If flag_force_addr were to affect this address
10787 it could interfere with the tricky assumptions made
10788 about addresses that contain label-refs,
10789 which may be valid only very near the tablejump itself. */
10790 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10791 GET_MODE_SIZE, because this indicates how large insns are. The other
10792 uses should all be Pmode, because they are addresses. This code
10793 could fail if addresses and insns are not the same size. */
10794 index = gen_rtx (PLUS, Pmode,
10795 gen_rtx (MULT, Pmode, index,
10796 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10797 gen_rtx (LABEL_REF, Pmode, table_label));
10798 #ifdef PIC_CASE_VECTOR_ADDRESS
10800 index = PIC_CASE_VECTOR_ADDRESS (index);
10803 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10804 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10805 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
10806 RTX_UNCHANGING_P (vector) = 1;
10807 convert_move (temp, vector, 0);
10809 emit_jump_insn (gen_tablejump (temp, table_label));
10811 #ifndef CASE_VECTOR_PC_RELATIVE
10812 /* If we are generating PIC code or if the table is PC-relative, the
10813 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10819 #endif /* HAVE_tablejump */
10822 /* Emit a suitable bytecode to load a value from memory, assuming a pointer
10823 to that value is on the top of the stack. The resulting type is TYPE, and
10824 the source declaration is DECL. */
10827 bc_load_memory (type, decl)
10830 enum bytecode_opcode opcode;
10833 /* Bit fields are special. We only know about signed and
10834 unsigned ints, and enums. The latter are treated as
10835 signed integers. */
10837 if (DECL_BIT_FIELD (decl))
10838 if (TREE_CODE (type) == ENUMERAL_TYPE
10839 || TREE_CODE (type) == INTEGER_TYPE)
10840 opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
10844 /* See corresponding comment in bc_store_memory(). */
10845 if (TYPE_MODE (type) == BLKmode
10846 || TYPE_MODE (type) == VOIDmode)
10849 opcode = mode_to_load_map [(int) TYPE_MODE (type)];
10851 if (opcode == neverneverland)
10854 bc_emit_bytecode (opcode);
10856 #ifdef DEBUG_PRINT_CODE
10857 fputc ('\n', stderr);
10862 /* Store the contents of the second stack slot to the address in the
10863 top stack slot. DECL is the declaration of the destination and is used
10864 to determine whether we're dealing with a bitfield. */
10867 bc_store_memory (type, decl)
10870 enum bytecode_opcode opcode;
10873 if (DECL_BIT_FIELD (decl))
10875 if (TREE_CODE (type) == ENUMERAL_TYPE
10876 || TREE_CODE (type) == INTEGER_TYPE)
10882 if (TYPE_MODE (type) == BLKmode)
10884 /* Copy structure. This expands to a block copy instruction, storeBLK.
10885 In addition to the arguments expected by the other store instructions,
10886 it also expects a type size (SImode) on top of the stack, which is the
10887 structure size in size units (usually bytes). The first two arguments
10888 are already on the stack, so we just put the size on level 1. For some
10889 other languages the size may be variable, which is why we don't encode
10890 it as a storeBLK literal, but rather treat it as a full-fledged expression. */
10892 bc_expand_expr (TYPE_SIZE (type));
10896 opcode = mode_to_store_map [(int) TYPE_MODE (type)];
10898 if (opcode == neverneverland)
10901 bc_emit_bytecode (opcode);
10903 #ifdef DEBUG_PRINT_CODE
10904 fputc ('\n', stderr);
10909 /* Allocate local stack space sufficient to hold a value of the given
10910 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
10911 integral power of 2. A special case is locals of type VOID, which
10912 have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
10913 remapped into the corresponding attribute of SI. */
10916 bc_allocate_local (size, alignment)
10917 int size, alignment;
10920 int byte_alignment;
10925 /* Normalize size and alignment */
10927 size = UNITS_PER_WORD;
10929 if (alignment < BITS_PER_UNIT)
10930 byte_alignment = 1 << (INT_ALIGN - 1);
10933 byte_alignment = alignment / BITS_PER_UNIT;
10935 if (local_vars_size & (byte_alignment - 1))
10936 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
10938 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
10939 local_vars_size += size;
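/* A minimal sketch, not part of this file, of the power-of-two round-up
   used above to align LOCAL_VARS_SIZE before the slot is carved out.  */

static int
round_up_sketch (offset, align)
     int offset, align;
{
  if (offset & (align - 1))
    offset += align - (offset & (align - 1));
  return offset;		/* now a multiple of ALIGN */
}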
10945 /* Allocate variable-sized local array. Variable-sized arrays are
10946 actually pointers to the address in memory where they are stored. */
10949 bc_allocate_variable_array (size)
10953 const int ptralign = (1 << (PTR_ALIGN - 1));
10955 /* Align pointer */
10956 if (local_vars_size & ptralign)
10957 local_vars_size += ptralign - (local_vars_size & ptralign);
10959 /* Note down local space needed: pointer to block; also return
10962 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
10963 local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
10968 /* Push the machine address for the given external variable offset. */
10970 bc_load_externaddr (externaddr)
10973 bc_emit_bytecode (constP);
10974 bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
10975 BYTECODE_BC_LABEL (externaddr)->offset);
10977 #ifdef DEBUG_PRINT_CODE
10978 fputc ('\n', stderr);
10983 /* Like above, but expects an IDENTIFIER. */
10985 bc_load_externaddr_id (id, offset)
10989 if (!IDENTIFIER_POINTER (id))
10992 bc_emit_bytecode (constP);
10993 bc_emit_code_labelref (xstrdup (IDENTIFIER_POINTER (id)), offset);
10995 #ifdef DEBUG_PRINT_CODE
10996 fputc ('\n', stderr);
11001 /* Push the machine address for the given local variable offset. */
11003 bc_load_localaddr (localaddr)
11006 bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
11010 /* Push the machine address for the given parameter offset.
11011 NOTE: offset is in bits. */
11013 bc_load_parmaddr (parmaddr)
11016 bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
11021 /* Convert a[i] into *(a + i). */
11023 bc_canonicalize_array_ref (exp)
11026 tree type = TREE_TYPE (exp);
11027 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
11028 TREE_OPERAND (exp, 0));
11029 tree index = TREE_OPERAND (exp, 1);
11032 /* Convert the integer argument to a type the same size as a pointer
11033 so the multiply won't overflow spuriously. */
11035 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
11036 index = convert (type_for_size (POINTER_SIZE, 0), index);
11038 /* The array address isn't volatile even if the array is.
11039 (Of course this isn't terribly relevant since the bytecode
11040 translator treats nearly everything as volatile anyway.) */
11041 TREE_THIS_VOLATILE (array_adr) = 0;
11043 return build1 (INDIRECT_REF, type,
11044 fold (build (PLUS_EXPR,
11045 TYPE_POINTER_TO (type),
11047 fold (build (MULT_EXPR,
11048 TYPE_POINTER_TO (type),
11050 size_in_bytes (type))))));
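/* A minimal sketch, not part of this file, of the rewrite above in source
   form: the ARRAY_REF a[i] becomes an explicit indirection through the
   array address plus the index scaled by the element size.  */

static int
array_ref_sketch (a, i)
     int *a;
     long i;
{
  return *(int *) ((char *) a + i * (long) sizeof (int));	/* same as a[i] */
}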
11054 /* Load the address of the component referenced by the given
11055 COMPONENT_REF expression.
11057 Returns innermost lvalue. */
11060 bc_expand_component_address (exp)
11064 enum machine_mode mode;
11066 HOST_WIDE_INT SIval;
11069 tem = TREE_OPERAND (exp, 1);
11070 mode = DECL_MODE (tem);
11073 /* Compute cumulative bit offset for nested component refs
11074 and array refs, and find the ultimate containing object. */
11076 for (tem = exp;; tem = TREE_OPERAND (tem, 0))
11078 if (TREE_CODE (tem) == COMPONENT_REF)
11079 bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
11081 if (TREE_CODE (tem) == ARRAY_REF
11082 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11083 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
11085 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
11086 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
11087 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
11092 bc_expand_expr (tem);
11095 /* For bitfields also push their offset and size */
11096 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
11097 bc_push_offset_and_size (bitpos, TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 1))));
11099 if ((SIval = bitpos / BITS_PER_UNIT) != 0)
11100 bc_emit_instruction (addconstPSI, SIval);
11102 return (TREE_OPERAND (exp, 1));
11106 /* Emit code to push two SI constants */
11108 bc_push_offset_and_size (offset, size)
11109 HOST_WIDE_INT offset, size;
11111 bc_emit_instruction (constSI, offset);
11112 bc_emit_instruction (constSI, size);
11116 /* Emit byte code to push the address of the given lvalue expression to
11117 the stack. If it's a bit field, we also push offset and size info.
11119 Returns innermost component, which allows us to determine not only
11120 its type, but also whether it's a bitfield. */
11123 bc_expand_address (exp)
11127 if (!exp || TREE_CODE (exp) == ERROR_MARK)
11131 switch (TREE_CODE (exp))
11135 return (bc_expand_address (bc_canonicalize_array_ref (exp)));
11137 case COMPONENT_REF:
11139 return (bc_expand_component_address (exp));
11143 bc_expand_expr (TREE_OPERAND (exp, 0));
11145 /* For variable-sized types: retrieve pointer. Sometimes the
11146 TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
11147 also make sure we have an operand, just in case... */
11149 if (TREE_OPERAND (exp, 0)
11150 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
11151 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
11152 bc_emit_instruction (loadP);
11154 /* If packed, also return offset and size */
11155 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
11157 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
11158 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
11160 return (TREE_OPERAND (exp, 0));
11162 case FUNCTION_DECL:
11164 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
11165 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
11170 bc_load_parmaddr (DECL_RTL (exp));
11172 /* For variable-sized types: retrieve pointer */
11173 if (TYPE_SIZE (TREE_TYPE (exp))
11174 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11175 bc_emit_instruction (loadP);
11177 /* If packed, also return offset and size */
11178 if (DECL_BIT_FIELD (exp))
11179 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11180 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11186 bc_emit_instruction (returnP);
11192 if (BYTECODE_LABEL (DECL_RTL (exp)))
11193 bc_load_externaddr (DECL_RTL (exp));
11196 if (DECL_EXTERNAL (exp))
11197 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
11198 (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
11200 bc_load_localaddr (DECL_RTL (exp));
11202 /* For variable-sized types: retrieve pointer */
11203 if (TYPE_SIZE (TREE_TYPE (exp))
11204 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11205 bc_emit_instruction (loadP);
11207 /* If packed, also return offset and size */
11208 if (DECL_BIT_FIELD (exp))
11209 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11210 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11218 bc_emit_bytecode (constP);
11219 r = output_constant_def (exp);
11220 bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
11222 #ifdef DEBUG_PRINT_CODE
11223 fputc ('\n', stderr);
11234 /* Most lvalues don't have components. */
11239 /* Emit a type code to be used by the runtime support in handling
11240 parameter passing. The type code consists of the machine mode
11241 plus the minimal alignment shifted left 8 bits. */
11244 bc_runtime_type_code (type)
11249 switch (TREE_CODE (type))
11255 case ENUMERAL_TYPE:
11259 val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
11271 return build_int_2 (val, 0);
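/* A minimal sketch, using hypothetical names, of how a consumer would take
   the type code built above apart again: the machine mode sits in the low
   8 bits and the alignment in the remaining bits.  */

static void
decode_type_code_sketch (code, mode, align)
     int code;
     int *mode, *align;
{
  *mode = code & 0xff;
  *align = code >> 8;
}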
11275 /* Generate constructor label */
11277 bc_gen_constr_label ()
11279 static int label_counter;
11280 static char label[20];
11282 sprintf (label, "*LR%d", label_counter++);
11284 return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
11288 /* Evaluate constructor CONSTR and return pointer to it on level one. We
11289 expand the constructor data as static data, and push a pointer to it.
11290 The pointer is put in the pointer table and is retrieved by a constP
11291 bytecode instruction. We then loop and store each constructor member in
11292 the corresponding component. Finally, we return the original pointer on
11296 bc_expand_constructor (constr)
11300 HOST_WIDE_INT ptroffs;
11304 /* Literal constructors are handled as constants, whereas
11305 non-literals are evaluated and stored element by element
11306 into the data segment. */
11308 /* Allocate space in proper segment and push pointer to space on stack.
11311 l = bc_gen_constr_label ();
11313 if (TREE_CONSTANT (constr))
11317 bc_emit_const_labeldef (l);
11318 bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
11324 bc_emit_data_labeldef (l);
11325 bc_output_data_constructor (constr);
11329 /* Add reference to pointer table and recall pointer to stack;
11330 this code is common for both types of constructors: literals
11331 and non-literals. */
11333 ptroffs = bc_define_pointer (l);
11334 bc_emit_instruction (constP, ptroffs);
11336 /* This is all that has to be done if it's a literal. */
11337 if (TREE_CONSTANT (constr))
11341 /* At this point, we have the pointer to the structure on top of the stack.
11342 Generate sequences of store_memory calls for the constructor. */
11344 /* constructor type is structure */
11345 if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
11349 /* If the constructor has fewer fields than the structure,
11350 clear the whole structure first. */
11352 if (list_length (CONSTRUCTOR_ELTS (constr))
11353 != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
11355 bc_emit_instruction (duplicate);
11356 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11357 bc_emit_instruction (clearBLK);
11360 /* Store each element of the constructor into the corresponding
11361 field of TARGET. */
11363 for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
11365 register tree field = TREE_PURPOSE (elt);
11366 register enum machine_mode mode;
11371 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
11372 mode = DECL_MODE (field);
11373 unsignedp = TREE_UNSIGNED (field);
11375 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
11377 bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11378 /* The alignment of TARGET is
11379 at least what its type requires. */
11381 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11382 int_size_in_bytes (TREE_TYPE (constr)));
11387 /* Constructor type is array */
11388 if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
11392 tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
11393 int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
11394 int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
11395 tree elttype = TREE_TYPE (TREE_TYPE (constr));
11397 /* If the constructor has fewer elements than the array,
11398 clear the whole array first. */
11400 if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
11402 bc_emit_instruction (duplicate);
11403 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11404 bc_emit_instruction (clearBLK);
11408 /* Store each element of the constructor into the corresponding
11409 element of TARGET, determined by counting the elements. */
11411 for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
11413 elt = TREE_CHAIN (elt), i++)
11415 register enum machine_mode mode;
11420 mode = TYPE_MODE (elttype);
11421 bitsize = GET_MODE_BITSIZE (mode);
11422 unsignedp = TREE_UNSIGNED (elttype);
11424 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
11425 /* * TYPE_SIZE_UNIT (elttype) */ );
11427 bc_store_field (elt, bitsize, bitpos, mode,
11428 TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11429 /* The alignment of TARGET is
11430 at least what its type requires. */
11432 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11433 int_size_in_bytes (TREE_TYPE (constr)));
11440 /* Store the value of EXP (an expression tree) into member FIELD of
11441 structure at address on stack, which has type TYPE, mode MODE and
11442 occupies BITSIZE bits, starting BITPOS bits from the beginning of the
11445 ALIGN is the alignment that TARGET is known to have, measured in bytes.
11446 TOTAL_SIZE is its size in bytes, or -1 if variable. */
11449 bc_store_field (field, bitsize, bitpos, mode, exp, type,
11450 value_mode, unsignedp, align, total_size)
11451 int bitsize, bitpos;
11452 enum machine_mode mode;
11453 tree field, exp, type;
11454 enum machine_mode value_mode;
11460 /* Expand expression and copy pointer */
11461 bc_expand_expr (exp);
11462 bc_emit_instruction (over);
11465 /* If the component is a bit field, we cannot use addressing to access
11466 it. Use bit-field techniques to store in it. */
11468 if (DECL_BIT_FIELD (field))
11470 bc_store_bit_field (bitpos, bitsize, unsignedp);
11474 /* Not bit field */
11476 HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;
11478 /* Advance pointer to the desired member */
11480 bc_emit_instruction (addconstPSI, offset);
11483 bc_store_memory (type, field);
11488 /* Store SI/SU in bitfield */
11490 bc_store_bit_field (offset, size, unsignedp)
11491 int offset, size, unsignedp;
11493 /* Push bitfield offset and size */
11494 bc_push_offset_and_size (offset, size);
11497 bc_emit_instruction (sstoreBI);
11501 /* Load SI/SU from bitfield */
11503 bc_load_bit_field (offset, size, unsignedp)
11504 int offset, size, unsignedp;
11506 /* Push bitfield offset and size */
11507 bc_push_offset_and_size (offset, size);
11509 /* Load: sign-extend if signed, else zero-extend */
11510 bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
11514 /* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
11515 (adjust stack pointer upwards), negative means add that number of
11516 levels (adjust the stack pointer downwards). Only positive values
11517 normally make sense. */
11520 bc_adjust_stack (nlevels)
11529 bc_emit_instruction (drop);
11532 bc_emit_instruction (drop);
11537 bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
11538 stack_depth -= nlevels;
11541 #if defined (VALIDATE_STACK_FOR_BC)
11542 VALIDATE_STACK_FOR_BC ();