/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92, 93, 94, 95, 96, 1997 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
#include "typeclass.h"
#include "bc-opcode.h"
#include "bc-typecd.h"
#define CEIL(x,y) (((x) + (y) - 1) / (y))
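/* Editor's note (illustrative, not in the original source): CEIL rounds
   an integer division upward, so CEIL (10, 4) == 3 while 10 / 4 == 2.
   It is used below to count the words needed to hold a value, as in
   CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD).  */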
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;
/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;
/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int from_struct;
  int len;
  int offset;
  int reverse;
};
/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int len;
  int offset;
  int reverse;
};

/* Used to generate bytecodes: keep track of size of local variables,
   as well as depth of arithmetic stack.  (Notice that variables are
   stored on the machine's stack, not the arithmetic stack.)  */

extern int local_vars_size;
extern int stack_depth;
extern int max_stack_depth;
extern struct obstack permanent_obstack;
extern rtx arg_pointer_save_area;
static rtx enqueue_insn		PROTO((rtx, rtx));
static int queued_subexp_p	PROTO((rtx));
static void init_queue		PROTO((void));
static void move_by_pieces	PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1	PROTO((rtx (*) (), enum machine_mode,
				       struct move_by_pieces *));
static void clear_by_pieces	PROTO((rtx, int, int));
static void clear_by_pieces_1	PROTO((rtx (*) (), enum machine_mode,
				       struct clear_by_pieces *));
static int is_zeros_p		PROTO((tree));
static int mostly_zeros_p	PROTO((tree));
static void store_constructor	PROTO((tree, rtx, int));
static rtx store_field		PROTO((rtx, int, int, enum machine_mode, tree,
				       enum machine_mode, int, int, int));
static int get_inner_unaligned_p PROTO((tree));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p		PROTO((rtx, tree));
static int fixed_type_p		PROTO((tree));
static rtx var_rtx		PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant	PROTO((tree, tree *));
static tree c_strlen		PROTO((tree));
static rtx expand_builtin	PROTO((tree, rtx, rtx,
				       enum machine_mode, int));
static int apply_args_size	PROTO((void));
static int apply_result_size	PROTO((void));
static rtx result_vector	PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply	PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment	PROTO((tree, int, int));
void bc_expand_increment	PROTO((struct increment_operator *, tree));
rtx bc_allocate_local		PROTO((int, int));
void bc_store_memory		PROTO((tree, tree));
tree bc_expand_component_address PROTO((tree));
tree bc_expand_address		PROTO((tree));
void bc_expand_constructor	PROTO((tree));
void bc_adjust_stack		PROTO((int));
tree bc_canonicalize_array_ref	PROTO((tree));
void bc_load_memory		PROTO((tree, tree));
void bc_load_externaddr		PROTO((rtx));
void bc_load_externaddr_id	PROTO((tree, int));
void bc_load_localaddr		PROTO((rtx));
void bc_load_parmaddr		PROTO((rtx));
static void preexpand_calls	PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare	PROTO((rtx, rtx, rtx));
static rtx compare		PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag	PROTO((tree, rtx, enum machine_mode, int));
static tree defer_cleanups_to	PROTO((tree));
extern tree truthvalue_conversion PROTO((tree));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move insn.  */

#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
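/* Editor's note (illustrative, not in the original source): with a
   MOVE_RATIO of 15, a constant-size block copy is expanded inline
   whenever move_by_pieces_ninsns says it takes fewer than 15 moves;
   e.g. a 32-byte copy needing eight SImode moves is done inline,
   while a 4096-byte copy falls through to a movstr pattern or a
   library call.  See the test in emit_block_move below.  */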
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif

/* Maps used to convert modes to const, load, and store bytecodes.  */
enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
/* Initialize maps used to convert modes to const, load, and store
   bytecodes.  */

void
bc_init_mode_to_opcode_maps ()
{
  int mode;

  for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
    mode_to_const_map[mode] =
      mode_to_load_map[mode] =
	mode_to_store_map[mode] = neverneverland;

#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
  mode_to_const_map[(int) SYM] = CONST; \
  mode_to_load_map[(int) SYM] = LOAD; \
  mode_to_store_map[(int) SYM] = STORE;

#include "modemap.def"
#undef DEF_MODEMAP
}
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx (REG, mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}
/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}
/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
			   var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
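/* Editor's sketch (illustrative, not in the original source): when
   expanding something like `a = b++', the expander queues the
   increment and keeps using the pre-increment value:

       q = enqueue_insn (breg, gen_add2_insn (breg, const1_rtx));
       val = protect_from_queue (q, 0);   -- pre-increment value
       ... put VAL into insns ...
       emit_queue ();                     -- increment emitted here

   BREG here is a hypothetical register rtx; gen_add2_insn and
   emit_queue are the helpers used elsewhere in this file.  */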
rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));

	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
	|| queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
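/* Editor's example (illustrative, not in the original source):

       convert_move (sireg, qireg, 1);

   with hypothetical SImode register SIREG and QImode register QIREG
   emits whatever insns the target needs to zero-extend the 8-bit
   value to 32 bits; passing 0 for UNSIGNEDP would sign-extend.  */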
void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;
	    }
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion */			/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */
rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
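/* Editor's example (illustrative, not in the original source): unlike
   convert_move, convert_to_mode returns its result, creating a new
   pseudo only when needed:

       rtx wide = convert_to_mode (SImode, qireg, 0);

   QIREG is a hypothetical QImode value; WIDE holds its sign-extended
   SImode equivalent, possibly QIREG itself reinterpreted in place.  */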
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */
rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */
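/* Editor's worked example (illustrative, not in the original source):
   on a 32-bit target with MOVE_MAX == 4, a call with LEN == 7 and
   ALIGN == 4 first copies one SImode word, then one HImode halfword,
   then one QImode byte: three move insns in all.  */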
static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */
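/* Editor's worked example (illustrative, not in the original source):
   for L == 7 and ALIGN == 4 on a 32-bit target, the loop below counts
   7/4 = 1 SImode move, then 3/2 = 1 HImode move, then 1/1 = 1 QImode
   move, returning 3.  */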
static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */
static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : change_address (data->to, mode,
			       plus_constant (data->to_addr, data->offset)));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

      from1 = (data->autinc_from
	       ? gen_rtx (MEM, mode, data->from_addr)
	       : change_address (data->from, mode,
				 plus_constant (data->from_addr, data->offset)));
      MEM_IN_STRUCT_P (from1) = data->from_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */
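/* Editor's example (illustrative, not in the original source):

       emit_block_move (destmem, srcmem, GEN_INT (16), 4);

   with hypothetical BLKmode MEMs DESTMEM and SRCMEM copies 16 bytes
   assuming at most 4-byte alignment; a constant size this small is
   normally expanded inline via move_by_pieces.  */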
void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= GET_MODE_MASK (mode)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (x, 0), Pmode,
			 XEXP (y, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
    }
}
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */
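/* Editor's example (illustrative, not in the original source):

       move_block_to_reg (3, parmmem, 2, BLKmode);

   loads two consecutive words of the hypothetical MEM PARMMEM into
   hard registers 3 and 4, via a load_multiple insn when the target
   has one, else word-by-word moves.  */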
void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */
void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
  rtx pat, last;

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx (REG, word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}
/* Emit code to move a block Y to a block X, where X is non-consecutive
   registers represented by a PARALLEL.  */
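/* Editor's note (illustrative, not in the original source): X here has
   the shape

       (parallel [(expr_list (reg:SI 3) (const_int 0))
		  (expr_list (reg:DF 32) (const_int 4))])

   i.e. each element pairs a destination register with the byte offset
   in Y it is loaded from; register numbers above are hypothetical.  */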
void
emit_group_load (x, y)
     rtx x, y;
{
  rtx target_reg, source;
  int i;

  if (GET_CODE (x) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (x, 0, 0), 0))
    i = 0;
  else
    i = 1;

  for (; i < XVECLEN (x, 0); i++)
    {
      rtx element = XVECEXP (x, 0, i);

      target_reg = XEXP (element, 0);

      if (GET_CODE (y) == MEM)
	source = change_address (y, GET_MODE (target_reg),
				 plus_constant (XEXP (y, 0),
						INTVAL (XEXP (element, 1))));
      else if (XEXP (element, 1) == const0_rtx)
	{
	  if (GET_MODE (target_reg) == GET_MODE (y))
	    source = y;
	  /* Allow for the target_reg to be smaller than the input register
	     to allow for AIX with 4 DF arguments after a single SI arg.  The
	     last DF argument will only load 1 word into the integer registers,
	     but load a DF value into the float registers.  */
	  else if (GET_MODE_SIZE (GET_MODE (target_reg))
		   <= GET_MODE_SIZE (GET_MODE (y)))
	    source = gen_rtx (SUBREG, GET_MODE (target_reg), y, 0);
	  else
	    abort ();
	}
      else
	abort ();

      emit_move_insn (target_reg, source);
    }
}
/* Emit code to move a block Y to a block X, where Y is non-consecutive
   registers represented by a PARALLEL.  */
void
emit_group_store (x, y)
     rtx x, y;
{
  rtx source_reg, target;
  int i;

  if (GET_CODE (y) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (y, 0, 0), 0))
    i = 0;
  else
    i = 1;

  for (; i < XVECLEN (y, 0); i++)
    {
      rtx element = XVECEXP (y, 0, i);

      source_reg = XEXP (element, 0);

      if (GET_CODE (x) == MEM)
	target = change_address (x, GET_MODE (source_reg),
				 plus_constant (XEXP (x, 0),
						INTVAL (XEXP (element, 1))));
      else if (XEXP (element, 1) == const0_rtx)
	{
	  target = x;
	  if (GET_MODE (target) != GET_MODE (source_reg))
	    target = gen_lowpart (GET_MODE (source_reg), target);
	}
      else
	abort ();

      emit_move_insn (target, source_reg);
    }
}
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */
void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx (EXPR_LIST, VOIDmode,
	       gen_rtx (USE, VOIDmode, reg), *call_fusage);
}
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */
void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
}
/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */
void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (regs, 0, 0), 0))
    i = 0;
  else
    i = 1;

  for (; i < XVECLEN (regs, 0); i++)
    use_reg (call_fusage, XEXP (XVECEXP (regs, 0, i), 0));
}
/* Generate several move instructions to clear LEN bytes of block TO.
   (A MEM rtx with BLKmode).  The caller must pass TO through
   protect_from_queue before calling.  ALIGN (in bytes) is maximum alignment
   we can assume.  */
static void
clear_by_pieces (to, len, align)
     rtx to;
     int len, align;
{
  struct clear_by_pieces data;
  rtx to_addr = XEXP (to, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.to = to;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data.autinc_to
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	clear_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
/* Subroutine of clear_by_pieces.  Clear as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */
static void
clear_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct clear_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : change_address (data->to, mode,
			       plus_constant (data->to_addr, data->offset)));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, const0_rtx));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
   the maximum alignment it has, measured in bytes.  */
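/* Editor's example (illustrative, not in the original source):

       clear_storage (objmem, GEN_INT (24), 4);

   zeros 24 bytes of the hypothetical BLKmode MEM OBJMEM: inline via
   clear_by_pieces for small constant sizes, else through a clrstr
   pattern or a memset/bzero library call, mirroring emit_block_move.  */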
void
clear_storage (object, size, align)
     rtx object;
     rtx size;
     int align;
{
  if (GET_MODE (object) == BLKmode)
    {
      object = protect_from_queue (object, 1);
      size = protect_from_queue (size, 0);

      if (GET_CODE (size) == CONST_INT
	  && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
	clear_by_pieces (object, INTVAL (size), align);
      else
	{
	  /* Try the most limited insn first, because there's no point
	     including more than one in the machine description unless
	     the more limited one has some advantage.  */

	  rtx opalign = GEN_INT (align);
	  enum machine_mode mode;

	  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	       mode = GET_MODE_WIDER_MODE (mode))
	    {
	      enum insn_code code = clrstr_optab[(int) mode];

	      if (code != CODE_FOR_nothing
		  /* We don't need MODE to be narrower than
		     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
		     the mode mask, as it is returned by the macro, it will
		     definitely be less than the actual mode mask.  */
		  && ((GET_CODE (size) == CONST_INT
		       && ((unsigned HOST_WIDE_INT) INTVAL (size)
			   <= GET_MODE_MASK (mode)))
		      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
		  && (insn_operand_predicate[(int) code][0] == 0
		      || (*insn_operand_predicate[(int) code][0]) (object,
								   BLKmode))
		  && (insn_operand_predicate[(int) code][2] == 0
		      || (*insn_operand_predicate[(int) code][2]) (opalign,
								    VOIDmode)))
		{
		  rtx op1;
		  rtx last = get_last_insn ();
		  rtx pat;

		  op1 = convert_to_mode (mode, size, 1);
		  if (insn_operand_predicate[(int) code][1] != 0
		      && ! (*insn_operand_predicate[(int) code][1]) (op1,
								     mode))
		    op1 = copy_to_mode_reg (mode, op1);

		  pat = GEN_FCN ((int) code) (object, op1, opalign);
		  if (pat)
		    {
		      emit_insn (pat);
		      return;
		    }
		  else
		    delete_insns_since (last);
		}
	    }

#ifdef TARGET_MEM_FUNCTIONS
	  emit_library_call (memset_libfunc, 0,
			     VOIDmode, 3,
			     XEXP (object, 0), Pmode,
			     const0_rtx, TYPE_MODE (integer_type_node),
			     convert_to_mode (TYPE_MODE (sizetype),
					      size, TREE_UNSIGNED (sizetype)),
			     TYPE_MODE (sizetype));
#else
	  emit_library_call (bzero_libfunc, 0,
			     VOIDmode, 2,
			     XEXP (object, 0), Pmode,
			     convert_to_mode (TYPE_MODE (integer_type_node),
					      size,
					      TREE_UNSIGNED (integer_type_node)),
			     TYPE_MODE (integer_type_node));
#endif
	}
    }
  else
    emit_move_insn (object, const0_rtx);
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */
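/* Editor's example (illustrative, not in the original source):

       last = emit_move_insn (target, const0_rtx);

   stores zero into the hypothetical rtx TARGET; the VOIDmode constant
   is legal here because it adopts TARGET's mode.  Multi-word and
   complex modes are split into word or part moves by emit_move_insn_1
   below.  */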
rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  return emit_move_insn_1 (x, y);
}
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */
rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
	   && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
						    * BITS_PER_UNIT),
						   (class == MODE_COMPLEX_INT
						    ? MODE_INT : MODE_FLOAT),
						   0))
	   && (mov_optab->handlers[(int) submode].insn_code
	       != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));

      if (stack)
	{
	  /* If this is a stack, push the highpart first, so it
	     will be in the argument order.

	     In that case, change_address is used only to convert
	     the mode, not to change the address.  */

	  /* Note that the real part always precedes the imag part in memory
	     regardless of machine's endianness.  */
#ifdef STACK_GROWS_DOWNWARD
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
#else
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));
#endif
	}
      else
	{
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_realpart (submode, x), gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_imagpart (submode, x), gen_imagpart (submode, y)));
	}

      return get_last_insn ();
    }
  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;

#ifdef PUSH_ROUNDING

      /* If X is a push on the stack, do the push now and replace
	 X with a reference to the stack pointer.  */
      if (push_operand (x, GET_MODE (x)))
	{
	  anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
	  x = change_address (x, VOIDmode, stack_pointer_rtx);
	}
#endif

      /* Show the output dies here.  */
      if (x != y)
	emit_insn (gen_rtx (CLOBBER, VOIDmode, x));

      for (i = 0;
	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
	   i++)
	{
	  rtx xpart = operand_subword (x, i, 1, mode);
	  rtx ypart = operand_subword (y, i, 1, mode);

	  /* If we can't get a part of Y, put Y into memory if it is a
	     constant.  Otherwise, force it into a register.  If we still
	     can't get a part of Y, abort.  */
	  if (ypart == 0 && CONSTANT_P (y))
	    {
	      y = force_const_mem (mode, y);
	      ypart = operand_subword (y, i, 1, mode);
	    }
	  else if (ypart == 0)
	    ypart = operand_subword_force (y, i, mode);

	  if (xpart == 0 || ypart == 0)
	    abort ();

	  last_insn = emit_move_insn (xpart, ypart);
	}

      return last_insn;
    }
  else
    abort ();
}
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */
2315 push_block (size, extra, below)
2321 size = convert_modes (Pmode, ptr_mode, size, 1);
2322 if (CONSTANT_P (size))
2323 anti_adjust_stack (plus_constant (size, extra));
2324 else if (GET_CODE (size) == REG && extra == 0)
2325 anti_adjust_stack (size);
2328 rtx temp = copy_to_mode_reg (Pmode, size);
2330 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2331 temp, 0, OPTAB_LIB_WIDEN);
2332 anti_adjust_stack (temp);
2335 #ifdef STACK_GROWS_DOWNWARD
2336 temp = virtual_outgoing_args_rtx;
2337 if (extra != 0 && below)
2338 temp = plus_constant (temp, extra);
2340 if (GET_CODE (size) == CONST_INT)
2341 temp = plus_constant (virtual_outgoing_args_rtx,
2342 - INTVAL (size) - (below ? 0 : extra));
2343 else if (extra != 0 && !below)
2344 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2345 negate_rtx (Pmode, plus_constant (size, extra)));
2347 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2348 negate_rtx (Pmode, size));
2351 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2357 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
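/* A worked example of the EXTRA/BELOW logic above, assuming
   STACK_GROWS_DOWNWARD; the numbers are illustrative only.

       rtx addr = push_block (GEN_INT (16), 4, 0);

   adjusts the stack by 16 + 4 = 20 bytes.  With BELOW == 0 the
   padding sits at high addresses, so ADDR is
   virtual_outgoing_args_rtx itself; with BELOW nonzero the same call
   returns virtual_outgoing_args_rtx + 4 and the padding occupies the
   low 4 bytes.  */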
2360 /* Generate code to push X onto the stack, assuming it has mode MODE and
2362 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
2364 SIZE is an rtx for the size of data to be copied (in bytes),
2365 needed only if X is BLKmode.
2367 ALIGN (in bytes) is maximum alignment we can assume.
2369 If PARTIAL and REG are both nonzero, then copy that many of the first
2370 words of X into registers starting with REG, and push the rest of X.
2371 The amount of space pushed is decreased by PARTIAL words,
2372 rounded *down* to a multiple of PARM_BOUNDARY.
2373 REG must be a hard register in this case.
2374 If REG is zero but PARTIAL is not, take all other actions for an
2375 argument partially in registers, but do not actually load any registers.
2378 EXTRA is the amount in bytes of extra space to leave next to this arg.
2379 This is ignored if an argument block has already been allocated.
2381 On a machine that lacks real push insns, ARGS_ADDR is the address of
2382 the bottom of the argument block for this call. We use indexing off there
2383 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2384 argument block has not been preallocated.
2386 ARGS_SO_FAR is the size of args previously pushed for this call. */
2389 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2390 args_addr, args_so_far)
2392 enum machine_mode mode;
2403 enum direction stack_direction
2404 #ifdef STACK_GROWS_DOWNWARD
2410 /* Decide where to pad the argument: `downward' for below,
2411 `upward' for above, or `none' for don't pad it.
2412 Default is below for small data on big-endian machines; else above. */
2413 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2415 /* If we're placing part of X into a register and part of X onto
2416 the stack, indicate that the entire register is clobbered to
2417 keep flow from thinking the unused part of the register is live. */
2418 if (partial > 0 && reg != 0)
2419 emit_insn (gen_rtx (CLOBBER, VOIDmode, reg));
2421 /* Invert direction if stack is post-update. */
2422 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2423 if (where_pad != none)
2424 where_pad = (where_pad == downward ? upward : downward);
2426 xinner = x = protect_from_queue (x, 0);
2428 if (mode == BLKmode)
2430 /* Copy a block into the stack, entirely or partially. */
2433 int used = partial * UNITS_PER_WORD;
2434 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2442 /* USED is now the # of bytes we need not copy to the stack
2443 because registers will take care of them. */
2446 xinner = change_address (xinner, BLKmode,
2447 plus_constant (XEXP (xinner, 0), used));
2449 /* If the partial register-part of the arg counts in its stack size,
2450 skip the part of stack space corresponding to the registers.
2451 Otherwise, start copying to the beginning of the stack space,
2452 by setting SKIP to 0. */
2453 #ifndef REG_PARM_STACK_SPACE
2459 #ifdef PUSH_ROUNDING
2460 /* Do it with several push insns if that doesn't take lots of insns
2461 and if there is no difficulty with push insns that skip bytes
2462 on the stack for alignment purposes. */
2464 && GET_CODE (size) == CONST_INT
2466 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2468 /* Here we avoid the case of a structure whose weak alignment
2469 forces many pushes of a small amount of data,
2470 and such small pushes do rounding that causes trouble. */
2471 && ((! SLOW_UNALIGNED_ACCESS)
2472 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2473 || PUSH_ROUNDING (align) == align)
2474 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2476 /* Push padding now if padding above and stack grows down,
2477 or if padding below and stack grows up.
2478 But if space already allocated, this has already been done. */
2479 if (extra && args_addr == 0
2480 && where_pad != none && where_pad != stack_direction)
2481 anti_adjust_stack (GEN_INT (extra));
2483 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2484 INTVAL (size) - used, align);
2487 #endif /* PUSH_ROUNDING */
2489 /* Otherwise make space on the stack and copy the data
2490 to the address of that space. */
2492 /* Deduct words put into registers from the size we must copy. */
2495 if (GET_CODE (size) == CONST_INT)
2496 size = GEN_INT (INTVAL (size) - used);
2498 size = expand_binop (GET_MODE (size), sub_optab, size,
2499 GEN_INT (used), NULL_RTX, 0,
2503 /* Get the address of the stack space.
2504 In this case, we do not deal with EXTRA separately.
2505 A single stack adjust will do. */
2508 temp = push_block (size, extra, where_pad == downward);
2511 else if (GET_CODE (args_so_far) == CONST_INT)
2512 temp = memory_address (BLKmode,
2513 plus_constant (args_addr,
2514 skip + INTVAL (args_so_far)));
2516 temp = memory_address (BLKmode,
2517 plus_constant (gen_rtx (PLUS, Pmode,
2518 args_addr, args_so_far),
2521 /* TEMP is the address of the block. Copy the data there. */
2522 if (GET_CODE (size) == CONST_INT
2523 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2526 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2527 INTVAL (size), align);
2530 /* Try the most limited insn first, because there's no point
2531 including more than one in the machine description unless
2532 the more limited one has some advantage. */
2533 #ifdef HAVE_movstrqi
2535 && GET_CODE (size) == CONST_INT
2536 && ((unsigned) INTVAL (size)
2537 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2539 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2540 xinner, size, GEN_INT (align));
2548 #ifdef HAVE_movstrhi
2550 && GET_CODE (size) == CONST_INT
2551 && ((unsigned) INTVAL (size)
2552 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2554 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2555 xinner, size, GEN_INT (align));
2563 #ifdef HAVE_movstrsi
2566 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2567 xinner, size, GEN_INT (align));
2575 #ifdef HAVE_movstrdi
2578 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2579 xinner, size, GEN_INT (align));
2588 #ifndef ACCUMULATE_OUTGOING_ARGS
2589 /* If the source is referenced relative to the stack pointer,
2590 copy it to another register to stabilize it. We do not need
2591 to do this if we know that we won't be changing sp. */
2593 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2594 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2595 temp = copy_to_reg (temp);
2598 /* Make inhibit_defer_pop nonzero around the library call
2599 to force it to pop the bcopy-arguments right away. */
2601 #ifdef TARGET_MEM_FUNCTIONS
2602 emit_library_call (memcpy_libfunc, 0,
2603 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2604 convert_to_mode (TYPE_MODE (sizetype),
2605 size, TREE_UNSIGNED (sizetype)),
2606 TYPE_MODE (sizetype));
2608 emit_library_call (bcopy_libfunc, 0,
2609 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2610 convert_to_mode (TYPE_MODE (integer_type_node),
2612 TREE_UNSIGNED (integer_type_node)),
2613 TYPE_MODE (integer_type_node));
2618 else if (partial > 0)
2620 /* Scalar partly in registers. */
2622 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2625 /* # words of start of argument
2626 that we must make space for but need not store. */
2627 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2628 int args_offset = INTVAL (args_so_far);
2631 /* Push padding now if padding above and stack grows down,
2632 or if padding below and stack grows up.
2633 But if space already allocated, this has already been done. */
2634 if (extra && args_addr == 0
2635 && where_pad != none && where_pad != stack_direction)
2636 anti_adjust_stack (GEN_INT (extra));
2638 /* If we make space by pushing it, we might as well push
2639 the real data. Otherwise, we can leave OFFSET nonzero
2640 and leave the space uninitialized. */
2644 /* Now NOT_STACK gets the number of words that we don't need to
2645 allocate on the stack. */
2646 not_stack = partial - offset;
2648 /* If the partial register-part of the arg counts in its stack size,
2649 skip the part of stack space corresponding to the registers.
2650 Otherwise, start copying to the beginning of the stack space,
2651 by setting SKIP to 0. */
2652 #ifndef REG_PARM_STACK_SPACE
2658 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2659 x = validize_mem (force_const_mem (mode, x));
2661 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2662 SUBREGs of such registers are not allowed. */
2663 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2664 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2665 x = copy_to_reg (x);
2667 /* Loop over all the words allocated on the stack for this arg. */
2668 /* We can do it by words, because any scalar bigger than a word
2669 has a size that is a multiple of a word. */
2670 #ifndef PUSH_ARGS_REVERSED
2671 for (i = not_stack; i < size; i++)
2673 for (i = size - 1; i >= not_stack; i--)
2675 if (i >= not_stack + offset)
2676 emit_push_insn (operand_subword_force (x, i, mode),
2677 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2679 GEN_INT (args_offset + ((i - not_stack + skip)
2680 * UNITS_PER_WORD)));
2686 /* Push padding now if padding above and stack grows down,
2687 or if padding below and stack grows up.
2688 But if space already allocated, this has already been done. */
2689 if (extra && args_addr == 0
2690 && where_pad != none && where_pad != stack_direction)
2691 anti_adjust_stack (GEN_INT (extra));
2693 #ifdef PUSH_ROUNDING
2695 addr = gen_push_operand ();
2698 if (GET_CODE (args_so_far) == CONST_INT)
2700 = memory_address (mode,
2701 plus_constant (args_addr, INTVAL (args_so_far)));
2703 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2706 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2710 /* If part should go in registers, copy that part
2711 into the appropriate registers. Do this now, at the end,
2712 since mem-to-mem copies above may do function calls. */
2713 if (partial > 0 && reg != 0)
2715 /* Handle calls that pass values in multiple non-contiguous locations.
2716 The Irix 6 ABI has examples of this. */
2717 if (GET_CODE (reg) == PARALLEL)
2718 emit_group_load (reg, x);
2720 move_block_to_reg (REGNO (reg), x, partial, mode);
2723 if (extra && args_addr == 0 && where_pad == stack_direction)
2724 anti_adjust_stack (GEN_INT (extra));
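/* An illustrative call, with hypothetical operands not taken from
   this file: pushing a four-word scalar (TImode on a 32-bit target)
   of which the first two words travel in registers.  PARTIAL == 2
   and a nonzero REG make the code above clobber REG, push only the
   trailing words, and finally load the leading words into REG via
   move_block_to_reg.  */
#if 0
static void
example_partial_push (x, reg, args_addr, args_so_far)
     rtx x, reg, args_addr, args_so_far;
{
  emit_push_insn (x, TImode, NULL_TREE, NULL_RTX,
                  PARM_BOUNDARY / BITS_PER_UNIT, 2, reg, 0,
                  args_addr, args_so_far);
}
#endif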
2727 /* Expand an assignment that stores the value of FROM into TO.
2728 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2729 (This may contain a QUEUED rtx;
2730 if the value is constant, this rtx is a constant.)
2731 Otherwise, the returned value is NULL_RTX.
2733 SUGGEST_REG is no longer actually used.
2734 It used to mean, copy the value through a register
2735 and return that register, if that is possible.
2736 We now use WANT_VALUE to decide whether to do this. */
2739 expand_assignment (to, from, want_value, suggest_reg)
2744 register rtx to_rtx = 0;
2747 /* Don't crash if the lhs of the assignment was erroneous. */
2749 if (TREE_CODE (to) == ERROR_MARK)
2751 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2752 return want_value ? result : NULL_RTX;
2755 if (output_bytecode)
2757 tree dest_innermost;
2759 bc_expand_expr (from);
2760 bc_emit_instruction (duplicate);
2762 dest_innermost = bc_expand_address (to);
2764 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2765 take care of it here. */
2767 bc_store_memory (TREE_TYPE (to), dest_innermost);
2771 /* Assignment of a structure component needs special treatment
2772 if the structure component's rtx is not simply a MEM.
2773 Assignment of an array element at a constant index, and assignment of
2774 an array element in an unaligned packed structure field, have the same problem. */
2777 if (TREE_CODE (to) == COMPONENT_REF
2778 || TREE_CODE (to) == BIT_FIELD_REF
2779 || (TREE_CODE (to) == ARRAY_REF
2780 && ((TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2781 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST)
2782 || (SLOW_UNALIGNED_ACCESS && get_inner_unaligned_p (to)))))
2784 enum machine_mode mode1;
2794 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
2795 &unsignedp, &volatilep, &alignment);
2797 /* If we are going to use store_bit_field and extract_bit_field,
2798 make sure to_rtx will be safe for multiple use. */
2800 if (mode1 == VOIDmode && want_value)
2801 tem = stabilize_reference (tem);
2803 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2806 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2808 if (GET_CODE (to_rtx) != MEM)
2810 to_rtx = change_address (to_rtx, VOIDmode,
2811 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
2812 force_reg (ptr_mode, offset_rtx)));
2816 if (GET_CODE (to_rtx) == MEM)
2818 /* When the offset is zero, to_rtx is the address of the
2819 structure we are storing into, and hence may be shared.
2820 We must make a new MEM before setting the volatile bit. */
2822 to_rtx = change_address (to_rtx, VOIDmode, XEXP (to_rtx, 0));
2823 MEM_VOLATILE_P (to_rtx) = 1;
2825 #if 0 /* This was turned off because, when a field is volatile
2826 in an object which is not volatile, the object may be in a register,
2827 and then we would abort over here. */
2833 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2835 /* Spurious cast makes HPUX compiler happy. */
2836 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2839 /* Required alignment of containing datum. */
2841 int_size_in_bytes (TREE_TYPE (tem)));
2842 preserve_temp_slots (result);
2846 /* If the value is meaningful, convert RESULT to the proper mode.
2847 Otherwise, return nothing. */
2848 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2849 TYPE_MODE (TREE_TYPE (from)),
2851 TREE_UNSIGNED (TREE_TYPE (to)))
2855 /* If the rhs is a function call and its value is not an aggregate,
2856 call the function before we start to compute the lhs.
2857 This is needed for correct code for cases such as
2858 val = setjmp (buf) on machines where reference to val
2859 requires loading up part of an address in a separate insn.
2861 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2862 a promoted variable where the zero- or sign- extension needs to be done.
2863 Handling this in the normal way is safe because no computation is done before the call. */
2865 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2866 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
2867 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
2872 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2874 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2876 /* Handle calls that return values in multiple non-contiguous locations.
2877 The Irix 6 ABI has examples of this. */
2878 if (GET_CODE (to_rtx) == PARALLEL)
2879 emit_group_load (to_rtx, value);
2880 else if (GET_MODE (to_rtx) == BLKmode)
2881 emit_block_move (to_rtx, value, expr_size (from),
2882 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
2884 emit_move_insn (to_rtx, value);
2885 preserve_temp_slots (to_rtx);
2888 return want_value ? to_rtx : NULL_RTX;
2891 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2892 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2895 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2897 /* Don't move directly into a return register. */
2898 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2903 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2904 emit_move_insn (to_rtx, temp);
2905 preserve_temp_slots (to_rtx);
2908 return want_value ? to_rtx : NULL_RTX;
2911 /* In case we are returning the contents of an object which overlaps
2912 the place the value is being stored, use a safe function when copying
2913 a value through a pointer into a structure value return block. */
2914 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2915 && current_function_returns_struct
2916 && !current_function_returns_pcc_struct)
2921 size = expr_size (from);
2922 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2924 #ifdef TARGET_MEM_FUNCTIONS
2925 emit_library_call (memcpy_libfunc, 0,
2926 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2927 XEXP (from_rtx, 0), Pmode,
2928 convert_to_mode (TYPE_MODE (sizetype),
2929 size, TREE_UNSIGNED (sizetype)),
2930 TYPE_MODE (sizetype));
2932 emit_library_call (bcopy_libfunc, 0,
2933 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2934 XEXP (to_rtx, 0), Pmode,
2935 convert_to_mode (TYPE_MODE (integer_type_node),
2936 size, TREE_UNSIGNED (integer_type_node)),
2937 TYPE_MODE (integer_type_node));
2940 preserve_temp_slots (to_rtx);
2943 return want_value ? to_rtx : NULL_RTX;
2946 /* Compute FROM and store the value in the rtx we got. */
2949 result = store_expr (from, to_rtx, want_value);
2950 preserve_temp_slots (result);
2953 return want_value ? result : NULL_RTX;
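/* Typical uses of expand_assignment; the LHS and RHS trees are
   hypothetical.  A statement-level assignment discards the value:

       expand_assignment (lhs, rhs, 0, 0);

   while an assignment used as a subexpression asks for it back:

       rtx val = expand_assignment (lhs, rhs, 1, 0);

   where VAL may contain a QUEUED rtx, as described above.  */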
2956 /* Generate code for computing expression EXP,
2957 and storing the value into TARGET.
2958 TARGET may contain a QUEUED rtx.
2960 If WANT_VALUE is nonzero, return a copy of the value
2961 not in TARGET, so that we can be sure to use the proper
2962 value in a containing expression even if TARGET has something
2963 else stored in it. If possible, we copy the value through a pseudo
2964 and return that pseudo. Or, if the value is constant, we try to
2965 return the constant. In some cases, we return a pseudo
2966 copied *from* TARGET.
2968 If the mode is BLKmode then we may return TARGET itself.
2969 It turns out that in BLKmode it doesn't cause a problem,
2970 because C has no operators that could combine two different
2971 assignments into the same BLKmode object with different values
2972 with no sequence point. Will other languages need this to be more thorough?
2975 If WANT_VALUE is 0, we return NULL, to make sure
2976 to catch quickly any cases where the caller uses the value
2977 and fails to set WANT_VALUE. */
2980 store_expr (exp, target, want_value)
2982 register rtx target;
2986 int dont_return_target = 0;
2988 if (TREE_CODE (exp) == COMPOUND_EXPR)
2990 /* Perform first part of compound expression, then assign from second
2992 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2994 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
2996 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2998 /* For conditional expression, get safe form of the target. Then
2999 test the condition, doing the appropriate assignment on either
3000 side. This avoids the creation of unnecessary temporaries.
3001 For non-BLKmode, it is more efficient not to do this. */
3003 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3004 rtx flag = NULL_RTX;
3005 tree left_cleanups = NULL_TREE;
3006 tree right_cleanups = NULL_TREE;
3007 tree old_cleanups = cleanups_this_call;
3009 /* Used to save a pointer to the place to put the setting of
3010 the flag that indicates if this side of the conditional was
3011 taken. We backpatch the code, if we find out later that we
3012 have any conditional cleanups that need to be performed. */
3013 rtx dest_right_flag = NULL_RTX;
3014 rtx dest_left_flag = NULL_RTX;
3017 target = protect_from_queue (target, 1);
3019 do_pending_stack_adjust ();
3021 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3022 store_expr (TREE_OPERAND (exp, 1), target, 0);
3023 dest_left_flag = get_last_insn ();
3024 /* Handle conditional cleanups, if any. */
3025 left_cleanups = defer_cleanups_to (old_cleanups);
3027 emit_jump_insn (gen_jump (lab2));
3030 store_expr (TREE_OPERAND (exp, 2), target, 0);
3031 dest_right_flag = get_last_insn ();
3032 /* Handle conditional cleanups, if any. */
3033 right_cleanups = defer_cleanups_to (old_cleanups);
3038 /* Add back in any conditional cleanups. */
3039 if (left_cleanups || right_cleanups)
3045 /* Now that we know that a flag is needed, go back and add in the
3046 setting of the flag. */
3048 flag = gen_reg_rtx (word_mode);
3050 /* Do the left side flag. */
3051 last = get_last_insn ();
3052 /* Flag left cleanups as needed. */
3053 emit_move_insn (flag, const1_rtx);
3054 /* ??? deprecated, use sequences instead. */
3055 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
3057 /* Do the right side flag. */
3058 last = get_last_insn ();
3059 /* Flag right cleanups as needed. */
3060 emit_move_insn (flag, const0_rtx);
3061 /* ??? deprecated, use sequences instead. */
3062 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
3064 /* All cleanups must be on the function_obstack. */
3065 push_obstacks_nochange ();
3066 resume_temporary_allocation ();
3068 /* Convert flag, which is an rtx, into a tree. */
3069 cond = make_node (RTL_EXPR);
3070 TREE_TYPE (cond) = integer_type_node;
3071 RTL_EXPR_RTL (cond) = flag;
3072 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
3073 cond = save_expr (cond);
3075 if (! left_cleanups)
3076 left_cleanups = integer_zero_node;
3077 if (! right_cleanups)
3078 right_cleanups = integer_zero_node;
3079 new_cleanups = build (COND_EXPR, void_type_node,
3080 truthvalue_conversion (cond),
3081 left_cleanups, right_cleanups);
3082 new_cleanups = fold (new_cleanups);
3086 /* Now add in the conditionalized cleanups. */
3088 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
3089 expand_eh_region_start ();
3091 return want_value ? target : NULL_RTX;
3093 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3094 && GET_MODE (target) != BLKmode)
3095 /* If target is in memory and caller wants value in a register instead,
3096 arrange that. Pass TARGET as target for expand_expr so that,
3097 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3098 We know expand_expr will not use the target in that case.
3099 Don't do this if TARGET is volatile because we are supposed
3100 to write it and then read it. */
3102 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3103 GET_MODE (target), 0);
3104 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3105 temp = copy_to_reg (temp);
3106 dont_return_target = 1;
3108 else if (queued_subexp_p (target))
3109 /* If target contains a postincrement, let's not risk
3110 using it as the place to generate the rhs. */
3112 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3114 /* Expand EXP into a new pseudo. */
3115 temp = gen_reg_rtx (GET_MODE (target));
3116 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3119 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3121 /* If target is volatile, ANSI requires accessing the value
3122 *from* the target, if it is accessed. So make that happen.
3123 In no case return the target itself. */
3124 if (! MEM_VOLATILE_P (target) && want_value)
3125 dont_return_target = 1;
3127 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3128 /* If this is a scalar in a register that is stored in a wider mode
3129 than the declared mode, compute the result into its declared mode
3130 and then convert to the wider mode. Our value is the computed expression. */
3133 /* If we don't want a value, we can do the conversion inside EXP,
3134 which will often result in some optimizations. Do the conversion
3135 in two steps: first change the signedness, if needed, then
3136 the extend. But don't do this if the type of EXP is a subtype
3137 of something else since then the conversion might involve
3138 more than just converting modes. */
3139 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3140 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3142 if (TREE_UNSIGNED (TREE_TYPE (exp))
3143 != SUBREG_PROMOTED_UNSIGNED_P (target))
3146 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3150 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3151 SUBREG_PROMOTED_UNSIGNED_P (target)),
3155 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3157 /* If TEMP is a volatile MEM and we want a result value, make
3158 the access now so it gets done only once. Likewise if
3159 it contains TARGET. */
3160 if (GET_CODE (temp) == MEM && want_value
3161 && (MEM_VOLATILE_P (temp)
3162 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3163 temp = copy_to_reg (temp);
3165 /* If TEMP is a VOIDmode constant, use convert_modes to make
3166 sure that we properly convert it. */
3167 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3168 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3169 TYPE_MODE (TREE_TYPE (exp)), temp,
3170 SUBREG_PROMOTED_UNSIGNED_P (target));
3172 convert_move (SUBREG_REG (target), temp,
3173 SUBREG_PROMOTED_UNSIGNED_P (target));
3174 return want_value ? temp : NULL_RTX;
3178 temp = expand_expr (exp, target, GET_MODE (target), 0);
3179 /* Return TARGET if it's a specified hardware register.
3180 If TARGET is a volatile mem ref, either return TARGET
3181 or return a reg copied *from* TARGET; ANSI requires this.
3183 Otherwise, if TEMP is not TARGET, return TEMP
3184 if it is constant (for efficiency),
3185 or if we really want the correct value. */
3186 if (!(target && GET_CODE (target) == REG
3187 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3188 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3190 && (CONSTANT_P (temp) || want_value))
3191 dont_return_target = 1;
3194 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3195 the same as that of TARGET, adjust the constant. This is needed, for
3196 example, in case it is a CONST_DOUBLE and we want only a word-sized value. */
3198 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3199 && TREE_CODE (exp) != ERROR_MARK
3200 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3201 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3202 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3204 /* If value was not generated in the target, store it there.
3205 Convert the value to TARGET's type first if necessary. */
3207 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
3209 target = protect_from_queue (target, 1);
3210 if (GET_MODE (temp) != GET_MODE (target)
3211 && GET_MODE (temp) != VOIDmode)
3213 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3214 if (dont_return_target)
3216 /* In this case, we will return TEMP,
3217 so make sure it has the proper mode.
3218 But don't forget to store the value into TARGET. */
3219 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3220 emit_move_insn (target, temp);
3223 convert_move (target, temp, unsignedp);
3226 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3228 /* Handle copying a string constant into an array.
3229 The string constant may be shorter than the array.
3230 So copy just the string's actual length, and clear the rest. */
3234 /* Get the size of the data type of the string,
3235 which is actually the size of the target. */
3236 size = expr_size (exp);
3237 if (GET_CODE (size) == CONST_INT
3238 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3239 emit_block_move (target, temp, size,
3240 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3243 /* Compute the size of the data to copy from the string. */
3245 = size_binop (MIN_EXPR,
3246 make_tree (sizetype, size),
3248 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3249 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3253 /* Copy that much. */
3254 emit_block_move (target, temp, copy_size_rtx,
3255 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3257 /* Figure out how much is left in TARGET that we have to clear.
3258 Do all calculations in ptr_mode. */
3260 addr = XEXP (target, 0);
3261 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3263 if (GET_CODE (copy_size_rtx) == CONST_INT)
3265 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3266 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3270 addr = force_reg (ptr_mode, addr);
3271 addr = expand_binop (ptr_mode, add_optab, addr,
3272 copy_size_rtx, NULL_RTX, 0,
3275 size = expand_binop (ptr_mode, sub_optab, size,
3276 copy_size_rtx, NULL_RTX, 0,
3279 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
3280 GET_MODE (size), 0, 0);
3281 label = gen_label_rtx ();
3282 emit_jump_insn (gen_blt (label));
3285 if (size != const0_rtx)
3287 #ifdef TARGET_MEM_FUNCTIONS
3288 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3290 const0_rtx, TYPE_MODE (integer_type_node),
3291 convert_to_mode (TYPE_MODE (sizetype),
3293 TREE_UNSIGNED (sizetype)),
3294 TYPE_MODE (sizetype));
3296 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3298 convert_to_mode (TYPE_MODE (integer_type_node),
3300 TREE_UNSIGNED (integer_type_node)),
3301 TYPE_MODE (integer_type_node));
3309 /* Handle calls that return values in multiple non-contiguous locations.
3310 The Irix 6 ABI has examples of this. */
3311 else if (GET_CODE (target) == PARALLEL)
3312 emit_group_load (target, temp);
3313 else if (GET_MODE (temp) == BLKmode)
3314 emit_block_move (target, temp, expr_size (exp),
3315 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3317 emit_move_insn (target, temp);
3320 /* If we don't want a value, return NULL_RTX. */
3324 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3325 ??? The latter test doesn't seem to make sense. */
3326 else if (dont_return_target && GET_CODE (temp) != MEM)
3329 /* Return TARGET itself if it is a hard register. */
3330 else if (want_value && GET_MODE (target) != BLKmode
3331 && ! (GET_CODE (target) == REG
3332 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3333 return copy_to_reg (target);
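/* A minimal sketch of the WANT_VALUE contract above; the caller and
   its operands are hypothetical.  */
#if 0
static void
example_store_expr (exp, target)
     tree exp;
     rtx target;
{
  rtx temp;

  /* Value discarded: always returns NULL_RTX.  */
  store_expr (exp, target, 0);

  /* Value wanted: yields TARGET itself, a pseudo copied from or to
     it, or a constant, following the rules spelled out above.  */
  temp = store_expr (exp, target, 1);
}
#endif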
3339 /* Return 1 if EXP just contains zeros. */
3347 switch (TREE_CODE (exp))
3351 case NON_LVALUE_EXPR:
3352 return is_zeros_p (TREE_OPERAND (exp, 0));
3355 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3359 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3362 return REAL_VALUES_EQUAL (TREE_REAL_CST (exp), dconst0);
3365 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3366 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3367 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3368 if (! is_zeros_p (TREE_VALUE (elt)))
3377 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
3380 mostly_zeros_p (exp)
3383 if (TREE_CODE (exp) == CONSTRUCTOR)
3385 int elts = 0, zeros = 0;
3386 tree elt = CONSTRUCTOR_ELTS (exp);
3387 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3389 /* If there are no ranges of true bits, it is all zero. */
3390 return elt == NULL_TREE;
3392 for (; elt; elt = TREE_CHAIN (elt))
3394 /* We do not handle the case where the index is a RANGE_EXPR,
3395 so the statistic will be somewhat inaccurate.
3396 We do make a more accurate count in store_constructor itself,
3397 so since this function is only used for nested array elements,
3398 this should be close enough. */
3399 if (mostly_zeros_p (TREE_VALUE (elt)))
3404 return 4 * zeros >= 3 * elts;
3407 return is_zeros_p (exp);
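/* A worked example of the 3/4 test above: the constructor
   { 0, 0, 0, 5 } gives ZEROS == 3 and ELTS == 4, and 4 * 3 >= 3 * 4
   holds, so it counts as mostly zero; { 0, 0, 5, 5 } gives
   4 * 2 < 3 * 4 and does not.  */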
3410 /* Helper function for store_constructor.
3411 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3412 TYPE is the type of the CONSTRUCTOR, not the element type.
3413 CLEARED is as for store_constructor.
3415 This provides a recursive shortcut back to store_constructor when it isn't
3416 necessary to go through store_field. This is so that we can pass through
3417 the cleared field to let store_constructor know that we may not have to
3418 clear a substructure if the outer structure has already been cleared. */
3421 store_constructor_field (target, bitsize, bitpos,
3422 mode, exp, type, cleared)
3424 int bitsize, bitpos;
3425 enum machine_mode mode;
3429 if (TREE_CODE (exp) == CONSTRUCTOR
3430 && bitpos % BITS_PER_UNIT == 0
3431 /* If we have a non-zero bitpos for a register target, then we just
3432 let store_field do the bitfield handling. This is unlikely to
3433 generate unnecessary clear instructions anyway. */
3434 && (bitpos == 0 || GET_CODE (target) == MEM))
3437 target = change_address (target, VOIDmode,
3438 plus_constant (XEXP (target, 0),
3439 bitpos / BITS_PER_UNIT));
3440 store_constructor (exp, target, cleared);
3443 store_field (target, bitsize, bitpos, mode, exp,
3444 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3445 int_size_in_bytes (type));
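/* An illustration of the shortcut above, for a hypothetical layout:
   storing a nested CONSTRUCTOR into a field at byte offset 4 of a
   MEM target does not go through store_field; the code simply
   rebases the address by BITPOS / BITS_PER_UNIT == 4 bytes,

       target = change_address (target, VOIDmode,
                                plus_constant (XEXP (target, 0), 4));
       store_constructor (exp, target, cleared);

   so an already-cleared outer structure lets the inner constructor
   skip its own clearing.  */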
3448 /* Store the value of constructor EXP into the rtx TARGET.
3449 TARGET is either a REG or a MEM.
3450 CLEARED is true if TARGET is known to have been zero'd. */
3453 store_constructor (exp, target, cleared)
3458 tree type = TREE_TYPE (exp);
3460 /* We know our target cannot conflict, since safe_from_p has been called. */
3462 /* Don't try copying piece by piece into a hard register
3463 since that is vulnerable to being clobbered by EXP.
3464 Instead, construct in a pseudo register and then copy it all. */
3465 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3467 rtx temp = gen_reg_rtx (GET_MODE (target));
3468 store_constructor (exp, temp, 0);
3469 emit_move_insn (target, temp);
3474 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3475 || TREE_CODE (type) == QUAL_UNION_TYPE)
3479 /* Inform later passes that the whole union value is dead. */
3480 if (TREE_CODE (type) == UNION_TYPE
3481 || TREE_CODE (type) == QUAL_UNION_TYPE)
3482 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3484 /* If we are building a static constructor into a register,
3485 set the initial value as zero so we can fold the value into
3486 a constant. But if more than one register is involved,
3487 this probably loses. */
3488 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3489 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
3492 emit_move_insn (target, const0_rtx);
3497 /* If the constructor has fewer fields than the structure
3498 or if we are initializing the structure to mostly zeros,
3499 clear the whole structure first. */
3500 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3501 != list_length (TYPE_FIELDS (type)))
3502 || mostly_zeros_p (exp))
3505 clear_storage (target, expr_size (exp),
3506 TYPE_ALIGN (type) / BITS_PER_UNIT);
3511 /* Inform later passes that the old value is dead. */
3512 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3514 /* Store each element of the constructor into
3515 the corresponding field of TARGET. */
3517 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3519 register tree field = TREE_PURPOSE (elt);
3520 register enum machine_mode mode;
3524 tree pos, constant = 0, offset = 0;
3525 rtx to_rtx = target;
3527 /* Just ignore missing fields.
3528 We cleared the whole structure, above,
3529 if any fields are missing. */
3533 if (cleared && is_zeros_p (TREE_VALUE (elt)))
3536 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3537 unsignedp = TREE_UNSIGNED (field);
3538 mode = DECL_MODE (field);
3539 if (DECL_BIT_FIELD (field))
3542 pos = DECL_FIELD_BITPOS (field);
3543 if (TREE_CODE (pos) == INTEGER_CST)
3545 else if (TREE_CODE (pos) == PLUS_EXPR
3546 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3547 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3552 bitpos = TREE_INT_CST_LOW (constant);
3558 if (contains_placeholder_p (offset))
3559 offset = build (WITH_RECORD_EXPR, sizetype,
3562 offset = size_binop (FLOOR_DIV_EXPR, offset,
3563 size_int (BITS_PER_UNIT));
3565 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3566 if (GET_CODE (to_rtx) != MEM)
3570 = change_address (to_rtx, VOIDmode,
3571 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
3572 force_reg (ptr_mode, offset_rtx)));
3574 if (TREE_READONLY (field))
3576 if (GET_CODE (to_rtx) == MEM)
3577 to_rtx = change_address (to_rtx, GET_MODE (to_rtx),
3579 RTX_UNCHANGING_P (to_rtx) = 1;
3582 store_constructor_field (to_rtx, bitsize, bitpos,
3583 mode, TREE_VALUE (elt), type, cleared);
3586 else if (TREE_CODE (type) == ARRAY_TYPE)
3591 tree domain = TYPE_DOMAIN (type);
3592 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3593 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3594 tree elttype = TREE_TYPE (type);
3596 /* If the constructor has fewer elements than the array,
3597 clear the whole array first. Similarly if this is a
3598 static constructor of a non-BLKmode object. */
3599 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3603 HOST_WIDE_INT count = 0, zero_count = 0;
3605 /* This loop is a more accurate version of the loop in
3606 mostly_zeros_p (it handles RANGE_EXPR in an index).
3607 It is also needed to check for missing elements. */
3608 for (elt = CONSTRUCTOR_ELTS (exp);
3610 elt = TREE_CHAIN (elt))
3612 tree index = TREE_PURPOSE (elt);
3613 HOST_WIDE_INT this_node_count;
3614 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3616 tree lo_index = TREE_OPERAND (index, 0);
3617 tree hi_index = TREE_OPERAND (index, 1);
3618 if (TREE_CODE (lo_index) != INTEGER_CST
3619 || TREE_CODE (hi_index) != INTEGER_CST)
3624 this_node_count = TREE_INT_CST_LOW (hi_index)
3625 - TREE_INT_CST_LOW (lo_index) + 1;
3628 this_node_count = 1;
3629 count += this_node_count;
3630 if (mostly_zeros_p (TREE_VALUE (elt)))
3631 zero_count += this_node_count;
3633 /* Clear the entire array first if there are any missing elements,
3634 or if the incidence of zero elements is >= 75%. */
3635 if (count < maxelt - minelt + 1
3636 || 4 * zero_count >= 3 * count)
3642 clear_storage (target, expr_size (exp),
3643 TYPE_ALIGN (type) / BITS_PER_UNIT);
3647 /* Inform later passes that the old value is dead. */
3648 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3650 /* Store each element of the constructor into
3651 the corresponding element of TARGET, determined
3652 by counting the elements. */
3653 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3655 elt = TREE_CHAIN (elt), i++)
3657 register enum machine_mode mode;
3661 tree value = TREE_VALUE (elt);
3662 tree index = TREE_PURPOSE (elt);
3663 rtx xtarget = target;
3665 if (cleared && is_zeros_p (value))
3668 mode = TYPE_MODE (elttype);
3669 bitsize = GET_MODE_BITSIZE (mode);
3670 unsignedp = TREE_UNSIGNED (elttype);
3672 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3674 tree lo_index = TREE_OPERAND (index, 0);
3675 tree hi_index = TREE_OPERAND (index, 1);
3676 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
3677 struct nesting *loop;
3678 HOST_WIDE_INT lo, hi, count;
3681 /* If the range is constant and "small", unroll the loop. */
3682 if (TREE_CODE (lo_index) == INTEGER_CST
3683 && TREE_CODE (hi_index) == INTEGER_CST
3684 && (lo = TREE_INT_CST_LOW (lo_index),
3685 hi = TREE_INT_CST_LOW (hi_index),
3686 count = hi - lo + 1,
3687 (GET_CODE (target) != MEM
3689 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
3690 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
3693 lo -= minelt; hi -= minelt;
3694 for (; lo <= hi; lo++)
3696 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
3697 store_constructor_field (target, bitsize, bitpos,
3698 mode, value, type, cleared);
3703 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
3704 loop_top = gen_label_rtx ();
3705 loop_end = gen_label_rtx ();
3707 unsignedp = TREE_UNSIGNED (domain);
3709 index = build_decl (VAR_DECL, NULL_TREE, domain);
3711 DECL_RTL (index) = index_r
3712 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
3715 if (TREE_CODE (value) == SAVE_EXPR
3716 && SAVE_EXPR_RTL (value) == 0)
3718 /* Make sure value gets expanded once before the loop. */
3720 expand_expr (value, const0_rtx, VOIDmode, 0);
3723 store_expr (lo_index, index_r, 0);
3724 loop = expand_start_loop (0);
3726 /* Assign value to element index. */
3727 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3728 size_int (BITS_PER_UNIT));
3729 position = size_binop (MULT_EXPR,
3730 size_binop (MINUS_EXPR, index,
3731 TYPE_MIN_VALUE (domain)),
3733 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3734 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3735 xtarget = change_address (target, mode, addr);
3736 if (TREE_CODE (value) == CONSTRUCTOR)
3737 store_constructor (value, xtarget, cleared);
3739 store_expr (value, xtarget, 0);
3741 expand_exit_loop_if_false (loop,
3742 build (LT_EXPR, integer_type_node,
3745 expand_increment (build (PREINCREMENT_EXPR,
3747 index, integer_one_node), 0, 0);
3749 emit_label (loop_end);
3751 /* Needed by stupid register allocation, to extend the
3752 lifetime of pseudo-regs used by target past the end of the loop. */
3754 emit_insn (gen_rtx (USE, GET_MODE (target), target));
3757 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
3758 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
3764 index = size_int (i);
3767 index = size_binop (MINUS_EXPR, index,
3768 TYPE_MIN_VALUE (domain));
3769 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3770 size_int (BITS_PER_UNIT));
3771 position = size_binop (MULT_EXPR, index, position);
3772 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3773 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3774 xtarget = change_address (target, mode, addr);
3775 store_expr (value, xtarget, 0);
3780 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
3781 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3783 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3784 store_constructor_field (target, bitsize, bitpos,
3785 mode, value, type, cleared);
3789 /* Set-constructor assignments. */
3790 else if (TREE_CODE (type) == SET_TYPE)
3792 tree elt = CONSTRUCTOR_ELTS (exp);
3793 rtx xtarget = XEXP (target, 0);
3794 int set_word_size = TYPE_ALIGN (type);
3795 int nbytes = int_size_in_bytes (type), nbits;
3796 tree domain = TYPE_DOMAIN (type);
3797 tree domain_min, domain_max, bitlength;
3799 /* The default implementation strategy is to extract the constant
3800 parts of the constructor, use that to initialize the target,
3801 and then "or" in whatever non-constant ranges we need in addition.
3803 If a large set is all zero or all ones, it is
3804 probably better to set it using memset (if available) or bzero.
3805 Also, if a large set has just a single range, it may also be
3806 better to first clear the whole set (using
3807 bzero/memset), and then set the bits we want. */
3809 /* Check for all zeros. */
3810 if (elt == NULL_TREE)
3813 clear_storage (target, expr_size (exp),
3814 TYPE_ALIGN (type) / BITS_PER_UNIT);
3818 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3819 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3820 bitlength = size_binop (PLUS_EXPR,
3821 size_binop (MINUS_EXPR, domain_max, domain_min),
3824 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
3826 nbits = TREE_INT_CST_LOW (bitlength);
3828 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
3829 are "complicated" (more than one range), initialize (the
3830 constant parts) by copying from a constant. */
3831 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
3832 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
3834 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3835 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
3836 char *bit_buffer = (char *) alloca (nbits);
3837 HOST_WIDE_INT word = 0;
3840 int offset = 0; /* In bytes from beginning of set. */
3841 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
3844 if (bit_buffer[ibit])
3846 if (BYTES_BIG_ENDIAN)
3847 word |= (1 << (set_word_size - 1 - bit_pos));
3849 word |= 1 << bit_pos;
3852 if (bit_pos >= set_word_size || ibit == nbits)
3854 if (word != 0 || ! cleared)
3856 rtx datum = GEN_INT (word);
3858 /* The assumption here is that it is safe to use
3859 XEXP if the set is multi-word, but not if
3860 it's single-word. */
3861 if (GET_CODE (target) == MEM)
3863 to_rtx = plus_constant (XEXP (target, 0), offset);
3864 to_rtx = change_address (target, mode, to_rtx);
3866 else if (offset == 0)
3870 emit_move_insn (to_rtx, datum);
3876 offset += set_word_size / BITS_PER_UNIT;
3882 /* Don't bother clearing storage if the set is all ones. */
3883 if (TREE_CHAIN (elt) != NULL_TREE
3884 || (TREE_PURPOSE (elt) == NULL_TREE
3886 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
3887 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
3888 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
3889 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
3891 clear_storage (target, expr_size (exp),
3892 TYPE_ALIGN (type) / BITS_PER_UNIT);
3895 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
3897 /* start of range of element or NULL */
3898 tree startbit = TREE_PURPOSE (elt);
3899 /* end of range of element, or element value */
3900 tree endbit = TREE_VALUE (elt);
3901 HOST_WIDE_INT startb, endb;
3902 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
3904 bitlength_rtx = expand_expr (bitlength,
3905 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
3907 /* handle non-range tuple element like [ expr ] */
3908 if (startbit == NULL_TREE)
3910 startbit = save_expr (endbit);
3913 startbit = convert (sizetype, startbit);
3914 endbit = convert (sizetype, endbit);
3915 if (! integer_zerop (domain_min))
3917 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
3918 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
3920 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
3921 EXPAND_CONST_ADDRESS);
3922 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
3923 EXPAND_CONST_ADDRESS);
3927 targetx = assign_stack_temp (GET_MODE (target),
3928 GET_MODE_SIZE (GET_MODE (target)),
3930 emit_move_insn (targetx, target);
3932 else if (GET_CODE (target) == MEM)
3937 #ifdef TARGET_MEM_FUNCTIONS
3938 /* Optimization: If startbit and endbit are
3939 constants divisible by BITS_PER_UNIT,
3940 call memset instead. */
3941 if (TREE_CODE (startbit) == INTEGER_CST
3942 && TREE_CODE (endbit) == INTEGER_CST
3943 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
3944 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
3946 emit_library_call (memset_libfunc, 0,
3948 plus_constant (XEXP (targetx, 0),
3949 startb / BITS_PER_UNIT),
3951 constm1_rtx, TYPE_MODE (integer_type_node),
3952 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3953 TYPE_MODE (sizetype));
3958 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
3959 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
3960 bitlength_rtx, TYPE_MODE (sizetype),
3961 startbit_rtx, TYPE_MODE (sizetype),
3962 endbit_rtx, TYPE_MODE (sizetype));
3965 emit_move_insn (target, targetx);
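/* A worked example of the word-building loop above, assuming
   set_word_size == 8 and !BYTES_BIG_ENDIAN: set members { 1, 3 }
   set bit_pos 1 and 3, giving

       word = (1 << 1) | (1 << 3) = 0x0a;

   on a BYTES_BIG_ENDIAN target the same members yield
   (1 << 6) | (1 << 4) = 0x50, mirrored within the word.  */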
3973 /* Store the value of EXP (an expression tree)
3974 into a subfield of TARGET which has mode MODE and occupies
3975 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3976 If MODE is VOIDmode, it means that we are storing into a bit-field.
3978 If VALUE_MODE is VOIDmode, return nothing in particular.
3979 UNSIGNEDP is not used in this case.
3981 Otherwise, return an rtx for the value stored. This rtx
3982 has mode VALUE_MODE if that is convenient to do.
3983 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3985 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3986 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
3989 store_field (target, bitsize, bitpos, mode, exp, value_mode,
3990 unsignedp, align, total_size)
3992 int bitsize, bitpos;
3993 enum machine_mode mode;
3995 enum machine_mode value_mode;
4000 HOST_WIDE_INT width_mask = 0;
4002 if (bitsize < HOST_BITS_PER_WIDE_INT)
4003 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4005 /* If we are storing into an unaligned field of an aligned union that is
4006 in a register, we may have the mode of TARGET being an integer mode but
4007 MODE == BLKmode. In that case, get an aligned object whose size and
4008 alignment are the same as TARGET and store TARGET into it (we can avoid
4009 the store if the field being stored is the entire width of TARGET). Then
4010 call ourselves recursively to store the field into a BLKmode version of
4011 that object. Finally, load from the object into TARGET. This is not
4012 very efficient in general, but should only be slightly more expensive
4013 than the otherwise-required unaligned accesses. Perhaps this can be
4014 cleaned up later. */
4017 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4019 rtx object = assign_stack_temp (GET_MODE (target),
4020 GET_MODE_SIZE (GET_MODE (target)), 0);
4021 rtx blk_object = copy_rtx (object);
4023 MEM_IN_STRUCT_P (object) = 1;
4024 MEM_IN_STRUCT_P (blk_object) = 1;
4025 PUT_MODE (blk_object, BLKmode);
4027 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4028 emit_move_insn (object, target);
4030 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4033 /* Even though we aren't returning target, we need to
4034 give it the updated value. */
4035 emit_move_insn (target, object);
4040 /* If the structure is in a register or if the component
4041 is a bit field, we cannot use addressing to access it.
4042 Use bit-field techniques or SUBREG to store in it. */
4044 if (mode == VOIDmode
4045 || (mode != BLKmode && ! direct_store[(int) mode])
4046 || GET_CODE (target) == REG
4047 || GET_CODE (target) == SUBREG
4048 /* If the field isn't aligned enough to store as an ordinary memref,
4049 store it as a bit field. */
4050 || (SLOW_UNALIGNED_ACCESS
4051 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4052 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4054 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4056 /* If BITSIZE is narrower than the size of the type of EXP
4057 we will be narrowing TEMP. Normally, what's wanted are the
4058 low-order bits. However, if EXP's type is a record and this is a
4059 big-endian machine, we want the upper BITSIZE bits. */
4060 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4061 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4062 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4063 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4064 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4068 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
4070 if (mode != VOIDmode && mode != BLKmode
4071 && mode != TYPE_MODE (TREE_TYPE (exp)))
4072 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4074 /* If the modes of TARGET and TEMP are both BLKmode, both
4075 must be in memory and BITPOS must be aligned on a byte
4076 boundary. If so, we simply do a block copy. */
4077 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4079 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4080 || bitpos % BITS_PER_UNIT != 0)
4083 target = change_address (target, VOIDmode,
4084 plus_constant (XEXP (target, 0),
4085 bitpos / BITS_PER_UNIT));
4087 emit_block_move (target, temp,
4088 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4092 return value_mode == VOIDmode ? const0_rtx : target;
4095 /* Store the value in the bitfield. */
4096 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4097 if (value_mode != VOIDmode)
4099 /* The caller wants an rtx for the value. */
4100 /* If possible, avoid refetching from the bitfield itself. */
4102 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4105 enum machine_mode tmode;
4108 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4109 tmode = GET_MODE (temp);
4110 if (tmode == VOIDmode)
4112 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4113 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4114 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4116 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4117 NULL_RTX, value_mode, 0, align,
4124 rtx addr = XEXP (target, 0);
4127 /* If a value is wanted, it must be the lhs;
4128 so make the address stable for multiple use. */
4130 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4131 && ! CONSTANT_ADDRESS_P (addr)
4132 /* A frame-pointer reference is already stable. */
4133 && ! (GET_CODE (addr) == PLUS
4134 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4135 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4136 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4137 addr = copy_to_reg (addr);
4139 /* Now build a reference to just the desired component. */
4141 to_rtx = change_address (target, mode,
4142 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
4143 MEM_IN_STRUCT_P (to_rtx) = 1;
4145 return store_expr (exp, to_rtx, value_mode != VOIDmode);
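/* Worked numbers for the bit-field path above, purely illustrative:
   with BITSIZE == 5, WIDTH_MASK == (1 << 5) - 1 == 31, so an
   unsigned return value can be truncated with expand_and; a signed
   one is sign-extended by shifting left and then right by
   GET_MODE_BITSIZE (tmode) - 5 bits.  */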
4149 /* Return true if any object containing the innermost array is an unaligned
4150 packed structure field. */
4153 get_inner_unaligned_p (exp)
4156 int needed_alignment = TYPE_ALIGN (TREE_TYPE (exp));
4160 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4162 if (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
4166 else if (TREE_CODE (exp) != ARRAY_REF
4167 && TREE_CODE (exp) != NON_LVALUE_EXPR
4168 && ! ((TREE_CODE (exp) == NOP_EXPR
4169 || TREE_CODE (exp) == CONVERT_EXPR)
4170 && (TYPE_MODE (TREE_TYPE (exp))
4171 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4174 exp = TREE_OPERAND (exp, 0);
4180 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4181 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4182 ARRAY_REFs and find the ultimate containing object, which we return.
4184 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4185 bit position, and *PUNSIGNEDP to the signedness of the field.
4186 If the position of the field is variable, we store a tree
4187 giving the variable offset (in units) in *POFFSET.
4188 This offset is in addition to the bit position.
4189 If the position is not variable, we store 0 in *POFFSET.
4190 We set *PALIGNMENT to the alignment in bytes of the address that will be
4191 computed. This is the alignment of the thing we return if *POFFSET
4192 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4194 If any of the extraction expressions is volatile,
4195 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4197 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4198 is a mode that can be used to access the field. In that case, *PBITSIZE is redundant.
4201 If the field describes a variable-sized object, *PMODE is set to
4202 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4203 this case, but the address of the object can be found. */
4206 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4207 punsignedp, pvolatilep, palignment)
4212 enum machine_mode *pmode;
4217 tree orig_exp = exp;
4219 enum machine_mode mode = VOIDmode;
4220 tree offset = integer_zero_node;
4221 int alignment = BIGGEST_ALIGNMENT;
4223 if (TREE_CODE (exp) == COMPONENT_REF)
4225 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4226 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4227 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4228 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4230 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4232 size_tree = TREE_OPERAND (exp, 1);
4233 *punsignedp = TREE_UNSIGNED (exp);
4237 mode = TYPE_MODE (TREE_TYPE (exp));
4238 *pbitsize = GET_MODE_BITSIZE (mode);
4239 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4244 if (TREE_CODE (size_tree) != INTEGER_CST)
4245 mode = BLKmode, *pbitsize = -1;
4247 *pbitsize = TREE_INT_CST_LOW (size_tree);
4250 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4251 and find the ultimate containing object. */
4257 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4259 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4260 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4261 : TREE_OPERAND (exp, 2));
4262 tree constant = integer_zero_node, var = pos;
4264 /* If this field hasn't been filled in yet, don't go
4265 past it. This should only happen when folding expressions
4266 made during type construction. */
4270 /* Assume here that the offset is a multiple of a unit.
4271 If not, there should be an explicitly added constant. */
4272 if (TREE_CODE (pos) == PLUS_EXPR
4273 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4274 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4275 else if (TREE_CODE (pos) == INTEGER_CST)
4276 constant = pos, var = integer_zero_node;
4278 *pbitpos += TREE_INT_CST_LOW (constant);
4279 offset = size_binop (PLUS_EXPR, offset,
4280 size_binop (EXACT_DIV_EXPR, var,
4281 size_int (BITS_PER_UNIT)));
4284 else if (TREE_CODE (exp) == ARRAY_REF)
4286 /* This code is based on the code in case ARRAY_REF in expand_expr
4287 below. We assume here that the size of an array element is
4288 always an integral multiple of BITS_PER_UNIT. */
4290 tree index = TREE_OPERAND (exp, 1);
4291 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4293 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4294 tree index_type = TREE_TYPE (index);
4296 if (! integer_zerop (low_bound))
4297 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
4299 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4301 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4303 index_type = TREE_TYPE (index);
4306 index = fold (build (MULT_EXPR, index_type, index,
4307 convert (index_type,
4308 TYPE_SIZE (TREE_TYPE (exp)))));
4310 if (TREE_CODE (index) == INTEGER_CST
4311 && TREE_INT_CST_HIGH (index) == 0)
4312 *pbitpos += TREE_INT_CST_LOW (index);
4314 offset = size_binop (PLUS_EXPR, offset,
4315 size_binop (FLOOR_DIV_EXPR, index,
4316 size_int (BITS_PER_UNIT)));
4318 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4319 && ! ((TREE_CODE (exp) == NOP_EXPR
4320 || TREE_CODE (exp) == CONVERT_EXPR)
4321 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4322 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4324 && (TYPE_MODE (TREE_TYPE (exp))
4325 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4328 /* If any reference in the chain is volatile, the effect is volatile. */
4329 if (TREE_THIS_VOLATILE (exp))
4332 /* If the offset is non-constant already, then we can't assume any
4333 alignment more than the alignment here. */
4334 if (! integer_zerop (offset))
4335 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4337 exp = TREE_OPERAND (exp, 0);
4340 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4341 alignment = MIN (alignment, DECL_ALIGN (exp));
4342 else if (TREE_TYPE (exp) != 0)
4343 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4345 if (integer_zerop (offset))
4348 if (offset != 0 && contains_placeholder_p (offset))
4349 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4353 *palignment = alignment / BITS_PER_UNIT;
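/* Sketch of a typical call, illustrative only; the surrounding
   declarations are assumed and are not taken from this file.  */
#if 0
{
  int bitsize, bitpos, unsignedp, volatilep, alignment;
  enum machine_mode mode1;
  tree offset;
  tree inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				    &mode1, &unsignedp, &volatilep,
				    &alignment);
  /* INNER is the ultimate containing object; a nonzero OFFSET must be
     expanded and added to INNER's address before the field is accessed.  */
}
#endif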
4357 /* Given an rtx VALUE that may contain additions and multiplications,
4358 return an equivalent value that just refers to a register or memory.
4359 This is done by generating instructions to perform the arithmetic
4360 and returning a pseudo-register containing the value.
4362 The returned value may be a REG, SUBREG, MEM or constant. */
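/* For instance (illustrative), given VALUE = (plus:SI (mult:SI (reg:SI 60)
   (const_int 4)) (reg:SI 61)), force_operand emits the multiply and the
   add and returns a pseudo register holding the sum; a VALUE that is
   already a register, memory reference or constant comes back unchanged.  */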
4365 force_operand (value, target)
4368 register optab binoptab = 0;
4369 /* Use a temporary to force order of execution of calls to `force_operand'.  */
4373 /* Use subtarget as the target for operand 0 of a binary operation. */
4374 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4376 if (GET_CODE (value) == PLUS)
4377 binoptab = add_optab;
4378 else if (GET_CODE (value) == MINUS)
4379 binoptab = sub_optab;
4380 else if (GET_CODE (value) == MULT)
4382 op2 = XEXP (value, 1);
4383 if (!CONSTANT_P (op2)
4384 && !(GET_CODE (op2) == REG && op2 != subtarget))
4386 tmp = force_operand (XEXP (value, 0), subtarget);
4387 return expand_mult (GET_MODE (value), tmp,
4388 force_operand (op2, NULL_RTX),
4394 op2 = XEXP (value, 1);
4395 if (!CONSTANT_P (op2)
4396 && !(GET_CODE (op2) == REG && op2 != subtarget))
4398 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4400 binoptab = add_optab;
4401 op2 = negate_rtx (GET_MODE (value), op2);
4404 /* Check for an addition with OP2 a constant integer and our first
4405 operand a PLUS of a virtual register and something else. In that
4406 case, we want to emit the sum of the virtual register and the
4407 constant first and then add the other value. This allows virtual
4408 register instantiation to simply modify the constant rather than
4409 creating another one around this addition. */
4410 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4411 && GET_CODE (XEXP (value, 0)) == PLUS
4412 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4413 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4414 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4416 rtx temp = expand_binop (GET_MODE (value), binoptab,
4417 XEXP (XEXP (value, 0), 0), op2,
4418 subtarget, 0, OPTAB_LIB_WIDEN);
4419 return expand_binop (GET_MODE (value), binoptab, temp,
4420 force_operand (XEXP (XEXP (value, 0), 1), 0),
4421 target, 0, OPTAB_LIB_WIDEN);
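/* E.g. (illustrative) for (plus (plus (reg virtual-stack-vars) (reg 65))
   (const_int 8)), the code above adds (const_int 8) to the virtual
   register first, so instantiation can fold the constant into the
   register's offset instead of emitting a separate add.  */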
4424 tmp = force_operand (XEXP (value, 0), subtarget);
4425 return expand_binop (GET_MODE (value), binoptab, tmp,
4426 force_operand (op2, NULL_RTX),
4427 target, 0, OPTAB_LIB_WIDEN);
4428 /* We give UNSIGNEDP = 0 to expand_binop
4429 because the only operations we are expanding here are signed ones. */
4434 /* Subroutine of expand_expr:
4435 save the non-copied parts (LIST) of an expr (LHS), and return a list
4436 which can restore these values to their previous values,
4437 should something modify their storage. */
4440 save_noncopied_parts (lhs, list)
4447 for (tail = list; tail; tail = TREE_CHAIN (tail))
4448 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4449 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
4452 tree part = TREE_VALUE (tail);
4453 tree part_type = TREE_TYPE (part);
4454 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
4455 rtx target = assign_temp (part_type, 0, 1, 1);
4456 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
4457 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
4458 parts = tree_cons (to_be_saved,
4459 build (RTL_EXPR, part_type, NULL_TREE,
4462 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
4467 /* Subroutine of expand_expr:
4468 record the non-copied parts (LIST) of an expr (LHS), and return a list
4469 which specifies the initial values of these parts. */
4472 init_noncopied_parts (lhs, list)
4479 for (tail = list; tail; tail = TREE_CHAIN (tail))
4480 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4481 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
4484 tree part = TREE_VALUE (tail);
4485 tree part_type = TREE_TYPE (part);
4486 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
4487 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
4492 /* Subroutine of expand_expr: return nonzero iff there is no way that
4493 EXP can reference X, which is being modified. */
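/* Examples (illustrative): if X is a pseudo register and EXP is `a + b'
   where neither operand's rtl mentions X, the answer is 1; if EXP is a
   CALL_EXPR and X is a MEM, the answer is 0, since the call must be
   assumed to clobber all of memory.  */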
4496 safe_from_p (x, exp)
4504 /* If EXP has varying size, we MUST use a target since we currently
4505 have no way of allocating temporaries of variable size
4506 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
4507 So we assume here that something at a higher level has prevented a
4508 clash. This is somewhat bogus, but the best we can do. Only
4509 do this when X is BLKmode. */
4510 || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4511 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
4512 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
4513 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
4514 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
4516 && GET_MODE (x) == BLKmode))
4519 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
4520 find the underlying pseudo. */
4521 if (GET_CODE (x) == SUBREG)
4524 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4528 /* If X is a location in the outgoing argument area, it is always safe. */
4529 if (GET_CODE (x) == MEM
4530 && (XEXP (x, 0) == virtual_outgoing_args_rtx
4531 || (GET_CODE (XEXP (x, 0)) == PLUS
4532 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
4535 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4538 exp_rtl = DECL_RTL (exp);
4545 if (TREE_CODE (exp) == TREE_LIST)
4546 return ((TREE_VALUE (exp) == 0
4547 || safe_from_p (x, TREE_VALUE (exp)))
4548 && (TREE_CHAIN (exp) == 0
4549 || safe_from_p (x, TREE_CHAIN (exp))));
4554 return safe_from_p (x, TREE_OPERAND (exp, 0));
4558 return (safe_from_p (x, TREE_OPERAND (exp, 0))
4559 && safe_from_p (x, TREE_OPERAND (exp, 1)));
4563 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
4564 the expression. If it is set, we conflict iff we are that rtx or
4565 both are in memory. Otherwise, we check all operands of the
4566 expression recursively. */
4568 switch (TREE_CODE (exp))
4571 return (staticp (TREE_OPERAND (exp, 0))
4572 || safe_from_p (x, TREE_OPERAND (exp, 0)));
4575 if (GET_CODE (x) == MEM)
4580 exp_rtl = CALL_EXPR_RTL (exp);
4583 /* Assume that the call will clobber all hard registers and all of memory.  */
4585 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4586 || GET_CODE (x) == MEM)
4593 /* If a sequence exists, we would have to scan every instruction
4594 in the sequence to see if it was safe.  This is probably not worthwhile.  */
4596 if (RTL_EXPR_SEQUENCE (exp))
4599 exp_rtl = RTL_EXPR_RTL (exp);
4602 case WITH_CLEANUP_EXPR:
4603 exp_rtl = RTL_EXPR_RTL (exp);
4606 case CLEANUP_POINT_EXPR:
4607 return safe_from_p (x, TREE_OPERAND (exp, 0));
4610 exp_rtl = SAVE_EXPR_RTL (exp);
4614 /* The only operand we look at is operand 1. The rest aren't
4615 part of the expression. */
4616 return safe_from_p (x, TREE_OPERAND (exp, 1));
4618 case METHOD_CALL_EXPR:
4619 /* This takes a rtx argument, but shouldn't appear here. */
4623 /* If we have an rtx, we do not need to scan our operands. */
4627 nops = tree_code_length[(int) TREE_CODE (exp)];
4628 for (i = 0; i < nops; i++)
4629 if (TREE_OPERAND (exp, i) != 0
4630 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
4634 /* If we have an rtl, find any enclosed object.  Then see if we conflict with it.  */
4638 if (GET_CODE (exp_rtl) == SUBREG)
4640 exp_rtl = SUBREG_REG (exp_rtl);
4641 if (GET_CODE (exp_rtl) == REG
4642 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
4646 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
4647 are memory and EXP is not readonly. */
4648 return ! (rtx_equal_p (x, exp_rtl)
4649 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
4650 && ! TREE_READONLY (exp)));
4653 /* If we reach here, it is safe. */
4657 /* Subroutine of expand_expr: return nonzero iff EXP is an
4658 expression whose type is statically determinable. */
4664 if (TREE_CODE (exp) == PARM_DECL
4665 || TREE_CODE (exp) == VAR_DECL
4666 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
4667 || TREE_CODE (exp) == COMPONENT_REF
4668 || TREE_CODE (exp) == ARRAY_REF)
4673 /* Subroutine of expand_expr: return rtx if EXP is a
4674 variable or parameter; else return 0. */
4681 switch (TREE_CODE (exp))
4685 return DECL_RTL (exp);
4691 /* expand_expr: generate code for computing expression EXP.
4692 An rtx for the computed value is returned. The value is never null.
4693 In the case of a void EXP, const0_rtx is returned.
4695 The value may be stored in TARGET if TARGET is nonzero.
4696 TARGET is just a suggestion; callers must assume that
4697 the rtx returned may not be the same as TARGET.
4699 If TARGET is CONST0_RTX, it means that the value will be ignored.
4701 If TMODE is not VOIDmode, it suggests generating the
4702 result in mode TMODE. But this is done only when convenient.
4703 Otherwise, TMODE is ignored and the value is generated in its natural mode.
4704 TMODE is just a suggestion; callers must assume that
4705 the rtx returned may not have mode TMODE.
4707 Note that TARGET may have neither TMODE nor MODE. In that case, it
4708 probably will not be used.
4710 If MODIFIER is EXPAND_SUM then when EXP is an addition
4711 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4712 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4713 products as above, or REG or MEM, or constant.
4714 Ordinarily in such cases we would output mul or add instructions
4715 and then return a pseudo reg containing the sum.
4717 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4718 it also marks a label as absolutely required (it can't be dead).
4719 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
4720 This is used for outputting expressions used in initializers.
4722 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4723 with a constant address even if that address is not normally legitimate.
4724 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
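/* A typical use of EXPAND_SUM (illustrative): expanding the address
   computation for `a[i]' with

       op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
       op0 = memory_address (mode, op0);

   may yield (plus:SI (mult:SI (reg:SI 66) (const_int 4))
   (symbol_ref:SI "a")), which memory_address can legitimize as a whole
   instead of forcing each partial sum into a register.  */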
4727 expand_expr (exp, target, tmode, modifier)
4730 enum machine_mode tmode;
4731 enum expand_modifier modifier;
4733 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4734 This is static so it will be accessible to our recursive callees. */
4735 static tree placeholder_list = 0;
4736 register rtx op0, op1, temp;
4737 tree type = TREE_TYPE (exp);
4738 int unsignedp = TREE_UNSIGNED (type);
4739 register enum machine_mode mode = TYPE_MODE (type);
4740 register enum tree_code code = TREE_CODE (exp);
4742 /* Use subtarget as the target for operand 0 of a binary operation. */
4743 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4744 rtx original_target = target;
4745 /* Maybe defer this until we are sure we are not doing bytecode?  */
4746 int ignore = (target == const0_rtx
4747 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4748 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4749 || code == COND_EXPR)
4750 && TREE_CODE (type) == VOID_TYPE));
4754 if (output_bytecode && modifier != EXPAND_INITIALIZER)
4756 bc_expand_expr (exp);
4760 /* Don't use hard regs as subtargets, because the combiner
4761 can only handle pseudo regs. */
4762 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4764 /* Avoid subtargets inside loops,
4765 since they hide some invariant expressions. */
4766 if (preserve_subexpressions_p ())
4769 /* If we are going to ignore this result, we need only do something
4770 if there is a side-effect somewhere in the expression. If there
4771 is, short-circuit the most common cases here. Note that we must
4772 not call expand_expr with anything but const0_rtx in case this
4773 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
4777 if (! TREE_SIDE_EFFECTS (exp))
4780 /* Ensure we reference a volatile object even if value is ignored. */
4781 if (TREE_THIS_VOLATILE (exp)
4782 && TREE_CODE (exp) != FUNCTION_DECL
4783 && mode != VOIDmode && mode != BLKmode)
4785 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
4786 if (GET_CODE (temp) == MEM)
4787 temp = copy_to_reg (temp);
4791 if (TREE_CODE_CLASS (code) == '1')
4792 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4793 VOIDmode, modifier);
4794 else if (TREE_CODE_CLASS (code) == '2'
4795 || TREE_CODE_CLASS (code) == '<')
4797 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4798 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
4801 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4802 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4803 /* If the second operand has no side effects, just evaluate the first.  */
4805 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4806 VOIDmode, modifier);
4811 /* If we will do cse, generate all results into pseudo registers
4812 since 1) that allows cse to find more things
4813 and 2) otherwise cse could produce an insn the machine cannot support.  */
4816 if (! cse_not_expected && mode != BLKmode && target
4817 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4824 tree function = decl_function_context (exp);
4825 /* Handle using a label in a containing function. */
4826 if (function != current_function_decl && function != 0)
4828 struct function *p = find_function_data (function);
4829 /* Allocate in the memory associated with the function
4830 that the label is in. */
4831 push_obstacks (p->function_obstack,
4832 p->function_maybepermanent_obstack);
4834 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4835 label_rtx (exp), p->forced_labels);
4838 else if (modifier == EXPAND_INITIALIZER)
4839 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4840 label_rtx (exp), forced_labels);
4841 temp = gen_rtx (MEM, FUNCTION_MODE,
4842 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
4843 if (function != current_function_decl && function != 0)
4844 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
4849 if (DECL_RTL (exp) == 0)
4851 error_with_decl (exp, "prior parameter's size depends on `%s'");
4852 return CONST0_RTX (mode);
4855 /* ... fall through ... */
4858 /* If a static var's type was incomplete when the decl was written,
4859 but the type is complete now, lay out the decl now. */
4860 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4861 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
4863 push_obstacks_nochange ();
4864 end_temporary_allocation ();
4865 layout_decl (exp, 0);
4866 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
4870 /* ... fall through ... */
4874 if (DECL_RTL (exp) == 0)
4877 /* Ensure variable marked as used even if it doesn't go through
4878 a parser.  If it hasn't been used yet, write out an external definition.  */
4880 if (! TREE_USED (exp))
4882 assemble_external (exp);
4883 TREE_USED (exp) = 1;
4886 /* Show we haven't gotten RTL for this yet. */
4889 /* Handle variables inherited from containing functions. */
4890 context = decl_function_context (exp);
4892 /* We treat inline_function_decl as an alias for the current function
4893 because that is the inline function whose vars, types, etc.
4894 are being merged into the current function.
4895 See expand_inline_function. */
4897 if (context != 0 && context != current_function_decl
4898 && context != inline_function_decl
4899 /* If var is static, we don't need a static chain to access it. */
4900 && ! (GET_CODE (DECL_RTL (exp)) == MEM
4901 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
4905 /* Mark as non-local and addressable. */
4906 DECL_NONLOCAL (exp) = 1;
4907 if (DECL_NO_STATIC_CHAIN (current_function_decl))
4909 mark_addressable (exp);
4910 if (GET_CODE (DECL_RTL (exp)) != MEM)
4912 addr = XEXP (DECL_RTL (exp), 0);
4913 if (GET_CODE (addr) == MEM)
4914 addr = gen_rtx (MEM, Pmode,
4915 fix_lexical_addr (XEXP (addr, 0), exp));
4917 addr = fix_lexical_addr (addr, exp);
4918 temp = change_address (DECL_RTL (exp), mode, addr);
4921 /* This is the case of an array whose size is to be determined
4922 from its initializer, while the initializer is still being parsed.
4925 else if (GET_CODE (DECL_RTL (exp)) == MEM
4926 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
4927 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
4928 XEXP (DECL_RTL (exp), 0));
4930 /* If DECL_RTL is memory, we are in the normal case: if either
4931 the address is not valid, or it is not a register and -fforce-addr
4932 is specified, we get the address into a register.  */
4934 else if (GET_CODE (DECL_RTL (exp)) == MEM
4935 && modifier != EXPAND_CONST_ADDRESS
4936 && modifier != EXPAND_SUM
4937 && modifier != EXPAND_INITIALIZER
4938 && (! memory_address_p (DECL_MODE (exp),
4939 XEXP (DECL_RTL (exp), 0))
4941 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
4942 temp = change_address (DECL_RTL (exp), VOIDmode,
4943 copy_rtx (XEXP (DECL_RTL (exp), 0)));
4945 /* If we got something, return it.  But first, set the alignment
4946 if the address is a register.  */
4949 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
4950 mark_reg_pointer (XEXP (temp, 0),
4951 DECL_ALIGN (exp) / BITS_PER_UNIT);
4956 /* If the mode of DECL_RTL does not match that of the decl, it
4957 must be a promoted value. We return a SUBREG of the wanted mode,
4958 but mark it so that we know that it was already extended. */
4960 if (GET_CODE (DECL_RTL (exp)) == REG
4961 && GET_MODE (DECL_RTL (exp)) != mode)
4963 /* Get the signedness used for this variable. Ensure we get the
4964 same mode we got when the variable was declared. */
4965 if (GET_MODE (DECL_RTL (exp))
4966 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
4969 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
4970 SUBREG_PROMOTED_VAR_P (temp) = 1;
4971 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4975 return DECL_RTL (exp);
4978 return immed_double_const (TREE_INT_CST_LOW (exp),
4979 TREE_INT_CST_HIGH (exp),
4983 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
4986 /* If optimized, generate immediate CONST_DOUBLE
4987 which will be turned into memory by reload if necessary.
4989 We used to force a register so that loop.c could see it. But
4990 this does not allow gen_* patterns to perform optimizations with
4991 the constants. It also produces two insns in cases like "x = 1.0;".
4992 On most machines, floating-point constants are not permitted in
4993 many insns, so we'd end up copying it to a register in any case.
4995 Now, we do the copying in expand_binop, if appropriate. */
4996 return immed_real_const (exp);
5000 if (! TREE_CST_RTL (exp))
5001 output_constant_def (exp);
5003 /* TREE_CST_RTL probably contains a constant address.
5004 On RISC machines where a constant address isn't valid,
5005 make some insns to get that address into a register. */
5006 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5007 && modifier != EXPAND_CONST_ADDRESS
5008 && modifier != EXPAND_INITIALIZER
5009 && modifier != EXPAND_SUM
5010 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5012 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5013 return change_address (TREE_CST_RTL (exp), VOIDmode,
5014 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5015 return TREE_CST_RTL (exp);
5018 context = decl_function_context (exp);
5020 /* We treat inline_function_decl as an alias for the current function
5021 because that is the inline function whose vars, types, etc.
5022 are being merged into the current function.
5023 See expand_inline_function. */
5024 if (context == current_function_decl || context == inline_function_decl)
5027 /* If this is non-local, handle it. */
5030 temp = SAVE_EXPR_RTL (exp);
5031 if (temp && GET_CODE (temp) == REG)
5033 put_var_into_stack (exp);
5034 temp = SAVE_EXPR_RTL (exp);
5036 if (temp == 0 || GET_CODE (temp) != MEM)
5038 return change_address (temp, mode,
5039 fix_lexical_addr (XEXP (temp, 0), exp));
5041 if (SAVE_EXPR_RTL (exp) == 0)
5043 if (mode == VOIDmode)
5046 temp = assign_temp (type, 0, 0, 0);
5048 SAVE_EXPR_RTL (exp) = temp;
5049 if (!optimize && GET_CODE (temp) == REG)
5050 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
5053 /* If the mode of TEMP does not match that of the expression, it
5054 must be a promoted value. We pass store_expr a SUBREG of the
5055 wanted mode but mark it so that we know that it was already
5056 extended.  Note that `unsignedp' was modified above in this case.  */
5059 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5061 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
5062 SUBREG_PROMOTED_VAR_P (temp) = 1;
5063 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5066 if (temp == const0_rtx)
5067 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5069 store_expr (TREE_OPERAND (exp, 0), temp, 0);
5072 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5073 must be a promoted value. We return a SUBREG of the wanted mode,
5074 but mark it so that we know that it was already extended. */
5076 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5077 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5079 /* Compute the signedness and make the proper SUBREG. */
5080 promote_mode (type, mode, &unsignedp, 0);
5081 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
5082 SUBREG_PROMOTED_VAR_P (temp) = 1;
5083 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5087 return SAVE_EXPR_RTL (exp);
5092 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5093 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5097 case PLACEHOLDER_EXPR:
5098 /* If there is an object on the head of the placeholder list,
5099 see if some object in its references is of type TYPE.  For
5100 further information, see tree.def. */
5101 if (placeholder_list)
5104 tree old_list = placeholder_list;
5106 for (object = TREE_PURPOSE (placeholder_list);
5107 (TYPE_MAIN_VARIANT (TREE_TYPE (object))
5108 != TYPE_MAIN_VARIANT (type))
5109 && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
5110 || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
5111 || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
5112 || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
5113 object = TREE_OPERAND (object, 0))
5117 && (TYPE_MAIN_VARIANT (TREE_TYPE (object))
5118 == TYPE_MAIN_VARIANT (type)))
5120 /* Expand this object skipping the list entries before
5121 it was found in case it is also a PLACEHOLDER_EXPR.
5122 In that case, we want to translate it using subsequent entries.  */
5124 placeholder_list = TREE_CHAIN (placeholder_list);
5125 temp = expand_expr (object, original_target, tmode, modifier);
5126 placeholder_list = old_list;
5131 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5134 case WITH_RECORD_EXPR:
5135 /* Put the object on the placeholder list, expand our first operand,
5136 and pop the list. */
5137 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5139 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
5141 placeholder_list = TREE_CHAIN (placeholder_list);
5145 expand_exit_loop_if_false (NULL_PTR,
5146 invert_truthvalue (TREE_OPERAND (exp, 0)));
5151 expand_start_loop (1);
5152 expand_expr_stmt (TREE_OPERAND (exp, 0));
5160 tree vars = TREE_OPERAND (exp, 0);
5161 int vars_need_expansion = 0;
5163 /* Need to open a binding contour here because
5164 if there are any cleanups they must be contained here.  */
5165 expand_start_bindings (0);
5167 /* Mark the corresponding BLOCK for output in its proper place. */
5168 if (TREE_OPERAND (exp, 2) != 0
5169 && ! TREE_USED (TREE_OPERAND (exp, 2)))
5170 insert_block (TREE_OPERAND (exp, 2));
5172 /* If VARS have not yet been expanded, expand them now. */
5175 if (DECL_RTL (vars) == 0)
5177 vars_need_expansion = 1;
5180 expand_decl_init (vars);
5181 vars = TREE_CHAIN (vars);
5184 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
5186 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
5192 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
5194 emit_insns (RTL_EXPR_SEQUENCE (exp));
5195 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
5196 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
5197 free_temps_for_rtl_expr (exp);
5198 return RTL_EXPR_RTL (exp);
5201 /* If we don't need the result, just ensure we evaluate any subexpressions.  */
5206 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
5207 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
5211 /* All elts simple constants => refer to a constant in memory. But
5212 if this is a non-BLKmode mode, let it store a field at a time
5213 since that should make a CONST_INT or CONST_DOUBLE when we
5214 fold. Likewise, if we have a target we can use, it is best to
5215 store directly into the target unless the type is large enough
5216 that memcpy will be used. If we are making an initializer and
5217 all operands are constant, put it in memory as well. */
5218 else if ((TREE_STATIC (exp)
5219 && ((mode == BLKmode
5220 && ! (target != 0 && safe_from_p (target, exp)))
5221 || TREE_ADDRESSABLE (exp)
5222 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5223 && (move_by_pieces_ninsns
5224 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
5225 TYPE_ALIGN (type) / BITS_PER_UNIT)
5227 && ! mostly_zeros_p (exp))))
5228 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
5230 rtx constructor = output_constant_def (exp);
5231 if (modifier != EXPAND_CONST_ADDRESS
5232 && modifier != EXPAND_INITIALIZER
5233 && modifier != EXPAND_SUM
5234 && (! memory_address_p (GET_MODE (constructor),
5235 XEXP (constructor, 0))
5237 && GET_CODE (XEXP (constructor, 0)) != REG)))
5238 constructor = change_address (constructor, VOIDmode,
5239 XEXP (constructor, 0));
5245 /* Handle calls that pass values in multiple non-contiguous
5246 locations. The Irix 6 ABI has examples of this. */
5247 if (target == 0 || ! safe_from_p (target, exp)
5248 || GET_CODE (target) == PARALLEL)
5250 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
5251 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5253 target = assign_temp (type, 0, 1, 1);
5256 if (TREE_READONLY (exp))
5258 if (GET_CODE (target) == MEM)
5259 target = change_address (target, GET_MODE (target),
5261 RTX_UNCHANGING_P (target) = 1;
5264 store_constructor (exp, target, 0);
5270 tree exp1 = TREE_OPERAND (exp, 0);
5273 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
5274 op0 = memory_address (mode, op0);
5276 temp = gen_rtx (MEM, mode, op0);
5277 /* If address was computed by addition,
5278 mark this as an element of an aggregate. */
5279 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5280 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
5281 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
5282 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
5283 || (TREE_CODE (exp1) == ADDR_EXPR
5284 && (exp2 = TREE_OPERAND (exp1, 0))
5285 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
5286 MEM_IN_STRUCT_P (temp) = 1;
5287 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
5289 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5290 here, because, in C and C++, the fact that a location is accessed
5291 through a pointer to const does not mean that the value there can
5292 never change. Languages where it can never change should
5293 also set TREE_STATIC. */
5294 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
5299 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
5303 tree array = TREE_OPERAND (exp, 0);
5304 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5305 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5306 tree index = TREE_OPERAND (exp, 1);
5307 tree index_type = TREE_TYPE (index);
5310 if (TREE_CODE (low_bound) != INTEGER_CST
5311 && contains_placeholder_p (low_bound))
5312 low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
5314 /* Optimize the special-case of a zero lower bound.
5316 We convert the low_bound to sizetype to avoid some problems
5317 with constant folding. (E.g. suppose the lower bound is 1,
5318 and its mode is QI. Without the conversion, (ARRAY
5319 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5320 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5322 But sizetype isn't quite right either (especially if
5323 the low bound is negative).  FIXME */
5325 if (! integer_zerop (low_bound))
5326 index = fold (build (MINUS_EXPR, index_type, index,
5327 convert (sizetype, low_bound)));
5329 if ((TREE_CODE (index) != INTEGER_CST
5330 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5331 && (! SLOW_UNALIGNED_ACCESS || ! get_inner_unaligned_p (exp)))
5333 /* Nonconstant array index or nonconstant element size, and
5334 not an array in an unaligned (packed) structure field.
5335 Generate the tree for *(&array+index) and expand that,
5336 except do it in a language-independent way
5337 and don't complain about non-lvalue arrays.
5338 `mark_addressable' should already have been called
5339 for any array for which this case will be reached. */
5341 /* Don't forget the const or volatile flag from the array element.  */
5343 tree variant_type = build_type_variant (type,
5344 TREE_READONLY (exp),
5345 TREE_THIS_VOLATILE (exp));
5346 tree array_adr = build1 (ADDR_EXPR,
5347 build_pointer_type (variant_type), array);
5349 tree size = size_in_bytes (type);
5351 /* Convert the integer argument to a type the same size as sizetype
5352 so the multiply won't overflow spuriously. */
5353 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
5354 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
5357 if (TREE_CODE (size) != INTEGER_CST
5358 && contains_placeholder_p (size))
5359 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
5361 /* Don't think the address has side effects
5362 just because the array does.
5363 (In some cases the address might have side effects,
5364 and we fail to record that fact here. However, it should not
5365 matter, since expand_expr should not care.) */
5366 TREE_SIDE_EFFECTS (array_adr) = 0;
5370 (INDIRECT_REF, type,
5371 fold (build (PLUS_EXPR,
5372 TYPE_POINTER_TO (variant_type),
5377 TYPE_POINTER_TO (variant_type),
5378 fold (build (MULT_EXPR, TREE_TYPE (index),
5380 convert (TREE_TYPE (index),
5383 /* Volatility, etc., of new expression is same as old expression.  */
5385 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
5386 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
5387 TREE_READONLY (elt) = TREE_READONLY (exp);
5389 return expand_expr (elt, target, tmode, modifier);
5392 /* Fold an expression like: "foo"[2].
5393 This is not done in fold so it won't happen inside &.
5394 Don't fold if this is for wide characters since it's too
5395 difficult to do correctly and this is a very rare case. */
5397 if (TREE_CODE (array) == STRING_CST
5398 && TREE_CODE (index) == INTEGER_CST
5399 && !TREE_INT_CST_HIGH (index)
5400 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
5401 && GET_MODE_CLASS (mode) == MODE_INT
5402 && GET_MODE_SIZE (mode) == 1)
5403 return GEN_INT (TREE_STRING_POINTER (array)[i]);
5405 /* If this is a constant index into a constant array,
5406 just get the value from the array. Handle both the cases when
5407 we have an explicit constructor and when our operand is a variable
5408 that was declared const. */
5410 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
5412 if (TREE_CODE (index) == INTEGER_CST
5413 && TREE_INT_CST_HIGH (index) == 0)
5415 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
5417 i = TREE_INT_CST_LOW (index);
5419 elem = TREE_CHAIN (elem);
5421 return expand_expr (fold (TREE_VALUE (elem)), target,
5426 else if (optimize >= 1
5427 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
5428 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
5429 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
5431 if (TREE_CODE (index) == INTEGER_CST
5432 && TREE_INT_CST_HIGH (index) == 0)
5434 tree init = DECL_INITIAL (array);
5436 i = TREE_INT_CST_LOW (index);
5437 if (TREE_CODE (init) == CONSTRUCTOR)
5439 tree elem = CONSTRUCTOR_ELTS (init);
5442 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
5443 elem = TREE_CHAIN (elem);
5445 return expand_expr (fold (TREE_VALUE (elem)), target,
5448 else if (TREE_CODE (init) == STRING_CST
5449 && i < TREE_STRING_LENGTH (init))
5450 return GEN_INT (TREE_STRING_POINTER (init)[i]);
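/* Illustrative case of the folding above: with optimization enabled,
   `msg[1]' where `static const char msg[] = "hi";' is reduced here to
   the constant 'i', with no memory reference emitted.  */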
5455 /* Treat array-ref with constant index as a component-ref. */
5459 /* If the operand is a CONSTRUCTOR, we can just extract the
5460 appropriate field if it is present. Don't do this if we have
5461 already written the data since we want to refer to that copy
5462 and varasm.c assumes that's what we'll do. */
5463 if (code != ARRAY_REF
5464 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
5465 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
5469 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
5470 elt = TREE_CHAIN (elt))
5471 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
5472 /* We can normally use the value of the field in the
5473 CONSTRUCTOR. However, if this is a bitfield in
5474 an integral mode that we can fit in a HOST_WIDE_INT,
5475 we must mask only the number of bits in the bitfield,
5476 since this is done implicitly by the constructor. If
5477 the bitfield does not meet either of those conditions,
5478 we can't do this optimization. */
5479 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
5480 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
5482 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
5483 <= HOST_BITS_PER_WIDE_INT))))
5485 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
5486 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
5488 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
5489 enum machine_mode imode
5490 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
5492 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
5494 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
5495 op0 = expand_and (op0, op1, target);
5500 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize, 0);
5502 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
5504 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
5514 enum machine_mode mode1;
5520 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
5521 &mode1, &unsignedp, &volatilep,
5524 /* If we got back the original object, something is wrong. Perhaps
5525 we are evaluating an expression too early. In any event, don't
5526 infinitely recurse. */
5530 /* If TEM's type is a union of variable size, pass TARGET to the inner
5531 computation, since it will need a temporary and TARGET
5532 will have to do.  This occurs in unchecked conversion in Ada.  */
5534 op0 = expand_expr (tem,
5535 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
5536 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
5538 ? target : NULL_RTX),
5540 modifier == EXPAND_INITIALIZER ? modifier : 0);
5542 /* If this is a constant, put it into a register if it is a
5543 legitimate constant and memory if it isn't. */
5544 if (CONSTANT_P (op0))
5546 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
5547 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
5548 op0 = force_reg (mode, op0);
5550 op0 = validize_mem (force_const_mem (mode, op0));
5555 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5557 if (GET_CODE (op0) != MEM)
5559 op0 = change_address (op0, VOIDmode,
5560 gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
5561 force_reg (ptr_mode, offset_rtx)));
5564 /* Don't forget about volatility even if this is a bitfield. */
5565 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
5567 op0 = copy_rtx (op0);
5568 MEM_VOLATILE_P (op0) = 1;
5571 /* In cases where an aligned union has an unaligned object
5572 as a field, we might be extracting a BLKmode value from
5573 an integer-mode (e.g., SImode) object. Handle this case
5574 by doing the extract into an object as wide as the field
5575 (which we know to be the width of a basic mode), then
5576 storing into memory, and changing the mode to BLKmode.
5577 If we ultimately want the address (EXPAND_CONST_ADDRESS or
5578 EXPAND_INITIALIZER), then we must not copy to a temporary. */
5579 if (mode1 == VOIDmode
5580 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5581 || (modifier != EXPAND_CONST_ADDRESS
5582 && modifier != EXPAND_INITIALIZER
5583 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
5584 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5585 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5586 /* If the field isn't aligned enough to fetch as a memref,
5587 fetch it as a bit field. */
5588 || (SLOW_UNALIGNED_ACCESS
5589 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
5590 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
5592 enum machine_mode ext_mode = mode;
5594 if (ext_mode == BLKmode)
5595 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
5597 if (ext_mode == BLKmode)
5599 /* In this case, BITPOS must start at a byte boundary and
5600 TARGET, if specified, must be a MEM. */
5601 if (GET_CODE (op0) != MEM
5602 || (target != 0 && GET_CODE (target) != MEM)
5603 || bitpos % BITS_PER_UNIT != 0)
5606 op0 = change_address (op0, VOIDmode,
5607 plus_constant (XEXP (op0, 0),
5608 bitpos / BITS_PER_UNIT));
5610 target = assign_temp (type, 0, 1, 1);
5612 emit_block_move (target, op0,
5613 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5620 op0 = validize_mem (op0);
5622 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
5623 mark_reg_pointer (XEXP (op0, 0), alignment);
5625 op0 = extract_bit_field (op0, bitsize, bitpos,
5626 unsignedp, target, ext_mode, ext_mode,
5628 int_size_in_bytes (TREE_TYPE (tem)));
5630 /* If the result is a record type and BITSIZE is narrower than
5631 the mode of OP0, an integral mode, and this is a big endian
5632 machine, we must put the field into the high-order bits. */
5633 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
5634 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
5635 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
5636 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
5637 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
5641 if (mode == BLKmode)
5643 rtx new = assign_stack_temp (ext_mode,
5644 bitsize / BITS_PER_UNIT, 0);
5646 emit_move_insn (new, op0);
5647 op0 = copy_rtx (new);
5648 PUT_MODE (op0, BLKmode);
5649 MEM_IN_STRUCT_P (op0) = 1;
5655 /* If the result is BLKmode, use that to access the object now as well.  */
5657 if (mode == BLKmode)
5660 /* Get a reference to just this component. */
5661 if (modifier == EXPAND_CONST_ADDRESS
5662 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5663 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
5664 (bitpos / BITS_PER_UNIT)));
5666 op0 = change_address (op0, mode1,
5667 plus_constant (XEXP (op0, 0),
5668 (bitpos / BITS_PER_UNIT)));
5669 if (GET_CODE (XEXP (op0, 0)) == REG)
5670 mark_reg_pointer (XEXP (op0, 0), alignment);
5672 MEM_IN_STRUCT_P (op0) = 1;
5673 MEM_VOLATILE_P (op0) |= volatilep;
5674 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
5675 || modifier == EXPAND_CONST_ADDRESS
5676 || modifier == EXPAND_INITIALIZER)
5678 else if (target == 0)
5679 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5681 convert_move (target, op0, unsignedp);
5685 /* Intended for a reference to a buffer of a file-object in Pascal.
5686 But it's not certain that a special tree code will really be
5687 necessary for these. INDIRECT_REF might work for them. */
5693 /* Pascal set IN expression.
5696 rlo = set_low - (set_low%bits_per_word);
5697 the_word = set [ (index - rlo)/bits_per_word ];
5698 bit_index = index % bits_per_word;
5699 bitmask = 1 << bit_index;
5700 return !!(the_word & bitmask); */
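/* A plain C rendering of the pseudocode above, assuming the set is
   stored as an array of 8-bit bytes; illustrative only, since the code
   below open-codes the same test in rtl using BITS_PER_UNIT.  */
#if 0
static int
in_set_p (set, set_low, index)
     unsigned char *set;
     int set_low, index;
{
  int rlo = set_low - (set_low % 8);	/* bit 0 of the first byte */
  unsigned char the_word = set[(index - rlo) / 8];
  int bit_index = index % 8;
  return (the_word >> bit_index) & 1;
}
#endif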
5702 tree set = TREE_OPERAND (exp, 0);
5703 tree index = TREE_OPERAND (exp, 1);
5704 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
5705 tree set_type = TREE_TYPE (set);
5706 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
5707 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
5708 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
5709 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
5710 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
5711 rtx setval = expand_expr (set, 0, VOIDmode, 0);
5712 rtx setaddr = XEXP (setval, 0);
5713 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
5715 rtx diff, quo, rem, addr, bit, result;
5717 preexpand_calls (exp);
5719 /* If domain is empty, answer is no. Likewise if index is constant
5720 and out of bounds. */
5721 if ((TREE_CODE (set_high_bound) == INTEGER_CST
5722 && TREE_CODE (set_low_bound) == INTEGER_CST
5723 && tree_int_cst_lt (set_high_bound, set_low_bound)
5724 || (TREE_CODE (index) == INTEGER_CST
5725 && TREE_CODE (set_low_bound) == INTEGER_CST
5726 && tree_int_cst_lt (index, set_low_bound))
5727 || (TREE_CODE (set_high_bound) == INTEGER_CST
5728 && TREE_CODE (index) == INTEGER_CST
5729 && tree_int_cst_lt (set_high_bound, index))))
5733 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5735 /* If we get here, we have to generate the code for both cases
5736 (in range and out of range). */
5738 op0 = gen_label_rtx ();
5739 op1 = gen_label_rtx ();
5741 if (! (GET_CODE (index_val) == CONST_INT
5742 && GET_CODE (lo_r) == CONST_INT))
5744 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
5745 GET_MODE (index_val), iunsignedp, 0);
5746 emit_jump_insn (gen_blt (op1));
5749 if (! (GET_CODE (index_val) == CONST_INT
5750 && GET_CODE (hi_r) == CONST_INT))
5752 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
5753 GET_MODE (index_val), iunsignedp, 0);
5754 emit_jump_insn (gen_bgt (op1));
5757 /* Calculate the element number of bit zero in the first word of the set.  */
5759 if (GET_CODE (lo_r) == CONST_INT)
5760 rlow = GEN_INT (INTVAL (lo_r)
5761 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
5763 rlow = expand_binop (index_mode, and_optab, lo_r,
5764 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
5765 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5767 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
5768 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5770 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
5771 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5772 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
5773 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5775 addr = memory_address (byte_mode,
5776 expand_binop (index_mode, add_optab, diff,
5777 setaddr, NULL_RTX, iunsignedp,
5780 /* Extract the bit we want to examine.  */
5781 bit = expand_shift (RSHIFT_EXPR, byte_mode,
5782 gen_rtx (MEM, byte_mode, addr),
5783 make_tree (TREE_TYPE (index), rem),
5785 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
5786 GET_MODE (target) == byte_mode ? target : 0,
5787 1, OPTAB_LIB_WIDEN);
5789 if (result != target)
5790 convert_move (target, result, 1);
5792 /* Output the code to handle the out-of-range case. */
5795 emit_move_insn (target, const0_rtx);
5800 case WITH_CLEANUP_EXPR:
5801 if (RTL_EXPR_RTL (exp) == 0)
5804 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5806 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
5807 /* That's it for this cleanup. */
5808 TREE_OPERAND (exp, 2) = 0;
5809 expand_eh_region_start ();
5811 return RTL_EXPR_RTL (exp);
5813 case CLEANUP_POINT_EXPR:
5815 extern int temp_slot_level;
5816 tree old_cleanups = cleanups_this_call;
5817 int old_temp_level = target_temp_slot_level;
5819 target_temp_slot_level = temp_slot_level;
5820 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5821 /* If we're going to use this value, load it up now. */
5823 op0 = force_not_mem (op0);
5824 expand_cleanups_to (old_cleanups);
5825 preserve_temp_slots (op0);
5828 target_temp_slot_level = old_temp_level;
5833 /* Check for a built-in function. */
5834 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5835 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5837 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5838 return expand_builtin (exp, target, subtarget, tmode, ignore);
5840 /* If this call was expanded already by preexpand_calls,
5841 just return the result we got. */
5842 if (CALL_EXPR_RTL (exp) != 0)
5843 return CALL_EXPR_RTL (exp);
5845 return expand_call (exp, target, ignore);
5847 case NON_LVALUE_EXPR:
5850 case REFERENCE_EXPR:
5851 if (TREE_CODE (type) == UNION_TYPE)
5853 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
5856 if (mode != BLKmode)
5857 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5859 target = assign_temp (type, 0, 1, 1);
5862 if (GET_CODE (target) == MEM)
5863 /* Store data into beginning of memory target. */
5864 store_expr (TREE_OPERAND (exp, 0),
5865 change_address (target, TYPE_MODE (valtype), 0), 0);
5867 else if (GET_CODE (target) == REG)
5868 /* Store this field into a union of the proper type. */
5869 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
5870 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
5872 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
5876 /* Return the entire union. */
5880 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5882 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
5885 /* If the signedness of the conversion differs and OP0 is
5886 a promoted SUBREG, clear that indication since we now
5887 have to do the proper extension. */
5888 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
5889 && GET_CODE (op0) == SUBREG)
5890 SUBREG_PROMOTED_VAR_P (op0) = 0;
5895 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
5896 if (GET_MODE (op0) == mode)
5899 /* If OP0 is a constant, just convert it into the proper mode. */
5900 if (CONSTANT_P (op0))
5902 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5903 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5905 if (modifier == EXPAND_INITIALIZER)
5906 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
5910 convert_to_mode (mode, op0,
5911 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5913 convert_move (target, op0,
5914 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5918 /* We come here from MINUS_EXPR when the second operand is a constant.  */
5921 this_optab = add_optab;
5923 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
5924 something else, make sure we add the register to the constant and
5925 then to the other thing. This case can occur during strength
5926 reduction and doing it this way will produce better code if the
5927 frame pointer or argument pointer is eliminated.
5929 fold-const.c will ensure that the constant is always in the inner
5930 PLUS_EXPR, so the only case we need to do anything about is if
5931 sp, ap, or fp is our second argument, in which case we must swap
5932 the innermost first argument and our second argument. */
5934 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5935 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
5936 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
5937 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
5938 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
5939 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
5941 tree t = TREE_OPERAND (exp, 1);
5943 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5944 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
5947 /* If the result is to be ptr_mode and we are adding an integer to
5948 something, we might be forming a constant. So try to use
5949 plus_constant. If it produces a sum and we can't accept it,
5950 use force_operand. This allows P = &ARR[const] to generate
5951 efficient code on machines where a SYMBOL_REF is not a valid index.
5954 If this is an EXPAND_SUM call, always return the sum. */
5955 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
5956 || mode == ptr_mode)
5958 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
5959 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5960 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
5962 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
5964 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
5965 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5966 op1 = force_operand (op1, target);
5970 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5971 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
5972 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
5974 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
5976 if (! CONSTANT_P (op0))
5978 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5979 VOIDmode, modifier);
5980 /* Don't go to both_summands if modifier
5981 says it's not right to return a PLUS. */
5982 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5986 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
5987 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5988 op0 = force_operand (op0, target);
5993 /* No sense saving up arithmetic to be done
5994 if it's all in the wrong mode to form part of an address.
5995 And force_operand won't know whether to sign-extend or zero-extend.  */
5997 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5998 || mode != ptr_mode)
6001 preexpand_calls (exp);
6002 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6005 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
6006 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
6009 /* Make sure any term that's a sum with a constant comes last. */
6010 if (GET_CODE (op0) == PLUS
6011 && CONSTANT_P (XEXP (op0, 1)))
6017 /* If adding to a sum including a constant,
6018 associate it to put the constant outside. */
6019 if (GET_CODE (op1) == PLUS
6020 && CONSTANT_P (XEXP (op1, 1)))
6022 rtx constant_term = const0_rtx;
6024 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
6027 /* Ensure that MULT comes first if there is one. */
6028 else if (GET_CODE (op0) == MULT)
6029 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
6031 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
6033 /* Let's also eliminate constants from op0 if possible. */
6034 op0 = eliminate_constant_term (op0, &constant_term);
6036 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
6037 their sum should be a constant. Form it into OP1, since the
6038 result we want will then be OP0 + OP1. */
6040 temp = simplify_binary_operation (PLUS, mode, constant_term,
6045 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
6048 /* Put a constant term last and put a multiplication first. */
6049 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
6050 temp = op1, op1 = op0, op0 = temp;
6052 temp = simplify_binary_operation (PLUS, mode, op0, op1);
6053 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
6056 /* For initializers, we are allowed to return a MINUS of two
6057 symbolic constants. Here we handle all cases when both operands
6059 /* Handle difference of two symbolic constants,
6060 for the sake of an initializer. */
6061 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6062 && really_constant_p (TREE_OPERAND (exp, 0))
6063 && really_constant_p (TREE_OPERAND (exp, 1)))
6065 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
6066 VOIDmode, modifier);
6067 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6068 VOIDmode, modifier);
6070 /* If the last operand is a CONST_INT, use plus_constant of
6071 the negated constant. Else make the MINUS. */
6072 if (GET_CODE (op1) == CONST_INT)
6073 return plus_constant (op0, - INTVAL (op1));
6075 return gen_rtx (MINUS, mode, op0, op1);
6077 /* Convert A - const to A + (-const). */
6078 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6080 tree negated = fold (build1 (NEGATE_EXPR, type,
6081 TREE_OPERAND (exp, 1)));
6083 /* Deal with the case where we can't negate the constant in TYPE.  */
6085 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6087 tree newtype = signed_type (type);
6088 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
6089 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
6090 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
6092 if (! TREE_OVERFLOW (newneg))
6093 return expand_expr (convert (type,
6094 build (PLUS_EXPR, newtype,
6096 target, tmode, modifier);
6100 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
6104 this_optab = sub_optab;
6108 preexpand_calls (exp);
6109 /* If first operand is constant, swap them.
6110 Thus the following special case checks need only
6111 check the second operand. */
6112 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6114 register tree t1 = TREE_OPERAND (exp, 0);
6115 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
6116 TREE_OPERAND (exp, 1) = t1;
6119 /* Attempt to return something suitable for generating an
6120 indexed address, for machines that support that. */
6122 if (modifier == EXPAND_SUM && mode == ptr_mode
6123 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6124 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
6126 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
6128 /* Apply distributive law if OP0 is x+c. */
6129 if (GET_CODE (op0) == PLUS
6130 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
6131 return gen_rtx (PLUS, mode,
6132 gen_rtx (MULT, mode, XEXP (op0, 0),
6133 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
6134 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
6135 * INTVAL (XEXP (op0, 1))));
6137 if (GET_CODE (op0) != REG)
6138 op0 = force_operand (op0, NULL_RTX);
6139 if (GET_CODE (op0) != REG)
6140 op0 = copy_to_mode_reg (mode, op0);
6142 return gen_rtx (MULT, mode, op0,
6143 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
6146 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6149 /* Check for multiplying things that have been extended
6150 from a narrower type. If this machine supports multiplying
6151 in that narrower type with a result in the desired type,
6152 do it that way, and avoid the explicit type-conversion. */
6153 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
6154 && TREE_CODE (type) == INTEGER_TYPE
6155 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6156 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
6157 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6158 && int_fits_type_p (TREE_OPERAND (exp, 1),
6159 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6160 /* Don't use a widening multiply if a shift will do. */
6161 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
6162 > HOST_BITS_PER_WIDE_INT)
6163 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
6165 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6166 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6168 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
6169 /* If both operands are extended, they must either both
6170 be zero-extended or both be sign-extended. */
6171 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6172 ==
6173 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
6175 enum machine_mode innermode
6176 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
6177 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6178 ? smul_widen_optab : umul_widen_optab);
6179 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6180 ? umul_widen_optab : smul_widen_optab);
6181 if (mode == GET_MODE_WIDER_MODE (innermode))
6183 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
6185 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6186 NULL_RTX, VOIDmode, 0);
6187 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6188 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6189 VOIDmode, 0);
6190 else
6191 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6192 NULL_RTX, VOIDmode, 0);
6193 goto binop2;
6195 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6196 && innermode == word_mode)
6199 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6200 NULL_RTX, VOIDmode, 0);
6201 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6202 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6203 VOIDmode, 0);
6204 else
6205 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6206 NULL_RTX, VOIDmode, 0);
6207 temp = expand_binop (mode, other_optab, op0, op1, target,
6208 unsignedp, OPTAB_LIB_WIDEN);
6209 htem = expand_mult_highpart_adjust (innermode,
6210 gen_highpart (innermode, temp),
6211 op0, op1,
6212 gen_highpart (innermode, temp),
6213 unsignedp);
6214 emit_move_insn (gen_highpart (innermode, temp), htem);
6219 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6220 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6221 return expand_mult (mode, op0, op1, target, unsignedp);
6223 case TRUNC_DIV_EXPR:
6224 case FLOOR_DIV_EXPR:
6225 case CEIL_DIV_EXPR:
6226 case ROUND_DIV_EXPR:
6227 case EXACT_DIV_EXPR:
6228 preexpand_calls (exp);
6229 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6230 subtarget = 0;
6231 /* Possible optimization: compute the dividend with EXPAND_SUM
6232 then if the divisor is constant can optimize the case
6233 where some terms of the dividend have coeffs divisible by it. */
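/* Illustrative instance of that idea: in (16*a + 24) / 8, expanding
   the dividend with EXPAND_SUM would expose the terms, and the
   quotient could be rewritten as 2*a + 3 with no division insn.  */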
6234 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6235 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6236 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
6238 case RDIV_EXPR:
6239 this_optab = flodiv_optab;
6240 goto binop;
6242 case TRUNC_MOD_EXPR:
6243 case FLOOR_MOD_EXPR:
6244 case CEIL_MOD_EXPR:
6245 case ROUND_MOD_EXPR:
6246 preexpand_calls (exp);
6247 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6248 subtarget = 0;
6249 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6250 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6251 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
6253 case FIX_ROUND_EXPR:
6254 case FIX_FLOOR_EXPR:
6255 case FIX_CEIL_EXPR:
6256 abort (); /* Not used for C. */
6258 case FIX_TRUNC_EXPR:
6259 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6260 if (target == 0)
6261 target = gen_reg_rtx (mode);
6262 expand_fix (target, op0, unsignedp);
6263 return target;
6265 case FLOAT_EXPR:
6266 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6267 if (target == 0)
6268 target = gen_reg_rtx (mode);
6269 /* expand_float can't figure out what to do if FROM has VOIDmode.
6270 So give it the correct mode. With -O, cse will optimize this. */
6271 if (GET_MODE (op0) == VOIDmode)
6272 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6274 expand_float (target, op0,
6275 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6276 return target;
6278 case NEGATE_EXPR:
6279 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6280 temp = expand_unop (mode, neg_optab, op0, target, 0);
6281 if (temp == 0)
6282 abort ();
6283 return temp;
6285 case ABS_EXPR:
6286 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6288 /* Handle complex values specially. */
6289 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
6290 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6291 return expand_complex_abs (mode, op0, target, unsignedp);
6293 /* Unsigned abs is simply the operand. Testing here means we don't
6294 risk generating incorrect code below. */
6295 if (TREE_UNSIGNED (type))
6296 return op0;
6298 return expand_abs (mode, op0, target, unsignedp,
6299 safe_from_p (target, TREE_OPERAND (exp, 0)));
6301 case MAX_EXPR:
6302 case MIN_EXPR:
6303 target = original_target;
6304 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
6305 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
6306 || GET_MODE (target) != mode
6307 || (GET_CODE (target) == REG
6308 && REGNO (target) < FIRST_PSEUDO_REGISTER))
6309 target = gen_reg_rtx (mode);
6310 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6311 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6313 /* First try to do it with a special MIN or MAX instruction.
6314 If that does not win, use a conditional jump to select the proper
6315 value. */
6316 this_optab = (TREE_UNSIGNED (type)
6317 ? (code == MIN_EXPR ? umin_optab : umax_optab)
6318 : (code == MIN_EXPR ? smin_optab : smax_optab));
6320 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
6325 /* At this point, a MEM target is no longer useful; we will get better
6326 code without it. */
6328 if (GET_CODE (target) == MEM)
6329 target = gen_reg_rtx (mode);
6332 emit_move_insn (target, op0);
6334 op0 = gen_label_rtx ();
6336 /* If this mode is an integer too wide to compare properly,
6337 compare word by word. Rely on cse to optimize constant cases. */
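/* Illustrative case: a DImode comparison on a 32-bit target with no
   DImode compare insn is emitted as SImode word comparisons, deciding
   on the high words first and examining the low words only when the
   high words are equal.  */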
6338 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
6340 if (code == MAX_EXPR)
6341 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6342 target, op1, NULL_RTX, op0);
6343 else
6344 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6345 op1, target, NULL_RTX, op0);
6346 emit_move_insn (target, op1);
6350 if (code == MAX_EXPR)
6351 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6352 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
6353 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
6354 else
6355 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6356 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
6357 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
6358 if (temp == const0_rtx)
6359 emit_move_insn (target, op1);
6360 else if (temp != const_true_rtx)
6362 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
6363 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
6364 else
6365 abort ();
6366 emit_move_insn (target, op1);
6373 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6374 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
6380 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6381 temp = expand_unop (mode, ffs_optab, op0, target, 1);
6386 /* ??? Can optimize bitwise operations with one arg constant.
6387 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6388 and (a bitwise1 b) bitwise2 b (etc)
6389 but that is probably not worthwhile. */
6391 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6392 boolean values when we want in all cases to compute both of them. In
6393 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6394 as actual zero-or-1 values and then bitwise anding. In cases where
6395 there cannot be any side effects, better code would be made by
6396 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6397 how to recognize those cases. */
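/* Illustrative contrast: for "a && b", TRUTH_ANDIF_EXPR skips
   evaluating b when a is zero, while TRUTH_AND_EXPR evaluates both
   operands to 0-or-1 values and emits one bitwise AND, which is
   usually cheaper when neither operand has side effects.  */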
6399 case TRUTH_AND_EXPR:
6400 case BIT_AND_EXPR:
6401 this_optab = and_optab;
6402 goto binop;
6404 case TRUTH_OR_EXPR:
6405 case BIT_IOR_EXPR:
6406 this_optab = ior_optab;
6407 goto binop;
6409 case TRUTH_XOR_EXPR:
6410 case BIT_XOR_EXPR:
6411 this_optab = xor_optab;
6412 goto binop;
6418 preexpand_calls (exp);
6419 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6420 subtarget = 0;
6421 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6422 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
6425 /* Could determine the answer when only additive constants differ. Also,
6426 the addition of one can be handled by changing the condition. */
6433 preexpand_calls (exp);
6434 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
6435 if (temp != 0)
6436 return temp;
6438 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
6439 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
6441 && GET_CODE (original_target) == REG
6442 && (GET_MODE (original_target)
6443 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6445 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
6448 if (temp != original_target)
6449 temp = copy_to_reg (temp);
6451 op1 = gen_label_rtx ();
6452 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
6453 GET_MODE (temp), unsignedp, 0);
6454 emit_jump_insn (gen_beq (op1));
6455 emit_move_insn (temp, const1_rtx);
6456 emit_label (op1);
6457 return temp;
6460 /* If no set-flag instruction, must generate a conditional
6461 store into a temporary variable. Drop through
6462 and handle this like && and ||. */
6464 case TRUTH_ANDIF_EXPR:
6465 case TRUTH_ORIF_EXPR:
6467 && (target == 0 || ! safe_from_p (target, exp)
6468 /* Make sure we don't have a hard reg (such as function's return
6469 value) live across basic blocks, if not optimizing. */
6470 || (!optimize && GET_CODE (target) == REG
6471 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
6472 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6475 emit_clr_insn (target);
6477 op1 = gen_label_rtx ();
6478 jumpifnot (exp, op1);
6481 emit_0_to_1_insn (target);
6482 emit_label (op1);
6484 return ignore ? const0_rtx : target;
6486 case TRUTH_NOT_EXPR:
6487 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6488 /* The parser is careful to generate TRUTH_NOT_EXPR
6489 only with operands that are always zero or one. */
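/* So "!x", with x known to be 0 or 1, is computed below simply as
   x ^ 1, with no comparison or branch.  */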
6490 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
6491 target, 1, OPTAB_LIB_WIDEN);
6496 case COMPOUND_EXPR:
6497 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6498 emit_queue ();
6499 return expand_expr (TREE_OPERAND (exp, 1),
6500 (ignore ? const0_rtx : target),
6501 VOIDmode, 0);
6503 case COND_EXPR:
6504 /* If we would have a "singleton" (see below) were it not for a
6505 conversion in each arm, bring that conversion back out. */
6506 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6507 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
6508 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
6509 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
6511 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
6512 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
6514 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
6515 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6516 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
6517 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
6518 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
6519 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6520 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
6521 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
6522 return expand_expr (build1 (NOP_EXPR, type,
6523 build (COND_EXPR, TREE_TYPE (true),
6524 TREE_OPERAND (exp, 0),
6526 target, tmode, modifier);
6530 rtx flag = NULL_RTX;
6531 tree left_cleanups = NULL_TREE;
6532 tree right_cleanups = NULL_TREE;
6534 /* Used to save a pointer to the place to put the setting of
6535 the flag that indicates if this side of the conditional was
6536 taken. We backpatch the code, if we find out later that we
6537 have any conditional cleanups that need to be performed. */
6538 rtx dest_right_flag = NULL_RTX;
6539 rtx dest_left_flag = NULL_RTX;
6541 /* Note that COND_EXPRs whose type is a structure or union
6542 are required to be constructed to contain assignments of
6543 a temporary variable, so that we can evaluate them here
6544 for side effect only. If type is void, we must do likewise. */
6546 /* If an arm of the branch requires a cleanup,
6547 only that cleanup is performed. */
6550 tree binary_op = 0, unary_op = 0;
6551 tree old_cleanups = cleanups_this_call;
6553 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
6554 convert it to our mode, if necessary. */
6555 if (integer_onep (TREE_OPERAND (exp, 1))
6556 && integer_zerop (TREE_OPERAND (exp, 2))
6557 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6561 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6566 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
6567 if (GET_MODE (op0) == mode)
6571 target = gen_reg_rtx (mode);
6572 convert_move (target, op0, unsignedp);
6576 /* Check for X ? A + B : A. If we have this, we can copy A to the
6577 output and conditionally add B. Similarly for unary operations.
6578 Don't do this if X has side-effects because those side effects
6579 might affect A or B and the "?" operation is a sequence point in
6580 ANSI. (operand_equal_p tests for side effects.) */
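/* Illustrative example: in "x ? a + 1 : a" the singleton is "a"; we
   copy a into the target unconditionally and then conditionally add 1,
   rather than emitting two separate assignment arms.  */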
6582 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
6583 && operand_equal_p (TREE_OPERAND (exp, 2),
6584 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6585 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
6586 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
6587 && operand_equal_p (TREE_OPERAND (exp, 1),
6588 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6589 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
6590 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
6591 && operand_equal_p (TREE_OPERAND (exp, 2),
6592 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6593 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
6594 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
6595 && operand_equal_p (TREE_OPERAND (exp, 1),
6596 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6597 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
6599 /* If we are not to produce a result, we have no target. Otherwise,
6600 if a target was specified use it; it will not be used as an
6601 intermediate target unless it is safe. If no target, use a
6602 temporary. */
6604 if (ignore)
6605 temp = 0;
6606 else if (original_target
6607 && (safe_from_p (original_target, TREE_OPERAND (exp, 0))
6608 || (singleton && GET_CODE (original_target) == REG
6609 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
6610 && original_target == var_rtx (singleton)))
6611 && GET_MODE (original_target) == mode
6612 && ! (GET_CODE (original_target) == MEM
6613 && MEM_VOLATILE_P (original_target)))
6614 temp = original_target;
6615 else if (TREE_ADDRESSABLE (type))
6616 abort ();
6617 else
6618 temp = assign_temp (type, 0, 0, 1);
6620 /* If we had X ? A + C : A, with C a constant power of 2, and we can
6621 do the test of X as a store-flag operation, do this as
6622 A + ((X != 0) << log C). Similarly for other simple binary
6623 operators. Only do for C == 1 if BRANCH_COST is low. */
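/* Illustrative example: "x ? a + 4 : a", with the condition expanded
   as a store-flag, becomes "a + ((x != 0) << 2)", trading a branch for
   a store-flag insn and a shift.  */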
6624 if (temp && singleton && binary_op
6625 && (TREE_CODE (binary_op) == PLUS_EXPR
6626 || TREE_CODE (binary_op) == MINUS_EXPR
6627 || TREE_CODE (binary_op) == BIT_IOR_EXPR
6628 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
6629 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
6630 : integer_onep (TREE_OPERAND (binary_op, 1)))
6631 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6634 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
6635 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
6636 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
6639 /* If we had X ? A : A + 1, do this as A + (X == 0).
6641 We have to invert the truth value here and then put it
6642 back later if do_store_flag fails. We cannot simply copy
6643 TREE_OPERAND (exp, 0) to another variable and modify that
6644 because invert_truthvalue can modify the tree pointed to
6645 by its argument. */
6646 if (singleton == TREE_OPERAND (exp, 1))
6647 TREE_OPERAND (exp, 0)
6648 = invert_truthvalue (TREE_OPERAND (exp, 0));
6650 result = do_store_flag (TREE_OPERAND (exp, 0),
6651 (safe_from_p (temp, singleton)
6653 mode, BRANCH_COST <= 1);
6655 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
6656 result = expand_shift (LSHIFT_EXPR, mode, result,
6657 build_int_2 (tree_log2
6661 (safe_from_p (temp, singleton)
6662 ? temp : NULL_RTX), 0);
6666 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
6667 return expand_binop (mode, boptab, op1, result, temp,
6668 unsignedp, OPTAB_LIB_WIDEN);
6670 else if (singleton == TREE_OPERAND (exp, 1))
6671 TREE_OPERAND (exp, 0)
6672 = invert_truthvalue (TREE_OPERAND (exp, 0));
6675 do_pending_stack_adjust ();
6677 op0 = gen_label_rtx ();
6679 flag = gen_reg_rtx (word_mode);
6680 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
6684 /* If the target conflicts with the other operand of the
6685 binary op, we can't use it. Also, we can't use the target
6686 if it is a hard register, because evaluating the condition
6687 might clobber it. */
6688 if ((binary_op
6689 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
6690 || (GET_CODE (temp) == REG
6691 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
6692 temp = gen_reg_rtx (mode);
6693 store_expr (singleton, temp, 0);
6696 expand_expr (singleton,
6697 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6698 dest_left_flag = get_last_insn ();
6699 if (singleton == TREE_OPERAND (exp, 1))
6700 jumpif (TREE_OPERAND (exp, 0), op0);
6702 jumpifnot (TREE_OPERAND (exp, 0), op0);
6704 /* Allows cleanups up to here. */
6705 old_cleanups = cleanups_this_call;
6706 if (binary_op && temp == 0)
6707 /* Just touch the other operand. */
6708 expand_expr (TREE_OPERAND (binary_op, 1),
6709 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6710 else if (binary_op)
6711 store_expr (build (TREE_CODE (binary_op), type,
6712 make_tree (type, temp),
6713 TREE_OPERAND (binary_op, 1)),
6714 temp, 0);
6715 else
6716 store_expr (build1 (TREE_CODE (unary_op), type,
6717 make_tree (type, temp)),
6718 temp, 0);
6720 dest_right_flag = get_last_insn ();
6723 /* This is now done in jump.c and is better done there because it
6724 produces shorter register lifetimes. */
6726 /* Check for both possibilities either constants or variables
6727 in registers (but not the same as the target!). If so, can
6728 save branches by assigning one, branching, and assigning the
6729 other. */
6730 else if (temp && GET_MODE (temp) != BLKmode
6731 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
6732 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
6733 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
6734 && DECL_RTL (TREE_OPERAND (exp, 1))
6735 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
6736 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
6737 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
6738 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
6739 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
6740 && DECL_RTL (TREE_OPERAND (exp, 2))
6741 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
6742 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
6744 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6745 temp = gen_reg_rtx (mode);
6746 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6747 dest_left_flag = get_last_insn ();
6748 jumpifnot (TREE_OPERAND (exp, 0), op0);
6750 /* Allows cleanups up to here. */
6751 old_cleanups = cleanups_this_call;
6752 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6754 dest_right_flag = get_last_insn ();
6757 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
6758 comparison operator. If we have one of these cases, set the
6759 output to A, branch on A (cse will merge these two references),
6760 then set the output to FOO. */
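/* Illustrative example: "x != 0 ? x : 7" stores x into the target,
   branches on that same x (cse merges the two references), and stores
   7 only on the fall-through path.  */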
6761 else if (temp
6762 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6763 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6764 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6765 TREE_OPERAND (exp, 1), 0)
6766 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6767 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
6769 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6770 temp = gen_reg_rtx (mode);
6771 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6772 dest_left_flag = get_last_insn ();
6773 jumpif (TREE_OPERAND (exp, 0), op0);
6775 /* Allows cleanups up to here. */
6776 old_cleanups = cleanups_this_call;
6777 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6779 dest_right_flag = get_last_insn ();
6781 else if (temp
6782 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6783 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6784 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6785 TREE_OPERAND (exp, 2), 0)
6786 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6787 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
6789 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6790 temp = gen_reg_rtx (mode);
6791 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6792 dest_left_flag = get_last_insn ();
6793 jumpifnot (TREE_OPERAND (exp, 0), op0);
6795 /* Allows cleanups up to here. */
6796 old_cleanups = cleanups_this_call;
6797 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6799 dest_right_flag = get_last_insn ();
6803 op1 = gen_label_rtx ();
6804 jumpifnot (TREE_OPERAND (exp, 0), op0);
6806 /* Allows cleanups up to here. */
6807 old_cleanups = cleanups_this_call;
6809 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6811 expand_expr (TREE_OPERAND (exp, 1),
6812 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6813 dest_left_flag = get_last_insn ();
6815 /* Handle conditional cleanups, if any. */
6816 left_cleanups = defer_cleanups_to (old_cleanups);
6819 emit_jump_insn (gen_jump (op1));
6823 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6825 expand_expr (TREE_OPERAND (exp, 2),
6826 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6827 dest_right_flag = get_last_insn ();
6830 /* Handle conditional cleanups, if any. */
6831 right_cleanups = defer_cleanups_to (old_cleanups);
6837 /* Add back in any conditional cleanups. */
6838 if (left_cleanups || right_cleanups)
6844 /* Now that we know that a flag is needed, go back and add in the
6845 setting of the flag. */
6847 /* Do the left side flag. */
6848 last = get_last_insn ();
6849 /* Flag left cleanups as needed. */
6850 emit_move_insn (flag, const1_rtx);
6851 /* ??? deprecated, use sequences instead. */
6852 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
6854 /* Do the right side flag. */
6855 last = get_last_insn ();
6856 /* Flag right cleanups as needed. */
6857 emit_move_insn (flag, const0_rtx);
6858 /* ??? deprecated, use sequences instead. */
6859 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
6861 /* All cleanups must be on the function_obstack. */
6862 push_obstacks_nochange ();
6863 resume_temporary_allocation ();
6865 /* Convert flag, which is an rtx, into a tree. */
6866 cond = make_node (RTL_EXPR);
6867 TREE_TYPE (cond) = integer_type_node;
6868 RTL_EXPR_RTL (cond) = flag;
6869 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
6870 cond = save_expr (cond);
6872 if (! left_cleanups)
6873 left_cleanups = integer_zero_node;
6874 if (! right_cleanups)
6875 right_cleanups = integer_zero_node;
6876 new_cleanups = build (COND_EXPR, void_type_node,
6877 truthvalue_conversion (cond),
6878 left_cleanups, right_cleanups);
6879 new_cleanups = fold (new_cleanups);
6883 /* Now add in the conditionalized cleanups. */
6884 cleanups_this_call
6885 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
6886 expand_eh_region_start ();
6891 case TARGET_EXPR:
6892 {
6893 /* Something needs to be initialized, but we didn't know
6894 where that thing was when building the tree. For example,
6895 it could be the return value of a function, or a parameter
6896 to a function which is laid down on the stack, or a temporary
6897 variable which must be passed by reference.
6899 We guarantee that the expression will either be constructed
6900 or copied into our original target. */
6902 tree slot = TREE_OPERAND (exp, 0);
6903 tree cleanups = NULL_TREE;
6907 if (TREE_CODE (slot) != VAR_DECL)
6911 target = original_target;
6915 if (DECL_RTL (slot) != 0)
6917 target = DECL_RTL (slot);
6918 /* We have already expanded the slot, so don't do
6919 it again. */
6920 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6925 target = assign_temp (type, 2, 1, 1);
6926 /* All temp slots at this level must not conflict. */
6927 preserve_temp_slots (target);
6928 DECL_RTL (slot) = target;
6930 /* Since SLOT is not known to the called function
6931 to belong to its stack frame, we must build an explicit
6932 cleanup. This case occurs when we must build up a reference
6933 to pass the reference as an argument. In this case,
6934 it is very likely that such a reference need not be
6935 built here. */
6937 if (TREE_OPERAND (exp, 2) == 0)
6938 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
6939 cleanups = TREE_OPERAND (exp, 2);
6944 /* This case does occur when expanding a parameter which
6945 needs to be constructed on the stack. The target
6946 is the actual stack address that we want to initialize.
6947 The function we call will perform the cleanup in this case. */
6949 /* If we have already assigned it space, use that space,
6950 not the target that we were passed in, as our target
6951 parameter is only a hint. */
6952 if (DECL_RTL (slot) != 0)
6954 target = DECL_RTL (slot);
6955 /* We have already expanded the slot, so don't do
6956 it again. */
6957 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6961 DECL_RTL (slot) = target;
6964 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
6965 /* Mark it as expanded. */
6966 TREE_OPERAND (exp, 1) = NULL_TREE;
6968 store_expr (exp1, target, 0);
6972 cleanups_this_call = tree_cons (NULL_TREE,
6973 cleanups,
6974 cleanups_this_call);
6975 expand_eh_region_start ();
6981 case INIT_EXPR:
6982 {
6983 tree lhs = TREE_OPERAND (exp, 0);
6984 tree rhs = TREE_OPERAND (exp, 1);
6985 tree noncopied_parts = 0;
6986 tree lhs_type = TREE_TYPE (lhs);
6988 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6989 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
6990 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
6991 TYPE_NONCOPIED_PARTS (lhs_type));
6992 while (noncopied_parts != 0)
6994 expand_assignment (TREE_VALUE (noncopied_parts),
6995 TREE_PURPOSE (noncopied_parts), 0, 0);
6996 noncopied_parts = TREE_CHAIN (noncopied_parts);
7001 case MODIFY_EXPR:
7002 {
7003 /* If lhs is complex, expand calls in rhs before computing it.
7004 That's so we don't compute a pointer and save it over a call.
7005 If lhs is simple, compute it first so we can give it as a
7006 target if the rhs is just a call. This avoids an extra temp and copy
7007 and that prevents a partial-subsumption which makes bad code.
7008 Actually we could treat component_ref's of vars like vars. */
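/* Illustrative contrast: for "*p = f ()" the call is expanded first,
   so no pointer must live across it; for "v = f ()" with v a simple
   variable, v itself can serve as the call's target, saving a
   temporary and a copy.  */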
7010 tree lhs = TREE_OPERAND (exp, 0);
7011 tree rhs = TREE_OPERAND (exp, 1);
7012 tree noncopied_parts = 0;
7013 tree lhs_type = TREE_TYPE (lhs);
7017 if (TREE_CODE (lhs) != VAR_DECL
7018 && TREE_CODE (lhs) != RESULT_DECL
7019 && TREE_CODE (lhs) != PARM_DECL)
7020 preexpand_calls (exp);
7022 /* Check for |= or &= of a bitfield of size one into another bitfield
7023 of size 1. In this case, (unless we need the result of the
7024 assignment) we can do this more efficiently with a
7025 test followed by an assignment, if necessary.
7027 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7028 things change so we do, this code should be enhanced to
7029 handle it. */
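/* Illustrative example: for "s.b |= t.b" with two one-bit fields and
   the result unused, the code below tests t.b and jumps around the
   store when t.b is zero; only when t.b is set is 1 stored into s.b.  */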
7031 && TREE_CODE (lhs) == COMPONENT_REF
7032 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7033 || TREE_CODE (rhs) == BIT_AND_EXPR)
7034 && TREE_OPERAND (rhs, 0) == lhs
7035 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7036 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7037 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7039 rtx label = gen_label_rtx ();
7041 do_jump (TREE_OPERAND (rhs, 1),
7042 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7043 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7044 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7045 (TREE_CODE (rhs) == BIT_IOR_EXPR
7046 ? integer_one_node
7047 : integer_zero_node)),
7049 do_pending_stack_adjust ();
7054 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7055 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7056 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7057 TYPE_NONCOPIED_PARTS (lhs_type));
7059 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7060 while (noncopied_parts != 0)
7062 expand_assignment (TREE_PURPOSE (noncopied_parts),
7063 TREE_VALUE (noncopied_parts), 0, 0);
7064 noncopied_parts = TREE_CHAIN (noncopied_parts);
7069 case PREINCREMENT_EXPR:
7070 case PREDECREMENT_EXPR:
7071 return expand_increment (exp, 0, ignore);
7073 case POSTINCREMENT_EXPR:
7074 case POSTDECREMENT_EXPR:
7075 /* Faster to treat as pre-increment if result is not used. */
7076 return expand_increment (exp, ! ignore, ignore);
7078 case ADDR_EXPR:
7079 /* If nonzero, TEMP will be set to the address of something that might
7080 be a MEM corresponding to a stack slot. */
7081 temp = 0;
7083 /* Are we taking the address of a nested function? */
7084 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
7085 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
7086 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0)))
7088 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7089 op0 = force_operand (op0, target);
7091 /* If we are taking the address of something erroneous, just
7092 return a zero. */
7093 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7094 return const0_rtx;
7097 /* We make sure to pass const0_rtx down if we came in with
7098 ignore set, to avoid doing the cleanups twice for something. */
7099 op0 = expand_expr (TREE_OPERAND (exp, 0),
7100 ignore ? const0_rtx : NULL_RTX, VOIDmode,
7101 (modifier == EXPAND_INITIALIZER
7102 ? modifier : EXPAND_CONST_ADDRESS));
7104 /* If we are going to ignore the result, OP0 will have been set
7105 to const0_rtx, so just return it. Don't get confused and
7106 think we are taking the address of the constant. */
7110 op0 = protect_from_queue (op0, 0);
7112 /* We would like the object in memory. If it is a constant,
7113 we can have it be statically allocated into memory. For
7114 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
7115 memory and store the value into it. */
7117 if (CONSTANT_P (op0))
7118 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7119 op0);
7120 else if (GET_CODE (op0) == MEM)
7122 mark_temp_addr_taken (op0);
7123 temp = XEXP (op0, 0);
7126 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7127 || GET_CODE (op0) == CONCAT)
7129 /* If this object is in a register, it must not be BLKmode;
7130 copy it into a memory temporary so its address can be taken. */
7131 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7132 rtx memloc = assign_temp (inner_type, 1, 1, 1);
7134 mark_temp_addr_taken (memloc);
7135 emit_move_insn (memloc, op0);
7136 op0 = memloc;
7139 if (GET_CODE (op0) != MEM)
7140 abort ();
7142 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7144 temp = XEXP (op0, 0);
7145 #ifdef POINTERS_EXTEND_UNSIGNED
7146 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7147 && mode == ptr_mode)
7148 temp = convert_memory_address (ptr_mode, temp);
7153 op0 = force_operand (XEXP (op0, 0), target);
7156 if (flag_force_addr && GET_CODE (op0) != REG)
7157 op0 = force_reg (Pmode, op0);
7159 if (GET_CODE (op0) == REG
7160 && ! REG_USERVAR_P (op0))
7161 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
7163 /* If we might have had a temp slot, add an equivalent address
7164 to be remembered. */
7165 if (temp != 0)
7166 update_temp_slot_address (temp, op0);
7168 #ifdef POINTERS_EXTEND_UNSIGNED
7169 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7170 && mode == ptr_mode)
7171 op0 = convert_memory_address (ptr_mode, op0);
7176 case ENTRY_VALUE_EXPR:
7177 abort ();
7179 /* COMPLEX type for Extended Pascal & Fortran */
7180 case COMPLEX_EXPR:
7181 {
7182 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7185 /* Get the rtx code of the operands. */
7186 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7187 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7189 if (! target)
7190 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7192 start_sequence ();
7194 /* Move the real (op0) and imaginary (op1) parts to their location. */
7195 emit_move_insn (gen_realpart (mode, target), op0);
7196 emit_move_insn (gen_imagpart (mode, target), op1);
7198 insns = get_insns ();
7199 end_sequence ();
7201 /* Complex construction should appear as a single unit. */
7202 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7203 each with a separate pseudo as destination.
7204 It's not correct for flow to treat them as a unit. */
7205 if (GET_CODE (target) != CONCAT)
7206 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
7213 case REALPART_EXPR:
7214 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7215 return gen_realpart (mode, op0);
7217 case IMAGPART_EXPR:
7218 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7219 return gen_imagpart (mode, op0);
7221 case CONJ_EXPR:
7222 {
7223 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7227 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7229 if (! target)
7230 target = gen_reg_rtx (mode);
7232 start_sequence ();
7234 /* Store the realpart and the negated imagpart to target. */
7235 emit_move_insn (gen_realpart (partmode, target),
7236 gen_realpart (partmode, op0));
7238 imag_t = gen_imagpart (partmode, target);
7239 temp = expand_unop (partmode, neg_optab,
7240 gen_imagpart (partmode, op0), imag_t, 0);
7242 emit_move_insn (imag_t, temp);
7244 insns = get_insns ();
7245 end_sequence ();
7247 /* Conjugate should appear as a single unit.
7248 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
7249 each with a separate pseudo as destination.
7250 It's not correct for flow to treat them as a unit. */
7251 if (GET_CODE (target) != CONCAT)
7252 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
7259 case ERROR_MARK:
7260 op0 = CONST0_RTX (tmode);
7265 default:
7266 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7269 /* Here to do an ordinary binary operator, generating an instruction
7270 from the optab already placed in `this_optab'. */
7272 preexpand_calls (exp);
7273 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
7274 subtarget = 0;
7275 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7276 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7278 temp = expand_binop (mode, this_optab, op0, op1, target,
7279 unsignedp, OPTAB_LIB_WIDEN);
7286 /* Emit bytecode to evaluate the given expression EXP to the stack. */
7289 bc_expand_expr (exp)
7292 enum tree_code code;
7295 struct binary_operator *binoptab;
7296 struct unary_operator *unoptab;
7297 struct increment_operator *incroptab;
7298 struct bc_label *lab, *lab1;
7299 enum bytecode_opcode opcode;
7302 code = TREE_CODE (exp);
7308 if (DECL_RTL (exp) == 0)
7310 error_with_decl (exp, "prior parameter's size depends on `%s'");
7314 bc_load_parmaddr (DECL_RTL (exp));
7315 bc_load_memory (TREE_TYPE (exp), exp);
7321 if (DECL_RTL (exp) == 0)
7325 if (BYTECODE_LABEL (DECL_RTL (exp)))
7326 bc_load_externaddr (DECL_RTL (exp));
7328 bc_load_localaddr (DECL_RTL (exp));
7330 if (TREE_PUBLIC (exp))
7331 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
7332 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
7334 bc_load_localaddr (DECL_RTL (exp));
7336 bc_load_memory (TREE_TYPE (exp), exp);
7341 #ifdef DEBUG_PRINT_CODE
7342 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
7344 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
7346 : TYPE_MODE (TREE_TYPE (exp)))],
7347 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
7353 #ifdef DEBUG_PRINT_CODE
7354 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
7356 /* FIX THIS: find a better way to pass real_cst's. -bson */
7357 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
7358 (double) TREE_REAL_CST (exp));
7367 /* We build a call description vector describing the type of
7368 the return value and of the arguments; this call vector,
7369 together with a pointer to a location for the return value
7370 and the base of the argument list, is passed to the low
7371 level machine dependent call subroutine, which is responsible
7372 for putting the arguments wherever real functions expect
7373 them, as well as getting the return value back. */
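/* Assumed layout, shown for exposition only: for a call f (x, y) the
   calldesc vector is roughly
   { nargs, ret_type_code, ret_size, x_type_code, x_size,
   y_type_code, y_size },
   built up in reverse below with tree_cons and emitted as a constant.  */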
7375 tree calldesc = 0, arg;
7379 /* Push the evaluated args on the evaluation stack in reverse
7380 order. Also make an entry for each arg in the calldesc
7381 vector while we're at it. */
7383 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7385 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
7388 bc_expand_expr (TREE_VALUE (arg));
7390 calldesc = tree_cons ((tree) 0,
7391 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
7393 calldesc = tree_cons ((tree) 0,
7394 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
7398 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7400 /* Allocate a location for the return value and push its
7401 address on the evaluation stack. Also make an entry
7402 at the front of the calldesc for the return value type. */
7404 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7405 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
7406 bc_load_localaddr (retval);
7408 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
7409 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
7411 /* Prepend the argument count. */
7412 calldesc = tree_cons ((tree) 0,
7413 build_int_2 (nargs, 0),
7416 /* Push the address of the call description vector on the stack. */
7417 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
7418 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
7419 build_index_type (build_int_2 (nargs * 2, 0)));
7420 r = output_constant_def (calldesc);
7421 bc_load_externaddr (r);
7423 /* Push the address of the function to be called. */
7424 bc_expand_expr (TREE_OPERAND (exp, 0));
7426 /* Call the function, popping its address and the calldesc vector
7427 address off the evaluation stack in the process. */
7428 bc_emit_instruction (call);
7430 /* Pop the arguments off the stack. */
7431 bc_adjust_stack (nargs);
7433 /* Load the return value onto the stack. */
7434 bc_load_localaddr (retval);
7435 bc_load_memory (type, TREE_OPERAND (exp, 0));
7441 if (!SAVE_EXPR_RTL (exp))
7443 /* First time around: copy to local variable */
7444 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
7445 TYPE_ALIGN (TREE_TYPE(exp)));
7446 bc_expand_expr (TREE_OPERAND (exp, 0));
7447 bc_emit_instruction (duplicate);
7449 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7450 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7454 /* Consecutive reference: use saved copy */
7455 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7456 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7461 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
7462 how are they handled instead? */
7465 TREE_USED (exp) = 1;
7466 bc_expand_expr (STMT_BODY (exp));
7473 bc_expand_expr (TREE_OPERAND (exp, 0));
7474 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
7479 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
7484 bc_expand_address (TREE_OPERAND (exp, 0));
7489 bc_expand_expr (TREE_OPERAND (exp, 0));
7490 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7495 bc_expand_expr (bc_canonicalize_array_ref (exp));
7500 bc_expand_component_address (exp);
7502 /* If we have a bitfield, generate a proper load */
7503 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
7508 bc_expand_expr (TREE_OPERAND (exp, 0));
7509 bc_emit_instruction (drop);
7510 bc_expand_expr (TREE_OPERAND (exp, 1));
7515 bc_expand_expr (TREE_OPERAND (exp, 0));
7516 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7517 lab = bc_get_bytecode_label ();
7518 bc_emit_bytecode (xjumpifnot);
7519 bc_emit_bytecode_labelref (lab);
7521 #ifdef DEBUG_PRINT_CODE
7522 fputc ('\n', stderr);
7524 bc_expand_expr (TREE_OPERAND (exp, 1));
7525 lab1 = bc_get_bytecode_label ();
7526 bc_emit_bytecode (jump);
7527 bc_emit_bytecode_labelref (lab1);
7529 #ifdef DEBUG_PRINT_CODE
7530 fputc ('\n', stderr);
7533 bc_emit_bytecode_labeldef (lab);
7534 bc_expand_expr (TREE_OPERAND (exp, 2));
7535 bc_emit_bytecode_labeldef (lab1);
7538 case TRUTH_ANDIF_EXPR:
7540 opcode = xjumpifnot;
7543 case TRUTH_ORIF_EXPR:
7550 binoptab = optab_plus_expr;
7555 binoptab = optab_minus_expr;
7560 binoptab = optab_mult_expr;
7563 case TRUNC_DIV_EXPR:
7564 case FLOOR_DIV_EXPR:
7565 case CEIL_DIV_EXPR:
7566 case ROUND_DIV_EXPR:
7567 case EXACT_DIV_EXPR:
7569 binoptab = optab_trunc_div_expr;
7572 case TRUNC_MOD_EXPR:
7573 case FLOOR_MOD_EXPR:
7574 case CEIL_MOD_EXPR:
7575 case ROUND_MOD_EXPR:
7577 binoptab = optab_trunc_mod_expr;
7580 case FIX_ROUND_EXPR:
7581 case FIX_FLOOR_EXPR:
7582 case FIX_CEIL_EXPR:
7583 abort (); /* Not used for C. */
7585 case FIX_TRUNC_EXPR:
7592 abort (); /* FIXME */
7596 binoptab = optab_rdiv_expr;
7601 binoptab = optab_bit_and_expr;
7606 binoptab = optab_bit_ior_expr;
7611 binoptab = optab_bit_xor_expr;
7616 binoptab = optab_lshift_expr;
7621 binoptab = optab_rshift_expr;
7624 case TRUTH_AND_EXPR:
7626 binoptab = optab_truth_and_expr;
7631 binoptab = optab_truth_or_expr;
7636 binoptab = optab_lt_expr;
7641 binoptab = optab_le_expr;
7646 binoptab = optab_ge_expr;
7651 binoptab = optab_gt_expr;
7656 binoptab = optab_eq_expr;
7661 binoptab = optab_ne_expr;
7666 unoptab = optab_negate_expr;
7671 unoptab = optab_bit_not_expr;
7674 case TRUTH_NOT_EXPR:
7676 unoptab = optab_truth_not_expr;
7679 case PREDECREMENT_EXPR:
7681 incroptab = optab_predecrement_expr;
7684 case PREINCREMENT_EXPR:
7686 incroptab = optab_preincrement_expr;
7689 case POSTDECREMENT_EXPR:
7691 incroptab = optab_postdecrement_expr;
7694 case POSTINCREMENT_EXPR:
7696 incroptab = optab_postincrement_expr;
7701 bc_expand_constructor (exp);
7711 tree vars = TREE_OPERAND (exp, 0);
7712 int vars_need_expansion = 0;
7714 /* Need to open a binding contour here because
7715 if there are any cleanups they must be contained here. */
7716 expand_start_bindings (0);
7718 /* Mark the corresponding BLOCK for output. */
7719 if (TREE_OPERAND (exp, 2) != 0)
7720 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
7722 /* If VARS have not yet been expanded, expand them now. */
7725 if (DECL_RTL (vars) == 0)
7727 vars_need_expansion = 1;
7730 expand_decl_init (vars);
7731 vars = TREE_CHAIN (vars);
7734 bc_expand_expr (TREE_OPERAND (exp, 1));
7736 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7746 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
7747 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
7753 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7759 bc_expand_expr (TREE_OPERAND (exp, 0));
7760 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7761 lab = bc_get_bytecode_label ();
7763 bc_emit_instruction (duplicate);
7764 bc_emit_bytecode (opcode);
7765 bc_emit_bytecode_labelref (lab);
7767 #ifdef DEBUG_PRINT_CODE
7768 fputc ('\n', stderr);
7771 bc_emit_instruction (drop);
7773 bc_expand_expr (TREE_OPERAND (exp, 1));
7774 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
7775 bc_emit_bytecode_labeldef (lab);
7781 type = TREE_TYPE (TREE_OPERAND (exp, 0));
7783 /* Push the quantum. */
7784 bc_expand_expr (TREE_OPERAND (exp, 1));
7786 /* Convert it to the lvalue's type. */
7787 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
7789 /* Push the address of the lvalue */
7790 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
7792 /* Perform actual increment */
7793 bc_expand_increment (incroptab, type);
7797 /* Return the alignment in bits of EXP, a pointer valued expression.
7798 But don't return more than MAX_ALIGN no matter what.
7799 The alignment returned is, by default, the alignment of the thing that
7800 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7802 Otherwise, look at the expression to see if we can do better, i.e., if the
7803 expression is actually pointing at an object whose alignment is tighter. */
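/* Illustrative example: for "&v" with v a global int, the ADDR_EXPR
   case below reaches the VAR_DECL and returns DECL_ALIGN (v), capped
   at MAX_ALIGN, which may be tighter than the alignment the pointer's
   static type alone would promise.  */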
7806 get_pointer_alignment (exp, max_align)
7810 unsigned align, inner;
7812 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7815 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7816 align = MIN (align, max_align);
7820 switch (TREE_CODE (exp))
7821 {
7822 case NOP_EXPR:
7823 case CONVERT_EXPR:
7824 case NON_LVALUE_EXPR:
7825 exp = TREE_OPERAND (exp, 0);
7826 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7827 return align;
7828 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7829 align = MIN (inner, max_align);
7830 break;
7832 case PLUS_EXPR:
7833 /* If sum of pointer + int, restrict our maximum alignment to that
7834 imposed by the integer. If not, we can't do any better than
7835 ALIGN. */
7836 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
7839 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
7840 & (max_align - 1))
7841 != 0)
7842 max_align >>= 1;
7844 exp = TREE_OPERAND (exp, 0);
7845 break;
7847 case ADDR_EXPR:
7848 /* See what we are pointing at and look at its alignment. */
7849 exp = TREE_OPERAND (exp, 0);
7850 if (TREE_CODE (exp) == FUNCTION_DECL)
7851 align = FUNCTION_BOUNDARY;
7852 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
7853 align = DECL_ALIGN (exp);
7854 #ifdef CONSTANT_ALIGNMENT
7855 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7856 align = CONSTANT_ALIGNMENT (exp, align);
7857 #endif
7858 return MIN (align, max_align);
7866 /* Return the tree node and offset if a given argument corresponds to
7867 a string constant. */
7870 string_constant (arg, ptr_offset)
7876 if (TREE_CODE (arg) == ADDR_EXPR
7877 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7879 *ptr_offset = integer_zero_node;
7880 return TREE_OPERAND (arg, 0);
7882 else if (TREE_CODE (arg) == PLUS_EXPR)
7884 tree arg0 = TREE_OPERAND (arg, 0);
7885 tree arg1 = TREE_OPERAND (arg, 1);
7890 if (TREE_CODE (arg0) == ADDR_EXPR
7891 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
7892 {
7893 *ptr_offset = arg1;
7894 return TREE_OPERAND (arg0, 0);
7895 }
7896 else if (TREE_CODE (arg1) == ADDR_EXPR
7897 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
7898 {
7899 *ptr_offset = arg0;
7900 return TREE_OPERAND (arg1, 0);
7901 }
7907 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
7908 way, because it could contain a zero byte in the middle.
7909 TREE_STRING_LENGTH is the size of the character array, not the string.
7911 Unfortunately, string_constant can't access the values of const char
7912 arrays with initializers, so neither can we do so here. */
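/* Illustrative behavior: c_strlen ("foobar" + 2) folds to 4, while a
   non-literal argument, or an unknown offset into a literal with an
   embedded '\0', makes the function return 0 so the caller falls back
   to a runtime strlen call.  */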
7922 src = string_constant (src, &offset_node);
7925 max = TREE_STRING_LENGTH (src);
7926 ptr = TREE_STRING_POINTER (src);
7927 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
7929 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7930 compute the offset to the following null if we don't know where to
7931 start searching for it. */
7933 for (i = 0; i < max; i++)
7934 if (ptr[i] == 0)
7935 return 0;
7936 /* We don't know the starting offset, but we do know that the string
7937 has no internal zero bytes. We can assume that the offset falls
7938 within the bounds of the string; otherwise, the programmer deserves
7939 what he gets. Subtract the offset from the length of the string,
7940 and return that. */
7941 /* This would perhaps not be valid if we were dealing with named
7942 arrays in addition to literal string constants. */
7943 return size_binop (MINUS_EXPR, size_int (max), offset_node);
7946 /* We have a known offset into the string. Start searching there for
7947 a null character. */
7948 if (offset_node == 0)
7949 offset = 0;
7950 else
7951 {
7952 /* Did we get a long long offset? If so, punt. */
7953 if (TREE_INT_CST_HIGH (offset_node) != 0)
7954 return 0;
7955 offset = TREE_INT_CST_LOW (offset_node);
7956 }
7957 /* If the offset is known to be out of bounds, warn, and call strlen at
7958 runtime. */
7959 if (offset < 0 || offset > max)
7960 {
7961 warning ("offset outside bounds of constant string");
7962 return 0;
7963 }
7964 /* Use strlen to search for the first zero byte. Since any strings
7965 constructed with build_string will have nulls appended, we win even
7966 if we get handed something like (char[4])"abcd".
7968 Since OFFSET is our starting index into the string, no further
7969 calculation is needed. */
7970 return size_int (strlen (ptr + offset));
7974 expand_builtin_return_addr (fndecl_code, count, tem)
7975 enum built_in_function fndecl_code;
7981 /* Some machines need special handling before we can access
7982 arbitrary frames. For example, on the sparc, we must first flush
7983 all register windows to the stack. */
7984 #ifdef SETUP_FRAME_ADDRESSES
7985 SETUP_FRAME_ADDRESSES ();
7988 /* On the sparc, the return address is not in the frame, it is in a
7989 register. There is no way to access it off of the current frame
7990 pointer, but it can be accessed off the previous frame pointer by
7991 reading the value from the register window save area. */
7992 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7993 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
7994 count--;
7995 #endif
7997 /* Scan back COUNT frames to the specified frame. */
7998 for (i = 0; i < count; i++)
8000 /* Assume the dynamic chain pointer is in the word that the
8001 frame address points to, unless otherwise specified. */
8002 #ifdef DYNAMIC_CHAIN_ADDRESS
8003 tem = DYNAMIC_CHAIN_ADDRESS (tem);
8005 tem = memory_address (Pmode, tem);
8006 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
8009 /* For __builtin_frame_address, return what we've got. */
8010 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
8011 return tem;
8013 /* For __builtin_return_address, get the return address from that
8014 frame. */
8015 #ifdef RETURN_ADDR_RTX
8016 tem = RETURN_ADDR_RTX (count, tem);
8017 #else
8018 tem = memory_address (Pmode,
8019 plus_constant (tem, GET_MODE_SIZE (Pmode)));
8020 tem = gen_rtx (MEM, Pmode, tem);
8021 #endif
8022 return tem;
8023 }
8025 /* Expand an expression EXP that calls a built-in function,
8026 with result going to TARGET if that's convenient
8027 (and in mode MODE if that's convenient).
8028 SUBTARGET may be used as the target for computing one of EXP's operands.
8029 IGNORE is nonzero if the value is to be ignored. */
8031 #define CALLED_AS_BUILT_IN(NODE) \
8032 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
8035 expand_builtin (exp, target, subtarget, mode, ignore)
8039 enum machine_mode mode;
8042 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8043 tree arglist = TREE_OPERAND (exp, 1);
8046 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
8047 optab builtin_optab;
8049 switch (DECL_FUNCTION_CODE (fndecl))
8054 /* build_function_call changes these into ABS_EXPR. */
8059 /* Treat these like sqrt, but only if the user asks for them. */
8060 if (! flag_fast_math)
8062 case BUILT_IN_FSQRT:
8063 /* If not optimizing, call the library function. */
8064 if (! optimize)
8065 break;
8067 if (arglist == 0
8068 /* Arg could be wrong type if user redeclared this fcn wrong. */
8069 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
8070 break;
8072 /* Stabilize and compute the argument. */
8073 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
8074 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
8076 exp = copy_node (exp);
8077 arglist = copy_node (arglist);
8078 TREE_OPERAND (exp, 1) = arglist;
8079 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
8081 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8083 /* Make a suitable register to place result in. */
8084 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8089 switch (DECL_FUNCTION_CODE (fndecl))
8092 builtin_optab = sin_optab; break;
8094 builtin_optab = cos_optab; break;
8095 case BUILT_IN_FSQRT:
8096 builtin_optab = sqrt_optab; break;
8101 /* Compute into TARGET.
8102 Set TARGET to wherever the result comes back. */
8103 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8104 builtin_optab, op0, target, 0);
8106 /* If we were unable to expand via the builtin, stop the
8107 sequence (without outputting the insns) and break, causing
8108 a call to the library function. */
8115 /* Check the results by default. But if flag_fast_math is turned on,
8116 then assume sqrt will always be called with valid arguments. */
8118 if (! flag_fast_math)
8120 /* Don't define the builtin FP instructions
8121 if your machine is not IEEE. */
8122 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8125 lab1 = gen_label_rtx ();
8127 /* Test the result; if it is NaN, set errno=EDOM because
8128 the argument was not in the domain. */
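/* The self-comparison below works because NaN is the only IEEE value
   for which x == x is false, so a failed EQ test of TARGET against
   itself identifies a domain error.  */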
8129 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
8130 emit_jump_insn (gen_beq (lab1));
8134 #ifdef GEN_ERRNO_RTX
8135 rtx errno_rtx = GEN_ERRNO_RTX;
8136 #else
8137 rtx errno_rtx
8138 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
8139 #endif
8141 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8144 /* We can't set errno=EDOM directly; let the library call do it.
8145 Pop the arguments right away in case the call gets deleted. */
8147 expand_call (exp, target, 0);
8154 /* Output the entire sequence. */
8155 insns = get_insns ();
8161 /* __builtin_apply_args returns block of memory allocated on
8162 the stack into which is stored the arg pointer, structure
8163 value address, static chain, and all the registers that might
8164 possibly be used in performing a function call. The code is
8165 moved to the start of the function so the incoming values are
8167 case BUILT_IN_APPLY_ARGS:
8168 /* Don't do __builtin_apply_args more than once in a function.
8169 Save the result of the first call and reuse it. */
8170 if (apply_args_value != 0)
8171 return apply_args_value;
8173 /* When this function is called, it means that registers must be
8174 saved on entry to this function. So we migrate the
8175 call to the first insn of this function. */
8180 temp = expand_builtin_apply_args ();
8184 apply_args_value = temp;
8186 /* Put the sequence after the NOTE that starts the function.
8187 If this is inside a SEQUENCE, make the outer-level insn
8188 chain current, so the code is placed at the start of the
8189 function. */
8190 push_topmost_sequence ();
8191 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8192 pop_topmost_sequence ();
8196 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8197 FUNCTION with a copy of the parameters described by
8198 ARGUMENTS, and ARGSIZE. It returns a block of memory
8199 allocated on the stack into which is stored all the registers
8200 that might possibly be used for returning the result of a
8201 function. ARGUMENTS is the value returned by
8202 __builtin_apply_args. ARGSIZE is the number of bytes of
8203 arguments that must be copied. ??? How should this value be
8204 computed? We'll also need a safe worst case value for varargs
8205 functions. */
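/* Illustrative use from user code (hypothetical):
   void *args = __builtin_apply_args ();
   void *res = __builtin_apply ((void (*) ()) fn, args, 64);
   __builtin_return (res);
   where 64 is a caller-chosen worst-case size for the argument block.  */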
8206 case BUILT_IN_APPLY:
8207 if (arglist == 0
8208 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8209 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8210 || TREE_CHAIN (arglist) == 0
8211 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8212 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8213 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8221 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8222 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
8224 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8227 /* __builtin_return (RESULT) causes the function to return the
8228 value described by RESULT. RESULT is address of the block of
8229 memory returned by __builtin_apply. */
8230 case BUILT_IN_RETURN:
8231 if (arglist
8232 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8233 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8234 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8235 NULL_RTX, VOIDmode, 0));
8238 case BUILT_IN_SAVEREGS:
8239 /* Don't do __builtin_saveregs more than once in a function.
8240 Save the result of the first call and reuse it. */
8241 if (saveregs_value != 0)
8242 return saveregs_value;
8244 /* When this function is called, it means that registers must be
8245 saved on entry to this function. So we migrate the
8246 call to the first insn of this function. */
8250 /* Now really call the function. `expand_call' does not call
8251 expand_builtin, so there is no danger of infinite recursion here. */
8254 #ifdef EXPAND_BUILTIN_SAVEREGS
8255 /* Do whatever the machine needs done in this case. */
8256 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8258 /* The register where the function returns its value
8259 is likely to have something else in it, such as an argument.
8260 So preserve that register around the call. */
8262 if (value_mode != VOIDmode)
8264 rtx valreg = hard_libcall_value (value_mode);
8265 rtx saved_valreg = gen_reg_rtx (value_mode);
8267 emit_move_insn (saved_valreg, valreg);
8268 temp = expand_call (exp, target, ignore);
8269 emit_move_insn (valreg, saved_valreg);
8272 /* Generate the call, putting the value in a pseudo. */
8273 temp = expand_call (exp, target, ignore);
8279 saveregs_value = temp;
8281 /* Put the sequence after the NOTE that starts the function.
8282 If this is inside a SEQUENCE, make the outer-level insn
8283 chain current, so the code is placed at the start of the
8284 function. */
8285 push_topmost_sequence ();
8286 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8287 pop_topmost_sequence ();
8291 /* __builtin_args_info (N) returns word N of the arg space info
8292 for the current function. The number and meanings of words
8293 is controlled by the definition of CUMULATIVE_ARGS. */
8294 case BUILT_IN_ARGS_INFO:
8296 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
8298 int *word_ptr = (int *) &current_function_args_info;
8299 tree type, elts, result;
8301 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8302 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8303 __FILE__, __LINE__);
8307 tree arg = TREE_VALUE (arglist);
8308 if (TREE_CODE (arg) != INTEGER_CST)
8309 error ("argument of `__builtin_args_info' must be constant");
8312 int wordnum = TREE_INT_CST_LOW (arg);
8314 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8315 error ("argument of `__builtin_args_info' out of range");
8317 return GEN_INT (word_ptr[wordnum]);
8321 error ("missing argument in `__builtin_args_info'");
8326 for (i = 0; i < nwords; i++)
8327 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0), elts);
8329 type = build_array_type (integer_type_node,
8330 build_index_type (build_int_2 (nwords, 0)));
8331 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8332 TREE_CONSTANT (result) = 1;
8333 TREE_STATIC (result) = 1;
8334 result = build (INDIRECT_REF, build_pointer_type (type), result);
8335 TREE_CONSTANT (result) = 1;
8336 return expand_expr (result, NULL_RTX, VOIDmode, 0);
8340 /* Return the address of the first anonymous stack arg. */
8341 case BUILT_IN_NEXT_ARG:
8343 tree fntype = TREE_TYPE (current_function_decl);
8345 if ((TYPE_ARG_TYPES (fntype) == 0
8346 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8347 == void_type_node))
8348 && ! current_function_varargs)
8350 error ("`va_start' used in function with fixed args");
8356 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8357 tree arg = TREE_VALUE (arglist);
8359 /* Strip off all nops for the sake of the comparison. This
8360 is not quite the same as STRIP_NOPS. It does more.
8361 We must also strip off INDIRECT_REF for C++ reference parameters. */
8363 while (TREE_CODE (arg) == NOP_EXPR
8364 || TREE_CODE (arg) == CONVERT_EXPR
8365 || TREE_CODE (arg) == NON_LVALUE_EXPR
8366 || TREE_CODE (arg) == INDIRECT_REF)
8367 arg = TREE_OPERAND (arg, 0);
8368 if (arg != last_parm)
8369 warning ("second parameter of `va_start' not last named argument");
8371 else if (! current_function_varargs)
8372 /* Evidently an out of date version of <stdarg.h>; can't validate
8373 va_start's second argument, but can still work as intended. */
8374 warning ("`__builtin_next_arg' called without an argument");
8377 return expand_binop (Pmode, add_optab,
8378 current_function_internal_arg_pointer,
8379 current_function_arg_offset_rtx,
8380 NULL_RTX, 0, OPTAB_LIB_WIDEN);
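      /* Sketch of the user-level connection, assuming a typical GCC
	 <stdarg.h> (the exact macro text varies by target):

	     #define va_start(AP, LASTARG) \
	       (AP = (char *) __builtin_next_arg (LASTARG))

	 so the sum formed above, arg pointer plus offset, is the address
	 of the first anonymous argument, just past LASTARG.  */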
8382 case BUILT_IN_CLASSIFY_TYPE:
8385 tree type = TREE_TYPE (TREE_VALUE (arglist));
8386 enum tree_code code = TREE_CODE (type);
8387 if (code == VOID_TYPE)
8388 return GEN_INT (void_type_class);
8389 if (code == INTEGER_TYPE)
8390 return GEN_INT (integer_type_class);
8391 if (code == CHAR_TYPE)
8392 return GEN_INT (char_type_class);
8393 if (code == ENUMERAL_TYPE)
8394 return GEN_INT (enumeral_type_class);
8395 if (code == BOOLEAN_TYPE)
8396 return GEN_INT (boolean_type_class);
8397 if (code == POINTER_TYPE)
8398 return GEN_INT (pointer_type_class);
8399 if (code == REFERENCE_TYPE)
8400 return GEN_INT (reference_type_class);
8401 if (code == OFFSET_TYPE)
8402 return GEN_INT (offset_type_class);
8403 if (code == REAL_TYPE)
8404 return GEN_INT (real_type_class);
8405 if (code == COMPLEX_TYPE)
8406 return GEN_INT (complex_type_class);
8407 if (code == FUNCTION_TYPE)
8408 return GEN_INT (function_type_class);
8409 if (code == METHOD_TYPE)
8410 return GEN_INT (method_type_class);
8411 if (code == RECORD_TYPE)
8412 return GEN_INT (record_type_class);
8413 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8414 return GEN_INT (union_type_class);
8415 if (code == ARRAY_TYPE)
8417 if (TYPE_STRING_FLAG (type))
8418 return GEN_INT (string_type_class);
8420 return GEN_INT (array_type_class);
8422 if (code == SET_TYPE)
8423 return GEN_INT (set_type_class);
8424 if (code == FILE_TYPE)
8425 return GEN_INT (file_type_class);
8426 if (code == LANG_TYPE)
8427 return GEN_INT (lang_type_class);
8429 return GEN_INT (no_type_class);
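      /* For example (the values are the enumerators from typeclass.h):

	     __builtin_classify_type (42)    =>  integer_type_class
	     __builtin_classify_type (3.5)   =>  real_type_class
	     __builtin_classify_type ("ab")  =>  pointer_type_class

	 the last because an array argument decays to a pointer.  */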
8431 case BUILT_IN_CONSTANT_P:
8436 tree arg = TREE_VALUE (arglist);
8439 return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
8440 || (TREE_CODE (arg) == ADDR_EXPR
8441 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8442 ? const1_rtx : const0_rtx);
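      /* E.g. __builtin_constant_p (3 * 4) yields 1 after folding, while
	 __builtin_constant_p (n) for an ordinary variable N yields 0,
	 since only constant-class nodes and string addresses are
	 recognized here.  */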
8445 case BUILT_IN_FRAME_ADDRESS:
8446 /* The argument must be a nonnegative integer constant.
8447 It counts the number of frames to scan up the stack.
8448 The value is the address of that frame. */
8449 case BUILT_IN_RETURN_ADDRESS:
8450 /* The argument must be a nonnegative integer constant.
8451 It counts the number of frames to scan up the stack.
8452 The value is the return address saved in that frame. */
8454 /* Warning about missing arg was already issued. */
8456 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
8458 error ("invalid arg to `__builtin_return_address'");
8461 else if (tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
8463 error ("invalid arg to `__builtin_return_address'");
8468 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
8469 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
8470 hard_frame_pointer_rtx);
8472 /* For __builtin_frame_address, return what we've got. */
8473 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8476 if (GET_CODE (tem) != REG)
8477 tem = copy_to_reg (tem);
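	/* E.g. __builtin_return_address (0) gives the return address of
	   the current frame and __builtin_frame_address (1) the frame of
	   the caller; counts greater than zero are reliable only when
	   the intervening frames keep a frame pointer.  */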
8481 case BUILT_IN_ALLOCA:
8483 /* Arg could be non-integer if user redeclared this fcn wrong. */
8484 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8487 /* Compute the argument. */
8488 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
8490 /* Allocate the desired space. */
8491 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
8494 /* If not optimizing, call the library function. */
8495 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8499 /* Arg could be non-integer if user redeclared this fcn wrong. */
8500 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8503 /* Compute the argument. */
8504 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8505 /* Compute ffs, into TARGET if possible.
8506 Set TARGET to wherever the result comes back. */
8507 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8508 ffs_optab, op0, target, 1);
8513 case BUILT_IN_STRLEN:
8514 /* If not optimizing, call the library function. */
8515 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8519 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8520 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8524 tree src = TREE_VALUE (arglist);
8525 tree len = c_strlen (src);
8528 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8530 rtx result, src_rtx, char_rtx;
8531 enum machine_mode insn_mode = value_mode, char_mode;
8532 enum insn_code icode;
8534 /* If the length is known, just return it. */
8536 return expand_expr (len, target, mode, 0);
8538 /* If SRC is not a pointer type, don't do this operation inline. */
8542 /* Call a function if we can't compute strlen in the right mode. */
8544 while (insn_mode != VOIDmode)
8546 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
8547 if (icode != CODE_FOR_nothing)
8550 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
8552 if (insn_mode == VOIDmode)
8555 /* Make a place to write the result of the instruction. */
8558 && GET_CODE (result) == REG
8559 && GET_MODE (result) == insn_mode
8560 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8561 result = gen_reg_rtx (insn_mode);
8563 /* Make sure the operands are acceptable to the predicates. */
8565 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
8566 result = gen_reg_rtx (insn_mode);
8568 src_rtx = memory_address (BLKmode,
8569 expand_expr (src, NULL_RTX, ptr_mode,
8571 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
8572 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
8574 char_rtx = const0_rtx;
8575 char_mode = insn_operand_mode[(int)icode][2];
8576 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
8577 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
8579 emit_insn (GEN_FCN (icode) (result,
8580 gen_rtx (MEM, BLKmode, src_rtx),
8581 char_rtx, GEN_INT (align)));
8583 /* Return the value in the proper mode for this function. */
8584 if (GET_MODE (result) == value_mode)
8586 else if (target != 0)
8588 convert_move (target, result, 0);
8592 return convert_to_mode (value_mode, result, 0);
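	/* E.g. strlen ("hello") is folded entirely at compile time:
	   c_strlen returns the constant 5 and no insn or library call is
	   emitted.  For a non-constant SRC on a machine providing a
	   strlen insn pattern, the code above computes the length inline
	   using the known pointer alignment.  */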
8595 case BUILT_IN_STRCPY:
8596 /* If not optimizing, call the library function. */
8597 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8601 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8602 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8603 || TREE_CHAIN (arglist) == 0
8604 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8608 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
8613 len = size_binop (PLUS_EXPR, len, integer_one_node);
8615 chainon (arglist, build_tree_list (NULL_TREE, len));
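      /* E.g. strcpy (buf, "abc") is handled, in effect, as
	 memcpy (buf, "abc", 4): c_strlen yields 3 and the PLUS_EXPR
	 above adds one for the terminating null; control then drops
	 through into the memcpy case below.  */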
8619 case BUILT_IN_MEMCPY:
8620 /* If not optimizing, call the library function. */
8621 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8625 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8626 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8627 || TREE_CHAIN (arglist) == 0
8628 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8629 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8630 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8634 tree dest = TREE_VALUE (arglist);
8635 tree src = TREE_VALUE (TREE_CHAIN (arglist));
8636 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8640 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8642 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8643 rtx dest_rtx, dest_mem, src_mem;
8645 /* If either SRC or DEST is not a pointer type, don't do
8646 this operation in-line. */
8647 if (src_align == 0 || dest_align == 0)
8649 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
8650 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8654 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8655 dest_mem = gen_rtx (MEM, BLKmode,
8656 memory_address (BLKmode, dest_rtx));
8657 /* There could be a void* cast on top of the object. */
8658 while (TREE_CODE (dest) == NOP_EXPR)
8659 dest = TREE_OPERAND (dest, 0);
8660 type = TREE_TYPE (TREE_TYPE (dest));
8661 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8662 src_mem = gen_rtx (MEM, BLKmode,
8663 memory_address (BLKmode,
8664 expand_expr (src, NULL_RTX,
8667 /* There could be a void* cast on top of the object. */
8668 while (TREE_CODE (src) == NOP_EXPR)
8669 src = TREE_OPERAND (src, 0);
8670 type = TREE_TYPE (TREE_TYPE (src));
8671 MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
8673 /* Copy word part most expediently. */
8674 emit_block_move (dest_mem, src_mem,
8675 expand_expr (len, NULL_RTX, VOIDmode, 0),
8676 MIN (src_align, dest_align));
8677 return force_operand (dest_rtx, NULL_RTX);
8680 case BUILT_IN_MEMSET:
8681 /* If not optimizing, call the library function. */
8682 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8686 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8687 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8688 || TREE_CHAIN (arglist) == 0
8689 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
8691 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8693 != (TREE_CODE (TREE_TYPE
8695 (TREE_CHAIN (TREE_CHAIN (arglist))))))))
8699 tree dest = TREE_VALUE (arglist);
8700 tree val = TREE_VALUE (TREE_CHAIN (arglist));
8701 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8705 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8706 rtx dest_rtx, dest_mem;
8708 /* If DEST is not a pointer type, don't do this
8709 operation in-line. */
8710 if (dest_align == 0)
8713 /* If VAL is not 0, don't do this operation in-line. */
8714 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
8717 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8718 dest_mem = gen_rtx (MEM, BLKmode,
8719 memory_address (BLKmode, dest_rtx));
8720 /* There could be a void* cast on top of the object. */
8721 while (TREE_CODE (dest) == NOP_EXPR)
8722 dest = TREE_OPERAND (dest, 0);
8723 type = TREE_TYPE (TREE_TYPE (dest));
8724 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8726 clear_storage (dest_mem, expand_expr (len, NULL_RTX, VOIDmode, 0),
8729 return force_operand (dest_rtx, NULL_RTX);
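	/* Only the zero-valued form is handled inline: e.g.
	   memset (buf, 0, 16) becomes a block clear via clear_storage,
	   while memset (buf, 'x', 16) falls back to the library call.  */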
8732 /* These comparison functions need an instruction that returns an actual
8733 index. An ordinary compare that just sets the condition codes is not enough. */
8735 #ifdef HAVE_cmpstrsi
8736 case BUILT_IN_STRCMP:
8737 /* If not optimizing, call the library function. */
8738 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8742 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8743 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8744 || TREE_CHAIN (arglist) == 0
8745 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8747 else if (!HAVE_cmpstrsi)
8750 tree arg1 = TREE_VALUE (arglist);
8751 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8755 len = c_strlen (arg1);
8757 len = size_binop (PLUS_EXPR, integer_one_node, len);
8758 len2 = c_strlen (arg2);
8760 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
8762 /* If we don't have a constant length for the first, use the length
8763 of the second, if we know it. We don't require a constant for
8764 this case; some cost analysis could be done if both are available
8765 but neither is constant. For now, assume they're equally cheap.
8767 If both strings have constant lengths, use the smaller. This
8768 could arise if optimization results in strcpy being called with
8769 two fixed strings, or if the code was machine-generated. We should
8770 add some code to the `memcmp' handler below to deal with such
8771 situations, someday. */
8772 if (!len || TREE_CODE (len) != INTEGER_CST)
8779 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
8781 if (tree_int_cst_lt (len2, len))
8785 chainon (arglist, build_tree_list (NULL_TREE, len));
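      /* E.g. strcmp (s, "ab") with S of unknown length proceeds as
	 memcmp (s, "ab", 3): the bound is the known string length plus
	 one for the terminating null, so the comparison need never look
	 past it; control drops through into the memcmp case below.  */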
8789 case BUILT_IN_MEMCMP:
8790 /* If not optimizing, call the library function. */
8791 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8795 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8796 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8797 || TREE_CHAIN (arglist) == 0
8798 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8799 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8800 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8802 else if (!HAVE_cmpstrsi)
8805 tree arg1 = TREE_VALUE (arglist);
8806 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8807 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8811 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8813 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8814 enum machine_mode insn_mode
8815 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
8817 /* If we don't have POINTER_TYPE, call the function. */
8818 if (arg1_align == 0 || arg2_align == 0)
8820 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
8821 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8825 /* Make a place to write the result of the instruction. */
8828 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
8829 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8830 result = gen_reg_rtx (insn_mode);
8832 emit_insn (gen_cmpstrsi (result,
8833 gen_rtx (MEM, BLKmode,
8834 expand_expr (arg1, NULL_RTX,
8837 gen_rtx (MEM, BLKmode,
8838 expand_expr (arg2, NULL_RTX,
8841 expand_expr (len, NULL_RTX, VOIDmode, 0),
8842 GEN_INT (MIN (arg1_align, arg2_align))));
8844 /* Return the value in the proper mode for this function. */
8845 mode = TYPE_MODE (TREE_TYPE (exp));
8846 if (GET_MODE (result) == mode)
8848 else if (target != 0)
8850 convert_move (target, result, 0);
8854 return convert_to_mode (mode, result, 0);
8857 case BUILT_IN_STRCMP:
8858 case BUILT_IN_MEMCMP:
8862 /* __builtin_setjmp is passed a pointer to an array of five words
8863 (not all will be used on all machines). It operates similarly to
8864 the C library function of the same name, but is more efficient.
8865 Much of the code below (and for longjmp) is copied from the handling of non-local gotos.
8868 NOTE: This is intended for use by GNAT and will only work in
8869 the method used by it. This code will likely NOT survive to
8870 the GCC 2.8.0 release. */
8871 case BUILT_IN_SETJMP:
8873 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8877 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
8879 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
8880 enum machine_mode sa_mode = Pmode;
8882 int old_inhibit_defer_pop = inhibit_defer_pop;
8884 = RETURN_POPS_ARGS (get_identifier ("__dummy"),
8885 build_function_type (void_type_node, NULL_TREE),
8888 CUMULATIVE_ARGS args_so_far;
8891 #ifdef POINTERS_EXTEND_UNSIGNED
8892 buf_addr = convert_memory_address (Pmode, buf_addr);
8895 buf_addr = force_reg (Pmode, buf_addr);
8897 if (target == 0 || GET_CODE (target) != REG
8898 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8899 target = gen_reg_rtx (value_mode);
8903 CONST_CALL_P (emit_note (NULL_PTR, NOTE_INSN_SETJMP)) = 1;
8904 current_function_calls_setjmp = 1;
8906 /* We store the frame pointer and the address of lab1 in the buffer
8907 and use the rest of it for the stack save area, which is
8908 machine-dependent. */
8909 emit_move_insn (gen_rtx (MEM, Pmode, buf_addr),
8910 virtual_stack_vars_rtx);
8912 (validize_mem (gen_rtx (MEM, Pmode,
8913 plus_constant (buf_addr,
8914 GET_MODE_SIZE (Pmode)))),
8915 gen_rtx (LABEL_REF, Pmode, lab1));
8917 #ifdef HAVE_save_stack_nonlocal
8918 if (HAVE_save_stack_nonlocal)
8919 sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
8922 stack_save = gen_rtx (MEM, sa_mode,
8923 plus_constant (buf_addr,
8924 2 * GET_MODE_SIZE (Pmode)));
8925 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
8929 emit_insn (gen_setjmp ());
8932 /* Set TARGET to zero and branch around the other case. */
8933 emit_move_insn (target, const0_rtx);
8934 emit_jump_insn (gen_jump (lab2));
8938 /* Note that setjmp clobbers FP when we get here, so we have to
8939 make sure it's marked as used by this function. */
8940 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
8942 /* Mark the static chain as clobbered here so life information
8943 doesn't get messed up for it. */
8944 emit_insn (gen_rtx (CLOBBER, VOIDmode, static_chain_rtx));
8946 /* Now put in the code to restore the frame pointer, and argument
8947 pointer, if needed. The code below is from expand_end_bindings
8948 in stmt.c; see detailed documentation there. */
8949 #ifdef HAVE_nonlocal_goto
8950 if (! HAVE_nonlocal_goto)
8952 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8954 current_function_has_nonlocal_goto = 1;
8956 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8957 if (fixed_regs[ARG_POINTER_REGNUM])
8959 #ifdef ELIMINABLE_REGS
8960 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
8962 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8963 if (elim_regs[i].from == ARG_POINTER_REGNUM
8964 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8967 if (i == sizeof elim_regs / sizeof elim_regs [0])
8970 /* Now restore our arg pointer from the address at which it
8971 was saved in our stack frame.
8972 If there hasn't been space allocated for it yet, make some now. */
8974 if (arg_pointer_save_area == 0)
8975 arg_pointer_save_area
8976 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8977 emit_move_insn (virtual_incoming_args_rtx,
8978 copy_to_reg (arg_pointer_save_area));
8983 #ifdef HAVE_nonlocal_goto_receiver
8984 if (HAVE_nonlocal_goto_receiver)
8985 emit_insn (gen_nonlocal_goto_receiver ());
8987 /* The static chain pointer contains the address of the dummy function.
8988 We need to call it here to handle some PIC cases of restoring
8989 a global pointer. Then return 1. */
8990 op0 = copy_to_mode_reg (Pmode, static_chain_rtx);
8992 /* We can't actually call emit_library_call here, so do everything
8993 it does, which isn't much for a libfunc with no args. */
8994 op0 = memory_address (FUNCTION_MODE, op0);
8996 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE,
8997 gen_rtx (SYMBOL_REF, Pmode, "__dummy"), 1);
8998 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1);
9000 #ifndef ACCUMULATE_OUTGOING_ARGS
9001 #ifdef HAVE_call_pop
9003 emit_call_insn (gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, op0),
9004 const0_rtx, next_arg_reg,
9005 GEN_INT (return_pops)));
9012 emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, op0),
9013 const0_rtx, next_arg_reg, const0_rtx));
9018 emit_move_insn (target, const1_rtx);
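	/* Schematically, the buffer is laid out as follows (a sketch;
	   the stack save area's mode and size are machine-dependent):

	       offset 0				frame pointer
	       offset GET_MODE_SIZE (Pmode)	resume label (lab1)
	       offset 2*GET_MODE_SIZE (Pmode)	stack save area

	   __builtin_longjmp below reads FP, label and SP back from the
	   same offsets.  */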
9023 /* __builtin_longjmp is passed a pointer to an array of five words
9024 and a value, which is a dummy. It's similar to the C library longjmp
9025 function but works with __builtin_setjmp above. */
9026 case BUILT_IN_LONGJMP:
9027 if (arglist == 0 || TREE_CHAIN (arglist) == 0
9028 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9032 tree dummy_id = get_identifier ("__dummy");
9033 tree dummy_type = build_function_type (void_type_node, NULL_TREE);
9034 tree dummy_decl = build_decl (FUNCTION_DECL, dummy_id, dummy_type);
9035 #ifdef POINTERS_EXTEND_UNSIGNED
9038 convert_memory_address
9040 expand_expr (TREE_VALUE (arglist),
9041 NULL_RTX, VOIDmode, 0)));
9044 = force_reg (Pmode, expand_expr (TREE_VALUE (arglist),
9048 rtx fp = gen_rtx (MEM, Pmode, buf_addr);
9049 rtx lab = gen_rtx (MEM, Pmode,
9050 plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
9051 enum machine_mode sa_mode
9052 #ifdef HAVE_save_stack_nonlocal
9053 = (HAVE_save_stack_nonlocal
9054 ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
9059 rtx stack = gen_rtx (MEM, sa_mode,
9060 plus_constant (buf_addr,
9061 2 * GET_MODE_SIZE (Pmode)));
9063 DECL_EXTERNAL (dummy_decl) = 1;
9064 TREE_PUBLIC (dummy_decl) = 1;
9065 make_decl_rtl (dummy_decl, NULL_PTR, 1);
9067 /* Expand the second expression just for side-effects. */
9068 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
9069 const0_rtx, VOIDmode, 0);
9071 assemble_external (dummy_decl);
9073 /* Pick up FP, label, and SP from the block and jump. This code is
9074 from expand_goto in stmt.c; see there for detailed comments. */
9075 #if HAVE_nonlocal_goto
9076 if (HAVE_nonlocal_goto)
9077 emit_insn (gen_nonlocal_goto (fp, lab, stack,
9078 XEXP (DECL_RTL (dummy_decl), 0)));
9082 lab = copy_to_reg (lab);
9083 emit_move_insn (hard_frame_pointer_rtx, fp);
9084 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
9086 /* Put in the static chain register the address of the dummy function. */
9088 emit_move_insn (static_chain_rtx, XEXP (DECL_RTL (dummy_decl), 0));
9089 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
9090 emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
9091 emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
9092 emit_indirect_jump (lab);
9098 default: /* just do library call, if unknown builtin */
9099 error ("built-in function `%s' not currently supported",
9100 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9103 /* The switch statement above can drop through to cause the function
9104 to be called normally. */
9106 return expand_call (exp, target, ignore);
9109 /* Built-in functions to perform an untyped call and return. */
9111 /* For each register that may be used for calling a function, this
9112 gives a mode used to copy the register's value. VOIDmode indicates
9113 the register is not used for calling a function. If the machine
9114 has register windows, this gives only the outbound registers.
9115 INCOMING_REGNO gives the corresponding inbound register. */
9116 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
9118 /* For each register that may be used for returning values, this gives
9119 a mode used to copy the register's value. VOIDmode indicates the
9120 register is not used for returning values. If the machine has
9121 register windows, this gives only the outbound registers.
9122 INCOMING_REGNO gives the corresponding inbound register. */
9123 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
9125 /* For each register that may be used for calling a function, this
9126 gives the offset of that register into the block returned by
9127 __builtin_apply_args. 0 indicates that the register is not
9128 used for calling a function. */
9129 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
9131 /* Return the offset of register REGNO into the block returned by
9132 __builtin_apply_args. This is not declared static, since it is
9133 needed in objc-act.c. */
9136 apply_args_register_offset (regno)
9141 /* Arguments are always put in outgoing registers (in the argument
9142 block) if such make sense. */
9143 #ifdef OUTGOING_REGNO
9144 regno = OUTGOING_REGNO(regno);
9146 return apply_args_reg_offset[regno];
9149 /* Return the size required for the block returned by __builtin_apply_args,
9150 and initialize apply_args_mode. */
9155 static int size = -1;
9157 enum machine_mode mode;
9159 /* The values computed by this function never change. */
9162 /* The first value is the incoming arg-pointer. */
9163 size = GET_MODE_SIZE (Pmode);
9165 /* The second value is the structure value address unless this is
9166 passed as an "invisible" first argument. */
9167 if (struct_value_rtx)
9168 size += GET_MODE_SIZE (Pmode);
9170 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9171 if (FUNCTION_ARG_REGNO_P (regno))
9173 /* Search for the proper mode for copying this register's
9174 value. I'm not sure this is right, but it works so far. */
9175 enum machine_mode best_mode = VOIDmode;
9177 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9179 mode = GET_MODE_WIDER_MODE (mode))
9180 if (HARD_REGNO_MODE_OK (regno, mode)
9181 && HARD_REGNO_NREGS (regno, mode) == 1)
9184 if (best_mode == VOIDmode)
9185 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9187 mode = GET_MODE_WIDER_MODE (mode))
9188 if (HARD_REGNO_MODE_OK (regno, mode)
9189 && (mov_optab->handlers[(int) mode].insn_code
9190 != CODE_FOR_nothing))
9194 if (mode == VOIDmode)
9197 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9198 if (size % align != 0)
9199 size = CEIL (size, align) * align;
9200 apply_args_reg_offset[regno] = size;
9201 size += GET_MODE_SIZE (mode);
9202 apply_args_mode[regno] = mode;
9206 apply_args_mode[regno] = VOIDmode;
9207 apply_args_reg_offset[regno] = 0;
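      /* The rounding above is ordinary align-up arithmetic: e.g. with
	 size == 6 and align == 4, CEIL (6, 4) * 4 == 8, so the slot for
	 the next register starts at offset 8.  */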
9213 /* Return the size required for the block returned by __builtin_apply,
9214 and initialize apply_result_mode. */
9217 apply_result_size ()
9219 static int size = -1;
9221 enum machine_mode mode;
9223 /* The values computed by this function never change. */
9228 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9229 if (FUNCTION_VALUE_REGNO_P (regno))
9231 /* Search for the proper mode for copying this register's
9232 value. I'm not sure this is right, but it works so far. */
9233 enum machine_mode best_mode = VOIDmode;
9235 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9237 mode = GET_MODE_WIDER_MODE (mode))
9238 if (HARD_REGNO_MODE_OK (regno, mode))
9241 if (best_mode == VOIDmode)
9242 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9244 mode = GET_MODE_WIDER_MODE (mode))
9245 if (HARD_REGNO_MODE_OK (regno, mode)
9246 && (mov_optab->handlers[(int) mode].insn_code
9247 != CODE_FOR_nothing))
9251 if (mode == VOIDmode)
9254 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9255 if (size % align != 0)
9256 size = CEIL (size, align) * align;
9257 size += GET_MODE_SIZE (mode);
9258 apply_result_mode[regno] = mode;
9261 apply_result_mode[regno] = VOIDmode;
9263 /* Allow targets that use untyped_call and untyped_return to override
9264 the size so that machine-specific information can be stored here. */
9265 #ifdef APPLY_RESULT_SIZE
9266 size = APPLY_RESULT_SIZE;
9272 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9273 /* Create a vector describing the result block RESULT. If SAVEP is true,
9274 the result block is used to save the values; otherwise it is used to
9275 restore the values. */
9278 result_vector (savep, result)
9282 int regno, size, align, nelts;
9283 enum machine_mode mode;
9285 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9288 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9289 if ((mode = apply_result_mode[regno]) != VOIDmode)
9291 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9292 if (size % align != 0)
9293 size = CEIL (size, align) * align;
9294 reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
9295 mem = change_address (result, mode,
9296 plus_constant (XEXP (result, 0), size));
9297 savevec[nelts++] = (savep
9298 ? gen_rtx (SET, VOIDmode, mem, reg)
9299 : gen_rtx (SET, VOIDmode, reg, mem));
9300 size += GET_MODE_SIZE (mode);
9302 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
9304 #endif /* HAVE_untyped_call or HAVE_untyped_return */
9306 /* Save the state required to perform an untyped call with the same
9307 arguments as were passed to the current function. */
9310 expand_builtin_apply_args ()
9313 int size, align, regno;
9314 enum machine_mode mode;
9316 /* Create a block where the arg-pointer, structure value address,
9317 and argument registers can be saved. */
9318 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
9320 /* Walk past the arg-pointer and structure value address. */
9321 size = GET_MODE_SIZE (Pmode);
9322 if (struct_value_rtx)
9323 size += GET_MODE_SIZE (Pmode);
9325 /* Save each register used in calling a function to the block. */
9326 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9327 if ((mode = apply_args_mode[regno]) != VOIDmode)
9331 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9332 if (size % align != 0)
9333 size = CEIL (size, align) * align;
9335 tem = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9338 /* For reg-stack.c's stack register household.
9339 Compare with a similar piece of code in function.c. */
9341 emit_insn (gen_rtx (USE, mode, tem));
9344 emit_move_insn (change_address (registers, mode,
9345 plus_constant (XEXP (registers, 0),
9348 size += GET_MODE_SIZE (mode);
9351 /* Save the arg pointer to the block. */
9352 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9353 copy_to_reg (virtual_incoming_args_rtx));
9354 size = GET_MODE_SIZE (Pmode);
9356 /* Save the structure value address unless this is passed as an
9357 "invisible" first argument. */
9358 if (struct_value_incoming_rtx)
9360 emit_move_insn (change_address (registers, Pmode,
9361 plus_constant (XEXP (registers, 0),
9363 copy_to_reg (struct_value_incoming_rtx));
9364 size += GET_MODE_SIZE (Pmode);
9367 /* Return the address of the block. */
9368 return copy_addr_to_reg (XEXP (registers, 0));
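  /* Schematically, the block returned to the user is laid out as:

	 offset 0			incoming arg pointer
	 GET_MODE_SIZE (Pmode)		structure value address, if any
	 apply_args_reg_offset[REGNO]	each argument register, aligned
					to its mode

     using the same walk by which apply_args_size computed the total.  */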
9371 /* Perform an untyped call and save the state required to perform an
9372 untyped return of whatever value was returned by the given function. */
9375 expand_builtin_apply (function, arguments, argsize)
9376 rtx function, arguments, argsize;
9378 int size, align, regno;
9379 enum machine_mode mode;
9380 rtx incoming_args, result, reg, dest, call_insn;
9381 rtx old_stack_level = 0;
9382 rtx call_fusage = 0;
9384 /* Create a block where the return registers can be saved. */
9385 result = assign_stack_local (BLKmode, apply_result_size (), -1);
9387 /* ??? The argsize value should be adjusted here. */
9389 /* Fetch the arg pointer from the ARGUMENTS block. */
9390 incoming_args = gen_reg_rtx (Pmode);
9391 emit_move_insn (incoming_args,
9392 gen_rtx (MEM, Pmode, arguments));
9393 #ifndef STACK_GROWS_DOWNWARD
9394 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9395 incoming_args, 0, OPTAB_LIB_WIDEN);
9398 /* Perform postincrements before actually calling the function. */
9401 /* Push a new argument block and copy the arguments. */
9402 do_pending_stack_adjust ();
9403 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
9405 /* Push a block of memory onto the stack to store the memory arguments.
9406 Save the address in a register, and copy the memory arguments. ??? I
9407 haven't figured out how the calling convention macros affect this,
9408 but it's likely that the source and/or destination addresses in
9409 the block copy will need updating in machine-specific ways. */
9410 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
9411 emit_block_move (gen_rtx (MEM, BLKmode, dest),
9412 gen_rtx (MEM, BLKmode, incoming_args),
9414 PARM_BOUNDARY / BITS_PER_UNIT);
9416 /* Refer to the argument block. */
9418 arguments = gen_rtx (MEM, BLKmode, arguments);
9420 /* Walk past the arg-pointer and structure value address. */
9421 size = GET_MODE_SIZE (Pmode);
9422 if (struct_value_rtx)
9423 size += GET_MODE_SIZE (Pmode);
9425 /* Restore each of the registers previously saved. Make USE insns
9426 for each of these registers for use in making the call. */
9427 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9428 if ((mode = apply_args_mode[regno]) != VOIDmode)
9430 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9431 if (size % align != 0)
9432 size = CEIL (size, align) * align;
9433 reg = gen_rtx (REG, mode, regno);
9434 emit_move_insn (reg,
9435 change_address (arguments, mode,
9436 plus_constant (XEXP (arguments, 0),
9439 use_reg (&call_fusage, reg);
9440 size += GET_MODE_SIZE (mode);
9443 /* Restore the structure value address unless this is passed as an
9444 "invisible" first argument. */
9445 size = GET_MODE_SIZE (Pmode);
9446 if (struct_value_rtx)
9448 rtx value = gen_reg_rtx (Pmode);
9449 emit_move_insn (value,
9450 change_address (arguments, Pmode,
9451 plus_constant (XEXP (arguments, 0),
9453 emit_move_insn (struct_value_rtx, value);
9454 if (GET_CODE (struct_value_rtx) == REG)
9455 use_reg (&call_fusage, struct_value_rtx);
9456 size += GET_MODE_SIZE (Pmode);
9459 /* All arguments and registers used for the call are set up by now! */
9460 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
9462 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
9463 and we don't want to load it into a register as an optimization,
9464 because prepare_call_address already did it if it should be done. */
9465 if (GET_CODE (function) != SYMBOL_REF)
9466 function = memory_address (FUNCTION_MODE, function);
9468 /* Generate the actual call instruction and save the return value. */
9469 #ifdef HAVE_untyped_call
9470 if (HAVE_untyped_call)
9471 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
9472 result, result_vector (1, result)));
9475 #ifdef HAVE_call_value
9476 if (HAVE_call_value)
9480 /* Locate the unique return register. It is not possible to
9481 express a call that sets more than one return register using
9482 call_value; use untyped_call for that. In fact, untyped_call
9483 only needs to save the return registers in the given block. */
9484 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9485 if ((mode = apply_result_mode[regno]) != VOIDmode)
9488 abort (); /* HAVE_untyped_call required. */
9489 valreg = gen_rtx (REG, mode, regno);
9492 emit_call_insn (gen_call_value (valreg,
9493 gen_rtx (MEM, FUNCTION_MODE, function),
9494 const0_rtx, NULL_RTX, const0_rtx));
9496 emit_move_insn (change_address (result, GET_MODE (valreg),
9504 /* Find the CALL insn we just emitted. */
9505 for (call_insn = get_last_insn ();
9506 call_insn && GET_CODE (call_insn) != CALL_INSN;
9507 call_insn = PREV_INSN (call_insn))
9513 /* Put the register usage information on the CALL. If there is already
9514 some usage information, put ours at the end. */
9515 if (CALL_INSN_FUNCTION_USAGE (call_insn))
9519 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
9520 link = XEXP (link, 1))
9523 XEXP (link, 1) = call_fusage;
9526 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
9528 /* Restore the stack. */
9529 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
9531 /* Return the address of the result block. */
9532 return copy_addr_to_reg (XEXP (result, 0));
9535 /* Perform an untyped return. */
9538 expand_builtin_return (result)
9541 int size, align, regno;
9542 enum machine_mode mode;
9544 rtx call_fusage = 0;
9546 apply_result_size ();
9547 result = gen_rtx (MEM, BLKmode, result);
9549 #ifdef HAVE_untyped_return
9550 if (HAVE_untyped_return)
9552 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
9558 /* Restore the return value and note that each value is used. */
9560 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9561 if ((mode = apply_result_mode[regno]) != VOIDmode)
9563 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9564 if (size % align != 0)
9565 size = CEIL (size, align) * align;
9566 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9567 emit_move_insn (reg,
9568 change_address (result, mode,
9569 plus_constant (XEXP (result, 0),
9572 push_to_sequence (call_fusage);
9573 emit_insn (gen_rtx (USE, VOIDmode, reg));
9574 call_fusage = get_insns ();
9576 size += GET_MODE_SIZE (mode);
9579 /* Put the USE insns before the return. */
9580 emit_insns (call_fusage);
9582 /* Return whatever value was restored by jumping directly to the end of the function. */
9584 expand_null_return ();
9587 /* Expand code for a post- or pre-increment or decrement
9588 and return the RTX for the result.
9589 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9592 expand_increment (exp, post, ignore)
9596 register rtx op0, op1;
9597 register rtx temp, value;
9598 register tree incremented = TREE_OPERAND (exp, 0);
9599 optab this_optab = add_optab;
9601 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9602 int op0_is_copy = 0;
9603 int single_insn = 0;
9604 /* 1 means we can't store into OP0 directly,
9605 because it is a subreg narrower than a word,
9606 and we don't dare clobber the rest of the word. */
9609 if (output_bytecode)
9611 bc_expand_expr (exp);
9615 /* Stabilize any component ref that might need to be
9616 evaluated more than once below. */
9618 || TREE_CODE (incremented) == BIT_FIELD_REF
9619 || (TREE_CODE (incremented) == COMPONENT_REF
9620 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9621 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9622 incremented = stabilize_reference (incremented);
9623 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9624 ones into save exprs so that they don't accidentally get evaluated
9625 more than once by the code below. */
9626 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9627 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9628 incremented = save_expr (incremented);
9630 /* Compute the operands as RTX.
9631 Note whether OP0 is the actual lvalue or a copy of it:
9632 I believe it is a copy iff it is a register or subreg
9633 and insns were generated in computing it. */
9635 temp = get_last_insn ();
9636 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9638 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9639 in place but instead must do sign- or zero-extension during assignment,
9640 so we copy it into a new register and let the code below use it as
9643 Note that we can safely modify this SUBREG since it is known not to be
9644 shared (it was made by the expand_expr call above). */
9646 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9649 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9653 else if (GET_CODE (op0) == SUBREG
9654 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9656 /* We cannot increment this SUBREG in place. If we are
9657 post-incrementing, get a copy of the old value. Otherwise,
9658 just mark that we cannot increment in place. */
9660 op0 = copy_to_reg (op0);
9665 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9666 && temp != get_last_insn ());
9667 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9669 /* Decide whether incrementing or decrementing. */
9670 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9671 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9672 this_optab = sub_optab;
9674 /* Convert decrement by a constant into a negative increment. */
9675 if (this_optab == sub_optab
9676 && GET_CODE (op1) == CONST_INT)
9678 op1 = GEN_INT (- INTVAL (op1));
9679 this_optab = add_optab;
9682 /* For a preincrement, see if we can do this with a single instruction. */
9685 icode = (int) this_optab->handlers[(int) mode].insn_code;
9686 if (icode != (int) CODE_FOR_nothing
9687 /* Make sure that OP0 is valid for operands 0 and 1
9688 of the insn we want to queue. */
9689 && (*insn_operand_predicate[icode][0]) (op0, mode)
9690 && (*insn_operand_predicate[icode][1]) (op0, mode)
9691 && (*insn_operand_predicate[icode][2]) (op1, mode))
9695 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9696 then we cannot just increment OP0. We must therefore contrive to
9697 increment the original value. Then, for postincrement, we can return
9698 OP0 since it is a copy of the old value. For preincrement, expand here
9699 unless we can do it with a single insn.
9701 Likewise if storing directly into OP0 would clobber high bits
9702 we need to preserve (bad_subreg). */
9703 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9705 /* This is the easiest way to increment the value wherever it is.
9706 Problems with multiple evaluation of INCREMENTED are prevented
9707 because either (1) it is a component_ref or preincrement,
9708 in which case it was stabilized above, or (2) it is an array_ref
9709 with constant index in an array in a register, which is
9710 safe to reevaluate. */
9711 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9712 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9713 ? MINUS_EXPR : PLUS_EXPR),
9716 TREE_OPERAND (exp, 1));
9718 while (TREE_CODE (incremented) == NOP_EXPR
9719 || TREE_CODE (incremented) == CONVERT_EXPR)
9721 newexp = convert (TREE_TYPE (incremented), newexp);
9722 incremented = TREE_OPERAND (incremented, 0);
9725 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9726 return post ? op0 : temp;
9731 /* We have a true reference to the value in OP0.
9732 If there is an insn to add or subtract in this mode, queue it.
9733 Queueing the increment insn avoids the register shuffling
9734 that often results if we must increment now and first save
9735 the old value for subsequent use. */
9737 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9738 op0 = stabilize (op0);
9741 icode = (int) this_optab->handlers[(int) mode].insn_code;
9742 if (icode != (int) CODE_FOR_nothing
9743 /* Make sure that OP0 is valid for operands 0 and 1
9744 of the insn we want to queue. */
9745 && (*insn_operand_predicate[icode][0]) (op0, mode)
9746 && (*insn_operand_predicate[icode][1]) (op0, mode))
9748 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9749 op1 = force_reg (mode, op1);
9751 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9753 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9755 rtx addr = force_reg (Pmode, XEXP (op0, 0));
9758 op0 = change_address (op0, VOIDmode, addr);
9759 temp = force_reg (GET_MODE (op0), op0);
9760 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9761 op1 = force_reg (mode, op1);
9763 /* The increment queue is LIFO, thus we have to `queue'
9764 the instructions in reverse order. */
9765 enqueue_insn (op0, gen_move_insn (op0, temp));
9766 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9771 /* Preincrement, or we can't increment with one simple insn. */
9773 /* Save a copy of the value before inc or dec, to return it later. */
9774 temp = value = copy_to_reg (op0);
9776 /* Arrange to return the incremented value. */
9777 /* Copy the rtx because expand_binop will protect from the queue,
9778 and the results of that would be invalid for us to return
9779 if our caller does emit_queue before using our result. */
9780 temp = copy_rtx (value = op0);
9782 /* Increment however we can. */
9783 op1 = expand_binop (mode, this_optab, value, op1, op0,
9784 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9785 /* Make sure the value is stored into OP0. */
9787 emit_move_insn (op0, op1);
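  /* E.g. for a postincrement i++ whose value is used, the old value of
     I is saved in a copy, the add is emitted (or queued) against the
     lvalue, and the saved old value is returned; for ++i the
     incremented value itself is returned.  */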
9792 /* Expand all function calls contained within EXP, innermost ones first.
9793 But don't look within expressions that have sequence points.
9794 For each CALL_EXPR, record the rtx for its value
9795 in the CALL_EXPR_RTL field. */
9798 preexpand_calls (exp)
9801 register int nops, i;
9802 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9804 if (! do_preexpand_calls)
9807 /* Only expressions and references can contain calls. */
9809 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9812 switch (TREE_CODE (exp))
9815 /* Do nothing if already expanded. */
9816 if (CALL_EXPR_RTL (exp) != 0
9817 /* Do nothing if the call returns a variable-sized object. */
9818 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
9819 /* Do nothing to built-in functions. */
9820 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9821 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9823 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9826 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9831 case TRUTH_ANDIF_EXPR:
9832 case TRUTH_ORIF_EXPR:
9833 /* If we find one of these, then we can be sure
9834 the adjust will be done for it (since it makes jumps).
9835 Do it now, so that if this is inside an argument
9836 of a function, we don't get the stack adjustment
9837 after some other args have already been pushed. */
9838 do_pending_stack_adjust ();
9843 case WITH_CLEANUP_EXPR:
9844 case CLEANUP_POINT_EXPR:
9848 if (SAVE_EXPR_RTL (exp) != 0)
9852 nops = tree_code_length[(int) TREE_CODE (exp)];
9853 for (i = 0; i < nops; i++)
9854 if (TREE_OPERAND (exp, i) != 0)
9856 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9857 if (type == 'e' || type == '<' || type == '1' || type == '2'
9859 preexpand_calls (TREE_OPERAND (exp, i));
9863 /* At the start of a function, record that we have no previously-pushed
9864 arguments waiting to be popped. */
9867 init_pending_stack_adjust ()
9869 pending_stack_adjust = 0;
9872 /* When exiting from a function, if safe, clear out any pending stack adjust
9873 so the adjustment won't get done. */
9876 clear_pending_stack_adjust ()
9878 #ifdef EXIT_IGNORE_STACK
9880 && ! flag_omit_frame_pointer && EXIT_IGNORE_STACK
9881 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9882 && ! flag_inline_functions)
9883 pending_stack_adjust = 0;
9887 /* Pop any previously-pushed arguments that have not been popped yet. */
9890 do_pending_stack_adjust ()
9892 if (inhibit_defer_pop == 0)
9894 if (pending_stack_adjust != 0)
9895 adjust_stack (GEN_INT (pending_stack_adjust));
9896 pending_stack_adjust = 0;
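/* E.g. if two calls each left 8 bytes of arguments to be popped and the
   pops were deferred, pending_stack_adjust is 16 here and a single
   adjust_stack of 16 replaces two separate 8-byte pops.  */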
9900 /* Defer the expansion of all cleanups up to OLD_CLEANUPS.
9901 Returns the cleanups to be performed. */
9904 defer_cleanups_to (old_cleanups)
9907 tree new_cleanups = NULL_TREE;
9908 tree cleanups = cleanups_this_call;
9909 tree last = NULL_TREE;
9911 while (cleanups_this_call != old_cleanups)
9913 expand_eh_region_end (TREE_VALUE (cleanups_this_call));
9914 last = cleanups_this_call;
9915 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
9920 /* Remove the list from the chain of cleanups. */
9921 TREE_CHAIN (last) = NULL_TREE;
9923 /* Reverse them so that we can build them in the right order. */
9924 cleanups = nreverse (cleanups);
9926 /* All cleanups must be on the function_obstack. */
9927 push_obstacks_nochange ();
9928 resume_temporary_allocation ();
9933 new_cleanups = build (COMPOUND_EXPR, TREE_TYPE (new_cleanups),
9934 TREE_VALUE (cleanups), new_cleanups);
9936 new_cleanups = TREE_VALUE (cleanups);
9938 cleanups = TREE_CHAIN (cleanups);
9944 return new_cleanups;
9947 /* Expand all cleanups up to OLD_CLEANUPS.
9948 Needed here, and also for language-dependent calls. */
9951 expand_cleanups_to (old_cleanups)
9954 while (cleanups_this_call != old_cleanups)
9956 expand_eh_region_end (TREE_VALUE (cleanups_this_call));
9957 expand_expr (TREE_VALUE (cleanups_this_call), const0_rtx, VOIDmode, 0);
9958 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
9962 /* Expand conditional expressions. */
9964 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9965 LABEL is an rtx of code CODE_LABEL, in this function and all the functions here. */
9969 jumpifnot (exp, label)
9973 do_jump (exp, label, NULL_RTX);
9976 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9983 do_jump (exp, NULL_RTX, label);
9986 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9987 the result is zero, or IF_TRUE_LABEL if the result is one.
9988 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9989 meaning fall through in that case.
9991 do_jump always does any pending stack adjust except when it does not
9992 actually perform a jump. An example where there is no jump
9993 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9995 This function is responsible for optimizing cases such as
9996 &&, || and comparison operators in EXP. */
9999 do_jump (exp, if_false_label, if_true_label)
10001 rtx if_false_label, if_true_label;
10003 register enum tree_code code = TREE_CODE (exp);
10004 /* Some cases need to create a label to jump to
10005 in order to properly fall through.
10006 These cases set DROP_THROUGH_LABEL nonzero. */
10007 rtx drop_through_label = 0;
10009 rtx comparison = 0;
10012 enum machine_mode mode;
10022 temp = integer_zerop (exp) ? if_false_label : if_true_label;
10028 /* This is not true with #pragma weak */
10030 /* The address of something can never be zero. */
10032 emit_jump (if_true_label);
10037 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
10038 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
10039 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
10042 /* If we are narrowing the operand, we have to do the compare in the narrower mode. */
10044 if ((TYPE_PRECISION (TREE_TYPE (exp))
10045 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10047 case NON_LVALUE_EXPR:
10048 case REFERENCE_EXPR:
10053 /* These cannot change zero->non-zero or vice versa. */
10054 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10058 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
10059 a test and can be longer if the test is eliminated. */
10061 /* Reduce to minus. */
10062 exp = build (MINUS_EXPR, TREE_TYPE (exp),
10063 TREE_OPERAND (exp, 0),
10064 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
10065 TREE_OPERAND (exp, 1))));
10066 /* Process as MINUS. */
10070 /* Non-zero iff operands of minus differ. */
10071 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
10072 TREE_OPERAND (exp, 0),
10073 TREE_OPERAND (exp, 1)),
10078 /* If we are AND'ing with a small constant, do this comparison in the
10079 smallest type that fits. If the machine doesn't have comparisons
10080 that small, it will be converted back to the wider comparison.
10081 This helps if we are testing the sign bit of a narrower object.
10082 combine can't do this for us because it can't know whether a
10083 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
10085 if (! SLOW_BYTE_ACCESS
10086 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
10087 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
10088 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
10089 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
10090 && (type = type_for_mode (mode, 1)) != 0
10091 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10092 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10093 != CODE_FOR_nothing))
10095 do_jump (convert (type, exp), if_false_label, if_true_label);
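      /* E.g. testing (x & 0x80) != 0 with X an int on a typical 32-bit
	 target: floor_log2 gives i == 7, so MODE is QImode and the jump
	 is done on a QImode compare, testing the sign bit of the
	 narrower object directly.  */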
10100 case TRUTH_NOT_EXPR:
10101 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10104 case TRUTH_ANDIF_EXPR:
10107 tree cleanups, old_cleanups;
10109 if (if_false_label == 0)
10110 if_false_label = drop_through_label = gen_label_rtx ();
10112 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
10113 seq1 = get_insns ();
10116 old_cleanups = cleanups_this_call;
10118 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10119 seq2 = get_insns ();
10120 cleanups = defer_cleanups_to (old_cleanups);
10125 rtx flag = gen_reg_rtx (word_mode);
10129 /* Flag cleanups as not needed. */
10130 emit_move_insn (flag, const0_rtx);
10133 /* Flag cleanups as needed. */
10134 emit_move_insn (flag, const1_rtx);
10137 /* All cleanups must be on the function_obstack. */
10138 push_obstacks_nochange ();
10139 resume_temporary_allocation ();
10141 /* Convert flag, which is an rtx, into a tree. */
10142 cond = make_node (RTL_EXPR);
10143 TREE_TYPE (cond) = integer_type_node;
10144 RTL_EXPR_RTL (cond) = flag;
10145 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
10146 cond = save_expr (cond);
10148 new_cleanups = build (COND_EXPR, void_type_node,
10149 truthvalue_conversion (cond),
10150 cleanups, integer_zero_node);
10151 new_cleanups = fold (new_cleanups);
10155 /* Now add in the conditionalized cleanups. */
10157 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
10158 expand_eh_region_start ();
10168 case TRUTH_ORIF_EXPR:
10171 tree cleanups, old_cleanups;
10173 if (if_true_label == 0)
10174 if_true_label = drop_through_label = gen_label_rtx ();
10176 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
10177 seq1 = get_insns ();
10180 old_cleanups = cleanups_this_call;
10182 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10183 seq2 = get_insns ();
10184 cleanups = defer_cleanups_to (old_cleanups);
10189 rtx flag = gen_reg_rtx (word_mode);
10193 /* Flag cleanups as not needed. */
10194 emit_move_insn (flag, const0_rtx);
10197 /* Flag cleanups as needed. */
10198 emit_move_insn (flag, const1_rtx);
10201 /* All cleanups must be on the function_obstack. */
10202 push_obstacks_nochange ();
10203 resume_temporary_allocation ();
10205 /* Convert flag, which is an rtx, into a tree. */
10206 cond = make_node (RTL_EXPR);
10207 TREE_TYPE (cond) = integer_type_node;
10208 RTL_EXPR_RTL (cond) = flag;
10209 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
10210 cond = save_expr (cond);
10212 new_cleanups = build (COND_EXPR, void_type_node,
10213 truthvalue_conversion (cond),
10214 cleanups, integer_zero_node);
10215 new_cleanups = fold (new_cleanups);
10219 /* Now add in the conditionalized cleanups. */
10221 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
10222 expand_eh_region_start ();
10232 case COMPOUND_EXPR:
10233 push_temp_slots ();
10234 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
10235 preserve_temp_slots (NULL_RTX);
10236 free_temp_slots ();
10239 do_pending_stack_adjust ();
10240 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10243 case COMPONENT_REF:
10244 case BIT_FIELD_REF:
10247 int bitsize, bitpos, unsignedp;
10248 enum machine_mode mode;
10254 /* Get description of this reference. We don't actually care
10255 about the underlying object here. */
10256 get_inner_reference (exp, &bitsize, &bitpos, &offset,
10257 &mode, &unsignedp, &volatilep,
10260 type = type_for_size (bitsize, unsignedp);
10261 if (! SLOW_BYTE_ACCESS
10262 && type != 0 && bitsize >= 0
10263 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10264 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10265 != CODE_FOR_nothing))
10267 do_jump (convert (type, exp), if_false_label, if_true_label);
10274 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10275 if (integer_onep (TREE_OPERAND (exp, 1))
10276 && integer_zerop (TREE_OPERAND (exp, 2)))
10277 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10279 else if (integer_zerop (TREE_OPERAND (exp, 1))
10280 && integer_onep (TREE_OPERAND (exp, 2)))
10281 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10286 tree cleanups_left_side, cleanups_right_side, old_cleanups;
10288 register rtx label1 = gen_label_rtx ();
10289 drop_through_label = gen_label_rtx ();
10291 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10293 /* We need to save the cleanups for the lhs and rhs separately.
10294 Keep track of the cleanups seen before the lhs. */
10295 old_cleanups = cleanups_this_call;
10297 /* Now the THEN-expression. */
10298 do_jump (TREE_OPERAND (exp, 1),
10299 if_false_label ? if_false_label : drop_through_label,
10300 if_true_label ? if_true_label : drop_through_label);
10301 /* In case the do_jump just above never jumps. */
10302 do_pending_stack_adjust ();
10303 emit_label (label1);
10304 seq1 = get_insns ();
10305 /* Now grab the cleanups for the lhs. */
10306 cleanups_left_side = defer_cleanups_to (old_cleanups);
10309 /* And keep track of where we start before the rhs. */
10310 old_cleanups = cleanups_this_call;
10312 /* Now the ELSE-expression. */
10313 do_jump (TREE_OPERAND (exp, 2),
10314 if_false_label ? if_false_label : drop_through_label,
10315 if_true_label ? if_true_label : drop_through_label);
10316 seq2 = get_insns ();
10317 /* Grab the cleanups for the rhs. */
10318 cleanups_right_side = defer_cleanups_to (old_cleanups);
10321 if (cleanups_left_side || cleanups_right_side)
10323 /* Make the cleanups for the THEN and ELSE clauses
10324 conditional based on which half is executed. */
10325 rtx flag = gen_reg_rtx (word_mode);
10329 /* Set the flag to 0 so that we know we executed the lhs. */
10330 emit_move_insn (flag, const0_rtx);
10333 /* Set the flag to 1 so that we know we executed the rhs. */
10334 emit_move_insn (flag, const1_rtx);
10337 /* Make sure the cleanup lives on the function_obstack. */
10338 push_obstacks_nochange ();
10339 resume_temporary_allocation ();
10341 /* Now, build up a COND_EXPR that tests the value of the
10342 flag, and then either do the cleanups for the lhs or the rhs. */
10344 cond = make_node (RTL_EXPR);
10345 TREE_TYPE (cond) = integer_type_node;
10346 RTL_EXPR_RTL (cond) = flag;
10347 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
10348 cond = save_expr (cond);
10350 new_cleanups = build (COND_EXPR, void_type_node,
10351 truthvalue_conversion (cond),
10352 cleanups_right_side, cleanups_left_side);
10353 new_cleanups = fold (new_cleanups);
10357 /* Now add in the conditionalized cleanups. */
10359 cleanups_this_call = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
10360 expand_eh_region_start ();
10364 /* No cleanups were needed, so emit the two sequences directly. */
10374 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10376 if (integer_zerop (TREE_OPERAND (exp, 1)))
10377 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10378 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10379 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10382 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10383 fold (build (EQ_EXPR, TREE_TYPE (exp),
10384 fold (build1 (REALPART_EXPR,
10385 TREE_TYPE (inner_type),
10386 TREE_OPERAND (exp, 0))),
10387 fold (build1 (REALPART_EXPR,
10388 TREE_TYPE (inner_type),
10389 TREE_OPERAND (exp, 1))))),
10390 fold (build (EQ_EXPR, TREE_TYPE (exp),
10391 fold (build1 (IMAGPART_EXPR,
10392 TREE_TYPE (inner_type),
10393 TREE_OPERAND (exp, 0))),
10394 fold (build1 (IMAGPART_EXPR,
10395 TREE_TYPE (inner_type),
10396 TREE_OPERAND (exp, 1))))))),
10397 if_false_label, if_true_label);
10398 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10399 && !can_compare_p (TYPE_MODE (inner_type)))
10400 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10402 comparison = compare (exp, EQ, EQ);
10408 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10410 if (integer_zerop (TREE_OPERAND (exp, 1)))
10411 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10412 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10413 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10416 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10417 fold (build (NE_EXPR, TREE_TYPE (exp),
10418 fold (build1 (REALPART_EXPR,
10419 TREE_TYPE (inner_type),
10420 TREE_OPERAND (exp, 0))),
10421 fold (build1 (REALPART_EXPR,
10422 TREE_TYPE (inner_type),
10423 TREE_OPERAND (exp, 1))))),
10424 fold (build (NE_EXPR, TREE_TYPE (exp),
10425 fold (build1 (IMAGPART_EXPR,
10426 TREE_TYPE (inner_type),
10427 TREE_OPERAND (exp, 0))),
10428 fold (build1 (IMAGPART_EXPR,
10429 TREE_TYPE (inner_type),
10430 TREE_OPERAND (exp, 1))))))),
10431 if_false_label, if_true_label);
10432 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10433 && !can_compare_p (TYPE_MODE (inner_type)))
10434 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10436 comparison = compare (exp, NE, NE);
10441 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10443 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10444 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10446 comparison = compare (exp, LT, LTU);
10450 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10452 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10453 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10455 comparison = compare (exp, LE, LEU);
10459 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10461 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10462 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10464 comparison = compare (exp, GT, GTU);
10468 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10470 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10471 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10473 comparison = compare (exp, GE, GEU);
10478 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10480 /* This is not needed any more and causes poor code since it causes
10481 comparisons and tests from non-SI objects to have different code sequences. */
10483 /* Copy to register to avoid generating bad insns by cse
10484 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10485 if (!cse_not_expected && GET_CODE (temp) == MEM)
10486 temp = copy_to_reg (temp);
10488 do_pending_stack_adjust ();
10489 if (GET_CODE (temp) == CONST_INT)
10490 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10491 else if (GET_CODE (temp) == LABEL_REF)
10492 comparison = const_true_rtx;
10493 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10494 && !can_compare_p (GET_MODE (temp)))
10495 /* Note swapping the labels gives us not-equal. */
10496 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10497 else if (GET_MODE (temp) != VOIDmode)
10498 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
10499 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10500 GET_MODE (temp), NULL_RTX, 0);
10505 /* Do any postincrements in the expression that was tested. */
10508 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10509 straight into a conditional jump instruction as the jump condition.
10510 Otherwise, all the work has been done already. */
10512 if (comparison == const_true_rtx)
10515 emit_jump (if_true_label);
10517 else if (comparison == const0_rtx)
10519 if (if_false_label)
10520 emit_jump (if_false_label);
10522 else if (comparison)
10523 do_jump_for_compare (comparison, if_false_label, if_true_label);
10525 if (drop_through_label)
10527 /* If do_jump produces code that might be jumped around,
10528 do any stack adjusts from that code, before the place
10529 where control merges in. */
10530 do_pending_stack_adjust ();
10531 emit_label (drop_through_label);
10535 /* Given a comparison expression EXP for values too wide to be compared
10536 with one insn, test the comparison and jump to the appropriate label.
10537 The code of EXP is ignored; we always test GT if SWAP is 0,
10538 and LT if SWAP is 1. */
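/* Illustrative sketch (not part of the original source): on a hypothetical
   32-bit target, a signed DImode test "a > b" decomposes into a word-by-word
   cascade, high-order word first:

	if ((signed) a.high > (signed) b.high) goto if_true_label;
	if (a.high != b.high) goto if_false_label;
	if ((unsigned) a.low > (unsigned) b.low) goto if_true_label;
	goto if_false_label;

   Only the high-order word is compared signed; the lower words carry
   magnitude only, so they must be compared unsigned.  */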
10541 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10544 rtx if_false_label, if_true_label;
10546 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10547 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10548 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10549 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10550 rtx drop_through_label = 0;
10551 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10554 if (! if_true_label || ! if_false_label)
10555 drop_through_label = gen_label_rtx ();
10556 if (! if_true_label)
10557 if_true_label = drop_through_label;
10558 if (! if_false_label)
10559 if_false_label = drop_through_label;
10561 /* Compare a word at a time, high order first. */
10562 for (i = 0; i < nwords; i++)
10565 rtx op0_word, op1_word;
10567 if (WORDS_BIG_ENDIAN)
10569 op0_word = operand_subword_force (op0, i, mode);
10570 op1_word = operand_subword_force (op1, i, mode);
10574 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10575 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10578 /* All but high-order word must be compared as unsigned. */
10579 comp = compare_from_rtx (op0_word, op1_word,
10580 (unsignedp || i > 0) ? GTU : GT,
10581 unsignedp, word_mode, NULL_RTX, 0);
10582 if (comp == const_true_rtx)
10583 emit_jump (if_true_label);
10584 else if (comp != const0_rtx)
10585 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10587 /* Consider lower words only if these are equal. */
10588 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode, NULL_RTX, 0);
10590 if (comp == const_true_rtx)
10591 emit_jump (if_false_label);
10592 else if (comp != const0_rtx)
10593 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10596 if (if_false_label)
10597 emit_jump (if_false_label);
10598 if (drop_through_label)
10599 emit_label (drop_through_label);
10602 /* Compare OP0 with OP1, word at a time, in mode MODE.
10603 UNSIGNEDP says to do unsigned comparison.
10604 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10607 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10608 enum machine_mode mode;
10611 rtx if_false_label, if_true_label;
10613 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10614 rtx drop_through_label = 0;
10617 if (! if_true_label || ! if_false_label)
10618 drop_through_label = gen_label_rtx ();
10619 if (! if_true_label)
10620 if_true_label = drop_through_label;
10621 if (! if_false_label)
10622 if_false_label = drop_through_label;
10624 /* Compare a word at a time, high order first. */
10625 for (i = 0; i < nwords; i++)
10628 rtx op0_word, op1_word;
10630 if (WORDS_BIG_ENDIAN)
10632 op0_word = operand_subword_force (op0, i, mode);
10633 op1_word = operand_subword_force (op1, i, mode);
10637 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10638 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10641 /* All but high-order word must be compared as unsigned. */
10642 comp = compare_from_rtx (op0_word, op1_word,
10643 (unsignedp || i > 0) ? GTU : GT,
10644 unsignedp, word_mode, NULL_RTX, 0);
10645 if (comp == const_true_rtx)
10646 emit_jump (if_true_label);
10647 else if (comp != const0_rtx)
10648 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10650 /* Consider lower words only if these are equal. */
10651 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode, NULL_RTX, 0);
10653 if (comp == const_true_rtx)
10654 emit_jump (if_false_label);
10655 else if (comp != const0_rtx)
10656 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10659 if (if_false_label)
10660 emit_jump (if_false_label);
10661 if (drop_through_label)
10662 emit_label (drop_through_label);
10665 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10666 with one insn, test the comparison and jump to the appropriate label. */
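/* Sketch of the expansion (illustrative only): for a two-word operand,

	if (op0.word[0] != op1.word[0]) goto if_false_label;
	if (op0.word[1] != op1.word[1]) goto if_false_label;
	goto if_true_label;

   any unequal word settles the comparison at once; we reach the true
   label only if every word matches.  */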
10669 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10671 rtx if_false_label, if_true_label;
10673 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10674 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10675 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10676 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10678 rtx drop_through_label = 0;
10680 if (! if_false_label)
10681 drop_through_label = if_false_label = gen_label_rtx ();
10683 for (i = 0; i < nwords; i++)
10685 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
10686 operand_subword_force (op1, i, mode),
10687 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10688 word_mode, NULL_RTX, 0);
10689 if (comp == const_true_rtx)
10690 emit_jump (if_false_label);
10691 else if (comp != const0_rtx)
10692 do_jump_for_compare (comp, if_false_label, NULL_RTX);
if (if_true_label)
10696 emit_jump (if_true_label);
10697 if (drop_through_label)
10698 emit_label (drop_through_label);
10701 /* Jump according to whether OP0 is 0.
10702 We assume that OP0 has an integer mode that is too wide
10703 for the available compare insns. */
10706 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10708 rtx if_false_label, if_true_label;
10710 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10712 rtx drop_through_label = 0;
10714 if (! if_false_label)
10715 drop_through_label = if_false_label = gen_label_rtx ();
10717 for (i = 0; i < nwords; i++)
10719 rtx comp = compare_from_rtx (operand_subword_force (op0, i, GET_MODE (op0)),
10721 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
10722 if (comp == const_true_rtx)
10723 emit_jump (if_false_label);
10724 else if (comp != const0_rtx)
10725 do_jump_for_compare (comp, if_false_label, NULL_RTX);
if (if_true_label)
10729 emit_jump (if_true_label);
10730 if (drop_through_label)
10731 emit_label (drop_through_label);
10734 /* Given a comparison expression in rtl form, output conditional branches to
10735 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
10738 do_jump_for_compare (comparison, if_false_label, if_true_label)
10739 rtx comparison, if_false_label, if_true_label;
10743 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10744 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
10748 if (if_false_label)
10749 emit_jump (if_false_label);
10751 else if (if_false_label)
10754 rtx prev = get_last_insn ();
10757 /* Output the branch with the opposite condition. Then try to invert
10758 what is generated. If more than one insn is a branch, or if the
10759 branch is not the last insn written, abort. If we can't invert
10760 the branch, make a true label, redirect this jump to that,
10761 emit a jump to the false label and define the true label. */
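/* Illustrative sketch: suppose only IF_FALSE_LABEL is wanted and the
   comparison is (eq).  We emit "beq IF_FALSE_LABEL" and then try to
   invert it in place into "bne IF_FALSE_LABEL".  If the branch cannot
   be inverted, we instead end up with

	beq Ltrue
	jmp IF_FALSE_LABEL
     Ltrue:

   by redirecting the original branch to a fresh true label.  */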
10763 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10764 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
10768 /* Here we get the first insn that was just emitted. It used to be the
10769 case that, on some machines, emitting the branch would discard
10770 the previous compare insn and emit a replacement. This isn't
10771 done anymore, but abort if we see that PREV is deleted. */
if (prev == 0)
10774 insn = get_insns ();
10775 else if (INSN_DELETED_P (prev))
abort ();
else
10778 insn = NEXT_INSN (prev);
10780 for (; insn; insn = NEXT_INSN (insn))
10781 if (GET_CODE (insn) == JUMP_INSN)
10788 if (branch != get_last_insn ())
10791 JUMP_LABEL (branch) = if_false_label;
10792 if (! invert_jump (branch, if_false_label))
10794 if_true_label = gen_label_rtx ();
10795 redirect_jump (branch, if_true_label);
10796 emit_jump (if_false_label);
10797 emit_label (if_true_label);
10802 /* Generate code for a comparison expression EXP
10803 (including code to compute the values to be compared)
10804 and set (CC0) according to the result.
10805 SIGNED_CODE should be the rtx operation for this comparison for
10806 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10808 We force a stack adjustment unless there are currently
10809 things pushed on the stack that aren't yet used. */
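/* For example (illustrative): do_jump expands an LT_EXPR by calling
   compare (exp, LT, LTU), so "a < b" becomes an LT test when the
   operand type is signed and an LTU test when it is unsigned; the
   choice is made below from TREE_UNSIGNED of the operand type.  */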
10812 compare (exp, signed_code, unsigned_code)
10814 enum rtx_code signed_code, unsigned_code;
10817 register rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10819 register rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10820 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
10821 register enum machine_mode mode = TYPE_MODE (type);
10822 int unsignedp = TREE_UNSIGNED (type);
10823 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
10825 #ifdef HAVE_canonicalize_funcptr_for_compare
10826 /* If function pointers need to be "canonicalized" before they can
10827 be reliably compared, then canonicalize them. */
10828 if (HAVE_canonicalize_funcptr_for_compare
10829 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10830 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10833 rtx new_op0 = gen_reg_rtx (mode);
10835 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
op0 = new_op0;
10839 if (HAVE_canonicalize_funcptr_for_compare
10840 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10841 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10844 rtx new_op1 = gen_reg_rtx (mode);
10846 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
op1 = new_op1;
10851 return compare_from_rtx (op0, op1, code, unsignedp, mode,
10853 ((mode == BLKmode) ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10854 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
10857 /* Like compare but expects the values to compare as two rtx's.
10858 The decision as to signed or unsigned comparison must be made by the caller.
10860 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being compared.
10863 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10864 size of MODE should be used. */
10867 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10868 register rtx op0, op1;
10869 enum rtx_code code;
10871 enum machine_mode mode;
10877 /* If one operand is constant, make it the second one. Only do this
10878 if the other operand is not constant as well. */
10880 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10881 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10886 code = swap_condition (code);
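/* E.g. (illustrative) "5 < x" arrives here as LT (const_int 5, reg),
   and is canonicalized into GT (reg, const_int 5): the operands are
   swapped and the condition is reversed to match.  */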
10889 if (flag_force_mem)
10891 op0 = force_not_mem (op0);
10892 op1 = force_not_mem (op1);
10895 do_pending_stack_adjust ();
10897 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10898 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10902 /* There's no need to do this now that combine.c can eliminate lots of
10903 sign extensions. This can be less efficient in certain cases on other machines. */
10906 /* If this is a signed equality comparison, we can do it as an
10907 unsigned comparison since zero-extension is cheaper than sign
10908 extension and comparisons with zero are done as unsigned. This is
10909 the case even on machines that can do fast sign extension, since
10910 zero-extension is easier to combine with other operations than
10911 sign-extension is. If we are comparing against a constant, we must
10912 convert it to what it would look like unsigned. */
10913 if ((code == EQ || code == NE) && ! unsignedp
10914 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10916 if (GET_CODE (op1) == CONST_INT
10917 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10918 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
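/* Worked example (illustrative): a signed QImode comparison against
   the constant -1.  GET_MODE_MASK (QImode) is 0xff, and
   (-1 & 0xff) == 255 != -1, so OP1 is replaced by (const_int 255),
   which is what -1 looks like as an unsigned QImode value.  */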
10923 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10925 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
10928 /* Generate code to calculate EXP using a store-flag instruction
10929 and return an rtx for the result. EXP is either a comparison
10930 or a TRUTH_NOT_EXPR whose operand is a comparison.
10932 If TARGET is nonzero, store the result there if convenient.
10934 If ONLY_CHEAP is non-zero, only do this if it is likely to be very cheap.
10937 Return zero if there is no suitable set-flag instruction
10938 available on this machine.
10940 Once expand_expr has been called on the arguments of the comparison,
10941 we are committed to doing the store flag, since it is not safe to
10942 re-evaluate the expression. We emit the store-flag insn by calling
10943 emit_store_flag, but only expand the arguments if we have a reason
10944 to believe that emit_store_flag will be successful. If we think that
10945 it will, but it isn't, we have to simulate the store-flag with a
10946 set/jump/set sequence. */
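/* Illustrative sketch of the set/jump/set fallback for "t = (a < b)":

	t = 1;				(assume the comparison holds)
	if (a < b) goto Lskip;		(the conditional branch)
	t = 0;				(it did not hold after all)
     Lskip:

   This is the sequence the tail of this function emits when
   emit_store_flag turns out not to handle the case.  */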
10949 do_store_flag (exp, target, mode, only_cheap)
10952 enum machine_mode mode;
10955 enum rtx_code code;
10956 tree arg0, arg1, type;
10958 enum machine_mode operand_mode;
10962 enum insn_code icode;
10963 rtx subtarget = target;
10964 rtx result, label, pattern, jump_pat;
10966 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10967 result at the end. We can't simply invert the test since it would
10968 have already been inverted if it were valid. This case occurs for
10969 some floating-point comparisons. */
10971 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10972 invert = 1, exp = TREE_OPERAND (exp, 0);
10974 arg0 = TREE_OPERAND (exp, 0);
10975 arg1 = TREE_OPERAND (exp, 1);
10976 type = TREE_TYPE (arg0);
10977 operand_mode = TYPE_MODE (type);
10978 unsignedp = TREE_UNSIGNED (type);
10980 /* We won't bother with BLKmode store-flag operations because it would mean
10981 passing a lot of information to emit_store_flag. */
10982 if (operand_mode == BLKmode)
return 0;
10985 /* We won't bother with store-flag operations involving function pointers
10986 when function pointers must be canonicalized before comparisons. */
10987 #ifdef HAVE_canonicalize_funcptr_for_compare
10988 if (HAVE_canonicalize_funcptr_for_compare
10989 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10990 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10992 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10993 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10994 == FUNCTION_TYPE))))
return 0;
11001 /* Get the rtx comparison code to use. We know that EXP is a comparison
11002 operation of some type. Some comparisons against 1 and -1 can be
11003 converted to comparisons with zero. Do so here so that the tests
11004 below will be aware that we have a comparison with zero. These
11005 tests will not catch constants in the first operand, but constants
11006 are rarely passed as the first operand. */
11008 switch (TREE_CODE (exp))
11017 if (integer_onep (arg1))
11018 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
11020 code = unsignedp ? LTU : LT;
11023 if (! unsignedp && integer_all_onesp (arg1))
11024 arg1 = integer_zero_node, code = LT;
11026 code = unsignedp ? LEU : LE;
11029 if (! unsignedp && integer_all_onesp (arg1))
11030 arg1 = integer_zero_node, code = GE;
11032 code = unsignedp ? GTU : GT;
11035 if (integer_onep (arg1))
11036 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
11038 code = unsignedp ? GEU : GE;
11044 /* Put a constant second. */
11045 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
11047 tem = arg0; arg0 = arg1; arg1 = tem;
11048 code = swap_condition (code);
11051 /* If this is an equality or inequality test of a single bit, we can
11052 do this by shifting the bit being tested to the low-order bit and
11053 masking the result with the constant 1. If the condition was EQ,
11054 we xor it with 1. This does not require an scc insn and is faster
11055 than an scc insn even if we have it. */
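/* Worked example (illustrative): for "(x & 8) != 0" the mask 8 is a
   power of two with bit number 3, so the whole test becomes
   (x >> 3) & 1 with no scc insn; the EQ form "(x & 8) == 0" gets an
   additional XOR with 1.  */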
11057 if ((code == NE || code == EQ)
11058 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
11059 && integer_pow2p (TREE_OPERAND (arg0, 1))
11060 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
11062 tree inner = TREE_OPERAND (arg0, 0);
11067 tem = INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
11068 NULL_RTX, VOIDmode, 0));
11069 /* In this case, immed_double_const will sign extend the value to make
11070 it look the same on the host and target. We must remove the
11071 sign-extension before calling exact_log2, since exact_log2 will
11072 fail for negative values. */
11073 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT
11074 && BITS_PER_WORD == GET_MODE_BITSIZE (TYPE_MODE (type)))
11075 /* We don't use the obvious constant shift to generate the mask,
11076 because that generates compiler warnings when BITS_PER_WORD is
11077 greater than or equal to HOST_BITS_PER_WIDE_INT, even though this
11078 code is unreachable in that case. */
11079 tem = tem & GET_MODE_MASK (word_mode);
11080 bitnum = exact_log2 (tem);
11082 /* If INNER is a right shift of a constant and it plus BITNUM does
11083 not overflow, adjust BITNUM and INNER. */
11085 if (TREE_CODE (inner) == RSHIFT_EXPR
11086 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
11087 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
11088 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
11089 < TYPE_PRECISION (type)))
11091 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
11092 inner = TREE_OPERAND (inner, 0);
11095 /* If we are going to be able to omit the AND below, we must do our
11096 operations as unsigned. If we must use the AND, we have a choice.
11097 Normally unsigned is faster, but for some machines signed is. */
11098 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
11099 #ifdef LOAD_EXTEND_OP
11100 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
11106 if (subtarget == 0 || GET_CODE (subtarget) != REG
11107 || GET_MODE (subtarget) != operand_mode
11108 || ! safe_from_p (subtarget, inner))
subtarget = 0;
11111 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
11114 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
11115 size_int (bitnum), subtarget, ops_unsignedp);
11117 if (GET_MODE (op0) != mode)
11118 op0 = convert_to_mode (mode, op0, ops_unsignedp);
11120 if ((code == EQ && ! invert) || (code == NE && invert))
11121 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
11122 ops_unsignedp, OPTAB_LIB_WIDEN);
11124 /* Put the AND last so it can combine with more things. */
11125 if (bitnum != TYPE_PRECISION (type) - 1)
11126 op0 = expand_and (op0, const1_rtx, subtarget);
11131 /* Now see if we are likely to be able to do this. Return if not. */
11132 if (! can_compare_p (operand_mode))
return 0;
11134 icode = setcc_gen_code[(int) code];
11135 if (icode == CODE_FOR_nothing
11136 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
11138 /* We can only do this if it is one of the special cases that
11139 can be handled without an scc insn. */
11140 if ((code == LT && integer_zerop (arg1))
11141 || (! only_cheap && code == GE && integer_zerop (arg1)))
11143 else if (BRANCH_COST >= 0
11144 && ! only_cheap && (code == NE || code == EQ)
11145 && TREE_CODE (type) != REAL_TYPE
11146 && ((abs_optab->handlers[(int) operand_mode].insn_code
11147 != CODE_FOR_nothing)
11148 || (ffs_optab->handlers[(int) operand_mode].insn_code
11149 != CODE_FOR_nothing)))
11155 preexpand_calls (exp);
11156 if (subtarget == 0 || GET_CODE (subtarget) != REG
11157 || GET_MODE (subtarget) != operand_mode
11158 || ! safe_from_p (subtarget, arg1))
subtarget = 0;
11161 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
11162 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
if (target == 0)
11165 target = gen_reg_rtx (mode);
11167 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
11168 because, if emit_store_flag does anything, it will succeed and
11169 OP0 and OP1 will not be used subsequently. */
11171 result = emit_store_flag (target, code,
11172 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
11173 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
11174 operand_mode, unsignedp, 1);
11179 result = expand_binop (mode, xor_optab, result, const1_rtx,
11180 result, 0, OPTAB_LIB_WIDEN);
11184 /* If this failed, we have to do this with set/compare/jump/set code. */
11185 if (GET_CODE (target) != REG
11186 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
11187 target = gen_reg_rtx (GET_MODE (target));
11189 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
11190 result = compare_from_rtx (op0, op1, code, unsignedp,
11191 operand_mode, NULL_RTX, 0);
11192 if (GET_CODE (result) == CONST_INT)
11193 return (((result == const0_rtx && ! invert)
11194 || (result != const0_rtx && invert))
11195 ? const0_rtx : const1_rtx);
11197 label = gen_label_rtx ();
11198 if (bcc_gen_fctn[(int) code] == 0)
abort ();
11201 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
11202 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
11203 emit_label (label);
11208 /* Generate a tablejump instruction (used for switch statements). */
11210 #ifdef HAVE_tablejump
11212 /* INDEX is the value being switched on, with the lowest value
11213 in the table already subtracted.
11214 MODE is its expected mode (needed if INDEX is constant).
11215 RANGE is the length of the jump table.
11216 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11218 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11219 index value is out of range. */
11222 do_tablejump (index, mode, range, table_label, default_label)
11223 rtx index, range, table_label, default_label;
11224 enum machine_mode mode;
11226 register rtx temp, vector;
11228 /* Do an unsigned comparison (in the proper mode) between the index
11229 expression and the value which represents the length of the range.
11230 Since we just finished subtracting the lower bound of the range
11231 from the index expression, this comparison allows us to simultaneously
11232 check that the original index expression value is both greater than
11233 or equal to the minimum value of the range and less than or equal to
11234 the maximum value of the range. */
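/* Worked example (illustrative): for a switch over case values
   3 through 10, INDEX arrives here as i - 3 and RANGE is 7.  If
   i == 2, the subtraction wraps to a huge unsigned value, which is
   certainly greater than 7, so the single GTU test also rejects
   values below the original lower bound.  */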
11236 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
11237 emit_jump_insn (gen_bgtu (default_label));
11239 /* If index is in range, it must fit in Pmode.
11240 Convert to Pmode so we can index with it. */
11242 index = convert_to_mode (Pmode, index, 1);
11244 /* Don't let a MEM slip through, because then INDEX that comes
11245 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11246 and break_out_memory_refs will go to work on it and mess it up. */
11247 #ifdef PIC_CASE_VECTOR_ADDRESS
11248 if (flag_pic && GET_CODE (index) != REG)
11249 index = copy_to_mode_reg (Pmode, index);
11252 /* If flag_force_addr were to affect this address
11253 it could interfere with the tricky assumptions made
11254 about addresses that contain label-refs,
11255 which may be valid only very near the tablejump itself. */
11256 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11257 GET_MODE_SIZE, because this indicates how large insns are. The other
11258 uses should all be Pmode, because they are addresses. This code
11259 could fail if addresses and insns are not the same size. */
11260 index = gen_rtx (PLUS, Pmode,
11261 gen_rtx (MULT, Pmode, index,
11262 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
11263 gen_rtx (LABEL_REF, Pmode, table_label));
11264 #ifdef PIC_CASE_VECTOR_ADDRESS
if (flag_pic)
11266 index = PIC_CASE_VECTOR_ADDRESS (index);
else
#endif
11269 index = memory_address_noforce (CASE_VECTOR_MODE, index);
11270 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11271 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
11272 RTX_UNCHANGING_P (vector) = 1;
11273 convert_move (temp, vector, 0);
11275 emit_jump_insn (gen_tablejump (temp, table_label));
11277 #ifndef CASE_VECTOR_PC_RELATIVE
11278 /* If we are generating PIC code or if the table is PC-relative, the
11279 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11285 #endif /* HAVE_tablejump */
11288 /* Emit a suitable bytecode to load a value from memory, assuming a pointer
11289 to that value is on the top of the stack. The resulting type is TYPE, and
11290 the source declaration is DECL. */
11293 bc_load_memory (type, decl)
11296 enum bytecode_opcode opcode;
11299 /* Bit fields are special. We only know about signed and
11300 unsigned ints, and enums. The latter are treated as
11301 signed integers. */
11303 if (DECL_BIT_FIELD (decl))
11304 if (TREE_CODE (type) == ENUMERAL_TYPE
11305 || TREE_CODE (type) == INTEGER_TYPE)
11306 opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
11310 /* See corresponding comment in bc_store_memory(). */
11311 if (TYPE_MODE (type) == BLKmode
11312 || TYPE_MODE (type) == VOIDmode)
11315 opcode = mode_to_load_map [(int) TYPE_MODE (type)];
11317 if (opcode == neverneverland)
abort ();
11320 bc_emit_bytecode (opcode);
11322 #ifdef DEBUG_PRINT_CODE
11323 fputc ('\n', stderr);
11328 /* Store the contents of the second stack slot to the address in the
11329 top stack slot. DECL is the declaration of the destination and is used
11330 to determine whether we're dealing with a bitfield. */
11333 bc_store_memory (type, decl)
11336 enum bytecode_opcode opcode;
11339 if (DECL_BIT_FIELD (decl))
11341 if (TREE_CODE (type) == ENUMERAL_TYPE
11342 || TREE_CODE (type) == INTEGER_TYPE)
11348 if (TYPE_MODE (type) == BLKmode)
11350 /* Copy structure. This expands to a block copy instruction, storeBLK.
11351 In addition to the arguments expected by the other store instructions,
11352 it also expects a type size (SImode) on top of the stack, which is the
11353 structure size in size units (usually bytes). The first two arguments
11354 are already on the stack, so we just put the size on level 1. For some
11355 other languages the size may be variable, which is why we don't encode
11356 it as a storeBLK literal, but rather treat it as a full-fledged expression. */
11358 bc_expand_expr (TYPE_SIZE (type));
11362 opcode = mode_to_store_map [(int) TYPE_MODE (type)];
11364 if (opcode == neverneverland)
abort ();
11367 bc_emit_bytecode (opcode);
11369 #ifdef DEBUG_PRINT_CODE
11370 fputc ('\n', stderr);
11375 /* Allocate local stack space sufficient to hold a value of the given
11376 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
11377 integral power of 2. A special case is locals of type VOID, which
11378 have size 0 and alignment 1; any "voidish" SIZE or ALIGNMENT is
11379 remapped into the corresponding attribute of SI. */
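/* Worked example (illustrative): if local_vars_size is 5 and the
   requested byte alignment is 4, then (5 & 3) == 1, so we first
   advance local_vars_size by 3 to the aligned offset 8, hand out that
   offset, and only then advance past the new local by SIZE.  */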
11382 bc_allocate_local (size, alignment)
11383 int size, alignment;
11386 int byte_alignment;
11391 /* Normalize size and alignment */
11393 size = UNITS_PER_WORD;
11395 if (alignment < BITS_PER_UNIT)
11396 byte_alignment = 1 << (INT_ALIGN - 1);
11399 byte_alignment = alignment / BITS_PER_UNIT;
11401 if (local_vars_size & (byte_alignment - 1))
11402 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
11404 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
11405 local_vars_size += size;
11411 /* Allocate a variable-sized local array. Variable-sized arrays are
11412 actually represented as pointers to the memory where they are stored. */
11415 bc_allocate_variable_array (size)
11419 const int ptralign = (1 << (PTR_ALIGN - 1));
11421 /* Align pointer */
11422 if (local_vars_size & ptralign)
11423 local_vars_size += ptralign - (local_vars_size & ptralign);
11425 /* Note down local space needed: pointer to block; also return an rtx referring to the pointer's slot. */
11428 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
11429 local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
11434 /* Push the machine address for the given external variable offset. */
11437 bc_load_externaddr (externaddr)
11440 bc_emit_bytecode (constP);
11441 bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
11442 BYTECODE_BC_LABEL (externaddr)->offset);
11444 #ifdef DEBUG_PRINT_CODE
11445 fputc ('\n', stderr);
11450 /* Like above, but expects an IDENTIFIER. */
11453 bc_load_externaddr_id (id, offset)
11457 if (!IDENTIFIER_POINTER (id))
abort ();
11460 bc_emit_bytecode (constP);
11461 bc_emit_code_labelref (xstrdup (IDENTIFIER_POINTER (id)), offset);
11463 #ifdef DEBUG_PRINT_CODE
11464 fputc ('\n', stderr);
11469 /* Push the machine address for the given local variable offset. */
11472 bc_load_localaddr (localaddr)
11475 bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
11479 /* Push the machine address for the given parameter offset.
11480 NOTE: offset is in bits. */
11483 bc_load_parmaddr (parmaddr)
11486 bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
11491 /* Convert a[i] into *(a + i). */
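/* Illustrative example: for "int a[10]", the reference a[i] becomes
   an INDIRECT_REF of the address of the array plus the byte offset
   i * sizeof (int); the index is first widened to pointer precision
   so that the multiply cannot overflow spuriously.  */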
11494 bc_canonicalize_array_ref (exp)
11497 tree type = TREE_TYPE (exp);
11498 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
11499 TREE_OPERAND (exp, 0));
11500 tree index = TREE_OPERAND (exp, 1);
11503 /* Convert the integer argument to a type the same size as a pointer
11504 so the multiply won't overflow spuriously. */
11506 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
11507 index = convert (type_for_size (POINTER_SIZE, 0), index);
11509 /* The array address isn't volatile even if the array is.
11510 (Of course this isn't terribly relevant since the bytecode
11511 translator treats nearly everything as volatile anyway.) */
11512 TREE_THIS_VOLATILE (array_adr) = 0;
11514 return build1 (INDIRECT_REF, type,
11515 fold (build (PLUS_EXPR,
11516 TYPE_POINTER_TO (type),
11518 fold (build (MULT_EXPR,
11519 TYPE_POINTER_TO (type),
11521 size_in_bytes (type))))));
11525 /* Load the address of the component referenced by the given
11526 COMPONENT_REF expression.
11528 Returns innermost lvalue. */
11531 bc_expand_component_address (exp)
11535 enum machine_mode mode;
11537 HOST_WIDE_INT SIval;
11540 tem = TREE_OPERAND (exp, 1);
11541 mode = DECL_MODE (tem);
11544 /* Compute cumulative bit offset for nested component refs
11545 and array refs, and find the ultimate containing object. */
11547 for (tem = exp;; tem = TREE_OPERAND (tem, 0))
11549 if (TREE_CODE (tem) == COMPONENT_REF)
11550 bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
11552 if (TREE_CODE (tem) == ARRAY_REF
11553 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11554 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
11556 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
11557 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
11558 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
11563 bc_expand_expr (tem);
11566 /* For bitfields also push their offset and size */
11567 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
11568 bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1)));
11570 if ((SIval = bitpos / BITS_PER_UNIT) != 0)
11571 bc_emit_instruction (addconstPSI, SIval);
11573 return (TREE_OPERAND (exp, 1));
11577 /* Emit code to push two SI constants */
11580 bc_push_offset_and_size (offset, size)
11581 HOST_WIDE_INT offset, size;
11583 bc_emit_instruction (constSI, offset);
11584 bc_emit_instruction (constSI, size);
11588 /* Emit byte code to push the address of the given lvalue expression to
11589 the stack. If it's a bit field, we also push offset and size info.
11591 Returns innermost component, which allows us to determine not only
11592 its type, but also whether it's a bitfield. */
11595 bc_expand_address (exp)
11599 if (!exp || TREE_CODE (exp) == ERROR_MARK)
return exp;
11603 switch (TREE_CODE (exp))
11607 return (bc_expand_address (bc_canonicalize_array_ref (exp)));
11609 case COMPONENT_REF:
11611 return (bc_expand_component_address (exp));
11615 bc_expand_expr (TREE_OPERAND (exp, 0));
11617 /* For variable-sized types: retrieve pointer. Sometimes the
11618 TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
11619 also make sure we have an operand, just in case... */
11621 if (TREE_OPERAND (exp, 0)
11622 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
11623 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
11624 bc_emit_instruction (loadP);
11626 /* If packed, also return offset and size */
11627 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
11629 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
11630 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
11632 return (TREE_OPERAND (exp, 0));
11634 case FUNCTION_DECL:
11636 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
11637 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
11642 bc_load_parmaddr (DECL_RTL (exp));
11644 /* For variable-sized types: retrieve pointer */
11645 if (TYPE_SIZE (TREE_TYPE (exp))
11646 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11647 bc_emit_instruction (loadP);
11649 /* If packed, also return offset and size */
11650 if (DECL_BIT_FIELD (exp))
11651 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11652 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11658 bc_emit_instruction (returnP);
11664 if (BYTECODE_LABEL (DECL_RTL (exp)))
11665 bc_load_externaddr (DECL_RTL (exp));
11668 if (DECL_EXTERNAL (exp))
11669 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
11670 (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
11672 bc_load_localaddr (DECL_RTL (exp));
11674 /* For variable-sized types: retrieve pointer */
11675 if (TYPE_SIZE (TREE_TYPE (exp))
11676 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11677 bc_emit_instruction (loadP);
11679 /* If packed, also return offset and size */
11680 if (DECL_BIT_FIELD (exp))
11681 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11682 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11690 bc_emit_bytecode (constP);
11691 r = output_constant_def (exp);
11692 bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
11694 #ifdef DEBUG_PRINT_CODE
11695 fputc ('\n', stderr);
11706 /* Most lvalues don't have components. */
11711 /* Emit a type code to be used by the runtime support in handling
11712 parameter passing. The type code consists of the machine mode
11713 plus the minimal alignment shifted left 8 bits. */
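/* Worked example (illustrative): for a 32-bit int aligned to 32 bits,
   the code is (int) SImode | (32 << 8): the machine mode occupies the
   low 8 bits and the alignment (in bits) sits above them.  */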
11716 bc_runtime_type_code (type)
11721 switch (TREE_CODE (type))
11727 case ENUMERAL_TYPE:
11731 val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
11743 return build_int_2 (val, 0);
11747 /* Generate constructor label */
11750 bc_gen_constr_label ()
11752 static int label_counter;
11753 static char label[20];
11755 sprintf (label, "*LR%d", label_counter++);
11757 return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
11761 /* Evaluate constructor CONSTR and return pointer to it on level one. We
11762 expand the constructor data as static data, and push a pointer to it.
11763 The pointer is put in the pointer table and is retrieved by a constP
11764 bytecode instruction. We then loop and store each constructor member in
11765 the corresponding component. Finally, we return the original pointer on level one. */
11769 bc_expand_constructor (constr)
11773 HOST_WIDE_INT ptroffs;
11777 /* Literal constructors are handled as constants, whereas
11778 non-literals are evaluated and stored element by element
11779 into the data segment. */
11781 /* Allocate space in the proper segment and push a pointer to that space on the stack. */
11784 l = bc_gen_constr_label ();
11786 if (TREE_CONSTANT (constr))
11790 bc_emit_const_labeldef (l);
11791 bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
11797 bc_emit_data_labeldef (l);
11798 bc_output_data_constructor (constr);
11802 /* Add reference to pointer table and recall pointer to stack;
11803 this code is common for both types of constructors: literals
11804 and non-literals. */
11806 ptroffs = bc_define_pointer (l);
11807 bc_emit_instruction (constP, ptroffs);
11809 /* This is all that has to be done if it's a literal. */
11810 if (TREE_CONSTANT (constr))
11814 /* At this point, we have the pointer to the structure on top of the stack.
11815 Generate sequences of store_memory calls for the constructor. */
11817 /* constructor type is structure */
11818 if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
11822 /* If the constructor has fewer fields than the structure,
11823 clear the whole structure first. */
11825 if (list_length (CONSTRUCTOR_ELTS (constr))
11826 != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
11828 bc_emit_instruction (duplicate);
11829 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11830 bc_emit_instruction (clearBLK);
11833 /* Store each element of the constructor into the corresponding
11834 field of TARGET. */
11836 for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
11838 register tree field = TREE_PURPOSE (elt);
11839 register enum machine_mode mode;
11844 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
11845 mode = DECL_MODE (field);
11846 unsignedp = TREE_UNSIGNED (field);
11848 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
11850 bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11851 /* The alignment of TARGET is
11852 at least what its type requires. */
11854 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11855 int_size_in_bytes (TREE_TYPE (constr)));
11860 /* Constructor type is array */
11861 if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
11865 tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
11866 int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
11867 int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
11868 tree elttype = TREE_TYPE (TREE_TYPE (constr));
11870 /* If the constructor has fewer elements than the array,
11871 clear the whole array first. */
11873 if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
11875 bc_emit_instruction (duplicate);
11876 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11877 bc_emit_instruction (clearBLK);
11881 /* Store each element of the constructor into the corresponding
11882 element of TARGET, determined by counting the elements. */
11884 for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
11886 elt = TREE_CHAIN (elt), i++)
11888 register enum machine_mode mode;
11893 mode = TYPE_MODE (elttype);
11894 bitsize = GET_MODE_BITSIZE (mode);
11895 unsignedp = TREE_UNSIGNED (elttype);
11897 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
11898 /* * TYPE_SIZE_UNIT (elttype) */ );
11900 bc_store_field (elt, bitsize, bitpos, mode,
11901 TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11902 /* The alignment of TARGET is
11903 at least what its type requires. */
11905 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11906 int_size_in_bytes (TREE_TYPE (constr)));
11913 /* Store the value of EXP (an expression tree) into member FIELD of
11914 structure at address on stack, which has type TYPE, mode MODE and
11915 occupies BITSIZE bits, starting BITPOS bits from the beginning of the structure.
11918 ALIGN is the alignment that TARGET is known to have, measured in bytes.
11919 TOTAL_SIZE is its size in bytes, or -1 if variable. */
11922 bc_store_field (field, bitsize, bitpos, mode, exp, type,
11923 value_mode, unsignedp, align, total_size)
11924 int bitsize, bitpos;
11925 enum machine_mode mode;
11926 tree field, exp, type;
11927 enum machine_mode value_mode;
11933 /* Expand expression and copy pointer */
11934 bc_expand_expr (exp);
11935 bc_emit_instruction (over);
11938 /* If the component is a bit field, we cannot use addressing to access
11939 it. Use bit-field techniques to store in it. */
11941 if (DECL_BIT_FIELD (field))
11943 bc_store_bit_field (bitpos, bitsize, unsignedp);
11947 /* Not bit field */
11949 HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;
11951 /* Advance pointer to the desired member */
11953 bc_emit_instruction (addconstPSI, offset);
11956 bc_store_memory (type, field);
11961 /* Store SI/SU in bitfield */
11964 bc_store_bit_field (offset, size, unsignedp)
11965 int offset, size, unsignedp;
11967 /* Push bitfield offset and size */
11968 bc_push_offset_and_size (offset, size);
11971 bc_emit_instruction (sstoreBI);
11975 /* Load SI/SU from bitfield */
11978 bc_load_bit_field (offset, size, unsignedp)
11979 int offset, size, unsignedp;
11981 /* Push bitfield offset and size */
11982 bc_push_offset_and_size (offset, size);
11984 /* Load: sign-extend if signed, else zero-extend */
11985 bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
11989 /* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
11990 (adjust stack pointer upwards), negative means add that number of
11991 levels (adjust the stack pointer downwards). Only positive values
11992 normally make sense. */
11995 bc_adjust_stack (nlevels)
12004 bc_emit_instruction (drop);
12007 bc_emit_instruction (drop);
12012 bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
12013 stack_depth -= nlevels;
12016 #if defined (VALIDATE_STACK_FOR_BC)
12017 VALIDATE_STACK_FOR_BC ();
#endif
}