1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 92, 93, 94, 95, 96, 1997 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
29 #include "hard-reg-set.h"
32 #include "insn-flags.h"
33 #include "insn-codes.h"
35 #include "insn-config.h"
38 #include "typeclass.h"
41 #include "bc-opcode.h"
42 #include "bc-typecd.h"
47 #define CEIL(x,y) (((x) + (y) - 1) / (y))
49 /* Decide whether a function's arguments should be processed
50 from first to last or from last to first.
52 They should if the stack and args grow in opposite directions, but
53 only if we have push insns. */
57 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
58 #define PUSH_ARGS_REVERSED /* If it's last to first */
63 #ifndef STACK_PUSH_CODE
64 #ifdef STACK_GROWS_DOWNWARD
65 #define STACK_PUSH_CODE PRE_DEC
67 #define STACK_PUSH_CODE PRE_INC
71 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
72 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
74 /* If this is nonzero, we do not bother generating VOLATILE
75 around volatile memory references, and we are willing to
76 output indirect addresses. If cse is to follow, we reject
77 indirect addresses so a useful potential cse is generated;
78 if it is used only once, instruction combination will produce
79 the same indirect address eventually. */
82 /* Nonzero to generate code for all the subroutines within an
83 expression before generating the upper levels of the expression.
84 Nowadays this is never zero. */
/* NOTE(review): this excerpt has lines elided; the leading numeral on each
   retained line is its line number in the original file, and several of the
   comments below are cut off mid-sentence where lines were dropped.  */
85 int do_preexpand_calls = 1;
87 /* Number of units that we should eventually pop off the stack.
88 These are the arguments to function calls that have already returned. */
89 int pending_stack_adjust;
91 /* Nonzero means stack pops must not be deferred, and deferred stack
92 pops must not be output. It is nonzero inside a function call,
93 inside a conditional expression, inside a statement expression,
94 and in other cases as well. */
95 int inhibit_defer_pop;
97 /* A list of all cleanups which belong to the arguments of
98 function calls being expanded by expand_call. */
99 tree cleanups_this_call;
101 /* When temporaries are created by TARGET_EXPRs, they are created at
102 this level of temp_slot_level, so that they can remain allocated
103 until no longer needed. CLEANUP_POINT_EXPRs define the lifetime
105 int target_temp_slot_level;
107 /* Nonzero means __builtin_saveregs has already been done in this function.
108 The value is the pseudoreg containing the value __builtin_saveregs
110 static rtx saveregs_value;
112 /* Similarly for __builtin_apply_args. */
113 static rtx apply_args_value;
115 /* This structure is used by move_by_pieces to describe the move to
118 struct move_by_pieces
128 int explicit_inc_from;
135 /* This structure is used by clear_by_pieces to describe the clear to
138 struct clear_by_pieces
150 /* Used to generate bytecodes: keep track of size of local variables,
151 as well as depth of arithmetic stack. (Notice that variables are
152 stored on the machine's stack, not the arithmetic stack.) */
154 extern int local_vars_size;
155 extern int stack_depth;
156 extern int max_stack_depth;
157 extern struct obstack permanent_obstack;
158 extern rtx arg_pointer_save_area;
160 static rtx enqueue_insn PROTO((rtx, rtx));
161 static int queued_subexp_p PROTO((rtx));
162 static void init_queue PROTO((void));
163 static void move_by_pieces PROTO((rtx, rtx, int, int));
164 static int move_by_pieces_ninsns PROTO((unsigned int, int));
165 static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
166 struct move_by_pieces *));
167 static void clear_by_pieces PROTO((rtx, int, int));
168 static void clear_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
169 struct clear_by_pieces *));
170 static int is_zeros_p PROTO((tree));
171 static int mostly_zeros_p PROTO((tree));
172 static void store_constructor PROTO((tree, rtx, int));
173 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
174 enum machine_mode, int, int, int));
175 static int get_inner_unaligned_p PROTO((tree));
176 static tree save_noncopied_parts PROTO((tree, tree));
177 static tree init_noncopied_parts PROTO((tree, tree));
178 static int safe_from_p PROTO((rtx, tree));
179 static int fixed_type_p PROTO((tree));
180 static rtx var_rtx PROTO((tree));
181 static int get_pointer_alignment PROTO((tree, unsigned));
182 static tree string_constant PROTO((tree, tree *));
183 static tree c_strlen PROTO((tree));
184 static rtx expand_builtin PROTO((tree, rtx, rtx,
185 enum machine_mode, int));
186 static int apply_args_size PROTO((void));
187 static int apply_result_size PROTO((void));
188 static rtx result_vector PROTO((int, rtx));
189 static rtx expand_builtin_apply_args PROTO((void));
190 static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
191 static void expand_builtin_return PROTO((rtx));
192 static rtx expand_increment PROTO((tree, int, int));
193 void bc_expand_increment PROTO((struct increment_operator *, tree));
194 rtx bc_allocate_local PROTO((int, int));
195 void bc_store_memory PROTO((tree, tree));
196 tree bc_expand_component_address PROTO((tree));
197 tree bc_expand_address PROTO((tree));
198 void bc_expand_constructor PROTO((tree));
199 void bc_adjust_stack PROTO((int));
200 tree bc_canonicalize_array_ref PROTO((tree));
201 void bc_load_memory PROTO((tree, tree));
202 void bc_load_externaddr PROTO((rtx));
203 void bc_load_externaddr_id PROTO((tree, int));
204 void bc_load_localaddr PROTO((rtx));
205 void bc_load_parmaddr PROTO((rtx));
206 static void preexpand_calls PROTO((tree));
207 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
208 void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
209 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
210 static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
211 static void do_jump_for_compare PROTO((rtx, rtx, rtx));
212 static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
213 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
214 static tree defer_cleanups_to PROTO((tree));
215 extern tree truthvalue_conversion PROTO((tree));
217 /* Record for each mode whether we can move a register directly to or
218 from an object of that mode in memory. If we can't, we won't try
219 to use that mode directly when accessing a field of that mode. */
/* NOTE(review): lines are elided from this excerpt; leading numerals are
   original line numbers, some comments below are truncated mid-sentence,
   and the matching #endif lines for the #if/#ifndef directives are not
   visible.  */
221 static char direct_load[NUM_MACHINE_MODES];
222 static char direct_store[NUM_MACHINE_MODES];
224 /* MOVE_RATIO is the number of move instructions that is better than
228 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
231 /* A value of around 6 would minimize code size; infinity would minimize
233 #define MOVE_RATIO 15
237 /* This array records the insn_code of insns to perform block moves. */
238 enum insn_code movstr_optab[NUM_MACHINE_MODES];
240 /* This array records the insn_code of insns to perform block clears. */
241 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
243 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
245 #ifndef SLOW_UNALIGNED_ACCESS
246 #define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
249 /* Register mappings for target machines without register windows. */
250 #ifndef INCOMING_REGNO
251 #define INCOMING_REGNO(OUT) (OUT)
253 #ifndef OUTGOING_REGNO
254 #define OUTGOING_REGNO(IN) (IN)
257 /* Maps used to convert modes to const, load, and store bytecodes. */
258 enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
259 enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
260 enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
262 /* Initialize maps used to convert modes to const, load, and store
/* Initialize the mode -> bytecode-opcode maps (const/load/store) declared
   above.  NOTE(review): lines are elided from this excerpt -- the return
   type, braces, and the declaration of `mode` are not visible; leading
   numerals are original line numbers.  */
266 bc_init_mode_to_opcode_maps ()
/* First mark every mode as having no opcode.  */
270 for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
271 mode_to_const_map[mode] =
272 mode_to_load_map[mode] =
273 mode_to_store_map[mode] = neverneverland;
/* Then let modemap.def fill in the real entries through DEF_MODEMAP.  */
275 #define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
276 mode_to_const_map[(int) SYM] = CONST; \
277 mode_to_load_map[(int) SYM] = LOAD; \
278 mode_to_store_map[(int) SYM] = STORE;
280 #include "modemap.def"
284 /* This is run once per compilation to set up which modes can be used
285 directly in memory and to initialize the block move optab. */
291 enum machine_mode mode;
292 /* Try indexing by frame ptr and try by stack ptr.
293 It is known that on the Convex the stack ptr isn't a valid index.
294 With luck, one or the other is valid on any machine. */
295 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
296 rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
299 insn = emit_insn (gen_rtx (SET, 0, 0));
300 pat = PATTERN (insn);
302 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
303 mode = (enum machine_mode) ((int) mode + 1))
309 direct_load[(int) mode] = direct_store[(int) mode] = 0;
310 PUT_MODE (mem, mode);
311 PUT_MODE (mem1, mode);
313 /* See if there is some register that can be used in this mode and
314 directly loaded or stored from memory. */
316 if (mode != VOIDmode && mode != BLKmode)
317 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
318 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
321 if (! HARD_REGNO_MODE_OK (regno, mode))
324 reg = gen_rtx (REG, mode, regno);
327 SET_DEST (pat) = reg;
328 if (recog (pat, insn, &num_clobbers) >= 0)
329 direct_load[(int) mode] = 1;
331 SET_SRC (pat) = mem1;
332 SET_DEST (pat) = reg;
333 if (recog (pat, insn, &num_clobbers) >= 0)
334 direct_load[(int) mode] = 1;
337 SET_DEST (pat) = mem;
338 if (recog (pat, insn, &num_clobbers) >= 0)
339 direct_store[(int) mode] = 1;
342 SET_DEST (pat) = mem1;
343 if (recog (pat, insn, &num_clobbers) >= 0)
344 direct_store[(int) mode] = 1;
351 /* This is run at the start of compiling a function. */
358 pending_stack_adjust = 0;
359 inhibit_defer_pop = 0;
360 cleanups_this_call = 0;
362 apply_args_value = 0;
366 /* Save all variables describing the current status into the structure *P.
367 This is used before starting a nested function. */
373 /* Instead of saving the postincrement queue, empty it. */
376 p->pending_stack_adjust = pending_stack_adjust;
377 p->inhibit_defer_pop = inhibit_defer_pop;
378 p->cleanups_this_call = cleanups_this_call;
379 p->saveregs_value = saveregs_value;
380 p->apply_args_value = apply_args_value;
381 p->forced_labels = forced_labels;
383 pending_stack_adjust = 0;
384 inhibit_defer_pop = 0;
385 cleanups_this_call = 0;
387 apply_args_value = 0;
391 /* Restore all variables describing the current status from the structure *P.
392 This is used after a nested function. */
/* Restore the expression-expansion status previously saved into *P (by the
   matching save routine, whose definition is on elided lines), on return
   from a nested function.  NOTE(review): the return type, parameter
   declaration, and braces are on elided lines of this excerpt.  */
395 restore_expr_status (p)
398 pending_stack_adjust = p->pending_stack_adjust;
399 inhibit_defer_pop = p->inhibit_defer_pop;
400 cleanups_this_call = p->cleanups_this_call;
401 saveregs_value = p->saveregs_value;
402 apply_args_value = p->apply_args_value;
403 forced_labels = p->forced_labels;
406 /* Manage the queue of increment instructions to be output
407 for POSTINCREMENT_EXPR expressions, etc. */
/* Head of the chain of queued increment insns; new QUEUED nodes are
   pushed on the front by enqueue_insn below.  */
409 static rtx pending_chain;
411 /* Queue up to increment (or change) VAR later. BODY says how:
412 BODY should be the same thing you would pass to emit_insn
413 to increment right away. It will go to emit_insn later on.
415 The value is a QUEUED expression to be used in place of VAR
416 where you want to guarantee the pre-incrementation value of VAR. */
/* NOTE(review): the return type, parameter declarations, and braces of
   enqueue_insn are on elided lines of this excerpt.  */
419 enqueue_insn (var, body)
422 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
423 var, NULL_RTX, NULL_RTX, body, pending_chain);
424 return pending_chain;
427 /* Use protect_from_queue to convert a QUEUED expression
428 into something that you can put immediately into an instruction.
429 If the queued incrementation has not happened yet,
430 protect_from_queue returns the variable itself.
431 If the incrementation has happened, protect_from_queue returns a temp
432 that contains a copy of the old value of the variable.
434 Any time an rtx which might possibly be a QUEUED is to be put
435 into an instruction, it must be passed through protect_from_queue first.
436 QUEUED expressions are not meaningful in instructions.
438 Do not pass a value through protect_from_queue and then hold
439 on to it for a while before putting it in an instruction!
440 If the queue is flushed in between, incorrect code will result. */
/* Convert a QUEUED expression in X into something usable in an insn:
   the variable itself if its queued increment has not been emitted yet,
   or a temporary holding the pre-increment value if it has (see the
   block comment above).  MODIFY nonzero means X is being written, not
   read.  NOTE(review): lines are elided from this excerpt -- the return
   type, braces, and several comment terminators are missing, so some
   comments below visually run into the code that follows them.  */
443 protect_from_queue (x, modify)
447 register RTX_CODE code = GET_CODE (x);
449 #if 0 /* A QUEUED can hang around after the queue is forced out. */
450 /* Shortcut for most common case. */
451 if (pending_chain == 0)
457 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
458 use of autoincrement. Make a copy of the contents of the memory
459 location rather than a copy of the address, but not if the value is
460 of mode BLKmode. Don't modify X in place since it might be
462 if (code == MEM && GET_MODE (x) != BLKmode
463 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
465 register rtx y = XEXP (x, 0);
466 register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));
468 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
469 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
470 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
474 register rtx temp = gen_reg_rtx (GET_MODE (new));
475 emit_insn_before (gen_move_insn (temp, new),
481 /* Otherwise, recursively protect the subexpressions of all
482 the kinds of rtx's that can contain a QUEUED. */
/* Presumably the one-operand rtx case -- the guarding condition is on an
   elided line; protect operand 0.  */
485 rtx tem = protect_from_queue (XEXP (x, 0), 0);
486 if (tem != XEXP (x, 0))
492 else if (code == PLUS || code == MULT)
494 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
495 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
496 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
505 /* If the increment has not happened, use the variable itself. */
506 if (QUEUED_INSN (x) == 0)
507 return QUEUED_VAR (x);
508 /* If the increment has happened and a pre-increment copy exists,
510 if (QUEUED_COPY (x) != 0)
511 return QUEUED_COPY (x);
512 /* The increment has happened but we haven't set up a pre-increment copy.
513 Set one up now, and use it. */
514 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
515 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
517 return QUEUED_COPY (x);
520 /* Return nonzero if X contains a QUEUED expression:
521 if it contains anything that will be altered by a queued increment.
522 We handle only combinations of MEM, PLUS, MINUS and MULT operators
523 since memory addresses generally contain only those. */
529 register enum rtx_code code = GET_CODE (x);
535 return queued_subexp_p (XEXP (x, 0));
539 return queued_subexp_p (XEXP (x, 0))
540 || queued_subexp_p (XEXP (x, 1));
545 /* Perform all the pending incrementations. */
551 while (p = pending_chain)
553 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
554 pending_chain = QUEUED_NEXT (p);
565 /* Copy data from FROM to TO, where the machine modes are not the same.
566 Both modes may be integer, or both may be floating.
567 UNSIGNEDP should be nonzero if FROM is an unsigned type.
568 This causes zero-extension instead of sign-extension. */
/* Emit insns to copy FROM into TO, converting between their machine modes
   (per the block comment above: both integer or both floating; UNSIGNEDP
   nonzero means zero-extend rather than sign-extend).  NOTE(review): this
   excerpt elides many lines -- declarations, braces, else-arms, #endif
   directives and error paths are missing, and a few comment terminators
   were dropped, so some comments run into the code that follows them.  */
571 convert_move (to, from, unsignedp)
572 register rtx to, from;
575 enum machine_mode to_mode = GET_MODE (to);
576 enum machine_mode from_mode = GET_MODE (from);
577 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
578 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
582 /* rtx code for making an equivalent value. */
583 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
/* Flush any queued increments of TO or FROM out of the way first.  */
585 to = protect_from_queue (to, 1);
586 from = protect_from_queue (from, 0);
588 if (to_real != from_real)
591 /* If FROM is a SUBREG that indicates that we have already done at least
592 the required extension, strip it. We don't handle such SUBREGs as
595 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
596 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
597 >= GET_MODE_SIZE (to_mode))
598 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
599 from = gen_lowpart (to_mode, from), from_mode = to_mode;
601 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
604 if (to_mode == from_mode
605 || (from_mode == VOIDmode && CONSTANT_P (from)))
607 emit_move_insn (to, from);
615 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
617 /* Try converting directly if the insn is supported. */
618 if ((code = can_extend_p (to_mode, from_mode, 0))
621 emit_unop_insn (code, to, from, UNKNOWN);
/* Ladder of target-specific float truncation insns; each #ifdef arm's
   closing #endif is on an elided line in this excerpt.  */
626 #ifdef HAVE_trunchfqf2
627 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
629 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
633 #ifdef HAVE_truncsfqf2
634 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
636 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
640 #ifdef HAVE_truncdfqf2
641 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
643 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
647 #ifdef HAVE_truncxfqf2
648 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
650 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
654 #ifdef HAVE_trunctfqf2
655 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
657 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
662 #ifdef HAVE_trunctqfhf2
663 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
665 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
669 #ifdef HAVE_truncsfhf2
670 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
672 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
676 #ifdef HAVE_truncdfhf2
677 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
679 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
683 #ifdef HAVE_truncxfhf2
684 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
686 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
690 #ifdef HAVE_trunctfhf2
691 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
693 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
698 #ifdef HAVE_truncsftqf2
699 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
701 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
705 #ifdef HAVE_truncdftqf2
706 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
708 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
712 #ifdef HAVE_truncxftqf2
713 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
715 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
719 #ifdef HAVE_trunctftqf2
720 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
722 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
727 #ifdef HAVE_truncdfsf2
728 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
730 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
734 #ifdef HAVE_truncxfsf2
735 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
737 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
741 #ifdef HAVE_trunctfsf2
742 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
744 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
748 #ifdef HAVE_truncxfdf2
749 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
751 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
755 #ifdef HAVE_trunctfdf2
756 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
758 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
/* Library-call fallback for float conversions; the mode dispatch
   surrounding these libcall selections is on elided lines.  */
770 libcall = extendsfdf2_libfunc;
774 libcall = extendsfxf2_libfunc;
778 libcall = extendsftf2_libfunc;
787 libcall = truncdfsf2_libfunc;
791 libcall = extenddfxf2_libfunc;
795 libcall = extenddftf2_libfunc;
804 libcall = truncxfsf2_libfunc;
808 libcall = truncxfdf2_libfunc;
817 libcall = trunctfsf2_libfunc;
821 libcall = trunctfdf2_libfunc;
827 if (libcall == (rtx) 0)
828 /* This conversion is not implemented yet. */
831 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
833 emit_move_insn (to, value);
837 /* Now both modes are integers. */
839 /* Handle expanding beyond a word. */
840 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
841 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
848 enum machine_mode lowpart_mode;
849 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
851 /* Try converting directly if the insn is supported. */
852 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
855 /* If FROM is a SUBREG, put it into a register. Do this
856 so that we always generate the same set of insns for
857 better cse'ing; if an intermediate assignment occurred,
858 we won't be doing the operation directly on the SUBREG. */
859 if (optimize > 0 && GET_CODE (from) == SUBREG)
860 from = force_reg (from_mode, from);
861 emit_unop_insn (code, to, from, equiv_code);
864 /* Next, try converting via full word. */
865 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
866 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
867 != CODE_FOR_nothing))
869 if (GET_CODE (to) == REG)
870 emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
871 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
872 emit_unop_insn (code, to,
873 gen_lowpart (word_mode, to), equiv_code);
877 /* No special multiword conversion insn; do it by hand. */
880 /* Since we will turn this into a no conflict block, we must ensure
881 that the source does not overlap the target. */
883 if (reg_overlap_mentioned_p (to, from))
884 from = force_reg (from_mode, from);
886 /* Get a copy of FROM widened to a word, if necessary. */
887 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
888 lowpart_mode = word_mode;
890 lowpart_mode = from_mode;
892 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
894 lowpart = gen_lowpart (lowpart_mode, to);
895 emit_move_insn (lowpart, lowfrom);
897 /* Compute the value to put in each remaining word. */
899 fill_value = const0_rtx;
904 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
905 && STORE_FLAG_VALUE == -1)
907 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
909 fill_value = gen_reg_rtx (word_mode);
910 emit_insn (gen_slt (fill_value));
916 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
917 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
919 fill_value = convert_to_mode (word_mode, fill_value, 1);
923 /* Fill the remaining words. */
924 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
926 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
927 rtx subword = operand_subword (to, index, 1, to_mode);
932 if (fill_value != subword)
933 emit_move_insn (subword, fill_value);
936 insns = get_insns ();
939 emit_no_conflict_block (insns, to, from, NULL_RTX,
940 gen_rtx (equiv_code, to_mode, copy_rtx (from)));
944 /* Truncating multi-word to a word or less. */
945 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
946 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
948 if (!((GET_CODE (from) == MEM
949 && ! MEM_VOLATILE_P (from)
950 && direct_load[(int) to_mode]
951 && ! mode_dependent_address_p (XEXP (from, 0)))
952 || GET_CODE (from) == REG
953 || GET_CODE (from) == SUBREG))
954 from = force_reg (from_mode, from);
955 convert_move (to, gen_lowpart (word_mode, from), 0);
959 /* Handle pointer conversion */ /* SPEE 900220 */
960 if (to_mode == PSImode)
962 if (from_mode != SImode)
963 from = convert_to_mode (SImode, from, unsignedp);
965 #ifdef HAVE_truncsipsi2
966 if (HAVE_truncsipsi2)
968 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
971 #endif /* HAVE_truncsipsi2 */
975 if (from_mode == PSImode)
977 if (to_mode != SImode)
979 from = convert_to_mode (SImode, from, unsignedp);
984 #ifdef HAVE_extendpsisi2
985 if (HAVE_extendpsisi2)
987 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
990 #endif /* HAVE_extendpsisi2 */
995 if (to_mode == PDImode)
997 if (from_mode != DImode)
998 from = convert_to_mode (DImode, from, unsignedp);
1000 #ifdef HAVE_truncdipdi2
1001 if (HAVE_truncdipdi2)
1003 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1006 #endif /* HAVE_truncdipdi2 */
1010 if (from_mode == PDImode)
1012 if (to_mode != DImode)
1014 from = convert_to_mode (DImode, from, unsignedp);
1019 #ifdef HAVE_extendpdidi2
1020 if (HAVE_extendpdidi2)
1022 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1025 #endif /* HAVE_extendpdidi2 */
1030 /* Now follow all the conversions between integers
1031 no more than a word long. */
1033 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1034 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1035 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1036 GET_MODE_BITSIZE (from_mode)))
1038 if (!((GET_CODE (from) == MEM
1039 && ! MEM_VOLATILE_P (from)
1040 && direct_load[(int) to_mode]
1041 && ! mode_dependent_address_p (XEXP (from, 0)))
1042 || GET_CODE (from) == REG
1043 || GET_CODE (from) == SUBREG))
1044 from = force_reg (from_mode, from);
1045 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1046 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1047 from = copy_to_reg (from);
1048 emit_move_insn (to, gen_lowpart (to_mode, from));
1052 /* Handle extension. */
1053 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1055 /* Convert directly if that works. */
1056 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1057 != CODE_FOR_nothing)
1059 emit_unop_insn (code, to, from, equiv_code);
1064 enum machine_mode intermediate;
1066 /* Search for a mode to convert via. */
1067 for (intermediate = from_mode; intermediate != VOIDmode;
1068 intermediate = GET_MODE_WIDER_MODE (intermediate))
1069 if (((can_extend_p (to_mode, intermediate, unsignedp)
1070 != CODE_FOR_nothing)
1071 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1072 && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
1073 && (can_extend_p (intermediate, from_mode, unsignedp)
1074 != CODE_FOR_nothing))
1076 convert_move (to, convert_to_mode (intermediate, from,
1077 unsignedp), unsignedp);
1081 /* No suitable intermediate mode. */
1086 /* Support special truncate insns for certain modes. */
/* Target-specific integer truncate insns for particular mode pairs; the
   fallback path forces FROM into a register and retries.  */
1088 if (from_mode == DImode && to_mode == SImode)
1090 #ifdef HAVE_truncdisi2
1091 if (HAVE_truncdisi2)
1093 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1097 convert_move (to, force_reg (from_mode, from), unsignedp);
1101 if (from_mode == DImode && to_mode == HImode)
1103 #ifdef HAVE_truncdihi2
1104 if (HAVE_truncdihi2)
1106 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1110 convert_move (to, force_reg (from_mode, from), unsignedp);
1114 if (from_mode == DImode && to_mode == QImode)
1116 #ifdef HAVE_truncdiqi2
1117 if (HAVE_truncdiqi2)
1119 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1123 convert_move (to, force_reg (from_mode, from), unsignedp);
1127 if (from_mode == SImode && to_mode == HImode)
1129 #ifdef HAVE_truncsihi2
1130 if (HAVE_truncsihi2)
1132 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1136 convert_move (to, force_reg (from_mode, from), unsignedp);
1140 if (from_mode == SImode && to_mode == QImode)
1142 #ifdef HAVE_truncsiqi2
1143 if (HAVE_truncsiqi2)
1145 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1149 convert_move (to, force_reg (from_mode, from), unsignedp);
1153 if (from_mode == HImode && to_mode == QImode)
1155 #ifdef HAVE_trunchiqi2
1156 if (HAVE_trunchiqi2)
1158 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1162 convert_move (to, force_reg (from_mode, from), unsignedp);
1166 if (from_mode == TImode && to_mode == DImode)
1168 #ifdef HAVE_trunctidi2
1169 if (HAVE_trunctidi2)
1171 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1175 convert_move (to, force_reg (from_mode, from), unsignedp);
1179 if (from_mode == TImode && to_mode == SImode)
1181 #ifdef HAVE_trunctisi2
1182 if (HAVE_trunctisi2)
1184 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1188 convert_move (to, force_reg (from_mode, from), unsignedp);
1192 if (from_mode == TImode && to_mode == HImode)
1194 #ifdef HAVE_trunctihi2
1195 if (HAVE_trunctihi2)
1197 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1201 convert_move (to, force_reg (from_mode, from), unsignedp);
1205 if (from_mode == TImode && to_mode == QImode)
1207 #ifdef HAVE_trunctiqi2
1208 if (HAVE_trunctiqi2)
1210 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1214 convert_move (to, force_reg (from_mode, from), unsignedp);
1218 /* Handle truncation of volatile memrefs, and so on;
1219 the things that couldn't be truncated directly,
1220 and for which there was no special instruction. */
1221 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1223 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1224 emit_move_insn (to, temp);
1228 /* Mode combination is not recognized. */
1232 /* Return an rtx for a value that would result
1233 from converting X to mode MODE.
1234 Both X and MODE may be floating, or both integer.
1235 UNSIGNEDP is nonzero if X is an unsigned value.
1236 This can be done by referring to a part of X in place
1237 or by copying to a new temporary with conversion.
1239 This function *must not* call protect_from_queue
1240 except when putting X into an insn (in which case convert_move does it). */
/* Convert X to MODE (see the block comment above); a thin wrapper that
   passes VOIDmode as the old mode to convert_modes.  NOTE(review): the
   return type, the remaining parameter declarations, and braces are on
   elided lines of this excerpt.  */
1243 convert_to_mode (mode, x, unsignedp)
1244 enum machine_mode mode;
1248 return convert_modes (mode, VOIDmode, x, unsignedp);
1251 /* Return an rtx for a value that would result
1252 from converting X from mode OLDMODE to mode MODE.
1253 Both modes may be floating, or both integer.
1254 UNSIGNEDP is nonzero if X is an unsigned value.
1256 This can be done by referring to a part of X in place
1257 or by copying to a new temporary with conversion.
1259 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1261 This function *must not* call protect_from_queue
1262 except when putting X into an insn (in which case convert_move does it). */
/* Return an rtx for X converted from OLDMODE to MODE (see the block
   comment above); OLDMODE may be VOIDmode when X carries its own mode.
   Reuses (part of) X in place when possible, else copies through a new
   pseudo via convert_move.  NOTE(review): lines are elided from this
   excerpt -- the return type, some declarations, braces, and several
   condition arms are missing.  */
1265 convert_modes (mode, oldmode, x, unsignedp)
1266 enum machine_mode mode, oldmode;
1272 /* If FROM is a SUBREG that indicates that we have already done at least
1273 the required extension, strip it. */
1275 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1276 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1277 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1278 x = gen_lowpart (mode, x);
/* Prefer the mode X actually carries, when it has one.  */
1280 if (GET_MODE (x) != VOIDmode)
1281 oldmode = GET_MODE (x);
1283 if (mode == oldmode)
1286 /* There is one case that we must handle specially: If we are converting
1287 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1288 we are to interpret the constant as unsigned, gen_lowpart will do
1289 the wrong if the constant appears negative. What we want to do is
1290 make the high-order word of the constant zero, not all ones. */
1292 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1293 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1294 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1296 HOST_WIDE_INT val = INTVAL (x);
1298 if (oldmode != VOIDmode
1299 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1301 int width = GET_MODE_BITSIZE (oldmode);
1303 /* We need to zero extend VAL. */
1304 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1307 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1310 /* We can do this with a gen_lowpart if both desired and current modes
1311 are integer, and this is either a constant integer, a register, or a
1312 non-volatile MEM. Except for the constant case where MODE is no
1313 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1315 if ((GET_CODE (x) == CONST_INT
1316 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1317 || (GET_MODE_CLASS (mode) == MODE_INT
1318 && GET_MODE_CLASS (oldmode) == MODE_INT
1319 && (GET_CODE (x) == CONST_DOUBLE
1320 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1321 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1322 && direct_load[(int) mode])
1323 || (GET_CODE (x) == REG
1324 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1325 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1327 /* ?? If we don't know OLDMODE, we have to assume here that
1328 X does not need sign- or zero-extension. This may not be
1329 the case, but it's the best we can do. */
1330 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1331 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1333 HOST_WIDE_INT val = INTVAL (x);
1334 int width = GET_MODE_BITSIZE (oldmode);
1336 /* We must sign or zero-extend in this case. Start by
1337 zero-extending, then sign extend if we need to. */
1338 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1340 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1341 val |= (HOST_WIDE_INT) (-1) << width;
1343 return GEN_INT (val);
1346 return gen_lowpart (mode, x);
/* Fallback: copy into a fresh pseudo of MODE with a real conversion.  */
1349 temp = gen_reg_rtx (mode);
1350 convert_move (temp, x, unsignedp);
1354 /* Generate several move instructions to copy LEN bytes
1355 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1356 The caller must pass FROM and TO
1357 through protect_from_queue before calling.
1358 ALIGN (in bytes) is maximum alignment we can assume. */
/* Copy LEN bytes from block FROM to block TO (both BLKmode MEMs) with a
   sequence of scalar move insns, widest usable integer mode first.  */
1361 move_by_pieces (to, from, len, align)
1365 struct move_by_pieces data;
1366 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1367 int max_size = MOVE_MAX + 1;
1370 data.to_addr = to_addr;
1371 data.from_addr = from_addr;
/* Record whether each address is already auto-increment/decrement.  */
1375 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1376 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1378 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1379 || GET_CODE (from_addr) == POST_INC
1380 || GET_CODE (from_addr) == POST_DEC);
1382 data.explicit_inc_from = 0;
1383 data.explicit_inc_to = 0;
/* A decrementing destination means we copy from the end backwards.  */
1385 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1386 if (data.reverse) data.offset = len;
1389 data.to_struct = MEM_IN_STRUCT_P (to);
1390 data.from_struct = MEM_IN_STRUCT_P (from);
1392 /* If copying requires more than two move insns,
1393 copy addresses to registers (to make displacements shorter)
1394 and use post-increment if available.  */
1395 if (!(data.autinc_from && data.autinc_to)
1396 && move_by_pieces_ninsns (len, align) > 2)
1398 #ifdef HAVE_PRE_DECREMENT
1399 if (data.reverse && ! data.autinc_from)
1401 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1402 data.autinc_from = 1;
1403 data.explicit_inc_from = -1;
1406 #ifdef HAVE_POST_INCREMENT
1407 if (! data.autinc_from)
1409 data.from_addr = copy_addr_to_reg (from_addr);
1410 data.autinc_from = 1;
1411 data.explicit_inc_from = 1;
1414 if (!data.autinc_from && CONSTANT_P (from_addr))
1415 data.from_addr = copy_addr_to_reg (from_addr);
1416 #ifdef HAVE_PRE_DECREMENT
1417 if (data.reverse && ! data.autinc_to)
1419 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1421 data.explicit_inc_to = -1;
1424 #ifdef HAVE_POST_INCREMENT
1425 if (! data.reverse && ! data.autinc_to)
1427 data.to_addr = copy_addr_to_reg (to_addr);
1429 data.explicit_inc_to = 1;
1432 if (!data.autinc_to && CONSTANT_P (to_addr))
1433 data.to_addr = copy_addr_to_reg (to_addr);
/* When unaligned access is cheap (or alignment is full), ignore ALIGN.  */
1436 if (! SLOW_UNALIGNED_ACCESS
1437 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1440 /* First move what we can in the largest integer mode, then go to
1441 successively smaller modes.  */
1443 while (max_size > 1)
1445 enum machine_mode mode = VOIDmode, tmode;
1446 enum insn_code icode;
/* Find the widest integer mode strictly narrower than max_size.  */
1448 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1449 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1450 if (GET_MODE_SIZE (tmode) < max_size)
1453 if (mode == VOIDmode)
1456 icode = mov_optab->handlers[(int) mode].insn_code;
1457 if (icode != CODE_FOR_nothing
1458 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1459 GET_MODE_SIZE (mode)))
1460 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1462 max_size = GET_MODE_SIZE (mode);
1465 /* The code above should have handled everything.  */
1470 /* Return number of insns required to move L bytes by pieces.
1471 ALIGN (in bytes) is maximum alignment we can assume. */
/* Return the number of move insns move_by_pieces would emit to copy L
   bytes at maximum alignment ALIGN (in bytes); mirrors its mode walk.  */
1474 move_by_pieces_ninsns (l, align)
1478 register int n_insns = 0;
1479 int max_size = MOVE_MAX + 1;
/* Same alignment relaxation as move_by_pieces, so counts agree.  */
1481 if (! SLOW_UNALIGNED_ACCESS
1482 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1485 while (max_size > 1)
1487 enum machine_mode mode = VOIDmode, tmode;
1488 enum insn_code icode;
1490 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1491 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1492 if (GET_MODE_SIZE (tmode) < max_size)
1495 if (mode == VOIDmode)
1498 icode = mov_optab->handlers[(int) mode].insn_code;
1499 if (icode != CODE_FOR_nothing
1500 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1501 GET_MODE_SIZE (mode)))
/* Count whole moves in this mode; remainder is left for narrower modes.  */
1502 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1504 max_size = GET_MODE_SIZE (mode);
1510 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1511 with move instructions for mode MODE. GENFUN is the gen_... function
1512 to make a move insn for that mode. DATA has all the other info. */
/* Helper for move_by_pieces: emit as many MODE-sized moves as fit in the
   remaining length, via GENFUN, updating offsets/addresses in DATA.  */
1515 move_by_pieces_1 (genfun, mode, data)
1517 enum machine_mode mode;
1518 struct move_by_pieces *data;
1520 register int size = GET_MODE_SIZE (mode);
1521 register rtx to1, from1;
1523 while (data->len >= size)
/* In reverse mode the offset is decremented BEFORE each move.  */
1525 if (data->reverse) data->offset -= size;
1527 to1 = (data->autinc_to
1528 ? gen_rtx (MEM, mode, data->to_addr)
1529 : change_address (data->to, mode,
1530 plus_constant (data->to_addr, data->offset)));
1531 MEM_IN_STRUCT_P (to1) = data->to_struct;
1534 ? gen_rtx (MEM, mode, data->from_addr)
1535 : change_address (data->from, mode,
1536 plus_constant (data->from_addr, data->offset)));
1537 MEM_IN_STRUCT_P (from1) = data->from_struct;
/* Explicit pre-decrement: adjust the address registers before the move.  */
1539 #ifdef HAVE_PRE_DECREMENT
1540 if (data->explicit_inc_to < 0)
1541 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1542 if (data->explicit_inc_from < 0)
1543 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1546 emit_insn ((*genfun) (to1, from1));
/* Explicit post-increment: adjust the address registers after the move.  */
1547 #ifdef HAVE_POST_INCREMENT
1548 if (data->explicit_inc_to > 0)
1549 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1550 if (data->explicit_inc_from > 0)
1551 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1554 if (! data->reverse) data->offset += size;
1560 /* Emit code to move a block Y to a block X.
1561 This may be done with string-move instructions,
1562 with multiple scalar move instructions, or with a library call.
1564 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1566 SIZE is an rtx that says how long they are.
1567 ALIGN is the maximum alignment we can assume they have,
1568 measured in bytes. */
/* Copy block Y to block X (both BLKmode MEMs): by pieces if cheap, else
   via a movstr pattern if the target has one, else a library call.  */
1571 emit_block_move (x, y, size, align)
1576 if (GET_MODE (x) != BLKmode)
1579 if (GET_MODE (y) != BLKmode)
1582 x = protect_from_queue (x, 1);
1583 y = protect_from_queue (y, 0);
1584 size = protect_from_queue (size, 0);
1586 if (GET_CODE (x) != MEM)
1588 if (GET_CODE (y) != MEM)
/* Small constant-size copies: cheaper as individual scalar moves.  */
1593 if (GET_CODE (size) == CONST_INT
1594 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1595 move_by_pieces (x, y, INTVAL (size), align);
1598 /* Try the most limited insn first, because there's no point
1599 including more than one in the machine description unless
1600 the more limited one has some advantage.  */
1602 rtx opalign = GEN_INT (align);
1603 enum machine_mode mode;
1605 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1606 mode = GET_MODE_WIDER_MODE (mode))
1608 enum insn_code code = movstr_optab[(int) mode];
1610 if (code != CODE_FOR_nothing
1611 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1612 here because if SIZE is less than the mode mask, as it is
1613 returned by the macro, it will definitely be less than the
1614 actual mode mask.  */
1615 && ((GET_CODE (size) == CONST_INT
1616 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1617 <= GET_MODE_MASK (mode)))
1618 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
/* Check the pattern's operand predicates before committing to it.  */
1619 && (insn_operand_predicate[(int) code][0] == 0
1620 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1621 && (insn_operand_predicate[(int) code][1] == 0
1622 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1623 && (insn_operand_predicate[(int) code][3] == 0
1624 || (*insn_operand_predicate[(int) code][3]) (opalign,
1628 rtx last = get_last_insn ();
1631 op2 = convert_to_mode (mode, size, 1);
1632 if (insn_operand_predicate[(int) code][2] != 0
1633 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1634 op2 = copy_to_mode_reg (mode, op2);
1636 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
/* Pattern expansion failed: discard partial insns and keep trying.  */
1643 delete_insns_since (last);
/* No pattern worked: fall back to memcpy or bcopy, per target.  */
1647 #ifdef TARGET_MEM_FUNCTIONS
1648 emit_library_call (memcpy_libfunc, 0,
1649 VOIDmode, 3, XEXP (x, 0), Pmode,
1651 convert_to_mode (TYPE_MODE (sizetype), size,
1652 TREE_UNSIGNED (sizetype)),
1653 TYPE_MODE (sizetype));
1655 emit_library_call (bcopy_libfunc, 0,
1656 VOIDmode, 3, XEXP (y, 0), Pmode,
1658 convert_to_mode (TYPE_MODE (integer_type_node), size,
1659 TREE_UNSIGNED (integer_type_node)),
1660 TYPE_MODE (integer_type_node));
1665 /* Copy all or part of a value X into registers starting at REGNO.
1666 The number of registers to be filled is NREGS. */
/* Load NREGS consecutive hard registers starting at REGNO from value X
   of mode MODE, using a load_multiple insn when the target has one.  */
1669 move_block_to_reg (regno, x, nregs, mode)
1673 enum machine_mode mode;
/* Illegitimate constants must be forced into the constant pool first.  */
1681 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1682 x = validize_mem (force_const_mem (mode, x));
1684 /* See if the machine can do this with a load multiple insn.  */
1685 #ifdef HAVE_load_multiple
1686 if (HAVE_load_multiple)
1688 last = get_last_insn ();
1689 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
/* Pattern failed: roll back and fall through to word-by-word moves.  */
1697 delete_insns_since (last);
1701 for (i = 0; i < nregs; i++)
1702 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1703 operand_subword_force (x, i, mode));
1706 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1707 The number of registers to be filled is NREGS. SIZE indicates the number
1708 of bytes in the object X. */
/* Store NREGS consecutive hard registers starting at REGNO into BLKmode
   value X; SIZE is the byte size of the object being stored.  */
1712 move_block_from_reg (regno, x, nregs, size)
1720 enum machine_mode mode;
1722 /* If SIZE is that of a mode no bigger than a word, just use that
1723 mode's store operation.  */
1724 if (size <= UNITS_PER_WORD
1725 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1727 emit_move_insn (change_address (x, mode, NULL),
1728 gen_rtx (REG, mode, regno));
1732 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1733 to the left before storing to memory.  Note that the previous test
1734 doesn't handle all cases (e.g. SIZE == 3).  */
1735 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1737 rtx tem = operand_subword (x, 0, 1, BLKmode);
/* Shift the register value left so the bytes land at the low addresses.  */
1743 shift = expand_shift (LSHIFT_EXPR, word_mode,
1744 gen_rtx (REG, word_mode, regno),
1745 build_int_2 ((UNITS_PER_WORD - size)
1746 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1747 emit_move_insn (tem, shift);
1751 /* See if the machine can do this with a store multiple insn.  */
1752 #ifdef HAVE_store_multiple
1753 if (HAVE_store_multiple)
1755 last = get_last_insn ();
1756 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
/* Pattern failed: roll back and fall through to word-by-word stores.  */
1764 delete_insns_since (last);
1768 for (i = 0; i < nregs; i++)
1770 rtx tem = operand_subword (x, i, 1, BLKmode);
1775 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1779 /* Emit code to move a block Y to a block X, where X is non-consecutive
1780 registers represented by a PARALLEL. */
/* Load value Y into the non-consecutive registers described by the
   PARALLEL X, one (register, byte-offset) element at a time.  */
1783 emit_group_load (x, y)
1786 rtx target_reg, source;
1789 if (GET_CODE (x) != PARALLEL)
1792 /* Check for a NULL entry, used to indicate that the parameter goes
1793 both on the stack and in registers.  */
1794 if (XEXP (XVECEXP (x, 0, 0), 0))
1799 for (; i < XVECLEN (x, 0); i++)
1801 rtx element = XVECEXP (x, 0, i);
/* Each PARALLEL element pairs a target register with a byte offset.  */
1803 target_reg = XEXP (element, 0);
1805 if (GET_CODE (y) == MEM)
1806 source = change_address (y, GET_MODE (target_reg),
1807 plus_constant (XEXP (y, 0),
1808 INTVAL (XEXP (element, 1))));
1809 else if (XEXP (element, 1) == const0_rtx)
1811 if (GET_MODE (target_reg) == GET_MODE (y))
1813 /* Allow for the target_reg to be smaller than the input register
1814 to allow for AIX with 4 DF arguments after a single SI arg.  The
1815 last DF argument will only load 1 word into the integer registers,
1816 but load a DF value into the float registers.  */
1817 else if ((GET_MODE_SIZE (GET_MODE (target_reg))
1818 <= GET_MODE_SIZE (GET_MODE (y)))
1819 && GET_MODE (target_reg) == word_mode)
1820 /* This might be a const_double, so we can't just use SUBREG.  */
1821 source = operand_subword (y, 0, 0, VOIDmode);
1828 emit_move_insn (target_reg, source);
1832 /* Emit code to move a block Y to a block X, where Y is non-consecutive
1833 registers represented by a PARALLEL. */
/* Store the non-consecutive registers described by PARALLEL Y into
   value X, one (register, byte-offset) element at a time.  */
1836 emit_group_store (x, y)
1839 rtx source_reg, target;
1842 if (GET_CODE (y) != PARALLEL)
1845 /* Check for a NULL entry, used to indicate that the parameter goes
1846 both on the stack and in registers.  */
1847 if (XEXP (XVECEXP (y, 0, 0), 0))
1852 for (; i < XVECLEN (y, 0); i++)
1854 rtx element = XVECEXP (y, 0, i);
/* Each PARALLEL element pairs a source register with a byte offset.  */
1856 source_reg = XEXP (element, 0);
1858 if (GET_CODE (x) == MEM)
1859 target = change_address (x, GET_MODE (source_reg),
1860 plus_constant (XEXP (x, 0),
1861 INTVAL (XEXP (element, 1))));
1862 else if (XEXP (element, 1) == const0_rtx)
/* Reconcile a mode mismatch by taking the low part of the target.  */
1865 if (GET_MODE (target) != GET_MODE (source_reg))
1866 target = gen_lowpart (GET_MODE (source_reg), target);
1871 emit_move_insn (target, source_reg);
1875 /* Add a USE expression for REG to the (possibly empty) list pointed
1876 to by CALL_FUSAGE. REG must denote a hard register. */
/* Prepend a (USE reg) expression to the *CALL_FUSAGE list; REG must be a
   hard register (pseudos are rejected).  */
1879 use_reg (call_fusage, reg)
1880 rtx *call_fusage, reg;
1882 if (GET_CODE (reg) != REG
1883 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
1887 = gen_rtx (EXPR_LIST, VOIDmode,
1888 gen_rtx (USE, VOIDmode, reg), *call_fusage);
1891 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1892 starting at REGNO. All of these registers must be hard registers. */
/* Add USE expressions to *CALL_FUSAGE for NREGS consecutive hard
   registers starting at REGNO, each in its raw register mode.  */
1895 use_regs (call_fusage, regno, nregs)
/* The whole range must lie within the hard registers.  */
1902 if (regno + nregs > FIRST_PSEUDO_REGISTER)
1905 for (i = 0; i < nregs; i++)
1906 use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
1909 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
1910 PARALLEL REGS. This is for calls that pass values in multiple
1911 non-contiguous locations. The Irix 6 ABI has examples of this. */
/* Add USE expressions to *CALL_FUSAGE for each register named in the
   PARALLEL REGS (values passed in non-contiguous locations).  */
1914 use_group_regs (call_fusage, regs)
1920 /* Check for a NULL entry, used to indicate that the parameter goes
1921 both on the stack and in registers.  */
1922 if (XEXP (XVECEXP (regs, 0, 0), 0))
1927 for (; i < XVECLEN (regs, 0); i++)
1928 use_reg (call_fusage, XEXP (XVECEXP (regs, 0, i), 0));
1931 /* Generate several move instructions to clear LEN bytes of block TO.
1932 (A MEM rtx with BLKmode). The caller must pass TO through
1933 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
/* Zero LEN bytes of BLKmode MEM TO with a sequence of scalar store insns,
   widest usable integer mode first; destination-only twin of
   move_by_pieces.  */
1937 clear_by_pieces (to, len, align)
1941 struct clear_by_pieces data;
1942 rtx to_addr = XEXP (to, 0);
1943 int max_size = MOVE_MAX + 1;
1946 data.to_addr = to_addr;
/* Record whether the address is already auto-increment/decrement.  */
1949 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1950 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1952 data.explicit_inc_to = 0;
/* A decrementing destination means we clear from the end backwards.  */
1954 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1955 if (data.reverse) data.offset = len;
1958 data.to_struct = MEM_IN_STRUCT_P (to);
1960 /* If copying requires more than two move insns,
1961 copy addresses to registers (to make displacements shorter)
1962 and use post-increment if available.  */
1964 && move_by_pieces_ninsns (len, align) > 2)
1966 #ifdef HAVE_PRE_DECREMENT
1967 if (data.reverse && ! data.autinc_to)
1969 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1971 data.explicit_inc_to = -1;
1974 #ifdef HAVE_POST_INCREMENT
1975 if (! data.reverse && ! data.autinc_to)
1977 data.to_addr = copy_addr_to_reg (to_addr);
1979 data.explicit_inc_to = 1;
1982 if (!data.autinc_to && CONSTANT_P (to_addr))
1983 data.to_addr = copy_addr_to_reg (to_addr);
/* When unaligned access is cheap (or alignment is full), ignore ALIGN.  */
1986 if (! SLOW_UNALIGNED_ACCESS
1987 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1990 /* First move what we can in the largest integer mode, then go to
1991 successively smaller modes.  */
1993 while (max_size > 1)
1995 enum machine_mode mode = VOIDmode, tmode;
1996 enum insn_code icode;
1998 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1999 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2000 if (GET_MODE_SIZE (tmode) < max_size)
2003 if (mode == VOIDmode)
2006 icode = mov_optab->handlers[(int) mode].insn_code;
2007 if (icode != CODE_FOR_nothing
2008 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2009 GET_MODE_SIZE (mode)))
2010 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2012 max_size = GET_MODE_SIZE (mode);
2015 /* The code above should have handled everything.  */
2020 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2021 with move instructions for mode MODE. GENFUN is the gen_... function
2022 to make a move insn for that mode. DATA has all the other info. */
/* Helper for clear_by_pieces: emit as many MODE-sized stores of zero
   (const0_rtx) as fit in the remaining length, via GENFUN.  */
2025 clear_by_pieces_1 (genfun, mode, data)
2027 enum machine_mode mode;
2028 struct clear_by_pieces *data;
2030 register int size = GET_MODE_SIZE (mode);
2033 while (data->len >= size)
/* In reverse mode the offset is decremented BEFORE each store.  */
2035 if (data->reverse) data->offset -= size;
2037 to1 = (data->autinc_to
2038 ? gen_rtx (MEM, mode, data->to_addr)
2039 : change_address (data->to, mode,
2040 plus_constant (data->to_addr, data->offset)));
2041 MEM_IN_STRUCT_P (to1) = data->to_struct;
/* Explicit pre-decrement: adjust the address register before the store.  */
2043 #ifdef HAVE_PRE_DECREMENT
2044 if (data->explicit_inc_to < 0)
2045 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2048 emit_insn ((*genfun) (to1, const0_rtx));
/* Explicit post-increment: adjust the address register after the store.  */
2049 #ifdef HAVE_POST_INCREMENT
2050 if (data->explicit_inc_to > 0)
2051 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2054 if (! data->reverse) data->offset += size;
2060 /* Write zeros through the storage of OBJECT.
2061 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2062 the maximum alignment it is known to have, measured in bytes.  */
/* Write zeros through OBJECT: by pieces if cheap, else via a target
   clrstr pattern, else a memset/bzero library call; non-BLKmode objects
   get a plain move of const0_rtx.  */
2065 clear_storage (object, size, align)
2070 if (GET_MODE (object) == BLKmode)
2072 object = protect_from_queue (object, 1);
2073 size = protect_from_queue (size, 0);
/* Small constant-size clears: cheaper as individual scalar stores.  */
2075 if (GET_CODE (size) == CONST_INT
2076 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
2077 clear_by_pieces (object, INTVAL (size), align);
2081 /* Try the most limited insn first, because there's no point
2082 including more than one in the machine description unless
2083 the more limited one has some advantage.  */
2085 rtx opalign = GEN_INT (align);
2086 enum machine_mode mode;
2088 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2089 mode = GET_MODE_WIDER_MODE (mode))
2091 enum insn_code code = clrstr_optab[(int) mode];
2093 if (code != CODE_FOR_nothing
2094 /* We don't need MODE to be narrower than
2095 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2096 the mode mask, as it is returned by the macro, it will
2097 definitely be less than the actual mode mask.  */
2098 && ((GET_CODE (size) == CONST_INT
2099 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2100 <= GET_MODE_MASK (mode)))
2101 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
/* Check the pattern's operand predicates before committing to it.  */
2102 && (insn_operand_predicate[(int) code][0] == 0
2103 || (*insn_operand_predicate[(int) code][0]) (object,
2105 && (insn_operand_predicate[(int) code][2] == 0
2106 || (*insn_operand_predicate[(int) code][2]) (opalign,
2110 rtx last = get_last_insn ();
2113 op1 = convert_to_mode (mode, size, 1);
2114 if (insn_operand_predicate[(int) code][1] != 0
2115 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2117 op1 = copy_to_mode_reg (mode, op1);
2119 pat = GEN_FCN ((int) code) (object, op1, opalign);
/* Pattern expansion failed: discard partial insns and keep trying.  */
2126 delete_insns_since (last);
/* No pattern worked: fall back to memset or bzero, per target.  */
2131 #ifdef TARGET_MEM_FUNCTIONS
2132 emit_library_call (memset_libfunc, 0,
2134 XEXP (object, 0), Pmode,
2135 const0_rtx, TYPE_MODE (integer_type_node),
2136 convert_to_mode (TYPE_MODE (sizetype),
2137 size, TREE_UNSIGNED (sizetype)),
2138 TYPE_MODE (sizetype));
2140 emit_library_call (bzero_libfunc, 0,
2142 XEXP (object, 0), Pmode,
2143 convert_to_mode (TYPE_MODE (integer_type_node),
2145 TREE_UNSIGNED (integer_type_node)),
2146 TYPE_MODE (integer_type_node));
/* Non-BLKmode object: a single move of zero suffices.  */
2151 emit_move_insn (object, const0_rtx);
2154 /* Generate code to copy Y into X.
2155 Both Y and X must have the same mode, except that
2156 Y can be a constant with VOIDmode.
2157 This mode cannot be BLKmode; use emit_block_move for that.
2159 Return the last instruction emitted. */
/* Copy Y into X (same mode, or Y a VOIDmode constant), legitimizing
   constants and addresses first, then deferring to emit_move_insn_1.
   Returns the last insn emitted.  */
2162 emit_move_insn (x, y)
2165 enum machine_mode mode = GET_MODE (x);
2167 x = protect_from_queue (x, 1);
2168 y = protect_from_queue (y, 0);
/* BLKmode and mismatched non-VOID modes are caller errors here.  */
2170 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2173 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2174 y = force_const_mem (mode, y);
2176 /* If X or Y are memory references, verify that their addresses are valid
2178 if (GET_CODE (x) == MEM
2179 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2180 && ! push_operand (x, GET_MODE (x)))
2182 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2183 x = change_address (x, VOIDmode, XEXP (x, 0));
2185 if (GET_CODE (y) == MEM
2186 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2188 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2189 y = change_address (y, VOIDmode, XEXP (y, 0));
2191 if (mode == BLKmode)
2194 return emit_move_insn_1 (x, y);
2197 /* Low level part of emit_move_insn.
2198 Called just like emit_move_insn, but assumes X and Y
2199 are basically valid. */
/* Low-level part of emit_move_insn: X and Y are assumed basically valid.
   Uses the mode's mov pattern if any; otherwise splits complex modes into
   real/imag parts, and multi-word modes into word-sized moves.  */
2202 emit_move_insn_1 (x, y)
2205 enum machine_mode mode = GET_MODE (x);
2206 enum machine_mode submode;
2207 enum mode_class class = GET_MODE_CLASS (mode);
/* Easy case: the target has a move pattern for this mode.  */
2210 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2212 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2214 /* Expand complex moves by moving real part and imag part, if possible.  */
2215 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2216 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2218 (class == MODE_COMPLEX_INT
2219 ? MODE_INT : MODE_FLOAT),
2221 && (mov_optab->handlers[(int) submode].insn_code
2222 != CODE_FOR_nothing))
2224 /* Don't split destination if it is a stack push.  */
2225 int stack = push_operand (x, GET_MODE (x));
2228 /* If this is a stack, push the highpart first, so it
2229 will be in the argument order.
2231 In that case, change_address is used only to convert
2232 the mode, not to change the address.  */
2235 /* Note that the real part always precedes the imag part in memory
2236 regardless of machine's endianness.  */
2237 #ifdef STACK_GROWS_DOWNWARD
2238 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2239 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2240 gen_imagpart (submode, y)));
2241 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2242 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2243 gen_realpart (submode, y)));
2245 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2246 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2247 gen_realpart (submode, y)));
2248 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2249 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2250 gen_imagpart (submode, y)));
/* Not a stack push: move real and imaginary halves independently.  */
2255 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2256 (gen_realpart (submode, x), gen_realpart (submode, y)));
2257 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2258 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2261 return get_last_insn ();
2264 /* This will handle any multi-word mode that lacks a move_insn pattern.
2265 However, you will get better code if you define such patterns,
2266 even if they must turn into multiple assembler instructions.  */
2267 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2272 #ifdef PUSH_ROUNDING
2274 /* If X is a push on the stack, do the push now and replace
2275 X with a reference to the stack pointer.  */
2276 if (push_operand (x, GET_MODE (x)))
2278 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2279 x = change_address (x, VOIDmode, stack_pointer_rtx);
2283 /* Show the output dies here.  */
2285 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
/* Move the value one word at a time.  */
2288 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2291 rtx xpart = operand_subword (x, i, 1, mode);
2292 rtx ypart = operand_subword (y, i, 1, mode);
2294 /* If we can't get a part of Y, put Y into memory if it is a
2295 constant.  Otherwise, force it into a register.  If we still
2296 can't get a part of Y, abort.  */
2297 if (ypart == 0 && CONSTANT_P (y))
2299 y = force_const_mem (mode, y);
2300 ypart = operand_subword (y, i, 1, mode);
2302 else if (ypart == 0)
2303 ypart = operand_subword_force (y, i, mode);
2305 if (xpart == 0 || ypart == 0)
2308 last_insn = emit_move_insn (xpart, ypart);
2317 /* Pushing data onto the stack. */
2319 /* Push a block of length SIZE (perhaps variable)
2320 and return an rtx to address the beginning of the block.
2321 Note that it is not possible for the value returned to be a QUEUED.
2322 The value may be virtual_outgoing_args_rtx.
2324 EXTRA is the number of bytes of padding to push in addition to SIZE.
2325 BELOW nonzero means this padding comes at low addresses;
2326 otherwise, the padding comes at high addresses. */
/* Push a block of SIZE bytes (plus EXTRA bytes of padding, below or
   above per BELOW) and return an address rtx for the block's start.  */
2329 push_block (size, extra, below)
/* SIZE arrives in ptr_mode; stack arithmetic is done in Pmode.  */
2335 size = convert_modes (Pmode, ptr_mode, size, 1);
2336 if (CONSTANT_P (size))
2337 anti_adjust_stack (plus_constant (size, extra));
2338 else if (GET_CODE (size) == REG && extra == 0)
2339 anti_adjust_stack (size);
/* Variable size with padding: compute SIZE + EXTRA into a register.  */
2342 rtx temp = copy_to_mode_reg (Pmode, size);
2344 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2345 temp, 0, OPTAB_LIB_WIDEN);
2346 anti_adjust_stack (temp);
2349 #ifdef STACK_GROWS_DOWNWARD
2350 temp = virtual_outgoing_args_rtx;
2351 if (extra != 0 && below)
2352 temp = plus_constant (temp, extra);
2354 if (GET_CODE (size) == CONST_INT)
2355 temp = plus_constant (virtual_outgoing_args_rtx,
2356 - INTVAL (size) - (below ? 0 : extra));
2357 else if (extra != 0 && !below)
2358 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2359 negate_rtx (Pmode, plus_constant (size, extra)));
2361 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2362 negate_rtx (Pmode, size));
2365 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
/* Helper used by push insns: (PRE_DEC sp) or similar per the target.  */
2371 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2374 /* Generate code to push X onto the stack, assuming it has mode MODE and
2376 MODE is redundant except when X is a CONST_INT (since they don't
2378 SIZE is an rtx for the size of data to be copied (in bytes),
2379 needed only if X is BLKmode.
2381 ALIGN (in bytes) is maximum alignment we can assume.
2383 If PARTIAL and REG are both nonzero, then copy that many of the first
2384 words of X into registers starting with REG, and push the rest of X.
2385 The amount of space pushed is decreased by PARTIAL words,
2386 rounded *down* to a multiple of PARM_BOUNDARY.
2387 REG must be a hard register in this case.
2388 If REG is zero but PARTIAL is not, take all other actions for an
2389 argument partially in registers, but do not actually load any
2392 EXTRA is the amount in bytes of extra space to leave next to this arg.
2393 This is ignored if an argument block has already been allocated.
2395 On a machine that lacks real push insns, ARGS_ADDR is the address of
2396 the bottom of the argument block for this call. We use indexing off there
2397 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
2398 argument block has not been preallocated.
2400 ARGS_SO_FAR is the size of args previously pushed for this call. */
2403 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2404 args_addr, args_so_far)
2406 enum machine_mode mode;
2417 enum direction stack_direction
2418 #ifdef STACK_GROWS_DOWNWARD
2424 /* Decide where to pad the argument: `downward' for below,
2425 `upward' for above, or `none' for don't pad it.
2426 Default is below for small data on big-endian machines; else above. */
2427 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2429 /* Invert direction if stack is post-update. */
2430 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2431 if (where_pad != none)
2432 where_pad = (where_pad == downward ? upward : downward);
2434 xinner = x = protect_from_queue (x, 0);
2436 if (mode == BLKmode)
2438 /* Copy a block into the stack, entirely or partially. */
2441 int used = partial * UNITS_PER_WORD;
2442 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2450 /* USED is now the # of bytes we need not copy to the stack
2451 because registers will take care of them. */
2454 xinner = change_address (xinner, BLKmode,
2455 plus_constant (XEXP (xinner, 0), used));
2457 /* If the partial register-part of the arg counts in its stack size,
2458 skip the part of stack space corresponding to the registers.
2459 Otherwise, start copying to the beginning of the stack space,
2460 by setting SKIP to 0. */
2461 #ifndef REG_PARM_STACK_SPACE
2467 #ifdef PUSH_ROUNDING
2468 /* Do it with several push insns if that doesn't take lots of insns
2469 and if there is no difficulty with push insns that skip bytes
2470 on the stack for alignment purposes. */
2472 && GET_CODE (size) == CONST_INT
2474 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2476 /* Here we avoid the case of a structure whose weak alignment
2477 forces many pushes of a small amount of data,
2478 and such small pushes do rounding that causes trouble. */
2479 && ((! SLOW_UNALIGNED_ACCESS)
2480 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2481 || PUSH_ROUNDING (align) == align)
2482 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2484 /* Push padding now if padding above and stack grows down,
2485 or if padding below and stack grows up.
2486 But if space already allocated, this has already been done. */
2487 if (extra && args_addr == 0
2488 && where_pad != none && where_pad != stack_direction)
2489 anti_adjust_stack (GEN_INT (extra));
2491 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2492 INTVAL (size) - used, align);
2495 #endif /* PUSH_ROUNDING */
2497 /* Otherwise make space on the stack and copy the data
2498 to the address of that space. */
2500 /* Deduct words put into registers from the size we must copy. */
2503 if (GET_CODE (size) == CONST_INT)
2504 size = GEN_INT (INTVAL (size) - used);
2506 size = expand_binop (GET_MODE (size), sub_optab, size,
2507 GEN_INT (used), NULL_RTX, 0,
2511 /* Get the address of the stack space.
2512 In this case, we do not deal with EXTRA separately.
2513 A single stack adjust will do. */
2516 temp = push_block (size, extra, where_pad == downward);
2519 else if (GET_CODE (args_so_far) == CONST_INT)
2520 temp = memory_address (BLKmode,
2521 plus_constant (args_addr,
2522 skip + INTVAL (args_so_far)));
2524 temp = memory_address (BLKmode,
2525 plus_constant (gen_rtx (PLUS, Pmode,
2526 args_addr, args_so_far),
2529 /* TEMP is the address of the block. Copy the data there. */
2530 if (GET_CODE (size) == CONST_INT
2531 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2534 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2535 INTVAL (size), align);
2538 /* Try the most limited insn first, because there's no point
2539 including more than one in the machine description unless
2540 the more limited one has some advantage. */
2541 #ifdef HAVE_movstrqi
2543 && GET_CODE (size) == CONST_INT
2544 && ((unsigned) INTVAL (size)
2545 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2547 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2548 xinner, size, GEN_INT (align));
2556 #ifdef HAVE_movstrhi
2558 && GET_CODE (size) == CONST_INT
2559 && ((unsigned) INTVAL (size)
2560 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2562 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2563 xinner, size, GEN_INT (align));
2571 #ifdef HAVE_movstrsi
2574 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2575 xinner, size, GEN_INT (align));
2583 #ifdef HAVE_movstrdi
2586 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2587 xinner, size, GEN_INT (align));
2596 #ifndef ACCUMULATE_OUTGOING_ARGS
2597 /* If the source is referenced relative to the stack pointer,
2598 copy it to another register to stabilize it. We do not need
2599 to do this if we know that we won't be changing sp. */
2601 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2602 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2603 temp = copy_to_reg (temp);
2606 /* Make inhibit_defer_pop nonzero around the library call
2607 to force it to pop the bcopy-arguments right away. */
2609 #ifdef TARGET_MEM_FUNCTIONS
2610 emit_library_call (memcpy_libfunc, 0,
2611 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2612 convert_to_mode (TYPE_MODE (sizetype),
2613 size, TREE_UNSIGNED (sizetype)),
2614 TYPE_MODE (sizetype));
2616 emit_library_call (bcopy_libfunc, 0,
2617 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2618 convert_to_mode (TYPE_MODE (integer_type_node),
2620 TREE_UNSIGNED (integer_type_node)),
2621 TYPE_MODE (integer_type_node));
2626 else if (partial > 0)
2628 /* Scalar partly in registers. */
2630 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2633 /* # words of start of argument
2634 that we must make space for but need not store. */
2635 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2636 int args_offset = INTVAL (args_so_far);
2639 /* Push padding now if padding above and stack grows down,
2640 or if padding below and stack grows up.
2641 But if space already allocated, this has already been done. */
2642 if (extra && args_addr == 0
2643 && where_pad != none && where_pad != stack_direction)
2644 anti_adjust_stack (GEN_INT (extra));
2646 /* If we make space by pushing it, we might as well push
2647 the real data. Otherwise, we can leave OFFSET nonzero
2648 and leave the space uninitialized. */
2652 /* Now NOT_STACK gets the number of words that we don't need to
2653 allocate on the stack. */
2654 not_stack = partial - offset;
2656 /* If the partial register-part of the arg counts in its stack size,
2657 skip the part of stack space corresponding to the registers.
2658 Otherwise, start copying to the beginning of the stack space,
2659 by setting SKIP to 0. */
2660 #ifndef REG_PARM_STACK_SPACE
2666 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2667 x = validize_mem (force_const_mem (mode, x));
2669 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2670 SUBREGs of such registers are not allowed. */
2671 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2672 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2673 x = copy_to_reg (x);
2675 /* Loop over all the words allocated on the stack for this arg. */
2676 /* We can do it by words, because any scalar bigger than a word
2677 has a size a multiple of a word. */
2678 #ifndef PUSH_ARGS_REVERSED
2679 for (i = not_stack; i < size; i++)
2681 for (i = size - 1; i >= not_stack; i--)
2683 if (i >= not_stack + offset)
2684 emit_push_insn (operand_subword_force (x, i, mode),
2685 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2687 GEN_INT (args_offset + ((i - not_stack + skip)
2688 * UNITS_PER_WORD)));
2694 /* Push padding now if padding above and stack grows down,
2695 or if padding below and stack grows up.
2696 But if space already allocated, this has already been done. */
2697 if (extra && args_addr == 0
2698 && where_pad != none && where_pad != stack_direction)
2699 anti_adjust_stack (GEN_INT (extra));
2701 #ifdef PUSH_ROUNDING
2703 addr = gen_push_operand ();
2706 if (GET_CODE (args_so_far) == CONST_INT)
2708 = memory_address (mode,
2709 plus_constant (args_addr, INTVAL (args_so_far)));
2711 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2714 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2718 /* If part should go in registers, copy that part
2719 into the appropriate registers. Do this now, at the end,
2720 since mem-to-mem copies above may do function calls. */
2721 if (partial > 0 && reg != 0)
2723 /* Handle calls that pass values in multiple non-contiguous locations.
2724 The Irix 6 ABI has examples of this. */
2725 if (GET_CODE (reg) == PARALLEL)
2726 emit_group_load (reg, x);
2728 move_block_to_reg (REGNO (reg), x, partial, mode);
2731 if (extra && args_addr == 0 && where_pad == stack_direction)
2732 anti_adjust_stack (GEN_INT (extra));
2735 /* Expand an assignment that stores the value of FROM into TO.
2736 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2737 (This may contain a QUEUED rtx;
2738 if the value is constant, this rtx is a constant.)
2739 Otherwise, the returned value is NULL_RTX.
2741 SUGGEST_REG is no longer actually used.
2742 It used to mean, copy the value through a register
2743 and return that register, if that is possible.
2744 We now use WANT_VALUE to decide whether to do this. */
/* NOTE(review): this excerpt elides several original lines (parameter
   declarations, some braces and operands); the code below is kept
   byte-identical to what is visible.  TO and FROM are trees; the return
   value, when WANT_VALUE is set, is an rtx for the stored value. */
2747 expand_assignment (to, from, want_value, suggest_reg)
2752 register rtx to_rtx = 0;
2755 /* Don't crash if the lhs of the assignment was erroneous. */
2757 if (TREE_CODE (to) == ERROR_MARK)
2759 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2760 return want_value ? result : NULL_RTX;
2763 if (output_bytecode)
/* Bytecode back end: expand FROM, duplicate the value on the bytecode
   stack so a copy survives the store, then store through TO's address. */
2765 tree dest_innermost;
2767 bc_expand_expr (from);
2768 bc_emit_instruction (duplicate);
2770 dest_innermost = bc_expand_address (to);
2772 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2773 take care of it here. */
2775 bc_store_memory (TREE_TYPE (to), dest_innermost);
2779 /* Assignment of a structure component needs special treatment
2780 if the structure component's rtx is not simply a MEM.
2781 Assignment of an array element at a constant index, and assignment of
2782 an array element in an unaligned packed structure field, has the same
2785 if (TREE_CODE (to) == COMPONENT_REF
2786 || TREE_CODE (to) == BIT_FIELD_REF
2787 || (TREE_CODE (to) == ARRAY_REF
2788 && ((TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2789 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST)
2790 || (SLOW_UNALIGNED_ACCESS && get_inner_unaligned_p (to)))))
2792 enum machine_mode mode1;
/* Decompose the reference into the innermost containing object TEM
   plus bit position/size, mode, and a possible variable byte OFFSET. */
2802 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
2803 &unsignedp, &volatilep, &alignment);
2805 /* If we are going to use store_bit_field and extract_bit_field,
2806 make sure to_rtx will be safe for multiple use. */
2808 if (mode1 == VOIDmode && want_value)
2809 tem = stabilize_reference (tem);
2811 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2814 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2816 if (GET_CODE (to_rtx) != MEM)
/* Apply the variable offset by rewriting the MEM's address. */
2818 to_rtx = change_address (to_rtx, VOIDmode,
2819 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
2820 force_reg (ptr_mode, offset_rtx)));
2824 if (GET_CODE (to_rtx) == MEM)
2826 /* When the offset is zero, to_rtx is the address of the
2827 structure we are storing into, and hence may be shared.
2828 We must make a new MEM before setting the volatile bit. */
2830 to_rtx = change_address (to_rtx, VOIDmode, XEXP (to_rtx, 0));
2831 MEM_VOLATILE_P (to_rtx) = 1;
2833 #if 0 /* This was turned off because, when a field is volatile
2834 in an object which is not volatile, the object may be in a register,
2835 and then we would abort over here. */
2841 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2843 /* Spurious cast makes HPUX compiler happy. */
2844 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2847 /* Required alignment of containing datum. */
2849 int_size_in_bytes (TREE_TYPE (tem)));
2850 preserve_temp_slots (result);
2854 /* If the value is meaningful, convert RESULT to the proper mode.
2855 Otherwise, return nothing. */
2856 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2857 TYPE_MODE (TREE_TYPE (from)),
2859 TREE_UNSIGNED (TREE_TYPE (to)))
2863 /* If the rhs is a function call and its value is not an aggregate,
2864 call the function before we start to compute the lhs.
2865 This is needed for correct code for cases such as
2866 val = setjmp (buf) on machines where reference to val
2867 requires loading up part of an address in a separate insn.
2869 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2870 a promoted variable where the zero- or sign- extension needs to be done.
2871 Handling this in the normal way is safe because no computation is done
2873 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2874 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
2875 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
2880 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2882 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2884 /* Handle calls that return values in multiple non-contiguous locations.
2885 The Irix 6 ABI has examples of this. */
2886 if (GET_CODE (to_rtx) == PARALLEL)
2887 emit_group_load (to_rtx, value);
2888 else if (GET_MODE (to_rtx) == BLKmode)
2889 emit_block_move (to_rtx, value, expr_size (from),
2890 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
2892 emit_move_insn (to_rtx, value);
2893 preserve_temp_slots (to_rtx);
2896 return want_value ? to_rtx : NULL_RTX;
2899 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2900 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2903 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2905 /* Don't move directly into a return register. */
2906 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2911 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2912 emit_move_insn (to_rtx, temp);
2913 preserve_temp_slots (to_rtx);
2916 return want_value ? to_rtx : NULL_RTX;
2919 /* In case we are returning the contents of an object which overlaps
2920 the place the value is being stored, use a safe function when copying
2921 a value through a pointer into a structure value return block. */
2922 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2923 && current_function_returns_struct
2924 && !current_function_returns_pcc_struct)
2929 size = expr_size (from);
2930 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
/* memcpy takes (dest, src, n) while bcopy takes (src, dest, n);
   note the swapped first two operands between the two calls below. */
2932 #ifdef TARGET_MEM_FUNCTIONS
2933 emit_library_call (memcpy_libfunc, 0,
2934 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2935 XEXP (from_rtx, 0), Pmode,
2936 convert_to_mode (TYPE_MODE (sizetype),
2937 size, TREE_UNSIGNED (sizetype)),
2938 TYPE_MODE (sizetype));
2940 emit_library_call (bcopy_libfunc, 0,
2941 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2942 XEXP (to_rtx, 0), Pmode,
2943 convert_to_mode (TYPE_MODE (integer_type_node),
2944 size, TREE_UNSIGNED (integer_type_node)),
2945 TYPE_MODE (integer_type_node));
2948 preserve_temp_slots (to_rtx);
2951 return want_value ? to_rtx : NULL_RTX;
2954 /* Compute FROM and store the value in the rtx we got. */
2957 result = store_expr (from, to_rtx, want_value);
2958 preserve_temp_slots (result);
2961 return want_value ? result : NULL_RTX;
2964 /* Generate code for computing expression EXP,
2965 and storing the value into TARGET.
2966 TARGET may contain a QUEUED rtx.
2968 If WANT_VALUE is nonzero, return a copy of the value
2969 not in TARGET, so that we can be sure to use the proper
2970 value in a containing expression even if TARGET has something
2971 else stored in it. If possible, we copy the value through a pseudo
2972 and return that pseudo. Or, if the value is constant, we try to
2973 return the constant. In some cases, we return a pseudo
2974 copied *from* TARGET.
2976 If the mode is BLKmode then we may return TARGET itself.
2977 It turns out that in BLKmode it doesn't cause a problem.
2978 because C has no operators that could combine two different
2979 assignments into the same BLKmode object with different values
2980 with no sequence point. Will other languages need this to
2983 If WANT_VALUE is 0, we return NULL, to make sure
2984 to catch quickly any cases where the caller uses the value
2985 and fails to set WANT_VALUE. */
/* NOTE(review): this excerpt elides several original lines; code below
   is kept byte-identical to what is visible. */
2988 store_expr (exp, target, want_value)
2990 register rtx target;
2994 int dont_return_target = 0;
2996 if (TREE_CODE (exp) == COMPOUND_EXPR)
2998 /* Perform first part of compound expression, then assign from second
3000 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3002 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3004 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3006 /* For conditional expression, get safe form of the target. Then
3007 test the condition, doing the appropriate assignment on either
3008 side. This avoids the creation of unnecessary temporaries.
3009 For non-BLKmode, it is more efficient not to do this. */
3011 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3012 rtx flag = NULL_RTX;
3013 tree left_cleanups = NULL_TREE;
3014 tree right_cleanups = NULL_TREE;
3015 tree old_cleanups = cleanups_this_call;
3017 /* Used to save a pointer to the place to put the setting of
3018 the flag that indicates if this side of the conditional was
3019 taken. We backpatch the code, if we find out later that we
3020 have any conditional cleanups that need to be performed. */
3021 rtx dest_right_flag = NULL_RTX;
3022 rtx dest_left_flag = NULL_RTX;
3025 target = protect_from_queue (target, 1);
3027 do_pending_stack_adjust ();
/* Branch around the "true" arm when the condition is false. */
3029 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3030 store_expr (TREE_OPERAND (exp, 1), target, 0);
3031 dest_left_flag = get_last_insn ();
3032 /* Handle conditional cleanups, if any. */
3033 left_cleanups = defer_cleanups_to (old_cleanups);
3035 emit_jump_insn (gen_jump (lab2));
3038 store_expr (TREE_OPERAND (exp, 2), target, 0);
3039 dest_right_flag = get_last_insn ();
3040 /* Handle conditional cleanups, if any. */
3041 right_cleanups = defer_cleanups_to (old_cleanups);
3046 /* Add back in any conditional cleanups. */
3047 if (left_cleanups || right_cleanups)
3053 /* Now that we know that a flag is needed, go back and add in the
3054 setting of the flag. */
3056 flag = gen_reg_rtx (word_mode);
3058 /* Do the left side flag. */
3059 last = get_last_insn ();
3060 /* Flag left cleanups as needed. */
3061 emit_move_insn (flag, const1_rtx);
3062 /* ??? deprecated, use sequences instead. */
3063 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
3065 /* Do the right side flag. */
3066 last = get_last_insn ();
3067 /* Flag right cleanups as needed.  (Flag is cleared: 0 = right arm.) */
3068 emit_move_insn (flag, const0_rtx);
3069 /* ??? deprecated, use sequences instead. */
3070 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
3072 /* All cleanups must be on the function_obstack. */
3073 push_obstacks_nochange ();
3074 resume_temporary_allocation ();
3076 /* convert flag, which is an rtx, into a tree. */
3077 cond = make_node (RTL_EXPR);
3078 TREE_TYPE (cond) = integer_type_node;
3079 RTL_EXPR_RTL (cond) = flag;
3080 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
3081 cond = save_expr (cond);
3083 if (! left_cleanups)
3084 left_cleanups = integer_zero_node;
3085 if (! right_cleanups)
3086 right_cleanups = integer_zero_node;
/* Build COND_EXPR so only the taken arm's cleanups run at cleanup time. */
3087 new_cleanups = build (COND_EXPR, void_type_node,
3088 truthvalue_conversion (cond),
3089 left_cleanups, right_cleanups);
3090 new_cleanups = fold (new_cleanups);
3094 /* Now add in the conditionalized cleanups. */
3096 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
3097 expand_eh_region_start ();
3099 return want_value ? target : NULL_RTX;
3101 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3102 && GET_MODE (target) != BLKmode)
3103 /* If target is in memory and caller wants value in a register instead,
3104 arrange that. Pass TARGET as target for expand_expr so that,
3105 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3106 We know expand_expr will not use the target in that case.
3107 Don't do this if TARGET is volatile because we are supposed
3108 to write it and then read it. */
3110 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3111 GET_MODE (target), 0);
3112 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3113 temp = copy_to_reg (temp);
3114 dont_return_target = 1;
3116 else if (queued_subexp_p (target))
3117 /* If target contains a postincrement, let's not risk
3118 using it as the place to generate the rhs. */
3120 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3122 /* Expand EXP into a new pseudo. */
3123 temp = gen_reg_rtx (GET_MODE (target));
3124 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3127 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3129 /* If target is volatile, ANSI requires accessing the value
3130 *from* the target, if it is accessed. So make that happen.
3131 In no case return the target itself. */
3132 if (! MEM_VOLATILE_P (target) && want_value)
3133 dont_return_target = 1;
3135 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3136 /* If this is an scalar in a register that is stored in a wider mode
3137 than the declared mode, compute the result into its declared mode
3138 and then convert to the wider mode. Our value is the computed
3141 /* If we don't want a value, we can do the conversion inside EXP,
3142 which will often result in some optimizations. Do the conversion
3143 in two steps: first change the signedness, if needed, then
3144 the extend. But don't do this if the type of EXP is a subtype
3145 of something else since then the conversion might involve
3146 more than just converting modes. */
3147 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3148 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3150 if (TREE_UNSIGNED (TREE_TYPE (exp))
3151 != SUBREG_PROMOTED_UNSIGNED_P (target))
3154 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3158 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3159 SUBREG_PROMOTED_UNSIGNED_P (target)),
3163 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3165 /* If TEMP is a volatile MEM and we want a result value, make
3166 the access now so it gets done only once. Likewise if
3167 it contains TARGET. */
3168 if (GET_CODE (temp) == MEM && want_value
3169 && (MEM_VOLATILE_P (temp)
3170 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3171 temp = copy_to_reg (temp);
3173 /* If TEMP is a VOIDmode constant, use convert_modes to make
3174 sure that we properly convert it. */
3175 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3176 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3177 TYPE_MODE (TREE_TYPE (exp)), temp,
3178 SUBREG_PROMOTED_UNSIGNED_P (target));
3180 convert_move (SUBREG_REG (target), temp,
3181 SUBREG_PROMOTED_UNSIGNED_P (target));
3182 return want_value ? temp : NULL_RTX;
3186 temp = expand_expr (exp, target, GET_MODE (target), 0);
3187 /* Return TARGET if it's a specified hardware register.
3188 If TARGET is a volatile mem ref, either return TARGET
3189 or return a reg copied *from* TARGET; ANSI requires this.
3191 Otherwise, if TEMP is not TARGET, return TEMP
3192 if it is constant (for efficiency),
3193 or if we really want the correct value. */
3194 if (!(target && GET_CODE (target) == REG
3195 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3196 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3198 && (CONSTANT_P (temp) || want_value))
3199 dont_return_target = 1;
3202 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3203 the same as that of TARGET, adjust the constant. This is needed, for
3204 example, in case it is a CONST_DOUBLE and we want only a word-sized
3206 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3207 && TREE_CODE (exp) != ERROR_MARK
3208 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3209 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3210 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3212 /* If value was not generated in the target, store it there.
3213 Convert the value to TARGET's type first if nec. */
3215 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
3217 target = protect_from_queue (target, 1);
3218 if (GET_MODE (temp) != GET_MODE (target)
3219 && GET_MODE (temp) != VOIDmode)
3221 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3222 if (dont_return_target)
3224 /* In this case, we will return TEMP,
3225 so make sure it has the proper mode.
3226 But don't forget to store the value into TARGET. */
3227 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3228 emit_move_insn (target, temp);
3231 convert_move (target, temp, unsignedp);
3234 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3236 /* Handle copying a string constant into an array.
3237 The string constant may be shorter than the array.
3238 So copy just the string's actual length, and clear the rest. */
3242 /* Get the size of the data type of the string,
3243 which is actually the size of the target. */
3244 size = expr_size (exp);
3245 if (GET_CODE (size) == CONST_INT
3246 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3247 emit_block_move (target, temp, size,
3248 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3251 /* Compute the size of the data to copy from the string. */
3253 = size_binop (MIN_EXPR,
3254 make_tree (sizetype, size),
3256 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3257 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3261 /* Copy that much. */
3262 emit_block_move (target, temp, copy_size_rtx,
3263 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3265 /* Figure out how much is left in TARGET that we have to clear.
3266 Do all calculations in ptr_mode. */
3268 addr = XEXP (target, 0);
3269 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3271 if (GET_CODE (copy_size_rtx) == CONST_INT)
3273 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3274 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3278 addr = force_reg (ptr_mode, addr);
3279 addr = expand_binop (ptr_mode, add_optab, addr,
3280 copy_size_rtx, NULL_RTX, 0,
3283 size = expand_binop (ptr_mode, sub_optab, size,
3284 copy_size_rtx, NULL_RTX, 0,
/* Skip the clearing code when the remaining size is negative. */
3287 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
3288 GET_MODE (size), 0, 0);
3289 label = gen_label_rtx ();
3290 emit_jump_insn (gen_blt (label));
3293 if (size != const0_rtx)
3295 #ifdef TARGET_MEM_FUNCTIONS
3296 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3298 const0_rtx, TYPE_MODE (integer_type_node),
3299 convert_to_mode (TYPE_MODE (sizetype),
3301 TREE_UNSIGNED (sizetype)),
3302 TYPE_MODE (sizetype));
3304 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3306 convert_to_mode (TYPE_MODE (integer_type_node),
3308 TREE_UNSIGNED (integer_type_node)),
3309 TYPE_MODE (integer_type_node));
3317 /* Handle calls that return values in multiple non-contiguous locations.
3318 The Irix 6 ABI has examples of this. */
3319 else if (GET_CODE (target) == PARALLEL)
3320 emit_group_load (target, temp);
3321 else if (GET_MODE (temp) == BLKmode)
3322 emit_block_move (target, temp, expr_size (exp),
3323 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3325 emit_move_insn (target, temp);
3328 /* If we don't want a value, return NULL_RTX. */
3332 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3333 ??? The latter test doesn't seem to make sense. */
3334 else if (dont_return_target && GET_CODE (temp) != MEM)
3337 /* Return TARGET itself if it is a hard register. */
3338 else if (want_value && GET_MODE (target) != BLKmode
3339 && ! (GET_CODE (target) == REG
3340 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3341 return copy_to_reg (target);
3347 /* Return 1 if EXP just contains zeros. */
/* NOTE(review): the function header of is_zeros_p is elided in this
   excerpt (the listing jumps from 3347 to 3355); only the switch body is
   visible.  It dispatches on the tree code of EXP: conversions recurse on
   the operand, integer/real/complex constants compare against zero, and a
   CONSTRUCTOR is zero iff all its elements are (SET_TYPE: iff empty). */
3355 switch (TREE_CODE (exp))
3359 case NON_LVALUE_EXPR:
3360 return is_zeros_p (TREE_OPERAND (exp, 0));
3363 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3367 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3370 return REAL_VALUES_EQUAL (TREE_REAL_CST (exp), dconst0);
3373 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3374 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3375 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3376 if (! is_zeros_p (TREE_VALUE (elt)))
3385 /* Return 1 if EXP contains mostly (3/4) zeros. */
/* For a CONSTRUCTOR, count elements and the subset that is mostly zero
   (recursively) and test whether zeros make up at least 75% of the
   elements; anything else falls back to the exact is_zeros_p test. */
3388 mostly_zeros_p (exp)
3391 if (TREE_CODE (exp) == CONSTRUCTOR)
3393 int elts = 0, zeros = 0;
3394 tree elt = CONSTRUCTOR_ELTS (exp);
3395 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3397 /* If there are no ranges of true bits, it is all zero. */
3398 return elt == NULL_TREE;
3400 for (; elt; elt = TREE_CHAIN (elt))
3402 /* We do not handle the case where the index is a RANGE_EXPR,
3403 so the statistic will be somewhat inaccurate.
3404 We do make a more accurate count in store_constructor itself,
3405 so since this function is only used for nested array elements,
3406 this should be close enough. */
3407 if (mostly_zeros_p (TREE_VALUE (elt)))
/* zeros/elts >= 3/4, computed in integers to avoid division. */
3412 return 4 * zeros >= 3 * elts;
3415 return is_zeros_p (exp);
3418 /* Helper function for store_constructor.
3419 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3420 TYPE is the type of the CONSTRUCTOR, not the element type.
3421 CLEARED is as for store_constructor.
3423 This provides a recursive shortcut back to store_constructor when it isn't
3424 necessary to go through store_field. This is so that we can pass through
3425 the cleared field to let store_constructor know that we may not have to
3426 clear a substructure if the outer structure has already been cleared. */
3429 store_constructor_field (target, bitsize, bitpos,
3430 mode, exp, type, cleared)
3432 int bitsize, bitpos;
3433 enum machine_mode mode;
/* Take the recursive store_constructor shortcut only for a byte-aligned
   CONSTRUCTOR; register targets with a non-zero bit offset go through
   store_field's bitfield handling instead. */
3437 if (TREE_CODE (exp) == CONSTRUCTOR
3438 && bitpos % BITS_PER_UNIT == 0
3439 /* If we have a non-zero bitpos for a register target, then we just
3440 let store_field do the bitfield handling. This is unlikely to
3441 generate unnecessary clear instructions anyways. */
3442 && (bitpos == 0 || GET_CODE (target) == MEM))
/* Rebase the MEM at the field's byte offset, then recurse. */
3445 target = change_address (target, VOIDmode,
3446 plus_constant (XEXP (target, 0),
3447 bitpos / BITS_PER_UNIT));
3448 store_constructor (exp, target, cleared);
3451 store_field (target, bitsize, bitpos, mode, exp,
3452 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3453 int_size_in_bytes (type));
3456 /* Store the value of constructor EXP into the rtx TARGET.
3457 TARGET is either a REG or a MEM.
3458 CLEARED is true if TARGET is known to have been zero'd. */
3461 store_constructor (exp, target, cleared)
3466 tree type = TREE_TYPE (exp);
3468 /* We know our target cannot conflict, since safe_from_p has been called. */
3470 /* Don't try copying piece by piece into a hard register
3471 since that is vulnerable to being clobbered by EXP.
3472 Instead, construct in a pseudo register and then copy it all. */
3473 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3475 rtx temp = gen_reg_rtx (GET_MODE (target));
3476 store_constructor (exp, temp, 0);
3477 emit_move_insn (target, temp);
3482 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3483 || TREE_CODE (type) == QUAL_UNION_TYPE)
3487 /* Inform later passes that the whole union value is dead. */
3488 if (TREE_CODE (type) == UNION_TYPE
3489 || TREE_CODE (type) == QUAL_UNION_TYPE)
3490 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3492 /* If we are building a static constructor into a register,
3493 set the initial value as zero so we can fold the value into
3494 a constant. But if more than one register is involved,
3495 this probably loses. */
3496 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3497 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
3500 emit_move_insn (target, const0_rtx);
3505 /* If the constructor has fewer fields than the structure
3506 or if we are initializing the structure to mostly zeros,
3507 clear the whole structure first. */
3508 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3509 != list_length (TYPE_FIELDS (type)))
3510 || mostly_zeros_p (exp))
3513 clear_storage (target, expr_size (exp),
3514 TYPE_ALIGN (type) / BITS_PER_UNIT);
3519 /* Inform later passes that the old value is dead. */
3520 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3522 /* Store each element of the constructor into
3523 the corresponding field of TARGET. */
3525 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3527 register tree field = TREE_PURPOSE (elt);
3528 register enum machine_mode mode;
3532 tree pos, constant = 0, offset = 0;
3533 rtx to_rtx = target;
3535 /* Just ignore missing fields.
3536 We cleared the whole structure, above,
3537 if any fields are missing. */
3541 if (cleared && is_zeros_p (TREE_VALUE (elt)))
3544 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3545 unsignedp = TREE_UNSIGNED (field);
3546 mode = DECL_MODE (field);
3547 if (DECL_BIT_FIELD (field))
3550 pos = DECL_FIELD_BITPOS (field);
3551 if (TREE_CODE (pos) == INTEGER_CST)
3553 else if (TREE_CODE (pos) == PLUS_EXPR
3554 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3555 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3560 bitpos = TREE_INT_CST_LOW (constant);
3566 if (contains_placeholder_p (offset))
3567 offset = build (WITH_RECORD_EXPR, sizetype,
3570 offset = size_binop (FLOOR_DIV_EXPR, offset,
3571 size_int (BITS_PER_UNIT));
3573 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3574 if (GET_CODE (to_rtx) != MEM)
3578 = change_address (to_rtx, VOIDmode,
3579 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
3580 force_reg (ptr_mode, offset_rtx)));
3582 if (TREE_READONLY (field))
3584 if (GET_CODE (to_rtx) == MEM)
3585 to_rtx = change_address (to_rtx, GET_MODE (to_rtx),
3587 RTX_UNCHANGING_P (to_rtx) = 1;
3590 store_constructor_field (to_rtx, bitsize, bitpos,
3591 mode, TREE_VALUE (elt), type, cleared);
3594 else if (TREE_CODE (type) == ARRAY_TYPE)
3599 tree domain = TYPE_DOMAIN (type);
3600 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3601 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3602 tree elttype = TREE_TYPE (type);
3604 /* If the constructor has fewer elements than the array,
3605 clear the whole array first. Similarly if this this is
3606 static constructor of a non-BLKmode object. */
3607 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3611 HOST_WIDE_INT count = 0, zero_count = 0;
3613 /* This loop is a more accurate version of the loop in
3614 mostly_zeros_p (it handles RANGE_EXPR in an index).
3615 It is also needed to check for missing elements. */
3616 for (elt = CONSTRUCTOR_ELTS (exp);
3618 elt = TREE_CHAIN (elt))
3620 tree index = TREE_PURPOSE (elt);
3621 HOST_WIDE_INT this_node_count;
3622 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3624 tree lo_index = TREE_OPERAND (index, 0);
3625 tree hi_index = TREE_OPERAND (index, 1);
3626 if (TREE_CODE (lo_index) != INTEGER_CST
3627 || TREE_CODE (hi_index) != INTEGER_CST)
3632 this_node_count = TREE_INT_CST_LOW (hi_index)
3633 - TREE_INT_CST_LOW (lo_index) + 1;
3636 this_node_count = 1;
3637 count += this_node_count;
3638 if (mostly_zeros_p (TREE_VALUE (elt)))
3639 zero_count += this_node_count;
3641 /* Clear the entire array first if there are any missing elements,
3642 or if the incidence of zero elements is >= 75%. */
3643 if (count < maxelt - minelt + 1
3644 || 4 * zero_count >= 3 * count)
3650 clear_storage (target, expr_size (exp),
3651 TYPE_ALIGN (type) / BITS_PER_UNIT);
3655 /* Inform later passes that the old value is dead. */
3656 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3658 /* Store each element of the constructor into
3659 the corresponding element of TARGET, determined
3660 by counting the elements. */
3661 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3663 elt = TREE_CHAIN (elt), i++)
3665 register enum machine_mode mode;
3669 tree value = TREE_VALUE (elt);
3670 tree index = TREE_PURPOSE (elt);
3671 rtx xtarget = target;
3673 if (cleared && is_zeros_p (value))
3676 mode = TYPE_MODE (elttype);
3677 bitsize = GET_MODE_BITSIZE (mode);
3678 unsignedp = TREE_UNSIGNED (elttype);
3680 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3682 tree lo_index = TREE_OPERAND (index, 0);
3683 tree hi_index = TREE_OPERAND (index, 1);
3684 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
3685 struct nesting *loop;
3686 HOST_WIDE_INT lo, hi, count;
3689 /* If the range is constant and "small", unroll the loop. */
3690 if (TREE_CODE (lo_index) == INTEGER_CST
3691 && TREE_CODE (hi_index) == INTEGER_CST
3692 && (lo = TREE_INT_CST_LOW (lo_index),
3693 hi = TREE_INT_CST_LOW (hi_index),
3694 count = hi - lo + 1,
3695 (GET_CODE (target) != MEM
3697 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
3698 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
3701 lo -= minelt; hi -= minelt;
3702 for (; lo <= hi; lo++)
3704 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
3705 store_constructor_field (target, bitsize, bitpos,
3706 mode, value, type, cleared);
3711 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
3712 loop_top = gen_label_rtx ();
3713 loop_end = gen_label_rtx ();
3715 unsignedp = TREE_UNSIGNED (domain);
3717 index = build_decl (VAR_DECL, NULL_TREE, domain);
3719 DECL_RTL (index) = index_r
3720 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
3723 if (TREE_CODE (value) == SAVE_EXPR
3724 && SAVE_EXPR_RTL (value) == 0)
3726 /* Make sure value gets expanded once before the
3728 expand_expr (value, const0_rtx, VOIDmode, 0);
3731 store_expr (lo_index, index_r, 0);
3732 loop = expand_start_loop (0);
3734 /* Assign value to element index. */
3735 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3736 size_int (BITS_PER_UNIT));
3737 position = size_binop (MULT_EXPR,
3738 size_binop (MINUS_EXPR, index,
3739 TYPE_MIN_VALUE (domain)),
3741 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3742 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3743 xtarget = change_address (target, mode, addr);
3744 if (TREE_CODE (value) == CONSTRUCTOR)
3745 store_constructor (value, xtarget, cleared);
3747 store_expr (value, xtarget, 0);
3749 expand_exit_loop_if_false (loop,
3750 build (LT_EXPR, integer_type_node,
3753 expand_increment (build (PREINCREMENT_EXPR,
3755 index, integer_one_node), 0, 0);
3757 emit_label (loop_end);
3759 /* Needed by stupid register allocation, to extend the
3760 lifetime of pseudo-regs used by target past the end
3762 emit_insn (gen_rtx (USE, GET_MODE (target), target));
3765 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
3766 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
3772 index = size_int (i);
3775 index = size_binop (MINUS_EXPR, index,
3776 TYPE_MIN_VALUE (domain));
3777 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3778 size_int (BITS_PER_UNIT));
3779 position = size_binop (MULT_EXPR, index, position);
3780 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3781 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3782 xtarget = change_address (target, mode, addr);
3783 store_expr (value, xtarget, 0);
3788 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
3789 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3791 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3792 store_constructor_field (target, bitsize, bitpos,
3793 mode, value, type, cleared);
3797 /* set constructor assignments */
3798 else if (TREE_CODE (type) == SET_TYPE)
3800 tree elt = CONSTRUCTOR_ELTS (exp);
3801 rtx xtarget = XEXP (target, 0);
3802 int set_word_size = TYPE_ALIGN (type);
3803 int nbytes = int_size_in_bytes (type), nbits;
3804 tree domain = TYPE_DOMAIN (type);
3805 tree domain_min, domain_max, bitlength;
3807 /* The default implementation strategy is to extract the constant
3808 parts of the constructor, use that to initialize the target,
3809 and then "or" in whatever non-constant ranges we need in addition.
3811 If a large set is all zero or all ones, it is
3812 probably better to set it using memset (if available) or bzero.
3813 Also, if a large set has just a single range, it may also be
3814 better to first clear the set (using
3815 bzero/memset), and then set the bits we want. */
3817 /* Check for all zeros. */
3818 if (elt == NULL_TREE)
3821 clear_storage (target, expr_size (exp),
3822 TYPE_ALIGN (type) / BITS_PER_UNIT);
3826 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3827 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3828 bitlength = size_binop (PLUS_EXPR,
3829 size_binop (MINUS_EXPR, domain_max, domain_min),
3832 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
3834 nbits = TREE_INT_CST_LOW (bitlength);
3836 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
3837 are "complicated" (more than one range), initialize (the
3838 constant parts) by copying from a constant. */
3839 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
3840 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
3842 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3843 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
3844 char *bit_buffer = (char *) alloca (nbits);
3845 HOST_WIDE_INT word = 0;
3848 int offset = 0; /* In bytes from beginning of set. */
3849 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
3852 if (bit_buffer[ibit])
3854 if (BYTES_BIG_ENDIAN)
3855 word |= (1 << (set_word_size - 1 - bit_pos));
3857 word |= 1 << bit_pos;
3860 if (bit_pos >= set_word_size || ibit == nbits)
3862 if (word != 0 || ! cleared)
3864 rtx datum = GEN_INT (word);
3866 /* The assumption here is that it is safe to use
3867 XEXP if the set is multi-word, but not if
3868 it's single-word. */
3869 if (GET_CODE (target) == MEM)
3871 to_rtx = plus_constant (XEXP (target, 0), offset);
3872 to_rtx = change_address (target, mode, to_rtx);
3874 else if (offset == 0)
3878 emit_move_insn (to_rtx, datum);
3884 offset += set_word_size / BITS_PER_UNIT;
3890 /* Don't bother clearing storage if the set is all ones. */
3891 if (TREE_CHAIN (elt) != NULL_TREE
3892 || (TREE_PURPOSE (elt) == NULL_TREE
3894 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
3895 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
3896 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
3897 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
3899 clear_storage (target, expr_size (exp),
3900 TYPE_ALIGN (type) / BITS_PER_UNIT);
3903 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
3905 /* start of range of element or NULL */
3906 tree startbit = TREE_PURPOSE (elt);
3907 /* end of range of element, or element value */
3908 tree endbit = TREE_VALUE (elt);
3909 HOST_WIDE_INT startb, endb;
3910 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
3912 bitlength_rtx = expand_expr (bitlength,
3913 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
3915 /* handle non-range tuple element like [ expr ] */
3916 if (startbit == NULL_TREE)
3918 startbit = save_expr (endbit);
3921 startbit = convert (sizetype, startbit);
3922 endbit = convert (sizetype, endbit);
3923 if (! integer_zerop (domain_min))
3925 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
3926 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
3928 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
3929 EXPAND_CONST_ADDRESS);
3930 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
3931 EXPAND_CONST_ADDRESS);
3935 targetx = assign_stack_temp (GET_MODE (target),
3936 GET_MODE_SIZE (GET_MODE (target)),
3938 emit_move_insn (targetx, target);
3940 else if (GET_CODE (target) == MEM)
3945 #ifdef TARGET_MEM_FUNCTIONS
3946 /* Optimization: If startbit and endbit are
3947 constants divisible by BITS_PER_UNIT,
3948 call memset instead. */
3949 if (TREE_CODE (startbit) == INTEGER_CST
3950 && TREE_CODE (endbit) == INTEGER_CST
3951 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
3952 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
3954 emit_library_call (memset_libfunc, 0,
3956 plus_constant (XEXP (targetx, 0),
3957 startb / BITS_PER_UNIT),
3959 constm1_rtx, TYPE_MODE (integer_type_node),
3960 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3961 TYPE_MODE (sizetype));
3966 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
3967 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
3968 bitlength_rtx, TYPE_MODE (sizetype),
3969 startbit_rtx, TYPE_MODE (sizetype),
3970 endbit_rtx, TYPE_MODE (sizetype));
3973 emit_move_insn (target, targetx);
3981 /* Store the value of EXP (an expression tree)
3982 into a subfield of TARGET which has mode MODE and occupies
3983 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3984 If MODE is VOIDmode, it means that we are storing into a bit-field.
3986 If VALUE_MODE is VOIDmode, return nothing in particular.
3987 UNSIGNEDP is not used in this case.
3989 Otherwise, return an rtx for the value stored. This rtx
3990 has mode VALUE_MODE if that is convenient to do.
3991 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3993 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3994 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
/* Store EXP into the BITSIZE-bit field at bit offset BITPOS within TARGET;
   the block comment immediately above gives the full contract for the
   remaining parameters and the return value.  */
3997 store_field (target, bitsize, bitpos, mode, exp, value_mode,
3998 unsignedp, align, total_size)
4000 int bitsize, bitpos;
4001 enum machine_mode mode;
4003 enum machine_mode value_mode;
4008 HOST_WIDE_INT width_mask = 0;
4010 if (bitsize < HOST_BITS_PER_WIDE_INT)
4011 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
/* WIDTH_MASK has just the low BITSIZE bits set; it stays zero when the
   field is as wide as a host wide int or wider.  */
4013 /* If we are storing into an unaligned field of an aligned union that is
4014 in a register, we may have the mode of TARGET being an integer mode but
4015 MODE == BLKmode. In that case, get an aligned object whose size and
4016 alignment are the same as TARGET and store TARGET into it (we can avoid
4017 the store if the field being stored is the entire width of TARGET). Then
4018 call ourselves recursively to store the field into a BLKmode version of
4019 that object. Finally, load from the object into TARGET. This is not
4020 very efficient in general, but should only be slightly more expensive
4021 than the otherwise-required unaligned accesses. Perhaps this can be
4022 cleaned up later. */
4025 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4027 rtx object = assign_stack_temp (GET_MODE (target),
4028 GET_MODE_SIZE (GET_MODE (target)), 0);
/* BLK_OBJECT aliases OBJECT's memory but is viewed in BLKmode.  */
4029 rtx blk_object = copy_rtx (object);
4031 MEM_IN_STRUCT_P (object) = 1;
4032 MEM_IN_STRUCT_P (blk_object) = 1;
4033 PUT_MODE (blk_object, BLKmode);
/* Copying TARGET in is only needed when the field does not cover it
   entirely.  */
4035 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4036 emit_move_insn (object, target);
4038 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4041 /* Even though we aren't returning target, we need to
4042 give it the updated value. */
4043 emit_move_insn (target, object);
4048 /* If the structure is in a register or if the component
4049 is a bit field, we cannot use addressing to access it.
4050 Use bit-field techniques or SUBREG to store in it. */
4052 if (mode == VOIDmode
4053 || (mode != BLKmode && ! direct_store[(int) mode])
4054 || GET_CODE (target) == REG
4055 || GET_CODE (target) == SUBREG
4056 /* If the field isn't aligned enough to store as an ordinary memref,
4057 store it as a bit field. */
4058 || (SLOW_UNALIGNED_ACCESS
4059 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4060 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4062 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4064 /* If BITSIZE is narrower than the size of the type of EXP
4065 we will be narrowing TEMP. Normally, what's wanted are the
4066 low-order bits. However, if EXP's type is a record and this is
4067 big-endian machine, we want the upper BITSIZE bits. */
4068 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4069 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4070 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4071 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4072 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4076 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4078 if (mode != VOIDmode && mode != BLKmode
4079 && mode != TYPE_MODE (TREE_TYPE (exp)))
4080 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4082 /* If the modes of TARGET and TEMP are both BLKmode, both
4083 must be in memory and BITPOS must be aligned on a byte
4084 boundary. If so, we simply do a block copy. */
4085 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4087 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4088 || bitpos % BITS_PER_UNIT != 0)
4091 target = change_address (target, VOIDmode,
4092 plus_constant (XEXP (target, 0),
4093 bitpos / BITS_PER_UNIT));
/* Round the bit count up to whole bytes for the block move.  */
4095 emit_block_move (target, temp,
4096 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4100 return value_mode == VOIDmode ? const0_rtx : target;
4103 /* Store the value in the bitfield. */
4104 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4105 if (value_mode != VOIDmode)
4107 /* The caller wants an rtx for the value. */
4108 /* If possible, avoid refetching from the bitfield itself. */
4110 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4113 enum machine_mode tmode;
/* NOTE(review): the guarding condition is elided in this listing;
   masking with WIDTH_MASK keeps the low BITSIZE bits — presumably
   the unsigned case.  */
4116 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4117 tmode = GET_MODE (temp);
4118 if (tmode == VOIDmode)
/* Sign-extend: shift the field up to the top of TMODE, then
   arithmetic-shift it back down.  */
4120 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4121 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4122 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
/* Volatile MEM or masked-off bits: re-extract the value from TARGET.  */
4124 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4125 NULL_RTX, value_mode, 0, align,
/* Otherwise TARGET is an addressable MEM; store through an ordinary
   memory reference.  */
4132 rtx addr = XEXP (target, 0);
4135 /* If a value is wanted, it must be the lhs;
4136 so make the address stable for multiple use. */
4138 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4139 && ! CONSTANT_ADDRESS_P (addr)
4140 /* A frame-pointer reference is already stable. */
4141 && ! (GET_CODE (addr) == PLUS
4142 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4143 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4144 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4145 addr = copy_to_reg (addr);
4147 /* Now build a reference to just the desired component. */
4149 to_rtx = change_address (target, mode,
4150 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
4151 MEM_IN_STRUCT_P (to_rtx) = 1;
4153 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4157 /* Return true if any object containing the innermost array is an unaligned
4158 packed structure field. */
/* Return true if any object containing the innermost array is an
   unaligned packed structure field (see the comment above).  */
4161 get_inner_unaligned_p (exp)
/* Alignment that EXP's own type requires.  */
4164 int needed_alignment = TYPE_ALIGN (TREE_TYPE (exp));
/* Walk inward through component refs, array refs and mode-preserving
   conversions, checking each container's alignment.  */
4168 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4170 if (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
4174 else if (TREE_CODE (exp) != ARRAY_REF
4175 && TREE_CODE (exp) != NON_LVALUE_EXPR
4176 && ! ((TREE_CODE (exp) == NOP_EXPR
4177 || TREE_CODE (exp) == CONVERT_EXPR)
/* Conversions are only looked through when they do not change the
   machine mode.  */
4178 && (TYPE_MODE (TREE_TYPE (exp))
4179 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
/* Step in to the containing object.  */
4182 exp = TREE_OPERAND (exp, 0);
4188 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4189 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4190 ARRAY_REFs and find the ultimate containing object, which we return.
4192 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4193 bit position, and *PUNSIGNEDP to the signedness of the field.
4194 If the position of the field is variable, we store a tree
4195 giving the variable offset (in units) in *POFFSET.
4196 This offset is in addition to the bit position.
4197 If the position is not variable, we store 0 in *POFFSET.
4198 We set *PALIGNMENT to the alignment in bytes of the address that will be
4199 computed. This is the alignment of the thing we return if *POFFSET
4200 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4202 If any of the extraction expressions is volatile,
4203 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4205 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4206 is a mode that can be used to access the field. In that case, *PBITSIZE
4209 If the field describes a variable-sized object, *PMODE is set to
4210 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4211 this case, but the address of the object can be found. */
/* Decompose reference EXP into its ultimate containing object plus
   bit position/size, variable offset, mode, signedness, volatility and
   alignment; the block comment above describes each output parameter.  */
4214 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4215 punsignedp, pvolatilep, palignment)
4220 enum machine_mode *pmode;
4225 tree orig_exp = exp;
4227 enum machine_mode mode = VOIDmode;
4228 tree offset = integer_zero_node;
4229 int alignment = BIGGEST_ALIGNMENT;
/* First determine the size in bits of the outermost reference.  */
4231 if (TREE_CODE (exp) == COMPONENT_REF)
4233 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
/* Only a non-bit-field component has a usable access mode.  */
4234 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4235 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4236 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4238 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4240 size_tree = TREE_OPERAND (exp, 1);
4241 *punsignedp = TREE_UNSIGNED (exp);
4245 mode = TYPE_MODE (TREE_TYPE (exp));
4246 *pbitsize = GET_MODE_BITSIZE (mode);
4247 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
/* A variable-sized field is flagged with BLKmode and bitsize -1.  */
4252 if (TREE_CODE (size_tree) != INTEGER_CST)
4253 mode = BLKmode, *pbitsize = -1;
4255 *pbitsize = TREE_INT_CST_LOW (size_tree);
4258 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4259 and find the ultimate containing object. */
4265 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4267 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4268 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4269 : TREE_OPERAND (exp, 2));
/* Split POS into a compile-time constant part and a variable part.  */
4270 tree constant = integer_zero_node, var = pos;
4272 /* If this field hasn't been filled in yet, don't go
4273 past it. This should only happen when folding expressions
4274 made during type construction. */
4278 /* Assume here that the offset is a multiple of a unit.
4279 If not, there should be an explicitly added constant. */
4280 if (TREE_CODE (pos) == PLUS_EXPR
4281 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4282 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4283 else if (TREE_CODE (pos) == INTEGER_CST)
4284 constant = pos, var = integer_zero_node;
/* Constant part accumulates in *PBITPOS (bits); variable part
   accumulates in OFFSET (units).  */
4286 *pbitpos += TREE_INT_CST_LOW (constant);
4287 offset = size_binop (PLUS_EXPR, offset,
4288 size_binop (EXACT_DIV_EXPR, var,
4289 size_int (BITS_PER_UNIT)));
4292 else if (TREE_CODE (exp) == ARRAY_REF)
4294 /* This code is based on the code in case ARRAY_REF in expand_expr
4295 below. We assume here that the size of an array element is
4296 always an integral multiple of BITS_PER_UNIT. */
4298 tree index = TREE_OPERAND (exp, 1);
4299 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4301 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4302 tree index_type = TREE_TYPE (index);
/* Rebase the index so the array effectively starts at zero.  */
4304 if (! integer_zerop (low_bound))
4305 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
4307 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4309 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4311 index_type = TREE_TYPE (index);
/* INDEX becomes a bit offset: index * element size in bits.  */
4314 index = fold (build (MULT_EXPR, index_type, index,
4315 convert (index_type,
4316 TYPE_SIZE (TREE_TYPE (exp)))));
4318 if (TREE_CODE (index) == INTEGER_CST
4319 && TREE_INT_CST_HIGH (index) == 0)
4320 *pbitpos += TREE_INT_CST_LOW (index);
4322 offset = size_binop (PLUS_EXPR, offset,
4323 size_binop (FLOOR_DIV_EXPR, index,
4324 size_int (BITS_PER_UNIT)));
4326 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4327 && ! ((TREE_CODE (exp) == NOP_EXPR
4328 || TREE_CODE (exp) == CONVERT_EXPR)
4329 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4330 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4332 && (TYPE_MODE (TREE_TYPE (exp))
4333 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4336 /* If any reference in the chain is volatile, the effect is volatile. */
4337 if (TREE_THIS_VOLATILE (exp))
4340 /* If the offset is non-constant already, then we can't assume any
4341 alignment more than the alignment here. */
4342 if (! integer_zerop (offset))
4343 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
/* Continue with the containing object.  */
4345 exp = TREE_OPERAND (exp, 0);
/* Fold in whatever alignment the innermost object guarantees.  */
4348 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4349 alignment = MIN (alignment, DECL_ALIGN (exp));
4350 else if (TREE_TYPE (exp) != 0)
4351 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4353 if (integer_zerop (offset))
/* Wrap a PLACEHOLDER_EXPR-bearing offset so it can later be evaluated
   relative to ORIG_EXP's record object.  */
4356 if (offset != 0 && contains_placeholder_p (offset))
4357 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
/* Report alignment in bytes, as documented above.  */
4361 *palignment = alignment / BITS_PER_UNIT;
4365 /* Given an rtx VALUE that may contain additions and multiplications,
4366 return an equivalent value that just refers to a register or memory.
4367 This is done by generating instructions to perform the arithmetic
4368 and returning a pseudo-register containing the value.
4370 The returned value may be a REG, SUBREG, MEM or constant. */
/* Emit insns computing VALUE (which may contain PLUS/MINUS/MULT) and
   return an equivalent REG, SUBREG, MEM or constant; see the block
   comment above.  */
4373 force_operand (value, target)
/* BINOPTAB is set when VALUE's top code is a binary op we expand here.  */
4376 register optab binoptab = 0;
4377 /* Use a temporary to force order of execution of calls to
4381 /* Use subtarget as the target for operand 0 of a binary operation. */
4382 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4384 if (GET_CODE (value) == PLUS)
4385 binoptab = add_optab;
4386 else if (GET_CODE (value) == MINUS)
4387 binoptab = sub_optab;
4388 else if (GET_CODE (value) == MULT)
/* Multiplication is handed to expand_mult so it can pick the best
   code sequence.  */
4390 op2 = XEXP (value, 1);
4391 if (!CONSTANT_P (op2)
4392 && !(GET_CODE (op2) == REG && op2 != subtarget))
4394 tmp = force_operand (XEXP (value, 0), subtarget);
4395 return expand_mult (GET_MODE (value), tmp,
4396 force_operand (op2, NULL_RTX),
4402 op2 = XEXP (value, 1);
4403 if (!CONSTANT_P (op2)
4404 && !(GET_CODE (op2) == REG && op2 != subtarget))
/* Subtracting a CONST_INT is rewritten as adding its negation.  */
4406 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4408 binoptab = add_optab;
4409 op2 = negate_rtx (GET_MODE (value), op2);
4412 /* Check for an addition with OP2 a constant integer and our first
4413 operand a PLUS of a virtual register and something else. In that
4414 case, we want to emit the sum of the virtual register and the
4415 constant first and then add the other value. This allows virtual
4416 register instantiation to simply modify the constant rather than
4417 creating another one around this addition. */
4418 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4419 && GET_CODE (XEXP (value, 0)) == PLUS
4420 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4421 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4422 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4424 rtx temp = expand_binop (GET_MODE (value), binoptab,
4425 XEXP (XEXP (value, 0), 0), op2,
4426 subtarget, 0, OPTAB_LIB_WIDEN)
4427 return expand_binop (GET_MODE (value), binoptab, temp,
4428 force_operand (XEXP (XEXP (value, 0), 1), 0),
4429 target, 0, OPTAB_LIB_WIDEN);
/* General binary case: force operand 0, then combine with OP2.  */
4432 tmp = force_operand (XEXP (value, 0), subtarget);
4433 return expand_binop (GET_MODE (value), binoptab, tmp,
4434 force_operand (op2, NULL_RTX),
4435 target, 0, OPTAB_LIB_WIDEN);
4436 /* We give UNSIGNEDP = 0 to expand_binop
4437 because the only operations we are expanding here are signed ones. */
4442 /* Subroutine of expand_expr:
4443 save the non-copied parts (LIST) of an expr (LHS), and return a list
4444 which can restore these values to their previous values,
4445 should something modify their storage. */
/* Save the non-copied parts (LIST) of LHS into fresh temporaries and
   return a list that can restore them; see the comment above.  */
4448 save_noncopied_parts (lhs, list)
4455 for (tail = list; tail; tail = TREE_CHAIN (tail))
/* Nested TREE_LISTs are flattened recursively.  */
4456 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4457 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
4460 tree part = TREE_VALUE (tail);
4461 tree part_type = TREE_TYPE (part);
/* Reference to this part within LHS.  */
4462 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
/* Temporary that will hold the saved value of the part.  */
4463 rtx target = assign_temp (part_type, 0, 1, 1);
4464 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
4465 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
/* Pair the lvalue with an RTL_EXPR wrapping the temporary, then copy
   the current value into it.  */
4466 parts = tree_cons (to_be_saved,
4467 build (RTL_EXPR, part_type, NULL_TREE,
4470 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
4475 /* Subroutine of expand_expr:
4476 record the non-copied parts (LIST) of an expr (LHS), and return a list
4477 which specifies the initial values of these parts. */
/* Build a list pairing each non-copied part of LHS with its initial
   value expression; see the comment above.  */
4480 init_noncopied_parts (lhs, list)
4487 for (tail = list; tail; tail = TREE_CHAIN (tail))
/* Nested TREE_LISTs are flattened recursively.  */
4488 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4489 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
4492 tree part = TREE_VALUE (tail);
4493 tree part_type = TREE_TYPE (part);
/* Reference to this part within LHS.  */
4494 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
4495 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
4500 /* Subroutine of expand_expr: return nonzero iff there is no way that
4501 EXP can reference X, which is being modified. */
/* Return nonzero iff there is no way that EXP can reference X, which
   is being modified (see the comment above).  */
4504 safe_from_p (x, exp)
4512 /* If EXP has varying size, we MUST use a target since we currently
4513 have no way of allocating temporaries of variable size
4514 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
4515 So we assume here that something at a higher level has prevented a
4516 clash. This is somewhat bogus, but the best we can do. Only
4517 do this when X is BLKmode. */
4518 || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4519 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
4520 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
4521 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
4522 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
4524 && GET_MODE (x) == BLKmode))
4527 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
4528 find the underlying pseudo. */
4529 if (GET_CODE (x) == SUBREG)
4532 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4536 /* If X is a location in the outgoing argument area, it is always safe. */
4537 if (GET_CODE (x) == MEM
4538 && (XEXP (x, 0) == virtual_outgoing_args_rtx
4539 || (GET_CODE (XEXP (x, 0)) == PLUS
4540 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
/* First dispatch on the CLASS of tree code; EXP_RTL records the rtx
   (if any) EXP lives in, checked for conflict at the end.  */
4543 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4546 exp_rtl = DECL_RTL (exp);
/* A TREE_LIST is safe only if both its value and its chain are.  */
4553 if (TREE_CODE (exp) == TREE_LIST)
4554 return ((TREE_VALUE (exp) == 0
4555 || safe_from_p (x, TREE_VALUE (exp)))
4556 && (TREE_CHAIN (exp) == 0
4557 || safe_from_p (x, TREE_CHAIN (exp))));
4562 return safe_from_p (x, TREE_OPERAND (exp, 0));
/* Binary case: both operands must be safe.  */
4566 return (safe_from_p (x, TREE_OPERAND (exp, 0))
4567 && safe_from_p (x, TREE_OPERAND (exp, 1)));
4571 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
4572 the expression. If it is set, we conflict iff we are that rtx or
4573 both are in memory. Otherwise, we check all operands of the
4574 expression recursively. */
4576 switch (TREE_CODE (exp))
4579 return (staticp (TREE_OPERAND (exp, 0))
4580 || safe_from_p (x, TREE_OPERAND (exp, 0)));
4583 if (GET_CODE (x) == MEM)
4588 exp_rtl = CALL_EXPR_RTL (exp);
4591 /* Assume that the call will clobber all hard registers and
4593 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4594 || GET_CODE (x) == MEM)
4601 /* If a sequence exists, we would have to scan every instruction
4602 in the sequence to see if it was safe. This is probably not
4604 if (RTL_EXPR_SEQUENCE (exp))
4607 exp_rtl = RTL_EXPR_RTL (exp);
4610 case WITH_CLEANUP_EXPR:
4611 exp_rtl = RTL_EXPR_RTL (exp);
4614 case CLEANUP_POINT_EXPR:
4615 return safe_from_p (x, TREE_OPERAND (exp, 0));
4618 exp_rtl = SAVE_EXPR_RTL (exp);
4622 /* The only operand we look at is operand 1. The rest aren't
4623 part of the expression. */
4624 return safe_from_p (x, TREE_OPERAND (exp, 1));
4626 case METHOD_CALL_EXPR:
4627 /* This takes a rtx argument, but shouldn't appear here. */
4631 /* If we have an rtx, we do not need to scan our operands. */
/* Otherwise recurse into every operand of EXP.  */
4635 nops = tree_code_length[(int) TREE_CODE (exp)];
4636 for (i = 0; i < nops; i++)
4637 if (TREE_OPERAND (exp, i) != 0
4638 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
4642 /* If we have an rtl, find any enclosed object. Then see if we conflict
4646 if (GET_CODE (exp_rtl) == SUBREG)
4648 exp_rtl = SUBREG_REG (exp_rtl);
/* NOTE(review): the statement guarded by this hard-register test is
   elided in this listing; presumably it declares a conflict.  */
4649 if (GET_CODE (exp_rtl) == REG
4650 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
4654 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
4655 are memory and EXP is not readonly. */
4656 return ! (rtx_equal_p (x, exp_rtl)
4657 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
4658 && ! TREE_READONLY (exp)));
4661 /* If we reach here, it is safe. */
4665 /* Subroutine of expand_expr: return nonzero iff EXP is an
4666 expression whose type is statically determinable. */
/* Body fragment of the predicate described above ("type is statically
   determinable"): true for parms, vars, calls, target exprs and
   component/array references.  NOTE(review): the function's opening
   and closing lines are not visible in this listing.  */
4672 if (TREE_CODE (exp) == PARM_DECL
4673 || TREE_CODE (exp) == VAR_DECL
4674 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
4675 || TREE_CODE (exp) == COMPONENT_REF
4676 || TREE_CODE (exp) == ARRAY_REF
4681 /* Subroutine of expand_expr: return rtx if EXP is a
4682 variable or parameter; else return 0. */
/* Body fragment of the helper described above (return rtx if EXP is a
   variable or parameter, else 0): dispatch on tree code, returning the
   decl's rtl for the VAR/PARM cases.  NOTE(review): surrounding lines
   are not visible in this listing.  */
4689 switch (TREE_CODE (exp))
4693 return DECL_RTL (exp);
4699 /* expand_expr: generate code for computing expression EXP.
4700 An rtx for the computed value is returned. The value is never null.
4701 In the case of a void EXP, const0_rtx is returned.
4703 The value may be stored in TARGET if TARGET is nonzero.
4704 TARGET is just a suggestion; callers must assume that
4705 the rtx returned may not be the same as TARGET.
4707 If TARGET is CONST0_RTX, it means that the value will be ignored.
4709 If TMODE is not VOIDmode, it suggests generating the
4710 result in mode TMODE. But this is done only when convenient.
4711 Otherwise, TMODE is ignored and the value generated in its natural mode.
4712 TMODE is just a suggestion; callers must assume that
4713 the rtx returned may not have mode TMODE.
4715 Note that TARGET may have neither TMODE nor MODE. In that case, it
4716 probably will not be used.
4718 If MODIFIER is EXPAND_SUM then when EXP is an addition
4719 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4720 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4721 products as above, or REG or MEM, or constant.
4722 Ordinarily in such cases we would output mul or add instructions
4723 and then return a pseudo reg containing the sum.
4725 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4726 it also marks a label as absolutely required (it can't be dead).
4727 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
4728 This is used for outputting expressions used in initializers.
4730 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4731 with a constant address even if that address is not normally legitimate.
4732 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
4735 expand_expr (exp, target, tmode, modifier)
4738 enum machine_mode tmode;
4739 enum expand_modifier modifier;
4741 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4742 This is static so it will be accessible to our recursive callees. */
4743 static tree placeholder_list = 0;
4744 register rtx op0, op1, temp;
4745 tree type = TREE_TYPE (exp);
4746 int unsignedp = TREE_UNSIGNED (type);
4747 register enum machine_mode mode = TYPE_MODE (type);
4748 register enum tree_code code = TREE_CODE (exp);
4750 /* Use subtarget as the target for operand 0 of a binary operation. */
4751 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4752 rtx original_target = target;
4753 /* Maybe defer this until sure not doing bytecode? */
4754 int ignore = (target == const0_rtx
4755 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4756 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4757 || code == COND_EXPR)
4758 && TREE_CODE (type) == VOID_TYPE));
4762 if (output_bytecode && modifier != EXPAND_INITIALIZER)
4764 bc_expand_expr (exp);
4768 /* Don't use hard regs as subtargets, because the combiner
4769 can only handle pseudo regs. */
4770 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4772 /* Avoid subtargets inside loops,
4773 since they hide some invariant expressions. */
4774 if (preserve_subexpressions_p ())
4777 /* If we are going to ignore this result, we need only do something
4778 if there is a side-effect somewhere in the expression. If there
4779 is, short-circuit the most common cases here. Note that we must
4780 not call expand_expr with anything but const0_rtx in case this
4781 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
4785 if (! TREE_SIDE_EFFECTS (exp))
4788 /* Ensure we reference a volatile object even if value is ignored. */
4789 if (TREE_THIS_VOLATILE (exp)
4790 && TREE_CODE (exp) != FUNCTION_DECL
4791 && mode != VOIDmode && mode != BLKmode)
4793 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
4794 if (GET_CODE (temp) == MEM)
4795 temp = copy_to_reg (temp);
4799 if (TREE_CODE_CLASS (code) == '1')
4800 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4801 VOIDmode, modifier);
4802 else if (TREE_CODE_CLASS (code) == '2'
4803 || TREE_CODE_CLASS (code) == '<')
4805 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4806 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
4809 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4810 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4811 /* If the second operand has no side effects, just evaluate
4813 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4814 VOIDmode, modifier);
4819 /* If will do cse, generate all results into pseudo registers
4820 since 1) that allows cse to find more things
4821 and 2) otherwise cse could produce an insn the machine
4824 if (! cse_not_expected && mode != BLKmode && target
4825 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4832 tree function = decl_function_context (exp);
4833 /* Handle using a label in a containing function. */
4834 if (function != current_function_decl && function != 0)
4836 struct function *p = find_function_data (function);
4837 /* Allocate in the memory associated with the function
4838 that the label is in. */
4839 push_obstacks (p->function_obstack,
4840 p->function_maybepermanent_obstack);
4842 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4843 label_rtx (exp), p->forced_labels);
4846 else if (modifier == EXPAND_INITIALIZER)
4847 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4848 label_rtx (exp), forced_labels);
4849 temp = gen_rtx (MEM, FUNCTION_MODE,
4850 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
4851 if (function != current_function_decl && function != 0)
4852 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
4857 if (DECL_RTL (exp) == 0)
4859 error_with_decl (exp, "prior parameter's size depends on `%s'");
4860 return CONST0_RTX (mode);
4863 /* ... fall through ... */
4866 /* If a static var's type was incomplete when the decl was written,
4867 but the type is complete now, lay out the decl now. */
4868 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4869 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
4871 push_obstacks_nochange ();
4872 end_temporary_allocation ();
4873 layout_decl (exp, 0);
4874 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
4878 /* ... fall through ... */
4882 if (DECL_RTL (exp) == 0)
4885 /* Ensure variable marked as used even if it doesn't go through
4886 a parser. If it hasn't been used yet, write out an external
4888 if (! TREE_USED (exp))
4890 assemble_external (exp);
4891 TREE_USED (exp) = 1;
4894 /* Show we haven't gotten RTL for this yet. */
4897 /* Handle variables inherited from containing functions. */
4898 context = decl_function_context (exp);
4900 /* We treat inline_function_decl as an alias for the current function
4901 because that is the inline function whose vars, types, etc.
4902 are being merged into the current function.
4903 See expand_inline_function. */
4905 if (context != 0 && context != current_function_decl
4906 && context != inline_function_decl
4907 /* If var is static, we don't need a static chain to access it. */
4908 && ! (GET_CODE (DECL_RTL (exp)) == MEM
4909 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
4913 /* Mark as non-local and addressable. */
4914 DECL_NONLOCAL (exp) = 1;
4915 if (DECL_NO_STATIC_CHAIN (current_function_decl))
4917 mark_addressable (exp);
4918 if (GET_CODE (DECL_RTL (exp)) != MEM)
4920 addr = XEXP (DECL_RTL (exp), 0);
4921 if (GET_CODE (addr) == MEM)
4922 addr = gen_rtx (MEM, Pmode,
4923 fix_lexical_addr (XEXP (addr, 0), exp));
4925 addr = fix_lexical_addr (addr, exp);
4926 temp = change_address (DECL_RTL (exp), mode, addr);
4929 /* This is the case of an array whose size is to be determined
4930 from its initializer, while the initializer is still being parsed.
4933 else if (GET_CODE (DECL_RTL (exp)) == MEM
4934 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
4935 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
4936 XEXP (DECL_RTL (exp), 0));
4938 /* If DECL_RTL is memory, we are in the normal case and either
4939 the address is not valid or it is not a register and -fforce-addr
4940 is specified, get the address into a register. */
4942 else if (GET_CODE (DECL_RTL (exp)) == MEM
4943 && modifier != EXPAND_CONST_ADDRESS
4944 && modifier != EXPAND_SUM
4945 && modifier != EXPAND_INITIALIZER
4946 && (! memory_address_p (DECL_MODE (exp),
4947 XEXP (DECL_RTL (exp), 0))
4949 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
4950 temp = change_address (DECL_RTL (exp), VOIDmode,
4951 copy_rtx (XEXP (DECL_RTL (exp), 0)));
4953 /* If we got something, return it. But first, set the alignment
4954 if the address is a register. */
4957 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
4958 mark_reg_pointer (XEXP (temp, 0),
4959 DECL_ALIGN (exp) / BITS_PER_UNIT);
4964 /* If the mode of DECL_RTL does not match that of the decl, it
4965 must be a promoted value. We return a SUBREG of the wanted mode,
4966 but mark it so that we know that it was already extended. */
4968 if (GET_CODE (DECL_RTL (exp)) == REG
4969 && GET_MODE (DECL_RTL (exp)) != mode)
4971 /* Get the signedness used for this variable. Ensure we get the
4972 same mode we got when the variable was declared. */
4973 if (GET_MODE (DECL_RTL (exp))
4974 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
4977 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
4978 SUBREG_PROMOTED_VAR_P (temp) = 1;
4979 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4983 return DECL_RTL (exp);
4986 return immed_double_const (TREE_INT_CST_LOW (exp),
4987 TREE_INT_CST_HIGH (exp),
4991 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
4994 /* If optimized, generate immediate CONST_DOUBLE
4995 which will be turned into memory by reload if necessary.
4997 We used to force a register so that loop.c could see it. But
4998 this does not allow gen_* patterns to perform optimizations with
4999 the constants. It also produces two insns in cases like "x = 1.0;".
5000 On most machines, floating-point constants are not permitted in
5001 many insns, so we'd end up copying it to a register in any case.
5003 Now, we do the copying in expand_binop, if appropriate. */
5004 return immed_real_const (exp);
5008 if (! TREE_CST_RTL (exp))
5009 output_constant_def (exp);
5011 /* TREE_CST_RTL probably contains a constant address.
5012 On RISC machines where a constant address isn't valid,
5013 make some insns to get that address into a register. */
5014 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5015 && modifier != EXPAND_CONST_ADDRESS
5016 && modifier != EXPAND_INITIALIZER
5017 && modifier != EXPAND_SUM
5018 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5020 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5021 return change_address (TREE_CST_RTL (exp), VOIDmode,
5022 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5023 return TREE_CST_RTL (exp);
5026 context = decl_function_context (exp);
5028 /* We treat inline_function_decl as an alias for the current function
5029 because that is the inline function whose vars, types, etc.
5030 are being merged into the current function.
5031 See expand_inline_function. */
5032 if (context == current_function_decl || context == inline_function_decl)
5035 /* If this is non-local, handle it. */
5038 temp = SAVE_EXPR_RTL (exp);
5039 if (temp && GET_CODE (temp) == REG)
5041 put_var_into_stack (exp);
5042 temp = SAVE_EXPR_RTL (exp);
5044 if (temp == 0 || GET_CODE (temp) != MEM)
5046 return change_address (temp, mode,
5047 fix_lexical_addr (XEXP (temp, 0), exp));
5049 if (SAVE_EXPR_RTL (exp) == 0)
5051 if (mode == VOIDmode)
5054 temp = assign_temp (type, 0, 0, 0);
5056 SAVE_EXPR_RTL (exp) = temp;
5057 if (!optimize && GET_CODE (temp) == REG)
5058 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
5061 /* If the mode of TEMP does not match that of the expression, it
5062 must be a promoted value. We pass store_expr a SUBREG of the
5063 wanted mode but mark it so that we know that it was already
5064 extended. Note that `unsignedp' was modified above in
5067 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5069 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
5070 SUBREG_PROMOTED_VAR_P (temp) = 1;
5071 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5074 if (temp == const0_rtx)
5075 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5077 store_expr (TREE_OPERAND (exp, 0), temp, 0);
5080 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5081 must be a promoted value. We return a SUBREG of the wanted mode,
5082 but mark it so that we know that it was already extended. */
5084 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5085 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5087 /* Compute the signedness and make the proper SUBREG. */
5088 promote_mode (type, mode, &unsignedp, 0);
5089 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
5090 SUBREG_PROMOTED_VAR_P (temp) = 1;
5091 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5095 return SAVE_EXPR_RTL (exp);
5100 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5101 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5105 case PLACEHOLDER_EXPR:
5106 /* If there is an object on the head of the placeholder list,
5107 see if some object in its references is of type TYPE. For
5108 further information, see tree.def. */
5109 if (placeholder_list)
5111 tree need_type = TYPE_MAIN_VARIANT (type);
5113 tree old_list = placeholder_list;
5116 /* See if the object is the type that we want. Then see if
5117 the operand of any reference is the type we want. */
5118 if ((TYPE_MAIN_VARIANT (TREE_TYPE (TREE_PURPOSE (placeholder_list)))
5120 object = TREE_PURPOSE (placeholder_list);
5122 /* Find the innermost reference that is of the type we want. */
5123 for (elt = TREE_PURPOSE (placeholder_list);
5125 && (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5126 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5127 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5128 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e');
5129 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
5130 || TREE_CODE (elt) == COND_EXPR)
5131 ? TREE_OPERAND (elt, 1) : TREE_OPERAND (elt, 0)))
5132 if (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5133 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (elt, 0)))
5135 object = TREE_OPERAND (elt, 0);
5139 /* Expand this object skipping the list entries before
5140 it was found in case it is also a PLACEHOLDER_EXPR.
5141 In that case, we want to translate it using subsequent
5143 placeholder_list = TREE_CHAIN (placeholder_list);
5144 temp = expand_expr (object, original_target, tmode, modifier);
5145 placeholder_list = old_list;
5150 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5153 case WITH_RECORD_EXPR:
5154 /* Put the object on the placeholder list, expand our first operand,
5155 and pop the list. */
5156 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5158 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
5160 placeholder_list = TREE_CHAIN (placeholder_list);
5164 expand_exit_loop_if_false (NULL_PTR,
5165 invert_truthvalue (TREE_OPERAND (exp, 0)));
5170 expand_start_loop (1);
5171 expand_expr_stmt (TREE_OPERAND (exp, 0));
5179 tree vars = TREE_OPERAND (exp, 0);
5180 int vars_need_expansion = 0;
5182 /* Need to open a binding contour here because
5183 if there are any cleanups they must be contained here. */
5184 expand_start_bindings (0);
5186 /* Mark the corresponding BLOCK for output in its proper place. */
5187 if (TREE_OPERAND (exp, 2) != 0
5188 && ! TREE_USED (TREE_OPERAND (exp, 2)))
5189 insert_block (TREE_OPERAND (exp, 2));
5191 /* If VARS have not yet been expanded, expand them now. */
5194 if (DECL_RTL (vars) == 0)
5196 vars_need_expansion = 1;
5199 expand_decl_init (vars);
5200 vars = TREE_CHAIN (vars);
5203 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
5205 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
5211 if (RTL_EXPR_SEQUENCE (exp))
5213 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
5215 emit_insns (RTL_EXPR_SEQUENCE (exp));
5216 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
5218 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
5219 free_temps_for_rtl_expr (exp);
5220 return RTL_EXPR_RTL (exp);
5223 /* If we don't need the result, just ensure we evaluate any
5228 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
5229 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
5233 /* All elts simple constants => refer to a constant in memory. But
5234 if this is a non-BLKmode mode, let it store a field at a time
5235 since that should make a CONST_INT or CONST_DOUBLE when we
5236 fold. Likewise, if we have a target we can use, it is best to
5237 store directly into the target unless the type is large enough
5238 that memcpy will be used. If we are making an initializer and
5239 all operands are constant, put it in memory as well. */
5240 else if ((TREE_STATIC (exp)
5241 && ((mode == BLKmode
5242 && ! (target != 0 && safe_from_p (target, exp)))
5243 || TREE_ADDRESSABLE (exp)
5244 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5245 && (move_by_pieces_ninsns
5246 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
5247 TYPE_ALIGN (type) / BITS_PER_UNIT)
5249 && ! mostly_zeros_p (exp))))
5250 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
5252 rtx constructor = output_constant_def (exp);
5253 if (modifier != EXPAND_CONST_ADDRESS
5254 && modifier != EXPAND_INITIALIZER
5255 && modifier != EXPAND_SUM
5256 && (! memory_address_p (GET_MODE (constructor),
5257 XEXP (constructor, 0))
5259 && GET_CODE (XEXP (constructor, 0)) != REG)))
5260 constructor = change_address (constructor, VOIDmode,
5261 XEXP (constructor, 0));
5267 /* Handle calls that pass values in multiple non-contiguous
5268 locations. The Irix 6 ABI has examples of this. */
5269 if (target == 0 || ! safe_from_p (target, exp)
5270 || GET_CODE (target) == PARALLEL)
5272 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
5273 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5275 target = assign_temp (type, 0, 1, 1);
5278 if (TREE_READONLY (exp))
5280 if (GET_CODE (target) == MEM)
5281 target = change_address (target, GET_MODE (target),
5283 RTX_UNCHANGING_P (target) = 1;
5286 store_constructor (exp, target, 0);
5292 tree exp1 = TREE_OPERAND (exp, 0);
5295 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
5296 op0 = memory_address (mode, op0);
5298 temp = gen_rtx (MEM, mode, op0);
5299 /* If address was computed by addition,
5300 mark this as an element of an aggregate. */
5301 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5302 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
5303 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
5304 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
5305 || (TREE_CODE (exp1) == ADDR_EXPR
5306 && (exp2 = TREE_OPERAND (exp1, 0))
5307 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
5308 MEM_IN_STRUCT_P (temp) = 1;
5309 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
5311 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5312 here, because, in C and C++, the fact that a location is accessed
5313 through a pointer to const does not mean that the value there can
5314 never change. Languages where it can never change should
5315 also set TREE_STATIC. */
5316 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
5321 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
5325 tree array = TREE_OPERAND (exp, 0);
5326 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5327 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5328 tree index = TREE_OPERAND (exp, 1);
5329 tree index_type = TREE_TYPE (index);
5332 if (TREE_CODE (low_bound) != INTEGER_CST
5333 && contains_placeholder_p (low_bound))
5334 low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
5336 /* Optimize the special-case of a zero lower bound.
5338 We convert the low_bound to sizetype to avoid some problems
5339 with constant folding. (E.g. suppose the lower bound is 1,
5340 and its mode is QI. Without the conversion, (ARRAY
5341 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5342 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5344 But sizetype isn't quite right either (especially if
5345 the lowbound is negative). FIXME */
5347 if (! integer_zerop (low_bound))
5348 index = fold (build (MINUS_EXPR, index_type, index,
5349 convert (sizetype, low_bound)));
5351 if ((TREE_CODE (index) != INTEGER_CST
5352 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5353 && (! SLOW_UNALIGNED_ACCESS || ! get_inner_unaligned_p (exp)))
5355 /* Nonconstant array index or nonconstant element size, and
5356 not an array in an unaligned (packed) structure field.
5357 Generate the tree for *(&array+index) and expand that,
5358 except do it in a language-independent way
5359 and don't complain about non-lvalue arrays.
5360 `mark_addressable' should already have been called
5361 for any array for which this case will be reached. */
5363 /* Don't forget the const or volatile flag from the array
5365 tree variant_type = build_type_variant (type,
5366 TREE_READONLY (exp),
5367 TREE_THIS_VOLATILE (exp));
5368 tree array_adr = build1 (ADDR_EXPR,
5369 build_pointer_type (variant_type), array);
5371 tree size = size_in_bytes (type);
5373 /* Convert the integer argument to a type the same size as sizetype
5374 so the multiply won't overflow spuriously. */
5375 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
5376 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
5379 if (TREE_CODE (size) != INTEGER_CST
5380 && contains_placeholder_p (size))
5381 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
5383 /* Don't think the address has side effects
5384 just because the array does.
5385 (In some cases the address might have side effects,
5386 and we fail to record that fact here. However, it should not
5387 matter, since expand_expr should not care.) */
5388 TREE_SIDE_EFFECTS (array_adr) = 0;
5392 (INDIRECT_REF, type,
5393 fold (build (PLUS_EXPR,
5394 TYPE_POINTER_TO (variant_type),
5399 TYPE_POINTER_TO (variant_type),
5400 fold (build (MULT_EXPR, TREE_TYPE (index),
5402 convert (TREE_TYPE (index),
5405 /* Volatility, etc., of new expression is same as old
5407 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
5408 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
5409 TREE_READONLY (elt) = TREE_READONLY (exp);
5411 return expand_expr (elt, target, tmode, modifier);
5414 /* Fold an expression like: "foo"[2].
5415 This is not done in fold so it won't happen inside &.
5416 Don't fold if this is for wide characters since it's too
5417 difficult to do correctly and this is a very rare case. */
5419 if (TREE_CODE (array) == STRING_CST
5420 && TREE_CODE (index) == INTEGER_CST
5421 && !TREE_INT_CST_HIGH (index)
5422 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
5423 && GET_MODE_CLASS (mode) == MODE_INT
5424 && GET_MODE_SIZE (mode) == 1)
5425 return GEN_INT (TREE_STRING_POINTER (array)[i]);
5427 /* If this is a constant index into a constant array,
5428 just get the value from the array. Handle both the cases when
5429 we have an explicit constructor and when our operand is a variable
5430 that was declared const. */
5432 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
5434 if (TREE_CODE (index) == INTEGER_CST
5435 && TREE_INT_CST_HIGH (index) == 0)
5437 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
5439 i = TREE_INT_CST_LOW (index);
5441 elem = TREE_CHAIN (elem);
5443 return expand_expr (fold (TREE_VALUE (elem)), target,
5448 else if (optimize >= 1
5449 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
5450 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
5451 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
5453 if (TREE_CODE (index) == INTEGER_CST
5454 && TREE_INT_CST_HIGH (index) == 0)
5456 tree init = DECL_INITIAL (array);
5458 i = TREE_INT_CST_LOW (index);
5459 if (TREE_CODE (init) == CONSTRUCTOR)
5461 tree elem = CONSTRUCTOR_ELTS (init);
5464 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
5465 elem = TREE_CHAIN (elem);
5467 return expand_expr (fold (TREE_VALUE (elem)), target,
5470 else if (TREE_CODE (init) == STRING_CST
5471 && i < TREE_STRING_LENGTH (init))
5472 return GEN_INT (TREE_STRING_POINTER (init)[i]);
5477 /* Treat array-ref with constant index as a component-ref. */
5481 /* If the operand is a CONSTRUCTOR, we can just extract the
5482 appropriate field if it is present. Don't do this if we have
5483 already written the data since we want to refer to that copy
5484 and varasm.c assumes that's what we'll do. */
5485 if (code != ARRAY_REF
5486 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
5487 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
5491 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
5492 elt = TREE_CHAIN (elt))
5493 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
5494 /* We can normally use the value of the field in the
5495 CONSTRUCTOR. However, if this is a bitfield in
5496 an integral mode that we can fit in a HOST_WIDE_INT,
5497 we must mask only the number of bits in the bitfield,
5498 since this is done implicitly by the constructor. If
5499 the bitfield does not meet either of those conditions,
5500 we can't do this optimization. */
5501 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
5502 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
5504 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
5505 <= HOST_BITS_PER_WIDE_INT))))
5507 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
5508 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
5510 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
5511 enum machine_mode imode
5512 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
5514 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
5516 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
5517 op0 = expand_and (op0, op1, target);
5522 = build_int_2 (imode - bitsize, 0);
5524 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
5526 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
5536 enum machine_mode mode1;
5542 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
5543 &mode1, &unsignedp, &volatilep,
5546 /* If we got back the original object, something is wrong. Perhaps
5547 we are evaluating an expression too early. In any event, don't
5548 infinitely recurse. */
5552 /* If TEM's type is a union of variable size, pass TARGET to the inner
5553 computation, since it will need a temporary and TARGET is known
5554 to have to do. This occurs in unchecked conversion in Ada. */
5556 op0 = expand_expr (tem,
5557 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
5558 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
5560 ? target : NULL_RTX),
5562 modifier == EXPAND_INITIALIZER ? modifier : 0);
5564 /* If this is a constant, put it into a register if it is a
5565 legitimate constant and memory if it isn't. */
5566 if (CONSTANT_P (op0))
5568 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
5569 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
5570 op0 = force_reg (mode, op0);
5572 op0 = validize_mem (force_const_mem (mode, op0));
5577 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5579 if (GET_CODE (op0) != MEM)
5581 op0 = change_address (op0, VOIDmode,
5582 gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
5583 force_reg (ptr_mode, offset_rtx)));
5586 /* Don't forget about volatility even if this is a bitfield. */
5587 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
5589 op0 = copy_rtx (op0);
5590 MEM_VOLATILE_P (op0) = 1;
5593 /* In cases where an aligned union has an unaligned object
5594 as a field, we might be extracting a BLKmode value from
5595 an integer-mode (e.g., SImode) object. Handle this case
5596 by doing the extract into an object as wide as the field
5597 (which we know to be the width of a basic mode), then
5598 storing into memory, and changing the mode to BLKmode.
5599 If we ultimately want the address (EXPAND_CONST_ADDRESS or
5600 EXPAND_INITIALIZER), then we must not copy to a temporary. */
5601 if (mode1 == VOIDmode
5602 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5603 || (modifier != EXPAND_CONST_ADDRESS
5604 && modifier != EXPAND_INITIALIZER
5605 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
5606 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5607 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5608 /* If the field isn't aligned enough to fetch as a memref,
5609 fetch it as a bit field. */
5610 || (SLOW_UNALIGNED_ACCESS
5611 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
5612 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
5614 enum machine_mode ext_mode = mode;
5616 if (ext_mode == BLKmode)
5617 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
5619 if (ext_mode == BLKmode)
5621 /* In this case, BITPOS must start at a byte boundary and
5622 TARGET, if specified, must be a MEM. */
5623 if (GET_CODE (op0) != MEM
5624 || (target != 0 && GET_CODE (target) != MEM)
5625 || bitpos % BITS_PER_UNIT != 0)
5628 op0 = change_address (op0, VOIDmode,
5629 plus_constant (XEXP (op0, 0),
5630 bitpos / BITS_PER_UNIT));
5632 target = assign_temp (type, 0, 1, 1);
5634 emit_block_move (target, op0,
5635 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5642 op0 = validize_mem (op0);
5644 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
5645 mark_reg_pointer (XEXP (op0, 0), alignment);
5647 op0 = extract_bit_field (op0, bitsize, bitpos,
5648 unsignedp, target, ext_mode, ext_mode,
5650 int_size_in_bytes (TREE_TYPE (tem)));
5652 /* If the result is a record type and BITSIZE is narrower than
5653 the mode of OP0, an integral mode, and this is a big endian
5654 machine, we must put the field into the high-order bits. */
5655 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
5656 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
5657 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
5658 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
5659 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
5663 if (mode == BLKmode)
5665 rtx new = assign_stack_temp (ext_mode,
5666 bitsize / BITS_PER_UNIT, 0);
5668 emit_move_insn (new, op0);
5669 op0 = copy_rtx (new);
5670 PUT_MODE (op0, BLKmode);
5671 MEM_IN_STRUCT_P (op0) = 1;
5677 /* If the result is BLKmode, use that to access the object
5679 if (mode == BLKmode)
5682 /* Get a reference to just this component. */
5683 if (modifier == EXPAND_CONST_ADDRESS
5684 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5685 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
5686 (bitpos / BITS_PER_UNIT)));
5688 op0 = change_address (op0, mode1,
5689 plus_constant (XEXP (op0, 0),
5690 (bitpos / BITS_PER_UNIT)));
5691 if (GET_CODE (XEXP (op0, 0)) == REG)
5692 mark_reg_pointer (XEXP (op0, 0), alignment);
5694 MEM_IN_STRUCT_P (op0) = 1;
5695 MEM_VOLATILE_P (op0) |= volatilep;
5696 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
5697 || modifier == EXPAND_CONST_ADDRESS
5698 || modifier == EXPAND_INITIALIZER)
5700 else if (target == 0)
5701 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5703 convert_move (target, op0, unsignedp);
5707 /* Intended for a reference to a buffer of a file-object in Pascal.
5708 But it's not certain that a special tree code will really be
5709 necessary for these. INDIRECT_REF might work for them. */
5715 /* Pascal set IN expression.
5718 rlo = set_low - (set_low%bits_per_word);
5719 the_word = set [ (index - rlo)/bits_per_word ];
5720 bit_index = index % bits_per_word;
5721 bitmask = 1 << bit_index;
5722 return !!(the_word & bitmask); */
5724 tree set = TREE_OPERAND (exp, 0);
5725 tree index = TREE_OPERAND (exp, 1);
5726 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
5727 tree set_type = TREE_TYPE (set);
5728 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
5729 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
5730 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
5731 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
5732 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
5733 rtx setval = expand_expr (set, 0, VOIDmode, 0);
5734 rtx setaddr = XEXP (setval, 0);
5735 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
5737 rtx diff, quo, rem, addr, bit, result;
5739 preexpand_calls (exp);
5741 /* If domain is empty, answer is no. Likewise if index is constant
5742 and out of bounds. */
5743 if ((TREE_CODE (set_high_bound) == INTEGER_CST
5744 && TREE_CODE (set_low_bound) == INTEGER_CST
5745 && tree_int_cst_lt (set_high_bound, set_low_bound)
5746 || (TREE_CODE (index) == INTEGER_CST
5747 && TREE_CODE (set_low_bound) == INTEGER_CST
5748 && tree_int_cst_lt (index, set_low_bound))
5749 || (TREE_CODE (set_high_bound) == INTEGER_CST
5750 && TREE_CODE (index) == INTEGER_CST
5751 && tree_int_cst_lt (set_high_bound, index))))
5755 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5757 /* If we get here, we have to generate the code for both cases
5758 (in range and out of range). */
5760 op0 = gen_label_rtx ();
5761 op1 = gen_label_rtx ();
5763 if (! (GET_CODE (index_val) == CONST_INT
5764 && GET_CODE (lo_r) == CONST_INT))
5766 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
5767 GET_MODE (index_val), iunsignedp, 0);
5768 emit_jump_insn (gen_blt (op1));
5771 if (! (GET_CODE (index_val) == CONST_INT
5772 && GET_CODE (hi_r) == CONST_INT))
5774 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
5775 GET_MODE (index_val), iunsignedp, 0);
5776 emit_jump_insn (gen_bgt (op1));
5779 /* Calculate the element number of bit zero in the first word
5781 if (GET_CODE (lo_r) == CONST_INT)
5782 rlow = GEN_INT (INTVAL (lo_r)
5783 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
5785 rlow = expand_binop (index_mode, and_optab, lo_r,
5786 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
5787 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5789 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
5790 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5792 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
5793 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5794 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
5795 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5797 addr = memory_address (byte_mode,
5798 expand_binop (index_mode, add_optab, diff,
5799 setaddr, NULL_RTX, iunsignedp,
5802 /* Extract the bit we want to examine */
5803 bit = expand_shift (RSHIFT_EXPR, byte_mode,
5804 gen_rtx (MEM, byte_mode, addr),
5805 make_tree (TREE_TYPE (index), rem),
5807 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
5808 GET_MODE (target) == byte_mode ? target : 0,
5809 1, OPTAB_LIB_WIDEN);
5811 if (result != target)
5812 convert_move (target, result, 1);
5814 /* Output the code to handle the out-of-range case. */
5817 emit_move_insn (target, const0_rtx);
5822 case WITH_CLEANUP_EXPR:
5823 if (RTL_EXPR_RTL (exp) == 0)
5826 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5828 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
5829 /* That's it for this cleanup. */
5830 TREE_OPERAND (exp, 2) = 0;
5831 expand_eh_region_start ();
5833 return RTL_EXPR_RTL (exp);
5835 case CLEANUP_POINT_EXPR:
5837 extern int temp_slot_level;
5838 tree old_cleanups = cleanups_this_call;
5839 int old_temp_level = target_temp_slot_level;
5841 target_temp_slot_level = temp_slot_level;
5842 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5843 /* If we're going to use this value, load it up now. */
5845 op0 = force_not_mem (op0);
5846 expand_cleanups_to (old_cleanups);
5847 preserve_temp_slots (op0);
5850 target_temp_slot_level = old_temp_level;
5855 /* Check for a built-in function. */
5856 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5857 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5859 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5860 return expand_builtin (exp, target, subtarget, tmode, ignore);
5862 /* If this call was expanded already by preexpand_calls,
5863 just return the result we got. */
5864 if (CALL_EXPR_RTL (exp) != 0)
5865 return CALL_EXPR_RTL (exp);
5867 return expand_call (exp, target, ignore);
5869 case NON_LVALUE_EXPR:
5872 case REFERENCE_EXPR:
5873 if (TREE_CODE (type) == UNION_TYPE)
5875 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
5878 if (mode != BLKmode)
5879 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5881 target = assign_temp (type, 0, 1, 1);
5884 if (GET_CODE (target) == MEM)
5885 /* Store data into beginning of memory target. */
5886 store_expr (TREE_OPERAND (exp, 0),
5887 change_address (target, TYPE_MODE (valtype), 0), 0);
5889 else if (GET_CODE (target) == REG)
5890 /* Store this field into a union of the proper type. */
5891 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
5892 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
5894 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
5898 /* Return the entire union. */
5902 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5904 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
5907 /* If the signedness of the conversion differs and OP0 is
5908 a promoted SUBREG, clear that indication since we now
5909 have to do the proper extension. */
5910 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
5911 && GET_CODE (op0) == SUBREG)
5912 SUBREG_PROMOTED_VAR_P (op0) = 0;
5917 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
5918 if (GET_MODE (op0) == mode)
5921 /* If OP0 is a constant, just convert it into the proper mode. */
5922 if (CONSTANT_P (op0))
5924 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5925 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5927 if (modifier == EXPAND_INITIALIZER)
5928 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
5932 convert_to_mode (mode, op0,
5933 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5935 convert_move (target, op0,
5936 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5940 /* We come here from MINUS_EXPR when the second operand is a
5943 this_optab = add_optab;
5945 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
5946 something else, make sure we add the register to the constant and
5947 then to the other thing. This case can occur during strength
5948 reduction and doing it this way will produce better code if the
5949 frame pointer or argument pointer is eliminated.
5951 fold-const.c will ensure that the constant is always in the inner
5952 PLUS_EXPR, so the only case we need to do anything about is if
5953 sp, ap, or fp is our second argument, in which case we must swap
5954 the innermost first argument and our second argument. */
5956 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5957 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
5958 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
5959 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
5960 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
5961 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
5963 tree t = TREE_OPERAND (exp, 1);
5965 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5966 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
5969 /* If the result is to be ptr_mode and we are adding an integer to
5970 something, we might be forming a constant. So try to use
5971 plus_constant. If it produces a sum and we can't accept it,
5972 use force_operand. This allows P = &ARR[const] to generate
5973 efficient code on machines where a SYMBOL_REF is not a valid
5976 If this is an EXPAND_SUM call, always return the sum. */
5977 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
5978 || mode == ptr_mode)
5980 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
5981 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5982 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
5984 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
5986 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
5987 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5988 op1 = force_operand (op1, target);
5992 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5993 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
5994 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
5996 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
5998 if (! CONSTANT_P (op0))
6000 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6001 VOIDmode, modifier);
6002 /* Don't go to both_summands if modifier
6003 says it's not right to return a PLUS. */
6004 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6008 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
6009 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6010 op0 = force_operand (op0, target);
6015 /* No sense saving up arithmetic to be done
6016 if it's all in the wrong mode to form part of an address.
6017 And force_operand won't know whether to sign-extend or
6019 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6020 || mode != ptr_mode)
6023 preexpand_calls (exp);
6024 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6027 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
6028 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
6031 /* Make sure any term that's a sum with a constant comes last. */
6032 if (GET_CODE (op0) == PLUS
6033 && CONSTANT_P (XEXP (op0, 1)))
6039 /* If adding to a sum including a constant,
6040 associate it to put the constant outside. */
6041 if (GET_CODE (op1) == PLUS
6042 && CONSTANT_P (XEXP (op1, 1)))
6044 rtx constant_term = const0_rtx;
6046 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
6049 /* Ensure that MULT comes first if there is one. */
6050 else if (GET_CODE (op0) == MULT)
6051 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
6053 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
6055 /* Let's also eliminate constants from op0 if possible. */
6056 op0 = eliminate_constant_term (op0, &constant_term);
6058 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
6059 their sum should be a constant. Form it into OP1, since the
6060 result we want will then be OP0 + OP1. */
6062 temp = simplify_binary_operation (PLUS, mode, constant_term,
6067 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
6070 /* Put a constant term last and put a multiplication first. */
6071 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
6072 temp = op1, op1 = op0, op0 = temp;
6074 temp = simplify_binary_operation (PLUS, mode, op0, op1);
6075 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
6078 /* For initializers, we are allowed to return a MINUS of two
6079 symbolic constants. Here we handle all cases when both operands
6081 /* Handle difference of two symbolic constants,
6082 for the sake of an initializer. */
6083 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6084 && really_constant_p (TREE_OPERAND (exp, 0))
6085 && really_constant_p (TREE_OPERAND (exp, 1)))
6087 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
6088 VOIDmode, modifier);
6089 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6090 VOIDmode, modifier);
6092 /* If the last operand is a CONST_INT, use plus_constant of
6093 the negated constant. Else make the MINUS. */
6094 if (GET_CODE (op1) == CONST_INT)
6095 return plus_constant (op0, - INTVAL (op1));
6097 return gen_rtx (MINUS, mode, op0, op1);
6099 /* Convert A - const to A + (-const). */
6100 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6102 tree negated = fold (build1 (NEGATE_EXPR, type,
6103 TREE_OPERAND (exp, 1)));
6105 /* Deal with the case where we can't negate the constant
6107 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6109 tree newtype = signed_type (type);
6110 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
6111 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
6112 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
6114 if (! TREE_OVERFLOW (newneg))
6115 return expand_expr (convert (type,
6116 build (PLUS_EXPR, newtype,
6118 target, tmode, modifier);
6122 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
6126 this_optab = sub_optab;
6130 preexpand_calls (exp);
6131 /* If first operand is constant, swap them.
6132 Thus the following special case checks need only
6133 check the second operand. */
6134 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6136 register tree t1 = TREE_OPERAND (exp, 0);
6137 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
6138 TREE_OPERAND (exp, 1) = t1;
6141 /* Attempt to return something suitable for generating an
6142 indexed address, for machines that support that. */
6144 if (modifier == EXPAND_SUM && mode == ptr_mode
6145 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6146 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
6148 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
6150 /* Apply distributive law if OP0 is x+c. */
6151 if (GET_CODE (op0) == PLUS
6152 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
6153 return gen_rtx (PLUS, mode,
6154 gen_rtx (MULT, mode, XEXP (op0, 0),
6155 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
6156 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
6157 * INTVAL (XEXP (op0, 1))));
6159 if (GET_CODE (op0) != REG)
6160 op0 = force_operand (op0, NULL_RTX);
6161 if (GET_CODE (op0) != REG)
6162 op0 = copy_to_mode_reg (mode, op0);
6164 return gen_rtx (MULT, mode, op0,
6165 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
6168 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6171 /* Check for multiplying things that have been extended
6172 from a narrower type. If this machine supports multiplying
6173 in that narrower type with a result in the desired type,
6174 do it that way, and avoid the explicit type-conversion. */
6175 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
6176 && TREE_CODE (type) == INTEGER_TYPE
6177 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6178 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
6179 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6180 && int_fits_type_p (TREE_OPERAND (exp, 1),
6181 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6182 /* Don't use a widening multiply if a shift will do. */
6183 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
6184 > HOST_BITS_PER_WIDE_INT)
6185 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
6187 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6188 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6190 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
6191 /* If both operands are extended, they must either both
6192 be zero-extended or both be sign-extended. */
6193 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6195 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
6197 enum machine_mode innermode
6198 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
6199 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6200 ? smul_widen_optab : umul_widen_optab);
6201 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6202 ? umul_widen_optab : smul_widen_optab);
6203 if (mode == GET_MODE_WIDER_MODE (innermode))
6205 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
6207 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6208 NULL_RTX, VOIDmode, 0);
6209 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6210 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6213 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6214 NULL_RTX, VOIDmode, 0);
6217 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6218 && innermode == word_mode)
6221 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6222 NULL_RTX, VOIDmode, 0);
6223 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6224 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6227 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6228 NULL_RTX, VOIDmode, 0);
6229 temp = expand_binop (mode, other_optab, op0, op1, target,
6230 unsignedp, OPTAB_LIB_WIDEN);
6231 htem = expand_mult_highpart_adjust (innermode,
6232 gen_highpart (innermode, temp),
6234 gen_highpart (innermode, temp),
6236 emit_move_insn (gen_highpart (innermode, temp), htem);
6241 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6242 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6243 return expand_mult (mode, op0, op1, target, unsignedp);
6245 case TRUNC_DIV_EXPR:
6246 case FLOOR_DIV_EXPR:
6248 case ROUND_DIV_EXPR:
6249 case EXACT_DIV_EXPR:
6250 preexpand_calls (exp);
6251 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6253 /* Possible optimization: compute the dividend with EXPAND_SUM
6254 then if the divisor is constant can optimize the case
6255 where some terms of the dividend have coeffs divisible by it. */
6256 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6257 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6258 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
6261 this_optab = flodiv_optab;
6264 case TRUNC_MOD_EXPR:
6265 case FLOOR_MOD_EXPR:
6267 case ROUND_MOD_EXPR:
6268 preexpand_calls (exp);
6269 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6271 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6272 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6273 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
6275 case FIX_ROUND_EXPR:
6276 case FIX_FLOOR_EXPR:
6278 abort (); /* Not used for C. */
6280 case FIX_TRUNC_EXPR:
6281 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6283 target = gen_reg_rtx (mode);
6284 expand_fix (target, op0, unsignedp);
6288 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6290 target = gen_reg_rtx (mode);
6291 /* expand_float can't figure out what to do if FROM has VOIDmode.
6292 So give it the correct mode. With -O, cse will optimize this. */
6293 if (GET_MODE (op0) == VOIDmode)
6294 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6296 expand_float (target, op0,
6297 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6301 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6302 temp = expand_unop (mode, neg_optab, op0, target, 0);
6308 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6310 /* Handle complex values specially. */
6311 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
6312 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6313 return expand_complex_abs (mode, op0, target, unsignedp);
6315 /* Unsigned abs is simply the operand. Testing here means we don't
6316 risk generating incorrect code below. */
6317 if (TREE_UNSIGNED (type))
6320 return expand_abs (mode, op0, target, unsignedp,
6321 safe_from_p (target, TREE_OPERAND (exp, 0)));
6325 target = original_target;
6326 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
6327 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
6328 || GET_MODE (target) != mode
6329 || (GET_CODE (target) == REG
6330 && REGNO (target) < FIRST_PSEUDO_REGISTER))
6331 target = gen_reg_rtx (mode);
6332 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6333 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6335 /* First try to do it with a special MIN or MAX instruction.
6336 If that does not win, use a conditional jump to select the proper
6338 this_optab = (TREE_UNSIGNED (type)
6339 ? (code == MIN_EXPR ? umin_optab : umax_optab)
6340 : (code == MIN_EXPR ? smin_optab : smax_optab));
6342 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
6347 /* At this point, a MEM target is no longer useful; we will get better
6350 if (GET_CODE (target) == MEM)
6351 target = gen_reg_rtx (mode);
6354 emit_move_insn (target, op0);
6356 op0 = gen_label_rtx ();
6358 /* If this mode is an integer too wide to compare properly,
6359 compare word by word. Rely on cse to optimize constant cases. */
6360 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
6362 if (code == MAX_EXPR)
6363 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6364 target, op1, NULL_RTX, op0);
6366 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6367 op1, target, NULL_RTX, op0);
6368 emit_move_insn (target, op1);
6372 if (code == MAX_EXPR)
6373 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6374 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
6375 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
6377 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6378 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
6379 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
6380 if (temp == const0_rtx)
6381 emit_move_insn (target, op1);
6382 else if (temp != const_true_rtx)
6384 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
6385 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
6388 emit_move_insn (target, op1);
6395 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6396 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
6402 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6403 temp = expand_unop (mode, ffs_optab, op0, target, 1);
6408 /* ??? Can optimize bitwise operations with one arg constant.
6409 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6410 and (a bitwise1 b) bitwise2 b (etc)
6411 but that is probably not worth while. */
6413 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6414 boolean values when we want in all cases to compute both of them. In
6415 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6416 as actual zero-or-1 values and then bitwise anding. In cases where
6417 there cannot be any side effects, better code would be made by
6418 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6419 how to recognize those cases. */
6421 case TRUTH_AND_EXPR:
6423 this_optab = and_optab;
6428 this_optab = ior_optab;
6431 case TRUTH_XOR_EXPR:
6433 this_optab = xor_optab;
6440 preexpand_calls (exp);
6441 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6443 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6444 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
6447 /* Could determine the answer when only additive constants differ. Also,
6448 the addition of one can be handled by changing the condition. */
6455 preexpand_calls (exp);
6456 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
6460 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
6461 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
6463 && GET_CODE (original_target) == REG
6464 && (GET_MODE (original_target)
6465 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6467 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
6470 if (temp != original_target)
6471 temp = copy_to_reg (temp);
6473 op1 = gen_label_rtx ();
6474 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
6475 GET_MODE (temp), unsignedp, 0);
6476 emit_jump_insn (gen_beq (op1));
6477 emit_move_insn (temp, const1_rtx);
6482 /* If no set-flag instruction, must generate a conditional
6483 store into a temporary variable. Drop through
6484 and handle this like && and ||. */
6486 case TRUTH_ANDIF_EXPR:
6487 case TRUTH_ORIF_EXPR:
6489 && (target == 0 || ! safe_from_p (target, exp)
6490 /* Make sure we don't have a hard reg (such as function's return
6491 value) live across basic blocks, if not optimizing. */
6492 || (!optimize && GET_CODE (target) == REG
6493 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
6494 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6497 emit_clr_insn (target);
6499 op1 = gen_label_rtx ();
6500 jumpifnot (exp, op1);
6503 emit_0_to_1_insn (target);
6506 return ignore ? const0_rtx : target;
6508 case TRUTH_NOT_EXPR:
6509 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6510 /* The parser is careful to generate TRUTH_NOT_EXPR
6511 only with operands that are always zero or one. */
6512 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
6513 target, 1, OPTAB_LIB_WIDEN);
6519 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6521 return expand_expr (TREE_OPERAND (exp, 1),
6522 (ignore ? const0_rtx : target),
6526 /* If we would have a "singleton" (see below) were it not for a
6527 conversion in each arm, bring that conversion back out. */
6528 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6529 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
6530 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
6531 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
6533 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
6534 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
6536 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
6537 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6538 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
6539 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
6540 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
6541 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6542 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
6543 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
6544 return expand_expr (build1 (NOP_EXPR, type,
6545 build (COND_EXPR, TREE_TYPE (true),
6546 TREE_OPERAND (exp, 0),
6548 target, tmode, modifier);
6552 rtx flag = NULL_RTX;
6553 tree left_cleanups = NULL_TREE;
6554 tree right_cleanups = NULL_TREE;
6556 /* Used to save a pointer to the place to put the setting of
6557 the flag that indicates if this side of the conditional was
6558 taken. We backpatch the code, if we find out later that we
6559 have any conditional cleanups that need to be performed. */
6560 rtx dest_right_flag = NULL_RTX;
6561 rtx dest_left_flag = NULL_RTX;
6563 /* Note that COND_EXPRs whose type is a structure or union
6564 are required to be constructed to contain assignments of
6565 a temporary variable, so that we can evaluate them here
6566 for side effect only. If type is void, we must do likewise. */
6568 /* If an arm of the branch requires a cleanup,
6569 only that cleanup is performed. */
6572 tree binary_op = 0, unary_op = 0;
6573 tree old_cleanups = cleanups_this_call;
6575 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
6576 convert it to our mode, if necessary. */
6577 if (integer_onep (TREE_OPERAND (exp, 1))
6578 && integer_zerop (TREE_OPERAND (exp, 2))
6579 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6583 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6588 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
6589 if (GET_MODE (op0) == mode)
6593 target = gen_reg_rtx (mode);
6594 convert_move (target, op0, unsignedp);
6598 /* Check for X ? A + B : A. If we have this, we can copy A to the
6599 output and conditionally add B. Similarly for unary operations.
6600 Don't do this if X has side-effects because those side effects
6601 might affect A or B and the "?" operation is a sequence point in
6602 ANSI. (operand_equal_p tests for side effects.) */
6604 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
6605 && operand_equal_p (TREE_OPERAND (exp, 2),
6606 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6607 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
6608 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
6609 && operand_equal_p (TREE_OPERAND (exp, 1),
6610 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6611 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
6612 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
6613 && operand_equal_p (TREE_OPERAND (exp, 2),
6614 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6615 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
6616 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
6617 && operand_equal_p (TREE_OPERAND (exp, 1),
6618 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6619 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
6621 /* If we are not to produce a result, we have no target. Otherwise,
6622 if a target was specified use it; it will not be used as an
6623 intermediate target unless it is safe. If no target, use a
6628 else if (original_target
6629 && (safe_from_p (original_target, TREE_OPERAND (exp, 0))
6630 || (singleton && GET_CODE (original_target) == REG
6631 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
6632 && original_target == var_rtx (singleton)))
6633 && GET_MODE (original_target) == mode
6634 && ! (GET_CODE (original_target) == MEM
6635 && MEM_VOLATILE_P (original_target)))
6636 temp = original_target;
6637 else if (TREE_ADDRESSABLE (type))
6640 temp = assign_temp (type, 0, 0, 1);
6642 /* If we had X ? A + C : A, with C a constant power of 2, and we can
6643 do the test of X as a store-flag operation, do this as
6644 A + ((X != 0) << log C). Similarly for other simple binary
6645 operators. Only do for C == 1 if BRANCH_COST is low. */
6646 if (temp && singleton && binary_op
6647 && (TREE_CODE (binary_op) == PLUS_EXPR
6648 || TREE_CODE (binary_op) == MINUS_EXPR
6649 || TREE_CODE (binary_op) == BIT_IOR_EXPR
6650 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
6651 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
6652 : integer_onep (TREE_OPERAND (binary_op, 1)))
6653 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6656 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
6657 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
6658 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
6661 /* If we had X ? A : A + 1, do this as A + (X == 0).
6663 We have to invert the truth value here and then put it
6664 back later if do_store_flag fails. We cannot simply copy
6665 TREE_OPERAND (exp, 0) to another variable and modify that
6666 because invert_truthvalue can modify the tree pointed to
6668 if (singleton == TREE_OPERAND (exp, 1))
6669 TREE_OPERAND (exp, 0)
6670 = invert_truthvalue (TREE_OPERAND (exp, 0));
6672 result = do_store_flag (TREE_OPERAND (exp, 0),
6673 (safe_from_p (temp, singleton)
6675 mode, BRANCH_COST <= 1);
6677 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
6678 result = expand_shift (LSHIFT_EXPR, mode, result,
6679 build_int_2 (tree_log2
6683 (safe_from_p (temp, singleton)
6684 ? temp : NULL_RTX), 0);
6688 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
6689 return expand_binop (mode, boptab, op1, result, temp,
6690 unsignedp, OPTAB_LIB_WIDEN);
6692 else if (singleton == TREE_OPERAND (exp, 1))
6693 TREE_OPERAND (exp, 0)
6694 = invert_truthvalue (TREE_OPERAND (exp, 0));
6697 do_pending_stack_adjust ();
6699 op0 = gen_label_rtx ();
6701 flag = gen_reg_rtx (word_mode);
6702 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
6706 /* If the target conflicts with the other operand of the
6707 binary op, we can't use it. Also, we can't use the target
6708 if it is a hard register, because evaluating the condition
6709 might clobber it. */
6711 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
6712 || (GET_CODE (temp) == REG
6713 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
6714 temp = gen_reg_rtx (mode);
6715 store_expr (singleton, temp, 0);
6718 expand_expr (singleton,
6719 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6720 dest_left_flag = get_last_insn ();
6721 if (singleton == TREE_OPERAND (exp, 1))
6722 jumpif (TREE_OPERAND (exp, 0), op0);
6724 jumpifnot (TREE_OPERAND (exp, 0), op0);
6726 /* Allows cleanups up to here. */
6727 old_cleanups = cleanups_this_call;
6728 if (binary_op && temp == 0)
6729 /* Just touch the other operand. */
6730 expand_expr (TREE_OPERAND (binary_op, 1),
6731 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6733 store_expr (build (TREE_CODE (binary_op), type,
6734 make_tree (type, temp),
6735 TREE_OPERAND (binary_op, 1)),
6738 store_expr (build1 (TREE_CODE (unary_op), type,
6739 make_tree (type, temp)),
6742 dest_right_flag = get_last_insn ();
6745 /* This is now done in jump.c and is better done there because it
6746 produces shorter register lifetimes. */
6748 /* Check for both possibilities either constants or variables
6749 in registers (but not the same as the target!). If so, can
6750 save branches by assigning one, branching, and assigning the
6752 else if (temp && GET_MODE (temp) != BLKmode
6753 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
6754 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
6755 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
6756 && DECL_RTL (TREE_OPERAND (exp, 1))
6757 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
6758 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
6759 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
6760 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
6761 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
6762 && DECL_RTL (TREE_OPERAND (exp, 2))
6763 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
6764 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
6766 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6767 temp = gen_reg_rtx (mode);
6768 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6769 dest_left_flag = get_last_insn ();
6770 jumpifnot (TREE_OPERAND (exp, 0), op0);
6772 /* Allows cleanups up to here. */
6773 old_cleanups = cleanups_this_call;
6774 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6776 dest_right_flag = get_last_insn ();
6779 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
6780 comparison operator. If we have one of these cases, set the
6781 output to A, branch on A (cse will merge these two references),
6782 then set the output to FOO. */
6784 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6785 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6786 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6787 TREE_OPERAND (exp, 1), 0)
6788 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6789 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
6791 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6792 temp = gen_reg_rtx (mode);
6793 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6794 dest_left_flag = get_last_insn ();
6795 jumpif (TREE_OPERAND (exp, 0), op0);
6797 /* Allows cleanups up to here. */
6798 old_cleanups = cleanups_this_call;
6799 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6801 dest_right_flag = get_last_insn ();
6804 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6805 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6806 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6807 TREE_OPERAND (exp, 2), 0)
6808 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6809 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
6811 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6812 temp = gen_reg_rtx (mode);
6813 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6814 dest_left_flag = get_last_insn ();
6815 jumpifnot (TREE_OPERAND (exp, 0), op0);
6817 /* Allows cleanups up to here. */
6818 old_cleanups = cleanups_this_call;
6819 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6821 dest_right_flag = get_last_insn ();
6825 op1 = gen_label_rtx ();
6826 jumpifnot (TREE_OPERAND (exp, 0), op0);
6828 /* Allows cleanups up to here. */
6829 old_cleanups = cleanups_this_call;
6831 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6833 expand_expr (TREE_OPERAND (exp, 1),
6834 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6835 dest_left_flag = get_last_insn ();
6837 /* Handle conditional cleanups, if any. */
6838 left_cleanups = defer_cleanups_to (old_cleanups);
6841 emit_jump_insn (gen_jump (op1));
6845 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6847 expand_expr (TREE_OPERAND (exp, 2),
6848 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6849 dest_right_flag = get_last_insn ();
6852 /* Handle conditional cleanups, if any. */
6853 right_cleanups = defer_cleanups_to (old_cleanups);
6859 /* Add back in, any conditional cleanups. */
6860 if (left_cleanups || right_cleanups)
6866 /* Now that we know that a flag is needed, go back and add in the
6867 setting of the flag. */
6869 /* Do the left side flag. */
6870 last = get_last_insn ();
6871 /* Flag left cleanups as needed. */
6872 emit_move_insn (flag, const1_rtx);
6873 /* ??? deprecated, use sequences instead. */
6874 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
6876 /* Do the right side flag. */
6877 last = get_last_insn ();
6878 /* Flag left cleanups as needed. */
6879 emit_move_insn (flag, const0_rtx);
6880 /* ??? deprecated, use sequences instead. */
6881 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
6883 /* All cleanups must be on the function_obstack. */
6884 push_obstacks_nochange ();
6885 resume_temporary_allocation ();
6887 /* convert flag, which is an rtx, into a tree. */
6888 cond = make_node (RTL_EXPR);
6889 TREE_TYPE (cond) = integer_type_node;
6890 RTL_EXPR_RTL (cond) = flag;
6891 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
6892 cond = save_expr (cond);
6894 if (! left_cleanups)
6895 left_cleanups = integer_zero_node;
6896 if (! right_cleanups)
6897 right_cleanups = integer_zero_node;
6898 new_cleanups = build (COND_EXPR, void_type_node,
6899 truthvalue_conversion (cond),
6900 left_cleanups, right_cleanups);
6901 new_cleanups = fold (new_cleanups);
6905 /* Now add in the conditionalized cleanups. */
6907 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
6908 expand_eh_region_start ();
6915 /* Something needs to be initialized, but we didn't know
6916 where that thing was when building the tree. For example,
6917 it could be the return value of a function, or a parameter
6918 to a function which lays down in the stack, or a temporary
6919 variable which must be passed by reference.
6921 We guarantee that the expression will either be constructed
6922 or copied into our original target. */
6924 tree slot = TREE_OPERAND (exp, 0);
6925 tree cleanups = NULL_TREE;
6929 if (TREE_CODE (slot) != VAR_DECL)
6933 target = original_target;
6937 if (DECL_RTL (slot) != 0)
6939 target = DECL_RTL (slot);
6940 /* If we have already expanded the slot, so don't do
6942 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6947 target = assign_temp (type, 2, 1, 1);
6948 /* All temp slots at this level must not conflict. */
6949 preserve_temp_slots (target);
6950 DECL_RTL (slot) = target;
6952 /* Since SLOT is not known to the called function
6953 to belong to its stack frame, we must build an explicit
6954 cleanup. This case occurs when we must build up a reference
6955 to pass the reference as an argument. In this case,
6956 it is very likely that such a reference need not be
6959 if (TREE_OPERAND (exp, 2) == 0)
6960 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
6961 cleanups = TREE_OPERAND (exp, 2);
6966 /* This case does occur, when expanding a parameter which
6967 needs to be constructed on the stack. The target
6968 is the actual stack address that we want to initialize.
6969 The function we call will perform the cleanup in this case. */
6971 /* If we have already assigned it space, use that space,
6972 not target that we were passed in, as our target
6973 parameter is only a hint. */
6974 if (DECL_RTL (slot) != 0)
6976 target = DECL_RTL (slot);
6977 /* If we have already expanded the slot, so don't do
6979 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6983 DECL_RTL (slot) = target;
6986 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
6987 /* Mark it as expanded. */
6988 TREE_OPERAND (exp, 1) = NULL_TREE;
6990 store_expr (exp1, target, 0);
6994 cleanups_this_call = tree_cons (NULL_TREE,
6996 cleanups_this_call);
6997 expand_eh_region_start ();
7005 tree lhs = TREE_OPERAND (exp, 0);
7006 tree rhs = TREE_OPERAND (exp, 1);
7007 tree noncopied_parts = 0;
7008 tree lhs_type = TREE_TYPE (lhs);
7010 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7011 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7012 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7013 TYPE_NONCOPIED_PARTS (lhs_type));
7014 while (noncopied_parts != 0)
7016 expand_assignment (TREE_VALUE (noncopied_parts),
7017 TREE_PURPOSE (noncopied_parts), 0, 0);
7018 noncopied_parts = TREE_CHAIN (noncopied_parts);
7025 /* If lhs is complex, expand calls in rhs before computing it.
7026 That's so we don't compute a pointer and save it over a call.
7027 If lhs is simple, compute it first so we can give it as a
7028 target if the rhs is just a call. This avoids an extra temp and copy
7029 and that prevents a partial-subsumption which makes bad code.
7030 Actually we could treat component_ref's of vars like vars. */
7032 tree lhs = TREE_OPERAND (exp, 0);
7033 tree rhs = TREE_OPERAND (exp, 1);
7034 tree noncopied_parts = 0;
7035 tree lhs_type = TREE_TYPE (lhs);
7039 if (TREE_CODE (lhs) != VAR_DECL
7040 && TREE_CODE (lhs) != RESULT_DECL
7041 && TREE_CODE (lhs) != PARM_DECL)
7042 preexpand_calls (exp);
7044 /* Check for |= or &= of a bitfield of size one into another bitfield
7045 of size 1. In this case, (unless we need the result of the
7046 assignment) we can do this more efficiently with a
7047 test followed by an assignment, if necessary.
7049 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7050 things change so we do, this code should be enhanced to
7053 && TREE_CODE (lhs) == COMPONENT_REF
7054 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7055 || TREE_CODE (rhs) == BIT_AND_EXPR)
7056 && TREE_OPERAND (rhs, 0) == lhs
7057 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7058 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7059 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7061 rtx label = gen_label_rtx ();
7063 do_jump (TREE_OPERAND (rhs, 1),
7064 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7065 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7066 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7067 (TREE_CODE (rhs) == BIT_IOR_EXPR
7069 : integer_zero_node)),
7071 do_pending_stack_adjust ();
7076 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7077 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7078 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7079 TYPE_NONCOPIED_PARTS (lhs_type));
7081 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7082 while (noncopied_parts != 0)
7084 expand_assignment (TREE_PURPOSE (noncopied_parts),
7085 TREE_VALUE (noncopied_parts), 0, 0);
7086 noncopied_parts = TREE_CHAIN (noncopied_parts);
7091 case PREINCREMENT_EXPR:
7092 case PREDECREMENT_EXPR:
7093 return expand_increment (exp, 0, ignore);
7095 case POSTINCREMENT_EXPR:
7096 case POSTDECREMENT_EXPR:
7097 /* Faster to treat as pre-increment if result is not used. */
7098 return expand_increment (exp, ! ignore, ignore);
7101 /* If nonzero, TEMP will be set to the address of something that might
7102 be a MEM corresponding to a stack slot. */
7105 /* Are we taking the address of a nested function? */
7106 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
7107 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
7108 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0)))
7110 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7111 op0 = force_operand (op0, target);
7113 /* If we are taking the address of something erroneous, just
7115 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7119 /* We make sure to pass const0_rtx down if we came in with
7120 ignore set, to avoid doing the cleanups twice for something. */
7121 op0 = expand_expr (TREE_OPERAND (exp, 0),
7122 ignore ? const0_rtx : NULL_RTX, VOIDmode,
7123 (modifier == EXPAND_INITIALIZER
7124 ? modifier : EXPAND_CONST_ADDRESS));
7126 /* If we are going to ignore the result, OP0 will have been set
7127 to const0_rtx, so just return it. Don't get confused and
7128 think we are taking the address of the constant. */
7132 op0 = protect_from_queue (op0, 0);
7134 /* We would like the object in memory. If it is a constant,
7135 we can have it be statically allocated into memory. For
7136 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
7137 memory and store the value into it. */
7139 if (CONSTANT_P (op0))
7140 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7142 else if (GET_CODE (op0) == MEM)
7144 mark_temp_addr_taken (op0);
7145 temp = XEXP (op0, 0);
7148 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7149 || GET_CODE (op0) == CONCAT)
7151 /* If this object is in a register, it must be not
7153 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7154 rtx memloc = assign_temp (inner_type, 1, 1, 1);
7156 mark_temp_addr_taken (memloc);
7157 emit_move_insn (memloc, op0);
7161 if (GET_CODE (op0) != MEM)
7164 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7166 temp = XEXP (op0, 0);
7167 #ifdef POINTERS_EXTEND_UNSIGNED
7168 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7169 && mode == ptr_mode)
7170 temp = convert_memory_address (ptr_mode, temp);
7175 op0 = force_operand (XEXP (op0, 0), target);
7178 if (flag_force_addr && GET_CODE (op0) != REG)
7179 op0 = force_reg (Pmode, op0);
7181 if (GET_CODE (op0) == REG
7182 && ! REG_USERVAR_P (op0))
7183 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
7185 /* If we might have had a temp slot, add an equivalent address
7188 update_temp_slot_address (temp, op0);
7190 #ifdef POINTERS_EXTEND_UNSIGNED
7191 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7192 && mode == ptr_mode)
7193 op0 = convert_memory_address (ptr_mode, op0);
7198 case ENTRY_VALUE_EXPR:
7201 /* COMPLEX type for Extended Pascal & Fortran */
7204 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7207 /* Get the rtx code of the operands. */
7208 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7209 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7212 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7216 /* Move the real (op0) and imaginary (op1) parts to their location. */
7217 emit_move_insn (gen_realpart (mode, target), op0);
7218 emit_move_insn (gen_imagpart (mode, target), op1);
7220 insns = get_insns ();
7223 /* Complex construction should appear as a single unit. */
7224 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7225 each with a separate pseudo as destination.
7226 It's not correct for flow to treat them as a unit. */
7227 if (GET_CODE (target) != CONCAT)
7228 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
7236 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7237 return gen_realpart (mode, op0);
7240 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7241 return gen_imagpart (mode, op0);
7245 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7249 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7252 target = gen_reg_rtx (mode);
7256 /* Store the realpart and the negated imagpart to target. */
7257 emit_move_insn (gen_realpart (partmode, target),
7258 gen_realpart (partmode, op0));
7260 imag_t = gen_imagpart (partmode, target);
7261 temp = expand_unop (partmode, neg_optab,
7262 gen_imagpart (partmode, op0), imag_t, 0);
7264 emit_move_insn (imag_t, temp);
7266 insns = get_insns ();
7269 /* Conjugate should appear as a single unit
7270 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
7271 each with a separate pseudo as destination.
7272 It's not correct for flow to treat them as a unit. */
7273 if (GET_CODE (target) != CONCAT)
7274 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
7282 op0 = CONST0_RTX (tmode);
7288 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7291 /* Here to do an ordinary binary operator, generating an instruction
7292 from the optab already placed in `this_optab'. */
7294 preexpand_calls (exp);
7295 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
7297 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7298 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7300 temp = expand_binop (mode, this_optab, op0, op1, target,
7301 unsignedp, OPTAB_LIB_WIDEN);
7308 /* Emit bytecode to evaluate the given expression EXP to the stack. */
/* Bytecode counterpart of expand_expr: dispatches on TREE_CODE (EXP)
   and emits stack-machine bytecode via the bc_* emitters, leaving the
   value of EXP on the interpreter's evaluation stack.  */
7311 bc_expand_expr (exp)
7314 enum tree_code code;
7317 struct binary_operator *binoptab;
7318 struct unary_operator *unoptab;
7319 struct increment_operator *incroptab;
7320 struct bc_label *lab, *lab1;
7321 enum bytecode_opcode opcode;
7324 code = TREE_CODE (exp);
/* Parameter reference: a null DECL_RTL here is the error case where a
   prior parameter's size depended on this one; otherwise push the
   parameter's address and fetch its value.  */
7330 if (DECL_RTL (exp) == 0)
7332 error_with_decl (exp, "prior parameter's size depends on `%s'");
7336 bc_load_parmaddr (DECL_RTL (exp));
7337 bc_load_memory (TREE_TYPE (exp), exp);
/* Declaration reference: push the object's address -- external when it
   has a bytecode label or is public, local otherwise -- then load the
   value stored there.  */
7343 if (DECL_RTL (exp) == 0)
7347 if (BYTECODE_LABEL (DECL_RTL (exp)))
7348 bc_load_externaddr (DECL_RTL (exp));
7350 bc_load_localaddr (DECL_RTL (exp));
7352 if (TREE_PUBLIC (exp))
7353 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
7354 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
7356 bc_load_localaddr (DECL_RTL (exp));
7358 bc_load_memory (TREE_TYPE (exp), exp);
/* Integer constant: emit the push-constant opcode selected by the
   constant's machine mode.  */
7363 #ifdef DEBUG_PRINT_CODE
7364 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
7366 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
7368 : TYPE_MODE (TREE_TYPE (exp)))],
7369 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
/* Real constant: same idea for floating-point values.  */
7375 #ifdef DEBUG_PRINT_CODE
7376 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
7378 /* FIX THIS: find a better way to pass real_cst's. -bson */
7379 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
7380 (double) TREE_REAL_CST (exp));
7389 /* We build a call description vector describing the type of
7390 the return value and of the arguments; this call vector,
7391 together with a pointer to a location for the return value
7392 and the base of the argument list, is passed to the low
7393 level machine dependent call subroutine, which is responsible
7394 for putting the arguments wherever real functions expect
7395 them, as well as getting the return value back. */
7397 tree calldesc = 0, arg;
7401 /* Push the evaluated args on the evaluation stack in reverse
7402 order. Also make an entry for each arg in the calldesc
7403 vector while we're at it. */
7405 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7407 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
7410 bc_expand_expr (TREE_VALUE (arg));
7412 calldesc = tree_cons ((tree) 0,
7413 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
7415 calldesc = tree_cons ((tree) 0,
7416 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
/* Undo the reversal so the argument list tree is left unmodified.  */
7420 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7422 /* Allocate a location for the return value and push its
7423 address on the evaluation stack. Also make an entry
7424 at the front of the calldesc for the return value type. */
7426 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7427 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
7428 bc_load_localaddr (retval);
7430 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
7431 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
7433 /* Prepend the argument count. */
7434 calldesc = tree_cons ((tree) 0,
7435 build_int_2 (nargs, 0),
7438 /* Push the address of the call description vector on the stack. */
7439 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
7440 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
7441 build_index_type (build_int_2 (nargs * 2, 0)));
7442 r = output_constant_def (calldesc);
7443 bc_load_externaddr (r);
7445 /* Push the address of the function to be called. */
7446 bc_expand_expr (TREE_OPERAND (exp, 0));
7448 /* Call the function, popping its address and the calldesc vector
7449 address off the evaluation stack in the process. */
7450 bc_emit_instruction (call);
7452 /* Pop the arguments off the stack. */
7453 bc_adjust_stack (nargs);
7455 /* Load the return value onto the stack. */
7456 bc_load_localaddr (retval);
7457 bc_load_memory (type, TREE_OPERAND (exp, 0));
/* SAVE_EXPR: evaluate once, cache the value in a bytecode local, and
   reuse the cached copy on subsequent references.  */
7463 if (!SAVE_EXPR_RTL (exp))
7465 /* First time around: copy to local variable */
7466 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
7467 TYPE_ALIGN (TREE_TYPE(exp)));
7468 bc_expand_expr (TREE_OPERAND (exp, 0));
/* Duplicate so one copy is stored and one stays as the result.  */
7469 bc_emit_instruction (duplicate);
7471 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7472 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7476 /* Consecutive reference: use saved copy */
7477 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7478 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7483 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
7484 how are they handled instead? */
7487 TREE_USED (exp) = 1;
7488 bc_expand_expr (STMT_BODY (exp));
/* Conversion: evaluate the operand, then emit any needed conversion
   between the operand's type and EXP's type.  */
7495 bc_expand_expr (TREE_OPERAND (exp, 0));
7496 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
7501 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
7506 bc_expand_address (TREE_OPERAND (exp, 0));
/* Indirection: push the pointer value, then fetch what it points to.  */
7511 bc_expand_expr (TREE_OPERAND (exp, 0));
7512 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
/* Array reference: canonicalize to pointer arithmetic and re-expand.  */
7517 bc_expand_expr (bc_canonicalize_array_ref (exp));
7522 bc_expand_component_address (exp);
7524 /* If we have a bitfield, generate a proper load */
7525 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
/* Comma operator: evaluate and discard operand 0, then evaluate
   operand 1 for its value.  */
7530 bc_expand_expr (TREE_OPERAND (exp, 0));
7531 bc_emit_instruction (drop);
7532 bc_expand_expr (TREE_OPERAND (exp, 1));
/* Conditional expression: branch to LAB (the else arm) when the
   condition is false; the then arm ends with a jump past the else
   arm to LAB1.  */
7537 bc_expand_expr (TREE_OPERAND (exp, 0));
7538 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7539 lab = bc_get_bytecode_label ();
7540 bc_emit_bytecode (xjumpifnot);
7541 bc_emit_bytecode_labelref (lab);
7543 #ifdef DEBUG_PRINT_CODE
7544 fputc ('\n', stderr);
7546 bc_expand_expr (TREE_OPERAND (exp, 1));
7547 lab1 = bc_get_bytecode_label ();
7548 bc_emit_bytecode (jump);
7549 bc_emit_bytecode_labelref (lab1);
7551 #ifdef DEBUG_PRINT_CODE
7552 fputc ('\n', stderr);
7555 bc_emit_bytecode_labeldef (lab);
7556 bc_expand_expr (TREE_OPERAND (exp, 2));
7557 bc_emit_bytecode_labeldef (lab1);
/* Short-circuit && / ||: select the conditional-jump opcode here;
   the shared expansion code appears further down.  */
7560 case TRUTH_ANDIF_EXPR:
7562 opcode = xjumpifnot;
7565 case TRUTH_ORIF_EXPR:
/* Binary operators: record the operator-table entry; the actual
   expansion happens at the shared binary-operation code below.  */
7572 binoptab = optab_plus_expr;
7577 binoptab = optab_minus_expr;
7582 binoptab = optab_mult_expr;
7585 case TRUNC_DIV_EXPR:
7586 case FLOOR_DIV_EXPR:
7588 case ROUND_DIV_EXPR:
7589 case EXACT_DIV_EXPR:
/* All division variants are emitted as truncating division; the
   rounding variants are not generated for C (see the aborts below).  */
7591 binoptab = optab_trunc_div_expr;
7594 case TRUNC_MOD_EXPR:
7595 case FLOOR_MOD_EXPR:
7597 case ROUND_MOD_EXPR:
7599 binoptab = optab_trunc_mod_expr;
7602 case FIX_ROUND_EXPR:
7603 case FIX_FLOOR_EXPR:
7605 abort (); /* Not used for C. */
7607 case FIX_TRUNC_EXPR:
7614 abort (); /* FIXME */
7618 binoptab = optab_rdiv_expr;
7623 binoptab = optab_bit_and_expr;
7628 binoptab = optab_bit_ior_expr;
7633 binoptab = optab_bit_xor_expr;
7638 binoptab = optab_lshift_expr;
7643 binoptab = optab_rshift_expr;
7646 case TRUTH_AND_EXPR:
7648 binoptab = optab_truth_and_expr;
7653 binoptab = optab_truth_or_expr;
7658 binoptab = optab_lt_expr;
7663 binoptab = optab_le_expr;
7668 binoptab = optab_ge_expr;
7673 binoptab = optab_gt_expr;
7678 binoptab = optab_eq_expr;
7683 binoptab = optab_ne_expr;
/* Unary operators, expanded at the shared unary-operation code below.  */
7688 unoptab = optab_negate_expr;
7693 unoptab = optab_bit_not_expr;
7696 case TRUTH_NOT_EXPR:
7698 unoptab = optab_truth_not_expr;
/* Increment/decrement operators, expanded at the shared code below.  */
7701 case PREDECREMENT_EXPR:
7703 incroptab = optab_predecrement_expr;
7706 case PREINCREMENT_EXPR:
7708 incroptab = optab_preincrement_expr;
7711 case POSTDECREMENT_EXPR:
7713 incroptab = optab_postdecrement_expr;
7716 case POSTINCREMENT_EXPR:
7718 incroptab = optab_postincrement_expr;
7723 bc_expand_constructor (exp);
/* Block (bind) expression: open a binding contour, expand the local
   decls and their initializers, expand the body, close the contour.  */
7733 tree vars = TREE_OPERAND (exp, 0);
7734 int vars_need_expansion = 0;
7736 /* Need to open a binding contour here because
7737 if there are any cleanups they must be contained here. */
7738 expand_start_bindings (0);
7740 /* Mark the corresponding BLOCK for output. */
7741 if (TREE_OPERAND (exp, 2) != 0)
7742 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
7744 /* If VARS have not yet been expanded, expand them now. */
7747 if (DECL_RTL (vars) == 0)
7749 vars_need_expansion = 1;
7752 expand_decl_init (vars);
7753 vars = TREE_CHAIN (vars);
7756 bc_expand_expr (TREE_OPERAND (exp, 1));
7758 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
/* Shared tail for binary and unary operators: dispatch through the
   operator tables selected in the cases above.  */
7768 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
7769 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
7775 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
/* Shared tail for TRUTH_ANDIF/TRUTH_ORIF: evaluate operand 0 as a
   truth value, duplicate it, and conditionally skip operand 1 via
   OPCODE; if not skipped, the duplicate is dropped and operand 1
   supplies the overall truth value.  */
7781 bc_expand_expr (TREE_OPERAND (exp, 0));
7782 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7783 lab = bc_get_bytecode_label ();
7785 bc_emit_instruction (duplicate);
7786 bc_emit_bytecode (opcode);
7787 bc_emit_bytecode_labelref (lab);
7789 #ifdef DEBUG_PRINT_CODE
7790 fputc ('\n', stderr);
7793 bc_emit_instruction (drop);
7795 bc_expand_expr (TREE_OPERAND (exp, 1));
7796 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
7797 bc_emit_bytecode_labeldef (lab);
/* Shared tail for the increment/decrement operators.  */
7803 type = TREE_TYPE (TREE_OPERAND (exp, 0));
7805 /* Push the quantum. */
7806 bc_expand_expr (TREE_OPERAND (exp, 1));
7808 /* Convert it to the lvalue's type. */
7809 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
7811 /* Push the address of the lvalue */
7812 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
7814 /* Perform actual increment */
7815 bc_expand_increment (incroptab, type);
7819 /* Return the alignment in bits of EXP, a pointer valued expression.
7820 But don't return more than MAX_ALIGN no matter what.
7821 The alignment returned is, by default, the alignment of the thing that
7822 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7824 Otherwise, look at the expression to see if we can do better, i.e., if the
7825 expression is actually pointing at an object whose alignment is tighter. */
7828 get_pointer_alignment (exp, max_align)
7832 unsigned align, inner;
7834 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
/* Default: the declared alignment of the pointed-to type, capped.  */
7837 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7838 align = MIN (align, max_align);
/* Walk into the expression, tightening ALIGN whenever we can prove
   more about what EXP actually points at.  */
7842 switch (TREE_CODE (exp))
7846 case NON_LVALUE_EXPR:
/* Look through no-op wrappers/conversions, but a conversion to a
   non-pointer type ends the walk.  */
7847 exp = TREE_OPERAND (exp, 0);
7848 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7850 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7851 align = MIN (inner, max_align);
7855 /* If sum of pointer + int, restrict our maximum alignment to that
7856 imposed by the integer. If not, we can't do any better than
7858 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
/* The largest power of two dividing the constant byte offset bounds
   the achievable alignment.  */
7861 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
7866 exp = TREE_OPERAND (exp, 0);
7870 /* See what we are pointing at and look at its alignment. */
7871 exp = TREE_OPERAND (exp, 0);
7872 if (TREE_CODE (exp) == FUNCTION_DECL)
7873 align = FUNCTION_BOUNDARY;
7874 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
7875 align = DECL_ALIGN (exp);
7876 #ifdef CONSTANT_ALIGNMENT
7877 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7878 align = CONSTANT_ALIGNMENT (exp, align);
7880 return MIN (align, max_align);
7888 /* Return the tree node and offset if a given argument corresponds to
7889 a string constant. */
/* On success, the STRING_CST node is returned and *PTR_OFFSET is set
   to the (tree) byte offset into it; the PLUS_EXPR cases handle
   "&string + constant" addressing.  */
7892 string_constant (arg, ptr_offset)
/* Direct address of a string literal: offset is zero.  */
7898 if (TREE_CODE (arg) == ADDR_EXPR
7899 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7901 *ptr_offset = integer_zero_node;
7902 return TREE_OPERAND (arg, 0);
7904 else if (TREE_CODE (arg) == PLUS_EXPR)
7906 tree arg0 = TREE_OPERAND (arg, 0);
7907 tree arg1 = TREE_OPERAND (arg, 1);
/* Either operand of the PLUS may carry the string's address.
   NOTE(review): the stores to *PTR_OFFSET in these branches are not
   visible in this chunk -- presumably set from the other operand.  */
7912 if (TREE_CODE (arg0) == ADDR_EXPR
7913 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
7916 return TREE_OPERAND (arg0, 0);
7918 else if (TREE_CODE (arg1) == ADDR_EXPR
7919 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
7922 return TREE_OPERAND (arg1, 0);
7929 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
7930 way, because it could contain a zero byte in the middle.
7931 TREE_STRING_LENGTH is the size of the character array, not the string.
7933 Unfortunately, string_constant can't access the values of const char
7934 arrays with initializers, so neither can we do so here. */
/* NOTE(review): the function header is not visible in this chunk.
   From the body: SRC is the string argument, and the result is a tree
   size node giving the compile-time strlen, with early (elided)
   bail-outs when the length cannot be determined statically.  */
7944 src = string_constant (src, &offset_node);
/* MAX is the size of the character array; PTR its contents.  */
7947 max = TREE_STRING_LENGTH (src);
7948 ptr = TREE_STRING_POINTER (src);
7949 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
7951 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7952 compute the offset to the following null if we don't know where to
7953 start searching for it. */
/* Scan for an embedded NUL (the test inside the loop is elided here).  */
7955 for (i = 0; i < max; i++)
7958 /* We don't know the starting offset, but we do know that the string
7959 has no internal zero bytes. We can assume that the offset falls
7960 within the bounds of the string; otherwise, the programmer deserves
7961 what he gets. Subtract the offset from the length of the string,
7963 /* This would perhaps not be valid if we were dealing with named
7964 arrays in addition to literal string constants. */
7965 return size_binop (MINUS_EXPR, size_int (max), offset_node);
7968 /* We have a known offset into the string. Start searching there for
7969 a null character. */
7970 if (offset_node == 0)
/* No offset node at all means the string starts at offset 0.  */
7974 /* Did we get a long long offset? If so, punt. */
7975 if (TREE_INT_CST_HIGH (offset_node) != 0)
7977 offset = TREE_INT_CST_LOW (offset_node);
7979 /* If the offset is known to be out of bounds, warn, and call strlen at
7981 if (offset < 0 || offset > max)
7983 warning ("offset outside bounds of constant string");
7986 /* Use strlen to search for the first zero byte. Since any strings
7987 constructed with build_string will have nulls appended, we win even
7988 if we get handed something like (char[4])"abcd".
7990 Since OFFSET is our starting index into the string, no further
7991 calculation is needed. */
7992 return size_int (strlen (ptr + offset));
/* Support for __builtin_return_address and __builtin_frame_address:
   walk back COUNT frames starting from TEM (an rtx for the current
   frame address) and produce either the frame address or the return
   address of that frame.  FNDECL_CODE selects which builtin this is.  */
7996 expand_builtin_return_addr (fndecl_code, count, tem)
7997 enum built_in_function fndecl_code;
8003 /* Some machines need special handling before we can access
8004 arbitrary frames. For example, on the sparc, we must first flush
8005 all register windows to the stack. */
8006 #ifdef SETUP_FRAME_ADDRESSES
8007 SETUP_FRAME_ADDRESSES ();
8010 /* On the sparc, the return address is not in the frame, it is in a
8011 register. There is no way to access it off of the current frame
8012 pointer, but it can be accessed off the previous frame pointer by
8013 reading the value from the register window save area. */
8014 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
8015 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
8019 /* Scan back COUNT frames to the specified frame. */
8020 for (i = 0; i < count; i++)
8022 /* Assume the dynamic chain pointer is in the word that the
8023 frame address points to, unless otherwise specified. */
8024 #ifdef DYNAMIC_CHAIN_ADDRESS
8025 tem = DYNAMIC_CHAIN_ADDRESS (tem);
/* Follow one link of the frame chain: load the saved frame pointer
   stored in the current frame.  */
8027 tem = memory_address (Pmode, tem);
8028 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
8031 /* For __builtin_frame_address, return what we've got. */
8032 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
8035 /* For __builtin_return_address, Get the return address from that
8037 #ifdef RETURN_ADDR_RTX
8038 tem = RETURN_ADDR_RTX (count, tem);
/* Default layout: the return address sits one word above the saved
   frame pointer.  */
8040 tem = memory_address (Pmode,
8041 plus_constant (tem, GET_MODE_SIZE (Pmode)));
8042 tem = gen_rtx (MEM, Pmode, tem);
8047 /* Expand an expression EXP that calls a built-in function,
8048 with result going to TARGET if that's convenient
8049 (and in mode MODE if that's convenient).
8050 SUBTARGET may be used as the target for computing one of EXP's operands.
8051 IGNORE is nonzero if the value is to be ignored. */
8053 #define CALLED_AS_BUILT_IN(NODE) \
8054 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
8057 expand_builtin (exp, target, subtarget, mode, ignore)
8061 enum machine_mode mode;
8064 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8065 tree arglist = TREE_OPERAND (exp, 1);
8068 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
8069 optab builtin_optab;
8071 switch (DECL_FUNCTION_CODE (fndecl))
8076 /* build_function_call changes these into ABS_EXPR. */
8081 /* Treat these like sqrt, but only if the user asks for them. */
8082 if (! flag_fast_math)
8084 case BUILT_IN_FSQRT:
8085 /* If not optimizing, call the library function. */
8090 /* Arg could be wrong type if user redeclared this fcn wrong. */
8091 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
8094 /* Stabilize and compute the argument. */
8095 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
8096 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
8098 exp = copy_node (exp);
8099 arglist = copy_node (arglist);
8100 TREE_OPERAND (exp, 1) = arglist;
8101 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
8103 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8105 /* Make a suitable register to place result in. */
8106 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8111 switch (DECL_FUNCTION_CODE (fndecl))
8114 builtin_optab = sin_optab; break;
8116 builtin_optab = cos_optab; break;
8117 case BUILT_IN_FSQRT:
8118 builtin_optab = sqrt_optab; break;
8123 /* Compute into TARGET.
8124 Set TARGET to wherever the result comes back. */
8125 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8126 builtin_optab, op0, target, 0);
8128 /* If we were unable to expand via the builtin, stop the
8129 sequence (without outputting the insns) and break, causing
8130 a call the the library function. */
8137 /* Check the results by default. But if flag_fast_math is turned on,
8138 then assume sqrt will always be called with valid arguments. */
8140 if (! flag_fast_math)
8142 /* Don't define the builtin FP instructions
8143 if your machine is not IEEE. */
8144 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8147 lab1 = gen_label_rtx ();
8149 /* Test the result; if it is NaN, set errno=EDOM because
8150 the argument was not in the domain. */
8151 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
8152 emit_jump_insn (gen_beq (lab1));
8156 #ifdef GEN_ERRNO_RTX
8157 rtx errno_rtx = GEN_ERRNO_RTX;
8160 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
8163 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8166 /* We can't set errno=EDOM directly; let the library call do it.
8167 Pop the arguments right away in case the call gets deleted. */
8169 expand_call (exp, target, 0);
8176 /* Output the entire sequence. */
8177 insns = get_insns ();
8183 /* __builtin_apply_args returns block of memory allocated on
8184 the stack into which is stored the arg pointer, structure
8185 value address, static chain, and all the registers that might
8186 possibly be used in performing a function call. The code is
8187 moved to the start of the function so the incoming values are
8189 case BUILT_IN_APPLY_ARGS:
8190 /* Don't do __builtin_apply_args more than once in a function.
8191 Save the result of the first call and reuse it. */
8192 if (apply_args_value != 0)
8193 return apply_args_value;
8195 /* When this function is called, it means that registers must be
8196 saved on entry to this function. So we migrate the
8197 call to the first insn of this function. */
8202 temp = expand_builtin_apply_args ();
8206 apply_args_value = temp;
8208 /* Put the sequence after the NOTE that starts the function.
8209 If this is inside a SEQUENCE, make the outer-level insn
8210 chain current, so the code is placed at the start of the
8212 push_topmost_sequence ();
8213 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8214 pop_topmost_sequence ();
8218 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8219 FUNCTION with a copy of the parameters described by
8220 ARGUMENTS, and ARGSIZE. It returns a block of memory
8221 allocated on the stack into which is stored all the registers
8222 that might possibly be used for returning the result of a
8223 function. ARGUMENTS is the value returned by
8224 __builtin_apply_args. ARGSIZE is the number of bytes of
8225 arguments that must be copied. ??? How should this value be
8226 computed? We'll also need a safe worst case value for varargs
8228 case BUILT_IN_APPLY:
8230 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8231 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8232 || TREE_CHAIN (arglist) == 0
8233 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8234 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8235 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8243 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8244 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
8246 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8249 /* __builtin_return (RESULT) causes the function to return the
8250 value described by RESULT. RESULT is address of the block of
8251 memory returned by __builtin_apply. */
8252 case BUILT_IN_RETURN:
8254 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8255 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8256 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8257 NULL_RTX, VOIDmode, 0));
8260 case BUILT_IN_SAVEREGS:
8261 /* Don't do __builtin_saveregs more than once in a function.
8262 Save the result of the first call and reuse it. */
8263 if (saveregs_value != 0)
8264 return saveregs_value;
8266 /* When this function is called, it means that registers must be
8267 saved on entry to this function. So we migrate the
8268 call to the first insn of this function. */
8272 /* Now really call the function. `expand_call' does not call
8273 expand_builtin, so there is no danger of infinite recursion here. */
8276 #ifdef EXPAND_BUILTIN_SAVEREGS
8277 /* Do whatever the machine needs done in this case. */
8278 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8280 /* The register where the function returns its value
8281 is likely to have something else in it, such as an argument.
8282 So preserve that register around the call. */
8284 if (value_mode != VOIDmode)
8286 rtx valreg = hard_libcall_value (value_mode);
8287 rtx saved_valreg = gen_reg_rtx (value_mode);
8289 emit_move_insn (saved_valreg, valreg);
8290 temp = expand_call (exp, target, ignore);
8291 emit_move_insn (valreg, saved_valreg);
8294 /* Generate the call, putting the value in a pseudo. */
8295 temp = expand_call (exp, target, ignore);
8301 saveregs_value = temp;
8303 /* Put the sequence after the NOTE that starts the function.
8304 If this is inside a SEQUENCE, make the outer-level insn
8305 chain current, so the code is placed at the start of the
8307 push_topmost_sequence ();
8308 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8309 pop_topmost_sequence ();
8313 /* __builtin_args_info (N) returns word N of the arg space info
8314 for the current function. The number and meanings of words
8315 is controlled by the definition of CUMULATIVE_ARGS. */
8316 case BUILT_IN_ARGS_INFO:
8318 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
8320 int *word_ptr = (int *) &current_function_args_info;
8321 tree type, elts, result;
8323 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8324 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8325 __FILE__, __LINE__);
8329 tree arg = TREE_VALUE (arglist);
8330 if (TREE_CODE (arg) != INTEGER_CST)
8331 error ("argument of `__builtin_args_info' must be constant");
8334 int wordnum = TREE_INT_CST_LOW (arg);
8336 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8337 error ("argument of `__builtin_args_info' out of range");
8339 return GEN_INT (word_ptr[wordnum]);
8343 error ("missing argument in `__builtin_args_info'");
8348 for (i = 0; i < nwords; i++)
8349 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
8351 type = build_array_type (integer_type_node,
8352 build_index_type (build_int_2 (nwords, 0)));
8353 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8354 TREE_CONSTANT (result) = 1;
8355 TREE_STATIC (result) = 1;
8356 result = build (INDIRECT_REF, build_pointer_type (type), result);
8357 TREE_CONSTANT (result) = 1;
8358 return expand_expr (result, NULL_RTX, VOIDmode, 0);
8362 /* Return the address of the first anonymous stack arg. */
8363 case BUILT_IN_NEXT_ARG:
8365 tree fntype = TREE_TYPE (current_function_decl);
8367 if ((TYPE_ARG_TYPES (fntype) == 0
8368 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8370 && ! current_function_varargs)
8372 error ("`va_start' used in function with fixed args");
8378 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8379 tree arg = TREE_VALUE (arglist);
8381 /* Strip off all nops for the sake of the comparison. This
8382 is not quite the same as STRIP_NOPS. It does more.
8383 We must also strip off INDIRECT_EXPR for C++ reference
8385 while (TREE_CODE (arg) == NOP_EXPR
8386 || TREE_CODE (arg) == CONVERT_EXPR
8387 || TREE_CODE (arg) == NON_LVALUE_EXPR
8388 || TREE_CODE (arg) == INDIRECT_REF)
8389 arg = TREE_OPERAND (arg, 0);
8390 if (arg != last_parm)
8391 warning ("second parameter of `va_start' not last named argument");
8393 else if (! current_function_varargs)
8394 /* Evidently an out of date version of <stdarg.h>; can't validate
8395 va_start's second argument, but can still work as intended. */
8396 warning ("`__builtin_next_arg' called without an argument");
8399 return expand_binop (Pmode, add_optab,
8400 current_function_internal_arg_pointer,
8401 current_function_arg_offset_rtx,
8402 NULL_RTX, 0, OPTAB_LIB_WIDEN);
8404 case BUILT_IN_CLASSIFY_TYPE:
8407 tree type = TREE_TYPE (TREE_VALUE (arglist));
8408 enum tree_code code = TREE_CODE (type);
8409 if (code == VOID_TYPE)
8410 return GEN_INT (void_type_class);
8411 if (code == INTEGER_TYPE)
8412 return GEN_INT (integer_type_class);
8413 if (code == CHAR_TYPE)
8414 return GEN_INT (char_type_class);
8415 if (code == ENUMERAL_TYPE)
8416 return GEN_INT (enumeral_type_class);
8417 if (code == BOOLEAN_TYPE)
8418 return GEN_INT (boolean_type_class);
8419 if (code == POINTER_TYPE)
8420 return GEN_INT (pointer_type_class);
8421 if (code == REFERENCE_TYPE)
8422 return GEN_INT (reference_type_class);
8423 if (code == OFFSET_TYPE)
8424 return GEN_INT (offset_type_class);
8425 if (code == REAL_TYPE)
8426 return GEN_INT (real_type_class);
8427 if (code == COMPLEX_TYPE)
8428 return GEN_INT (complex_type_class);
8429 if (code == FUNCTION_TYPE)
8430 return GEN_INT (function_type_class);
8431 if (code == METHOD_TYPE)
8432 return GEN_INT (method_type_class);
8433 if (code == RECORD_TYPE)
8434 return GEN_INT (record_type_class);
8435 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8436 return GEN_INT (union_type_class);
8437 if (code == ARRAY_TYPE)
8439 if (TYPE_STRING_FLAG (type))
8440 return GEN_INT (string_type_class);
8442 return GEN_INT (array_type_class);
8444 if (code == SET_TYPE)
8445 return GEN_INT (set_type_class);
8446 if (code == FILE_TYPE)
8447 return GEN_INT (file_type_class);
8448 if (code == LANG_TYPE)
8449 return GEN_INT (lang_type_class);
8451 return GEN_INT (no_type_class);
8453 case BUILT_IN_CONSTANT_P:
8458 tree arg = TREE_VALUE (arglist);
8461 return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
8462 || (TREE_CODE (arg) == ADDR_EXPR
8463 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8464 ? const1_rtx : const0_rtx);
8467 case BUILT_IN_FRAME_ADDRESS:
8468 /* The argument must be a nonnegative integer constant.
8469 It counts the number of frames to scan up the stack.
8470 The value is the address of that frame. */
8471 case BUILT_IN_RETURN_ADDRESS:
8472 /* The argument must be a nonnegative integer constant.
8473 It counts the number of frames to scan up the stack.
8474 The value is the return address saved in that frame. */
8476 /* Warning about missing arg was already issued. */
8478 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
8480 error ("invalid arg to `__builtin_return_address'");
8483 else if (tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
8485 error ("invalid arg to `__builtin_return_address'");
8490 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
8491 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
8492 hard_frame_pointer_rtx);
8494 /* For __builtin_frame_address, return what we've got. */
8495 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8498 if (GET_CODE (tem) != REG)
8499 tem = copy_to_reg (tem);
8503 case BUILT_IN_ALLOCA:
8505 /* Arg could be non-integer if user redeclared this fcn wrong. */
8506 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8509 /* Compute the argument. */
8510 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
8512 /* Allocate the desired space. */
8513 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
8516 /* If not optimizing, call the library function. */
8517 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8521 /* Arg could be non-integer if user redeclared this fcn wrong. */
8522 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8525 /* Compute the argument. */
8526 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8527 /* Compute ffs, into TARGET if possible.
8528 Set TARGET to wherever the result comes back. */
8529 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8530 ffs_optab, op0, target, 1);
8535 case BUILT_IN_STRLEN:
8536 /* If not optimizing, call the library function. */
8537 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8541 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8542 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8546 tree src = TREE_VALUE (arglist);
8547 tree len = c_strlen (src);
8550 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8552 rtx result, src_rtx, char_rtx;
8553 enum machine_mode insn_mode = value_mode, char_mode;
8554 enum insn_code icode;
8556 /* If the length is known, just return it. */
8558 return expand_expr (len, target, mode, 0);
8560 /* If SRC is not a pointer type, don't do this operation inline. */
8564 /* Call a function if we can't compute strlen in the right mode. */
8566 while (insn_mode != VOIDmode)
8568 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
8569 if (icode != CODE_FOR_nothing)
8572 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
8574 if (insn_mode == VOIDmode)
8577 /* Make a place to write the result of the instruction. */
8580 && GET_CODE (result) == REG
8581 && GET_MODE (result) == insn_mode
8582 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8583 result = gen_reg_rtx (insn_mode);
8585 /* Make sure the operands are acceptable to the predicates. */
8587 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
8588 result = gen_reg_rtx (insn_mode);
8590 src_rtx = memory_address (BLKmode,
8591 expand_expr (src, NULL_RTX, ptr_mode,
8593 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
8594 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
8596 char_rtx = const0_rtx;
8597 char_mode = insn_operand_mode[(int)icode][2];
8598 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
8599 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
8601 emit_insn (GEN_FCN (icode) (result,
8602 gen_rtx (MEM, BLKmode, src_rtx),
8603 char_rtx, GEN_INT (align)));
8605 /* Return the value in the proper mode for this function. */
8606 if (GET_MODE (result) == value_mode)
8608 else if (target != 0)
8610 convert_move (target, result, 0);
8614 return convert_to_mode (value_mode, result, 0);
8617 case BUILT_IN_STRCPY:
8618 /* If not optimizing, call the library function. */
8619 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8623 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8624 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8625 || TREE_CHAIN (arglist) == 0
8626 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8630 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
8635 len = size_binop (PLUS_EXPR, len, integer_one_node);
8637 chainon (arglist, build_tree_list (NULL_TREE, len));
8641 case BUILT_IN_MEMCPY:
8642 /* If not optimizing, call the library function. */
8643 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8647 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8648 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8649 || TREE_CHAIN (arglist) == 0
8650 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8651 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8652 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8656 tree dest = TREE_VALUE (arglist);
8657 tree src = TREE_VALUE (TREE_CHAIN (arglist));
8658 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8662 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8664 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8665 rtx dest_rtx, dest_mem, src_mem;
8667 /* If either SRC or DEST is not a pointer type, don't do
8668 this operation in-line. */
8669 if (src_align == 0 || dest_align == 0)
8671 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
8672 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8676 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8677 dest_mem = gen_rtx (MEM, BLKmode,
8678 memory_address (BLKmode, dest_rtx));
8679 /* There could be a void* cast on top of the object. */
8680 while (TREE_CODE (dest) == NOP_EXPR)
8681 dest = TREE_OPERAND (dest, 0);
8682 type = TREE_TYPE (TREE_TYPE (dest));
8683 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8684 src_mem = gen_rtx (MEM, BLKmode,
8685 memory_address (BLKmode,
8686 expand_expr (src, NULL_RTX,
8689 /* There could be a void* cast on top of the object. */
8690 while (TREE_CODE (src) == NOP_EXPR)
8691 src = TREE_OPERAND (src, 0);
8692 type = TREE_TYPE (TREE_TYPE (src));
8693 MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
8695 /* Copy word part most expediently. */
8696 emit_block_move (dest_mem, src_mem,
8697 expand_expr (len, NULL_RTX, VOIDmode, 0),
8698 MIN (src_align, dest_align));
8699 return force_operand (dest_rtx, NULL_RTX);
8702 case BUILT_IN_MEMSET:
8703 /* If not optimizing, call the library function. */
8704 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8708 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8709 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8710 || TREE_CHAIN (arglist) == 0
8711 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
8713 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8715 != (TREE_CODE (TREE_TYPE
8717 (TREE_CHAIN (TREE_CHAIN (arglist))))))))
8721 tree dest = TREE_VALUE (arglist);
8722 tree val = TREE_VALUE (TREE_CHAIN (arglist));
8723 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8727 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8728 rtx dest_rtx, dest_mem;
8730 /* If DEST is not a pointer type, don't do this
8731 operation in-line. */
8732 if (dest_align == 0)
8735 /* If VAL is not 0, don't do this operation in-line. */
8736 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
8739 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8740 dest_mem = gen_rtx (MEM, BLKmode,
8741 memory_address (BLKmode, dest_rtx));
8742 /* There could be a void* cast on top of the object. */
8743 while (TREE_CODE (dest) == NOP_EXPR)
8744 dest = TREE_OPERAND (dest, 0);
8745 type = TREE_TYPE (TREE_TYPE (dest));
8746 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8748 clear_storage (dest_mem, expand_expr (len, NULL_RTX, VOIDmode, 0),
8751 return force_operand (dest_rtx, NULL_RTX);
8754 /* These comparison functions need an instruction that returns an actual
8755 index. An ordinary compare that just sets the condition codes
8757 #ifdef HAVE_cmpstrsi
8758 case BUILT_IN_STRCMP:
8759 /* If not optimizing, call the library function. */
8760 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8764 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8765 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8766 || TREE_CHAIN (arglist) == 0
8767 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8769 else if (!HAVE_cmpstrsi)
8772 tree arg1 = TREE_VALUE (arglist);
8773 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8777 len = c_strlen (arg1);
8779 len = size_binop (PLUS_EXPR, integer_one_node, len);
8780 len2 = c_strlen (arg2);
8782 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
8784 /* If we don't have a constant length for the first, use the length
8785 of the second, if we know it. We don't require a constant for
8786 this case; some cost analysis could be done if both are available
8787 but neither is constant. For now, assume they're equally cheap.
8789 If both strings have constant lengths, use the smaller. This
8790 could arise if optimization results in strcpy being called with
8791 two fixed strings, or if the code was machine-generated. We should
8792 add some code to the `memcmp' handler below to deal with such
8793 situations, someday. */
8794 if (!len || TREE_CODE (len) != INTEGER_CST)
8801 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
8803 if (tree_int_cst_lt (len2, len))
8807 chainon (arglist, build_tree_list (NULL_TREE, len));
8811 case BUILT_IN_MEMCMP:
8812 /* If not optimizing, call the library function. */
8813 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8817 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8818 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8819 || TREE_CHAIN (arglist) == 0
8820 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8821 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8822 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8824 else if (!HAVE_cmpstrsi)
8827 tree arg1 = TREE_VALUE (arglist);
8828 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8829 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8833 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8835 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8836 enum machine_mode insn_mode
8837 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
8839 /* If we don't have POINTER_TYPE, call the function. */
8840 if (arg1_align == 0 || arg2_align == 0)
8842 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
8843 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8847 /* Make a place to write the result of the instruction. */
8850 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
8851 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8852 result = gen_reg_rtx (insn_mode);
8854 emit_insn (gen_cmpstrsi (result,
8855 gen_rtx (MEM, BLKmode,
8856 expand_expr (arg1, NULL_RTX,
8859 gen_rtx (MEM, BLKmode,
8860 expand_expr (arg2, NULL_RTX,
8863 expand_expr (len, NULL_RTX, VOIDmode, 0),
8864 GEN_INT (MIN (arg1_align, arg2_align))));
8866 /* Return the value in the proper mode for this function. */
8867 mode = TYPE_MODE (TREE_TYPE (exp));
8868 if (GET_MODE (result) == mode)
8870 else if (target != 0)
8872 convert_move (target, result, 0);
8876 return convert_to_mode (mode, result, 0);
8879 case BUILT_IN_STRCMP:
8880 case BUILT_IN_MEMCMP:
8884 /* __builtin_setjmp is passed a pointer to an array of five words
8885 (not all will be used on all machines). It operates similarly to
8886 the C library function of the same name, but is more efficient.
8887 Much of the code below (and for longjmp) is copied from the handling
8890 NOTE: This is intended for use by GNAT and will only work in
8891 the method used by it. This code will likely NOT survive to
8892 the GCC 2.8.0 release. */
8893 case BUILT_IN_SETJMP:
8895 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8899 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
8901 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
8902 enum machine_mode sa_mode = Pmode;
8904 int old_inhibit_defer_pop = inhibit_defer_pop;
8906 = RETURN_POPS_ARGS (get_identifier ("__dummy"),
8907 build_function_type (void_type_node, NULL_TREE),
8910 CUMULATIVE_ARGS args_so_far;
8913 #ifdef POINTERS_EXTEND_UNSIGNED
8914 buf_addr = convert_memory_address (Pmode, buf_addr);
8917 buf_addr = force_reg (Pmode, buf_addr);
8919 if (target == 0 || GET_CODE (target) != REG
8920 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8921 target = gen_reg_rtx (value_mode);
8925 CONST_CALL_P (emit_note (NULL_PTR, NOTE_INSN_SETJMP)) = 1;
8926 current_function_calls_setjmp = 1;
8928 /* We store the frame pointer and the address of lab1 in the buffer
8929 and use the rest of it for the stack save area, which is
8930 machine-dependent. */
8931 emit_move_insn (gen_rtx (MEM, Pmode, buf_addr),
8932 virtual_stack_vars_rtx);
8934 (validize_mem (gen_rtx (MEM, Pmode,
8935 plus_constant (buf_addr,
8936 GET_MODE_SIZE (Pmode)))),
8937 gen_rtx (LABEL_REF, Pmode, lab1));
8939 #ifdef HAVE_save_stack_nonlocal
8940 if (HAVE_save_stack_nonlocal)
8941 sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
8944 stack_save = gen_rtx (MEM, sa_mode,
8945 plus_constant (buf_addr,
8946 2 * GET_MODE_SIZE (Pmode)));
8947 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
8951 emit_insn (gen_setjmp ());
8954 /* Set TARGET to zero and branch around the other case. */
8955 emit_move_insn (target, const0_rtx);
8956 emit_jump_insn (gen_jump (lab2));
8960 /* Note that setjmp clobbers FP when we get here, so we have to
8961 make sure it's marked as used by this function. */
8962 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
8964 /* Mark the static chain as clobbered here so life information
8965 doesn't get messed up for it. */
8966 emit_insn (gen_rtx (CLOBBER, VOIDmode, static_chain_rtx));
8968 /* Now put in the code to restore the frame pointer, and argument
8969 pointer, if needed. The code below is from expand_end_bindings
8970 in stmt.c; see detailed documentation there. */
8971 #ifdef HAVE_nonlocal_goto
8972 if (! HAVE_nonlocal_goto)
8974 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8976 current_function_has_nonlocal_goto = 1;
8978 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8979 if (fixed_regs[ARG_POINTER_REGNUM])
8981 #ifdef ELIMINABLE_REGS
8982 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
8984 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8985 if (elim_regs[i].from == ARG_POINTER_REGNUM
8986 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8989 if (i == sizeof elim_regs / sizeof elim_regs [0])
8992 /* Now restore our arg pointer from the address at which it
8993 was saved in our stack frame.
8994 If there hasn't been space allocated for it yet, make
8996 if (arg_pointer_save_area == 0)
8997 arg_pointer_save_area
8998 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8999 emit_move_insn (virtual_incoming_args_rtx,
9000 copy_to_reg (arg_pointer_save_area));
9005 #ifdef HAVE_nonlocal_goto_receiver
9006 if (HAVE_nonlocal_goto_receiver)
9007 emit_insn (gen_nonlocal_goto_receiver ());
9009 /* The static chain pointer contains the address of dummy function.
9010 We need to call it here to handle some PIC cases of restoring
9011 a global pointer. Then return 1. */
9012 op0 = copy_to_mode_reg (Pmode, static_chain_rtx);
9014 /* We can't actually call emit_library_call here, so do everything
9015 it does, which isn't much for a libfunc with no args. */
9016 op0 = memory_address (FUNCTION_MODE, op0);
9018 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE,
9019 gen_rtx (SYMBOL_REF, Pmode, "__dummy"), 1);
9020 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1);
9022 #ifndef ACCUMULATE_OUTGOING_ARGS
9023 #ifdef HAVE_call_pop
9025 emit_call_insn (gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, op0),
9026 const0_rtx, next_arg_reg,
9027 GEN_INT (return_pops)));
9034 emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, op0),
9035 const0_rtx, next_arg_reg, const0_rtx));
9040 emit_move_insn (target, const1_rtx);
9045 /* __builtin_longjmp is passed a pointer to an array of five words
9046 and a value, which is a dummy. It's similar to the C library longjmp
9047 function but works with __builtin_setjmp above. */
9048 case BUILT_IN_LONGJMP:
9049 if (arglist == 0 || TREE_CHAIN (arglist) == 0
9050 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9054 tree dummy_id = get_identifier ("__dummy");
9055 tree dummy_type = build_function_type (void_type_node, NULL_TREE);
9056 tree dummy_decl = build_decl (FUNCTION_DECL, dummy_id, dummy_type);
9057 #ifdef POINTERS_EXTEND_UNSIGNED
9060 convert_memory_address
9062 expand_expr (TREE_VALUE (arglist),
9063 NULL_RTX, VOIDmode, 0)));
9066 = force_reg (Pmode, expand_expr (TREE_VALUE (arglist),
9070 rtx fp = gen_rtx (MEM, Pmode, buf_addr);
9071 rtx lab = gen_rtx (MEM, Pmode,
9072 plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
9073 enum machine_mode sa_mode
9074 #ifdef HAVE_save_stack_nonlocal
9075 = (HAVE_save_stack_nonlocal
9076 ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
9081 rtx stack = gen_rtx (MEM, sa_mode,
9082 plus_constant (buf_addr,
9083 2 * GET_MODE_SIZE (Pmode)));
9085 DECL_EXTERNAL (dummy_decl) = 1;
9086 TREE_PUBLIC (dummy_decl) = 1;
9087 make_decl_rtl (dummy_decl, NULL_PTR, 1);
9089 /* Expand the second expression just for side-effects. */
9090 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
9091 const0_rtx, VOIDmode, 0);
9093 assemble_external (dummy_decl);
9095 /* Pick up FP, label, and SP from the block and jump. This code is
9096 from expand_goto in stmt.c; see there for detailed comments. */
9097 #if HAVE_nonlocal_goto
9098 if (HAVE_nonlocal_goto)
9099 emit_insn (gen_nonlocal_goto (fp, lab, stack,
9100 XEXP (DECL_RTL (dummy_decl), 0)));
9104 lab = copy_to_reg (lab);
9105 emit_move_insn (hard_frame_pointer_rtx, fp);
9106 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
9108 /* Put in the static chain register the address of the dummy
9110 emit_move_insn (static_chain_rtx, XEXP (DECL_RTL (dummy_decl), 0));
9111 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
9112 emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
9113 emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
9114 emit_indirect_jump (lab);
9120 default: /* just do library call, if unknown builtin */
9121 error ("built-in function `%s' not currently supported",
9122 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9125 /* The switch statement above can drop through to cause the function
9126 to be called normally. */
9128 return expand_call (exp, target, ignore);
9131 /* Built-in functions to perform an untyped call and return. */
9133 /* For each register that may be used for calling a function, this
9134 gives a mode used to copy the register's value. VOIDmode indicates
9135 the register is not used for calling a function. If the machine
9136 has register windows, this gives only the outbound registers.
9137 INCOMING_REGNO gives the corresponding inbound register. */
9138 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
9140 /* For each register that may be used for returning values, this gives
9141 a mode used to copy the register's value. VOIDmode indicates the
9142 register is not used for returning values. If the machine has
9143 register windows, this gives only the outbound registers.
9144 INCOMING_REGNO gives the corresponding inbound register. */
9145 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
9147 /* For each register that may be used for calling a function, this
9148 gives the offset of that register into the block returned by
9149 __builtin_apply_args. 0 indicates that the register is not
9150 used for calling a function. */
9151 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
9153 /* Return the offset of register REGNO into the block returned by
9154 __builtin_apply_args. This is not declared static, since it is
9155 needed in objc-act.c. */
/* Map hard register REGNO to its byte offset within the block that
   __builtin_apply_args saves on the stack (offsets recorded in
   apply_args_reg_offset[] by apply_args_size).  Not declared static
   because objc-act.c needs it.
   NOTE(review): the return-type line, K&R parameter declaration, and
   braces of this definition are elided from this view.  */
9158 apply_args_register_offset (regno)
9163 /* Arguments are always put in outgoing registers (in the argument
9164 block) if such make sense. */
9165 #ifdef OUTGOING_REGNO
     /* Presumably translates to the outbound register on machines with
        register windows -- TODO confirm against the target's OUTGOING_REGNO.  */
9166 regno = OUTGOING_REGNO(regno);
9168 return apply_args_reg_offset[regno];
9171 /* Return the size required for the block returned by __builtin_apply_args,
9172 and initialize apply_args_mode. */
/* apply_args_size: compute the size in bytes of the block returned by
   __builtin_apply_args (incoming arg pointer, optional structure value
   address, then every register usable for passing arguments), filling
   in apply_args_mode[] and apply_args_reg_offset[] as a side effect.
   The result is cached in a function-local static.
   NOTE(review): the function header and several interior lines
   (including the cache check and loop braces) are elided from this
   view; comments below describe only what is visible.  */
9177 static int size = -1;
9179 enum machine_mode mode;
9181 /* The values computed by this function never change. */
9184 /* The first value is the incoming arg-pointer. */
9185 size = GET_MODE_SIZE (Pmode);
9187 /* The second value is the structure value address unless this is
9188 passed as an "invisible" first argument. */
9189 if (struct_value_rtx)
9190 size += GET_MODE_SIZE (Pmode);
/* Scan every hard register that can carry a function argument.  */
9192 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9193 if (FUNCTION_ARG_REGNO_P (regno))
9195 /* Search for the proper mode for copying this register's
9196 value. I'm not sure this is right, but it works so far. */
9197 enum machine_mode best_mode = VOIDmode;
/* First try integer modes that occupy exactly one hard register.  */
9199 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9201 mode = GET_MODE_WIDER_MODE (mode))
9202 if (HARD_REGNO_MODE_OK (regno, mode)
9203 && HARD_REGNO_NREGS (regno, mode) == 1)
/* Fall back to float modes that have a usable move pattern.  */
9206 if (best_mode == VOIDmode)
9207 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9209 mode = GET_MODE_WIDER_MODE (mode))
9210 if (HARD_REGNO_MODE_OK (regno, mode)
9211 && (mov_optab->handlers[(int) mode].insn_code
9212 != CODE_FOR_nothing))
9216 if (mode == VOIDmode)
/* Round SIZE up to this mode's alignment before recording the
   register's offset, then reserve room for it.  */
9219 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9220 if (size % align != 0)
9221 size = CEIL (size, align) * align;
9222 apply_args_reg_offset[regno] = size;
9223 size += GET_MODE_SIZE (mode);
9224 apply_args_mode[regno] = mode;
/* Registers never used for argument passing get VOIDmode / offset 0.  */
9228 apply_args_mode[regno] = VOIDmode;
9229 apply_args_reg_offset[regno] = 0;
9235 /* Return the size required for the block returned by __builtin_apply,
9236 and initialize apply_result_mode. */
/* apply_result_size: compute the size in bytes of the block returned
   by __builtin_apply, i.e. room for every register that may hold a
   function return value, filling in apply_result_mode[] as a side
   effect.  The result is cached in a function-local static.
   NOTE(review): the return-type line, cache check, and loop braces are
   elided from this view; comments describe only what is visible.  */
9239 apply_result_size ()
9241 static int size = -1;
9243 enum machine_mode mode;
9245 /* The values computed by this function never change. */
/* Scan every hard register that can hold a function's return value.  */
9250 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9251 if (FUNCTION_VALUE_REGNO_P (regno))
9253 /* Search for the proper mode for copying this register's
9254 value. I'm not sure this is right, but it works so far. */
9255 enum machine_mode best_mode = VOIDmode;
/* First try integer modes the register can hold.  */
9257 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9259 mode = GET_MODE_WIDER_MODE (mode))
9260 if (HARD_REGNO_MODE_OK (regno, mode))
/* Fall back to float modes that have a usable move pattern.  */
9263 if (best_mode == VOIDmode)
9264 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9266 mode = GET_MODE_WIDER_MODE (mode))
9267 if (HARD_REGNO_MODE_OK (regno, mode)
9268 && (mov_optab->handlers[(int) mode].insn_code
9269 != CODE_FOR_nothing))
9273 if (mode == VOIDmode)
/* Align SIZE for this mode, then reserve room for the register.  */
9276 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9277 if (size % align != 0)
9278 size = CEIL (size, align) * align;
9279 size += GET_MODE_SIZE (mode);
9280 apply_result_mode[regno] = mode;
9283 apply_result_mode[regno] = VOIDmode;
9285 /* Allow targets that use untyped_call and untyped_return to override
9286 the size so that machine-specific information can be stored here. */
9287 #ifdef APPLY_RESULT_SIZE
9288 size = APPLY_RESULT_SIZE;
9294 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9295 /* Create a vector describing the result block RESULT. If SAVEP is true,
9296 the result block is used to save the values; otherwise it is used to
9297 restore the values. */
/* result_vector: build a PARALLEL of SETs that either save (SAVEP
   nonzero) each possible value-return register into the memory block
   RESULT, or restore the registers from it.  Offsets within RESULT
   mirror the layout computed by apply_result_size.
   NOTE(review): the return-type line, K&R parameter declarations, and
   some interior lines (braces, size/nelts initialization) are elided
   from this view.  */
9300 result_vector (savep, result)
9304 int regno, size, align, nelts;
9305 enum machine_mode mode;
9307 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
/* Emit one SET per register that apply_result_size marked as used.  */
9310 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9311 if ((mode = apply_result_mode[regno]) != VOIDmode)
9313 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9314 if (size % align != 0)
9315 size = CEIL (size, align) * align;
/* When restoring, use the inbound register number -- relevant on
   machines with register windows (see INCOMING_REGNO).  */
9316 reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
9317 mem = change_address (result, mode,
9318 plus_constant (XEXP (result, 0), size));
/* Direction of the copy depends on SAVEP: mem<-reg to save,
   reg<-mem to restore.  */
9319 savevec[nelts++] = (savep
9320 ? gen_rtx (SET, VOIDmode, mem, reg)
9321 : gen_rtx (SET, VOIDmode, reg, mem));
9322 size += GET_MODE_SIZE (mode);
9324 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
9326 #endif /* HAVE_untyped_call or HAVE_untyped_return */
9328 /* Save the state required to perform an untyped call with the same
9329 arguments as were passed to the current function. */
/* Emit RTL that saves the current function's incoming argument
   registers, arg pointer, and structure-value address into a stack
   block, and return (as an rtx) the address of that block.  Implements
   __builtin_apply_args.  NOTE(review): numbered listing -- the
   declaration of `registers' and some constant offsets in the elided
   lines are not visible.  */
9332 expand_builtin_apply_args ()
9335 int size, align, regno;
9336 enum machine_mode mode;
9338 /* Create a block where the arg-pointer, structure value address,
9339 and argument registers can be saved. */
9340 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
9342 /* Walk past the arg-pointer and structure value address. */
9343 size = GET_MODE_SIZE (Pmode);
9344 if (struct_value_rtx)
9345 size += GET_MODE_SIZE (Pmode);
9347 /* Save each register used in calling a function to the block. */
9348 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9349 if ((mode = apply_args_mode[regno]) != VOIDmode)
/* Layout must match apply_args_size: align, then store, then
   advance.  */
9353 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9354 if (size % align != 0)
9355 size = CEIL (size, align) * align;
9357 tem = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9360 /* For reg-stack.c's stack register household.
9361 Compare with a similar piece of code in function.c. */
9363 emit_insn (gen_rtx (USE, mode, tem));
9366 emit_move_insn (change_address (registers, mode,
9367 plus_constant (XEXP (registers, 0),
9370 size += GET_MODE_SIZE (mode);
9373 /* Save the arg pointer to the block. */
9374 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9375 copy_to_reg (virtual_incoming_args_rtx));
9376 size = GET_MODE_SIZE (Pmode);
9378 /* Save the structure value address unless this is passed as an
9379 "invisible" first argument. */
9380 if (struct_value_incoming_rtx)
9382 emit_move_insn (change_address (registers, Pmode,
9383 plus_constant (XEXP (registers, 0),
9385 copy_to_reg (struct_value_incoming_rtx));
9386 size += GET_MODE_SIZE (Pmode);
9389 /* Return the address of the block. */
9390 return copy_addr_to_reg (XEXP (registers, 0));
9393 /* Perform an untyped call and save the state required to perform an
9394 untyped return of whatever value was returned by the given function. */
/* Implement __builtin_apply: call FUNCTION, passing it a copy of the
   ARGSIZE bytes of stack arguments described by the ARGUMENTS block
   (as produced by __builtin_apply_args), then save all possible return
   registers into a result block and return that block's address.
   NOTE(review): numbered listing -- bracing, #else/#endif lines and
   some offsets are elided between the printed line numbers.  */
9397 expand_builtin_apply (function, arguments, argsize)
9398 rtx function, arguments, argsize;
9400 int size, align, regno;
9401 enum machine_mode mode;
9402 rtx incoming_args, result, reg, dest, call_insn;
9403 rtx old_stack_level = 0;
9404 rtx call_fusage = 0;
9406 /* Create a block where the return registers can be saved. */
9407 result = assign_stack_local (BLKmode, apply_result_size (), -1);
9409 /* ??? The argsize value should be adjusted here. */
9411 /* Fetch the arg pointer from the ARGUMENTS block. */
9412 incoming_args = gen_reg_rtx (Pmode);
9413 emit_move_insn (incoming_args,
9414 gen_rtx (MEM, Pmode, arguments));
/* If the stack grows upward, the saved arg pointer marks the END of
   the argument area, so step back ARGSIZE bytes.  */
9415 #ifndef STACK_GROWS_DOWNWARD
9416 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9417 incoming_args, 0, OPTAB_LIB_WIDEN);
9420 /* Perform postincrements before actually calling the function. */
9423 /* Push a new argument block and copy the arguments. */
9424 do_pending_stack_adjust ();
/* Save the stack pointer so it can be restored after the call.  */
9425 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
9427 /* Push a block of memory onto the stack to store the memory arguments.
9428 Save the address in a register, and copy the memory arguments. ??? I
9429 haven't figured out how the calling convention macros affect this,
9430 but it's likely that the source and/or destination addresses in
9431 the block copy will need updating in machine specific ways. */
9432 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
9433 emit_block_move (gen_rtx (MEM, BLKmode, dest),
9434 gen_rtx (MEM, BLKmode, incoming_args),
9436 PARM_BOUNDARY / BITS_PER_UNIT);
9438 /* Refer to the argument block. */
9440 arguments = gen_rtx (MEM, BLKmode, arguments);
9442 /* Walk past the arg-pointer and structure value address. */
9443 size = GET_MODE_SIZE (Pmode);
9444 if (struct_value_rtx)
9445 size += GET_MODE_SIZE (Pmode);
9447 /* Restore each of the registers previously saved. Make USE insns
9448 for each of these registers for use in making the call. */
9449 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9450 if ((mode = apply_args_mode[regno]) != VOIDmode)
/* Same layout walk as in expand_builtin_apply_args.  */
9452 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9453 if (size % align != 0)
9454 size = CEIL (size, align) * align;
9455 reg = gen_rtx (REG, mode, regno);
9456 emit_move_insn (reg,
9457 change_address (arguments, mode,
9458 plus_constant (XEXP (arguments, 0),
9461 use_reg (&call_fusage, reg);
9462 size += GET_MODE_SIZE (mode);
9465 /* Restore the structure value address unless this is passed as an
9466 "invisible" first argument. */
9467 size = GET_MODE_SIZE (Pmode);
9468 if (struct_value_rtx)
9470 rtx value = gen_reg_rtx (Pmode);
9471 emit_move_insn (value,
9472 change_address (arguments, Pmode,
9473 plus_constant (XEXP (arguments, 0),
9475 emit_move_insn (struct_value_rtx, value);
9476 if (GET_CODE (struct_value_rtx) == REG)
9477 use_reg (&call_fusage, struct_value_rtx);
9478 size += GET_MODE_SIZE (Pmode);
9481 /* All arguments and registers used for the call are set up by now! */
9482 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
9484 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
9485 and we don't want to load it into a register as an optimization,
9486 because prepare_call_address already did it if it should be done. */
9487 if (GET_CODE (function) != SYMBOL_REF)
9488 function = memory_address (FUNCTION_MODE, function);
9490 /* Generate the actual call instruction and save the return value. */
9491 #ifdef HAVE_untyped_call
9492 if (HAVE_untyped_call)
9493 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
9494 result, result_vector (1, result)));
/* Fallback: no untyped_call pattern -- an ordinary call_value can
   only express a single return register.  */
9497 #ifdef HAVE_call_value
9498 if (HAVE_call_value)
9502 /* Locate the unique return register. It is not possible to
9503 express a call that sets more than one return register using
9504 call_value; use untyped_call for that. In fact, untyped_call
9505 only needs to save the return registers in the given block. */
9506 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9507 if ((mode = apply_result_mode[regno]) != VOIDmode)
9510 abort (); /* HAVE_untyped_call required. */
9511 valreg = gen_rtx (REG, mode, regno);
9514 emit_call_insn (gen_call_value (valreg,
9515 gen_rtx (MEM, FUNCTION_MODE, function),
9516 const0_rtx, NULL_RTX, const0_rtx));
9518 emit_move_insn (change_address (result, GET_MODE (valreg),
9526 /* Find the CALL insn we just emitted. */
9527 for (call_insn = get_last_insn ();
9528 call_insn && GET_CODE (call_insn) != CALL_INSN;
9529 call_insn = PREV_INSN (call_insn))
9535 /* Put the register usage information on the CALL. If there is already
9536 some usage information, put ours at the end. */
9537 if (CALL_INSN_FUNCTION_USAGE (call_insn))
9541 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
9542 link = XEXP (link, 1))
9545 XEXP (link, 1) = call_fusage;
9548 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
9550 /* Restore the stack. */
9551 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
9553 /* Return the address of the result block. */
9554 return copy_addr_to_reg (XEXP (result, 0));
9557 /* Perform an untyped return. */
/* Implement __builtin_return: reload the saved return-value registers
   from the block at address RESULT (as produced by __builtin_apply) and
   return from the current function.  NOTE(review): numbered listing --
   the declarations of `reg', the #else branch, and the end-of-sequence
   call after get_insns are on elided lines.  */
9560 expand_builtin_return (result)
9563 int size, align, regno;
9564 enum machine_mode mode;
9566 rtx call_fusage = 0;
/* Called for its side effect of initializing apply_result_mode[].  */
9568 apply_result_size ();
9569 result = gen_rtx (MEM, BLKmode, result);
9571 #ifdef HAVE_untyped_return
9572 if (HAVE_untyped_return)
9574 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
9580 /* Restore the return value and note that each value is used. */
9582 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9583 if ((mode = apply_result_mode[regno]) != VOIDmode)
/* Same slot layout as apply_result_size computed.  */
9585 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9586 if (size % align != 0)
9587 size = CEIL (size, align) * align;
9588 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9589 emit_move_insn (reg,
9590 change_address (result, mode,
9591 plus_constant (XEXP (result, 0),
/* Accumulate USE insns in a separate sequence so they can all be
   emitted just before the return below.  */
9594 push_to_sequence (call_fusage);
9595 emit_insn (gen_rtx (USE, VOIDmode, reg));
9596 call_fusage = get_insns ();
9598 size += GET_MODE_SIZE (mode);
9601 /* Put the USE insns before the return. */
9602 emit_insns (call_fusage);
9604 /* Return whatever values were restored by jumping directly to the end
9606 expand_null_return ();
9609 /* Expand code for a post- or pre- increment or decrement
9610 and return the RTX for the result.
9611 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
/* Expand a {PRE,POST}{INCREMENT,DECREMENT}_EXPR EXP and return the rtx
   result.  POST is 1 for post-inc/dec, 0 for pre-inc/dec; IGNORE is
   nonzero when the caller discards the value.  NOTE(review): numbered
   listing -- bracing, the declarations of `icode'/`bad_subreg'/`result',
   the `single_insn' assignment, and several returns are on elided
   lines, so some control flow here must be inferred with care.  */
9614 expand_increment (exp, post, ignore)
9618 register rtx op0, op1;
9619 register rtx temp, value;
9620 register tree incremented = TREE_OPERAND (exp, 0);
9621 optab this_optab = add_optab;
9623 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9624 int op0_is_copy = 0;
9625 int single_insn = 0;
9626 /* 1 means we can't store into OP0 directly,
9627 because it is a subreg narrower than a word,
9628 and we don't dare clobber the rest of the word. */
/* Bytecode back end handles the whole expression itself.  */
9631 if (output_bytecode)
9633 bc_expand_expr (exp);
9637 /* Stabilize any component ref that might need to be
9638 evaluated more than once below. */
9640 || TREE_CODE (incremented) == BIT_FIELD_REF
9641 || (TREE_CODE (incremented) == COMPONENT_REF
9642 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9643 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9644 incremented = stabilize_reference (incremented);
9645 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9646 ones into save exprs so that they don't accidentally get evaluated
9647 more than once by the code below. */
9648 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9649 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9650 incremented = save_expr (incremented);
9652 /* Compute the operands as RTX.
9653 Note whether OP0 is the actual lvalue or a copy of it:
9654 I believe it is a copy iff it is a register or subreg
9655 and insns were generated in computing it. */
9657 temp = get_last_insn ();
9658 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9660 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9661 in place but instead must do sign- or zero-extension during assignment,
9662 so we copy it into a new register and let the code below use it as
9665 Note that we can safely modify this SUBREG since it is known not to be
9666 shared (it was made by the expand_expr call above). */
9668 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9671 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9675 else if (GET_CODE (op0) == SUBREG
9676 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9678 /* We cannot increment this SUBREG in place. If we are
9679 post-incrementing, get a copy of the old value. Otherwise,
9680 just mark that we cannot increment in place. */
9682 op0 = copy_to_reg (op0);
/* OP0 is a copy iff it landed in a (sub)reg and expanding it emitted
   insns (i.e. the insn stream grew since `temp' was recorded).  */
9687 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9688 && temp != get_last_insn ());
9689 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9691 /* Decide whether incrementing or decrementing. */
9692 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9693 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9694 this_optab = sub_optab;
9696 /* Convert decrement by a constant into a negative increment. */
9697 if (this_optab == sub_optab
9698 && GET_CODE (op1) == CONST_INT)
9700 op1 = GEN_INT (- INTVAL (op1));
9701 this_optab = add_optab;
9704 /* For a preincrement, see if we can do this with a single instruction. */
9707 icode = (int) this_optab->handlers[(int) mode].insn_code;
9708 if (icode != (int) CODE_FOR_nothing
9709 /* Make sure that OP0 is valid for operands 0 and 1
9710 of the insn we want to queue. */
9711 && (*insn_operand_predicate[icode][0]) (op0, mode)
9712 && (*insn_operand_predicate[icode][1]) (op0, mode)
9713 && (*insn_operand_predicate[icode][2]) (op1, mode))
9717 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9718 then we cannot just increment OP0. We must therefore contrive to
9719 increment the original value. Then, for postincrement, we can return
9720 OP0 since it is a copy of the old value. For preincrement, expand here
9721 unless we can do it with a single insn.
9723 Likewise if storing directly into OP0 would clobber high bits
9724 we need to preserve (bad_subreg). */
9725 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9727 /* This is the easiest way to increment the value wherever it is.
9728 Problems with multiple evaluation of INCREMENTED are prevented
9729 because either (1) it is a component_ref or preincrement,
9730 in which case it was stabilized above, or (2) it is an array_ref
9731 with constant index in an array in a register, which is
9732 safe to reevaluate. */
9733 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9734 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9735 ? MINUS_EXPR : PLUS_EXPR),
9738 TREE_OPERAND (exp, 1));
/* Strip conversions so the assignment targets the underlying
   lvalue, converting NEWEXP to match at each level.  */
9740 while (TREE_CODE (incremented) == NOP_EXPR
9741 || TREE_CODE (incremented) == CONVERT_EXPR)
9743 newexp = convert (TREE_TYPE (incremented), newexp);
9744 incremented = TREE_OPERAND (incremented, 0);
9747 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9748 return post ? op0 : temp;
9753 /* We have a true reference to the value in OP0.
9754 If there is an insn to add or subtract in this mode, queue it.
9755 Queueing the increment insn avoids the register shuffling
9756 that often results if we must increment now and first save
9757 the old value for subsequent use. */
9759 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9760 op0 = stabilize (op0);
9763 icode = (int) this_optab->handlers[(int) mode].insn_code;
9764 if (icode != (int) CODE_FOR_nothing
9765 /* Make sure that OP0 is valid for operands 0 and 1
9766 of the insn we want to queue. */
9767 && (*insn_operand_predicate[icode][0]) (op0, mode)
9768 && (*insn_operand_predicate[icode][1]) (op0, mode))
9770 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9771 op1 = force_reg (mode, op1);
9773 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
/* OP0 is a MEM the insn can't take directly: load it into a reg,
   increment the reg, and queue a store back to the stabilized
   address.  */
9775 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9777 rtx addr = force_reg (Pmode, XEXP (op0, 0));
9780 op0 = change_address (op0, VOIDmode, addr);
9781 temp = force_reg (GET_MODE (op0), op0);
9782 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9783 op1 = force_reg (mode, op1);
9785 /* The increment queue is LIFO, thus we have to `queue'
9786 the instructions in reverse order. */
9787 enqueue_insn (op0, gen_move_insn (op0, temp));
9788 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9793 /* Preincrement, or we can't increment with one simple insn. */
9795 /* Save a copy of the value before inc or dec, to return it later. */
9796 temp = value = copy_to_reg (op0);
9798 /* Arrange to return the incremented value. */
9799 /* Copy the rtx because expand_binop will protect from the queue,
9800 and the results of that would be invalid for us to return
9801 if our caller does emit_queue before using our result. */
9802 temp = copy_rtx (value = op0);
9804 /* Increment however we can. */
9805 op1 = expand_binop (mode, this_optab, value, op1, op0,
9806 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9807 /* Make sure the value is stored into OP0. */
9809 emit_move_insn (op0, op1);
9814 /* Expand all function calls contained within EXP, innermost ones first.
9815 But don't look within expressions that have sequence points.
9816 For each CALL_EXPR, record the rtx for its value
9817 in the CALL_EXPR_RTL field. */
/* Recursively expand every CALL_EXPR contained in EXP, innermost first,
   recording each call's rtx in CALL_EXPR_RTL.  Stops at constructs with
   sequence points or their own expansion (cleanups, SAVE_EXPRs already
   expanded).  NOTE(review): numbered listing -- the early `return's
   after the guards, the case labels for CALL_EXPR/BLOCK, and the
   closing of the switch are on elided lines.  */
9820 preexpand_calls (exp)
9823 register int nops, i;
9824 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9826 if (! do_preexpand_calls)
9829 /* Only expressions and references can contain calls. */
9831 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9834 switch (TREE_CODE (exp))
9837 /* Do nothing if already expanded. */
9838 if (CALL_EXPR_RTL (exp) != 0
9839 /* Do nothing if the call returns a variable-sized object. */
9840 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
9841 /* Do nothing to built-in functions. */
9842 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9843 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9845 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9848 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9853 case TRUTH_ANDIF_EXPR:
9854 case TRUTH_ORIF_EXPR:
9855 /* If we find one of these, then we can be sure
9856 the adjust will be done for it (since it makes jumps).
9857 Do it now, so that if this is inside an argument
9858 of a function, we don't get the stack adjustment
9859 after some other args have already been pushed. */
9860 do_pending_stack_adjust ();
9865 case WITH_CLEANUP_EXPR:
9866 case CLEANUP_POINT_EXPR:
/* A SAVE_EXPR that was already expanded must not be re-walked.  */
9870 if (SAVE_EXPR_RTL (exp) != 0)
/* Default: recurse into every operand that can contain a call.  */
9874 nops = tree_code_length[(int) TREE_CODE (exp)];
9875 for (i = 0; i < nops; i++)
9876 if (TREE_OPERAND (exp, i) != 0)
9878 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9879 if (type == 'e' || type == '<' || type == '1' || type == '2'
9881 preexpand_calls (TREE_OPERAND (exp, i));
9885 /* At the start of a function, record that we have no previously-pushed
9886 arguments waiting to be popped. */
/* Reset the pending stack adjustment at the start of a function:
   no previously-pushed arguments are waiting to be popped.  */
9889 init_pending_stack_adjust ()
9891 pending_stack_adjust = 0;
9894 /* When exiting from function, if safe, clear out any pending stack adjust
9895 so the adjustment won't get done. */
/* At function exit, discard any pending stack adjustment when it is
   safe to leave the stack unbalanced (the epilogue will reset it).
   NOTE(review): numbered listing -- the first condition of the `if'
   (line 9901, presumably the optimize/EXIT_IGNORE_STACK guard) is
   elided; only the trailing conjuncts are visible.  */
9898 clear_pending_stack_adjust ()
9900 #ifdef EXIT_IGNORE_STACK
/* Not safe when this function might be inlined: the inliner copies
   the body without the epilogue that would fix the stack.  */
9902 && ! flag_omit_frame_pointer && EXIT_IGNORE_STACK
9903 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9904 && ! flag_inline_functions)
9905 pending_stack_adjust = 0;
9909 /* Pop any previously-pushed arguments that have not been popped yet. */
/* Emit the deferred stack-pointer adjustment, popping any arguments
   pushed but not yet popped.  Does nothing while pops are being
   deferred (inhibit_defer_pop nonzero).  */
9912 do_pending_stack_adjust ()
9914 if (inhibit_defer_pop == 0)
9916 if (pending_stack_adjust != 0)
9917 adjust_stack (GEN_INT (pending_stack_adjust));
9918 pending_stack_adjust = 0;
9922 /* Defer the expansion all cleanups up to OLD_CLEANUPS.
9923 Returns the cleanups to be performed. */
/* Detach all cleanups pushed since OLD_CLEANUPS from cleanups_this_call
   without expanding them, and return them combined into a single
   expression (a right-nested COMPOUND_EXPR) for the caller to expand
   later.  Returns NULL_TREE if nothing was deferred.  NOTE(review):
   numbered listing -- the `if (last)' guard, the loop head over the
   detached list, and the obstack pop are on elided lines.  */
9926 defer_cleanups_to (old_cleanups)
9929 tree new_cleanups = NULL_TREE;
9930 tree cleanups = cleanups_this_call;
9931 tree last = NULL_TREE;
/* Walk off every cleanup newer than OLD_CLEANUPS, closing each one's
   EH region as we go; `last' ends up at the oldest detached node.  */
9933 while (cleanups_this_call != old_cleanups)
9935 expand_eh_region_end (TREE_VALUE (cleanups_this_call));
9936 last = cleanups_this_call;
9937 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
9942 /* Remove the list from the chain of cleanups. */
9943 TREE_CHAIN (last) = NULL_TREE;
9945 /* Reverse them so that we can build them in the right order. */
9946 cleanups = nreverse (cleanups);
9948 /* All cleanups must be on the function_obstack. */
9949 push_obstacks_nochange ();
9950 resume_temporary_allocation ();
/* Fold each cleanup into the accumulated expression.  */
9955 new_cleanups = build (COMPOUND_EXPR, TREE_TYPE (new_cleanups),
9956 TREE_VALUE (cleanups), new_cleanups);
9958 new_cleanups = TREE_VALUE (cleanups);
9960 cleanups = TREE_CHAIN (cleanups);
9966 return new_cleanups;
9969 /* Expand all cleanups up to OLD_CLEANUPS.
9970 Needed here, and also for language-dependent calls. */
/* Expand (emit code for) every cleanup pushed since OLD_CLEANUPS,
   newest first, closing each one's EH region and popping it off
   cleanups_this_call.  Also used by language front ends.  */
9973 expand_cleanups_to (old_cleanups)
9976 while (cleanups_this_call != old_cleanups)
9978 expand_eh_region_end (TREE_VALUE (cleanups_this_call));
/* Expand for side effects only; the value is discarded.  */
9979 expand_expr (TREE_VALUE (cleanups_this_call), const0_rtx, VOIDmode, 0);
9980 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
9984 /* Expand conditional expressions. */
9986 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9987 LABEL is an rtx of code CODE_LABEL, in this function and all the
/* Emit code to evaluate EXP and jump to LABEL if the value is zero
   (falls through when nonzero).  */
9991 jumpifnot (exp, label)
9995 do_jump (exp, label, NULL_RTX);
9998 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
/* Emit code to evaluate EXP and jump to LABEL if the value is nonzero
   (falls through when zero).  */
10001 jumpif (exp, label)
10005 do_jump (exp, NULL_RTX, label);
10008 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
10009 the result is zero, or IF_TRUE_LABEL if the result is one.
10010 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
10011 meaning fall through in that case.
10013 do_jump always does any pending stack adjust except when it does not
10014 actually perform a jump. An example where there is no jump
10015 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
10017 This function is responsible for optimizing cases such as
10018 &&, || and comparison operators in EXP. */
10021 do_jump (exp, if_false_label, if_true_label)
10023 rtx if_false_label, if_true_label;
10025 register enum tree_code code = TREE_CODE (exp);
10026 /* Some cases need to create a label to jump to
10027 in order to properly fall through.
10028 These cases set DROP_THROUGH_LABEL nonzero. */
10029 rtx drop_through_label = 0;
10031 rtx comparison = 0;
10034 enum machine_mode mode;
10044 temp = integer_zerop (exp) ? if_false_label : if_true_label;
10050 /* This is not true with #pragma weak */
10052 /* The address of something can never be zero. */
10054 emit_jump (if_true_label);
10059 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
10060 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
10061 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
10064 /* If we are narrowing the operand, we have to do the compare in the
10066 if ((TYPE_PRECISION (TREE_TYPE (exp))
10067 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10069 case NON_LVALUE_EXPR:
10070 case REFERENCE_EXPR:
10075 /* These cannot change zero->non-zero or vice versa. */
10076 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10080 /* This is never less insns than evaluating the PLUS_EXPR followed by
10081 a test and can be longer if the test is eliminated. */
10083 /* Reduce to minus. */
10084 exp = build (MINUS_EXPR, TREE_TYPE (exp),
10085 TREE_OPERAND (exp, 0),
10086 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
10087 TREE_OPERAND (exp, 1))));
10088 /* Process as MINUS. */
10092 /* Non-zero iff operands of minus differ. */
10093 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
10094 TREE_OPERAND (exp, 0),
10095 TREE_OPERAND (exp, 1)),
10100 /* If we are AND'ing with a small constant, do this comparison in the
10101 smallest type that fits. If the machine doesn't have comparisons
10102 that small, it will be converted back to the wider comparison.
10103 This helps if we are testing the sign bit of a narrower object.
10104 combine can't do this for us because it can't know whether a
10105 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
10107 if (! SLOW_BYTE_ACCESS
10108 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
10109 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
10110 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
10111 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
10112 && (type = type_for_mode (mode, 1)) != 0
10113 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10114 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10115 != CODE_FOR_nothing))
10117 do_jump (convert (type, exp), if_false_label, if_true_label);
10122 case TRUTH_NOT_EXPR:
10123 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10126 case TRUTH_ANDIF_EXPR:
10129 tree cleanups, old_cleanups;
10131 if (if_false_label == 0)
10132 if_false_label = drop_through_label = gen_label_rtx ();
10134 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
10135 seq1 = get_insns ();
10138 old_cleanups = cleanups_this_call;
10140 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10141 seq2 = get_insns ();
10142 cleanups = defer_cleanups_to (old_cleanups);
10147 rtx flag = gen_reg_rtx (word_mode);
10151 /* Flag cleanups as not needed. */
10152 emit_move_insn (flag, const0_rtx);
10155 /* Flag cleanups as needed. */
10156 emit_move_insn (flag, const1_rtx);
10159 /* All cleanups must be on the function_obstack. */
10160 push_obstacks_nochange ();
10161 resume_temporary_allocation ();
10163 /* convert flag, which is an rtx, into a tree. */
10164 cond = make_node (RTL_EXPR);
10165 TREE_TYPE (cond) = integer_type_node;
10166 RTL_EXPR_RTL (cond) = flag;
10167 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
10168 cond = save_expr (cond);
10170 new_cleanups = build (COND_EXPR, void_type_node,
10171 truthvalue_conversion (cond),
10172 cleanups, integer_zero_node);
10173 new_cleanups = fold (new_cleanups);
10177 /* Now add in the conditionalized cleanups. */
10179 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
10180 expand_eh_region_start ();
10190 case TRUTH_ORIF_EXPR:
10193 tree cleanups, old_cleanups;
10195 if (if_true_label == 0)
10196 if_true_label = drop_through_label = gen_label_rtx ();
10198 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
10199 seq1 = get_insns ();
10202 old_cleanups = cleanups_this_call;
10204 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10205 seq2 = get_insns ();
10206 cleanups = defer_cleanups_to (old_cleanups);
10211 rtx flag = gen_reg_rtx (word_mode);
10215 /* Flag cleanups as not needed. */
10216 emit_move_insn (flag, const0_rtx);
10219 /* Flag cleanups as needed. */
10220 emit_move_insn (flag, const1_rtx);
10223 /* All cleanups must be on the function_obstack. */
10224 push_obstacks_nochange ();
10225 resume_temporary_allocation ();
10227 /* convert flag, which is an rtx, into a tree. */
10228 cond = make_node (RTL_EXPR);
10229 TREE_TYPE (cond) = integer_type_node;
10230 RTL_EXPR_RTL (cond) = flag;
10231 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
10232 cond = save_expr (cond);
10234 new_cleanups = build (COND_EXPR, void_type_node,
10235 truthvalue_conversion (cond),
10236 cleanups, integer_zero_node);
10237 new_cleanups = fold (new_cleanups);
10241 /* Now add in the conditionalized cleanups. */
10243 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
10244 expand_eh_region_start ();
10254 case COMPOUND_EXPR:
10255 push_temp_slots ();
10256 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
10257 preserve_temp_slots (NULL_RTX);
10258 free_temp_slots ();
10261 do_pending_stack_adjust ();
10262 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10265 case COMPONENT_REF:
10266 case BIT_FIELD_REF:
10269 int bitsize, bitpos, unsignedp;
10270 enum machine_mode mode;
10276 /* Get description of this reference. We don't actually care
10277 about the underlying object here. */
10278 get_inner_reference (exp, &bitsize, &bitpos, &offset,
10279 &mode, &unsignedp, &volatilep,
10282 type = type_for_size (bitsize, unsignedp);
10283 if (! SLOW_BYTE_ACCESS
10284 && type != 0 && bitsize >= 0
10285 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10286 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10287 != CODE_FOR_nothing))
10289 do_jump (convert (type, exp), if_false_label, if_true_label);
10296 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10297 if (integer_onep (TREE_OPERAND (exp, 1))
10298 && integer_zerop (TREE_OPERAND (exp, 2)))
10299 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10301 else if (integer_zerop (TREE_OPERAND (exp, 1))
10302 && integer_onep (TREE_OPERAND (exp, 2)))
10303 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10308 tree cleanups_left_side, cleanups_right_side, old_cleanups;
10310 register rtx label1 = gen_label_rtx ();
10311 drop_through_label = gen_label_rtx ();
10313 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10315 /* We need to save the cleanups for the lhs and rhs separately.
10316 Keep track of the cleanups seen before the lhs. */
10317 old_cleanups = cleanups_this_call;
10319 /* Now the THEN-expression. */
10320 do_jump (TREE_OPERAND (exp, 1),
10321 if_false_label ? if_false_label : drop_through_label,
10322 if_true_label ? if_true_label : drop_through_label);
10323 /* In case the do_jump just above never jumps. */
10324 do_pending_stack_adjust ();
10325 emit_label (label1);
10326 seq1 = get_insns ();
10327 /* Now grab the cleanups for the lhs. */
10328 cleanups_left_side = defer_cleanups_to (old_cleanups);
10331 /* And keep track of where we start before the rhs. */
10332 old_cleanups = cleanups_this_call;
10334 /* Now the ELSE-expression. */
10335 do_jump (TREE_OPERAND (exp, 2),
10336 if_false_label ? if_false_label : drop_through_label,
10337 if_true_label ? if_true_label : drop_through_label);
10338 seq2 = get_insns ();
10339 /* Grab the cleanups for the rhs. */
10340 cleanups_right_side = defer_cleanups_to (old_cleanups);
10343 if (cleanups_left_side || cleanups_right_side)
10345 /* Make the cleanups for the THEN and ELSE clauses
10346 conditional based on which half is executed. */
10347 rtx flag = gen_reg_rtx (word_mode);
10351 /* Set the flag to 0 so that we know we executed the lhs. */
10352 emit_move_insn (flag, const0_rtx);
10355 /* Set the flag to 1 so that we know we executed the rhs. */
10356 emit_move_insn (flag, const1_rtx);
10359 /* Make sure the cleanup lives on the function_obstack. */
10360 push_obstacks_nochange ();
10361 resume_temporary_allocation ();
10363 /* Now, build up a COND_EXPR that tests the value of the
10364 flag, and then either do the cleanups for the lhs or the
10366 cond = make_node (RTL_EXPR);
10367 TREE_TYPE (cond) = integer_type_node;
10368 RTL_EXPR_RTL (cond) = flag;
10369 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
10370 cond = save_expr (cond);
10372 new_cleanups = build (COND_EXPR, void_type_node,
10373 truthvalue_conversion (cond),
10374 cleanups_right_side, cleanups_left_side);
10375 new_cleanups = fold (new_cleanups);
10379 /* Now add in the conditionalized cleanups. */
10381 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
10382 expand_eh_region_start ();
10386 /* No cleanups were needed, so emit the two sequences
10396 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10398 if (integer_zerop (TREE_OPERAND (exp, 1)))
10399 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10400 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10401 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10404 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10405 fold (build (EQ_EXPR, TREE_TYPE (exp),
10406 fold (build1 (REALPART_EXPR,
10407 TREE_TYPE (inner_type),
10408 TREE_OPERAND (exp, 0))),
10409 fold (build1 (REALPART_EXPR,
10410 TREE_TYPE (inner_type),
10411 TREE_OPERAND (exp, 1))))),
10412 fold (build (EQ_EXPR, TREE_TYPE (exp),
10413 fold (build1 (IMAGPART_EXPR,
10414 TREE_TYPE (inner_type),
10415 TREE_OPERAND (exp, 0))),
10416 fold (build1 (IMAGPART_EXPR,
10417 TREE_TYPE (inner_type),
10418 TREE_OPERAND (exp, 1))))))),
10419 if_false_label, if_true_label);
10420 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10421 && !can_compare_p (TYPE_MODE (inner_type)))
10422 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10424 comparison = compare (exp, EQ, EQ);
10430 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10432 if (integer_zerop (TREE_OPERAND (exp, 1)))
10433 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10434 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10435 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10438 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10439 fold (build (NE_EXPR, TREE_TYPE (exp),
10440 fold (build1 (REALPART_EXPR,
10441 TREE_TYPE (inner_type),
10442 TREE_OPERAND (exp, 0))),
10443 fold (build1 (REALPART_EXPR,
10444 TREE_TYPE (inner_type),
10445 TREE_OPERAND (exp, 1))))),
10446 fold (build (NE_EXPR, TREE_TYPE (exp),
10447 fold (build1 (IMAGPART_EXPR,
10448 TREE_TYPE (inner_type),
10449 TREE_OPERAND (exp, 0))),
10450 fold (build1 (IMAGPART_EXPR,
10451 TREE_TYPE (inner_type),
10452 TREE_OPERAND (exp, 1))))))),
10453 if_false_label, if_true_label);
10454 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10455 && !can_compare_p (TYPE_MODE (inner_type)))
10456 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10458 comparison = compare (exp, NE, NE);
10463 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10465 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10466 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10468 comparison = compare (exp, LT, LTU);
10472 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10474 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10475 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10477 comparison = compare (exp, LE, LEU);
10481 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10483 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10484 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10486 comparison = compare (exp, GT, GTU);
10490 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10492 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10493 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10495 comparison = compare (exp, GE, GEU);
10500 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10502 /* This is not needed any more and causes poor code since it causes
10503 comparisons and tests from non-SI objects to have different code
10505 /* Copy to register to avoid generating bad insns by cse
10506 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10507 if (!cse_not_expected && GET_CODE (temp) == MEM)
10508 temp = copy_to_reg (temp);
10510 do_pending_stack_adjust ();
10511 if (GET_CODE (temp) == CONST_INT)
10512 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10513 else if (GET_CODE (temp) == LABEL_REF)
10514 comparison = const_true_rtx;
10515 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10516 && !can_compare_p (GET_MODE (temp)))
10517 /* Note swapping the labels gives us not-equal. */
10518 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10519 else if (GET_MODE (temp) != VOIDmode)
10520 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
10521 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10522 GET_MODE (temp), NULL_RTX, 0);
10527 /* Do any postincrements in the expression that was tested. */
10530 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10531 straight into a conditional jump instruction as the jump condition.
10532 Otherwise, all the work has been done already. */
10534 if (comparison == const_true_rtx)
10537 emit_jump (if_true_label);
10539 else if (comparison == const0_rtx)
10541 if (if_false_label)
10542 emit_jump (if_false_label);
10544 else if (comparison)
10545 do_jump_for_compare (comparison, if_false_label, if_true_label);
10547 if (drop_through_label)
10549 /* If do_jump produces code that might be jumped around,
10550 do any stack adjusts from that code, before the place
10551 where control merges in. */
10552 do_pending_stack_adjust ();
10553 emit_label (drop_through_label);
10557 /* Given a comparison expression EXP for values too wide to be compared
10558 with one insn, test the comparison and jump to the appropriate label.
10559 The code of EXP is ignored; we always test GT if SWAP is 0,
10560 and LT if SWAP is 1. */
10563 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10566 rtx if_false_label, if_true_label;
/* Emit a multiword ordered comparison word-by-word, high-order word first.
   SWAP selects which operand of EXP is treated as the left-hand side,
   giving GT when SWAP is 0 and LT when SWAP is 1 (see header comment).
   NOTE(review): this listing omits some declaration lines (e.g. `tree exp;
   int swap;` and loop-variable declarations) — the code below is the
   visible remainder.  */
10568 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10569 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10570 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10571 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10572 rtx drop_through_label = 0;
10573 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
/* If either label is absent, fall through there instead of jumping.  */
10576 if (! if_true_label || ! if_false_label)
10577 drop_through_label = gen_label_rtx ();
10578 if (! if_true_label)
10579 if_true_label = drop_through_label;
10580 if (! if_false_label)
10581 if_false_label = drop_through_label;
10583 /* Compare a word at a time, high order first. */
10584 for (i = 0; i < nwords; i++)
10587 rtx op0_word, op1_word;
/* Pick the i-th most significant word; subword index depends on
   target endianness.  */
10589 if (WORDS_BIG_ENDIAN)
10591 op0_word = operand_subword_force (op0, i, mode);
10592 op1_word = operand_subword_force (op1, i, mode);
10596 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10597 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10600 /* All but high-order word must be compared as unsigned. */
10601 comp = compare_from_rtx (op0_word, op1_word,
10602 (unsignedp || i > 0) ? GTU : GT,
10603 unsignedp, word_mode, NULL_RTX, 0);
/* compare_from_rtx may fold to a constant; branch or fall through
   accordingly instead of emitting a conditional jump.  */
10604 if (comp == const_true_rtx)
10605 emit_jump (if_true_label);
10606 else if (comp != const0_rtx)
10607 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10609 /* Consider lower words only if these are equal. */
10610 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10612 if (comp == const_true_rtx)
10613 emit_jump (if_false_label);
10614 else if (comp != const0_rtx)
10615 do_jump_for_compare (comp, NULL_RTX, if_false_label);
/* All words compared equal: the ordered comparison is false.  */
10618 if (if_false_label)
10619 emit_jump (if_false_label);
10620 if (drop_through_label)
10621 emit_label (drop_through_label);
10624 /* Compare OP0 with OP1, word at a time, in mode MODE.
10625 UNSIGNEDP says to do unsigned comparison.
10626 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10629 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10630 enum machine_mode mode;
10633 rtx if_false_label, if_true_label;
/* RTX-level twin of do_jump_by_parts_greater: OP0/OP1 are already rtl.
   Compares word-by-word, high-order word first; jumps to IF_TRUE_LABEL
   if OP0 > OP1, else IF_FALSE_LABEL (see header comment above).  */
10635 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10636 rtx drop_through_label = 0;
/* Missing labels mean "fall through"; synthesize one shared label.  */
10639 if (! if_true_label || ! if_false_label)
10640 drop_through_label = gen_label_rtx ();
10641 if (! if_true_label)
10642 if_true_label = drop_through_label;
10643 if (! if_false_label)
10644 if_false_label = drop_through_label;
10646 /* Compare a word at a time, high order first. */
10647 for (i = 0; i < nwords; i++)
10650 rtx op0_word, op1_word;
/* Subword index of the i-th most significant word depends on
   target word order.  */
10652 if (WORDS_BIG_ENDIAN)
10654 op0_word = operand_subword_force (op0, i, mode);
10655 op1_word = operand_subword_force (op1, i, mode);
10659 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10660 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10663 /* All but high-order word must be compared as unsigned. */
10664 comp = compare_from_rtx (op0_word, op1_word,
10665 (unsignedp || i > 0) ? GTU : GT,
10666 unsignedp, word_mode, NULL_RTX, 0);
/* Handle comparisons that folded to constants at compile time.  */
10667 if (comp == const_true_rtx)
10668 emit_jump (if_true_label);
10669 else if (comp != const0_rtx)
10670 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10672 /* Consider lower words only if these are equal. */
10673 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10675 if (comp == const_true_rtx)
10676 emit_jump (if_false_label);
10677 else if (comp != const0_rtx)
10678 do_jump_for_compare (comp, NULL_RTX, if_false_label);
/* Every word was equal, so "greater" is false.  */
10681 if (if_false_label)
10682 emit_jump (if_false_label);
10683 if (drop_through_label)
10684 emit_label (drop_through_label);
10687 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10688 with one insn, test the comparison and jump to the appropriate label. */
10691 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10693 rtx if_false_label, if_true_label;
/* Test an EQ_EXPR too wide for one compare insn: each pair of words
   must be equal for the whole comparison to be true.  A single unequal
   word jumps straight to IF_FALSE_LABEL.  */
10695 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10696 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10697 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10698 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10700 rtx drop_through_label = 0;
/* With no false label, fall through past the word tests on inequality.  */
10702 if (! if_false_label)
10703 drop_through_label = if_false_label = gen_label_rtx ();
10705 for (i = 0; i < nwords; i++)
/* Note the EQ code with swapped label roles: a word mismatch (EQ false
   would be NE true) sends control to if_false_label below.  */
10707 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
10708 operand_subword_force (op1, i, mode),
10709 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10710 word_mode, NULL_RTX, 0);
10711 if (comp == const_true_rtx)
10712 emit_jump (if_false_label);
10713 else if (comp != const0_rtx)
10714 do_jump_for_compare (comp, if_false_label, NULL_RTX);
/* All words matched: equality holds.  */
10718 emit_jump (if_true_label);
10719 if (drop_through_label)
10720 emit_label (drop_through_label);
10723 /* Jump according to whether OP0 is 0.
10724 We assume that OP0 has an integer mode that is too wide
10725 for the available compare insns. */
10728 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10730 rtx if_false_label, if_true_label;
/* Jump according to whether multiword OP0 is zero: compare each word
   against const0_rtx; any nonzero word goes to IF_FALSE_LABEL.
   Word compares are unsigned (the `1' argument) — sign is irrelevant
   for an equality-with-zero test.  */
10732 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10734 rtx drop_through_label = 0;
10736 if (! if_false_label)
10737 drop_through_label = if_false_label = gen_label_rtx ();
10739 for (i = 0; i < nwords; i++)
10741 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
10743 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
10744 if (comp == const_true_rtx)
10745 emit_jump (if_false_label);
10746 else if (comp != const0_rtx)
10747 do_jump_for_compare (comp, if_false_label, NULL_RTX);
/* Every word was zero, so OP0 == 0.  */
10751 emit_jump (if_true_label);
10752 if (drop_through_label)
10753 emit_label (drop_through_label);
10756 /* Given a comparison expression in rtl form, output conditional branches to
10757 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
10760 do_jump_for_compare (comparison, if_false_label, if_true_label)
10761 rtx comparison, if_false_label, if_true_label;
/* Emit the conditional branch(es) for COMPARISON, an rtx condition
   (already set up against cc0 by compare_from_rtx).  The branch
   generator table bcc_gen_fctn is indexed by the rtx comparison code.
   NOTE(review): this listing omits the if/else structure separating the
   true-label and false-label-only paths — visible lines only below.  */
10765 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10766 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
10770 if (if_false_label)
10771 emit_jump (if_false_label);
/* Only a false label was supplied; branch on the inverted condition.  */
10773 else if (if_false_label)
10776 rtx prev = get_last_insn ();
10779 /* Output the branch with the opposite condition. Then try to invert
10780 what is generated. If more than one insn is a branch, or if the
10781 branch is not the last insn written, abort. If we can't invert
10782 the branch, make a true label, redirect this jump to that,
10783 emit a jump to the false label and define the true label. */
10785 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10786 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
10790 /* Here we get the first insn that was just emitted. It used to be the
10791 case that, on some machines, emitting the branch would discard
10792 the previous compare insn and emit a replacement. This isn't
10793 done anymore, but abort if we see that PREV is deleted. */
10796 insn = get_insns ();
10797 else if (INSN_DELETED_P (prev))
10800 insn = NEXT_INSN (prev);
/* Find the jump insn just emitted; it must be the last insn.  */
10802 for (; insn; insn = NEXT_INSN (insn))
10803 if (GET_CODE (insn) == JUMP_INSN)
10810 if (branch != get_last_insn ())
10813 JUMP_LABEL (branch) = if_false_label;
/* If the branch condition can't be inverted in place, fall back to
   the redirect-plus-unconditional-jump sequence described above.  */
10814 if (! invert_jump (branch, if_false_label))
10816 if_true_label = gen_label_rtx ();
10817 redirect_jump (branch, if_true_label);
10818 emit_jump (if_false_label);
10819 emit_label (if_true_label);
10824 /* Generate code for a comparison expression EXP
10825 (including code to compute the values to be compared)
10826 and set (CC0) according to the result.
10827 SIGNED_CODE should be the rtx operation for this comparison for
10828 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10830 We force a stack adjustment unless there are currently
10831 things pushed on the stack that aren't yet used. */
10834 compare (exp, signed_code, unsigned_code)
10836 enum rtx_code signed_code, unsigned_code;
/* Expand both operands of comparison EXP and emit the compare insn.
   Chooses SIGNED_CODE or UNSIGNED_CODE from the operand type's
   signedness, then delegates to compare_from_rtx.  */
10839 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10841 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10842 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
10843 register enum machine_mode mode = TYPE_MODE (type);
10844 int unsignedp = TREE_UNSIGNED (type);
10845 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
10847 #ifdef HAVE_canonicalize_funcptr_for_compare
10848 /* If function pointers need to be "canonicalized" before they can
10849 be reliably compared, then canonicalize them. */
10850 if (HAVE_canonicalize_funcptr_for_compare
10851 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10852 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10855 rtx new_op0 = gen_reg_rtx (mode);
10857 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
/* Likewise canonicalize the second operand if it is a function pointer.  */
10861 if (HAVE_canonicalize_funcptr_for_compare
10862 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10863 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10866 rtx new_op1 = gen_reg_rtx (mode);
10868 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
/* Pass the operand size for BLKmode comparisons, and the type's
   alignment in bytes.  */
10873 return compare_from_rtx (op0, op1, code, unsignedp, mode,
10875 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10876 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
10879 /* Like compare but expects the values to compare as two rtx's.
10880 The decision as to signed or unsigned comparison must be made by the caller.
10882 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10885 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10886 size of MODE should be used. */
10889 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10890 register rtx op0, op1;
10891 enum rtx_code code;
10893 enum machine_mode mode;
/* Emit a compare of OP0 against OP1 (already rtl) and return an rtx
   comparison against cc0 that do_jump_for_compare can branch on.
   If the comparison folds to a constant, returns const0_rtx or
   const_true_rtx instead of emitting anything.  */
10899 /* If one operand is constant, make it the second one. Only do this
10900 if the other operand is not constant as well. */
10902 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10903 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
/* Swapping operands requires swapping the comparison code too.  */
10908 code = swap_condition (code);
10911 if (flag_force_mem)
10913 op0 = force_not_mem (op0);
10914 op1 = force_not_mem (op1);
10917 do_pending_stack_adjust ();
/* Two constant operands: fold the whole comparison now.  */
10919 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10920 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10924 /* There's no need to do this now that combine.c can eliminate lots of
10925 sign extensions. This can be less efficient in certain cases on other
10928 /* If this is a signed equality comparison, we can do it as an
10929 unsigned comparison since zero-extension is cheaper than sign
10930 extension and comparisons with zero are done as unsigned. This is
10931 the case even on machines that can do fast sign extension, since
10932 zero-extension is easier to combine with other operations than
10933 sign-extension is. If we are comparing against a constant, we must
10934 convert it to what it would look like unsigned. */
10935 if ((code == EQ || code == NE) && ! unsignedp
10936 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
/* Mask the constant to the operand's width so the unsigned compare
   sees the same bit pattern.  */
10938 if (GET_CODE (op1) == CONST_INT
10939 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10940 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10945 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
/* The result is a condition rtx testing cc0 against zero.  */
10947 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
10950 /* Generate code to calculate EXP using a store-flag instruction
10951 and return an rtx for the result. EXP is either a comparison
10952 or a TRUTH_NOT_EXPR whose operand is a comparison.
10954 If TARGET is nonzero, store the result there if convenient.
10956 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10959 Return zero if there is no suitable set-flag instruction
10960 available on this machine.
10962 Once expand_expr has been called on the arguments of the comparison,
10963 we are committed to doing the store flag, since it is not safe to
10964 re-evaluate the expression. We emit the store-flag insn by calling
10965 emit_store_flag, but only expand the arguments if we have a reason
10966 to believe that emit_store_flag will be successful. If we think that
10967 it will, but it isn't, we have to simulate the store-flag with a
10968 set/jump/set sequence. */
10971 do_store_flag (exp, target, mode, only_cheap)
10974 enum machine_mode mode;
/* Materialize the truth value of comparison EXP (0 or 1) in MODE,
   preferably via a store-flag (scc) insn; returns the result rtx, or
   zero if no suitable insn exists.  See the header comment above for
   the TARGET and ONLY_CHEAP contracts.  */
10977 enum rtx_code code;
10978 tree arg0, arg1, type;
10980 enum machine_mode operand_mode;
10984 enum insn_code icode;
10985 rtx subtarget = target;
10986 rtx result, label, pattern, jump_pat;
10988 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10989 result at the end. We can't simply invert the test since it would
10990 have already been inverted if it were valid. This case occurs for
10991 some floating-point comparisons. */
10993 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10994 invert = 1, exp = TREE_OPERAND (exp, 0);
10996 arg0 = TREE_OPERAND (exp, 0);
10997 arg1 = TREE_OPERAND (exp, 1);
10998 type = TREE_TYPE (arg0);
10999 operand_mode = TYPE_MODE (type);
11000 unsignedp = TREE_UNSIGNED (type);
11002 /* We won't bother with BLKmode store-flag operations because it would mean
11003 passing a lot of information to emit_store_flag. */
11004 if (operand_mode == BLKmode)
11007 /* We won't bother with store-flag operations involving function pointers
11008 when function pointers must be canonicalized before comparisons. */
11009 #ifdef HAVE_canonicalize_funcptr_for_compare
11010 if (HAVE_canonicalize_funcptr_for_compare
11011 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
11012 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
11014 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
11015 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
11016 == FUNCTION_TYPE))))
11023 /* Get the rtx comparison code to use. We know that EXP is a comparison
11024 operation of some type. Some comparisons against 1 and -1 can be
11025 converted to comparisons with zero. Do so here so that the tests
11026 below will be aware that we have a comparison with zero. These
11027 tests will not catch constants in the first operand, but constants
11028 are rarely passed as the first operand. */
/* NOTE(review): the switch's case labels are not visible in this listing;
   each pair below rewrites one tree comparison code (LT/LE/GT/GE
   against 1 or -1) into a comparison with zero where possible.  */
11030 switch (TREE_CODE (exp))
11039 if (integer_onep (arg1))
11040 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
11042 code = unsignedp ? LTU : LT;
11045 if (! unsignedp && integer_all_onesp (arg1))
11046 arg1 = integer_zero_node, code = LT;
11048 code = unsignedp ? LEU : LE;
11051 if (! unsignedp && integer_all_onesp (arg1))
11052 arg1 = integer_zero_node, code = GE;
11054 code = unsignedp ? GTU : GT;
11057 if (integer_onep (arg1))
11058 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
11060 code = unsignedp ? GEU : GE;
11066 /* Put a constant second. */
11067 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
11069 tem = arg0; arg0 = arg1; arg1 = tem;
11070 code = swap_condition (code);
11073 /* If this is an equality or inequality test of a single bit, we can
11074 do this by shifting the bit being tested to the low-order bit and
11075 masking the result with the constant 1. If the condition was EQ,
11076 we xor it with 1. This does not require an scc insn and is faster
11077 than an scc insn even if we have it. */
11079 if ((code == NE || code == EQ)
11080 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
11081 && integer_pow2p (TREE_OPERAND (arg0, 1))
11082 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
11084 tree inner = TREE_OPERAND (arg0, 0);
11089 tem = INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
11090 NULL_RTX, VOIDmode, 0));
11091 /* In this case, immed_double_const will sign extend the value to make
11092 it look the same on the host and target. We must remove the
11093 sign-extension before calling exact_log2, since exact_log2 will
11094 fail for negative values. */
11095 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT
11096 && BITS_PER_WORD == GET_MODE_BITSIZE (TYPE_MODE (type)))
11097 /* We don't use the obvious constant shift to generate the mask,
11098 because that generates compiler warnings when BITS_PER_WORD is
11099 greater than or equal to HOST_BITS_PER_WIDE_INT, even though this
11100 code is unreachable in that case. */
11101 tem = tem & GET_MODE_MASK (word_mode);
11102 bitnum = exact_log2 (tem);
11104 /* If INNER is a right shift of a constant and it plus BITNUM does
11105 not overflow, adjust BITNUM and INNER. */
11107 if (TREE_CODE (inner) == RSHIFT_EXPR
11108 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
11109 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
11110 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
11111 < TYPE_PRECISION (type)))
11113 bitnum +=TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
11114 inner = TREE_OPERAND (inner, 0);
11117 /* If we are going to be able to omit the AND below, we must do our
11118 operations as unsigned. If we must use the AND, we have a choice.
11119 Normally unsigned is faster, but for some machines signed is. */
11120 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
11121 #ifdef LOAD_EXTEND_OP
11122 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
/* Reuse SUBTARGET only when it is a register of the right mode and
   not referenced by INNER (safe_from_p).  */
11128 if (subtarget == 0 || GET_CODE (subtarget) != REG
11129 || GET_MODE (subtarget) != operand_mode
11130 || ! safe_from_p (subtarget, inner))
11133 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
/* Shift the tested bit down to bit 0.  */
11136 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
11137 size_int (bitnum), subtarget, ops_unsignedp);
11139 if (GET_MODE (op0) != mode)
11140 op0 = convert_to_mode (mode, op0, ops_unsignedp);
/* EQ (bit clear) means the low bit must be flipped, as does an
   inverted NE.  */
11142 if ((code == EQ && ! invert) || (code == NE && invert))
11143 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
11144 ops_unsignedp, OPTAB_LIB_WIDEN);
11146 /* Put the AND last so it can combine with more things. */
11147 if (bitnum != TYPE_PRECISION (type) - 1)
11148 op0 = expand_and (op0, const1_rtx, subtarget);
11153 /* Now see if we are likely to be able to do this. Return if not. */
11154 if (! can_compare_p (operand_mode))
11156 icode = setcc_gen_code[(int) code];
11157 if (icode == CODE_FOR_nothing
11158 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
11160 /* We can only do this if it is one of the special cases that
11161 can be handled without an scc insn. */
11162 if ((code == LT && integer_zerop (arg1))
11163 || (! only_cheap && code == GE && integer_zerop (arg1)))
11165 else if (BRANCH_COST >= 0
11166 && ! only_cheap && (code == NE || code == EQ)
11167 && TREE_CODE (type) != REAL_TYPE
11168 && ((abs_optab->handlers[(int) operand_mode].insn_code
11169 != CODE_FOR_nothing)
11170 || (ffs_optab->handlers[(int) operand_mode].insn_code
11171 != CODE_FOR_nothing)))
11177 preexpand_calls (exp);
11178 if (subtarget == 0 || GET_CODE (subtarget) != REG
11179 || GET_MODE (subtarget) != operand_mode
11180 || ! safe_from_p (subtarget, arg1))
11183 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
11184 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
11187 target = gen_reg_rtx (mode);
11189 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
11190 because, if the emit_store_flag does anything it will succeed and
11191 OP0 and OP1 will not be used subsequently. */
11193 result = emit_store_flag (target, code,
11194 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
11195 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
11196 operand_mode, unsignedp, 1);
/* Invert a successful scc result with XOR 1 if required.  */
11201 result = expand_binop (mode, xor_optab, result, const1_rtx,
11202 result, 0, OPTAB_LIB_WIDEN);
11206 /* If this failed, we have to do this with set/compare/jump/set code. */
11207 if (GET_CODE (target) != REG
11208 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
11209 target = gen_reg_rtx (GET_MODE (target));
11211 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
11212 result = compare_from_rtx (op0, op1, code, unsignedp,
11213 operand_mode, NULL_RTX, 0);
/* The comparison may fold to a constant at compile time.  */
11214 if (GET_CODE (result) == CONST_INT)
11215 return (((result == const0_rtx && ! invert)
11216 || (result != const0_rtx && invert))
11217 ? const0_rtx : const1_rtx);
11219 label = gen_label_rtx ();
11220 if (bcc_gen_fctn[(int) code] == 0)
/* Branch over the move that stores the "false" value.  */
11223 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
11224 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
11225 emit_label (label);
11230 /* Generate a tablejump instruction (used for switch statements). */
11232 #ifdef HAVE_tablejump
11234 /* INDEX is the value being switched on, with the lowest value
11235 in the table already subtracted.
11236 MODE is its expected mode (needed if INDEX is constant).
11237 RANGE is the length of the jump table.
11238 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11240 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11241 index value is out of range. */
11244 do_tablejump (index, mode, range, table_label, default_label)
11245 rtx index, range, table_label, default_label;
11246 enum machine_mode mode;
/* Emit a tablejump for a switch: range-check INDEX (already rebased to
   the table's lower bound), load the dispatch entry from the table at
   TABLE_LABEL, and jump through it.  */
11248 register rtx temp, vector;
11250 /* Do an unsigned comparison (in the proper mode) between the index
11251 expression and the value which represents the length of the range.
11252 Since we just finished subtracting the lower bound of the range
11253 from the index expression, this comparison allows us to simultaneously
11254 check that the original index expression value is both greater than
11255 or equal to the minimum value of the range and less than or equal to
11256 the maximum value of the range. */
11258 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
11259 emit_jump_insn (gen_bgtu (default_label));
11261 /* If index is in range, it must fit in Pmode.
11262 Convert to Pmode so we can index with it. */
11264 index = convert_to_mode (Pmode, index, 1);
11266 /* Don't let a MEM slip thru, because then INDEX that comes
11267 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11268 and break_out_memory_refs will go to work on it and mess it up. */
11269 #ifdef PIC_CASE_VECTOR_ADDRESS
11270 if (flag_pic && GET_CODE (index) != REG)
11271 index = copy_to_mode_reg (Pmode, index);
11274 /* If flag_force_addr were to affect this address
11275 it could interfere with the tricky assumptions made
11276 about addresses that contain label-refs,
11277 which may be valid only very near the tablejump itself. */
11278 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11279 GET_MODE_SIZE, because this indicates how large insns are. The other
11280 uses should all be Pmode, because they are addresses. This code
11281 could fail if addresses and insns are not the same size. */
/* Compute &table[index]: table_label + index * entry_size.  */
11282 index = gen_rtx (PLUS, Pmode,
11283 gen_rtx (MULT, Pmode, index,
11284 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
11285 gen_rtx (LABEL_REF, Pmode, table_label));
11286 #ifdef PIC_CASE_VECTOR_ADDRESS
11288 index = PIC_CASE_VECTOR_ADDRESS (index);
11291 index = memory_address_noforce (CASE_VECTOR_MODE, index);
11292 temp = gen_reg_rtx (CASE_VECTOR_MODE);
/* The jump table is read-only; mark the MEM unchanging so CSE may
   reuse loads from it.  */
11293 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
11294 RTX_UNCHANGING_P (vector) = 1;
11295 convert_move (temp, vector, 0);
11297 emit_jump_insn (gen_tablejump (temp, table_label));
11299 #ifndef CASE_VECTOR_PC_RELATIVE
11300 /* If we are generating PIC code or if the table is PC-relative, the
11301 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11307 #endif /* HAVE_tablejump */
11310 /* Emit a suitable bytecode to load a value from memory, assuming a pointer
11311 to that value is on the top of the stack. The resulting type is TYPE, and
11312 the source declaration is DECL. */
11315 bc_load_memory (type, decl)
/* Bytecode back end: emit the load opcode for TYPE, assuming the address
   to load from is on top of the bytecode stack (see header comment).  */
11318 enum bytecode_opcode opcode;
11321 /* Bit fields are special. We only know about signed and
11322 unsigned ints, and enums. The latter are treated as
11323 signed integers. */
11325 if (DECL_BIT_FIELD (decl))
11326 if (TREE_CODE (type) == ENUMERAL_TYPE
11327 || TREE_CODE (type) == INTEGER_TYPE)
11328 opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
11332 /* See corresponding comment in bc_store_memory(). */
11333 if (TYPE_MODE (type) == BLKmode
11334 || TYPE_MODE (type) == VOIDmode)
/* Ordinary scalar: map the machine mode directly to a load opcode.  */
11337 opcode = mode_to_load_map [(int) TYPE_MODE (type)];
/* neverneverland marks modes with no bytecode load — an internal error.  */
11339 if (opcode == neverneverland)
11342 bc_emit_bytecode (opcode);
11344 #ifdef DEBUG_PRINT_CODE
11345 fputc ('\n', stderr);
11350 /* Store the contents of the second stack slot to the address in the
11351 top stack slot. DECL is the declaration of the destination and is used
11352 to determine whether we're dealing with a bitfield. */
11355 bc_store_memory (type, decl)
/* Bytecode back end: emit the store opcode writing the second stack slot
   to the address in the top slot (see header comment).  */
11358 enum bytecode_opcode opcode;
/* Bit-field stores are only supported for integer/enum types.  */
11361 if (DECL_BIT_FIELD (decl))
11363 if (TREE_CODE (type) == ENUMERAL_TYPE
11364 || TREE_CODE (type) == INTEGER_TYPE)
11370 if (TYPE_MODE (type) == BLKmode)
11372 /* Copy structure. This expands to a block copy instruction, storeBLK.
11373 In addition to the arguments expected by the other store instructions,
11374 it also expects a type size (SImode) on top of the stack, which is the
11375 structure size in size units (usually bytes). The two first arguments
11376 are already on the stack; so we just put the size on level 1. For some
11377 other languages, the size may be variable, this is why we don't encode
11378 it as a storeBLK literal, but rather treat it as a full-fledged expression. */
11380 bc_expand_expr (TYPE_SIZE (type));
/* Ordinary scalar: map the machine mode directly to a store opcode.  */
11384 opcode = mode_to_store_map [(int) TYPE_MODE (type)];
11386 if (opcode == neverneverland)
11389 bc_emit_bytecode (opcode);
11391 #ifdef DEBUG_PRINT_CODE
11392 fputc ('\n', stderr);
11397 /* Allocate local stack space sufficient to hold a value of the given
11398 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
11399 integral power of 2. A special case is locals of type VOID, which
11400 have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
11401 remapped into the corresponding attribute of SI. */
11404 bc_allocate_local (size, alignment)
11405 int size, alignment;
/* Reserve SIZE bytes of bytecode local-variable space aligned to
   ALIGNMENT bits; returns an rtx naming the allocated offset.
   Void-like sizes/alignments are remapped to SImode's (see header).  */
11408 int byte_alignment;
11413 /* Normalize size and alignment */
11415 size = UNITS_PER_WORD;
11417 if (alignment < BITS_PER_UNIT)
11418 byte_alignment = 1 << (INT_ALIGN - 1);
11421 byte_alignment = alignment / BITS_PER_UNIT;
/* Round the running local-area size up to the required alignment.  */
11423 if (local_vars_size & (byte_alignment - 1))
11424 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
11426 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
11427 local_vars_size += size;
11433 /* Allocate variable-sized local array. Variable-sized arrays are
11434 actually pointers to the address in memory where they are stored. */
11437 bc_allocate_variable_array (size)
/* Allocate local space for a variable-sized array: only a pointer is
   reserved here; the array storage itself lives elsewhere (see header).  */
11441 const int ptralign = (1 << (PTR_ALIGN - 1));
11443 /* Align pointer */
11444 if (local_vars_size & ptralign)
11445 local_vars_size += ptralign - (local_vars_size & ptralign);
11447 /* Note down local space needed: pointer to block; also return
/* Reserve exactly one pointer's worth of local space.  */
11450 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
11451 local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
11456 /* Push the machine address for the given external variable offset. */
11459 bc_load_externaddr (externaddr)
/* Push the machine address of external variable EXTERNADDR: emit a
   constP opcode followed by a label reference plus offset.  */
11462 bc_emit_bytecode (constP);
11463 bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
11464 BYTECODE_BC_LABEL (externaddr)->offset);
11466 #ifdef DEBUG_PRINT_CODE
11467 fputc ('\n', stderr);
11472 /* Like above, but expects an IDENTIFIER. */
11475 bc_load_externaddr_id (id, offset)
/* Like bc_load_externaddr but takes an IDENTIFIER node plus explicit
   OFFSET; the identifier's name string is duplicated for the labelref.  */
11479 if (!IDENTIFIER_POINTER (id))
11482 bc_emit_bytecode (constP);
11483 bc_emit_code_labelref (xstrdup (IDENTIFIER_POINTER (id)), offset);
11485 #ifdef DEBUG_PRINT_CODE
11486 fputc ('\n', stderr);
11491 /* Push the machine address for the given local variable offset. */
11494 bc_load_localaddr (localaddr)
/* Push the machine address of the local variable at LOCALADDR's
   recorded frame offset.  */
11497 bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
11501 /* Push the machine address for the given parameter offset.
11502 NOTE: offset is in bits. */
11505 bc_load_parmaddr (parmaddr)
/* Push the machine address of the parameter at PARMADDR's recorded
   offset.  NOTE: per the header comment, the offset is in bits.  */
11508 bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
11513 /* Convert a[i] into *(a + i). */
11516 bc_canonicalize_array_ref (exp)
/* Rewrite the ARRAY_REF a[i] as the equivalent *(a + i*sizeof(elt))
   tree, which the bytecode expander can handle uniformly.  */
11519 tree type = TREE_TYPE (exp);
11520 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
11521 TREE_OPERAND (exp, 0));
11522 tree index = TREE_OPERAND (exp, 1);
11525 /* Convert the integer argument to a type the same size as a pointer
11526 so the multiply won't overflow spuriously. */
11528 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
11529 index = convert (type_for_size (POINTER_SIZE, 0), index);
11531 /* The array address isn't volatile even if the array is.
11532 (Of course this isn't terribly relevant since the bytecode
11533 translator treats nearly everything as volatile anyway.) */
11534 TREE_THIS_VOLATILE (array_adr) = 0;
/* Build *(array_adr + index * size_in_bytes (type)), folding constants.  */
11536 return build1 (INDIRECT_REF, type,
11537 fold (build (PLUS_EXPR,
11538 TYPE_POINTER_TO (type),
11540 fold (build (MULT_EXPR,
11541 TYPE_POINTER_TO (type),
11543 size_in_bytes (type))))));
11547 /* Load the address of the component referenced by the given
11548 COMPONENT_REF expression.
11550 Returns innermost lvalue. */
11553 bc_expand_component_address (exp)
11557 enum machine_mode mode;
11559 HOST_WIDE_INT SIval;
11562 tem = TREE_OPERAND (exp, 1);
11563 mode = DECL_MODE (tem);
11566 /* Compute cumulative bit offset for nested component refs
11567 and array refs, and find the ultimate containing object. */
/* NOTE(review): the loop's termination `else break;` arm appears to be
   on an elided line; as shown, the loop body is incomplete -- confirm
   against the original file.  */
11569 for (tem = exp;; tem = TREE_OPERAND (tem, 0))
11571 if (TREE_CODE (tem) == COMPONENT_REF)
11572 bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
/* Constant-index array refs inside the chain also contribute a
   statically known bit offset.  */
11574 if (TREE_CODE (tem) == ARRAY_REF
11575 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11576 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
11578 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
11579 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
11580 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
/* Push the address of the ultimate containing object.  */
11585 bc_expand_expr (tem);
11588 /* For bitfields also push their offset and size */
11589 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
/* NOTE(review): the second argument here is a tree node, but
   bc_push_offset_and_size takes a HOST_WIDE_INT size; the original
   likely reads TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 1))) and
   the inner text was elided -- confirm against the original file.  */
11590 bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1)));
/* Intentional assignment-in-condition: only emit the pointer adjust
   when the byte offset is nonzero.  */
11592 if (SIval = bitpos / BITS_PER_UNIT)
11593 bc_emit_instruction (addconstPSI, SIval);
11595 return (TREE_OPERAND (exp, 1));
11599 /* Emit code to push two SI constants */
11602 bc_push_offset_and_size (offset, size)
11603 HOST_WIDE_INT offset, size;
/* Offset is pushed first, then size; bit-field load/store opcodes
   in this file (sstoreBI, zxloadBI, sxloadBI) expect that order.  */
11605 bc_emit_instruction (constSI, offset);
11606 bc_emit_instruction (constSI, size);
11610 /* Emit byte code to push the address of the given lvalue expression to
11611 the stack. If it's a bit field, we also push offset and size info.
11613 Returns innermost component, which allows us to determine not only
11614 its type, but also whether it's a bitfield. */
11617 bc_expand_address (exp)
/* Null or erroneous trees: nothing to expand.  NOTE(review): the
   consequent of this guard is on an elided line -- confirm.  */
11621 if (!exp || TREE_CODE (exp) == ERROR_MARK)
/* NOTE(review): most `case` labels of this switch are elided in this
   view; the comments below mark where each case evidently begins.  */
11625 switch (TREE_CODE (exp))
/* (ARRAY_REF case, label elided.)  Rewrite a[i] as *(a + i) and retry.  */
11629 return (bc_expand_address (bc_canonicalize_array_ref (exp)));
11631 case COMPONENT_REF:
11633 return (bc_expand_component_address (exp));
/* (INDIRECT_REF case, label elided.)  The operand is already a pointer
   value; evaluating it leaves the address on the stack.  */
11637 bc_expand_expr (TREE_OPERAND (exp, 0));
11639 /* For variable-sized types: retrieve pointer. Sometimes the
11640 TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
11641 also make sure we have an operand, just in case... */
11643 if (TREE_OPERAND (exp, 0)
11644 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
11645 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
11646 bc_emit_instruction (loadP);
11648 /* If packed, also return offset and size */
11649 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
11651 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
11652 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
11654 return (TREE_OPERAND (exp, 0));
11656 case FUNCTION_DECL:
/* A function's address is an external label reference.  */
11658 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
11659 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
/* (PARM_DECL case, label elided.)  Parameters live at argP offsets.  */
11664 bc_load_parmaddr (DECL_RTL (exp));
11666 /* For variable-sized types: retrieve pointer */
11667 if (TYPE_SIZE (TREE_TYPE (exp))
11668 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11669 bc_emit_instruction (loadP);
11671 /* If packed, also return offset and size */
11672 if (DECL_BIT_FIELD (exp))
11673 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11674 TREE_INT_CST_LOW (DECL_SIZE (exp)));
/* (RESULT_DECL case, label elided.)  */
11680 bc_emit_instruction (returnP);
/* (VAR_DECL case, label elided.)  Statics/globals with a label get an
   external address; true externals are referenced by assembler name;
   otherwise the variable is a local at a frame offset.  */
11686 if (BYTECODE_LABEL (DECL_RTL (exp)))
11687 bc_load_externaddr (DECL_RTL (exp));
11690 if (DECL_EXTERNAL (exp))
11691 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
11692 (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
11694 bc_load_localaddr (DECL_RTL (exp));
11696 /* For variable-sized types: retrieve pointer */
11697 if (TYPE_SIZE (TREE_TYPE (exp))
11698 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11699 bc_emit_instruction (loadP);
11701 /* If packed, also return offset and size */
11702 if (DECL_BIT_FIELD (exp))
11703 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11704 TREE_INT_CST_LOW (DECL_SIZE (exp)));
/* (STRING_CST/constant case, label elided.)  Emit the constant into the
   constant pool and push a label reference to it.  */
11712 bc_emit_bytecode (constP);
11713 r = output_constant_def (exp);
11714 bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
11716 #ifdef DEBUG_PRINT_CODE
/* Keep the debug disassembly one instruction per line.  */
11717 fputc ('\n', stderr);
11728 /* Most lvalues don't have components. */
11733 /* Emit a type code to be used by the runtime support in handling
11734 parameter passing. The type code consists of the machine mode
11735 plus the minimal alignment shifted left 8 bits. */
11738 bc_runtime_type_code (type)
/* NOTE(review): several case labels preceding ENUMERAL_TYPE (likely the
   scalar and aggregate type codes) and the switch's default/abort path
   are elided in this view -- confirm against the original file.  */
11743 switch (TREE_CODE (type))
11749 case ENUMERAL_TYPE:
/* Pack mode in the low byte, minimal alignment above it.  */
11753 val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
/* Return the packed code as an INTEGER_CST tree.  */
11765 return build_int_2 (val, 0);
11769 /* Generate constructor label */
11772 bc_gen_constr_label ()
/* Monotonically increasing counter makes each label unique within a
   translation unit.  The static buffer is reused on every call, so the
   result must be (and is) copied before the next call.  */
11774 static int label_counter;
11775 static char label[20];
11777 sprintf (label, "*LR%d", label_counter++);
/* Copy to the permanent obstack (NUL-terminated) so the name outlives
   this call.  */
11779 return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
11783 /* Evaluate constructor CONSTR and return pointer to it on level one. We
11784 expand the constructor data as static data, and push a pointer to it.
11785 The pointer is put in the pointer table and is retrieved by a constP
11786 bytecode instruction. We then loop and store each constructor member in
11787 the corresponding component. Finally, we return the original pointer on
11791 bc_expand_constructor (constr)
11795 HOST_WIDE_INT ptroffs;
11799 /* Literal constructors are handled as constants, whereas
11800 non-literals are evaluated and stored element by element
11801 into the data segment. */
11803 /* Allocate space in proper segment and push pointer to space on stack.
11806 l = bc_gen_constr_label ();
/* Fully constant constructors are emitted once into the read-only
   constant segment; otherwise only zeroed space is reserved in the data
   segment and elements are stored at run time below.  */
11808 if (TREE_CONSTANT (constr))
11812 bc_emit_const_labeldef (l);
11813 bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
11819 bc_emit_data_labeldef (l);
11820 bc_output_data_constructor (constr);
11824 /* Add reference to pointer table and recall pointer to stack;
11825 this code is common for both types of constructors: literals
11826 and non-literals. */
11828 ptroffs = bc_define_pointer (l);
11829 bc_emit_instruction (constP, ptroffs);
11831 /* This is all that has to be done if it's a literal. */
11832 if (TREE_CONSTANT (constr))
11836 /* At this point, we have the pointer to the structure on top of the stack.
11837 Generate sequences of store_memory calls for the constructor. */
11839 /* constructor type is structure */
11840 if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
11844 /* If the constructor has fewer fields than the structure,
11845 clear the whole structure first. */
11847 if (list_length (CONSTRUCTOR_ELTS (constr))
11848 != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
/* duplicate keeps the base pointer on the stack for the element
   stores that follow; clearBLK consumes pointer and size.  */
11850 bc_emit_instruction (duplicate);
11851 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11852 bc_emit_instruction (clearBLK);
11855 /* Store each element of the constructor into the corresponding
11856 field of TARGET. */
11858 for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
11860 register tree field = TREE_PURPOSE (elt);
11861 register enum machine_mode mode;
/* Field geometry in bits; bc_store_field does the addressing.  */
11866 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
11867 mode = DECL_MODE (field);
11868 unsignedp = TREE_UNSIGNED (field);
11870 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
11872 bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11873 /* The alignment of TARGET is
11874 at least what its type requires. */
11876 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11877 int_size_in_bytes (TREE_TYPE (constr)));
11882 /* Constructor type is array */
11883 if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
11887 tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
11888 int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
11889 int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
11890 tree elttype = TREE_TYPE (TREE_TYPE (constr));
11892 /* If the constructor has fewer fields than the structure,
11893 clear the whole structure first. */
11895 if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
11897 bc_emit_instruction (duplicate);
11898 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11899 bc_emit_instruction (clearBLK);
11903 /* Store each element of the constructor into the corresponding
11904 element of TARGET, determined by counting the elements. */
/* NOTE(review): the loop's termination condition is on an elided line
   (presumably `elt;`) -- confirm against the original file.  */
11906 for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
11908 elt = TREE_CHAIN (elt), i++)
11910 register enum machine_mode mode;
11915 mode = TYPE_MODE (elttype);
11916 bitsize = GET_MODE_BITSIZE (mode);
11917 unsignedp = TREE_UNSIGNED (elttype);
/* Element bit position is simply index * element size.  */
11919 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
11920 /* * TYPE_SIZE_UNIT (elttype) */ );
11922 bc_store_field (elt, bitsize, bitpos, mode,
11923 TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11924 /* The alignment of TARGET is
11925 at least what its type requires. */
11927 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11928 int_size_in_bytes (TREE_TYPE (constr)));
11935 /* Store the value of EXP (an expression tree) into member FIELD of
11936 structure at address on stack, which has type TYPE, mode MODE and
11937 occupies BITSIZE bits, starting BITPOS bits from the beginning of the
11940 ALIGN is the alignment that TARGET is known to have, measured in bytes.
11941 TOTAL_SIZE is its size in bytes, or -1 if variable. */
11944 bc_store_field (field, bitsize, bitpos, mode, exp, type,
11945 value_mode, unsignedp, align, total_size)
/* NOTE(review): declarations for unsignedp, align and total_size are on
   elided lines -- confirm against the original file.  */
11946 int bitsize, bitpos;
11947 enum machine_mode mode;
11948 tree field, exp, type;
11949 enum machine_mode value_mode;
11955 /* Expand expression and copy pointer */
/* Stack on entry: ... target-ptr.  After expanding EXP and `over`:
   ... target-ptr value target-ptr, ready for the store opcode.  */
11956 bc_expand_expr (exp);
11957 bc_emit_instruction (over);
11960 /* If the component is a bit field, we cannot use addressing to access
11961 it. Use bit-field techniques to store in it. */
11963 if (DECL_BIT_FIELD (field))
11965 bc_store_bit_field (bitpos, bitsize, unsignedp);
11969 /* Not bit field */
/* Byte-addressable member: advance the pointer and store by type.  */
11971 HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;
11973 /* Advance pointer to the desired member */
11975 bc_emit_instruction (addconstPSI, offset);
11978 bc_store_memory (type, field);
11983 /* Store SI/SU in bitfield */
11986 bc_store_bit_field (offset, size, unsignedp)
11987 int offset, size, unsignedp;
11989 /* Push bitfield offset and size */
11990 bc_push_offset_and_size (offset, size);
/* NOTE(review): unsignedp is not used in the visible body (stores don't
   need extension); kept for symmetry with bc_load_bit_field.  */
11993 bc_emit_instruction (sstoreBI);
11997 /* Load SI/SU from bitfield */
12000 bc_load_bit_field (offset, size, unsignedp)
12001 int offset, size, unsignedp;
12003 /* Push bitfield offset and size */
12004 bc_push_offset_and_size (offset, size);
12006 /* Load: sign-extend if signed, else zero-extend */
/* zx = zero-extend (unsigned field), sx = sign-extend (signed field).  */
12007 bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
12011 /* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
12012 (adjust stack pointer upwards), negative means add that number of
12013 levels (adjust the stack pointer downwards). Only positive values
12014 normally make sense. */
12017 bc_adjust_stack (nlevels)
12026 bc_emit_instruction (drop);
12029 bc_emit_instruction (drop);
12034 bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
12035 stack_depth -= nlevels;
12038 #if defined (VALIDATE_STACK_FOR_BC)
12039 VALIDATE_STACK_FOR_BC ();