/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92, 93, 94, 95, 96, 1997 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
#include "typeclass.h"
#include "bc-opcode.h"
#include "bc-typecd.h"
#define CEIL(x,y) (((x) + (y) - 1) / (y))
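/* For example, CEIL (7, 4) evaluates to 2: dividing seven bytes into
   four-byte words needs two words, the second only partly filled.  */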
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
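/* E.g., with STACK_BOUNDARY == 64 and BITS_PER_UNIT == 8,
   STACK_BYTES is 8, so stack sizes are rounded to 8-byte multiples.  */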
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;
/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to, to_addr, from, from_addr;
  int autinc_to, explicit_inc_to, to_struct;
  int autinc_from, explicit_inc_from, from_struct;
  int len, offset, reverse;
};

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to, to_addr;
  int autinc_to, explicit_inc_to, to_struct;
  int len, offset, reverse;
};
/* Used to generate bytecodes: keep track of size of local variables,
   as well as depth of arithmetic stack.  (Notice that variables are
   stored on the machine's stack, not the arithmetic stack.)  */

extern int local_vars_size;
extern int stack_depth;
extern int max_stack_depth;
extern struct obstack permanent_obstack;
extern rtx arg_pointer_save_area;
static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue PROTO((void));
static void move_by_pieces PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
				    struct move_by_pieces *));
static void clear_by_pieces PROTO((rtx, int, int));
static void clear_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
				     struct clear_by_pieces *));
static int is_zeros_p PROTO((tree));
static int mostly_zeros_p PROTO((tree));
static void store_constructor PROTO((tree, rtx, int));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
			      enum machine_mode, int, int, int));
static int get_inner_unaligned_p PROTO((tree));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree));
static int fixed_type_p PROTO((tree));
static rtx var_rtx PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx,
				 enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int, int));
void bc_expand_increment PROTO((struct increment_operator *, tree));
rtx bc_allocate_local PROTO((int, int));
void bc_store_memory PROTO((tree, tree));
tree bc_expand_component_address PROTO((tree));
tree bc_expand_address PROTO((tree));
void bc_expand_constructor PROTO((tree));
void bc_adjust_stack PROTO((int));
tree bc_canonicalize_array_ref PROTO((tree));
void bc_load_memory PROTO((tree, tree));
void bc_load_externaddr PROTO((rtx));
void bc_load_externaddr_id PROTO((tree, int));
void bc_load_localaddr PROTO((rtx));
void bc_load_parmaddr PROTO((rtx));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
static tree defer_cleanups_to PROTO((tree));
extern tree truthvalue_conversion PROTO((tree));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];
/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];
/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif
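/* On a register-window target such as the SPARC, the backend defines
   these macros to translate between the caller's and the callee's view
   of a register; the identity mappings above cover all other targets.  */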
/* Maps used to convert modes to const, load, and store bytecodes.  */
enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
/* Initialize maps used to convert modes to const, load, and store
   bytecodes.  */

void
bc_init_mode_to_opcode_maps ()
{
  int mode;

  for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
    mode_to_const_map[mode] =
      mode_to_load_map[mode] =
	mode_to_store_map[mode] = neverneverland;

#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
  mode_to_const_map[(int) SYM] = CONST; \
  mode_to_load_map[(int) SYM] = LOAD; \
  mode_to_store_map[(int) SYM] = STORE;

#include "modemap.def"
#undef DEF_MODEMAP
}
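/* Each DEF_MODEMAP line in modemap.def expands, via the macro above,
   into three assignments.  A hypothetical entry

     DEF_MODEMAP (SImode, loadSI, uloadSI, constSI, loadSI, storeSI)

   would thus set mode_to_const_map[(int) SImode] to constSI, and the
   load and store maps likewise.  */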
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx (REG, mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
}
/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}
/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
			   var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
     use of autoincrement.  Make a copy of the contents of the memory
     location rather than a copy of the address, but not if the value is
     of mode BLKmode.  Don't modify X in place since it might be
     shared.  */
  if (code == MEM && GET_MODE (x) != BLKmode
      && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
    {
      register rtx y = XEXP (x, 0);
      register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));

      MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
      RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
      MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);

      if (QUEUED_INSN (y))
	{
	  register rtx temp = gen_reg_rtx (GET_MODE (new));
	  emit_insn_before (gen_move_insn (temp, new),
			    QUEUED_INSN (y));
	  return temp;
	}
      return new;
    }

  /* Otherwise, recursively protect the subexpressions of all
     the kinds of rtx's that can contain a QUEUED.  */
  if (code == MEM)
    {
      rtx tem = protect_from_queue (XEXP (x, 0), 0);
      if (tem != XEXP (x, 0))
	{
	  x = copy_rtx (x);
	  XEXP (x, 0) = tem;
	}
    }
  else if (code == PLUS || code == MULT)
    {
      rtx new0 = protect_from_queue (XEXP (x, 0), 0);
      rtx new1 = protect_from_queue (XEXP (x, 1), 0);
      if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	{
	  x = copy_rtx (x);
	  XEXP (x, 0) = new0;
	  XEXP (x, 1) = new1;
	}
    }

  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  if (code == MEM)
    return queued_subexp_p (XEXP (x, 0));
  if (code == MULT || code == PLUS || code == MINUS)
    return queued_subexp_p (XEXP (x, 0))
      || queued_subexp_p (XEXP (x, 1));
  return code == QUEUED;
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
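/* A minimal sketch of the intended calling pattern (hypothetical rtl):

     rtx q = enqueue_insn (var, gen_move_insn (var, incremented));
     ...
     rtx safe = protect_from_queue (q, 0);   -- before using Q in an insn
     ...
     emit_queue ();                          -- flush pending increments

   As the doc comment above warns, protect_from_queue must be called
   immediately before the insn that uses the value.  */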
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }
  if (to_real)
    {
      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}
#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{ emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN); return; }
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{ emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN); return; }
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{ emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN); return; }
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{ emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN); return; }
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{ emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN); return; }
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{ emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN); return; }
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{ emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN); return; }
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{ emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN); return; }
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{ emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN); return; }
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{ emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN); return; }
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{ emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN); return; }
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{ emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN); return; }
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{ emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN); return; }
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{ emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN); return; }
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{ emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN); return; }
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{ emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN); return; }
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{ emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN); return; }
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{ emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN); return; }
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{ emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN); return; }
#endif
      /* No special truncate insn matched; use a library call, selected
	 by the FROM_MODE/TO_MODE pair.  */
      {
	rtx libcall = (rtx) 0, value;

	if (from_mode == SFmode && to_mode == DFmode)
	  libcall = extendsfdf2_libfunc;
	else if (from_mode == SFmode && to_mode == XFmode)
	  libcall = extendsfxf2_libfunc;
	else if (from_mode == SFmode && to_mode == TFmode)
	  libcall = extendsftf2_libfunc;
	else if (from_mode == DFmode && to_mode == SFmode)
	  libcall = truncdfsf2_libfunc;
	else if (from_mode == DFmode && to_mode == XFmode)
	  libcall = extenddfxf2_libfunc;
	else if (from_mode == DFmode && to_mode == TFmode)
	  libcall = extenddftf2_libfunc;
	else if (from_mode == XFmode && to_mode == SFmode)
	  libcall = truncxfsf2_libfunc;
	else if (from_mode == XFmode && to_mode == DFmode)
	  libcall = truncxfdf2_libfunc;
	else if (from_mode == TFmode && to_mode == SFmode)
	  libcall = trunctfsf2_libfunc;
	else if (from_mode == TFmode && to_mode == DFmode)
	  libcall = trunctfdf2_libfunc;

	if (libcall == (rtx) 0)
	  /* This conversion is not implemented yet.  */
	  abort ();

	value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
					 1, from, from_mode);
	emit_move_insn (to, value);
	return;
      }
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion */			/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp), from_mode = SImode;
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp), from_mode = DImode;
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{ emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN); return; }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{ emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN); return; }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{ emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN); return; }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{ emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN); return; }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{ emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN); return; }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{ emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN); return; }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{ emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN); return; }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{ emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN); return; }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{ emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN); return; }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{ emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN); return; }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;
  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }
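/* For example, on a host with 32-bit HOST_WIDE_INT, converting the
   CONST_INT -1 (0xffffffff) to an unsigned 64-bit mode must yield the
   double-word constant 0x00000000ffffffff, not 0xffffffffffffffff,
   which is why the high-order word above is forced to zero.  */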
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }
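/* Example of the extension just above: widening the QImode constant 0x80
   (width 8).  Zero-extending gives 128; if the conversion is signed, the
   high bit of the old width is set, so the bits above it are ored in and
   the result is GEN_INT (-128).  */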
  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */
static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
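/* Worked example: on a 32-bit target where SImode, HImode and QImode
   moves are all available, L == 7 with ALIGN == 4 costs one SImode move
   (7 / 4, leaving 3 bytes), one HImode move (leaving 1) and one QImode
   move: 3 insns in total.  */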
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : copy_rtx (change_address (data->to, mode,
					 plus_constant (data->to_addr,
							data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

      from1 = (data->autinc_from
	       ? gen_rtx (MEM, mode, data->from_addr)
	       : copy_rtx (change_address (data->from, mode,
					   plus_constant (data->from_addr,
							  data->offset))));
      MEM_IN_STRUCT_P (from1) = data->from_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */
void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= GET_MODE_MASK (mode)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (x, 0), Pmode,
			 XEXP (y, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
    }
}
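/* A minimal usage sketch (hypothetical operands): copying a 16-byte
   BLKmode object with word alignment,

     emit_block_move (dst, src, GEN_INT (16), UNITS_PER_WORD);

   would normally be expanded by move_by_pieces, since four word moves
   beat the default MOVE_RATIO of 15.  */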
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
				   GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
  enum machine_mode mode;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
    {
      emit_move_insn (change_address (x, mode, NULL),
		      gen_rtx (REG, mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx (REG, word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }
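/* Example: with 4-byte words, storing a 3-byte value from a register on
   a big-endian machine first shifts it left by (4 - 3) * 8 = 8 bits, so
   the 3 significant bytes end up leftmost, where memory expects them.  */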
  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}
/* Emit code to move a block Y to a block X, where X is non-consecutive
   registers represented by a PARALLEL.  */

void
emit_group_load (x, y)
     rtx x, y;
{
  rtx target_reg, source;
  int i;

  if (GET_CODE (x) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (x, 0, 0), 0))
    i = 0;
  else
    i = 1;

  for (; i < XVECLEN (x, 0); i++)
    {
      rtx element = XVECEXP (x, 0, i);

      target_reg = XEXP (element, 0);

      if (GET_CODE (y) == MEM)
	source = change_address (y, GET_MODE (target_reg),
				 plus_constant (XEXP (y, 0),
						INTVAL (XEXP (element, 1))));
      else if (XEXP (element, 1) == const0_rtx)
	{
	  if (GET_MODE (target_reg) == GET_MODE (y))
	    source = y;
	  /* Allow for the target_reg to be smaller than the input register
	     to allow for AIX with 4 DF arguments after a single SI arg.  The
	     last DF argument will only load 1 word into the integer registers,
	     but load a DF value into the float registers.  */
	  else if ((GET_MODE_SIZE (GET_MODE (target_reg))
		    <= GET_MODE_SIZE (GET_MODE (y)))
		   && GET_MODE (target_reg) == word_mode)
	    /* This might be a const_double, so we can't just use SUBREG.  */
	    source = operand_subword (y, 0, 0, VOIDmode);
	  else
	    abort ();
	}
      else
	abort ();

      emit_move_insn (target_reg, source);
    }
}
/* Emit code to move a block Y to a block X, where Y is non-consecutive
   registers represented by a PARALLEL.  */

void
emit_group_store (x, y)
     rtx x, y;
{
  rtx source_reg, target;
  int i;

  if (GET_CODE (y) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (y, 0, 0), 0))
    i = 0;
  else
    i = 1;

  for (; i < XVECLEN (y, 0); i++)
    {
      rtx element = XVECEXP (y, 0, i);

      source_reg = XEXP (element, 0);

      if (GET_CODE (x) == MEM)
	target = change_address (x, GET_MODE (source_reg),
				 plus_constant (XEXP (x, 0),
						INTVAL (XEXP (element, 1))));
      else if (XEXP (element, 1) == const0_rtx)
	{
	  target = x;
	  if (GET_MODE (target) != GET_MODE (source_reg))
	    target = gen_lowpart (GET_MODE (source_reg), target);
	}
      else
	abort ();

      emit_move_insn (target, source_reg);
    }
}
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx (EXPR_LIST, VOIDmode,
	       gen_rtx (USE, VOIDmode, reg), *call_fusage);
}
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno, nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
}
/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (regs, 0, 0), 0))
    i = 0;
  else
    i = 1;

  for (; i < XVECLEN (regs, 0); i++)
    use_reg (call_fusage, XEXP (XVECEXP (regs, 0, i), 0));
}
/* Generate several move instructions to clear LEN bytes of block TO.
   (A MEM rtx with BLKmode).  The caller must pass TO through
   protect_from_queue before calling.  ALIGN (in bytes) is maximum alignment
   we can assume.  */

static void
clear_by_pieces (to, len, align)
     rtx to;
     int len, align;
{
  struct clear_by_pieces data;
  rtx to_addr = XEXP (to, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.to = to;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data.autinc_to
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	clear_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
/* Subroutine of clear_by_pieces.  Clear as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
clear_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct clear_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : copy_rtx (change_address (data->to, mode,
					 plus_constant (data->to_addr,
							data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, const0_rtx));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
   the maximum alignment we can assume it has, measured in bytes.  */

void
clear_storage (object, size, align)
     rtx object;
     rtx size;
     int align;
{
  if (GET_MODE (object) == BLKmode)
    {
      object = protect_from_queue (object, 1);
      size = protect_from_queue (size, 0);

      if (GET_CODE (size) == CONST_INT
	  && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
	clear_by_pieces (object, INTVAL (size), align);
      else
	{
	  /* Try the most limited insn first, because there's no point
	     including more than one in the machine description unless
	     the more limited one has some advantage.  */

	  rtx opalign = GEN_INT (align);
	  enum machine_mode mode;

	  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	       mode = GET_MODE_WIDER_MODE (mode))
	    {
	      enum insn_code code = clrstr_optab[(int) mode];

	      if (code != CODE_FOR_nothing
		  /* We don't need MODE to be narrower than
		     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
		     the mode mask, as it is returned by the macro, it will
		     definitely be less than the actual mode mask.  */
		  && ((GET_CODE (size) == CONST_INT
		       && ((unsigned HOST_WIDE_INT) INTVAL (size)
			   <= GET_MODE_MASK (mode)))
		      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
		  && (insn_operand_predicate[(int) code][0] == 0
		      || (*insn_operand_predicate[(int) code][0]) (object,
								   BLKmode))
		  && (insn_operand_predicate[(int) code][2] == 0
		      || (*insn_operand_predicate[(int) code][2]) (opalign,
								   VOIDmode)))
		{
		  rtx op1;
		  rtx last = get_last_insn ();
		  rtx pat;

		  op1 = convert_to_mode (mode, size, 1);
		  if (insn_operand_predicate[(int) code][1] != 0
		      && ! (*insn_operand_predicate[(int) code][1]) (op1,
								     mode))
		    op1 = copy_to_mode_reg (mode, op1);

		  pat = GEN_FCN ((int) code) (object, op1, opalign);
		  if (pat)
		    {
		      emit_insn (pat);
		      return;
		    }
		  else
		    delete_insns_since (last);
		}
	    }

#ifdef TARGET_MEM_FUNCTIONS
	  emit_library_call (memset_libfunc, 0,
			     VOIDmode, 3,
			     XEXP (object, 0), Pmode,
			     const0_rtx, TYPE_MODE (integer_type_node),
			     convert_to_mode (TYPE_MODE (sizetype),
					      size, TREE_UNSIGNED (sizetype)),
			     TYPE_MODE (sizetype));
#else
	  emit_library_call (bzero_libfunc, 0,
			     VOIDmode, 2,
			     XEXP (object, 0), Pmode,
			     convert_to_mode (TYPE_MODE (integer_type_node),
					      size,
					      TREE_UNSIGNED (integer_type_node)),
			     TYPE_MODE (integer_type_node));
#endif
	}
    }
  else
    emit_move_insn (object, const0_rtx);
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  return emit_move_insn_1 (x, y);
}
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
	   && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
						    * BITS_PER_UNIT),
						   (class == MODE_COMPLEX_INT
						    ? MODE_INT : MODE_FLOAT),
						   0))
	   && (mov_optab->handlers[(int) submode].insn_code
	       != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));

      if (stack)
	{
	  /* If this is a stack, push the highpart first, so it
	     will be in the argument order.

	     In that case, change_address is used only to convert
	     the mode, not to change the address.  */

	  /* Note that the real part always precedes the imag part in memory
	     regardless of machine's endianness.  */
#ifdef STACK_GROWS_DOWNWARD
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
#else
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));
#endif
	}
      else
	{
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_realpart (submode, x), gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_imagpart (submode, x), gen_imagpart (submode, y)));
	}

      return get_last_insn ();
    }
2268 /* This will handle any multi-word mode that lacks a move_insn pattern.
2269 However, you will get better code if you define such patterns,
2270 even if they must turn into multiple assembler instructions. */
2271 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2276 #ifdef PUSH_ROUNDING
2278 /* If X is a push on the stack, do the push now and replace
2279 X with a reference to the stack pointer. */
2280 if (push_operand (x, GET_MODE (x)))
2282 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2283 x = change_address (x, VOIDmode, stack_pointer_rtx);
2287 /* Show the output dies here. */
2289 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
2292 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2295 rtx xpart = operand_subword (x, i, 1, mode);
2296 rtx ypart = operand_subword (y, i, 1, mode);
2298 /* If we can't get a part of Y, put Y into memory if it is a
2299 constant. Otherwise, force it into a register. If we still
2300 can't get a part of Y, abort. */
2301 if (ypart == 0 && CONSTANT_P (y))
2303 y = force_const_mem (mode, y);
2304 ypart = operand_subword (y, i, 1, mode);
2306 else if (ypart == 0)
2307 ypart = operand_subword_force (y, i, mode);
2309 if (xpart == 0 || ypart == 0)
2312 last_insn = emit_move_insn (xpart, ypart);
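/* Standalone sketch (host C, not part of the compiler) of the
   word-by-word decomposition above: a move wider than a word becomes
   CEIL (size, wordsize) word-sized moves, with the final chunk
   possibly short.  `unsigned long' stands in for a target word.  */
#if 0
#include <string.h>

static void
move_by_words (void *dst, const void *src, unsigned size)
{
  unsigned wordsize = sizeof (unsigned long);
  unsigned nwords = (size + wordsize - 1) / wordsize;
  unsigned i;

  for (i = 0; i < nwords; i++)
    {
      unsigned chunk = size - i * wordsize;

      if (chunk > wordsize)
	chunk = wordsize;
      memcpy ((char *) dst + i * wordsize,
	      (const char *) src + i * wordsize, chunk);
    }
}
#endif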
2321 /* Pushing data onto the stack. */
2323 /* Push a block of length SIZE (perhaps variable)
2324 and return an rtx to address the beginning of the block.
2325 Note that it is not possible for the value returned to be a QUEUED.
2326 The value may be virtual_outgoing_args_rtx.
2328 EXTRA is the number of bytes of padding to push in addition to SIZE.
2329 BELOW nonzero means this padding comes at low addresses;
2330 otherwise, the padding comes at high addresses. */
2333 push_block (size, extra, below)
2339 size = convert_modes (Pmode, ptr_mode, size, 1);
2340 if (CONSTANT_P (size))
2341 anti_adjust_stack (plus_constant (size, extra));
2342 else if (GET_CODE (size) == REG && extra == 0)
2343 anti_adjust_stack (size);
2346 rtx temp = copy_to_mode_reg (Pmode, size);
2348 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2349 temp, 0, OPTAB_LIB_WIDEN);
2350 anti_adjust_stack (temp);
2353 #ifdef STACK_GROWS_DOWNWARD
2354 temp = virtual_outgoing_args_rtx;
2355 if (extra != 0 && below)
2356 temp = plus_constant (temp, extra);
2357 #else
2358 if (GET_CODE (size) == CONST_INT)
2359 temp = plus_constant (virtual_outgoing_args_rtx,
2360 - INTVAL (size) - (below ? 0 : extra));
2361 else if (extra != 0 && !below)
2362 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2363 negate_rtx (Pmode, plus_constant (size, extra)));
2364 else
2365 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2366 negate_rtx (Pmode, size));
2367 #endif
2369 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2375 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
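/* Worked sketch (host C, hypothetical) of the address arithmetic in
   push_block: plain integers stand in for virtual_outgoing_args_rtx
   (ARGS_BASE) and the rtx expressions; all quantities are bytes.  */
#if 0
static long
push_block_address (long args_base, long size, long extra, int below,
		    int stack_grows_downward)
{
  if (stack_grows_downward)
    /* The block starts at the args base; padding that goes below the
       block moves the returned address up by EXTRA.  */
    return args_base + (below ? extra : 0);
  else
    /* The stack grew upward by SIZE + EXTRA, so step back over the
       block, and over the padding too when it sits above the block.  */
    return args_base - size - (below ? 0 : extra);
}
#endif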
2378 /* Generate code to push X onto the stack, assuming it has mode MODE and
2379 type TYPE.
2380 MODE is redundant except when X is a CONST_INT (since they don't
2381 carry mode info).
2382 SIZE is an rtx for the size of data to be copied (in bytes),
2383 needed only if X is BLKmode.
2385 ALIGN (in bytes) is maximum alignment we can assume.
2387 If PARTIAL and REG are both nonzero, then copy that many of the first
2388 words of X into registers starting with REG, and push the rest of X.
2389 The amount of space pushed is decreased by PARTIAL words,
2390 rounded *down* to a multiple of PARM_BOUNDARY.
2391 REG must be a hard register in this case.
2392 If REG is zero but PARTIAL is not, take all other actions for an
2393 argument partially in registers, but do not actually load any
2394 registers.
2396 EXTRA is the amount in bytes of extra space to leave next to this arg.
2397 This is ignored if an argument block has already been allocated.
2399 On a machine that lacks real push insns, ARGS_ADDR is the address of
2400 the bottom of the argument block for this call. We use indexing off there
2401 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2402 argument block has not been preallocated.
2404 ARGS_SO_FAR is the size of args previously pushed for this call. */
2407 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2408 args_addr, args_so_far)
2410 enum machine_mode mode;
2421 enum direction stack_direction
2422 #ifdef STACK_GROWS_DOWNWARD
2428 /* Decide where to pad the argument: `downward' for below,
2429 `upward' for above, or `none' for don't pad it.
2430 Default is below for small data on big-endian machines; else above. */
2431 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2433 /* Invert direction if stack is post-update. */
2434 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2435 if (where_pad != none)
2436 where_pad = (where_pad == downward ? upward : downward);
2438 xinner = x = protect_from_queue (x, 0);
2440 if (mode == BLKmode)
2442 /* Copy a block into the stack, entirely or partially. */
2445 int used = partial * UNITS_PER_WORD;
2446 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2454 /* USED is now the # of bytes we need not copy to the stack
2455 because registers will take care of them. */
2458 xinner = change_address (xinner, BLKmode,
2459 plus_constant (XEXP (xinner, 0), used));
2461 /* If the partial register-part of the arg counts in its stack size,
2462 skip the part of stack space corresponding to the registers.
2463 Otherwise, start copying to the beginning of the stack space,
2464 by setting SKIP to 0. */
2465 #ifndef REG_PARM_STACK_SPACE
2471 #ifdef PUSH_ROUNDING
2472 /* Do it with several push insns if that doesn't take lots of insns
2473 and if there is no difficulty with push insns that skip bytes
2474 on the stack for alignment purposes. */
2476 && GET_CODE (size) == CONST_INT
2478 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2480 /* Here we avoid the case of a structure whose weak alignment
2481 forces many pushes of a small amount of data,
2482 and such small pushes do rounding that causes trouble. */
2483 && ((! SLOW_UNALIGNED_ACCESS)
2484 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2485 || PUSH_ROUNDING (align) == align)
2486 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2488 /* Push padding now if padding above and stack grows down,
2489 or if padding below and stack grows up.
2490 But if space already allocated, this has already been done. */
2491 if (extra && args_addr == 0
2492 && where_pad != none && where_pad != stack_direction)
2493 anti_adjust_stack (GEN_INT (extra));
2495 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2496 INTVAL (size) - used, align);
2499 #endif /* PUSH_ROUNDING */
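/* The alignment guard above, in isolation: piecewise pushes are safe
   when unaligned access is cheap, when the data is already maximally
   aligned, or when PUSH_ROUNDING does not round the piece size (so no
   padding bytes get skipped between pushes).  EXAMPLE_PUSH_ROUNDING
   is a hypothetical definition rounding to 2-byte multiples.  */
#if 0
#define EXAMPLE_PUSH_ROUNDING(BYTES) (((BYTES) + 1) & ~1)

static int
piecewise_pushes_safe (int align, int slow_unaligned_access,
		       int biggest_alignment)
{
  return (! slow_unaligned_access
	  || align >= biggest_alignment
	  || EXAMPLE_PUSH_ROUNDING (align) == align);
}
#endif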
2501 /* Otherwise make space on the stack and copy the data
2502 to the address of that space. */
2504 /* Deduct words put into registers from the size we must copy. */
2507 if (GET_CODE (size) == CONST_INT)
2508 size = GEN_INT (INTVAL (size) - used);
2510 size = expand_binop (GET_MODE (size), sub_optab, size,
2511 GEN_INT (used), NULL_RTX, 0,
2515 /* Get the address of the stack space.
2516 In this case, we do not deal with EXTRA separately.
2517 A single stack adjust will do. */
2520 temp = push_block (size, extra, where_pad == downward);
2523 else if (GET_CODE (args_so_far) == CONST_INT)
2524 temp = memory_address (BLKmode,
2525 plus_constant (args_addr,
2526 skip + INTVAL (args_so_far)));
2528 temp = memory_address (BLKmode,
2529 plus_constant (gen_rtx (PLUS, Pmode,
2530 args_addr, args_so_far),
2533 /* TEMP is the address of the block. Copy the data there. */
2534 if (GET_CODE (size) == CONST_INT
2535 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2538 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2539 INTVAL (size), align);
2542 /* Try the most limited insn first, because there's no point
2543 including more than one in the machine description unless
2544 the more limited one has some advantage. */
2545 #ifdef HAVE_movstrqi
2547 && GET_CODE (size) == CONST_INT
2548 && ((unsigned) INTVAL (size)
2549 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2551 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2552 xinner, size, GEN_INT (align));
2560 #ifdef HAVE_movstrhi
2562 && GET_CODE (size) == CONST_INT
2563 && ((unsigned) INTVAL (size)
2564 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2566 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2567 xinner, size, GEN_INT (align));
2575 #ifdef HAVE_movstrsi
2578 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2579 xinner, size, GEN_INT (align));
2587 #ifdef HAVE_movstrdi
2590 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2591 xinner, size, GEN_INT (align));
2600 #ifndef ACCUMULATE_OUTGOING_ARGS
2601 /* If the source is referenced relative to the stack pointer,
2602 copy it to another register to stabilize it. We do not need
2603 to do this if we know that we won't be changing sp. */
2605 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2606 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2607 temp = copy_to_reg (temp);
2610 /* Make inhibit_defer_pop nonzero around the library call
2611 to force it to pop the bcopy-arguments right away. */
2613 #ifdef TARGET_MEM_FUNCTIONS
2614 emit_library_call (memcpy_libfunc, 0,
2615 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2616 convert_to_mode (TYPE_MODE (sizetype),
2617 size, TREE_UNSIGNED (sizetype)),
2618 TYPE_MODE (sizetype));
2620 emit_library_call (bcopy_libfunc, 0,
2621 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2622 convert_to_mode (TYPE_MODE (integer_type_node),
2624 TREE_UNSIGNED (integer_type_node)),
2625 TYPE_MODE (integer_type_node));
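/* The two library fallbacks above differ only in argument order:
   memcpy takes (dst, src, n) where bcopy takes (src, dst, n).  A
   host-side sketch of the emitted call, with memmove standing in for
   the BSD bcopy:  */
#if 0
#include <string.h>

static void
copy_block_to_stack (void *stack_dst, const void *arg_src, size_t n,
		     int have_mem_functions)
{
  if (have_mem_functions)
    memcpy (stack_dst, arg_src, n);	/* TARGET_MEM_FUNCTIONS case */
  else
    memmove (stack_dst, arg_src, n);	/* bcopy (src, dst, n) case */
}
#endif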
2630 else if (partial > 0)
2632 /* Scalar partly in registers. */
2634 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2637 /* # words of start of argument
2638 that we must make space for but need not store. */
2639 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2640 int args_offset = INTVAL (args_so_far);
2643 /* Push padding now if padding above and stack grows down,
2644 or if padding below and stack grows up.
2645 But if space already allocated, this has already been done. */
2646 if (extra && args_addr == 0
2647 && where_pad != none && where_pad != stack_direction)
2648 anti_adjust_stack (GEN_INT (extra));
2650 /* If we make space by pushing it, we might as well push
2651 the real data. Otherwise, we can leave OFFSET nonzero
2652 and leave the space uninitialized. */
2656 /* Now NOT_STACK gets the number of words that we don't need to
2657 allocate on the stack. */
2658 not_stack = partial - offset;
2660 /* If the partial register-part of the arg counts in its stack size,
2661 skip the part of stack space corresponding to the registers.
2662 Otherwise, start copying to the beginning of the stack space,
2663 by setting SKIP to 0. */
2664 #ifndef REG_PARM_STACK_SPACE
2670 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2671 x = validize_mem (force_const_mem (mode, x));
2673 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2674 SUBREGs of such registers are not allowed. */
2675 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2676 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2677 x = copy_to_reg (x);
2679 /* Loop over all the words allocated on the stack for this arg. */
2680 /* We can do it by words, because any scalar bigger than a word
2681 has a size a multiple of a word. */
2682 #ifndef PUSH_ARGS_REVERSED
2683 for (i = not_stack; i < size; i++)
2685 for (i = size - 1; i >= not_stack; i--)
2687 if (i >= not_stack + offset)
2688 emit_push_insn (operand_subword_force (x, i, mode),
2689 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2691 GEN_INT (args_offset + ((i - not_stack + skip)
2692 * UNITS_PER_WORD)));
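/* Iteration sketch (host C) for the loop above: the same words are
   visited either way; PUSH_ARGS_REVERSED only flips the order so the
   pushes land in argument order.  Words below NOT_STACK + OFFSET are
   skipped; their contents are either in registers or deliberately
   left uninitialized, per the OFFSET comment above.  */
#if 0
static void
visit_arg_words (int not_stack, int size, int offset, int reversed,
		 void (*push_word) (int))
{
  int i;

  if (reversed)
    {
      for (i = size - 1; i >= not_stack; i--)
	if (i >= not_stack + offset)
	  push_word (i);
    }
  else
    {
      for (i = not_stack; i < size; i++)
	if (i >= not_stack + offset)
	  push_word (i);
    }
}
#endif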
2698 /* Push padding now if padding above and stack grows down,
2699 or if padding below and stack grows up.
2700 But if space already allocated, this has already been done. */
2701 if (extra && args_addr == 0
2702 && where_pad != none && where_pad != stack_direction)
2703 anti_adjust_stack (GEN_INT (extra));
2705 #ifdef PUSH_ROUNDING
2707 addr = gen_push_operand ();
2710 if (GET_CODE (args_so_far) == CONST_INT)
2711 addr
2712 = memory_address (mode,
2713 plus_constant (args_addr, INTVAL (args_so_far)));
2714 else
2715 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2716 args_so_far));
2718 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2722 /* If part should go in registers, copy that part
2723 into the appropriate registers. Do this now, at the end,
2724 since mem-to-mem copies above may do function calls. */
2725 if (partial > 0 && reg != 0)
2727 /* Handle calls that pass values in multiple non-contiguous locations.
2728 The Irix 6 ABI has examples of this. */
2729 if (GET_CODE (reg) == PARALLEL)
2730 emit_group_load (reg, x);
2732 move_block_to_reg (REGNO (reg), x, partial, mode);
2735 if (extra && args_addr == 0 && where_pad == stack_direction)
2736 anti_adjust_stack (GEN_INT (extra));
2739 /* Expand an assignment that stores the value of FROM into TO.
2740 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2741 (This may contain a QUEUED rtx;
2742 if the value is constant, this rtx is a constant.)
2743 Otherwise, the returned value is NULL_RTX.
2745 SUGGEST_REG is no longer actually used.
2746 It used to mean, copy the value through a register
2747 and return that register, if that is possible.
2748 We now use WANT_VALUE to decide whether to do this. */
2751 expand_assignment (to, from, want_value, suggest_reg)
2756 register rtx to_rtx = 0;
2759 /* Don't crash if the lhs of the assignment was erroneous. */
2761 if (TREE_CODE (to) == ERROR_MARK)
2763 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2764 return want_value ? result : NULL_RTX;
2767 if (output_bytecode)
2769 tree dest_innermost;
2771 bc_expand_expr (from);
2772 bc_emit_instruction (duplicate);
2774 dest_innermost = bc_expand_address (to);
2776 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2777 take care of it here. */
2779 bc_store_memory (TREE_TYPE (to), dest_innermost);
2783 /* Assignment of a structure component needs special treatment
2784 if the structure component's rtx is not simply a MEM.
2785 Assignment of an array element at a constant index, and assignment of
2786 an array element in an unaligned packed structure field, have the same
2787 problem. */
2789 if (TREE_CODE (to) == COMPONENT_REF
2790 || TREE_CODE (to) == BIT_FIELD_REF
2791 || (TREE_CODE (to) == ARRAY_REF
2792 && ((TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2793 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST)
2794 || (SLOW_UNALIGNED_ACCESS && get_inner_unaligned_p (to)))))
2796 enum machine_mode mode1;
2806 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
2807 &unsignedp, &volatilep, &alignment);
2809 /* If we are going to use store_bit_field and extract_bit_field,
2810 make sure to_rtx will be safe for multiple use. */
2812 if (mode1 == VOIDmode && want_value)
2813 tem = stabilize_reference (tem);
2815 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2818 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2820 if (GET_CODE (to_rtx) != MEM)
2821 abort ();
2822 to_rtx = change_address (to_rtx, VOIDmode,
2823 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
2824 force_reg (ptr_mode, offset_rtx)));
2828 if (GET_CODE (to_rtx) == MEM)
2830 /* When the offset is zero, to_rtx is the address of the
2831 structure we are storing into, and hence may be shared.
2832 We must make a new MEM before setting the volatile bit. */
2834 to_rtx = copy_rtx (to_rtx);
2836 MEM_VOLATILE_P (to_rtx) = 1;
2838 #if 0 /* This was turned off because, when a field is volatile
2839 in an object which is not volatile, the object may be in a register,
2840 and then we would abort over here. */
2846 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2848 /* Spurious cast makes HPUX compiler happy. */
2849 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2852 /* Required alignment of containing datum. */
2854 int_size_in_bytes (TREE_TYPE (tem)));
2855 preserve_temp_slots (result);
2859 /* If the value is meaningful, convert RESULT to the proper mode.
2860 Otherwise, return nothing. */
2861 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2862 TYPE_MODE (TREE_TYPE (from)),
2864 TREE_UNSIGNED (TREE_TYPE (to)))
2868 /* If the rhs is a function call and its value is not an aggregate,
2869 call the function before we start to compute the lhs.
2870 This is needed for correct code for cases such as
2871 val = setjmp (buf) on machines where reference to val
2872 requires loading up part of an address in a separate insn.
2874 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2875 a promoted variable where the zero- or sign- extension needs to be done.
2876 Handling this in the normal way is safe because no computation is done
2877 before the call. */
2878 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2879 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
2880 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
2885 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2887 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2889 /* Handle calls that return values in multiple non-contiguous locations.
2890 The Irix 6 ABI has examples of this. */
2891 if (GET_CODE (to_rtx) == PARALLEL)
2892 emit_group_load (to_rtx, value);
2893 else if (GET_MODE (to_rtx) == BLKmode)
2894 emit_block_move (to_rtx, value, expr_size (from),
2895 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
2897 emit_move_insn (to_rtx, value);
2898 preserve_temp_slots (to_rtx);
2901 return want_value ? to_rtx : NULL_RTX;
2904 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2905 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2908 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2910 /* Don't move directly into a return register. */
2911 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2916 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2917 emit_move_insn (to_rtx, temp);
2918 preserve_temp_slots (to_rtx);
2921 return want_value ? to_rtx : NULL_RTX;
2924 /* In case we are returning the contents of an object which overlaps
2925 the place the value is being stored, use a safe function when copying
2926 a value through a pointer into a structure value return block. */
2927 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2928 && current_function_returns_struct
2929 && !current_function_returns_pcc_struct)
2934 size = expr_size (from);
2935 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2937 #ifdef TARGET_MEM_FUNCTIONS
2938 emit_library_call (memcpy_libfunc, 0,
2939 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2940 XEXP (from_rtx, 0), Pmode,
2941 convert_to_mode (TYPE_MODE (sizetype),
2942 size, TREE_UNSIGNED (sizetype)),
2943 TYPE_MODE (sizetype));
2945 emit_library_call (bcopy_libfunc, 0,
2946 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2947 XEXP (to_rtx, 0), Pmode,
2948 convert_to_mode (TYPE_MODE (integer_type_node),
2949 size, TREE_UNSIGNED (integer_type_node)),
2950 TYPE_MODE (integer_type_node));
2953 preserve_temp_slots (to_rtx);
2956 return want_value ? to_rtx : NULL_RTX;
2959 /* Compute FROM and store the value in the rtx we got. */
2962 result = store_expr (from, to_rtx, want_value);
2963 preserve_temp_slots (result);
2966 return want_value ? result : NULL_RTX;
2969 /* Generate code for computing expression EXP,
2970 and storing the value into TARGET.
2971 TARGET may contain a QUEUED rtx.
2973 If WANT_VALUE is nonzero, return a copy of the value
2974 not in TARGET, so that we can be sure to use the proper
2975 value in a containing expression even if TARGET has something
2976 else stored in it. If possible, we copy the value through a pseudo
2977 and return that pseudo. Or, if the value is constant, we try to
2978 return the constant. In some cases, we return a pseudo
2979 copied *from* TARGET.
2981 If the mode is BLKmode then we may return TARGET itself.
2982 It turns out that in BLKmode it doesn't cause a problem,
2983 because C has no operators that could combine two different
2984 assignments into the same BLKmode object with different values
2985 with no sequence point. Will other languages need this to
2986 be more thorough?
2988 If WANT_VALUE is 0, we return NULL, to make sure
2989 to catch quickly any cases where the caller uses the value
2990 and fails to set WANT_VALUE. */
2993 store_expr (exp, target, want_value)
2995 register rtx target;
2999 int dont_return_target = 0;
3001 if (TREE_CODE (exp) == COMPOUND_EXPR)
3003 /* Perform first part of compound expression, then assign from second
3005 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3007 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3009 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3011 /* For conditional expression, get safe form of the target. Then
3012 test the condition, doing the appropriate assignment on either
3013 side. This avoids the creation of unnecessary temporaries.
3014 For non-BLKmode, it is more efficient not to do this. */
3016 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3017 rtx flag = NULL_RTX;
3018 tree left_cleanups = NULL_TREE;
3019 tree right_cleanups = NULL_TREE;
3020 tree old_cleanups = cleanups_this_call;
3022 /* Used to save a pointer to the place to put the setting of
3023 the flag that indicates if this side of the conditional was
3024 taken. We backpatch the code, if we find out later that we
3025 have any conditional cleanups that need to be performed. */
3026 rtx dest_right_flag = NULL_RTX;
3027 rtx dest_left_flag = NULL_RTX;
3030 target = protect_from_queue (target, 1);
3032 do_pending_stack_adjust ();
3034 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3035 store_expr (TREE_OPERAND (exp, 1), target, 0);
3036 dest_left_flag = get_last_insn ();
3037 /* Handle conditional cleanups, if any. */
3038 left_cleanups = defer_cleanups_to (old_cleanups);
3040 emit_jump_insn (gen_jump (lab2));
3043 store_expr (TREE_OPERAND (exp, 2), target, 0);
3044 dest_right_flag = get_last_insn ();
3045 /* Handle conditional cleanups, if any. */
3046 right_cleanups = defer_cleanups_to (old_cleanups);
3051 /* Add back in any conditional cleanups. */
3052 if (left_cleanups || right_cleanups)
3058 /* Now that we know that a flag is needed, go back and add in the
3059 setting of the flag. */
3061 flag = gen_reg_rtx (word_mode);
3063 /* Do the left side flag. */
3064 last = get_last_insn ();
3065 /* Flag left cleanups as needed. */
3066 emit_move_insn (flag, const1_rtx);
3067 /* ??? deprecated, use sequences instead. */
3068 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
3070 /* Do the right side flag. */
3071 last = get_last_insn ();
3072 /* Flag right cleanups as needed. */
3073 emit_move_insn (flag, const0_rtx);
3074 /* ??? deprecated, use sequences instead. */
3075 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
3077 /* All cleanups must be on the function_obstack. */
3078 push_obstacks_nochange ();
3079 resume_temporary_allocation ();
3081 /* Convert flag, which is an rtx, into a tree. */
3082 cond = make_node (RTL_EXPR);
3083 TREE_TYPE (cond) = integer_type_node;
3084 RTL_EXPR_RTL (cond) = flag;
3085 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
3086 cond = save_expr (cond);
3088 if (! left_cleanups)
3089 left_cleanups = integer_zero_node;
3090 if (! right_cleanups)
3091 right_cleanups = integer_zero_node;
3092 new_cleanups = build (COND_EXPR, void_type_node,
3093 truthvalue_conversion (cond),
3094 left_cleanups, right_cleanups);
3095 new_cleanups = fold (new_cleanups);
3099 /* Now add in the conditionalized cleanups. */
3101 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
3102 expand_eh_region_start ();
3104 return want_value ? target : NULL_RTX;
3106 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3107 && GET_MODE (target) != BLKmode)
3108 /* If target is in memory and caller wants value in a register instead,
3109 arrange that. Pass TARGET as target for expand_expr so that,
3110 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3111 We know expand_expr will not use the target in that case.
3112 Don't do this if TARGET is volatile because we are supposed
3113 to write it and then read it. */
3115 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3116 GET_MODE (target), 0);
3117 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3118 temp = copy_to_reg (temp);
3119 dont_return_target = 1;
3121 else if (queued_subexp_p (target))
3122 /* If target contains a postincrement, let's not risk
3123 using it as the place to generate the rhs. */
3125 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3127 /* Expand EXP into a new pseudo. */
3128 temp = gen_reg_rtx (GET_MODE (target));
3129 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3132 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3134 /* If target is volatile, ANSI requires accessing the value
3135 *from* the target, if it is accessed. So make that happen.
3136 In no case return the target itself. */
3137 if (! MEM_VOLATILE_P (target) && want_value)
3138 dont_return_target = 1;
3140 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3141 /* If this is a scalar in a register that is stored in a wider mode
3142 than the declared mode, compute the result into its declared mode
3143 and then convert to the wider mode. Our value is the computed
3144 expression. */
3146 /* If we don't want a value, we can do the conversion inside EXP,
3147 which will often result in some optimizations. Do the conversion
3148 in two steps: first change the signedness, if needed, then
3149 the extend. But don't do this if the type of EXP is a subtype
3150 of something else since then the conversion might involve
3151 more than just converting modes. */
3152 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3153 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3155 if (TREE_UNSIGNED (TREE_TYPE (exp))
3156 != SUBREG_PROMOTED_UNSIGNED_P (target))
3159 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3163 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3164 SUBREG_PROMOTED_UNSIGNED_P (target)),
3168 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3170 /* If TEMP is a volatile MEM and we want a result value, make
3171 the access now so it gets done only once. Likewise if
3172 it contains TARGET. */
3173 if (GET_CODE (temp) == MEM && want_value
3174 && (MEM_VOLATILE_P (temp)
3175 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3176 temp = copy_to_reg (temp);
3178 /* If TEMP is a VOIDmode constant, use convert_modes to make
3179 sure that we properly convert it. */
3180 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3181 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3182 TYPE_MODE (TREE_TYPE (exp)), temp,
3183 SUBREG_PROMOTED_UNSIGNED_P (target));
3185 convert_move (SUBREG_REG (target), temp,
3186 SUBREG_PROMOTED_UNSIGNED_P (target));
3187 return want_value ? temp : NULL_RTX;
3191 temp = expand_expr (exp, target, GET_MODE (target), 0);
3192 /* Return TARGET if it's a specified hardware register.
3193 If TARGET is a volatile mem ref, either return TARGET
3194 or return a reg copied *from* TARGET; ANSI requires this.
3196 Otherwise, if TEMP is not TARGET, return TEMP
3197 if it is constant (for efficiency),
3198 or if we really want the correct value. */
3199 if (!(target && GET_CODE (target) == REG
3200 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3201 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3202 && ! rtx_equal_p (temp, target)
3203 && (CONSTANT_P (temp) || want_value))
3204 dont_return_target = 1;
3207 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3208 the same as that of TARGET, adjust the constant. This is needed, for
3209 example, in case it is a CONST_DOUBLE and we want only a word-sized
3210 value. */
3211 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3212 && TREE_CODE (exp) != ERROR_MARK
3213 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3214 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3215 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3217 /* If value was not generated in the target, store it there.
3218 Convert the value to TARGET's type first if necessary. */
3220 if (! rtx_equal_p (temp, target) && TREE_CODE (exp) != ERROR_MARK)
3222 target = protect_from_queue (target, 1);
3223 if (GET_MODE (temp) != GET_MODE (target)
3224 && GET_MODE (temp) != VOIDmode)
3226 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3227 if (dont_return_target)
3229 /* In this case, we will return TEMP,
3230 so make sure it has the proper mode.
3231 But don't forget to store the value into TARGET. */
3232 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3233 emit_move_insn (target, temp);
3236 convert_move (target, temp, unsignedp);
3239 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3241 /* Handle copying a string constant into an array.
3242 The string constant may be shorter than the array.
3243 So copy just the string's actual length, and clear the rest. */
3247 /* Get the size of the data type of the string,
3248 which is actually the size of the target. */
3249 size = expr_size (exp);
3250 if (GET_CODE (size) == CONST_INT
3251 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3252 emit_block_move (target, temp, size,
3253 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3256 /* Compute the size of the data to copy from the string. */
3258 = size_binop (MIN_EXPR,
3259 make_tree (sizetype, size),
3261 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3262 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3266 /* Copy that much. */
3267 emit_block_move (target, temp, copy_size_rtx,
3268 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3270 /* Figure out how much is left in TARGET that we have to clear.
3271 Do all calculations in ptr_mode. */
3273 addr = XEXP (target, 0);
3274 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3276 if (GET_CODE (copy_size_rtx) == CONST_INT)
3278 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3279 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3283 addr = force_reg (ptr_mode, addr);
3284 addr = expand_binop (ptr_mode, add_optab, addr,
3285 copy_size_rtx, NULL_RTX, 0,
3288 size = expand_binop (ptr_mode, sub_optab, size,
3289 copy_size_rtx, NULL_RTX, 0,
3292 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
3293 GET_MODE (size), 0, 0);
3294 label = gen_label_rtx ();
3295 emit_jump_insn (gen_blt (label));
3298 if (size != const0_rtx)
3300 #ifdef TARGET_MEM_FUNCTIONS
3301 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3303 const0_rtx, TYPE_MODE (integer_type_node),
3304 convert_to_mode (TYPE_MODE (sizetype),
3306 TREE_UNSIGNED (sizetype)),
3307 TYPE_MODE (sizetype));
3309 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3311 convert_to_mode (TYPE_MODE (integer_type_node),
3313 TREE_UNSIGNED (integer_type_node)),
3314 TYPE_MODE (integer_type_node));
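/* What the string-constant case above boils down to, in host C: copy
   the string's actual bytes, then zero whatever remains of a target
   array that may be longer than the string.  */
#if 0
#include <string.h>

static void
store_string_into_array (char *target, size_t target_size,
			 const char *str, size_t str_len)
{
  size_t copy = str_len < target_size ? str_len : target_size;

  memcpy (target, str, copy);
  if (copy < target_size)
    memset (target + copy, 0, target_size - copy);
}
#endif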
3322 /* Handle calls that return values in multiple non-contiguous locations.
3323 The Irix 6 ABI has examples of this. */
3324 else if (GET_CODE (target) == PARALLEL)
3325 emit_group_load (target, temp);
3326 else if (GET_MODE (temp) == BLKmode)
3327 emit_block_move (target, temp, expr_size (exp),
3328 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3330 emit_move_insn (target, temp);
3333 /* If we don't want a value, return NULL_RTX. */
3337 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3338 ??? The latter test doesn't seem to make sense. */
3339 else if (dont_return_target && GET_CODE (temp) != MEM)
3342 /* Return TARGET itself if it is a hard register. */
3343 else if (want_value && GET_MODE (target) != BLKmode
3344 && ! (GET_CODE (target) == REG
3345 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3346 return copy_to_reg (target);
3352 /* Return 1 if EXP just contains zeros. */
3360 switch (TREE_CODE (exp))
3364 case NON_LVALUE_EXPR:
3365 return is_zeros_p (TREE_OPERAND (exp, 0));
3367 case INTEGER_CST:
3368 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3370 case COMPLEX_CST:
3371 return
3372 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3374 case REAL_CST:
3375 return REAL_VALUES_EQUAL (TREE_REAL_CST (exp), dconst0);
3377 case CONSTRUCTOR:
3378 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3379 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3380 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3381 if (! is_zeros_p (TREE_VALUE (elt)))
3390 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
3393 mostly_zeros_p (exp)
3396 if (TREE_CODE (exp) == CONSTRUCTOR)
3398 int elts = 0, zeros = 0;
3399 tree elt = CONSTRUCTOR_ELTS (exp);
3400 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3402 /* If there are no ranges of true bits, it is all zero. */
3403 return elt == NULL_TREE;
3405 for (; elt; elt = TREE_CHAIN (elt))
3407 /* We do not handle the case where the index is a RANGE_EXPR,
3408 so the statistic will be somewhat inaccurate.
3409 We do make a more accurate count in store_constructor itself,
3410 so since this function is only used for nested array elements,
3411 this should be close enough. */
3412 if (mostly_zeros_p (TREE_VALUE (elt)))
3417 return 4 * zeros >= 3 * elts;
3420 return is_zeros_p (exp);
3423 /* Helper function for store_constructor.
3424 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3425 TYPE is the type of the CONSTRUCTOR, not the element type.
3426 CLEARED is as for store_constructor.
3428 This provides a recursive shortcut back to store_constructor when it isn't
3429 necessary to go through store_field. This is so that we can pass through
3430 the cleared field to let store_constructor know that we may not have to
3431 clear a substructure if the outer structure has already been cleared. */
3434 store_constructor_field (target, bitsize, bitpos,
3435 mode, exp, type, cleared)
3437 int bitsize, bitpos;
3438 enum machine_mode mode;
3442 if (TREE_CODE (exp) == CONSTRUCTOR
3443 && bitpos % BITS_PER_UNIT == 0
3444 /* If we have a non-zero bitpos for a register target, then we just
3445 let store_field do the bitfield handling. This is unlikely to
3446 generate unnecessary clear instructions anyway. */
3447 && (bitpos == 0 || GET_CODE (target) == MEM))
3450 target = change_address (target, VOIDmode,
3451 plus_constant (XEXP (target, 0),
3452 bitpos / BITS_PER_UNIT));
3453 store_constructor (exp, target, cleared);
3456 store_field (target, bitsize, bitpos, mode, exp,
3457 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3458 int_size_in_bytes (type));
3461 /* Store the value of constructor EXP into the rtx TARGET.
3462 TARGET is either a REG or a MEM.
3463 CLEARED is true if TARGET is known to have been zero'd. */
3466 store_constructor (exp, target, cleared)
3471 tree type = TREE_TYPE (exp);
3473 /* We know our target cannot conflict, since safe_from_p has been called. */
3475 /* Don't try copying piece by piece into a hard register
3476 since that is vulnerable to being clobbered by EXP.
3477 Instead, construct in a pseudo register and then copy it all. */
3478 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3480 rtx temp = gen_reg_rtx (GET_MODE (target));
3481 store_constructor (exp, temp, 0);
3482 emit_move_insn (target, temp);
3487 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3488 || TREE_CODE (type) == QUAL_UNION_TYPE)
3492 /* Inform later passes that the whole union value is dead. */
3493 if (TREE_CODE (type) == UNION_TYPE
3494 || TREE_CODE (type) == QUAL_UNION_TYPE)
3495 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3497 /* If we are building a static constructor into a register,
3498 set the initial value as zero so we can fold the value into
3499 a constant. But if more than one register is involved,
3500 this probably loses. */
3501 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3502 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
3505 emit_move_insn (target, const0_rtx);
3510 /* If the constructor has fewer fields than the structure
3511 or if we are initializing the structure to mostly zeros,
3512 clear the whole structure first. */
3513 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3514 != list_length (TYPE_FIELDS (type)))
3515 || mostly_zeros_p (exp))
3518 clear_storage (target, expr_size (exp),
3519 TYPE_ALIGN (type) / BITS_PER_UNIT);
3524 /* Inform later passes that the old value is dead. */
3525 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3527 /* Store each element of the constructor into
3528 the corresponding field of TARGET. */
3530 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3532 register tree field = TREE_PURPOSE (elt);
3533 register enum machine_mode mode;
3537 tree pos, constant = 0, offset = 0;
3538 rtx to_rtx = target;
3540 /* Just ignore missing fields.
3541 We cleared the whole structure, above,
3542 if any fields are missing. */
3546 if (cleared && is_zeros_p (TREE_VALUE (elt)))
3549 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3550 unsignedp = TREE_UNSIGNED (field);
3551 mode = DECL_MODE (field);
3552 if (DECL_BIT_FIELD (field))
3555 pos = DECL_FIELD_BITPOS (field);
3556 if (TREE_CODE (pos) == INTEGER_CST)
3558 else if (TREE_CODE (pos) == PLUS_EXPR
3559 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3560 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3565 bitpos = TREE_INT_CST_LOW (constant);
3571 if (contains_placeholder_p (offset))
3572 offset = build (WITH_RECORD_EXPR, sizetype,
3575 offset = size_binop (FLOOR_DIV_EXPR, offset,
3576 size_int (BITS_PER_UNIT));
3578 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3579 if (GET_CODE (to_rtx) != MEM)
3580 abort ();
3582 to_rtx
3583 = change_address (to_rtx, VOIDmode,
3584 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
3585 force_reg (ptr_mode, offset_rtx)));
3587 if (TREE_READONLY (field))
3589 if (GET_CODE (to_rtx) == MEM)
3590 to_rtx = copy_rtx (to_rtx);
3592 RTX_UNCHANGING_P (to_rtx) = 1;
3595 store_constructor_field (to_rtx, bitsize, bitpos,
3596 mode, TREE_VALUE (elt), type, cleared);
3599 else if (TREE_CODE (type) == ARRAY_TYPE)
3604 tree domain = TYPE_DOMAIN (type);
3605 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3606 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3607 tree elttype = TREE_TYPE (type);
3609 /* If the constructor has fewer elements than the array,
3610 clear the whole array first. Similarly if this is a
3611 static constructor of a non-BLKmode object. */
3612 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3616 HOST_WIDE_INT count = 0, zero_count = 0;
3618 /* This loop is a more accurate version of the loop in
3619 mostly_zeros_p (it handles RANGE_EXPR in an index).
3620 It is also needed to check for missing elements. */
3621 for (elt = CONSTRUCTOR_ELTS (exp);
3623 elt = TREE_CHAIN (elt))
3625 tree index = TREE_PURPOSE (elt);
3626 HOST_WIDE_INT this_node_count;
3627 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3629 tree lo_index = TREE_OPERAND (index, 0);
3630 tree hi_index = TREE_OPERAND (index, 1);
3631 if (TREE_CODE (lo_index) != INTEGER_CST
3632 || TREE_CODE (hi_index) != INTEGER_CST)
3637 this_node_count = TREE_INT_CST_LOW (hi_index)
3638 - TREE_INT_CST_LOW (lo_index) + 1;
3641 this_node_count = 1;
3642 count += this_node_count;
3643 if (mostly_zeros_p (TREE_VALUE (elt)))
3644 zero_count += this_node_count;
3646 /* Clear the entire array first if there are any missing elements,
3647 or if the incidence of zero elements is >= 75%. */
3648 if (count < maxelt - minelt + 1
3649 || 4 * zero_count >= 3 * count)
3655 clear_storage (target, expr_size (exp),
3656 TYPE_ALIGN (type) / BITS_PER_UNIT);
3660 /* Inform later passes that the old value is dead. */
3661 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3663 /* Store each element of the constructor into
3664 the corresponding element of TARGET, determined
3665 by counting the elements. */
3666 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3668 elt = TREE_CHAIN (elt), i++)
3670 register enum machine_mode mode;
3674 tree value = TREE_VALUE (elt);
3675 tree index = TREE_PURPOSE (elt);
3676 rtx xtarget = target;
3678 if (cleared && is_zeros_p (value))
3681 mode = TYPE_MODE (elttype);
3682 bitsize = GET_MODE_BITSIZE (mode);
3683 unsignedp = TREE_UNSIGNED (elttype);
3685 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3687 tree lo_index = TREE_OPERAND (index, 0);
3688 tree hi_index = TREE_OPERAND (index, 1);
3689 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
3690 struct nesting *loop;
3691 HOST_WIDE_INT lo, hi, count;
3694 /* If the range is constant and "small", unroll the loop. */
3695 if (TREE_CODE (lo_index) == INTEGER_CST
3696 && TREE_CODE (hi_index) == INTEGER_CST
3697 && (lo = TREE_INT_CST_LOW (lo_index),
3698 hi = TREE_INT_CST_LOW (hi_index),
3699 count = hi - lo + 1,
3700 (GET_CODE (target) != MEM
3702 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
3703 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
3706 lo -= minelt; hi -= minelt;
3707 for (; lo <= hi; lo++)
3709 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
3710 store_constructor_field (target, bitsize, bitpos,
3711 mode, value, type, cleared);
3716 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
3717 loop_top = gen_label_rtx ();
3718 loop_end = gen_label_rtx ();
3720 unsignedp = TREE_UNSIGNED (domain);
3722 index = build_decl (VAR_DECL, NULL_TREE, domain);
3724 DECL_RTL (index) = index_r
3725 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
3728 if (TREE_CODE (value) == SAVE_EXPR
3729 && SAVE_EXPR_RTL (value) == 0)
3731 /* Make sure value gets expanded once before the
3732 loop. */
3733 expand_expr (value, const0_rtx, VOIDmode, 0);
3736 store_expr (lo_index, index_r, 0);
3737 loop = expand_start_loop (0);
3739 /* Assign value to element index. */
3740 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3741 size_int (BITS_PER_UNIT));
3742 position = size_binop (MULT_EXPR,
3743 size_binop (MINUS_EXPR, index,
3744 TYPE_MIN_VALUE (domain)),
3746 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3747 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3748 xtarget = change_address (target, mode, addr);
3749 if (TREE_CODE (value) == CONSTRUCTOR)
3750 store_constructor (value, xtarget, cleared);
3752 store_expr (value, xtarget, 0);
3754 expand_exit_loop_if_false (loop,
3755 build (LT_EXPR, integer_type_node,
3758 expand_increment (build (PREINCREMENT_EXPR,
3760 index, integer_one_node), 0, 0);
3762 emit_label (loop_end);
3764 /* Needed by stupid register allocation, to extend the
3765 lifetime of pseudo-regs used by target past the end
3766 of the loop. */
3767 emit_insn (gen_rtx (USE, GET_MODE (target), target));
3770 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
3771 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
3777 index = size_int (i);
3780 index = size_binop (MINUS_EXPR, index,
3781 TYPE_MIN_VALUE (domain));
3782 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3783 size_int (BITS_PER_UNIT));
3784 position = size_binop (MULT_EXPR, index, position);
3785 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3786 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3787 xtarget = change_address (target, mode, addr);
3788 store_expr (value, xtarget, 0);
3793 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
3794 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3796 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3797 store_constructor_field (target, bitsize, bitpos,
3798 mode, value, type, cleared);
3802 /* set constructor assignments */
3803 else if (TREE_CODE (type) == SET_TYPE)
3805 tree elt = CONSTRUCTOR_ELTS (exp);
3806 rtx xtarget = XEXP (target, 0);
3807 int set_word_size = TYPE_ALIGN (type);
3808 int nbytes = int_size_in_bytes (type), nbits;
3809 tree domain = TYPE_DOMAIN (type);
3810 tree domain_min, domain_max, bitlength;
3812 /* The default implementation strategy is to extract the constant
3813 parts of the constructor, use that to initialize the target,
3814 and then "or" in whatever non-constant ranges we need in addition.
3816 If a large set is all zero or all ones, it is
3817 probably better to set it using memset (if available) or bzero.
3818 Also, if a large set has just a single range, it may also be
3819 better to first clear the whole set (using
3820 bzero/memset), and then set the bits we want. */
3822 /* Check for all zeros. */
3823 if (elt == NULL_TREE)
3826 clear_storage (target, expr_size (exp),
3827 TYPE_ALIGN (type) / BITS_PER_UNIT);
3831 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3832 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3833 bitlength = size_binop (PLUS_EXPR,
3834 size_binop (MINUS_EXPR, domain_max, domain_min),
3837 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
3839 nbits = TREE_INT_CST_LOW (bitlength);
3841 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
3842 are "complicated" (more than one range), initialize (the
3843 constant parts) by copying from a constant. */
3844 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
3845 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
3847 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3848 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
3849 char *bit_buffer = (char *) alloca (nbits);
3850 HOST_WIDE_INT word = 0;
3853 int offset = 0; /* In bytes from beginning of set. */
3854 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
3857 if (bit_buffer[ibit])
3859 if (BYTES_BIG_ENDIAN)
3860 word |= (1 << (set_word_size - 1 - bit_pos));
3862 word |= 1 << bit_pos;
3865 if (bit_pos >= set_word_size || ibit == nbits)
3867 if (word != 0 || ! cleared)
3869 rtx datum = GEN_INT (word);
3871 /* The assumption here is that it is safe to use
3872 XEXP if the set is multi-word, but not if
3873 it's single-word. */
3874 if (GET_CODE (target) == MEM)
3876 to_rtx = plus_constant (XEXP (target, 0), offset);
3877 to_rtx = change_address (target, mode, to_rtx);
3879 else if (offset == 0)
3883 emit_move_insn (to_rtx, datum);
3889 offset += set_word_size / BITS_PER_UNIT;
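/* Standalone sketch (host C) of the packing loop above: gather a
   byte-per-bit buffer, as filled in by get_set_constructor_bits,
   into fixed-width words, honoring the big-endian bit numbering of
   the BYTES_BIG_ENDIAN branch.  Returns the number of words
   produced.  */
#if 0
static int
pack_set_bits (const char *bit_buffer, int nbits, int word_bits,
	       int big_endian, unsigned long *out)
{
  unsigned long word = 0;
  int ibit, bit_pos = 0, nwords = 0;

  for (ibit = 0; ibit < nbits; ibit++)
    {
      if (bit_buffer[ibit])
	word |= (big_endian
		 ? 1UL << (word_bits - 1 - bit_pos)
		 : 1UL << bit_pos);
      if (++bit_pos == word_bits || ibit == nbits - 1)
	{
	  out[nwords++] = word;		/* flush completed/final word */
	  word = 0;
	  bit_pos = 0;
	}
    }
  return nwords;
}
#endif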
3895 /* Don't bother clearing storage if the set is all ones. */
3896 if (TREE_CHAIN (elt) != NULL_TREE
3897 || (TREE_PURPOSE (elt) == NULL_TREE
3899 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
3900 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
3901 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
3902 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
3904 clear_storage (target, expr_size (exp),
3905 TYPE_ALIGN (type) / BITS_PER_UNIT);
3908 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
3910 /* start of range of element or NULL */
3911 tree startbit = TREE_PURPOSE (elt);
3912 /* end of range of element, or element value */
3913 tree endbit = TREE_VALUE (elt);
3914 HOST_WIDE_INT startb, endb;
3915 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
3917 bitlength_rtx = expand_expr (bitlength,
3918 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
3920 /* handle non-range tuple element like [ expr ] */
3921 if (startbit == NULL_TREE)
3923 startbit = save_expr (endbit);
3926 startbit = convert (sizetype, startbit);
3927 endbit = convert (sizetype, endbit);
3928 if (! integer_zerop (domain_min))
3930 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
3931 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
3933 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
3934 EXPAND_CONST_ADDRESS);
3935 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
3936 EXPAND_CONST_ADDRESS);
3940 targetx = assign_stack_temp (GET_MODE (target),
3941 GET_MODE_SIZE (GET_MODE (target)),
3943 emit_move_insn (targetx, target);
3945 else if (GET_CODE (target) == MEM)
3950 #ifdef TARGET_MEM_FUNCTIONS
3951 /* Optimization: If startbit and endbit are
3952 constants divisible by BITS_PER_UNIT,
3953 call memset instead. */
3954 if (TREE_CODE (startbit) == INTEGER_CST
3955 && TREE_CODE (endbit) == INTEGER_CST
3956 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
3957 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
3959 emit_library_call (memset_libfunc, 0,
3961 plus_constant (XEXP (targetx, 0),
3962 startb / BITS_PER_UNIT),
3964 constm1_rtx, TYPE_MODE (integer_type_node),
3965 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3966 TYPE_MODE (sizetype));
3971 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
3972 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
3973 bitlength_rtx, TYPE_MODE (sizetype),
3974 startbit_rtx, TYPE_MODE (sizetype),
3975 endbit_rtx, TYPE_MODE (sizetype));
3978 emit_move_insn (target, targetx);
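/* The byte-boundary optimization above in host C: a range of set
   bits that starts and ends on byte boundaries is one memset of ~0
   bytes; otherwise bits go in one at a time (the loop below stands
   in for the __setbits library routine, using little-endian bit
   numbering within a byte purely for illustration).  */
#if 0
#include <string.h>

static void
set_bit_range (unsigned char *set, long startbit, long endbit)
{
  if (startbit % 8 == 0 && (endbit + 1) % 8 == 0)
    memset (set + startbit / 8, ~0, (endbit + 1 - startbit) / 8);
  else
    {
      long b;

      for (b = startbit; b <= endbit; b++)
	set[b / 8] |= 1 << (b % 8);
    }
}
#endif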
3986 /* Store the value of EXP (an expression tree)
3987 into a subfield of TARGET which has mode MODE and occupies
3988 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3989 If MODE is VOIDmode, it means that we are storing into a bit-field.
3991 If VALUE_MODE is VOIDmode, return nothing in particular.
3992 UNSIGNEDP is not used in this case.
3994 Otherwise, return an rtx for the value stored. This rtx
3995 has mode VALUE_MODE if that is convenient to do.
3996 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3998 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3999 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
4002 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4003 unsignedp, align, total_size)
4005 int bitsize, bitpos;
4006 enum machine_mode mode;
4008 enum machine_mode value_mode;
4013 HOST_WIDE_INT width_mask = 0;
4015 if (bitsize < HOST_BITS_PER_WIDE_INT)
4016 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4018 /* If we are storing into an unaligned field of an aligned union that is
4019 in a register, we may have the mode of TARGET being an integer mode but
4020 MODE == BLKmode. In that case, get an aligned object whose size and
4021 alignment are the same as TARGET and store TARGET into it (we can avoid
4022 the store if the field being stored is the entire width of TARGET). Then
4023 call ourselves recursively to store the field into a BLKmode version of
4024 that object. Finally, load from the object into TARGET. This is not
4025 very efficient in general, but should only be slightly more expensive
4026 than the otherwise-required unaligned accesses. Perhaps this can be
4027 cleaned up later. */
4030 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4032 rtx object = assign_stack_temp (GET_MODE (target),
4033 GET_MODE_SIZE (GET_MODE (target)), 0);
4034 rtx blk_object = copy_rtx (object);
4036 MEM_IN_STRUCT_P (object) = 1;
4037 MEM_IN_STRUCT_P (blk_object) = 1;
4038 PUT_MODE (blk_object, BLKmode);
4040 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4041 emit_move_insn (object, target);
4043 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4046 /* Even though we aren't returning target, we need to
4047 give it the updated value. */
4048 emit_move_insn (target, object);
4053 /* If the structure is in a register or if the component
4054 is a bit field, we cannot use addressing to access it.
4055 Use bit-field techniques or SUBREG to store in it. */
4057 if (mode == VOIDmode
4058 || (mode != BLKmode && ! direct_store[(int) mode])
4059 || GET_CODE (target) == REG
4060 || GET_CODE (target) == SUBREG
4061 /* If the field isn't aligned enough to store as an ordinary memref,
4062 store it as a bit field. */
4063 || (SLOW_UNALIGNED_ACCESS
4064 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4065 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4067 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4069 /* If BITSIZE is narrower than the size of the type of EXP
4070 we will be narrowing TEMP. Normally, what's wanted are the
4071 low-order bits. However, if EXP's type is a record and this is a
4072 big-endian machine, we want the upper BITSIZE bits. */
4073 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4074 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4075 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4076 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4077 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4081 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4082 MODE. */
4083 if (mode != VOIDmode && mode != BLKmode
4084 && mode != TYPE_MODE (TREE_TYPE (exp)))
4085 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4087 /* If the modes of TARGET and TEMP are both BLKmode, both
4088 must be in memory and BITPOS must be aligned on a byte
4089 boundary. If so, we simply do a block copy. */
4090 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4092 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4093 || bitpos % BITS_PER_UNIT != 0)
4096 target = change_address (target, VOIDmode,
4097 plus_constant (XEXP (target, 0),
4098 bitpos / BITS_PER_UNIT));
4100 emit_block_move (target, temp,
4101 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4105 return value_mode == VOIDmode ? const0_rtx : target;
4108 /* Store the value in the bitfield. */
4109 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4110 if (value_mode != VOIDmode)
4112 /* The caller wants an rtx for the value. */
4113 /* If possible, avoid refetching from the bitfield itself. */
4115 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4118 enum machine_mode tmode;
4121 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4122 tmode = GET_MODE (temp);
4123 if (tmode == VOIDmode)
4125 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4126 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4127 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
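/* The shift pair above as host arithmetic: left-justify the BITSIZE
   field within a word, then shift back so the field's top bit fills
   the vacated positions.  Assumes the host's >> on a signed type is
   arithmetic, as the rtl shifts are on the target.  */
#if 0
static long
sign_extend_field (long field, int bitsize)
{
  int count = (int) (sizeof (long) * 8) - bitsize;

  return (field << count) >> count;
}
#endif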
4129 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4130 NULL_RTX, value_mode, 0, align,
4137 rtx addr = XEXP (target, 0);
4140 /* If a value is wanted, it must be the lhs;
4141 so make the address stable for multiple use. */
4143 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4144 && ! CONSTANT_ADDRESS_P (addr)
4145 /* A frame-pointer reference is already stable. */
4146 && ! (GET_CODE (addr) == PLUS
4147 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4148 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4149 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4150 addr = copy_to_reg (addr);
4152 /* Now build a reference to just the desired component. */
4154 to_rtx = copy_rtx (change_address (target, mode,
4155 plus_constant (addr,
4157 / BITS_PER_UNIT))));
4158 MEM_IN_STRUCT_P (to_rtx) = 1;
4160 return store_expr (exp, to_rtx, value_mode != VOIDmode);
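/* The addressable path above reduced to host C: once the field is
   known to start on a byte boundary and to have a directly storable
   mode, the store is an ordinary store through an adjusted pointer.
   `int' stands in for the field's machine mode and the field is
   assumed suitably aligned.  */
#if 0
static void
store_aligned_field (char *target, long bitpos, int value)
{
  *(int *) (target + bitpos / 8) = value;	/* BITS_PER_UNIT == 8 */
}
#endif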
4164 /* Return true if any object containing the innermost array is an unaligned
4165 packed structure field. */
4168 get_inner_unaligned_p (exp)
4171 int needed_alignment = TYPE_ALIGN (TREE_TYPE (exp));
4175 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4177 if (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
4181 else if (TREE_CODE (exp) != ARRAY_REF
4182 && TREE_CODE (exp) != NON_LVALUE_EXPR
4183 && ! ((TREE_CODE (exp) == NOP_EXPR
4184 || TREE_CODE (exp) == CONVERT_EXPR)
4185 && (TYPE_MODE (TREE_TYPE (exp))
4186 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4189 exp = TREE_OPERAND (exp, 0);
4195 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4196 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4197 ARRAY_REFs and find the ultimate containing object, which we return.
4199 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4200 bit position, and *PUNSIGNEDP to the signedness of the field.
4201 If the position of the field is variable, we store a tree
4202 giving the variable offset (in units) in *POFFSET.
4203 This offset is in addition to the bit position.
4204 If the position is not variable, we store 0 in *POFFSET.
4205 We set *PALIGNMENT to the alignment in bytes of the address that will be
4206 computed. This is the alignment of the thing we return if *POFFSET
4207 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4209 If any of the extraction expressions is volatile,
4210 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4212 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4213 is a mode that can be used to access the field. In that case, *PBITSIZE
4214 is redundant.
4216 If the field describes a variable-sized object, *PMODE is set to
4217 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4218 this case, but the address of the object can be found. */
4221 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4222 punsignedp, pvolatilep, palignment)
4227 enum machine_mode *pmode;
4232 tree orig_exp = exp;
4234 enum machine_mode mode = VOIDmode;
4235 tree offset = integer_zero_node;
4236 int alignment = BIGGEST_ALIGNMENT;
4238 if (TREE_CODE (exp) == COMPONENT_REF)
4240 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4241 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4242 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4243 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4245 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4247 size_tree = TREE_OPERAND (exp, 1);
4248 *punsignedp = TREE_UNSIGNED (exp);
4252 mode = TYPE_MODE (TREE_TYPE (exp));
4253 *pbitsize = GET_MODE_BITSIZE (mode);
4254 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4259 if (TREE_CODE (size_tree) != INTEGER_CST)
4260 mode = BLKmode, *pbitsize = -1;
4261 else
4262 *pbitsize = TREE_INT_CST_LOW (size_tree);
4265 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4266 and find the ultimate containing object. */
4272 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4274 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4275 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4276 : TREE_OPERAND (exp, 2));
4277 tree constant = integer_zero_node, var = pos;
4279 /* If this field hasn't been filled in yet, don't go
4280 past it. This should only happen when folding expressions
4281 made during type construction. */
4285 /* Assume here that the offset is a multiple of a unit.
4286 If not, there should be an explicitly added constant. */
4287 if (TREE_CODE (pos) == PLUS_EXPR
4288 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4289 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4290 else if (TREE_CODE (pos) == INTEGER_CST)
4291 constant = pos, var = integer_zero_node;
4293 *pbitpos += TREE_INT_CST_LOW (constant);
4294 offset = size_binop (PLUS_EXPR, offset,
4295 size_binop (EXACT_DIV_EXPR, var,
4296 size_int (BITS_PER_UNIT)));
4299 else if (TREE_CODE (exp) == ARRAY_REF)
4301 /* This code is based on the code in case ARRAY_REF in expand_expr
4302 below. We assume here that the size of an array element is
4303 always an integral multiple of BITS_PER_UNIT. */
4305 tree index = TREE_OPERAND (exp, 1);
4306 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4308 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4309 tree index_type = TREE_TYPE (index);
4311 if (! integer_zerop (low_bound))
4312 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
4314 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4316 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4318 index_type = TREE_TYPE (index);
4321 index = fold (build (MULT_EXPR, index_type, index,
4322 convert (index_type,
4323 TYPE_SIZE (TREE_TYPE (exp)))));
4325 if (TREE_CODE (index) == INTEGER_CST
4326 && TREE_INT_CST_HIGH (index) == 0)
4327 *pbitpos += TREE_INT_CST_LOW (index);
4329 offset = size_binop (PLUS_EXPR, offset,
4330 size_binop (FLOOR_DIV_EXPR, index,
4331 size_int (BITS_PER_UNIT)));
4333 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4334 && ! ((TREE_CODE (exp) == NOP_EXPR
4335 || TREE_CODE (exp) == CONVERT_EXPR)
4336 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4337 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4339 && (TYPE_MODE (TREE_TYPE (exp))
4340 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4343 /* If any reference in the chain is volatile, the effect is volatile. */
4344 if (TREE_THIS_VOLATILE (exp))
4347 /* If the offset is non-constant already, then we can't assume any
4348 alignment more than the alignment here. */
4349 if (! integer_zerop (offset))
4350 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4352 exp = TREE_OPERAND (exp, 0);
4355 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4356 alignment = MIN (alignment, DECL_ALIGN (exp));
4357 else if (TREE_TYPE (exp) != 0)
4358 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4360 if (integer_zerop (offset))
4363 if (offset != 0 && contains_placeholder_p (offset))
4364 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4368 *palignment = alignment / BITS_PER_UNIT;
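/* An illustrative sketch (not compiled as part of this file): for the
   C source

     struct s { int a : 3; int b : 29; } x;

   a caller expanding a use of `x.b' asks where the field lives much as
   the bit-field reference case of expand_expr below does:

     int bitsize, bitpos, unsignedp, volatilep, alignment;
     tree offset;
     enum machine_mode mode1;
     tree inner = get_inner_reference (ref, &bitsize, &bitpos, &offset,
                                       &mode1, &unsignedp, &volatilep,
                                       &alignment);

   Assuming REF is the COMPONENT_REF for `x.b', INNER is the VAR_DECL
   for `x', BITSIZE is 29, OFFSET is 0, MODE1 is VOIDmode since `b' is
   a bit-field, and BITPOS is 3 under the usual layout.  */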
4372 /* Given an rtx VALUE that may contain additions and multiplications,
4373 return an equivalent value that just refers to a register or memory.
4374 This is done by generating instructions to perform the arithmetic
4375 and returning a pseudo-register containing the value.
4377 The returned value may be a REG, SUBREG, MEM or constant. */
4380 force_operand (value, target)
4383 register optab binoptab = 0;
4384 /* Use a temporary to force order of execution of calls to `force_operand'.  */
4388 /* Use subtarget as the target for operand 0 of a binary operation. */
4389 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4391 if (GET_CODE (value) == PLUS)
4392 binoptab = add_optab;
4393 else if (GET_CODE (value) == MINUS)
4394 binoptab = sub_optab;
4395 else if (GET_CODE (value) == MULT)
4397 op2 = XEXP (value, 1);
4398 if (!CONSTANT_P (op2)
4399 && !(GET_CODE (op2) == REG && op2 != subtarget))
4401 tmp = force_operand (XEXP (value, 0), subtarget);
4402 return expand_mult (GET_MODE (value), tmp,
4403 force_operand (op2, NULL_RTX),
4409 op2 = XEXP (value, 1);
4410 if (!CONSTANT_P (op2)
4411 && !(GET_CODE (op2) == REG && op2 != subtarget))
4413 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4415 binoptab = add_optab;
4416 op2 = negate_rtx (GET_MODE (value), op2);
4419 /* Check for an addition with OP2 a constant integer and our first
4420 operand a PLUS of a virtual register and something else. In that
4421 case, we want to emit the sum of the virtual register and the
4422 constant first and then add the other value. This allows virtual
4423 register instantiation to simply modify the constant rather than
4424 creating another one around this addition. */
4425 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4426 && GET_CODE (XEXP (value, 0)) == PLUS
4427 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4428 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4429 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4431 rtx temp = expand_binop (GET_MODE (value), binoptab,
4432 XEXP (XEXP (value, 0), 0), op2,
4433 subtarget, 0, OPTAB_LIB_WIDEN);
4434 return expand_binop (GET_MODE (value), binoptab, temp,
4435 force_operand (XEXP (XEXP (value, 0), 1), 0),
4436 target, 0, OPTAB_LIB_WIDEN);
4439 tmp = force_operand (XEXP (value, 0), subtarget);
4440 return expand_binop (GET_MODE (value), binoptab, tmp,
4441 force_operand (op2, NULL_RTX),
4442 target, 0, OPTAB_LIB_WIDEN);
4443 /* We give UNSIGNEDP = 0 to expand_binop
4444 because the only operations we are expanding here are signed ones. */
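/* An informal example of the above: for
   VALUE = (plus:SI (reg:SI 100) (const_int 4)) and TARGET = 0,
   BINOPTAB is add_optab, expand_binop emits an add insn, and the
   result is a pseudo such as (reg:SI 101) holding the sum.  A VALUE
   that is already a REG, MEM or constant involves no arithmetic and
   comes back unchanged.  */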
4449 /* Subroutine of expand_expr:
4450 save the non-copied parts (LIST) of an expr (LHS), and return a list
4451 which can restore these values to their previous values,
4452 should something modify their storage. */
4455 save_noncopied_parts (lhs, list)
4462 for (tail = list; tail; tail = TREE_CHAIN (tail))
4463 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4464 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
4467 tree part = TREE_VALUE (tail);
4468 tree part_type = TREE_TYPE (part);
4469 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
4470 rtx target = assign_temp (part_type, 0, 1, 1);
4471 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
4472 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
4473 parts = tree_cons (to_be_saved,
4474 build (RTL_EXPR, part_type, NULL_TREE,
4477 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
4482 /* Subroutine of expand_expr:
4483 record the non-copied parts (LIST) of an expr (LHS), and return a list
4484 which specifies the initial values of these parts. */
4487 init_noncopied_parts (lhs, list)
4494 for (tail = list; tail; tail = TREE_CHAIN (tail))
4495 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4496 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
4499 tree part = TREE_VALUE (tail);
4500 tree part_type = TREE_TYPE (part);
4501 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
4502 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
4507 /* Subroutine of expand_expr: return nonzero iff there is no way that
4508 EXP can reference X, which is being modified. */
4511 safe_from_p (x, exp)
4519 /* If EXP has varying size, we MUST use a target since we currently
4520 have no way of allocating temporaries of variable size
4521 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
4522 So we assume here that something at a higher level has prevented a
4523 clash. This is somewhat bogus, but the best we can do. Only
4524 do this when X is BLKmode. */
4525 || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4526 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
4527 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
4528 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
4529 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
4531 && GET_MODE (x) == BLKmode))
4534 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
4535 find the underlying pseudo. */
4536 if (GET_CODE (x) == SUBREG)
4539 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4543 /* If X is a location in the outgoing argument area, it is always safe. */
4544 if (GET_CODE (x) == MEM
4545 && (XEXP (x, 0) == virtual_outgoing_args_rtx
4546 || (GET_CODE (XEXP (x, 0)) == PLUS
4547 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
4550 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4553 exp_rtl = DECL_RTL (exp);
4560 if (TREE_CODE (exp) == TREE_LIST)
4561 return ((TREE_VALUE (exp) == 0
4562 || safe_from_p (x, TREE_VALUE (exp)))
4563 && (TREE_CHAIN (exp) == 0
4564 || safe_from_p (x, TREE_CHAIN (exp))));
4569 return safe_from_p (x, TREE_OPERAND (exp, 0));
4573 return (safe_from_p (x, TREE_OPERAND (exp, 0))
4574 && safe_from_p (x, TREE_OPERAND (exp, 1)));
4578 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
4579 the expression. If it is set, we conflict iff we are that rtx or
4580 both are in memory. Otherwise, we check all operands of the
4581 expression recursively. */
4583 switch (TREE_CODE (exp))
4586 return (staticp (TREE_OPERAND (exp, 0))
4587 || safe_from_p (x, TREE_OPERAND (exp, 0)));
4590 if (GET_CODE (x) == MEM)
4595 exp_rtl = CALL_EXPR_RTL (exp);
4598 /* Assume that the call will clobber all hard registers and all of memory.  */
4600 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4601 || GET_CODE (x) == MEM)
4608 /* If a sequence exists, we would have to scan every instruction
4609 in the sequence to see if it was safe.  This is probably not worthwhile.  */
4611 if (RTL_EXPR_SEQUENCE (exp))
4614 exp_rtl = RTL_EXPR_RTL (exp);
4617 case WITH_CLEANUP_EXPR:
4618 exp_rtl = RTL_EXPR_RTL (exp);
4621 case CLEANUP_POINT_EXPR:
4622 return safe_from_p (x, TREE_OPERAND (exp, 0));
4625 exp_rtl = SAVE_EXPR_RTL (exp);
4629 /* The only operand we look at is operand 1. The rest aren't
4630 part of the expression. */
4631 return safe_from_p (x, TREE_OPERAND (exp, 1));
4633 case METHOD_CALL_EXPR:
4634 /* This takes an rtx argument, but shouldn't appear here. */
4638 /* If we have an rtx, we do not need to scan our operands. */
4642 nops = tree_code_length[(int) TREE_CODE (exp)];
4643 for (i = 0; i < nops; i++)
4644 if (TREE_OPERAND (exp, i) != 0
4645 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
4649 /* If we have an rtl, find any enclosed object.  Then see if we conflict with it.  */
4653 if (GET_CODE (exp_rtl) == SUBREG)
4655 exp_rtl = SUBREG_REG (exp_rtl);
4656 if (GET_CODE (exp_rtl) == REG
4657 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
4661 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
4662 are memory and EXP is not readonly. */
4663 return ! (rtx_equal_p (x, exp_rtl)
4664 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
4665 && ! TREE_READONLY (exp)));
4668 /* If we reach here, it is safe. */
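/* A typical use (sketch, mirroring the CONSTRUCTOR case of expand_expr
   below): before building a value in TARGET, check that the expression
   does not read TARGET, and fall back to a fresh place if it might:

     if (target == 0 || ! safe_from_p (target, exp))
       target = gen_reg_rtx (mode);

   E.g. for `a = a + b', DECL_RTL (a) is not safe from `a + b', so the
   sum must be computed somewhere else before the store clobbers `a'.  */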
4672 /* Subroutine of expand_expr: return nonzero iff EXP is an
4673 expression whose type is statically determinable. */
4679 if (TREE_CODE (exp) == PARM_DECL
4680 || TREE_CODE (exp) == VAR_DECL
4681 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
4682 || TREE_CODE (exp) == COMPONENT_REF
4683 || TREE_CODE (exp) == ARRAY_REF)
4688 /* Subroutine of expand_expr: return rtx if EXP is a
4689 variable or parameter; else return 0. */
4696 switch (TREE_CODE (exp))
4700 return DECL_RTL (exp);
4706 /* expand_expr: generate code for computing expression EXP.
4707 An rtx for the computed value is returned. The value is never null.
4708 In the case of a void EXP, const0_rtx is returned.
4710 The value may be stored in TARGET if TARGET is nonzero.
4711 TARGET is just a suggestion; callers must assume that
4712 the rtx returned may not be the same as TARGET.
4714 If TARGET is CONST0_RTX, it means that the value will be ignored.
4716 If TMODE is not VOIDmode, it suggests generating the
4717 result in mode TMODE. But this is done only when convenient.
4718 Otherwise, TMODE is ignored and the value is generated in its natural mode.
4719 TMODE is just a suggestion; callers must assume that
4720 the rtx returned may not have mode TMODE.
4722 Note that TARGET may have neither TMODE nor MODE. In that case, it
4723 probably will not be used.
4725 If MODIFIER is EXPAND_SUM then when EXP is an addition
4726 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4727 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4728 products as above, or REG or MEM, or constant.
4729 Ordinarily in such cases we would output mul or add instructions
4730 and then return a pseudo reg containing the sum.
4732 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4733 it also marks a label as absolutely required (it can't be dead).
4734 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
4735 This is used for outputting expressions used in initializers.
4737 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4738 with a constant address even if that address is not normally legitimate.
4739 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
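/* Usage sketch (illustrative): most callers simply write

     temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

   and take the value wherever it lands, while code that is forming an
   address asks for a lazy sum and legitimizes it afterwards, as the
   INDIRECT_REF case below does:

     op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
     op0 = memory_address (mode, op0);  */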
4742 expand_expr (exp, target, tmode, modifier)
4745 enum machine_mode tmode;
4746 enum expand_modifier modifier;
4748 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4749 This is static so it will be accessible to our recursive callees. */
4750 static tree placeholder_list = 0;
4751 register rtx op0, op1, temp;
4752 tree type = TREE_TYPE (exp);
4753 int unsignedp = TREE_UNSIGNED (type);
4754 register enum machine_mode mode = TYPE_MODE (type);
4755 register enum tree_code code = TREE_CODE (exp);
4757 /* Use subtarget as the target for operand 0 of a binary operation. */
4758 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4759 rtx original_target = target;
4760 /* Maybe defer this until we're sure we're not doing bytecode? */
4761 int ignore = (target == const0_rtx
4762 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4763 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4764 || code == COND_EXPR)
4765 && TREE_CODE (type) == VOID_TYPE));
4769 if (output_bytecode && modifier != EXPAND_INITIALIZER)
4771 bc_expand_expr (exp);
4775 /* Don't use hard regs as subtargets, because the combiner
4776 can only handle pseudo regs. */
4777 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4779 /* Avoid subtargets inside loops,
4780 since they hide some invariant expressions. */
4781 if (preserve_subexpressions_p ())
4784 /* If we are going to ignore this result, we need only do something
4785 if there is a side-effect somewhere in the expression. If there
4786 is, short-circuit the most common cases here. Note that we must
4787 not call expand_expr with anything but const0_rtx in case this
4788 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
4792 if (! TREE_SIDE_EFFECTS (exp))
4795 /* Ensure we reference a volatile object even if value is ignored. */
4796 if (TREE_THIS_VOLATILE (exp)
4797 && TREE_CODE (exp) != FUNCTION_DECL
4798 && mode != VOIDmode && mode != BLKmode)
4800 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
4801 if (GET_CODE (temp) == MEM)
4802 temp = copy_to_reg (temp);
4806 if (TREE_CODE_CLASS (code) == '1')
4807 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4808 VOIDmode, modifier);
4809 else if (TREE_CODE_CLASS (code) == '2'
4810 || TREE_CODE_CLASS (code) == '<')
4812 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4813 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
4816 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4817 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4818 /* If the second operand has no side effects, just evaluate the first.  */
4820 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4821 VOIDmode, modifier);
4826 /* If will do cse, generate all results into pseudo registers
4827 since 1) that allows cse to find more things
4828 and 2) otherwise cse could produce an insn the machine cannot support.  */
4831 if (! cse_not_expected && mode != BLKmode && target
4832 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4839 tree function = decl_function_context (exp);
4840 /* Handle using a label in a containing function. */
4841 if (function != current_function_decl && function != 0)
4843 struct function *p = find_function_data (function);
4844 /* Allocate in the memory associated with the function
4845 that the label is in. */
4846 push_obstacks (p->function_obstack,
4847 p->function_maybepermanent_obstack);
4849 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4850 label_rtx (exp), p->forced_labels);
4853 else if (modifier == EXPAND_INITIALIZER)
4854 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4855 label_rtx (exp), forced_labels);
4856 temp = gen_rtx (MEM, FUNCTION_MODE,
4857 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
4858 if (function != current_function_decl && function != 0)
4859 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
4864 if (DECL_RTL (exp) == 0)
4866 error_with_decl (exp, "prior parameter's size depends on `%s'");
4867 return CONST0_RTX (mode);
4870 /* ... fall through ... */
4873 /* If a static var's type was incomplete when the decl was written,
4874 but the type is complete now, lay out the decl now. */
4875 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4876 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
4878 push_obstacks_nochange ();
4879 end_temporary_allocation ();
4880 layout_decl (exp, 0);
4881 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
4885 /* ... fall through ... */
4889 if (DECL_RTL (exp) == 0)
4892 /* Ensure the variable is marked as used even if it doesn't go through
4893 a parser.  If it hasn't been used yet, write out an external definition.  */
4895 if (! TREE_USED (exp))
4897 assemble_external (exp);
4898 TREE_USED (exp) = 1;
4901 /* Show we haven't gotten RTL for this yet. */
4904 /* Handle variables inherited from containing functions. */
4905 context = decl_function_context (exp);
4907 /* We treat inline_function_decl as an alias for the current function
4908 because that is the inline function whose vars, types, etc.
4909 are being merged into the current function.
4910 See expand_inline_function. */
4912 if (context != 0 && context != current_function_decl
4913 && context != inline_function_decl
4914 /* If var is static, we don't need a static chain to access it. */
4915 && ! (GET_CODE (DECL_RTL (exp)) == MEM
4916 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
4920 /* Mark as non-local and addressable. */
4921 DECL_NONLOCAL (exp) = 1;
4922 if (DECL_NO_STATIC_CHAIN (current_function_decl))
4924 mark_addressable (exp);
4925 if (GET_CODE (DECL_RTL (exp)) != MEM)
4927 addr = XEXP (DECL_RTL (exp), 0);
4928 if (GET_CODE (addr) == MEM)
4929 addr = gen_rtx (MEM, Pmode,
4930 fix_lexical_addr (XEXP (addr, 0), exp));
4932 addr = fix_lexical_addr (addr, exp);
4933 temp = change_address (DECL_RTL (exp), mode, addr);
4936 /* This is the case of an array whose size is to be determined
4937 from its initializer, while the initializer is still being parsed.
4940 else if (GET_CODE (DECL_RTL (exp)) == MEM
4941 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
4942 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
4943 XEXP (DECL_RTL (exp), 0));
4945 /* If DECL_RTL is memory, we are in the normal case and either
4946 the address is not valid or it is not a register and -fforce-addr
4947 is specified, get the address into a register. */
4949 else if (GET_CODE (DECL_RTL (exp)) == MEM
4950 && modifier != EXPAND_CONST_ADDRESS
4951 && modifier != EXPAND_SUM
4952 && modifier != EXPAND_INITIALIZER
4953 && (! memory_address_p (DECL_MODE (exp),
4954 XEXP (DECL_RTL (exp), 0))
4956 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
4957 temp = change_address (DECL_RTL (exp), VOIDmode,
4958 copy_rtx (XEXP (DECL_RTL (exp), 0)));
4960 /* If we got something, return it. But first, set the alignment
4961 if the address is a register.  */
4964 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
4965 mark_reg_pointer (XEXP (temp, 0),
4966 DECL_ALIGN (exp) / BITS_PER_UNIT);
4971 /* If the mode of DECL_RTL does not match that of the decl, it
4972 must be a promoted value. We return a SUBREG of the wanted mode,
4973 but mark it so that we know that it was already extended. */
4975 if (GET_CODE (DECL_RTL (exp)) == REG
4976 && GET_MODE (DECL_RTL (exp)) != mode)
4978 /* Get the signedness used for this variable. Ensure we get the
4979 same mode we got when the variable was declared. */
4980 if (GET_MODE (DECL_RTL (exp))
4981 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
4984 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
4985 SUBREG_PROMOTED_VAR_P (temp) = 1;
4986 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4990 return DECL_RTL (exp);
4993 return immed_double_const (TREE_INT_CST_LOW (exp),
4994 TREE_INT_CST_HIGH (exp),
4998 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
5001 /* If optimized, generate immediate CONST_DOUBLE
5002 which will be turned into memory by reload if necessary.
5004 We used to force a register so that loop.c could see it. But
5005 this does not allow gen_* patterns to perform optimizations with
5006 the constants. It also produces two insns in cases like "x = 1.0;".
5007 On most machines, floating-point constants are not permitted in
5008 many insns, so we'd end up copying it to a register in any case.
5010 Now, we do the copying in expand_binop, if appropriate. */
5011 return immed_real_const (exp);
5015 if (! TREE_CST_RTL (exp))
5016 output_constant_def (exp);
5018 /* TREE_CST_RTL probably contains a constant address.
5019 On RISC machines where a constant address isn't valid,
5020 make some insns to get that address into a register. */
5021 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5022 && modifier != EXPAND_CONST_ADDRESS
5023 && modifier != EXPAND_INITIALIZER
5024 && modifier != EXPAND_SUM
5025 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5027 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5028 return change_address (TREE_CST_RTL (exp), VOIDmode,
5029 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5030 return TREE_CST_RTL (exp);
5033 context = decl_function_context (exp);
5035 /* We treat inline_function_decl as an alias for the current function
5036 because that is the inline function whose vars, types, etc.
5037 are being merged into the current function.
5038 See expand_inline_function. */
5039 if (context == current_function_decl || context == inline_function_decl)
5042 /* If this is non-local, handle it. */
5045 temp = SAVE_EXPR_RTL (exp);
5046 if (temp && GET_CODE (temp) == REG)
5048 put_var_into_stack (exp);
5049 temp = SAVE_EXPR_RTL (exp);
5051 if (temp == 0 || GET_CODE (temp) != MEM)
5053 return change_address (temp, mode,
5054 fix_lexical_addr (XEXP (temp, 0), exp));
5056 if (SAVE_EXPR_RTL (exp) == 0)
5058 if (mode == VOIDmode)
5061 temp = assign_temp (type, 0, 0, 0);
5063 SAVE_EXPR_RTL (exp) = temp;
5064 if (!optimize && GET_CODE (temp) == REG)
5065 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
5068 /* If the mode of TEMP does not match that of the expression, it
5069 must be a promoted value. We pass store_expr a SUBREG of the
5070 wanted mode but mark it so that we know that it was already
5071 extended.  Note that `unsignedp' was modified above in this case.  */
5074 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5076 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
5077 SUBREG_PROMOTED_VAR_P (temp) = 1;
5078 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5081 if (temp == const0_rtx)
5082 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5084 store_expr (TREE_OPERAND (exp, 0), temp, 0);
5087 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5088 must be a promoted value. We return a SUBREG of the wanted mode,
5089 but mark it so that we know that it was already extended. */
5091 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5092 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5094 /* Compute the signedness and make the proper SUBREG. */
5095 promote_mode (type, mode, &unsignedp, 0);
5096 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
5097 SUBREG_PROMOTED_VAR_P (temp) = 1;
5098 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5102 return SAVE_EXPR_RTL (exp);
5107 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5108 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5112 case PLACEHOLDER_EXPR:
5113 /* If there is an object on the head of the placeholder list,
5114 see if some object in its references is of type TYPE.  For
5115 further information, see tree.def. */
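/* Informally: a PLACEHOLDER_EXPR stands for a record object that is
   not known yet, and the WITH_RECORD_EXPR case below supplies one by
   pushing it on PLACEHOLDER_LIST around the expansion of its first
   operand.  This arises, e.g., for Ada record types whose field sizes
   refer to a discriminant of the record itself.  */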
5116 if (placeholder_list)
5118 tree need_type = TYPE_MAIN_VARIANT (type);
5120 tree old_list = placeholder_list;
5123 /* See if the object is the type that we want. Then see if
5124 the operand of any reference is the type we want. */
5125 if ((TYPE_MAIN_VARIANT (TREE_TYPE (TREE_PURPOSE (placeholder_list)))
5127 object = TREE_PURPOSE (placeholder_list);
5129 /* Find the innermost reference that is of the type we want. */
5130 for (elt = TREE_PURPOSE (placeholder_list);
5132 && (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5133 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5134 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5135 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e');
5136 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
5137 || TREE_CODE (elt) == COND_EXPR)
5138 ? TREE_OPERAND (elt, 1) : TREE_OPERAND (elt, 0)))
5139 if (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5140 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (elt, 0)))
5142 object = TREE_OPERAND (elt, 0);
5146 /* Expand this object skipping the list entries before
5147 it was found in case it is also a PLACEHOLDER_EXPR.
5148 In that case, we want to translate it using subsequent entries.  */
5150 placeholder_list = TREE_CHAIN (placeholder_list);
5151 temp = expand_expr (object, original_target, tmode, modifier);
5152 placeholder_list = old_list;
5157 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5160 case WITH_RECORD_EXPR:
5161 /* Put the object on the placeholder list, expand our first operand,
5162 and pop the list. */
5163 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5165 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
5167 placeholder_list = TREE_CHAIN (placeholder_list);
5171 expand_exit_loop_if_false (NULL_PTR,
5172 invert_truthvalue (TREE_OPERAND (exp, 0)));
5177 expand_start_loop (1);
5178 expand_expr_stmt (TREE_OPERAND (exp, 0));
5186 tree vars = TREE_OPERAND (exp, 0);
5187 int vars_need_expansion = 0;
5189 /* Need to open a binding contour here because
5190 if there are any cleanups they must be contained here.  */
5191 expand_start_bindings (0);
5193 /* Mark the corresponding BLOCK for output in its proper place. */
5194 if (TREE_OPERAND (exp, 2) != 0
5195 && ! TREE_USED (TREE_OPERAND (exp, 2)))
5196 insert_block (TREE_OPERAND (exp, 2));
5198 /* If VARS have not yet been expanded, expand them now. */
5201 if (DECL_RTL (vars) == 0)
5203 vars_need_expansion = 1;
5206 expand_decl_init (vars);
5207 vars = TREE_CHAIN (vars);
5210 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
5212 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
5218 if (RTL_EXPR_SEQUENCE (exp))
5220 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
5222 emit_insns (RTL_EXPR_SEQUENCE (exp));
5223 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
5225 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
5226 free_temps_for_rtl_expr (exp);
5227 return RTL_EXPR_RTL (exp);
5230 /* If we don't need the result, just ensure we evaluate any subexpressions.  */
5235 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
5236 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
5240 /* All elts simple constants => refer to a constant in memory. But
5241 if this is a non-BLKmode mode, let it store a field at a time
5242 since that should make a CONST_INT or CONST_DOUBLE when we
5243 fold. Likewise, if we have a target we can use, it is best to
5244 store directly into the target unless the type is large enough
5245 that memcpy will be used. If we are making an initializer and
5246 all operands are constant, put it in memory as well. */
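/* For instance (illustrative): a file-scope

     static int v[4] = {1, 2, 3, 4};

   has a TREE_STATIC, all-constant CONSTRUCTOR and is best emitted once
   by output_constant_def, whereas a small automatic struct is cheaper
   to build by storing each field into TARGET directly.  */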
5247 else if ((TREE_STATIC (exp)
5248 && ((mode == BLKmode
5249 && ! (target != 0 && safe_from_p (target, exp)))
5250 || TREE_ADDRESSABLE (exp)
5251 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5252 && (move_by_pieces_ninsns
5253 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
5254 TYPE_ALIGN (type) / BITS_PER_UNIT)
5256 && ! mostly_zeros_p (exp))))
5257 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
5259 rtx constructor = output_constant_def (exp);
5260 if (modifier != EXPAND_CONST_ADDRESS
5261 && modifier != EXPAND_INITIALIZER
5262 && modifier != EXPAND_SUM
5263 && (! memory_address_p (GET_MODE (constructor),
5264 XEXP (constructor, 0))
5266 && GET_CODE (XEXP (constructor, 0)) != REG)))
5267 constructor = change_address (constructor, VOIDmode,
5268 XEXP (constructor, 0));
5274 /* Handle calls that pass values in multiple non-contiguous
5275 locations. The Irix 6 ABI has examples of this. */
5276 if (target == 0 || ! safe_from_p (target, exp)
5277 || GET_CODE (target) == PARALLEL)
5279 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
5280 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5282 target = assign_temp (type, 0, 1, 1);
5285 if (TREE_READONLY (exp))
5287 if (GET_CODE (target) == MEM)
5288 target = copy_rtx (target);
5290 RTX_UNCHANGING_P (target) = 1;
5293 store_constructor (exp, target, 0);
5299 tree exp1 = TREE_OPERAND (exp, 0);
5302 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
5303 op0 = memory_address (mode, op0);
5305 temp = gen_rtx (MEM, mode, op0);
5306 /* If address was computed by addition,
5307 mark this as an element of an aggregate. */
5308 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5309 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
5310 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
5311 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
5312 || (TREE_CODE (exp1) == ADDR_EXPR
5313 && (exp2 = TREE_OPERAND (exp1, 0))
5314 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
5315 MEM_IN_STRUCT_P (temp) = 1;
5316 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
5318 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5319 here, because, in C and C++, the fact that a location is accessed
5320 through a pointer to const does not mean that the value there can
5321 never change. Languages where it can never change should
5322 also set TREE_STATIC. */
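/* An example of why (illustrative):

     int x = 1;
     const int *p = &x;
     x = 2;              the load `*p' must now see 2

   The object stays writable through `x' even though `p' points to
   const, so TREE_READONLY alone must not set RTX_UNCHANGING_P.  */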
5323 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
5328 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
5332 tree array = TREE_OPERAND (exp, 0);
5333 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5334 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5335 tree index = TREE_OPERAND (exp, 1);
5336 tree index_type = TREE_TYPE (index);
5339 if (TREE_CODE (low_bound) != INTEGER_CST
5340 && contains_placeholder_p (low_bound))
5341 low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
5343 /* Optimize the special-case of a zero lower bound.
5345 We convert the low_bound to sizetype to avoid some problems
5346 with constant folding. (E.g. suppose the lower bound is 1,
5347 and its mode is QI. Without the conversion, (ARRAY
5348 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5349 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5351 But sizetype isn't quite right either (especially if
5352 the lowbound is negative). FIXME */
5354 if (! integer_zerop (low_bound))
5355 index = fold (build (MINUS_EXPR, index_type, index,
5356 convert (sizetype, low_bound)));
5358 if ((TREE_CODE (index) != INTEGER_CST
5359 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5360 && (! SLOW_UNALIGNED_ACCESS || ! get_inner_unaligned_p (exp)))
5362 /* Nonconstant array index or nonconstant element size, and
5363 not an array in an unaligned (packed) structure field.
5364 Generate the tree for *(&array+index) and expand that,
5365 except do it in a language-independent way
5366 and don't complain about non-lvalue arrays.
5367 `mark_addressable' should already have been called
5368 for any array for which this case will be reached. */
5370 /* Don't forget the const or volatile flag from the array element.  */
5372 tree variant_type = build_type_variant (type,
5373 TREE_READONLY (exp),
5374 TREE_THIS_VOLATILE (exp));
5375 tree array_adr = build1 (ADDR_EXPR,
5376 build_pointer_type (variant_type), array);
5378 tree size = size_in_bytes (type);
5380 /* Convert the integer argument to a type the same size as sizetype
5381 so the multiply won't overflow spuriously. */
5382 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
5383 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
5386 if (TREE_CODE (size) != INTEGER_CST
5387 && contains_placeholder_p (size))
5388 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
5390 /* Don't think the address has side effects
5391 just because the array does.
5392 (In some cases the address might have side effects,
5393 and we fail to record that fact here. However, it should not
5394 matter, since expand_expr should not care.) */
5395 TREE_SIDE_EFFECTS (array_adr) = 0;
5399 (INDIRECT_REF, type,
5400 fold (build (PLUS_EXPR,
5401 TYPE_POINTER_TO (variant_type),
5406 TYPE_POINTER_TO (variant_type),
5407 fold (build (MULT_EXPR, TREE_TYPE (index),
5409 convert (TREE_TYPE (index),
5412 /* Volatility, etc., of new expression is same as old expression.  */
5414 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
5415 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
5416 TREE_READONLY (elt) = TREE_READONLY (exp);
5418 return expand_expr (elt, target, tmode, modifier);
5421 /* Fold an expression like: "foo"[2].
5422 This is not done in fold so it won't happen inside &.
5423 Don't fold if this is for wide characters since it's too
5424 difficult to do correctly and this is a very rare case. */
5426 if (TREE_CODE (array) == STRING_CST
5427 && TREE_CODE (index) == INTEGER_CST
5428 && !TREE_INT_CST_HIGH (index)
5429 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
5430 && GET_MODE_CLASS (mode) == MODE_INT
5431 && GET_MODE_SIZE (mode) == 1)
5432 return GEN_INT (TREE_STRING_POINTER (array)[i]);
5434 /* If this is a constant index into a constant array,
5435 just get the value from the array. Handle both the cases when
5436 we have an explicit constructor and when our operand is a variable
5437 that was declared const. */
5439 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
5441 if (TREE_CODE (index) == INTEGER_CST
5442 && TREE_INT_CST_HIGH (index) == 0)
5444 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
5446 i = TREE_INT_CST_LOW (index);
5448 elem = TREE_CHAIN (elem);
5450 return expand_expr (fold (TREE_VALUE (elem)), target,
5455 else if (optimize >= 1
5456 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
5457 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
5458 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
5460 if (TREE_CODE (index) == INTEGER_CST
5461 && TREE_INT_CST_HIGH (index) == 0)
5463 tree init = DECL_INITIAL (array);
5465 i = TREE_INT_CST_LOW (index);
5466 if (TREE_CODE (init) == CONSTRUCTOR)
5468 tree elem = CONSTRUCTOR_ELTS (init);
5471 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
5472 elem = TREE_CHAIN (elem);
5474 return expand_expr (fold (TREE_VALUE (elem)), target,
5477 else if (TREE_CODE (init) == STRING_CST
5478 && i < TREE_STRING_LENGTH (init))
5479 return GEN_INT (TREE_STRING_POINTER (init)[i]);
5484 /* Treat array-ref with constant index as a component-ref. */
5488 /* If the operand is a CONSTRUCTOR, we can just extract the
5489 appropriate field if it is present. Don't do this if we have
5490 already written the data since we want to refer to that copy
5491 and varasm.c assumes that's what we'll do. */
5492 if (code != ARRAY_REF
5493 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
5494 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
5498 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
5499 elt = TREE_CHAIN (elt))
5500 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
5501 /* We can normally use the value of the field in the
5502 CONSTRUCTOR. However, if this is a bitfield in
5503 an integral mode that we can fit in a HOST_WIDE_INT,
5504 we must mask only the number of bits in the bitfield,
5505 since this is done implicitly by the constructor. If
5506 the bitfield does not meet either of those conditions,
5507 we can't do this optimization. */
5508 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
5509 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
5511 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
5512 <= HOST_BITS_PER_WIDE_INT))))
5514 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
5515 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
5517 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
5518 enum machine_mode imode
5519 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
5521 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
5523 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
5524 op0 = expand_and (op0, op1, target);
5529 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize, 0);
5531 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
5533 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
5543 enum machine_mode mode1;
5549 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
5550 &mode1, &unsignedp, &volatilep,
5553 /* If we got back the original object, something is wrong. Perhaps
5554 we are evaluating an expression too early. In any event, don't
5555 infinitely recurse. */
5559 /* If TEM's type is a union of variable size, pass TARGET to the inner
5560 computation, since it will need a temporary and TARGET is known
5561 to be usable for that.  This occurs in unchecked conversion in Ada.  */
5563 op0 = expand_expr (tem,
5564 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
5565 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
5567 ? target : NULL_RTX),
5569 modifier == EXPAND_INITIALIZER ? modifier : 0);
5571 /* If this is a constant, put it into a register if it is a
5572 legitimate constant and memory if it isn't. */
5573 if (CONSTANT_P (op0))
5575 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
5576 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
5577 op0 = force_reg (mode, op0);
5579 op0 = validize_mem (force_const_mem (mode, op0));
5584 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5586 if (GET_CODE (op0) != MEM)
5588 op0 = change_address (op0, VOIDmode,
5589 gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
5590 force_reg (ptr_mode, offset_rtx)));
5593 /* Don't forget about volatility even if this is a bitfield. */
5594 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
5596 op0 = copy_rtx (op0);
5597 MEM_VOLATILE_P (op0) = 1;
5600 /* In cases where an aligned union has an unaligned object
5601 as a field, we might be extracting a BLKmode value from
5602 an integer-mode (e.g., SImode) object. Handle this case
5603 by doing the extract into an object as wide as the field
5604 (which we know to be the width of a basic mode), then
5605 storing into memory, and changing the mode to BLKmode.
5606 If we ultimately want the address (EXPAND_CONST_ADDRESS or
5607 EXPAND_INITIALIZER), then we must not copy to a temporary. */
5608 if (mode1 == VOIDmode
5609 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5610 || (modifier != EXPAND_CONST_ADDRESS
5611 && modifier != EXPAND_INITIALIZER
5612 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
5613 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5614 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5615 /* If the field isn't aligned enough to fetch as a memref,
5616 fetch it as a bit field. */
5617 || (SLOW_UNALIGNED_ACCESS
5618 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
5619 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
5621 enum machine_mode ext_mode = mode;
5623 if (ext_mode == BLKmode)
5624 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
5626 if (ext_mode == BLKmode)
5628 /* In this case, BITPOS must start at a byte boundary and
5629 TARGET, if specified, must be a MEM. */
5630 if (GET_CODE (op0) != MEM
5631 || (target != 0 && GET_CODE (target) != MEM)
5632 || bitpos % BITS_PER_UNIT != 0)
5635 op0 = change_address (op0, VOIDmode,
5636 plus_constant (XEXP (op0, 0),
5637 bitpos / BITS_PER_UNIT));
5639 target = assign_temp (type, 0, 1, 1);
5641 emit_block_move (target, op0,
5642 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5649 op0 = validize_mem (op0);
5651 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
5652 mark_reg_pointer (XEXP (op0, 0), alignment);
5654 op0 = extract_bit_field (op0, bitsize, bitpos,
5655 unsignedp, target, ext_mode, ext_mode,
5657 int_size_in_bytes (TREE_TYPE (tem)));
5659 /* If the result is a record type and BITSIZE is narrower than
5660 the mode of OP0, an integral mode, and this is a big endian
5661 machine, we must put the field into the high-order bits. */
5662 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
5663 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
5664 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
5665 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
5666 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
5670 if (mode == BLKmode)
5672 rtx new = assign_stack_temp (ext_mode,
5673 bitsize / BITS_PER_UNIT, 0);
5675 emit_move_insn (new, op0);
5676 op0 = copy_rtx (new);
5677 PUT_MODE (op0, BLKmode);
5678 MEM_IN_STRUCT_P (op0) = 1;
5684 /* If the result is BLKmode, use that to access the object now as well.  */
5686 if (mode == BLKmode)
5689 /* Get a reference to just this component. */
5690 if (modifier == EXPAND_CONST_ADDRESS
5691 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5692 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
5693 (bitpos / BITS_PER_UNIT)));
5695 op0 = change_address (op0, mode1,
5696 plus_constant (XEXP (op0, 0),
5697 (bitpos / BITS_PER_UNIT)));
5698 if (GET_CODE (XEXP (op0, 0)) == REG)
5699 mark_reg_pointer (XEXP (op0, 0), alignment);
5701 MEM_IN_STRUCT_P (op0) = 1;
5702 MEM_VOLATILE_P (op0) |= volatilep;
5703 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
5704 || modifier == EXPAND_CONST_ADDRESS
5705 || modifier == EXPAND_INITIALIZER)
5707 else if (target == 0)
5708 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5710 convert_move (target, op0, unsignedp);
5714 /* Intended for a reference to a buffer of a file-object in Pascal.
5715 But it's not certain that a special tree code will really be
5716 necessary for these. INDIRECT_REF might work for them. */
5722 /* Pascal set IN expression.
5725 rlo = set_low - (set_low%bits_per_word);
5726 the_word = set [ (index - rlo)/bits_per_word ];
5727 bit_index = index % bits_per_word;
5728 bitmask = 1 << bit_index;
5729 return !!(the_word & bitmask); */
5731 tree set = TREE_OPERAND (exp, 0);
5732 tree index = TREE_OPERAND (exp, 1);
5733 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
5734 tree set_type = TREE_TYPE (set);
5735 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
5736 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
5737 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
5738 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
5739 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
5740 rtx setval = expand_expr (set, 0, VOIDmode, 0);
5741 rtx setaddr = XEXP (setval, 0);
5742 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
5744 rtx diff, quo, rem, addr, bit, result;
5746 preexpand_calls (exp);
5748 /* If domain is empty, answer is no. Likewise if index is constant
5749 and out of bounds. */
5750 if ((TREE_CODE (set_high_bound) == INTEGER_CST
5751 && TREE_CODE (set_low_bound) == INTEGER_CST
5752 && tree_int_cst_lt (set_high_bound, set_low_bound)
5753 || (TREE_CODE (index) == INTEGER_CST
5754 && TREE_CODE (set_low_bound) == INTEGER_CST
5755 && tree_int_cst_lt (index, set_low_bound))
5756 || (TREE_CODE (set_high_bound) == INTEGER_CST
5757 && TREE_CODE (index) == INTEGER_CST
5758 && tree_int_cst_lt (set_high_bound, index))))
5762 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5764 /* If we get here, we have to generate the code for both cases
5765 (in range and out of range). */
5767 op0 = gen_label_rtx ();
5768 op1 = gen_label_rtx ();
5770 if (! (GET_CODE (index_val) == CONST_INT
5771 && GET_CODE (lo_r) == CONST_INT))
5773 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
5774 GET_MODE (index_val), iunsignedp, 0);
5775 emit_jump_insn (gen_blt (op1));
5778 if (! (GET_CODE (index_val) == CONST_INT
5779 && GET_CODE (hi_r) == CONST_INT))
5781 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
5782 GET_MODE (index_val), iunsignedp, 0);
5783 emit_jump_insn (gen_bgt (op1));
5786 /* Calculate the element number of bit zero in the first word of the set.  */
5788 if (GET_CODE (lo_r) == CONST_INT)
5789 rlow = GEN_INT (INTVAL (lo_r)
5790 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
5792 rlow = expand_binop (index_mode, and_optab, lo_r,
5793 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
5794 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5796 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
5797 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5799 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
5800 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5801 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
5802 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5804 addr = memory_address (byte_mode,
5805 expand_binop (index_mode, add_optab, diff,
5806 setaddr, NULL_RTX, iunsignedp,
5809 /* Extract the bit we want to examine */
5810 bit = expand_shift (RSHIFT_EXPR, byte_mode,
5811 gen_rtx (MEM, byte_mode, addr),
5812 make_tree (TREE_TYPE (index), rem),
5814 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
5815 GET_MODE (target) == byte_mode ? target : 0,
5816 1, OPTAB_LIB_WIDEN);
5818 if (result != target)
5819 convert_move (target, result, 1);
5821 /* Output the code to handle the out-of-range case. */
5824 emit_move_insn (target, const0_rtx);
5829 case WITH_CLEANUP_EXPR:
5830 if (RTL_EXPR_RTL (exp) == 0)
5833 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5835 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
5836 /* That's it for this cleanup. */
5837 TREE_OPERAND (exp, 2) = 0;
5838 expand_eh_region_start ();
5840 return RTL_EXPR_RTL (exp);
5842 case CLEANUP_POINT_EXPR:
5844 extern int temp_slot_level;
5845 tree old_cleanups = cleanups_this_call;
5846 int old_temp_level = target_temp_slot_level;
5848 target_temp_slot_level = temp_slot_level;
5849 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5850 /* If we're going to use this value, load it up now. */
5852 op0 = force_not_mem (op0);
5853 expand_cleanups_to (old_cleanups);
5854 preserve_temp_slots (op0);
5857 target_temp_slot_level = old_temp_level;
5862 /* Check for a built-in function. */
5863 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5864 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5866 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5867 return expand_builtin (exp, target, subtarget, tmode, ignore);
5869 /* If this call was expanded already by preexpand_calls,
5870 just return the result we got. */
5871 if (CALL_EXPR_RTL (exp) != 0)
5872 return CALL_EXPR_RTL (exp);
5874 return expand_call (exp, target, ignore);
5876 case NON_LVALUE_EXPR:
5879 case REFERENCE_EXPR:
5880 if (TREE_CODE (type) == UNION_TYPE)
5882 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
5885 if (mode != BLKmode)
5886 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5888 target = assign_temp (type, 0, 1, 1);
5891 if (GET_CODE (target) == MEM)
5892 /* Store data into beginning of memory target. */
5893 store_expr (TREE_OPERAND (exp, 0),
5894 change_address (target, TYPE_MODE (valtype), 0), 0);
5896 else if (GET_CODE (target) == REG)
5897 /* Store this field into a union of the proper type. */
5898 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
5899 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
5901 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
5905 /* Return the entire union. */
5909 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5911 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
5914 /* If the signedness of the conversion differs and OP0 is
5915 a promoted SUBREG, clear that indication since we now
5916 have to do the proper extension. */
5917 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
5918 && GET_CODE (op0) == SUBREG)
5919 SUBREG_PROMOTED_VAR_P (op0) = 0;
5924 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
5925 if (GET_MODE (op0) == mode)
5928 /* If OP0 is a constant, just convert it into the proper mode. */
5929 if (CONSTANT_P (op0))
5931 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5932 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5934 if (modifier == EXPAND_INITIALIZER)
5935 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
5939 convert_to_mode (mode, op0,
5940 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5942 convert_move (target, op0,
5943 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5947 /* We come here from MINUS_EXPR when the second operand is a constant.  */
5950 this_optab = add_optab;
5952 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
5953 something else, make sure we add the register to the constant and
5954 then to the other thing. This case can occur during strength
5955 reduction and doing it this way will produce better code if the
5956 frame pointer or argument pointer is eliminated.
5958 fold-const.c will ensure that the constant is always in the inner
5959 PLUS_EXPR, so the only case we need to do anything about is if
5960 sp, ap, or fp is our second argument, in which case we must swap
5961 the innermost first argument and our second argument. */
5963 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5964 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
5965 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
5966 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
5967 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
5968 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
5970 tree t = TREE_OPERAND (exp, 1);
5972 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5973 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
5976 /* If the result is to be ptr_mode and we are adding an integer to
5977 something, we might be forming a constant. So try to use
5978 plus_constant. If it produces a sum and we can't accept it,
5979 use force_operand. This allows P = &ARR[const] to generate
5980 efficient code on machines where a SYMBOL_REF is not a valid address.
5983 If this is an EXPAND_SUM call, always return the sum. */
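/* For instance (illustrative, assuming 4-byte ints): `&arr[3]' for a
   file-scope `int arr[10]' can arrive here as (symbol_ref "arr") plus
   the integer 12, and plus_constant folds that to

     (const (plus (symbol_ref "arr") (const_int 12)))

   with no add insn; force_operand is needed only if the machine cannot
   accept that form where the caller wants it.  */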
5984 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
5985 || mode == ptr_mode)
5987 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
5988 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5989 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
5991 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
5993 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
5994 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5995 op1 = force_operand (op1, target);
5999 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6000 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
6001 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
6003 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6005 if (! CONSTANT_P (op0))
6007 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6008 VOIDmode, modifier);
6009 /* Don't go to both_summands if modifier
6010 says it's not right to return a PLUS. */
6011 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6015 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
6016 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6017 op0 = force_operand (op0, target);
6022 /* No sense saving up arithmetic to be done
6023 if it's all in the wrong mode to form part of an address.
6024 And force_operand won't know whether to sign-extend or zero-extend.  */
6026 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6027 || mode != ptr_mode)
6030 preexpand_calls (exp);
6031 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6034 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
6035 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
6038 /* Make sure any term that's a sum with a constant comes last. */
6039 if (GET_CODE (op0) == PLUS
6040 && CONSTANT_P (XEXP (op0, 1)))
6046 /* If adding to a sum including a constant,
6047 associate it to put the constant outside. */
6048 if (GET_CODE (op1) == PLUS
6049 && CONSTANT_P (XEXP (op1, 1)))
6051 rtx constant_term = const0_rtx;
6053 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
6056 /* Ensure that MULT comes first if there is one. */
6057 else if (GET_CODE (op0) == MULT)
6058 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
6060 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
6062 /* Let's also eliminate constants from op0 if possible. */
6063 op0 = eliminate_constant_term (op0, &constant_term);
6065 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
6066 their sum should be a constant. Form it into OP1, since the
6067 result we want will then be OP0 + OP1. */
6069 temp = simplify_binary_operation (PLUS, mode, constant_term,
6074 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
6077 /* Put a constant term last and put a multiplication first. */
6078 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
6079 temp = op1, op1 = op0, op0 = temp;
6081 temp = simplify_binary_operation (PLUS, mode, op0, op1);
6082 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
6085 /* For initializers, we are allowed to return a MINUS of two
6086 symbolic constants.  Here we handle all cases when both operands are constant.  */
6088 /* Handle difference of two symbolic constants,
6089 for the sake of an initializer. */
6090 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6091 && really_constant_p (TREE_OPERAND (exp, 0))
6092 && really_constant_p (TREE_OPERAND (exp, 1)))
6094 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
6095 VOIDmode, modifier);
6096 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6097 VOIDmode, modifier);
6099 /* If the last operand is a CONST_INT, use plus_constant of
6100 the negated constant. Else make the MINUS. */
6101 if (GET_CODE (op1) == CONST_INT)
6102 return plus_constant (op0, - INTVAL (op1));
6104 return gen_rtx (MINUS, mode, op0, op1);
6106 /* Convert A - const to A + (-const). */
6107 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6109 tree negated = fold (build1 (NEGATE_EXPR, type,
6110 TREE_OPERAND (exp, 1)));
6112 /* Deal with the case where we can't negate the constant in TYPE.  */
6114 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6116 tree newtype = signed_type (type);
6117 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
6118 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
6119 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
6121 if (! TREE_OVERFLOW (newneg))
6122 return expand_expr (convert (type,
6123 build (PLUS_EXPR, newtype,
6125 target, tmode, modifier);
6129 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
6133 this_optab = sub_optab;
6137 preexpand_calls (exp);
6138 /* If first operand is constant, swap them.
6139 Thus the following special case checks need only
6140 check the second operand. */
6141 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6143 register tree t1 = TREE_OPERAND (exp, 0);
6144 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
6145 TREE_OPERAND (exp, 1) = t1;
6148 /* Attempt to return something suitable for generating an
6149 indexed address, for machines that support that. */
6151 if (modifier == EXPAND_SUM && mode == ptr_mode
6152 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6153 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
6155 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
6157 /* Apply distributive law if OP0 is x+c. */
6158 if (GET_CODE (op0) == PLUS
6159 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
6160 return gen_rtx (PLUS, mode,
6161 gen_rtx (MULT, mode, XEXP (op0, 0),
6162 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
6163 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
6164 * INTVAL (XEXP (op0, 1))));
6166 if (GET_CODE (op0) != REG)
6167 op0 = force_operand (op0, NULL_RTX);
6168 if (GET_CODE (op0) != REG)
6169 op0 = copy_to_mode_reg (mode, op0);
6171 return gen_rtx (MULT, mode, op0,
6172 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
6175 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6178 /* Check for multiplying things that have been extended
6179 from a narrower type. If this machine supports multiplying
6180 in that narrower type with a result in the desired type,
6181 do it that way, and avoid the explicit type-conversion. */
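/* E.g. (illustrative): on a machine whose md provides mulhisi3, the
   source `(int) h1 * (int) h2' with HImode operands can go through
   smul_widen_optab straight to the SImode product, instead of
   sign-extending both operands and doing an SImode multiply.  */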
6182 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
6183 && TREE_CODE (type) == INTEGER_TYPE
6184 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6185 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
6186 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6187 && int_fits_type_p (TREE_OPERAND (exp, 1),
6188 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6189 /* Don't use a widening multiply if a shift will do. */
6190 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
6191 > HOST_BITS_PER_WIDE_INT)
6192 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
6193 ||
6194 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6195 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6196 ==
6197 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
6198 /* If both operands are extended, they must either both
6199 be zero-extended or both be sign-extended. */
6200 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6201 ==
6202 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
6204 enum machine_mode innermode
6205 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
6206 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6207 ? smul_widen_optab : umul_widen_optab);
6208 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6209 ? umul_widen_optab : smul_widen_optab);
6210 if (mode == GET_MODE_WIDER_MODE (innermode))
6212 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
6214 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6215 NULL_RTX, VOIDmode, 0);
6216 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6217 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6218 VOIDmode, 0);
6219 else
6220 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6221 NULL_RTX, VOIDmode, 0);
6222 goto binop2;
6224 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6225 && innermode == word_mode)
6228 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6229 NULL_RTX, VOIDmode, 0);
6230 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6231 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6232 VOIDmode, 0);
6233 else
6234 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6235 NULL_RTX, VOIDmode, 0);
6236 temp = expand_binop (mode, other_optab, op0, op1, target,
6237 unsignedp, OPTAB_LIB_WIDEN);
6238 htem = expand_mult_highpart_adjust (innermode,
6239 gen_highpart (innermode, temp),
6240 op0, op1,
6241 gen_highpart (innermode, temp),
6242 unsignedp);
6243 emit_move_insn (gen_highpart (innermode, temp), htem);
6244 return temp;
6248 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6249 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6250 return expand_mult (mode, op0, op1, target, unsignedp);
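/* Illustrative example (assumption: 32-bit int, 64-bit long long):

       long long p = (long long) a * (long long) b;    (a, b are int)

   Both operands are extensions from the same narrower type, so the
   widening-multiply check above emits one 32x32->64 multiply via
   [us]mul_widen_optab instead of a full 64x64 multiply.  */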
6252 case TRUNC_DIV_EXPR:
6253 case FLOOR_DIV_EXPR:
6254 case CEIL_DIV_EXPR:
6255 case ROUND_DIV_EXPR:
6256 case EXACT_DIV_EXPR:
6257 preexpand_calls (exp);
6258 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6259 subtarget = 0;
6260 /* Possible optimization: compute the dividend with EXPAND_SUM
6261 then if the divisor is constant can optimize the case
6262 where some terms of the dividend have coeffs divisible by it. */
6263 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6264 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6265 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
6267 case RDIV_EXPR:
6268 this_optab = flodiv_optab;
6269 goto binop;
6271 case TRUNC_MOD_EXPR:
6272 case FLOOR_MOD_EXPR:
6273 case CEIL_MOD_EXPR:
6274 case ROUND_MOD_EXPR:
6275 preexpand_calls (exp);
6276 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6277 subtarget = 0;
6278 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6279 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6280 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
6282 case FIX_ROUND_EXPR:
6283 case FIX_FLOOR_EXPR:
6284 case FIX_CEIL_EXPR:
6285 abort (); /* Not used for C. */
6287 case FIX_TRUNC_EXPR:
6288 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6289 if (target == 0)
6290 target = gen_reg_rtx (mode);
6291 expand_fix (target, op0, unsignedp);
6292 return target;
6294 case FLOAT_EXPR:
6295 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6296 if (target == 0)
6297 target = gen_reg_rtx (mode);
6298 /* expand_float can't figure out what to do if FROM has VOIDmode.
6299 So give it the correct mode. With -O, cse will optimize this. */
6300 if (GET_MODE (op0) == VOIDmode)
6301 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6302 op0);
6303 expand_float (target, op0,
6304 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6305 return target;
6307 case NEGATE_EXPR:
6308 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6309 temp = expand_unop (mode, neg_optab, op0, target, 0);
6310 if (temp == 0)
6311 abort ();
6312 return temp;
6314 case ABS_EXPR:
6315 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6317 /* Handle complex values specially. */
6318 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
6319 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6320 return expand_complex_abs (mode, op0, target, unsignedp);
6322 /* Unsigned abs is simply the operand. Testing here means we don't
6323 risk generating incorrect code below. */
6324 if (TREE_UNSIGNED (type))
6325 return op0;
6327 return expand_abs (mode, op0, target, unsignedp,
6328 safe_from_p (target, TREE_OPERAND (exp, 0)));
6332 target = original_target;
6333 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
6334 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
6335 || GET_MODE (target) != mode
6336 || (GET_CODE (target) == REG
6337 && REGNO (target) < FIRST_PSEUDO_REGISTER))
6338 target = gen_reg_rtx (mode);
6339 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6340 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6342 /* First try to do it with a special MIN or MAX instruction.
6343 If that does not win, use a conditional jump to select the proper
6344 value.  */
6345 this_optab = (TREE_UNSIGNED (type)
6346 ? (code == MIN_EXPR ? umin_optab : umax_optab)
6347 : (code == MIN_EXPR ? smin_optab : smax_optab));
6349 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
6354 /* At this point, a MEM target is no longer useful; we will get better
6355 code without it.  */
6357 if (GET_CODE (target) == MEM)
6358 target = gen_reg_rtx (mode);
6361 emit_move_insn (target, op0);
6363 op0 = gen_label_rtx ();
6365 /* If this mode is an integer too wide to compare properly,
6366 compare word by word. Rely on cse to optimize constant cases. */
6367 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
6369 if (code == MAX_EXPR)
6370 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6371 target, op1, NULL_RTX, op0);
6373 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6374 op1, target, NULL_RTX, op0);
6375 emit_move_insn (target, op1);
6379 if (code == MAX_EXPR)
6380 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6381 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
6382 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
6384 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6385 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
6386 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
6387 if (temp == const0_rtx)
6388 emit_move_insn (target, op1);
6389 else if (temp != const_true_rtx)
6391 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
6392 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
6393 else
6394 abort ();
6395 emit_move_insn (target, op1);
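/* Illustrative sketch of this fallback (source-level view, an
   assumption): without a min/max instruction, `r = MIN (a, b)'
   comes out roughly as

       r = a;
       if (r <= b) goto done;      (LE or LEU per signedness)
       r = b;
     done:

   using the word-by-word jump when the mode is too wide to compare
   directly.  */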
6397 emit_label (op0);
6398 return target;
6400 case BIT_NOT_EXPR:
6402 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6403 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
6404 if (temp == 0)
6405 abort ();
6406 return temp;
6408 case FFS_EXPR:
6409 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6410 temp = expand_unop (mode, ffs_optab, op0, target, 1);
6415 /* ??? Can optimize bitwise operations with one arg constant.
6416 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6417 and (a bitwise1 b) bitwise2 b (etc)
6418 but that is probably not worth while. */
6420 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6421 boolean values when we want in all cases to compute both of them. In
6422 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6423 as actual zero-or-1 values and then bitwise anding. In cases where
6424 there cannot be any side effects, better code would be made by
6425 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6426 how to recognize those cases. */
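/* For example (illustrative): `f () && g ()' is TRUTH_ANDIF_EXPR and
   must skip g() when f() yields zero, so it needs a branch; a
   TRUTH_AND_EXPR conjunction, by contrast, evaluates both operands
   to 0 or 1 unconditionally and simply bitwise-ANDs them, with no
   branch at all.  */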
6427 case BIT_AND_EXPR:
6428 case TRUTH_AND_EXPR:
6430 this_optab = and_optab;
6431 goto binop;
6432 case BIT_IOR_EXPR:
6433 case TRUTH_OR_EXPR:
6435 this_optab = ior_optab;
6436 goto binop;
6437 case BIT_XOR_EXPR:
6438 case TRUTH_XOR_EXPR:
6440 this_optab = xor_optab;
6441 goto binop;
6444 case LSHIFT_EXPR:
6445 case RSHIFT_EXPR:
6447 preexpand_calls (exp);
6448 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6450 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6451 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
6454 /* Could determine the answer when only additive constants differ. Also,
6455 the addition of one can be handled by changing the condition. */
6456 case LT_EXPR:
6457 case LE_EXPR:
6458 case GT_EXPR:
6459 case GE_EXPR:
6460 case EQ_EXPR:
6461 case NE_EXPR:
6462 preexpand_calls (exp);
6463 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
6464 if (temp != 0)
6465 return temp;
6467 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
6468 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
6469 && original_target
6470 && GET_CODE (original_target) == REG
6471 && (GET_MODE (original_target)
6472 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6474 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
6475 VOIDmode, 0);
6477 if (temp != original_target)
6478 temp = copy_to_reg (temp);
6480 op1 = gen_label_rtx ();
6481 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
6482 GET_MODE (temp), unsignedp, 0);
6483 emit_jump_insn (gen_beq (op1));
6484 emit_move_insn (temp, const1_rtx);
6485 emit_label (op1);
6486 return temp;
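/* Illustrative sketch of the insns above (assumption): for
   `r = (x != 0)' this is roughly

       temp = x;
       cmp temp, 0
       beq L          (temp already holds the result 0)
       temp = 1;
     L:

   so the loaded value doubles as the result when it is zero.  */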
6489 /* If no set-flag instruction, must generate a conditional
6490 store into a temporary variable. Drop through
6491 and handle this like && and ||. */
6493 case TRUTH_ANDIF_EXPR:
6494 case TRUTH_ORIF_EXPR:
6495 if (! ignore
6496 && (target == 0 || ! safe_from_p (target, exp)
6497 /* Make sure we don't have a hard reg (such as function's return
6498 value) live across basic blocks, if not optimizing. */
6499 || (!optimize && GET_CODE (target) == REG
6500 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
6501 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6503 if (target)
6504 emit_clr_insn (target);
6506 op1 = gen_label_rtx ();
6507 jumpifnot (exp, op1);
6509 if (target)
6510 emit_0_to_1_insn (target);
6512 emit_label (op1);
6513 return ignore ? const0_rtx : target;
6515 case TRUTH_NOT_EXPR:
6516 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6517 /* The parser is careful to generate TRUTH_NOT_EXPR
6518 only with operands that are always zero or one. */
6519 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
6520 target, 1, OPTAB_LIB_WIDEN);
6525 case COMPOUND_EXPR:
6526 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6527 emit_queue ();
6528 return expand_expr (TREE_OPERAND (exp, 1),
6529 (ignore ? const0_rtx : target),
6530 VOIDmode, modifier);
6532 case COND_EXPR:
6533 /* If we would have a "singleton" (see below) were it not for a
6534 conversion in each arm, bring that conversion back out. */
6535 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6536 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
6537 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
6538 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
6540 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
6541 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
6543 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
6544 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6545 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
6546 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
6547 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
6548 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6549 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
6550 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
6551 return expand_expr (build1 (NOP_EXPR, type,
6552 build (COND_EXPR, TREE_TYPE (true),
6553 TREE_OPERAND (exp, 0),
6554 true, false)),
6555 target, tmode, modifier);
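/* For example (illustrative): `c ? (long) (a + b) : (long) a' is
   rewritten as `(long) (c ? a + b : a)', exposing the "singleton"
   form X ? A + B : A handled below.  */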
6559 rtx flag = NULL_RTX;
6560 tree left_cleanups = NULL_TREE;
6561 tree right_cleanups = NULL_TREE;
6563 /* Used to save a pointer to the place to put the setting of
6564 the flag that indicates if this side of the conditional was
6565 taken. We backpatch the code, if we find out later that we
6566 have any conditional cleanups that need to be performed. */
6567 rtx dest_right_flag = NULL_RTX;
6568 rtx dest_left_flag = NULL_RTX;
6570 /* Note that COND_EXPRs whose type is a structure or union
6571 are required to be constructed to contain assignments of
6572 a temporary variable, so that we can evaluate them here
6573 for side effect only. If type is void, we must do likewise. */
6575 /* If an arm of the branch requires a cleanup,
6576 only that cleanup is performed. */
6578 tree singleton = 0;
6579 tree binary_op = 0, unary_op = 0;
6580 tree old_cleanups = cleanups_this_call;
6582 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
6583 convert it to our mode, if necessary. */
6584 if (integer_onep (TREE_OPERAND (exp, 1))
6585 && integer_zerop (TREE_OPERAND (exp, 2))
6586 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6590 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6591 modifier);
6592 return const0_rtx;
6595 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
6596 if (GET_MODE (op0) == mode)
6597 return op0;
6599 if (target == 0)
6600 target = gen_reg_rtx (mode);
6601 convert_move (target, op0, unsignedp);
6602 return target;
6605 /* Check for X ? A + B : A. If we have this, we can copy A to the
6606 output and conditionally add B. Similarly for unary operations.
6607 Don't do this if X has side-effects because those side effects
6608 might affect A or B and the "?" operation is a sequence point in
6609 ANSI. (operand_equal_p tests for side effects.) */
6611 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
6612 && operand_equal_p (TREE_OPERAND (exp, 2),
6613 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6614 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
6615 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
6616 && operand_equal_p (TREE_OPERAND (exp, 1),
6617 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6618 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
6619 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
6620 && operand_equal_p (TREE_OPERAND (exp, 2),
6621 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6622 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
6623 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
6624 && operand_equal_p (TREE_OPERAND (exp, 1),
6625 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6626 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
6628 /* If we are not to produce a result, we have no target. Otherwise,
6629 if a target was specified use it; it will not be used as an
6630 intermediate target unless it is safe.  If no target, use a
6631 temporary.  */
6633 if (ignore)
6634 temp = 0;
6635 else if (original_target
6636 && (safe_from_p (original_target, TREE_OPERAND (exp, 0))
6637 || (singleton && GET_CODE (original_target) == REG
6638 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
6639 && original_target == var_rtx (singleton)))
6640 && GET_MODE (original_target) == mode
6641 && ! (GET_CODE (original_target) == MEM
6642 && MEM_VOLATILE_P (original_target)))
6643 temp = original_target;
6644 else if (TREE_ADDRESSABLE (type))
6645 abort ();
6646 else
6647 temp = assign_temp (type, 0, 0, 1);
6649 /* If we had X ? A + C : A, with C a constant power of 2, and we can
6650 do the test of X as a store-flag operation, do this as
6651 A + ((X != 0) << log C). Similarly for other simple binary
6652 operators. Only do for C == 1 if BRANCH_COST is low. */
6653 if (temp && singleton && binary_op
6654 && (TREE_CODE (binary_op) == PLUS_EXPR
6655 || TREE_CODE (binary_op) == MINUS_EXPR
6656 || TREE_CODE (binary_op) == BIT_IOR_EXPR
6657 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
6658 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
6659 : integer_onep (TREE_OPERAND (binary_op, 1)))
6660 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6663 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
6664 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
6665 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
6666 : xor_optab);
6668 /* If we had X ? A : A + 1, do this as A + (X == 0).
6670 We have to invert the truth value here and then put it
6671 back later if do_store_flag fails. We cannot simply copy
6672 TREE_OPERAND (exp, 0) to another variable and modify that
6673 because invert_truthvalue can modify the tree pointed to
6674 by its argument.  */
6675 if (singleton == TREE_OPERAND (exp, 1))
6676 TREE_OPERAND (exp, 0)
6677 = invert_truthvalue (TREE_OPERAND (exp, 0));
6679 result = do_store_flag (TREE_OPERAND (exp, 0),
6680 (safe_from_p (temp, singleton)
6681 ? temp : NULL_RTX),
6682 mode, BRANCH_COST <= 1);
6684 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
6685 result = expand_shift (LSHIFT_EXPR, mode, result,
6686 build_int_2 (tree_log2
6687 (TREE_OPERAND (binary_op, 1)),
6688 0),
6690 (safe_from_p (temp, singleton)
6691 ? temp : NULL_RTX), 0);
6695 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
6696 return expand_binop (mode, boptab, op1, result, temp,
6697 unsignedp, OPTAB_LIB_WIDEN);
6699 else if (singleton == TREE_OPERAND (exp, 1))
6700 TREE_OPERAND (exp, 0)
6701 = invert_truthvalue (TREE_OPERAND (exp, 0));
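/* Worked example (illustrative): for `a + (x ? 4 : 0)', i.e.
   X ? A + 4 : A, do_store_flag materializes (x != 0) as a 0/1
   value, the shift above turns it into (x != 0) << 2, and the
   final expand_binop adds that to A -- no branch is emitted.  */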
6704 do_pending_stack_adjust ();
6706 op0 = gen_label_rtx ();
6708 flag = gen_reg_rtx (word_mode);
6709 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
6713 /* If the target conflicts with the other operand of the
6714 binary op, we can't use it. Also, we can't use the target
6715 if it is a hard register, because evaluating the condition
6716 might clobber it. */
6717 if ((binary_op
6718 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
6719 || (GET_CODE (temp) == REG
6720 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
6721 temp = gen_reg_rtx (mode);
6722 store_expr (singleton, temp, 0);
6724 else
6725 expand_expr (singleton,
6726 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6727 dest_left_flag = get_last_insn ();
6728 if (singleton == TREE_OPERAND (exp, 1))
6729 jumpif (TREE_OPERAND (exp, 0), op0);
6731 jumpifnot (TREE_OPERAND (exp, 0), op0);
6733 /* Allows cleanups up to here. */
6734 old_cleanups = cleanups_this_call;
6735 if (binary_op && temp == 0)
6736 /* Just touch the other operand. */
6737 expand_expr (TREE_OPERAND (binary_op, 1),
6738 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6739 else if (binary_op)
6740 store_expr (build (TREE_CODE (binary_op), type,
6741 make_tree (type, temp),
6742 TREE_OPERAND (binary_op, 1)),
6743 temp, 0);
6744 else
6745 store_expr (build1 (TREE_CODE (unary_op), type,
6746 make_tree (type, temp)),
6747 temp, 0);
6749 dest_right_flag = get_last_insn ();
6752 /* This is now done in jump.c and is better done there because it
6753 produces shorter register lifetimes. */
6755 /* Check for both possibilities either constants or variables
6756 in registers (but not the same as the target!). If so, can
6757 save branches by assigning one, branching, and assigning the
6758 other.  */
6759 else if (temp && GET_MODE (temp) != BLKmode
6760 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
6761 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
6762 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
6763 && DECL_RTL (TREE_OPERAND (exp, 1))
6764 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
6765 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
6766 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
6767 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
6768 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
6769 && DECL_RTL (TREE_OPERAND (exp, 2))
6770 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
6771 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
6773 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6774 temp = gen_reg_rtx (mode);
6775 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6776 dest_left_flag = get_last_insn ();
6777 jumpifnot (TREE_OPERAND (exp, 0), op0);
6779 /* Allows cleanups up to here. */
6780 old_cleanups = cleanups_this_call;
6781 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6783 dest_right_flag = get_last_insn ();
6786 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
6787 comparison operator. If we have one of these cases, set the
6788 output to A, branch on A (cse will merge these two references),
6789 then set the output to FOO. */
6791 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6792 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6793 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6794 TREE_OPERAND (exp, 1), 0)
6795 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6796 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
6798 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6799 temp = gen_reg_rtx (mode);
6800 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6801 dest_left_flag = get_last_insn ();
6802 jumpif (TREE_OPERAND (exp, 0), op0);
6804 /* Allows cleanups up to here. */
6805 old_cleanups = cleanups_this_call;
6806 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6808 dest_right_flag = get_last_insn ();
6811 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6812 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6813 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6814 TREE_OPERAND (exp, 2), 0)
6815 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6816 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
6818 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6819 temp = gen_reg_rtx (mode);
6820 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6821 dest_left_flag = get_last_insn ();
6822 jumpifnot (TREE_OPERAND (exp, 0), op0);
6824 /* Allows cleanups up to here. */
6825 old_cleanups = cleanups_this_call;
6826 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6828 dest_right_flag = get_last_insn ();
6832 op1 = gen_label_rtx ();
6833 jumpifnot (TREE_OPERAND (exp, 0), op0);
6835 /* Allows cleanups up to here. */
6836 old_cleanups = cleanups_this_call;
6837 if (temp != 0)
6838 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6839 else
6840 expand_expr (TREE_OPERAND (exp, 1),
6841 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6842 dest_left_flag = get_last_insn ();
6844 /* Handle conditional cleanups, if any. */
6845 left_cleanups = defer_cleanups_to (old_cleanups);
6848 emit_jump_insn (gen_jump (op1));
6849 emit_barrier ();
6850 emit_label (op0);
6851 if (temp != 0)
6852 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6853 else
6854 expand_expr (TREE_OPERAND (exp, 2),
6855 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6856 dest_right_flag = get_last_insn ();
6859 /* Handle conditional cleanups, if any. */
6860 right_cleanups = defer_cleanups_to (old_cleanups);
6866 /* Add back in any conditional cleanups.  */
6867 if (left_cleanups || right_cleanups)
6873 /* Now that we know that a flag is needed, go back and add in the
6874 setting of the flag. */
6876 /* Do the left side flag. */
6877 last = get_last_insn ();
6878 /* Flag left cleanups as needed. */
6879 emit_move_insn (flag, const1_rtx);
6880 /* ??? deprecated, use sequences instead. */
6881 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
6883 /* Do the right side flag. */
6884 last = get_last_insn ();
6885 /* Flag right cleanups as needed.  */
6886 emit_move_insn (flag, const0_rtx);
6887 /* ??? deprecated, use sequences instead. */
6888 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
6890 /* All cleanups must be on the function_obstack. */
6891 push_obstacks_nochange ();
6892 resume_temporary_allocation ();
6894 /* convert flag, which is an rtx, into a tree. */
6895 cond = make_node (RTL_EXPR);
6896 TREE_TYPE (cond) = integer_type_node;
6897 RTL_EXPR_RTL (cond) = flag;
6898 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
6899 cond = save_expr (cond);
6901 if (! left_cleanups)
6902 left_cleanups = integer_zero_node;
6903 if (! right_cleanups)
6904 right_cleanups = integer_zero_node;
6905 new_cleanups = build (COND_EXPR, void_type_node,
6906 truthvalue_conversion (cond),
6907 left_cleanups, right_cleanups);
6908 new_cleanups = fold (new_cleanups);
6912 /* Now add in the conditionalized cleanups. */
6913 cleanups_this_call
6914 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
6915 expand_eh_region_start ();
6920 case TARGET_EXPR:
6922 /* Something needs to be initialized, but we didn't know
6923 where that thing was when building the tree. For example,
6924 it could be the return value of a function, or a parameter
6925 to a function which lays down in the stack, or a temporary
6926 variable which must be passed by reference.
6928 We guarantee that the expression will either be constructed
6929 or copied into our original target. */
6931 tree slot = TREE_OPERAND (exp, 0);
6932 tree cleanups = NULL_TREE;
6936 if (TREE_CODE (slot) != VAR_DECL)
6937 abort ();
6940 target = original_target;
6944 if (DECL_RTL (slot) != 0)
6946 target = DECL_RTL (slot);
6947 /* We have already expanded the slot, so don't do
6948 anything else.  */
6949 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6954 target = assign_temp (type, 2, 1, 1);
6955 /* All temp slots at this level must not conflict. */
6956 preserve_temp_slots (target);
6957 DECL_RTL (slot) = target;
6959 /* Since SLOT is not known to the called function
6960 to belong to its stack frame, we must build an explicit
6961 cleanup. This case occurs when we must build up a reference
6962 to pass the reference as an argument. In this case,
6963 it is very likely that such a reference need not be
6964 built here.  */
6966 if (TREE_OPERAND (exp, 2) == 0)
6967 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
6968 cleanups = TREE_OPERAND (exp, 2);
6973 /* This case does occur, when expanding a parameter which
6974 needs to be constructed on the stack. The target
6975 is the actual stack address that we want to initialize.
6976 The function we call will perform the cleanup in this case. */
6978 /* If we have already assigned it space, use that space,
6979 not the target that we were passed in, as our target
6980 parameter is only a hint. */
6981 if (DECL_RTL (slot) != 0)
6983 target = DECL_RTL (slot);
6984 /* We have already expanded the slot, so don't do
6985 anything else.  */
6986 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6990 DECL_RTL (slot) = target;
6993 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
6994 /* Mark it as expanded. */
6995 TREE_OPERAND (exp, 1) = NULL_TREE;
6997 store_expr (exp1, target, 0);
7001 cleanups_this_call = tree_cons (NULL_TREE,
7002 cleanups,
7003 cleanups_this_call);
7003 cleanups_this_call);
7004 expand_eh_region_start ();
7008 case INIT_EXPR:
7010 {
7012 tree lhs = TREE_OPERAND (exp, 0);
7013 tree rhs = TREE_OPERAND (exp, 1);
7014 tree noncopied_parts = 0;
7015 tree lhs_type = TREE_TYPE (lhs);
7017 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7018 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7019 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7020 TYPE_NONCOPIED_PARTS (lhs_type));
7021 while (noncopied_parts != 0)
7023 expand_assignment (TREE_VALUE (noncopied_parts),
7024 TREE_PURPOSE (noncopied_parts), 0, 0);
7025 noncopied_parts = TREE_CHAIN (noncopied_parts);
7026 return temp;
7027 }
7030 case MODIFY_EXPR:
7032 /* If lhs is complex, expand calls in rhs before computing it.
7033 That's so we don't compute a pointer and save it over a call.
7034 If lhs is simple, compute it first so we can give it as a
7035 target if the rhs is just a call. This avoids an extra temp and copy
7036 and that prevents a partial-subsumption which makes bad code.
7037 Actually we could treat component_ref's of vars like vars. */
7039 tree lhs = TREE_OPERAND (exp, 0);
7040 tree rhs = TREE_OPERAND (exp, 1);
7041 tree noncopied_parts = 0;
7042 tree lhs_type = TREE_TYPE (lhs);
7046 if (TREE_CODE (lhs) != VAR_DECL
7047 && TREE_CODE (lhs) != RESULT_DECL
7048 && TREE_CODE (lhs) != PARM_DECL)
7049 preexpand_calls (exp);
7051 /* Check for |= or &= of a bitfield of size one into another bitfield
7052 of size 1. In this case, (unless we need the result of the
7053 assignment) we can do this more efficiently with a
7054 test followed by an assignment, if necessary.
7056 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7057 things change so we do, this code should be enhanced to
7058 support it.  */
7059 if (ignore
7060 && TREE_CODE (lhs) == COMPONENT_REF
7061 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7062 || TREE_CODE (rhs) == BIT_AND_EXPR)
7063 && TREE_OPERAND (rhs, 0) == lhs
7064 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7065 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7066 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7068 rtx label = gen_label_rtx ();
7070 do_jump (TREE_OPERAND (rhs, 1),
7071 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7072 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7073 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7074 (TREE_CODE (rhs) == BIT_IOR_EXPR
7075 ? integer_one_node
7076 : integer_zero_node)),
7077 0, 0);
7078 do_pending_stack_adjust ();
7079 emit_label (label);
7080 return const0_rtx;
7081 }
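/* Illustrative example (not from the original source): for one-bit
   fields, `s.a |= s.b;' comes out as, in effect,

       if (s.b) s.a = 1;

   a single test and conditional store instead of a load/or/store
   bitfield sequence.  */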
7083 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7084 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7085 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7086 TYPE_NONCOPIED_PARTS (lhs_type));
7088 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7089 while (noncopied_parts != 0)
7091 expand_assignment (TREE_PURPOSE (noncopied_parts),
7092 TREE_VALUE (noncopied_parts), 0, 0);
7093 noncopied_parts = TREE_CHAIN (noncopied_parts);
7098 case PREINCREMENT_EXPR:
7099 case PREDECREMENT_EXPR:
7100 return expand_increment (exp, 0, ignore);
7102 case POSTINCREMENT_EXPR:
7103 case POSTDECREMENT_EXPR:
7104 /* Faster to treat as pre-increment if result is not used. */
7105 return expand_increment (exp, ! ignore, ignore);
7107 case ADDR_EXPR:
7108 /* If nonzero, TEMP will be set to the address of something that might
7109 be a MEM corresponding to a stack slot. */
7112 /* Are we taking the address of a nested function? */
7113 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
7114 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
7115 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0)))
7117 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7118 op0 = force_operand (op0, target);
7120 /* If we are taking the address of something erroneous, just
7121 use zero.  */
7122 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7123 return const0_rtx;
7124 else
7126 /* We make sure to pass const0_rtx down if we came in with
7127 ignore set, to avoid doing the cleanups twice for something. */
7128 op0 = expand_expr (TREE_OPERAND (exp, 0),
7129 ignore ? const0_rtx : NULL_RTX, VOIDmode,
7130 (modifier == EXPAND_INITIALIZER
7131 ? modifier : EXPAND_CONST_ADDRESS));
7133 /* If we are going to ignore the result, OP0 will have been set
7134 to const0_rtx, so just return it. Don't get confused and
7135 think we are taking the address of the constant. */
7136 if (ignore)
7137 return op0;
7139 op0 = protect_from_queue (op0, 0);
7141 /* We would like the object in memory. If it is a constant,
7142 we can have it be statically allocated into memory. For
7143 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
7144 memory and store the value into it. */
7146 if (CONSTANT_P (op0))
7147 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7148 op0);
7149 else if (GET_CODE (op0) == MEM)
7151 mark_temp_addr_taken (op0);
7152 temp = XEXP (op0, 0);
7155 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7156 || GET_CODE (op0) == CONCAT)
7158 /* If this object is in a register, we must force it into
7159 memory before its address can be taken.  */
7160 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7161 rtx memloc = assign_temp (inner_type, 1, 1, 1);
7163 mark_temp_addr_taken (memloc);
7164 emit_move_insn (memloc, op0);
7165 op0 = memloc;
7166 }
7168 if (GET_CODE (op0) != MEM)
7169 abort ();
7171 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7173 temp = XEXP (op0, 0);
7174 #ifdef POINTERS_EXTEND_UNSIGNED
7175 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7176 && mode == ptr_mode)
7177 temp = convert_memory_address (ptr_mode, temp);
7178 #endif
7179 return temp;
7182 op0 = force_operand (XEXP (op0, 0), target);
7185 if (flag_force_addr && GET_CODE (op0) != REG)
7186 op0 = force_reg (Pmode, op0);
7188 if (GET_CODE (op0) == REG
7189 && ! REG_USERVAR_P (op0))
7190 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
7192 /* If we might have had a temp slot, add an equivalent address
7193 for it.  */
7194 if (temp != 0)
7195 update_temp_slot_address (temp, op0);
7197 #ifdef POINTERS_EXTEND_UNSIGNED
7198 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7199 && mode == ptr_mode)
7200 op0 = convert_memory_address (ptr_mode, op0);
7201 #endif
7203 return op0;
7205 case ENTRY_VALUE_EXPR:
7206 abort ();
7208 /* COMPLEX type for Extended Pascal & Fortran.  */
7209 case COMPLEX_EXPR:
7210 {
7211 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7214 /* Get the rtx code of the operands. */
7215 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7216 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7219 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7223 /* Move the real (op0) and imaginary (op1) parts to their location. */
7224 emit_move_insn (gen_realpart (mode, target), op0);
7225 emit_move_insn (gen_imagpart (mode, target), op1);
7227 insns = get_insns ();
7228 end_sequence ();
7230 /* Complex construction should appear as a single unit. */
7231 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7232 each with a separate pseudo as destination.
7233 It's not correct for flow to treat them as a unit. */
7234 if (GET_CODE (target) != CONCAT)
7235 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
7236 else
7237 emit_insns (insns);
7239 return target;
7240 }
7242 case REALPART_EXPR:
7243 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7244 return gen_realpart (mode, op0);
7246 case IMAGPART_EXPR:
7247 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7248 return gen_imagpart (mode, op0);
7250 case CONJ_EXPR:
7251 {
7252 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7256 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7259 target = gen_reg_rtx (mode);
7263 /* Store the realpart and the negated imagpart to target. */
7264 emit_move_insn (gen_realpart (partmode, target),
7265 gen_realpart (partmode, op0));
7267 imag_t = gen_imagpart (partmode, target);
7268 temp = expand_unop (partmode, neg_optab,
7269 gen_imagpart (partmode, op0), imag_t, 0);
7271 emit_move_insn (imag_t, temp);
7273 insns = get_insns ();
7274 end_sequence ();
7276 /* Conjugate should appear as a single unit
7277 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
7278 each with a separate pseudo as destination.
7279 It's not correct for flow to treat them as a unit. */
7280 if (GET_CODE (target) != CONCAT)
7281 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
7282 else
7283 emit_insns (insns);
7285 return target;
7286 }
7288 case ERROR_MARK:
7289 op0 = CONST0_RTX (tmode);
7294 default:
7295 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7298 /* Here to do an ordinary binary operator, generating an instruction
7299 from the optab already placed in `this_optab'. */
7300 binop:
7301 preexpand_calls (exp);
7302 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
7303 subtarget = 0;
7304 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7305 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7306 binop2:
7307 temp = expand_binop (mode, this_optab, op0, op1, target,
7308 unsignedp, OPTAB_LIB_WIDEN);
7309 if (temp == 0)
7310 abort ();
7311 return temp;
7312 }
7315 /* Emit bytecode to evaluate the given expression EXP to the stack. */
7317 void
7318 bc_expand_expr (exp)
7319 tree exp;
7320 {
7321 enum tree_code code;
7324 struct binary_operator *binoptab;
7325 struct unary_operator *unoptab;
7326 struct increment_operator *incroptab;
7327 struct bc_label *lab, *lab1;
7328 enum bytecode_opcode opcode;
7331 code = TREE_CODE (exp);
7333 switch (code)
7334 {
7335 case PARM_DECL:
7337 if (DECL_RTL (exp) == 0)
7339 error_with_decl (exp, "prior parameter's size depends on `%s'");
7340 return;
7343 bc_load_parmaddr (DECL_RTL (exp));
7344 bc_load_memory (TREE_TYPE (exp), exp);
7345 return;
7347 case VAR_DECL:
7350 if (DECL_RTL (exp) == 0)
7351 abort ();
7354 if (BYTECODE_LABEL (DECL_RTL (exp)))
7355 bc_load_externaddr (DECL_RTL (exp));
7356 else
7357 bc_load_localaddr (DECL_RTL (exp));
7359 if (TREE_PUBLIC (exp))
7360 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
7361 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
7362 else
7363 bc_load_localaddr (DECL_RTL (exp));
7365 bc_load_memory (TREE_TYPE (exp), exp);
7366 return;
7368 case INTEGER_CST:
7370 #ifdef DEBUG_PRINT_CODE
7371 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
7372 #endif
7373 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
7375 : TYPE_MODE (TREE_TYPE (exp)))],
7376 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
7377 return;
7379 case REAL_CST:
7382 #ifdef DEBUG_PRINT_CODE
7383 fprintf (stderr, " [%g]\n", (double) TREE_REAL_CST (exp));
7384 #endif
7385 /* FIX THIS: find a better way to pass real_cst's. -bson */
7386 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
7387 (double) TREE_REAL_CST (exp));
7389 return;
7391 case CALL_EXPR:
7392 {
7396 /* We build a call description vector describing the type of
7397 the return value and of the arguments; this call vector,
7398 together with a pointer to a location for the return value
7399 and the base of the argument list, is passed to the low
7400 level machine dependent call subroutine, which is responsible
7401 for putting the arguments wherever real functions expect
7402 them, as well as getting the return value back. */
7404 tree calldesc = 0, arg;
7408 /* Push the evaluated args on the evaluation stack in reverse
7409 order. Also make an entry for each arg in the calldesc
7410 vector while we're at it. */
7412 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7414 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
7415 {
7416 nargs++;
7417 bc_expand_expr (TREE_VALUE (arg));
7419 calldesc = tree_cons ((tree) 0,
7420 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
7421 calldesc);
7422 calldesc = tree_cons ((tree) 0,
7423 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
7424 calldesc);
7425 }
7427 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7429 /* Allocate a location for the return value and push its
7430 address on the evaluation stack. Also make an entry
7431 at the front of the calldesc for the return value type. */
7433 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7434 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
7435 bc_load_localaddr (retval);
7437 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
7438 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
7440 /* Prepend the argument count. */
7441 calldesc = tree_cons ((tree) 0,
7442 build_int_2 (nargs, 0),
7445 /* Push the address of the call description vector on the stack. */
7446 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
7447 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
7448 build_index_type (build_int_2 (nargs * 2, 0)));
7449 r = output_constant_def (calldesc);
7450 bc_load_externaddr (r);
7452 /* Push the address of the function to be called. */
7453 bc_expand_expr (TREE_OPERAND (exp, 0));
7455 /* Call the function, popping its address and the calldesc vector
7456 address off the evaluation stack in the process. */
7457 bc_emit_instruction (call);
7459 /* Pop the arguments off the stack. */
7460 bc_adjust_stack (nargs);
7462 /* Load the return value onto the stack. */
7463 bc_load_localaddr (retval);
7464 bc_load_memory (type, TREE_OPERAND (exp, 0));
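/* Illustrative layout (assumption): for a call `f (i, d)' returning
   int, the calldesc vector holds the argument count (2), then a
   (type code, size) pair for the return value and one such pair per
   argument; the evaluation stack meanwhile receives the argument
   values (pushed in reverse), the return-value address, the calldesc
   address, and finally the function address.  */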
7465 return;
7467 case SAVE_EXPR:
7470 if (!SAVE_EXPR_RTL (exp))
7472 /* First time around: copy to local variable */
7473 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
7474 TYPE_ALIGN (TREE_TYPE(exp)));
7475 bc_expand_expr (TREE_OPERAND (exp, 0));
7476 bc_emit_instruction (duplicate);
7478 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7479 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7480 }
7481 else
7482 {
7483 /* Consecutive reference: use saved copy */
7484 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7485 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7486 }
7487 return;
7490 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
7491 how are they handled instead? */
7494 TREE_USED (exp) = 1;
7495 bc_expand_expr (STMT_BODY (exp));
7502 bc_expand_expr (TREE_OPERAND (exp, 0));
7503 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
7508 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
7513 bc_expand_address (TREE_OPERAND (exp, 0));
7518 bc_expand_expr (TREE_OPERAND (exp, 0));
7519 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7524 bc_expand_expr (bc_canonicalize_array_ref (exp));
7529 bc_expand_component_address (exp);
7531 /* If we have a bitfield, generate a proper load */
7532 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
7537 bc_expand_expr (TREE_OPERAND (exp, 0));
7538 bc_emit_instruction (drop);
7539 bc_expand_expr (TREE_OPERAND (exp, 1));
7544 bc_expand_expr (TREE_OPERAND (exp, 0));
7545 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7546 lab = bc_get_bytecode_label ();
7547 bc_emit_bytecode (xjumpifnot);
7548 bc_emit_bytecode_labelref (lab);
7550 #ifdef DEBUG_PRINT_CODE
7551 fputc ('\n', stderr);
7553 bc_expand_expr (TREE_OPERAND (exp, 1));
7554 lab1 = bc_get_bytecode_label ();
7555 bc_emit_bytecode (jump);
7556 bc_emit_bytecode_labelref (lab1);
7558 #ifdef DEBUG_PRINT_CODE
7559 fputc ('\n', stderr);
7562 bc_emit_bytecode_labeldef (lab);
7563 bc_expand_expr (TREE_OPERAND (exp, 2));
7564 bc_emit_bytecode_labeldef (lab1);
7567 case TRUTH_ANDIF_EXPR:
7569 opcode = xjumpifnot;
7572 case TRUTH_ORIF_EXPR:
7579 binoptab = optab_plus_expr;
7584 binoptab = optab_minus_expr;
7589 binoptab = optab_mult_expr;
7592 case TRUNC_DIV_EXPR:
7593 case FLOOR_DIV_EXPR:
7594 case CEIL_DIV_EXPR:
7595 case ROUND_DIV_EXPR:
7596 case EXACT_DIV_EXPR:
7598 binoptab = optab_trunc_div_expr;
7601 case TRUNC_MOD_EXPR:
7602 case FLOOR_MOD_EXPR:
7603 case CEIL_MOD_EXPR:
7604 case ROUND_MOD_EXPR:
7606 binoptab = optab_trunc_mod_expr;
7609 case FIX_ROUND_EXPR:
7610 case FIX_FLOOR_EXPR:
7611 case FIX_CEIL_EXPR:
7612 abort (); /* Not used for C. */
7614 case FIX_TRUNC_EXPR:
7621 abort (); /* FIXME */
7625 binoptab = optab_rdiv_expr;
7630 binoptab = optab_bit_and_expr;
7635 binoptab = optab_bit_ior_expr;
7640 binoptab = optab_bit_xor_expr;
7645 binoptab = optab_lshift_expr;
7650 binoptab = optab_rshift_expr;
7653 case TRUTH_AND_EXPR:
7655 binoptab = optab_truth_and_expr;
7660 binoptab = optab_truth_or_expr;
7665 binoptab = optab_lt_expr;
7670 binoptab = optab_le_expr;
7675 binoptab = optab_ge_expr;
7680 binoptab = optab_gt_expr;
7685 binoptab = optab_eq_expr;
7690 binoptab = optab_ne_expr;
7695 unoptab = optab_negate_expr;
7700 unoptab = optab_bit_not_expr;
7703 case TRUTH_NOT_EXPR:
7705 unoptab = optab_truth_not_expr;
7708 case PREDECREMENT_EXPR:
7710 incroptab = optab_predecrement_expr;
7713 case PREINCREMENT_EXPR:
7715 incroptab = optab_preincrement_expr;
7718 case POSTDECREMENT_EXPR:
7720 incroptab = optab_postdecrement_expr;
7723 case POSTINCREMENT_EXPR:
7725 incroptab = optab_postincrement_expr;
7730 bc_expand_constructor (exp);
7740 tree vars = TREE_OPERAND (exp, 0);
7741 int vars_need_expansion = 0;
7743 /* Need to open a binding contour here because
7744 if there are any cleanups they must be contained here.  */
7745 expand_start_bindings (0);
7747 /* Mark the corresponding BLOCK for output. */
7748 if (TREE_OPERAND (exp, 2) != 0)
7749 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
7751 /* If VARS have not yet been expanded, expand them now. */
7754 if (DECL_RTL (vars) == 0)
7756 vars_need_expansion = 1;
7759 expand_decl_init (vars);
7760 vars = TREE_CHAIN (vars);
7763 bc_expand_expr (TREE_OPERAND (exp, 1));
7765 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7775 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
7776 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
7782 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7788 bc_expand_expr (TREE_OPERAND (exp, 0));
7789 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7790 lab = bc_get_bytecode_label ();
7792 bc_emit_instruction (duplicate);
7793 bc_emit_bytecode (opcode);
7794 bc_emit_bytecode_labelref (lab);
7796 #ifdef DEBUG_PRINT_CODE
7797 fputc ('\n', stderr);
7800 bc_emit_instruction (drop);
7802 bc_expand_expr (TREE_OPERAND (exp, 1));
7803 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
7804 bc_emit_bytecode_labeldef (lab);
7810 type = TREE_TYPE (TREE_OPERAND (exp, 0));
7812 /* Push the quantum. */
7813 bc_expand_expr (TREE_OPERAND (exp, 1));
7815 /* Convert it to the lvalue's type. */
7816 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
7818 /* Push the address of the lvalue */
7819 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
7821 /* Perform actual increment */
7822 bc_expand_increment (incroptab, type);
7826 /* Return the alignment in bits of EXP, a pointer valued expression.
7827 But don't return more than MAX_ALIGN no matter what.
7828 The alignment returned is, by default, the alignment of the thing that
7829 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7831 Otherwise, look at the expression to see if we can do better, i.e., if the
7832 expression is actually pointing at an object whose alignment is tighter. */
7835 get_pointer_alignment (exp, max_align)
7836 tree exp;
7837 unsigned max_align;
7838 {
7839 unsigned align, inner;
7841 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7842 return 0;
7844 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7845 align = MIN (align, max_align);
7849 switch (TREE_CODE (exp))
7850 {
7851 case NOP_EXPR:
7852 case CONVERT_EXPR:
7853 case NON_LVALUE_EXPR:
7854 exp = TREE_OPERAND (exp, 0);
7855 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7856 return align;
7857 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7858 align = MIN (inner, max_align);
7859 break;
7861 case PLUS_EXPR:
7862 /* If sum of pointer + int, restrict our maximum alignment to that
7863 imposed by the integer.  If not, we can't do any better than
7864 ALIGN.  */
7865 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
7866 return align;
7868 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
7869 & (align - 1))
7870 != 0)
7871 align >>= 1;
7873 exp = TREE_OPERAND (exp, 0);
7874 break;
7876 case ADDR_EXPR:
7877 /* See what we are pointing at and look at its alignment.  */
7878 exp = TREE_OPERAND (exp, 0);
7879 if (TREE_CODE (exp) == FUNCTION_DECL)
7880 align = FUNCTION_BOUNDARY;
7881 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
7882 align = DECL_ALIGN (exp);
7883 #ifdef CONSTANT_ALIGNMENT
7884 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7885 align = CONSTANT_ALIGNMENT (exp, align);
7886 #endif
7887 return MIN (align, max_align);
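/* Illustrative example (assumption): for `double d;', the expression
   `(char *) &d' starts with the pointee alignment of char (8 bits),
   but the NOP_EXPR and ADDR_EXPR cases above look through the cast
   to the DECL and report DECL_ALIGN (d) -- typically 64 bits --
   capped at MAX_ALIGN.  */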
7895 /* Return the tree node and offset if a given argument corresponds to
7896 a string constant. */
7898 static tree
7899 string_constant (arg, ptr_offset)
7900 tree arg;
7901 tree *ptr_offset;
7902 {
7905 if (TREE_CODE (arg) == ADDR_EXPR
7906 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7908 *ptr_offset = integer_zero_node;
7909 return TREE_OPERAND (arg, 0);
7911 else if (TREE_CODE (arg) == PLUS_EXPR)
7913 tree arg0 = TREE_OPERAND (arg, 0);
7914 tree arg1 = TREE_OPERAND (arg, 1);
7916 STRIP_NOPS (arg0);
7917 STRIP_NOPS (arg1);
7919 if (TREE_CODE (arg0) == ADDR_EXPR
7920 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
7922 *ptr_offset = arg1;
7923 return TREE_OPERAND (arg0, 0);
7925 else if (TREE_CODE (arg1) == ADDR_EXPR
7926 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
7928 *ptr_offset = arg0;
7929 return TREE_OPERAND (arg1, 0);
7936 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
7937 way, because it could contain a zero byte in the middle.
7938 TREE_STRING_LENGTH is the size of the character array, not the string.
7940 Unfortunately, string_constant can't access the values of const char
7941 arrays with initializers, so neither can we do so here. */
7943 static tree
7944 c_strlen (src)
7945 tree src;
7946 {
7947 tree offset_node;
7948 int offset, max, i;
7949 char *ptr;
7951 src = string_constant (src, &offset_node);
7952 if (src == 0)
7953 return 0;
7954 max = TREE_STRING_LENGTH (src);
7955 ptr = TREE_STRING_POINTER (src);
7956 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
7958 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7959 compute the offset to the following null if we don't know where to
7960 start searching for it. */
7962 for (i = 0; i < max; i++)
7963 if (ptr[i] == 0)
7964 return 0;
7965 /* We don't know the starting offset, but we do know that the string
7966 has no internal zero bytes. We can assume that the offset falls
7967 within the bounds of the string; otherwise, the programmer deserves
7968 what he gets.  Subtract the offset from the length of the string,
7969 and return that.  */
7970 /* This would perhaps not be valid if we were dealing with named
7971 arrays in addition to literal string constants. */
7972 return size_binop (MINUS_EXPR, size_int (max), offset_node);
7975 /* We have a known offset into the string. Start searching there for
7976 a null character. */
7977 if (offset_node == 0)
7978 offset = 0;
7979 else
7981 /* Did we get a long long offset? If so, punt. */
7982 if (TREE_INT_CST_HIGH (offset_node) != 0)
7983 return 0;
7984 offset = TREE_INT_CST_LOW (offset_node);
7986 /* If the offset is known to be out of bounds, warn, and call strlen at
7987 runtime.  */
7988 if (offset < 0 || offset > max)
7990 warning ("offset outside bounds of constant string");
7991 return 0;
7993 /* Use strlen to search for the first zero byte. Since any strings
7994 constructed with build_string will have nulls appended, we win even
7995 if we get handed something like (char[4])"abcd".
7997 Since OFFSET is our starting index into the string, no further
7998 calculation is needed. */
7999 return size_int (strlen (ptr + offset));
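/* Illustrative examples (assumption): c_strlen of "hello" folds to
   size_int (5).  For "foo\0bar" with an unknown offset, the scan
   above finds the embedded null and gives up (returns 0).  With a
   known offset of 5 into "foo\0bar", the final line computes
   strlen (ptr + 5) == 2 at compile time.  */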
8003 expand_builtin_return_addr (fndecl_code, count, tem)
8004 enum built_in_function fndecl_code;
8005 int count;
8006 rtx tem;
8007 {
8008 int i;
8010 /* Some machines need special handling before we can access
8011 arbitrary frames. For example, on the sparc, we must first flush
8012 all register windows to the stack. */
8013 #ifdef SETUP_FRAME_ADDRESSES
8014 SETUP_FRAME_ADDRESSES ();
8017 /* On the sparc, the return address is not in the frame, it is in a
8018 register. There is no way to access it off of the current frame
8019 pointer, but it can be accessed off the previous frame pointer by
8020 reading the value from the register window save area. */
8021 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
8022 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
8023 count--;
8024 #endif
8026 /* Scan back COUNT frames to the specified frame. */
8027 for (i = 0; i < count; i++)
8029 /* Assume the dynamic chain pointer is in the word that the
8030 frame address points to, unless otherwise specified. */
8031 #ifdef DYNAMIC_CHAIN_ADDRESS
8032 tem = DYNAMIC_CHAIN_ADDRESS (tem);
8033 #endif
8034 tem = memory_address (Pmode, tem);
8035 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
8038 /* For __builtin_frame_address, return what we've got. */
8039 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
8040 return tem;
8042 /* For __builtin_return_address, get the return address from that
8043 frame.  */
8044 #ifdef RETURN_ADDR_RTX
8045 tem = RETURN_ADDR_RTX (count, tem);
8046 #else
8047 tem = memory_address (Pmode,
8048 plus_constant (tem, GET_MODE_SIZE (Pmode)));
8049 tem = gen_rtx (MEM, Pmode, tem);
8050 #endif
8051 return tem;
8052 }
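/* Illustrative example (assumption): `__builtin_return_address (2)'
   arrives here with COUNT == 2; the loop follows the dynamic chain
   through two saved frame pointers, then the code above loads the
   return address at its machine-dependent offset from the resulting
   frame address.  */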
8054 /* Expand an expression EXP that calls a built-in function,
8055 with result going to TARGET if that's convenient
8056 (and in mode MODE if that's convenient).
8057 SUBTARGET may be used as the target for computing one of EXP's operands.
8058 IGNORE is nonzero if the value is to be ignored. */
8060 #define CALLED_AS_BUILT_IN(NODE) \
8061 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
8064 expand_builtin (exp, target, subtarget, mode, ignore)
8065 tree exp;
8066 rtx target, subtarget;
8068 enum machine_mode mode;
8069 int ignore;
8070 {
8071 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8072 tree arglist = TREE_OPERAND (exp, 1);
8075 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
8076 optab builtin_optab;
8078 switch (DECL_FUNCTION_CODE (fndecl))
8079 {
8080 case BUILT_IN_ABS:
8081 case BUILT_IN_LABS:
8082 case BUILT_IN_FABS:
8083 /* build_function_call changes these into ABS_EXPR.  */
8084 abort ();
8086 case BUILT_IN_SIN:
8087 case BUILT_IN_COS:
8088 /* Treat these like sqrt, but only if the user asks for them.  */
8089 if (! flag_fast_math)
8090 break;
8091 case BUILT_IN_FSQRT:
8092 /* If not optimizing, call the library function. */
8093 if (! optimize)
8094 break;
8096 if (arglist == 0
8097 /* Arg could be wrong type if user redeclared this fcn wrong.  */
8098 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
8099 break;
8101 /* Stabilize and compute the argument. */
8102 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
8103 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
8105 exp = copy_node (exp);
8106 arglist = copy_node (arglist);
8107 TREE_OPERAND (exp, 1) = arglist;
8108 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
8110 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8112 /* Make a suitable register to place result in. */
8113 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8115 emit_queue ();
8116 start_sequence ();
8118 switch (DECL_FUNCTION_CODE (fndecl))
8119 {
8120 case BUILT_IN_SIN:
8121 builtin_optab = sin_optab; break;
8122 case BUILT_IN_COS:
8123 builtin_optab = cos_optab; break;
8124 case BUILT_IN_FSQRT:
8125 builtin_optab = sqrt_optab; break;
8126 default:
8127 abort ();
8128 }
8130 /* Compute into TARGET.
8131 Set TARGET to wherever the result comes back. */
8132 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8133 builtin_optab, op0, target, 0);
8135 /* If we were unable to expand via the builtin, stop the
8136 sequence (without outputting the insns) and break, causing
8137 a call to the library function.  */
8138 if (target == 0)
8139 {
8140 end_sequence ();
8141 break;
8142 }
8144 /* Check the results by default. But if flag_fast_math is turned on,
8145 then assume sqrt will always be called with valid arguments. */
8147 if (! flag_fast_math)
8149 /* Don't define the builtin FP instructions
8150 if your machine is not IEEE. */
8151 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8154 lab1 = gen_label_rtx ();
8156 /* Test the result; if it is NaN, set errno=EDOM because
8157 the argument was not in the domain. */
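/* (The self-comparison below works because, under IEEE arithmetic,
   NaN is the only value that compares unequal to itself.)  */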
8158 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
8159 emit_jump_insn (gen_beq (lab1));
8163 #ifdef GEN_ERRNO_RTX
8164 rtx errno_rtx = GEN_ERRNO_RTX;
8165 #else
8166 rtx errno_rtx
8167 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
8168 #endif
8170 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8173 /* We can't set errno=EDOM directly; let the library call do it.
8174 Pop the arguments right away in case the call gets deleted. */
8175 NO_DEFER_POP;
8176 expand_call (exp, target, 0);
8177 OK_DEFER_POP;
8180 emit_label (lab1);
8183 /* Output the entire sequence.  */
8184 insns = get_insns ();
8185 end_sequence ();
8186 emit_insns (insns);
8188 return target;
8190 /* __builtin_apply_args returns block of memory allocated on
8191 the stack into which is stored the arg pointer, structure
8192 value address, static chain, and all the registers that might
8193 possibly be used in performing a function call. The code is
8194 moved to the start of the function so the incoming values are
8195 saved.  */
8196 case BUILT_IN_APPLY_ARGS:
8197 /* Don't do __builtin_apply_args more than once in a function.
8198 Save the result of the first call and reuse it. */
8199 if (apply_args_value != 0)
8200 return apply_args_value;
8202 /* When this function is called, it means that registers must be
8203 saved on entry to this function. So we migrate the
8204 call to the first insn of this function. */
8206 start_sequence ();
8209 temp = expand_builtin_apply_args ();
8210 seq = get_insns ();
8211 end_sequence ();
8213 apply_args_value = temp;
8215 /* Put the sequence after the NOTE that starts the function.
8216 If this is inside a SEQUENCE, make the outer-level insn
8217 chain current, so the code is placed at the start of the
8219 push_topmost_sequence ();
8220 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8221 pop_topmost_sequence ();
8222 return temp;
8223 }
8225 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8226 FUNCTION with a copy of the parameters described by
8227 ARGUMENTS, and ARGSIZE. It returns a block of memory
8228 allocated on the stack into which is stored all the registers
8229 that might possibly be used for returning the result of a
8230 function. ARGUMENTS is the value returned by
8231 __builtin_apply_args. ARGSIZE is the number of bytes of
8232 arguments that must be copied. ??? How should this value be
8233 computed? We'll also need a safe worst case value for varargs
8234 functions.  */
8235 case BUILT_IN_APPLY:
8237 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8238 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8239 || TREE_CHAIN (arglist) == 0
8240 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8241 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8242 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8250 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8251 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
8253 return expand_builtin_apply (ops[0], ops[1], ops[2]);
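/* Illustrative usage (assumption; `target_fn' is a hypothetical
   function and 64 a caller-chosen worst-case byte count, as the
   ??? comment above notes):

       void *wrapper ()
       {
         void *args = __builtin_apply_args ();
         void *result = __builtin_apply ((void (*) ()) target_fn,
                                         args, 64);
         __builtin_return (result);
       }
*/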
8256 /* __builtin_return (RESULT) causes the function to return the
8257 value described by RESULT. RESULT is address of the block of
8258 memory returned by __builtin_apply. */
8259 case BUILT_IN_RETURN:
8261 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8262 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8263 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8264 NULL_RTX, VOIDmode, 0));
8267 case BUILT_IN_SAVEREGS:
8268 /* Don't do __builtin_saveregs more than once in a function.
8269 Save the result of the first call and reuse it. */
8270 if (saveregs_value != 0)
8271 return saveregs_value;
8273 /* When this function is called, it means that registers must be
8274 saved on entry to this function. So we migrate the
8275 call to the first insn of this function. */
8279 /* Now really call the function. `expand_call' does not call
8280 expand_builtin, so there is no danger of infinite recursion here. */
8281 start_sequence ();
8283 #ifdef EXPAND_BUILTIN_SAVEREGS
8284 /* Do whatever the machine needs done in this case. */
8285 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8286 #else
8287 /* The register where the function returns its value
8288 is likely to have something else in it, such as an argument.
8289 So preserve that register around the call. */
8291 if (value_mode != VOIDmode)
8293 rtx valreg = hard_libcall_value (value_mode);
8294 rtx saved_valreg = gen_reg_rtx (value_mode);
8296 emit_move_insn (saved_valreg, valreg);
8297 temp = expand_call (exp, target, ignore);
8298 emit_move_insn (valreg, saved_valreg);
8299 }
8300 else
8301 /* Generate the call, putting the value in a pseudo.  */
8302 temp = expand_call (exp, target, ignore);
8303 #endif
8305 seq = get_insns ();
8306 end_sequence ();
8308 saveregs_value = temp;
8310 /* Put the sequence after the NOTE that starts the function.
8311 If this is inside a SEQUENCE, make the outer-level insn
8312 chain current, so the code is placed at the start of the
8314 push_topmost_sequence ();
8315 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8316 pop_topmost_sequence ();
8320 /* __builtin_args_info (N) returns word N of the arg space info
8321 for the current function. The number and meanings of words
8322 are controlled by the definition of CUMULATIVE_ARGS.  */
8323 case BUILT_IN_ARGS_INFO:
8325 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
8327 int *word_ptr = (int *) &current_function_args_info;
8328 tree type, elts, result;
8330 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8331 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8332 __FILE__, __LINE__);
8336 tree arg = TREE_VALUE (arglist);
8337 if (TREE_CODE (arg) != INTEGER_CST)
8338 error ("argument of `__builtin_args_info' must be constant");
8341 int wordnum = TREE_INT_CST_LOW (arg);
8343 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8344 error ("argument of `__builtin_args_info' out of range");
8346 return GEN_INT (word_ptr[wordnum]);
8350 error ("missing argument in `__builtin_args_info'");
8355 for (i = 0; i < nwords; i++)
8356 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0), elts);
8358 type = build_array_type (integer_type_node,
8359 build_index_type (build_int_2 (nwords, 0)));
8360 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8361 TREE_CONSTANT (result) = 1;
8362 TREE_STATIC (result) = 1;
8363 result = build (INDIRECT_REF, build_pointer_type (type), result);
8364 TREE_CONSTANT (result) = 1;
8365 return expand_expr (result, NULL_RTX, VOIDmode, 0);
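/* For illustration: given the expansion above, a hypothetical use

	int w0 = __builtin_args_info (0);

   folds at compile time to GEN_INT of word 0 of this function's
   CUMULATIVE_ARGS block; with no argument, the whole block is
   materialized as a constant array instead.  */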
8369 /* Return the address of the first anonymous stack arg. */
8370 case BUILT_IN_NEXT_ARG:
8372 tree fntype = TREE_TYPE (current_function_decl);
8374 if ((TYPE_ARG_TYPES (fntype) == 0
8375 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8377 && ! current_function_varargs)
8379 error ("`va_start' used in function with fixed args");
8385 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8386 tree arg = TREE_VALUE (arglist);
8388 /* Strip off all nops for the sake of the comparison. This
8389 is not quite the same as STRIP_NOPS. It does more.
8390 We must also strip off INDIRECT_EXPR for C++ reference parameters.  */
8392 while (TREE_CODE (arg) == NOP_EXPR
8393 || TREE_CODE (arg) == CONVERT_EXPR
8394 || TREE_CODE (arg) == NON_LVALUE_EXPR
8395 || TREE_CODE (arg) == INDIRECT_REF)
8396 arg = TREE_OPERAND (arg, 0);
8397 if (arg != last_parm)
8398 warning ("second parameter of `va_start' not last named argument");
8400 else if (! current_function_varargs)
8401 /* Evidently an out of date version of <stdarg.h>; can't validate
8402 va_start's second argument, but can still work as intended. */
8403 warning ("`__builtin_next_arg' called without an argument");
8406 return expand_binop (Pmode, add_optab,
8407 current_function_internal_arg_pointer,
8408 current_function_arg_offset_rtx,
8409 NULL_RTX, 0, OPTAB_LIB_WIDEN);
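/* For illustration: this is the builtin behind va_start.  In a
   function such as

	int sum (int count, ...)
	{ va_list ap; va_start (ap, count); /-* ... *-/ }

   (comment delimiters mangled above to keep this comment valid), the
   expansion yields current_function_internal_arg_pointer plus
   current_function_arg_offset_rtx, i.e. the address of the first
   anonymous argument.  */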
8411 case BUILT_IN_CLASSIFY_TYPE:
8414 tree type = TREE_TYPE (TREE_VALUE (arglist));
8415 enum tree_code code = TREE_CODE (type);
8416 if (code == VOID_TYPE)
8417 return GEN_INT (void_type_class);
8418 if (code == INTEGER_TYPE)
8419 return GEN_INT (integer_type_class);
8420 if (code == CHAR_TYPE)
8421 return GEN_INT (char_type_class);
8422 if (code == ENUMERAL_TYPE)
8423 return GEN_INT (enumeral_type_class);
8424 if (code == BOOLEAN_TYPE)
8425 return GEN_INT (boolean_type_class);
8426 if (code == POINTER_TYPE)
8427 return GEN_INT (pointer_type_class);
8428 if (code == REFERENCE_TYPE)
8429 return GEN_INT (reference_type_class);
8430 if (code == OFFSET_TYPE)
8431 return GEN_INT (offset_type_class);
8432 if (code == REAL_TYPE)
8433 return GEN_INT (real_type_class);
8434 if (code == COMPLEX_TYPE)
8435 return GEN_INT (complex_type_class);
8436 if (code == FUNCTION_TYPE)
8437 return GEN_INT (function_type_class);
8438 if (code == METHOD_TYPE)
8439 return GEN_INT (method_type_class);
8440 if (code == RECORD_TYPE)
8441 return GEN_INT (record_type_class);
8442 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8443 return GEN_INT (union_type_class);
8444 if (code == ARRAY_TYPE)
8446 if (TYPE_STRING_FLAG (type))
8447 return GEN_INT (string_type_class);
8449 return GEN_INT (array_type_class);
8451 if (code == SET_TYPE)
8452 return GEN_INT (set_type_class);
8453 if (code == FILE_TYPE)
8454 return GEN_INT (file_type_class);
8455 if (code == LANG_TYPE)
8456 return GEN_INT (lang_type_class);
8458 return GEN_INT (no_type_class);
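/* For illustration: the chain of tests above makes
   __builtin_classify_type a compile-time constant, e.g.

	__builtin_classify_type (1.5)		yields real_type_class
	__builtin_classify_type ((int *) 0)	yields pointer_type_class
*/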
8460 case BUILT_IN_CONSTANT_P:
8465 tree arg = TREE_VALUE (arglist);
8468 return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
8469 || (TREE_CODE (arg) == ADDR_EXPR
8470 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8471 ? const1_rtx : const0_rtx);
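/* For illustration: the test above accepts only constant-class nodes
   and string addresses, so

	__builtin_constant_p (3 * 7)	yields 1
	__builtin_constant_p ("abc")	yields 1
	__builtin_constant_p (n)	yields 0   (N a hypothetical variable)

   all decided when the expression is expanded.  */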
8474 case BUILT_IN_FRAME_ADDRESS:
8475 /* The argument must be a nonnegative integer constant.
8476 It counts the number of frames to scan up the stack.
8477 The value is the address of that frame. */
8478 case BUILT_IN_RETURN_ADDRESS:
8479 /* The argument must be a nonnegative integer constant.
8480 It counts the number of frames to scan up the stack.
8481 The value is the return address saved in that frame. */
8483 /* Warning about missing arg was already issued. */
8485 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
8487 error ("invalid arg to `__builtin_return_address'");
8490 else if (tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
8492 error ("invalid arg to `__builtin_return_address'");
8497 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
8498 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
8499 hard_frame_pointer_rtx);
8501 /* For __builtin_frame_address, return what we've got. */
8502 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8505 if (GET_CODE (tem) != REG)
8506 tem = copy_to_reg (tem);
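/* For illustration: with the code above,

	__builtin_frame_address (0)	yields the current frame's address
	__builtin_return_address (0)	yields this function's return address

   the latter copied into a pseudo register when the lookup did not
   already produce one.  */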
8510 case BUILT_IN_ALLOCA:
8512 /* Arg could be non-integer if user redeclared this fcn wrong. */
8513 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8516 /* Compute the argument. */
8517 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
8519 /* Allocate the desired space. */
8520 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
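/* For illustration: a hypothetical call

	char *buf = __builtin_alloca (len);

   expands to a direct stack-pointer adjustment through
   allocate_dynamic_stack_space, with no library call and no
   corresponding free.  */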
8523 /* If not optimizing, call the library function. */
8524 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8528 /* Arg could be non-integer if user redeclared this fcn wrong. */
8529 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8532 /* Compute the argument. */
8533 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8534 /* Compute ffs, into TARGET if possible.
8535 Set TARGET to wherever the result comes back. */
8536 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8537 ffs_optab, op0, target, 1);
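/* For illustration: when the target provides an ffs pattern, the
   expand_unop call above turns __builtin_ffs (x) into a single insn;
   e.g. __builtin_ffs (8) is 4 (the one-based index of the lowest set
   bit) and __builtin_ffs (0) is 0.  */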
8542 case BUILT_IN_STRLEN:
8543 /* If not optimizing, call the library function. */
8544 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8548 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8549 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8553 tree src = TREE_VALUE (arglist);
8554 tree len = c_strlen (src);
8557 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8559 rtx result, src_rtx, char_rtx;
8560 enum machine_mode insn_mode = value_mode, char_mode;
8561 enum insn_code icode;
8563 /* If the length is known, just return it. */
8565 return expand_expr (len, target, mode, 0);
8567 /* If SRC is not a pointer type, don't do this operation inline. */
8571 /* Call a function if we can't compute strlen in the right mode. */
8573 while (insn_mode != VOIDmode)
8575 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
8576 if (icode != CODE_FOR_nothing)
8579 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
8581 if (insn_mode == VOIDmode)
8584 /* Make a place to write the result of the instruction. */
8587 && GET_CODE (result) == REG
8588 && GET_MODE (result) == insn_mode
8589 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8590 result = gen_reg_rtx (insn_mode);
8592 /* Make sure the operands are acceptable to the predicates. */
8594 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
8595 result = gen_reg_rtx (insn_mode);
8597 src_rtx = memory_address (BLKmode,
8598 expand_expr (src, NULL_RTX, ptr_mode,
8600 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
8601 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
8603 char_rtx = const0_rtx;
8604 char_mode = insn_operand_mode[(int)icode][2];
8605 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
8606 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
8608 emit_insn (GEN_FCN (icode) (result,
8609 gen_rtx (MEM, BLKmode, src_rtx),
8610 char_rtx, GEN_INT (align)));
8612 /* Return the value in the proper mode for this function. */
8613 if (GET_MODE (result) == value_mode)
8615 else if (target != 0)
8617 convert_move (target, result, 0);
8621 return convert_to_mode (value_mode, result, 0);
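/* For illustration: when c_strlen can see the whole string,

	strlen ("hello")

   folds to the constant 5 with no code emitted; otherwise the
   strlen_optab insn, if the target has one, computes the length
   inline at the alignment determined above.  */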
8624 case BUILT_IN_STRCPY:
8625 /* If not optimizing, call the library function. */
8626 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8630 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8631 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8632 || TREE_CHAIN (arglist) == 0
8633 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8637 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
8642 len = size_binop (PLUS_EXPR, len, integer_one_node);
8644 chainon (arglist, build_tree_list (NULL_TREE, len));
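/* For illustration: the fall-through below rewrites a strcpy whose
   source length is known into a memcpy, so

	strcpy (dst, "abc");

   proceeds as if memcpy (dst, "abc", 4) had been written: the string
   length plus one byte for the terminating null.  */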
8648 case BUILT_IN_MEMCPY:
8649 /* If not optimizing, call the library function. */
8650 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8654 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8655 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8656 || TREE_CHAIN (arglist) == 0
8657 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8658 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8659 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8663 tree dest = TREE_VALUE (arglist);
8664 tree src = TREE_VALUE (TREE_CHAIN (arglist));
8665 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8669 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8671 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8672 rtx dest_rtx, dest_mem, src_mem;
8674 /* If either SRC or DEST is not a pointer type, don't do
8675 this operation in-line. */
8676 if (src_align == 0 || dest_align == 0)
8678 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
8679 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8683 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8684 dest_mem = gen_rtx (MEM, BLKmode,
8685 memory_address (BLKmode, dest_rtx));
8686 /* There could be a void* cast on top of the object. */
8687 while (TREE_CODE (dest) == NOP_EXPR)
8688 dest = TREE_OPERAND (dest, 0);
8689 type = TREE_TYPE (TREE_TYPE (dest));
8690 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8691 src_mem = gen_rtx (MEM, BLKmode,
8692 memory_address (BLKmode,
8693 expand_expr (src, NULL_RTX,
8696 /* There could be a void* cast on top of the object. */
8697 while (TREE_CODE (src) == NOP_EXPR)
8698 src = TREE_OPERAND (src, 0);
8699 type = TREE_TYPE (TREE_TYPE (src));
8700 MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
8702 /* Copy word part most expediently. */
8703 emit_block_move (dest_mem, src_mem,
8704 expand_expr (len, NULL_RTX, VOIDmode, 0),
8705 MIN (src_align, dest_align));
8706 return force_operand (dest_rtx, NULL_RTX);
8709 case BUILT_IN_MEMSET:
8710 /* If not optimizing, call the library function. */
8711 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8715 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8716 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8717 || TREE_CHAIN (arglist) == 0
8718 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
8720 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8722 != (TREE_CODE (TREE_TYPE
8724 (TREE_CHAIN (TREE_CHAIN (arglist))))))))
8728 tree dest = TREE_VALUE (arglist);
8729 tree val = TREE_VALUE (TREE_CHAIN (arglist));
8730 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8734 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8735 rtx dest_rtx, dest_mem;
8737 /* If DEST is not a pointer type, don't do this
8738 operation in-line. */
8739 if (dest_align == 0)
8742 /* If VAL is not 0, don't do this operation in-line. */
8743 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
8746 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8747 dest_mem = gen_rtx (MEM, BLKmode,
8748 memory_address (BLKmode, dest_rtx));
8749 /* There could be a void* cast on top of the object. */
8750 while (TREE_CODE (dest) == NOP_EXPR)
8751 dest = TREE_OPERAND (dest, 0);
8752 type = TREE_TYPE (TREE_TYPE (dest));
8753 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8755 clear_storage (dest_mem, expand_expr (len, NULL_RTX, VOIDmode, 0),
8758 return force_operand (dest_rtx, NULL_RTX);
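/* For illustration: only zero-filling is inlined here, so

	memset (p, 0, 128);

   becomes a clear_storage block clear, while memset (p, 1, 128) keeps
   the library call because VAL does not expand to const0_rtx.  */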
8761 /* These comparison functions need an instruction that returns an actual
8762 index.  An ordinary compare that just sets the condition codes is not enough.  */
8764 #ifdef HAVE_cmpstrsi
8765 case BUILT_IN_STRCMP:
8766 /* If not optimizing, call the library function. */
8767 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8771 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8772 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8773 || TREE_CHAIN (arglist) == 0
8774 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8776 else if (!HAVE_cmpstrsi)
8779 tree arg1 = TREE_VALUE (arglist);
8780 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8784 len = c_strlen (arg1);
8786 len = size_binop (PLUS_EXPR, integer_one_node, len);
8787 len2 = c_strlen (arg2);
8789 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
8791 /* If we don't have a constant length for the first, use the length
8792 of the second, if we know it. We don't require a constant for
8793 this case; some cost analysis could be done if both are available
8794 but neither is constant. For now, assume they're equally cheap.
8796 If both strings have constant lengths, use the smaller. This
8797 could arise if optimization results in strcpy being called with
8798 two fixed strings, or if the code was machine-generated. We should
8799 add some code to the `memcmp' handler below to deal with such
8800 situations, someday. */
8801 if (!len || TREE_CODE (len) != INTEGER_CST)
8808 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
8810 if (tree_int_cst_lt (len2, len))
8814 chainon (arglist, build_tree_list (NULL_TREE, len));
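/* For illustration: for strcmp ("ab", s) the code above computes
   LEN = 2 + 1 = 3 (the constant length plus its null), appends it to
   the argument list, and falls through so the cmpstrsi expansion
   below treats the call like memcmp ("ab", s, 3); when both lengths
   are constant, the smaller one is used.  */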
8818 case BUILT_IN_MEMCMP:
8819 /* If not optimizing, call the library function. */
8820 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8824 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8825 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8826 || TREE_CHAIN (arglist) == 0
8827 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8828 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8829 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8831 else if (!HAVE_cmpstrsi)
8834 tree arg1 = TREE_VALUE (arglist);
8835 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8836 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8840 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8842 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8843 enum machine_mode insn_mode
8844 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
8846 /* If we don't have POINTER_TYPE, call the function. */
8847 if (arg1_align == 0 || arg2_align == 0)
8849 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
8850 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8854 /* Make a place to write the result of the instruction. */
8857 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
8858 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8859 result = gen_reg_rtx (insn_mode);
8861 emit_insn (gen_cmpstrsi (result,
8862 gen_rtx (MEM, BLKmode,
8863 expand_expr (arg1, NULL_RTX,
8866 gen_rtx (MEM, BLKmode,
8867 expand_expr (arg2, NULL_RTX,
8870 expand_expr (len, NULL_RTX, VOIDmode, 0),
8871 GEN_INT (MIN (arg1_align, arg2_align))));
8873 /* Return the value in the proper mode for this function. */
8874 mode = TYPE_MODE (TREE_TYPE (exp));
8875 if (GET_MODE (result) == mode)
8877 else if (target != 0)
8879 convert_move (target, result, 0);
8883 return convert_to_mode (mode, result, 0);
8886 case BUILT_IN_STRCMP:
8887 case BUILT_IN_MEMCMP:
8891 /* __builtin_setjmp is passed a pointer to an array of five words
8892 (not all will be used on all machines). It operates similarly to
8893 the C library function of the same name, but is more efficient.
8894 Much of the code below (and for longjmp) is copied from the handling of non-local gotos.
8897 NOTE: This is intended for use by GNAT and will only work in
8898 the method used by it. This code will likely NOT survive to
8899 the GCC 2.8.0 release. */
8900 case BUILT_IN_SETJMP:
8902 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8906 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
8908 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
8909 enum machine_mode sa_mode = Pmode;
8911 int old_inhibit_defer_pop = inhibit_defer_pop;
8913 = RETURN_POPS_ARGS (get_identifier ("__dummy"),
8914 build_function_type (void_type_node, NULL_TREE),
8917 CUMULATIVE_ARGS args_so_far;
8920 #ifdef POINTERS_EXTEND_UNSIGNED
8921 buf_addr = convert_memory_address (Pmode, buf_addr);
8924 buf_addr = force_reg (Pmode, buf_addr);
8926 if (target == 0 || GET_CODE (target) != REG
8927 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8928 target = gen_reg_rtx (value_mode);
8932 CONST_CALL_P (emit_note (NULL_PTR, NOTE_INSN_SETJMP)) = 1;
8933 current_function_calls_setjmp = 1;
8935 /* We store the frame pointer and the address of lab1 in the buffer
8936 and use the rest of it for the stack save area, which is
8937 machine-dependent. */
8938 emit_move_insn (gen_rtx (MEM, Pmode, buf_addr),
8939 virtual_stack_vars_rtx);
8941 (validize_mem (gen_rtx (MEM, Pmode,
8942 plus_constant (buf_addr,
8943 GET_MODE_SIZE (Pmode)))),
8944 gen_rtx (LABEL_REF, Pmode, lab1));
8946 #ifdef HAVE_save_stack_nonlocal
8947 if (HAVE_save_stack_nonlocal)
8948 sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
8951 stack_save = gen_rtx (MEM, sa_mode,
8952 plus_constant (buf_addr,
8953 2 * GET_MODE_SIZE (Pmode)));
8954 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
8958 emit_insn (gen_setjmp ());
8961 /* Set TARGET to zero and branch around the other case. */
8962 emit_move_insn (target, const0_rtx);
8963 emit_jump_insn (gen_jump (lab2));
8967 /* Note that setjmp clobbers FP when we get here, so we have to
8968 make sure it's marked as used by this function. */
8969 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
8971 /* Mark the static chain as clobbered here so life information
8972 doesn't get messed up for it. */
8973 emit_insn (gen_rtx (CLOBBER, VOIDmode, static_chain_rtx));
8975 /* Now put in the code to restore the frame pointer, and argument
8976 pointer, if needed. The code below is from expand_end_bindings
8977 in stmt.c; see detailed documentation there. */
8978 #ifdef HAVE_nonlocal_goto
8979 if (! HAVE_nonlocal_goto)
8981 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8983 current_function_has_nonlocal_goto = 1;
8985 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8986 if (fixed_regs[ARG_POINTER_REGNUM])
8988 #ifdef ELIMINABLE_REGS
8989 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
8991 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8992 if (elim_regs[i].from == ARG_POINTER_REGNUM
8993 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8996 if (i == sizeof elim_regs / sizeof elim_regs [0])
8999 /* Now restore our arg pointer from the address at which it
9000 was saved in our stack frame.
9001 If there hasn't been space allocated for it yet, make some now.  */
9003 if (arg_pointer_save_area == 0)
9004 arg_pointer_save_area
9005 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
9006 emit_move_insn (virtual_incoming_args_rtx,
9007 copy_to_reg (arg_pointer_save_area));
9012 #ifdef HAVE_nonlocal_goto_receiver
9013 if (HAVE_nonlocal_goto_receiver)
9014 emit_insn (gen_nonlocal_goto_receiver ());
9016 /* The static chain pointer contains the address of the dummy function.
9017 We need to call it here to handle some PIC cases of restoring
9018 a global pointer. Then return 1. */
9019 op0 = copy_to_mode_reg (Pmode, static_chain_rtx);
9021 /* We can't actually call emit_library_call here, so do everything
9022 it does, which isn't much for a libfunc with no args. */
9023 op0 = memory_address (FUNCTION_MODE, op0);
9025 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE,
9026 gen_rtx (SYMBOL_REF, Pmode, "__dummy"), 1);
9027 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1);
9029 #ifndef ACCUMULATE_OUTGOING_ARGS
9030 #ifdef HAVE_call_pop
9032 emit_call_insn (gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, op0),
9033 const0_rtx, next_arg_reg,
9034 GEN_INT (return_pops)));
9041 emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, op0),
9042 const0_rtx, next_arg_reg, const0_rtx));
9047 emit_move_insn (target, const1_rtx);
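/* For illustration: the buffer written above is laid out, in Pmode
   words, as

	word 0:		frame pointer (virtual_stack_vars_rtx)
	word 1:		address of LAB1, the receiver label
	word 2 on:	stack save area, in SA_MODE

   so the direct call path sets TARGET to 0, and re-entry through LAB1
   after a __builtin_longjmp sets it to 1.  */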
9052 /* __builtin_longjmp is passed a pointer to an array of five words
9053 and a value, which is a dummy. It's similar to the C library longjmp
9054 function but works with __builtin_setjmp above. */
9055 case BUILT_IN_LONGJMP:
9056 if (arglist == 0 || TREE_CHAIN (arglist) == 0
9057 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9061 tree dummy_id = get_identifier ("__dummy");
9062 tree dummy_type = build_function_type (void_type_node, NULL_TREE);
9063 tree dummy_decl = build_decl (FUNCTION_DECL, dummy_id, dummy_type);
9064 #ifdef POINTERS_EXTEND_UNSIGNED
9067 convert_memory_address
9069 expand_expr (TREE_VALUE (arglist),
9070 NULL_RTX, VOIDmode, 0)));
9073 = force_reg (Pmode, expand_expr (TREE_VALUE (arglist),
9077 rtx fp = gen_rtx (MEM, Pmode, buf_addr);
9078 rtx lab = gen_rtx (MEM, Pmode,
9079 plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
9080 enum machine_mode sa_mode
9081 #ifdef HAVE_save_stack_nonlocal
9082 = (HAVE_save_stack_nonlocal
9083 ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
9088 rtx stack = gen_rtx (MEM, sa_mode,
9089 plus_constant (buf_addr,
9090 2 * GET_MODE_SIZE (Pmode)));
9092 DECL_EXTERNAL (dummy_decl) = 1;
9093 TREE_PUBLIC (dummy_decl) = 1;
9094 make_decl_rtl (dummy_decl, NULL_PTR, 1);
9096 /* Expand the second expression just for side-effects. */
9097 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
9098 const0_rtx, VOIDmode, 0);
9100 assemble_external (dummy_decl);
9102 /* Pick up FP, label, and SP from the block and jump. This code is
9103 from expand_goto in stmt.c; see there for detailed comments. */
9104 #ifdef HAVE_nonlocal_goto
9105 if (HAVE_nonlocal_goto)
9106 emit_insn (gen_nonlocal_goto (fp, lab, stack,
9107 XEXP (DECL_RTL (dummy_decl), 0)));
9111 lab = copy_to_reg (lab);
9112 emit_move_insn (hard_frame_pointer_rtx, fp);
9113 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
9115 /* Put in the static chain register the address of the dummy function.  */
9117 emit_move_insn (static_chain_rtx, XEXP (DECL_RTL (dummy_decl), 0));
9118 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
9119 emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
9120 emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
9121 emit_indirect_jump (lab);
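/* For illustration (C-like pseudo-notation, not literal code): the
   fallback path above behaves as

	fp = buf[0]; lab = buf[1]; sp_save = buf[2..];
	frame pointer = fp; restore stack from sp_save; goto *lab;

   using the same buffer layout that __builtin_setjmp wrote.  */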
9127 default: /* just do library call, if unknown builtin */
9128 error ("built-in function `%s' not currently supported",
9129 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9132 /* The switch statement above can drop through to cause the function
9133 to be called normally. */
9135 return expand_call (exp, target, ignore);
9138 /* Built-in functions to perform an untyped call and return. */
9140 /* For each register that may be used for calling a function, this
9141 gives a mode used to copy the register's value. VOIDmode indicates
9142 the register is not used for calling a function. If the machine
9143 has register windows, this gives only the outbound registers.
9144 INCOMING_REGNO gives the corresponding inbound register. */
9145 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
9147 /* For each register that may be used for returning values, this gives
9148 a mode used to copy the register's value. VOIDmode indicates the
9149 register is not used for returning values. If the machine has
9150 register windows, this gives only the outbound registers.
9151 INCOMING_REGNO gives the corresponding inbound register. */
9152 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
9154 /* For each register that may be used for calling a function, this
9155 gives the offset of that register into the block returned by
9156 __builtin_apply_args. 0 indicates that the register is not
9157 used for calling a function. */
9158 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
9160 /* Return the offset of register REGNO into the block returned by
9161 __builtin_apply_args. This is not declared static, since it is
9162 needed in objc-act.c. */
9165 apply_args_register_offset (regno)
9170 /* Arguments are always put in outgoing registers (in the argument
9171 block) if such make sense. */
9172 #ifdef OUTGOING_REGNO
9173 regno = OUTGOING_REGNO (regno);
9175 return apply_args_reg_offset[regno];
9178 /* Return the size required for the block returned by __builtin_apply_args,
9179 and initialize apply_args_mode. */
9184 static int size = -1;
9186 enum machine_mode mode;
9188 /* The values computed by this function never change. */
9191 /* The first value is the incoming arg-pointer. */
9192 size = GET_MODE_SIZE (Pmode);
9194 /* The second value is the structure value address unless this is
9195 passed as an "invisible" first argument. */
9196 if (struct_value_rtx)
9197 size += GET_MODE_SIZE (Pmode);
9199 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9200 if (FUNCTION_ARG_REGNO_P (regno))
9202 /* Search for the proper mode for copying this register's
9203 value. I'm not sure this is right, but it works so far. */
9204 enum machine_mode best_mode = VOIDmode;
9206 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9208 mode = GET_MODE_WIDER_MODE (mode))
9209 if (HARD_REGNO_MODE_OK (regno, mode)
9210 && HARD_REGNO_NREGS (regno, mode) == 1)
9213 if (best_mode == VOIDmode)
9214 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9216 mode = GET_MODE_WIDER_MODE (mode))
9217 if (HARD_REGNO_MODE_OK (regno, mode)
9218 && (mov_optab->handlers[(int) mode].insn_code
9219 != CODE_FOR_nothing))
9223 if (mode == VOIDmode)
9226 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9227 if (size % align != 0)
9228 size = CEIL (size, align) * align;
9229 apply_args_reg_offset[regno] = size;
9230 size += GET_MODE_SIZE (mode);
9231 apply_args_mode[regno] = mode;
9235 apply_args_mode[regno] = VOIDmode;
9236 apply_args_reg_offset[regno] = 0;
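/* For illustration of the rounding above: with size = 5 and an 8-byte
   register mode, CEIL (5, 8) * 8 = 8, so the register is recorded at
   offset 8 and size then advances to 16.  */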
9242 /* Return the size required for the block returned by __builtin_apply,
9243 and initialize apply_result_mode. */
9246 apply_result_size ()
9248 static int size = -1;
9250 enum machine_mode mode;
9252 /* The values computed by this function never change. */
9257 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9258 if (FUNCTION_VALUE_REGNO_P (regno))
9260 /* Search for the proper mode for copying this register's
9261 value. I'm not sure this is right, but it works so far. */
9262 enum machine_mode best_mode = VOIDmode;
9264 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9266 mode = GET_MODE_WIDER_MODE (mode))
9267 if (HARD_REGNO_MODE_OK (regno, mode))
9270 if (best_mode == VOIDmode)
9271 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9273 mode = GET_MODE_WIDER_MODE (mode))
9274 if (HARD_REGNO_MODE_OK (regno, mode)
9275 && (mov_optab->handlers[(int) mode].insn_code
9276 != CODE_FOR_nothing))
9280 if (mode == VOIDmode)
9283 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9284 if (size % align != 0)
9285 size = CEIL (size, align) * align;
9286 size += GET_MODE_SIZE (mode);
9287 apply_result_mode[regno] = mode;
9290 apply_result_mode[regno] = VOIDmode;
9292 /* Allow targets that use untyped_call and untyped_return to override
9293 the size so that machine-specific information can be stored here. */
9294 #ifdef APPLY_RESULT_SIZE
9295 size = APPLY_RESULT_SIZE;
9301 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9302 /* Create a vector describing the result block RESULT. If SAVEP is true,
9303 the result block is used to save the values; otherwise it is used to
9304 restore the values. */
9307 result_vector (savep, result)
9311 int regno, size, align, nelts;
9312 enum machine_mode mode;
9314 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9317 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9318 if ((mode = apply_result_mode[regno]) != VOIDmode)
9320 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9321 if (size % align != 0)
9322 size = CEIL (size, align) * align;
9323 reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
9324 mem = change_address (result, mode,
9325 plus_constant (XEXP (result, 0), size));
9326 savevec[nelts++] = (savep
9327 ? gen_rtx (SET, VOIDmode, mem, reg)
9328 : gen_rtx (SET, VOIDmode, reg, mem));
9329 size += GET_MODE_SIZE (mode);
9331 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
9333 #endif /* HAVE_untyped_call or HAVE_untyped_return */
9335 /* Save the state required to perform an untyped call with the same
9336 arguments as were passed to the current function. */
9339 expand_builtin_apply_args ()
9342 int size, align, regno;
9343 enum machine_mode mode;
9345 /* Create a block where the arg-pointer, structure value address,
9346 and argument registers can be saved. */
9347 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
9349 /* Walk past the arg-pointer and structure value address. */
9350 size = GET_MODE_SIZE (Pmode);
9351 if (struct_value_rtx)
9352 size += GET_MODE_SIZE (Pmode);
9354 /* Save each register used in calling a function to the block. */
9355 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9356 if ((mode = apply_args_mode[regno]) != VOIDmode)
9360 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9361 if (size % align != 0)
9362 size = CEIL (size, align) * align;
9364 tem = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9367 /* For reg-stack.c's stack register household.
9368 Compare with a similar piece of code in function.c. */
9370 emit_insn (gen_rtx (USE, mode, tem));
9373 emit_move_insn (change_address (registers, mode,
9374 plus_constant (XEXP (registers, 0),
9377 size += GET_MODE_SIZE (mode);
9380 /* Save the arg pointer to the block. */
9381 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9382 copy_to_reg (virtual_incoming_args_rtx));
9383 size = GET_MODE_SIZE (Pmode);
9385 /* Save the structure value address unless this is passed as an
9386 "invisible" first argument. */
9387 if (struct_value_incoming_rtx)
9389 emit_move_insn (change_address (registers, Pmode,
9390 plus_constant (XEXP (registers, 0),
9392 copy_to_reg (struct_value_incoming_rtx));
9393 size += GET_MODE_SIZE (Pmode);
9396 /* Return the address of the block. */
9397 return copy_addr_to_reg (XEXP (registers, 0));
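/* For illustration: the block whose address is returned above is

	offset 0:		incoming arg pointer
	next Pmode word:	structure value address, if one is used
	then:			each argument register, suitably aligned

   which is exactly the layout expand_builtin_apply walks when it
   replays the registers for the forwarded call.  */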
9400 /* Perform an untyped call and save the state required to perform an
9401 untyped return of whatever value was returned by the given function. */
9404 expand_builtin_apply (function, arguments, argsize)
9405 rtx function, arguments, argsize;
9407 int size, align, regno;
9408 enum machine_mode mode;
9409 rtx incoming_args, result, reg, dest, call_insn;
9410 rtx old_stack_level = 0;
9411 rtx call_fusage = 0;
9413 /* Create a block where the return registers can be saved. */
9414 result = assign_stack_local (BLKmode, apply_result_size (), -1);
9416 /* ??? The argsize value should be adjusted here. */
9418 /* Fetch the arg pointer from the ARGUMENTS block. */
9419 incoming_args = gen_reg_rtx (Pmode);
9420 emit_move_insn (incoming_args,
9421 gen_rtx (MEM, Pmode, arguments));
9422 #ifndef STACK_GROWS_DOWNWARD
9423 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9424 incoming_args, 0, OPTAB_LIB_WIDEN);
9427 /* Perform postincrements before actually calling the function. */
9430 /* Push a new argument block and copy the arguments. */
9431 do_pending_stack_adjust ();
9432 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
9434 /* Push a block of memory onto the stack to store the memory arguments.
9435 Save the address in a register, and copy the memory arguments. ??? I
9436 haven't figured out how the calling convention macros affect this,
9437 but it's likely that the source and/or destination addresses in
9438 the block copy will need updating in machine specific ways. */
9439 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
9440 emit_block_move (gen_rtx (MEM, BLKmode, dest),
9441 gen_rtx (MEM, BLKmode, incoming_args),
9443 PARM_BOUNDARY / BITS_PER_UNIT);
9445 /* Refer to the argument block. */
9447 arguments = gen_rtx (MEM, BLKmode, arguments);
9449 /* Walk past the arg-pointer and structure value address. */
9450 size = GET_MODE_SIZE (Pmode);
9451 if (struct_value_rtx)
9452 size += GET_MODE_SIZE (Pmode);
9454 /* Restore each of the registers previously saved. Make USE insns
9455 for each of these registers for use in making the call. */
9456 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9457 if ((mode = apply_args_mode[regno]) != VOIDmode)
9459 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9460 if (size % align != 0)
9461 size = CEIL (size, align) * align;
9462 reg = gen_rtx (REG, mode, regno);
9463 emit_move_insn (reg,
9464 change_address (arguments, mode,
9465 plus_constant (XEXP (arguments, 0),
9468 use_reg (&call_fusage, reg);
9469 size += GET_MODE_SIZE (mode);
9472 /* Restore the structure value address unless this is passed as an
9473 "invisible" first argument. */
9474 size = GET_MODE_SIZE (Pmode);
9475 if (struct_value_rtx)
9477 rtx value = gen_reg_rtx (Pmode);
9478 emit_move_insn (value,
9479 change_address (arguments, Pmode,
9480 plus_constant (XEXP (arguments, 0),
9482 emit_move_insn (struct_value_rtx, value);
9483 if (GET_CODE (struct_value_rtx) == REG)
9484 use_reg (&call_fusage, struct_value_rtx);
9485 size += GET_MODE_SIZE (Pmode);
9488 /* All arguments and registers used for the call are set up by now! */
9489 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
9491 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
9492 and we don't want to load it into a register as an optimization,
9493 because prepare_call_address already did it if it should be done. */
9494 if (GET_CODE (function) != SYMBOL_REF)
9495 function = memory_address (FUNCTION_MODE, function);
9497 /* Generate the actual call instruction and save the return value. */
9498 #ifdef HAVE_untyped_call
9499 if (HAVE_untyped_call)
9500 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
9501 result, result_vector (1, result)));
9504 #ifdef HAVE_call_value
9505 if (HAVE_call_value)
9509 /* Locate the unique return register. It is not possible to
9510 express a call that sets more than one return register using
9511 call_value; use untyped_call for that. In fact, untyped_call
9512 only needs to save the return registers in the given block. */
9513 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9514 if ((mode = apply_result_mode[regno]) != VOIDmode)
9517 abort (); /* HAVE_untyped_call required. */
9518 valreg = gen_rtx (REG, mode, regno);
9521 emit_call_insn (gen_call_value (valreg,
9522 gen_rtx (MEM, FUNCTION_MODE, function),
9523 const0_rtx, NULL_RTX, const0_rtx));
9525 emit_move_insn (change_address (result, GET_MODE (valreg),
9533 /* Find the CALL insn we just emitted. */
9534 for (call_insn = get_last_insn ();
9535 call_insn && GET_CODE (call_insn) != CALL_INSN;
9536 call_insn = PREV_INSN (call_insn))
9542 /* Put the register usage information on the CALL. If there is already
9543 some usage information, put ours at the end. */
9544 if (CALL_INSN_FUNCTION_USAGE (call_insn))
9548 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
9549 link = XEXP (link, 1))
9552 XEXP (link, 1) = call_fusage;
9555 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
9557 /* Restore the stack. */
9558 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
9560 /* Return the address of the result block. */
9561 return copy_addr_to_reg (XEXP (result, 0));
9564 /* Perform an untyped return. */
9567 expand_builtin_return (result)
9570 int size, align, regno;
9571 enum machine_mode mode;
9573 rtx call_fusage = 0;
9575 apply_result_size ();
9576 result = gen_rtx (MEM, BLKmode, result);
9578 #ifdef HAVE_untyped_return
9579 if (HAVE_untyped_return)
9581 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
9587 /* Restore the return value and note that each value is used. */
9589 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9590 if ((mode = apply_result_mode[regno]) != VOIDmode)
9592 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9593 if (size % align != 0)
9594 size = CEIL (size, align) * align;
9595 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9596 emit_move_insn (reg,
9597 change_address (result, mode,
9598 plus_constant (XEXP (result, 0),
9601 push_to_sequence (call_fusage);
9602 emit_insn (gen_rtx (USE, VOIDmode, reg));
9603 call_fusage = get_insns ();
9605 size += GET_MODE_SIZE (mode);
9608 /* Put the USE insns before the return. */
9609 emit_insns (call_fusage);
9611 /* Return whatever value was restored by jumping directly to the end of the function.  */
9613 expand_null_return ();
9616 /* Expand code for a post- or pre- increment or decrement
9617 and return the RTX for the result.
9618 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9621 expand_increment (exp, post, ignore)
9625 register rtx op0, op1;
9626 register rtx temp, value;
9627 register tree incremented = TREE_OPERAND (exp, 0);
9628 optab this_optab = add_optab;
9630 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9631 int op0_is_copy = 0;
9632 int single_insn = 0;
9633 /* 1 means we can't store into OP0 directly,
9634 because it is a subreg narrower than a word,
9635 and we don't dare clobber the rest of the word. */
9638 if (output_bytecode)
9640 bc_expand_expr (exp);
9644 /* Stabilize any component ref that might need to be
9645 evaluated more than once below. */
9647 || TREE_CODE (incremented) == BIT_FIELD_REF
9648 || (TREE_CODE (incremented) == COMPONENT_REF
9649 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9650 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9651 incremented = stabilize_reference (incremented);
9652 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9653 ones into save exprs so that they don't accidentally get evaluated
9654 more than once by the code below. */
9655 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9656 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9657 incremented = save_expr (incremented);
9659 /* Compute the operands as RTX.
9660 Note whether OP0 is the actual lvalue or a copy of it:
9661 I believe it is a copy iff it is a register or subreg
9662 and insns were generated in computing it. */
9664 temp = get_last_insn ();
9665 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9667 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9668 in place but instead must do sign- or zero-extension during assignment,
9669 so we copy it into a new register and let the code below use it as a copy.
9672 Note that we can safely modify this SUBREG since it is known not to be
9673 shared (it was made by the expand_expr call above). */
9675 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9678 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9682 else if (GET_CODE (op0) == SUBREG
9683 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9685 /* We cannot increment this SUBREG in place. If we are
9686 post-incrementing, get a copy of the old value. Otherwise,
9687 just mark that we cannot increment in place. */
9689 op0 = copy_to_reg (op0);
9694 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9695 && temp != get_last_insn ());
9696 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9698 /* Decide whether incrementing or decrementing. */
9699 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9700 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9701 this_optab = sub_optab;
9703 /* Convert decrement by a constant into a negative increment. */
9704 if (this_optab == sub_optab
9705 && GET_CODE (op1) == CONST_INT)
9707 op1 = GEN_INT (- INTVAL (op1));
9708 this_optab = add_optab;
9711 /* For a preincrement, see if we can do this with a single instruction. */
9714 icode = (int) this_optab->handlers[(int) mode].insn_code;
9715 if (icode != (int) CODE_FOR_nothing
9716 /* Make sure that OP0 is valid for operands 0 and 1
9717 of the insn we want to queue. */
9718 && (*insn_operand_predicate[icode][0]) (op0, mode)
9719 && (*insn_operand_predicate[icode][1]) (op0, mode)
9720 && (*insn_operand_predicate[icode][2]) (op1, mode))
9724 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9725 then we cannot just increment OP0. We must therefore contrive to
9726 increment the original value. Then, for postincrement, we can return
9727 OP0 since it is a copy of the old value. For preincrement, expand here
9728 unless we can do it with a single insn.
9730 Likewise if storing directly into OP0 would clobber high bits
9731 we need to preserve (bad_subreg). */
9732 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9734 /* This is the easiest way to increment the value wherever it is.
9735 Problems with multiple evaluation of INCREMENTED are prevented
9736 because either (1) it is a component_ref or preincrement,
9737 in which case it was stabilized above, or (2) it is an array_ref
9738 with constant index in an array in a register, which is
9739 safe to reevaluate. */
9740 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9741 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9742 ? MINUS_EXPR : PLUS_EXPR),
9745 TREE_OPERAND (exp, 1));
9747 while (TREE_CODE (incremented) == NOP_EXPR
9748 || TREE_CODE (incremented) == CONVERT_EXPR)
9750 newexp = convert (TREE_TYPE (incremented), newexp);
9751 incremented = TREE_OPERAND (incremented, 0);
9754 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9755 return post ? op0 : temp;
9760 /* We have a true reference to the value in OP0.
9761 If there is an insn to add or subtract in this mode, queue it.
9762 Queueing the increment insn avoids the register shuffling
9763 that often results if we must increment now and first save
9764 the old value for subsequent use. */
9766 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9767 op0 = stabilize (op0);
9770 icode = (int) this_optab->handlers[(int) mode].insn_code;
9771 if (icode != (int) CODE_FOR_nothing
9772 /* Make sure that OP0 is valid for operands 0 and 1
9773 of the insn we want to queue. */
9774 && (*insn_operand_predicate[icode][0]) (op0, mode)
9775 && (*insn_operand_predicate[icode][1]) (op0, mode))
9777 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9778 op1 = force_reg (mode, op1);
9780 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9782 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9784 rtx addr = force_reg (Pmode, XEXP (op0, 0));
9787 op0 = change_address (op0, VOIDmode, addr);
9788 temp = force_reg (GET_MODE (op0), op0);
9789 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9790 op1 = force_reg (mode, op1);
9792 /* The increment queue is LIFO, thus we have to `queue'
9793 the instructions in reverse order. */
9794 enqueue_insn (op0, gen_move_insn (op0, temp));
9795 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9800 /* Preincrement, or we can't increment with one simple insn. */
9802 /* Save a copy of the value before inc or dec, to return it later. */
9803 temp = value = copy_to_reg (op0);
9805 /* Arrange to return the incremented value. */
9806 /* Copy the rtx because expand_binop will protect from the queue,
9807 and the results of that would be invalid for us to return
9808 if our caller does emit_queue before using our result. */
9809 temp = copy_rtx (value = op0);
9811 /* Increment however we can. */
9812 op1 = expand_binop (mode, this_optab, value, op1, op0,
9813 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9814 /* Make sure the value is stored into OP0. */
9816 emit_move_insn (op0, op1);
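/* For illustration: for a postincrement used for its value, e.g.
   y = x++;, the copy_to_reg above preserves the old value, so the
   expansion is roughly

	temp = x;  x = x + 1;  y = temp;

   whereas ++x returns the freshly incremented value itself.  */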
9821 /* Expand all function calls contained within EXP, innermost ones first.
9822 But don't look within expressions that have sequence points.
9823 For each CALL_EXPR, record the rtx for its value
9824 in the CALL_EXPR_RTL field. */
9827 preexpand_calls (exp)
9830 register int nops, i;
9831 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9833 if (! do_preexpand_calls)
9836 /* Only expressions and references can contain calls. */
9838 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9841 switch (TREE_CODE (exp))
9844 /* Do nothing if already expanded. */
9845 if (CALL_EXPR_RTL (exp) != 0
9846 /* Do nothing if the call returns a variable-sized object. */
9847 || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
9848 /* Do nothing to built-in functions. */
9849 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9850 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9852 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9855 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9860 case TRUTH_ANDIF_EXPR:
9861 case TRUTH_ORIF_EXPR:
9862 /* If we find one of these, then we can be sure
9863 the adjust will be done for it (since it makes jumps).
9864 Do it now, so that if this is inside an argument
9865 of a function, we don't get the stack adjustment
9866 after some other args have already been pushed. */
9867 do_pending_stack_adjust ();
9872 case WITH_CLEANUP_EXPR:
9873 case CLEANUP_POINT_EXPR:
9877 if (SAVE_EXPR_RTL (exp) != 0)
9881 nops = tree_code_length[(int) TREE_CODE (exp)];
9882 for (i = 0; i < nops; i++)
9883 if (TREE_OPERAND (exp, i) != 0)
9885 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9886 if (type == 'e' || type == '<' || type == '1' || type == '2'
9888 preexpand_calls (TREE_OPERAND (exp, i));
9892 /* At the start of a function, record that we have no previously-pushed
9893 arguments waiting to be popped. */
9896 init_pending_stack_adjust ()
9898 pending_stack_adjust = 0;
9901 /* When exiting from function, if safe, clear out any pending stack adjust
9902 so the adjustment won't get done. */
9905 clear_pending_stack_adjust ()
9907 #ifdef EXIT_IGNORE_STACK
9909 && ! flag_omit_frame_pointer && EXIT_IGNORE_STACK
9910 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9911 && ! flag_inline_functions)
9912 pending_stack_adjust = 0;
9916 /* Pop any previously-pushed arguments that have not been popped yet. */
9919 do_pending_stack_adjust ()
9921 if (inhibit_defer_pop == 0)
9923 if (pending_stack_adjust != 0)
9924 adjust_stack (GEN_INT (pending_stack_adjust));
9925 pending_stack_adjust = 0;
9929 /* Defer the expansion of all cleanups up to OLD_CLEANUPS.
9930 Returns the cleanups to be performed. */
9933 defer_cleanups_to (old_cleanups)
9936 tree new_cleanups = NULL_TREE;
9937 tree cleanups = cleanups_this_call;
9938 tree last = NULL_TREE;
9940 while (cleanups_this_call != old_cleanups)
9942 expand_eh_region_end (TREE_VALUE (cleanups_this_call));
9943 last = cleanups_this_call;
9944 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
9949 /* Remove the list from the chain of cleanups. */
9950 TREE_CHAIN (last) = NULL_TREE;
9952 /* Reverse them so that we can build them in the right order.  */
9953 cleanups = nreverse (cleanups);
9955 /* All cleanups must be on the function_obstack. */
9956 push_obstacks_nochange ();
9957 resume_temporary_allocation ();
9962 new_cleanups = build (COMPOUND_EXPR, TREE_TYPE (new_cleanups),
9963 TREE_VALUE (cleanups), new_cleanups);
9965 new_cleanups = TREE_VALUE (cleanups);
9967 cleanups = TREE_CHAIN (cleanups);
9973 return new_cleanups;
9976 /* Expand all cleanups up to OLD_CLEANUPS.
9977 Needed here, and also for language-dependent calls. */
9980 expand_cleanups_to (old_cleanups)
9983 while (cleanups_this_call != old_cleanups)
9985 expand_eh_region_end (TREE_VALUE (cleanups_this_call));
9986 expand_expr (TREE_VALUE (cleanups_this_call), const0_rtx, VOIDmode, 0);
9987 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
9991 /* Expand conditional expressions. */
9993 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9994 LABEL is an rtx of code CODE_LABEL, in this function and all the
9998 jumpifnot (exp, label)
10002 do_jump (exp, label, NULL_RTX);
10005 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
10008 jumpif (exp, label)
10012 do_jump (exp, NULL_RTX, label);
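/* For illustration: a front end lowers  if (a && b) f ();  through
   these helpers as

	jumpifnot (cond, else_label);	... expand the call to f ...
     else_label:

   and do_jump itself splits the TRUTH_ANDIF_EXPR so that A and B each
   get their own conditional branch to ELSE_LABEL.  */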
10015 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
10016 the result is zero, or IF_TRUE_LABEL if the result is one.
10017 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
10018 meaning fall through in that case.
10020 do_jump always does any pending stack adjust except when it does not
10021 actually perform a jump. An example where there is no jump
10022 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
10024 This function is responsible for optimizing cases such as
10025 &&, || and comparison operators in EXP. */
10028 do_jump (exp, if_false_label, if_true_label)
10030 rtx if_false_label, if_true_label;
10032 register enum tree_code code = TREE_CODE (exp);
10033 /* Some cases need to create a label to jump to
10034 in order to properly fall through.
10035 These cases set DROP_THROUGH_LABEL nonzero. */
10036 rtx drop_through_label = 0;
10038 rtx comparison = 0;
10041 enum machine_mode mode;
10051 temp = integer_zerop (exp) ? if_false_label : if_true_label;
10057 /* This is not true with #pragma weak */
10059 /* The address of something can never be zero. */
10061 emit_jump (if_true_label);
10066 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
10067 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
10068 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
10071 /* If we are narrowing the operand, we have to do the compare in the narrower mode.  */
10073 if ((TYPE_PRECISION (TREE_TYPE (exp))
10074 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10076 case NON_LVALUE_EXPR:
10077 case REFERENCE_EXPR:
10082 /* These cannot change zero->non-zero or vice versa. */
10083 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10087 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
10088 a test and can be longer if the test is eliminated. */
10090 /* Reduce to minus. */
10091 exp = build (MINUS_EXPR, TREE_TYPE (exp),
10092 TREE_OPERAND (exp, 0),
10093 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
10094 TREE_OPERAND (exp, 1))));
10095 /* Process as MINUS. */
10099 /* Non-zero iff operands of minus differ. */
10100 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
10101 TREE_OPERAND (exp, 0),
10102 TREE_OPERAND (exp, 1)),
10107 /* If we are AND'ing with a small constant, do this comparison in the
10108 smallest type that fits. If the machine doesn't have comparisons
10109 that small, it will be converted back to the wider comparison.
10110 This helps if we are testing the sign bit of a narrower object.
10111 combine can't do this for us because it can't know whether a
10112 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
10114 if (! SLOW_BYTE_ACCESS
10115 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
10116 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
10117 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
10118 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
10119 && (type = type_for_mode (mode, 1)) != 0
10120 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10121 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10122 != CODE_FOR_nothing))
10124 do_jump (convert (type, exp), if_false_label, if_true_label);
10129 case TRUTH_NOT_EXPR:
10130 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10133 case TRUTH_ANDIF_EXPR:
10136 tree cleanups, old_cleanups;
10138 if (if_false_label == 0)
10139 if_false_label = drop_through_label = gen_label_rtx ();
10141 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
10142 seq1 = get_insns ();
10145 old_cleanups = cleanups_this_call;
10147 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10148 seq2 = get_insns ();
10149 cleanups = defer_cleanups_to (old_cleanups);
10154 rtx flag = gen_reg_rtx (word_mode);
10158 /* Flag cleanups as not needed. */
10159 emit_move_insn (flag, const0_rtx);
10162 /* Flag cleanups as needed. */
10163 emit_move_insn (flag, const1_rtx);
10166 /* All cleanups must be on the function_obstack. */
10167 push_obstacks_nochange ();
10168 resume_temporary_allocation ();
10170 /* Convert FLAG, which is an rtx, into a tree.  */
10171 cond = make_node (RTL_EXPR);
10172 TREE_TYPE (cond) = integer_type_node;
10173 RTL_EXPR_RTL (cond) = flag;
10174 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
10175 cond = save_expr (cond);
10177 new_cleanups = build (COND_EXPR, void_type_node,
10178 truthvalue_conversion (cond),
10179 cleanups, integer_zero_node);
10180 new_cleanups = fold (new_cleanups);
10184 /* Now add in the conditionalized cleanups. */
10186 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
10187 expand_eh_region_start ();
10197 case TRUTH_ORIF_EXPR:
10200 tree cleanups, old_cleanups;
10202 if (if_true_label == 0)
10203 if_true_label = drop_through_label = gen_label_rtx ();
10205 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
10206 seq1 = get_insns ();
10209 old_cleanups = cleanups_this_call;
10211 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10212 seq2 = get_insns ();
10213 cleanups = defer_cleanups_to (old_cleanups);
10218 rtx flag = gen_reg_rtx (word_mode);
10222 /* Flag cleanups as not needed. */
10223 emit_move_insn (flag, const0_rtx);
10226 /* Flag cleanups as needed. */
10227 emit_move_insn (flag, const1_rtx);
10230 /* All cleanups must be on the function_obstack. */
10231 push_obstacks_nochange ();
10232 resume_temporary_allocation ();
10234 /* Convert FLAG, which is an rtx, into a tree.  */
10235 cond = make_node (RTL_EXPR);
10236 TREE_TYPE (cond) = integer_type_node;
10237 RTL_EXPR_RTL (cond) = flag;
10238 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
10239 cond = save_expr (cond);
10241 new_cleanups = build (COND_EXPR, void_type_node,
10242 truthvalue_conversion (cond),
10243 cleanups, integer_zero_node);
10244 new_cleanups = fold (new_cleanups);
10248 /* Now add in the conditionalized cleanups. */
10250 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
10251 expand_eh_region_start ();
10261 case COMPOUND_EXPR:
10262 push_temp_slots ();
10263 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
10264 preserve_temp_slots (NULL_RTX);
10265 free_temp_slots ();
10268 do_pending_stack_adjust ();
10269 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10272 case COMPONENT_REF:
10273 case BIT_FIELD_REF:
10276 int bitsize, bitpos, unsignedp;
10277 enum machine_mode mode;
10283 /* Get description of this reference. We don't actually care
10284 about the underlying object here. */
10285 get_inner_reference (exp, &bitsize, &bitpos, &offset,
10286 &mode, &unsignedp, &volatilep,
10289 type = type_for_size (bitsize, unsignedp);
10290 if (! SLOW_BYTE_ACCESS
10291 && type != 0 && bitsize >= 0
10292 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10293 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10294 != CODE_FOR_nothing))
10296 do_jump (convert (type, exp), if_false_label, if_true_label);
10303 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10304 if (integer_onep (TREE_OPERAND (exp, 1))
10305 && integer_zerop (TREE_OPERAND (exp, 2)))
10306 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10308 else if (integer_zerop (TREE_OPERAND (exp, 1))
10309 && integer_onep (TREE_OPERAND (exp, 2)))
10310 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10315 tree cleanups_left_side, cleanups_right_side, old_cleanups;
10317 register rtx label1 = gen_label_rtx ();
10318 drop_through_label = gen_label_rtx ();
10320 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10322 /* We need to save the cleanups for the lhs and rhs separately.
10323 Keep track of the cleanups seen before the lhs. */
10324 old_cleanups = cleanups_this_call;
10326 /* Now the THEN-expression. */
10327 do_jump (TREE_OPERAND (exp, 1),
10328 if_false_label ? if_false_label : drop_through_label,
10329 if_true_label ? if_true_label : drop_through_label);
10330 /* In case the do_jump just above never jumps. */
10331 do_pending_stack_adjust ();
10332 emit_label (label1);
10333 seq1 = get_insns ();
10334 /* Now grab the cleanups for the lhs. */
10335 cleanups_left_side = defer_cleanups_to (old_cleanups);
10338 /* And keep track of where we start before the rhs. */
10339 old_cleanups = cleanups_this_call;
10341 /* Now the ELSE-expression. */
10342 do_jump (TREE_OPERAND (exp, 2),
10343 if_false_label ? if_false_label : drop_through_label,
10344 if_true_label ? if_true_label : drop_through_label);
10345 seq2 = get_insns ();
10346 /* Grab the cleanups for the rhs. */
10347 cleanups_right_side = defer_cleanups_to (old_cleanups);
10350 if (cleanups_left_side || cleanups_right_side)
10352 /* Make the cleanups for the THEN and ELSE clauses
10353 conditional based on which half is executed. */
10354 rtx flag = gen_reg_rtx (word_mode);
10358 /* Set the flag to 0 so that we know we executed the lhs. */
10359 emit_move_insn (flag, const0_rtx);
10362 /* Set the flag to 1 so that we know we executed the rhs. */
10363 emit_move_insn (flag, const1_rtx);
10366 /* Make sure the cleanup lives on the function_obstack. */
10367 push_obstacks_nochange ();
10368 resume_temporary_allocation ();
10370 /* Now, build up a COND_EXPR that tests the value of the
10371 flag, and then either do the cleanups for the lhs or the
10372 rhs.  */
10373 cond = make_node (RTL_EXPR);
10374 TREE_TYPE (cond) = integer_type_node;
10375 RTL_EXPR_RTL (cond) = flag;
10376 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
10377 cond = save_expr (cond);
10379 new_cleanups = build (COND_EXPR, void_type_node,
10380 truthvalue_conversion (cond),
10381 cleanups_right_side, cleanups_left_side);
10382 new_cleanups = fold (new_cleanups);
10386 /* Now add in the conditionalized cleanups. */
10387 cleanups_this_call
10388 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
10389 expand_eh_region_start ();
10393 /* No cleanups were needed, so emit the two sequences
10394 directly.  */
10403 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10405 if (integer_zerop (TREE_OPERAND (exp, 1)))
10406 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10407 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10408 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10411 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10412 fold (build (EQ_EXPR, TREE_TYPE (exp),
10413 fold (build1 (REALPART_EXPR,
10414 TREE_TYPE (inner_type),
10415 TREE_OPERAND (exp, 0))),
10416 fold (build1 (REALPART_EXPR,
10417 TREE_TYPE (inner_type),
10418 TREE_OPERAND (exp, 1))))),
10419 fold (build (EQ_EXPR, TREE_TYPE (exp),
10420 fold (build1 (IMAGPART_EXPR,
10421 TREE_TYPE (inner_type),
10422 TREE_OPERAND (exp, 0))),
10423 fold (build1 (IMAGPART_EXPR,
10424 TREE_TYPE (inner_type),
10425 TREE_OPERAND (exp, 1))))))),
10426 if_false_label, if_true_label);
10427 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10428 && !can_compare_p (TYPE_MODE (inner_type)))
10429 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10431 comparison = compare (exp, EQ, EQ);
10437 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10439 if (integer_zerop (TREE_OPERAND (exp, 1)))
10440 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10441 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10442 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10445 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10446 fold (build (NE_EXPR, TREE_TYPE (exp),
10447 fold (build1 (REALPART_EXPR,
10448 TREE_TYPE (inner_type),
10449 TREE_OPERAND (exp, 0))),
10450 fold (build1 (REALPART_EXPR,
10451 TREE_TYPE (inner_type),
10452 TREE_OPERAND (exp, 1))))),
10453 fold (build (NE_EXPR, TREE_TYPE (exp),
10454 fold (build1 (IMAGPART_EXPR,
10455 TREE_TYPE (inner_type),
10456 TREE_OPERAND (exp, 0))),
10457 fold (build1 (IMAGPART_EXPR,
10458 TREE_TYPE (inner_type),
10459 TREE_OPERAND (exp, 1))))))),
10460 if_false_label, if_true_label);
10461 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10462 && !can_compare_p (TYPE_MODE (inner_type)))
10463 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10465 comparison = compare (exp, NE, NE);
10470 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10472 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10473 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10475 comparison = compare (exp, LT, LTU);
10479 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10481 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10482 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10484 comparison = compare (exp, LE, LEU);
10488 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10490 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10491 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10493 comparison = compare (exp, GT, GTU);
10497 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10499 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10500 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10502 comparison = compare (exp, GE, GEU);
10507 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10509 /* This is not needed any more and causes poor code since it causes
10510 comparisons and tests from non-SI objects to have different code
10511 sequences.  */
10512 /* Copy to register to avoid generating bad insns by cse
10513 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10514 if (!cse_not_expected && GET_CODE (temp) == MEM)
10515 temp = copy_to_reg (temp);
10517 do_pending_stack_adjust ();
10518 if (GET_CODE (temp) == CONST_INT)
10519 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10520 else if (GET_CODE (temp) == LABEL_REF)
10521 comparison = const_true_rtx;
10522 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10523 && !can_compare_p (GET_MODE (temp)))
10524 /* Note that swapping the labels gives us not-equal. */
10525 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10526 else if (GET_MODE (temp) != VOIDmode)
10527 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
10528 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10529 GET_MODE (temp), NULL_RTX, 0);
10534 /* Do any postincrements in the expression that was tested. */
10537 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10538 straight into a conditional jump instruction as the jump condition.
10539 Otherwise, all the work has been done already. */
10541 if (comparison == const_true_rtx)
10543 if (if_true_label)
10544 emit_jump (if_true_label);
10546 else if (comparison == const0_rtx)
10548 if (if_false_label)
10549 emit_jump (if_false_label);
10551 else if (comparison)
10552 do_jump_for_compare (comparison, if_false_label, if_true_label);
10554 if (drop_through_label)
10556 /* If do_jump produces code that might be jumped around,
10557 do any stack adjusts from that code, before the place
10558 where control merges in. */
10559 do_pending_stack_adjust ();
10560 emit_label (drop_through_label);
10564 /* Given a comparison expression EXP for values too wide to be compared
10565 with one insn, test the comparison and jump to the appropriate label.
10566 The code of EXP is ignored; we always test GT if SWAP is 0,
10567 and LT if SWAP is 1. */
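/* As a rough illustration, for an unsigned DImode comparison on a
   32-bit target the word loop below behaves like

	if (op0.hi > op1.hi) goto if_true_label;
	if (op0.hi != op1.hi) goto if_false_label;
	if (op0.lo > op1.lo) goto if_true_label;
	goto if_false_label;

   For a signed comparison only the high-order word is compared
   signed; all lower words are compared unsigned.  */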
10570 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10573 rtx if_false_label, if_true_label;
10575 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10576 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10577 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10578 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10579 rtx drop_through_label = 0;
10580 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10583 if (! if_true_label || ! if_false_label)
10584 drop_through_label = gen_label_rtx ();
10585 if (! if_true_label)
10586 if_true_label = drop_through_label;
10587 if (! if_false_label)
10588 if_false_label = drop_through_label;
10590 /* Compare a word at a time, high order first. */
10591 for (i = 0; i < nwords; i++)
10594 rtx op0_word, op1_word;
10596 if (WORDS_BIG_ENDIAN)
10598 op0_word = operand_subword_force (op0, i, mode);
10599 op1_word = operand_subword_force (op1, i, mode);
10603 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10604 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10607 /* All but the high-order word must be compared as unsigned. */
10608 comp = compare_from_rtx (op0_word, op1_word,
10609 (unsignedp || i > 0) ? GTU : GT,
10610 unsignedp, word_mode, NULL_RTX, 0);
10611 if (comp == const_true_rtx)
10612 emit_jump (if_true_label);
10613 else if (comp != const0_rtx)
10614 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10616 /* Consider lower words only if these are equal. */
10617 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10619 if (comp == const_true_rtx)
10620 emit_jump (if_false_label);
10621 else if (comp != const0_rtx)
10622 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10625 if (if_false_label)
10626 emit_jump (if_false_label);
10627 if (drop_through_label)
10628 emit_label (drop_through_label);
10631 /* Compare OP0 with OP1, word at a time, in mode MODE.
10632 UNSIGNEDP says to do unsigned comparison.
10633 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10636 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10637 enum machine_mode mode;
10640 rtx if_false_label, if_true_label;
10642 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10643 rtx drop_through_label = 0;
10646 if (! if_true_label || ! if_false_label)
10647 drop_through_label = gen_label_rtx ();
10648 if (! if_true_label)
10649 if_true_label = drop_through_label;
10650 if (! if_false_label)
10651 if_false_label = drop_through_label;
10653 /* Compare a word at a time, high order first. */
10654 for (i = 0; i < nwords; i++)
10657 rtx op0_word, op1_word;
10659 if (WORDS_BIG_ENDIAN)
10661 op0_word = operand_subword_force (op0, i, mode);
10662 op1_word = operand_subword_force (op1, i, mode);
10666 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10667 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10670 /* All but the high-order word must be compared as unsigned. */
10671 comp = compare_from_rtx (op0_word, op1_word,
10672 (unsignedp || i > 0) ? GTU : GT,
10673 unsignedp, word_mode, NULL_RTX, 0);
10674 if (comp == const_true_rtx)
10675 emit_jump (if_true_label);
10676 else if (comp != const0_rtx)
10677 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10679 /* Consider lower words only if these are equal. */
10680 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10682 if (comp == const_true_rtx)
10683 emit_jump (if_false_label);
10684 else if (comp != const0_rtx)
10685 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10688 if (if_false_label)
10689 emit_jump (if_false_label);
10690 if (drop_through_label)
10691 emit_label (drop_through_label);
10694 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10695 with one insn, test the comparison and jump to the appropriate label. */
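/* Roughly, for a two-word comparison the loop below amounts to

	if (op0.word[0] != op1.word[0]) goto if_false_label;
	if (op0.word[1] != op1.word[1]) goto if_false_label;
	goto if_true_label;

   the first word pair found unequal settles the whole comparison.  */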
10698 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10700 rtx if_false_label, if_true_label;
10702 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10703 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10704 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10705 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10707 rtx drop_through_label = 0;
10709 if (! if_false_label)
10710 drop_through_label = if_false_label = gen_label_rtx ();
10712 for (i = 0; i < nwords; i++)
10714 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
10715 operand_subword_force (op1, i, mode),
10716 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10717 word_mode, NULL_RTX, 0);
10718 if (comp == const0_rtx)
10719 emit_jump (if_false_label);
10720 else if (comp != const_true_rtx)
10721 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10724 if (if_true_label)
10725 emit_jump (if_true_label);
10726 if (drop_through_label)
10727 emit_label (drop_through_label);
10730 /* Jump according to whether OP0 is 0.
10731 We assume that OP0 has an integer mode that is too wide
10732 for the available compare insns. */
10735 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10737 rtx if_false_label, if_true_label;
10739 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10741 rtx drop_through_label = 0;
10743 if (! if_false_label)
10744 drop_through_label = if_false_label = gen_label_rtx ();
10746 for (i = 0; i < nwords; i++)
10748 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
10750 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
10751 if (comp == const0_rtx)
10752 emit_jump (if_false_label);
10753 else if (comp != const_true_rtx)
10754 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10757 if (if_true_label)
10758 emit_jump (if_true_label);
10759 if (drop_through_label)
10760 emit_label (drop_through_label);
10763 /* Given a comparison expression in rtl form, output conditional branches to
10764 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
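/* For example, when only IF_FALSE_LABEL is given, an EQ comparison is
   first emitted as a branch-if-equal to IF_FALSE_LABEL (the opposite
   of what is wanted) and then inverted in place into a
   branch-if-not-equal to IF_FALSE_LABEL.  */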
10767 do_jump_for_compare (comparison, if_false_label, if_true_label)
10768 rtx comparison, if_false_label, if_true_label;
10772 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10773 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
10777 if (if_false_label)
10778 emit_jump (if_false_label);
10780 else if (if_false_label)
10783 rtx prev = get_last_insn ();
10786 /* Output the branch with the opposite condition. Then try to invert
10787 what is generated. If more than one insn is a branch, or if the
10788 branch is not the last insn written, abort. If we can't invert
10789 the branch, make a true label, redirect this jump to it,
10790 emit a jump to the false label, and define the true label. */
10792 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10793 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
10797 /* Here we get the first insn that was just emitted. It used to be the
10798 case that, on some machines, emitting the branch would discard
10799 the previous compare insn and emit a replacement. This isn't
10800 done anymore, but abort if we see that PREV is deleted. */
10802 if (prev == 0)
10803 insn = get_insns ();
10804 else if (INSN_DELETED_P (prev))
10805 abort ();
10806 else
10807 insn = NEXT_INSN (prev);
10809 for (; insn; insn = NEXT_INSN (insn))
10810 if (GET_CODE (insn) == JUMP_INSN)
10817 if (branch != get_last_insn ())
10818 abort ();
10820 JUMP_LABEL (branch) = if_false_label;
10821 if (! invert_jump (branch, if_false_label))
10823 if_true_label = gen_label_rtx ();
10824 redirect_jump (branch, if_true_label);
10825 emit_jump (if_false_label);
10826 emit_label (if_true_label);
10831 /* Generate code for a comparison expression EXP
10832 (including code to compute the values to be compared)
10833 and set (CC0) according to the result.
10834 SIGNED_CODE should be the rtx operation for this comparison for
10835 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10837 We force a stack adjustment unless there are currently
10838 things pushed on the stack that aren't yet used. */
10841 compare (exp, signed_code, unsigned_code)
10843 enum rtx_code signed_code, unsigned_code;
10846 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10848 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10849 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
10850 register enum machine_mode mode = TYPE_MODE (type);
10851 int unsignedp = TREE_UNSIGNED (type);
10852 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
10854 #ifdef HAVE_canonicalize_funcptr_for_compare
10855 /* If function pointers need to be "canonicalized" before they can
10856 be reliably compared, then canonicalize them. */
10857 if (HAVE_canonicalize_funcptr_for_compare
10858 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10859 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10862 rtx new_op0 = gen_reg_rtx (mode);
10864 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10865 op0 = new_op0;
10868 if (HAVE_canonicalize_funcptr_for_compare
10869 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10870 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10873 rtx new_op1 = gen_reg_rtx (mode);
10875 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10876 op1 = new_op1;
10880 return compare_from_rtx (op0, op1, code, unsignedp, mode,
10881 ((mode == BLKmode)
10882 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10883 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
10886 /* Like compare but expects the values to compare as two rtx's.
10887 The decision as to signed or unsigned comparison must be made by the caller.
10889 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10890 compared.
10892 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10893 size of MODE should be used. */
10896 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10897 register rtx op0, op1;
10898 enum rtx_code code;
10900 enum machine_mode mode;
10906 /* If one operand is constant, make it the second one. Only do this
10907 if the other operand is not constant as well. */
10909 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10910 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10915 code = swap_condition (code);
10918 if (flag_force_mem)
10920 op0 = force_not_mem (op0);
10921 op1 = force_not_mem (op1);
10924 do_pending_stack_adjust ();
10926 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10927 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10931 /* There's no need to do this now that combine.c can eliminate lots of
10932 sign extensions. This can be less efficient in certain cases on other
10933 machines.  */
10935 /* If this is a signed equality comparison, we can do it as an
10936 unsigned comparison since zero-extension is cheaper than sign
10937 extension and comparisons with zero are done as unsigned. This is
10938 the case even on machines that can do fast sign extension, since
10939 zero-extension is easier to combine with other operations than
10940 sign-extension is. If we are comparing against a constant, we must
10941 convert it to what it would look like unsigned. */
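/* E.g. when comparing a QImode value against the constant -1, the
   masking below turns (const_int -1) into (const_int 255), which is
   what -1 looks like as an unsigned QImode value.  */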
10942 if ((code == EQ || code == NE) && ! unsignedp
10943 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10945 if (GET_CODE (op1) == CONST_INT
10946 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10947 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10952 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10954 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
10957 /* Generate code to calculate EXP using a store-flag instruction
10958 and return an rtx for the result. EXP is either a comparison
10959 or a TRUTH_NOT_EXPR whose operand is a comparison.
10961 If TARGET is nonzero, store the result there if convenient.
10963 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10964 cheap.
10966 Return zero if there is no suitable set-flag instruction
10967 available on this machine.
10969 Once expand_expr has been called on the arguments of the comparison,
10970 we are committed to doing the store flag, since it is not safe to
10971 re-evaluate the expression. We emit the store-flag insn by calling
10972 emit_store_flag, but only expand the arguments if we have a reason
10973 to believe that emit_store_flag will be successful. If we think that
10974 it will, but it isn't, we have to simulate the store-flag with a
10975 set/jump/set sequence. */
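/* The set/jump/set sequence mentioned above amounts to this sketch:

	target = 1;			(0 if INVERT)
	if (condition) goto label;
	target = 0;			(1 if INVERT)
     label:

   i.e. assume the comparison holds and branch around the store that
   would cancel that assumption.  */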
10978 do_store_flag (exp, target, mode, only_cheap)
10981 enum machine_mode mode;
10984 enum rtx_code code;
10985 tree arg0, arg1, type;
10987 enum machine_mode operand_mode;
10991 enum insn_code icode;
10992 rtx subtarget = target;
10993 rtx result, label, pattern, jump_pat;
10995 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10996 result at the end. We can't simply invert the test since it would
10997 have already been inverted if it were valid. This case occurs for
10998 some floating-point comparisons. */
11000 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
11001 invert = 1, exp = TREE_OPERAND (exp, 0);
11003 arg0 = TREE_OPERAND (exp, 0);
11004 arg1 = TREE_OPERAND (exp, 1);
11005 type = TREE_TYPE (arg0);
11006 operand_mode = TYPE_MODE (type);
11007 unsignedp = TREE_UNSIGNED (type);
11009 /* We won't bother with BLKmode store-flag operations because it would mean
11010 passing a lot of information to emit_store_flag. */
11011 if (operand_mode == BLKmode)
11014 /* We won't bother with store-flag operations involving function pointers
11015 when function pointers must be canonicalized before comparisons. */
11016 #ifdef HAVE_canonicalize_funcptr_for_compare
11017 if (HAVE_canonicalize_funcptr_for_compare
11018 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
11019 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
11021 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
11022 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
11023 == FUNCTION_TYPE))))
11030 /* Get the rtx comparison code to use. We know that EXP is a comparison
11031 operation of some type. Some comparisons against 1 and -1 can be
11032 converted to comparisons with zero. Do so here so that the tests
11033 below will be aware that we have a comparison with zero. These
11034 tests will not catch constants in the first operand, but constants
11035 are rarely passed as the first operand. */
11037 switch (TREE_CODE (exp))
11046 if (integer_onep (arg1))
11047 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
11049 code = unsignedp ? LTU : LT;
11052 if (! unsignedp && integer_all_onesp (arg1))
11053 arg1 = integer_zero_node, code = LT;
11055 code = unsignedp ? LEU : LE;
11058 if (! unsignedp && integer_all_onesp (arg1))
11059 arg1 = integer_zero_node, code = GE;
11061 code = unsignedp ? GTU : GT;
11064 if (integer_onep (arg1))
11065 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
11067 code = unsignedp ? GEU : GE;
11073 /* Put a constant second. */
11074 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
11076 tem = arg0; arg0 = arg1; arg1 = tem;
11077 code = swap_condition (code);
11080 /* If this is an equality or inequality test of a single bit, we can
11081 do this by shifting the bit being tested to the low-order bit and
11082 masking the result with the constant 1. If the condition was EQ,
11083 we xor it with 1. This does not require an scc insn and is faster
11084 than an scc insn even if we have it. */
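/* For example, with a 32-bit X,

	(X & 8) != 0	is computed as	(X >> 3) & 1
	(X & 8) == 0	is computed as	((X >> 3) & 1) ^ 1

   and when the bit tested is the sign bit, the logical shift already
   leaves just that bit, so the AND can be omitted entirely.  */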
11086 if ((code == NE || code == EQ)
11087 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
11088 && integer_pow2p (TREE_OPERAND (arg0, 1))
11089 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
11091 tree inner = TREE_OPERAND (arg0, 0);
11096 tem = INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
11097 NULL_RTX, VOIDmode, 0));
11098 /* In this case, immed_double_const will sign extend the value to make
11099 it look the same on the host and target. We must remove the
11100 sign-extension before calling exact_log2, since exact_log2 will
11101 fail for negative values. */
11102 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT
11103 && BITS_PER_WORD == GET_MODE_BITSIZE (TYPE_MODE (type)))
11104 /* We don't use the obvious constant shift to generate the mask,
11105 because that generates compiler warnings when BITS_PER_WORD is
11106 greater than or equal to HOST_BITS_PER_WIDE_INT, even though this
11107 code is unreachable in that case. */
11108 tem = tem & GET_MODE_MASK (word_mode);
11109 bitnum = exact_log2 (tem);
11111 /* If INNER is a right shift of a constant and it plus BITNUM does
11112 not overflow, adjust BITNUM and INNER. */
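/* E.g. testing bit 2 of (X >> 3) is the same as testing bit 5 of X,
   provided bit 5 still falls within the precision of the type.  */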
11114 if (TREE_CODE (inner) == RSHIFT_EXPR
11115 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
11116 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
11117 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
11118 < TYPE_PRECISION (type)))
11120 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
11121 inner = TREE_OPERAND (inner, 0);
11124 /* If we are going to be able to omit the AND below, we must do our
11125 operations as unsigned. If we must use the AND, we have a choice.
11126 Normally unsigned is faster, but for some machines signed is. */
11127 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
11128 #ifdef LOAD_EXTEND_OP
11129 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
11135 if (subtarget == 0 || GET_CODE (subtarget) != REG
11136 || GET_MODE (subtarget) != operand_mode
11137 || ! safe_from_p (subtarget, inner))
11140 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
11143 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
11144 size_int (bitnum), subtarget, ops_unsignedp);
11146 if (GET_MODE (op0) != mode)
11147 op0 = convert_to_mode (mode, op0, ops_unsignedp);
11149 if ((code == EQ && ! invert) || (code == NE && invert))
11150 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
11151 ops_unsignedp, OPTAB_LIB_WIDEN);
11153 /* Put the AND last so it can combine with more things. */
11154 if (bitnum != TYPE_PRECISION (type) - 1)
11155 op0 = expand_and (op0, const1_rtx, subtarget);
11160 /* Now see if we are likely to be able to do this. Return if not. */
11161 if (! can_compare_p (operand_mode))
11163 icode = setcc_gen_code[(int) code];
11164 if (icode == CODE_FOR_nothing
11165 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
11167 /* We can only do this if it is one of the special cases that
11168 can be handled without an scc insn. */
11169 if ((code == LT && integer_zerop (arg1))
11170 || (! only_cheap && code == GE && integer_zerop (arg1)))
11172 else if (BRANCH_COST >= 0
11173 && ! only_cheap && (code == NE || code == EQ)
11174 && TREE_CODE (type) != REAL_TYPE
11175 && ((abs_optab->handlers[(int) operand_mode].insn_code
11176 != CODE_FOR_nothing)
11177 || (ffs_optab->handlers[(int) operand_mode].insn_code
11178 != CODE_FOR_nothing)))
11184 preexpand_calls (exp);
11185 if (subtarget == 0 || GET_CODE (subtarget) != REG
11186 || GET_MODE (subtarget) != operand_mode
11187 || ! safe_from_p (subtarget, arg1))
11190 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
11191 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
11194 target = gen_reg_rtx (mode);
11196 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
11197 because, if emit_store_flag does anything, it will succeed and
11198 OP0 and OP1 will not be used subsequently. */
11200 result = emit_store_flag (target, code,
11201 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
11202 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
11203 operand_mode, unsignedp, 1);
11208 result = expand_binop (mode, xor_optab, result, const1_rtx,
11209 result, 0, OPTAB_LIB_WIDEN);
11213 /* If this failed, we have to do this with set/compare/jump/set code. */
11214 if (GET_CODE (target) != REG
11215 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
11216 target = gen_reg_rtx (GET_MODE (target));
11218 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
11219 result = compare_from_rtx (op0, op1, code, unsignedp,
11220 operand_mode, NULL_RTX, 0);
11221 if (GET_CODE (result) == CONST_INT)
11222 return (((result == const0_rtx && ! invert)
11223 || (result != const0_rtx && invert))
11224 ? const0_rtx : const1_rtx);
11226 label = gen_label_rtx ();
11227 if (bcc_gen_fctn[(int) code] == 0)
11230 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
11231 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
11232 emit_label (label);
11237 /* Generate a tablejump instruction (used for switch statements). */
11239 #ifdef HAVE_tablejump
11241 /* INDEX is the value being switched on, with the lowest value
11242 in the table already subtracted.
11243 MODE is its expected mode (needed if INDEX is constant).
11244 RANGE is the length of the jump table.
11245 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11247 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11248 index value is out of range. */
11251 do_tablejump (index, mode, range, table_label, default_label)
11252 rtx index, range, table_label, default_label;
11253 enum machine_mode mode;
11255 register rtx temp, vector;
11257 /* Do an unsigned comparison (in the proper mode) between the index
11258 expression and the value which represents the length of the range.
11259 Since we just finished subtracting the lower bound of the range
11260 from the index expression, this comparison allows us to simultaneously
11261 check that the original index expression value is both greater than
11262 or equal to the minimum value of the range and less than or equal to
11263 the maximum value of the range. */
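/* E.g. for "switch (i)" with cases 5 through 9, INDEX is i - 5 and
   RANGE is 4; the single unsigned test

	if ((unsigned) (i - 5) > 4) goto default_label;

   rejects both i < 5 (which wraps around to a large unsigned value)
   and i > 9.  */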
11265 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
11266 emit_jump_insn (gen_bgtu (default_label));
11268 /* If index is in range, it must fit in Pmode.
11269 Convert to Pmode so we can index with it. */
11271 index = convert_to_mode (Pmode, index, 1);
11273 /* Don't let a MEM slip through, because then INDEX that comes
11274 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11275 and break_out_memory_refs will go to work on it and mess it up. */
11276 #ifdef PIC_CASE_VECTOR_ADDRESS
11277 if (flag_pic && GET_CODE (index) != REG)
11278 index = copy_to_mode_reg (Pmode, index);
11281 /* If flag_force_addr were to affect this address
11282 it could interfere with the tricky assumptions made
11283 about addresses that contain label-refs,
11284 which may be valid only very near the tablejump itself. */
11285 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11286 GET_MODE_SIZE, because this indicates how large insns are. The other
11287 uses should all be Pmode, because they are addresses. This code
11288 could fail if addresses and insns are not the same size. */
11289 index = gen_rtx (PLUS, Pmode,
11290 gen_rtx (MULT, Pmode, index,
11291 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
11292 gen_rtx (LABEL_REF, Pmode, table_label));
11293 #ifdef PIC_CASE_VECTOR_ADDRESS
11294 if (flag_pic)
11295 index = PIC_CASE_VECTOR_ADDRESS (index);
11296 else
11297 #endif
11298 index = memory_address_noforce (CASE_VECTOR_MODE, index);
11299 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11300 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
11301 RTX_UNCHANGING_P (vector) = 1;
11302 convert_move (temp, vector, 0);
11304 emit_jump_insn (gen_tablejump (temp, table_label));
11306 #ifndef CASE_VECTOR_PC_RELATIVE
11307 /* If we are generating PIC code or if the table is PC-relative, the
11308 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11314 #endif /* HAVE_tablejump */
11317 /* Emit a suitable bytecode to load a value from memory, assuming a pointer
11318 to that value is on the top of the stack. The resulting type is TYPE, and
11319 the source declaration is DECL. */
11322 bc_load_memory (type, decl)
11325 enum bytecode_opcode opcode;
11328 /* Bit fields are special. We only know about signed and
11329 unsigned ints, and enums. The latter are treated as
11330 signed integers. */
11332 if (DECL_BIT_FIELD (decl))
11333 if (TREE_CODE (type) == ENUMERAL_TYPE
11334 || TREE_CODE (type) == INTEGER_TYPE)
11335 opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
11339 /* See corresponding comment in bc_store_memory(). */
11340 if (TYPE_MODE (type) == BLKmode
11341 || TYPE_MODE (type) == VOIDmode)
11344 opcode = mode_to_load_map [(int) TYPE_MODE (type)];
11346 if (opcode == neverneverland)
11347 abort ();
11349 bc_emit_bytecode (opcode);
11351 #ifdef DEBUG_PRINT_CODE
11352 fputc ('\n', stderr);
11357 /* Store the contents of the second stack slot to the address in the
11358 top stack slot. DECL is the declaration of the destination and is used
11359 to determine whether we're dealing with a bitfield. */
11362 bc_store_memory (type, decl)
11365 enum bytecode_opcode opcode;
11368 if (DECL_BIT_FIELD (decl))
11370 if (TREE_CODE (type) == ENUMERAL_TYPE
11371 || TREE_CODE (type) == INTEGER_TYPE)
11377 if (TYPE_MODE (type) == BLKmode)
11379 /* Copy structure. This expands to a block copy instruction, storeBLK.
11380 In addition to the arguments expected by the other store instructions,
11381 it also expects a type size (SImode) on top of the stack, which is the
11382 structure size in size units (usually bytes). The first two arguments
11383 are already on the stack; so we just put the size on level 1. For some
11384 other languages the size may be variable; this is why we don't encode
11385 it as a storeBLK literal, but rather treat it as a full-fledged expression. */
11387 bc_expand_expr (TYPE_SIZE (type));
11391 opcode = mode_to_store_map [(int) TYPE_MODE (type)];
11393 if (opcode == neverneverland)
11394 abort ();
11396 bc_emit_bytecode (opcode);
11398 #ifdef DEBUG_PRINT_CODE
11399 fputc ('\n', stderr);
11404 /* Allocate local stack space sufficient to hold a value of the given
11405 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
11406 integral power of 2. A special case is locals of type VOID, which
11407 have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
11408 remapped into the corresponding attribute of SI. */
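/* For example, with local_vars_size == 5 and a requested alignment of
   32 bits (4 bytes), the rounding below places the new local at offset
   8 and advances local_vars_size to 8 + SIZE.  */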
11411 bc_allocate_local (size, alignment)
11412 int size, alignment;
11415 int byte_alignment;
11420 /* Normalize size and alignment.  */
11421 if (size == 0)
11422 size = UNITS_PER_WORD;
11424 if (alignment < BITS_PER_UNIT)
11425 byte_alignment = 1 << (INT_ALIGN - 1);
11427 else
11428 byte_alignment = alignment / BITS_PER_UNIT;
11430 if (local_vars_size & (byte_alignment - 1))
11431 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
11433 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
11434 local_vars_size += size;
11440 /* Allocate variable-sized local array. Variable-sized arrays are
11441 actually pointers to the address in memory where they are stored. */
11444 bc_allocate_variable_array (size)
11448 const int ptralign = (1 << (PTR_ALIGN - 1));
11450 /* Align pointer */
11451 if (local_vars_size & ptralign)
11452 local_vars_size += ptralign - (local_vars_size & ptralign);
11454 /* Note down local space needed: pointer to block; also return
11457 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
11458 local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
11463 /* Push the machine address for the given external variable offset. */
11466 bc_load_externaddr (externaddr)
11469 bc_emit_bytecode (constP);
11470 bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
11471 BYTECODE_BC_LABEL (externaddr)->offset);
11473 #ifdef DEBUG_PRINT_CODE
11474 fputc ('\n', stderr);
11479 /* Like above, but expects an IDENTIFIER. */
11482 bc_load_externaddr_id (id, offset)
11486 if (!IDENTIFIER_POINTER (id))
11487 abort ();
11489 bc_emit_bytecode (constP);
11490 bc_emit_code_labelref (xstrdup (IDENTIFIER_POINTER (id)), offset);
11492 #ifdef DEBUG_PRINT_CODE
11493 fputc ('\n', stderr);
11498 /* Push the machine address for the given local variable offset. */
11501 bc_load_localaddr (localaddr)
11504 bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
11508 /* Push the machine address for the given parameter offset.
11509 NOTE: offset is in bits. */
11512 bc_load_parmaddr (parmaddr)
11515 bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
11520 /* Convert a[i] into *(a + i). */
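/* That is, the tree built below is effectively
   *(&a[0] + i * sizeof (element)); e.g. with "int a[10]" and 4-byte
   ints, a[i] becomes *(p + i * 4), where p points at the first
   element of a.  */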
11523 bc_canonicalize_array_ref (exp)
11526 tree type = TREE_TYPE (exp);
11527 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
11528 TREE_OPERAND (exp, 0));
11529 tree index = TREE_OPERAND (exp, 1);
11532 /* Convert the integer argument to a type the same size as a pointer
11533 so the multiply won't overflow spuriously. */
11535 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
11536 index = convert (type_for_size (POINTER_SIZE, 0), index);
11538 /* The array address isn't volatile even if the array is.
11539 (Of course this isn't terribly relevant since the bytecode
11540 translator treats nearly everything as volatile anyway.) */
11541 TREE_THIS_VOLATILE (array_adr) = 0;
11543 return build1 (INDIRECT_REF, type,
11544 fold (build (PLUS_EXPR,
11545 TYPE_POINTER_TO (type),
11547 fold (build (MULT_EXPR,
11548 TYPE_POINTER_TO (type),
11550 size_in_bytes (type))))));
11554 /* Load the address of the component referenced by the given
11555 COMPONENT_REF expression.
11557 Returns innermost lvalue. */
11560 bc_expand_component_address (exp)
11564 enum machine_mode mode;
11566 HOST_WIDE_INT SIval;
11569 tem = TREE_OPERAND (exp, 1);
11570 mode = DECL_MODE (tem);
11573 /* Compute cumulative bit offset for nested component refs
11574 and array refs, and find the ultimate containing object. */
11576 for (tem = exp;; tem = TREE_OPERAND (tem, 0))
11578 if (TREE_CODE (tem) == COMPONENT_REF)
11579 bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
11581 if (TREE_CODE (tem) == ARRAY_REF
11582 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11583 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
11585 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
11586 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
11587 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
11592 bc_expand_expr (tem);
11595 /* For bitfields also push their offset and size */
11596 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
11597 bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1)));
11599 if ((SIval = bitpos / BITS_PER_UNIT) != 0)
11600 bc_emit_instruction (addconstPSI, SIval);
11602 return (TREE_OPERAND (exp, 1));
11606 /* Emit code to push two SI constants */
11609 bc_push_offset_and_size (offset, size)
11610 HOST_WIDE_INT offset, size;
11612 bc_emit_instruction (constSI, offset);
11613 bc_emit_instruction (constSI, size);
11617 /* Emit byte code to push the address of the given lvalue expression to
11618 the stack. If it's a bit field, we also push offset and size info.
11620 Returns innermost component, which allows us to determine not only
11621 its type, but also whether it's a bitfield. */
11624 bc_expand_address (exp)
11628 if (!exp || TREE_CODE (exp) == ERROR_MARK)
11632 switch (TREE_CODE (exp))
11636 return (bc_expand_address (bc_canonicalize_array_ref (exp)));
11638 case COMPONENT_REF:
11640 return (bc_expand_component_address (exp));
11644 bc_expand_expr (TREE_OPERAND (exp, 0));
11646 /* For variable-sized types: retrieve pointer. Sometimes the
11647 TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
11648 also make sure we have an operand, just in case... */
11650 if (TREE_OPERAND (exp, 0)
11651 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
11652 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
11653 bc_emit_instruction (loadP);
11655 /* If packed, also return offset and size */
11656 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
11658 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
11659 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
11661 return (TREE_OPERAND (exp, 0));
11663 case FUNCTION_DECL:
11665 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
11666 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
11671 bc_load_parmaddr (DECL_RTL (exp));
11673 /* For variable-sized types: retrieve pointer */
11674 if (TYPE_SIZE (TREE_TYPE (exp))
11675 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11676 bc_emit_instruction (loadP);
11678 /* If packed, also return offset and size */
11679 if (DECL_BIT_FIELD (exp))
11680 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11681 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11687 bc_emit_instruction (returnP);
11693 if (BYTECODE_LABEL (DECL_RTL (exp)))
11694 bc_load_externaddr (DECL_RTL (exp));
11697 if (DECL_EXTERNAL (exp))
11698 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
11699 (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
11701 bc_load_localaddr (DECL_RTL (exp));
11703 /* For variable-sized types: retrieve pointer */
11704 if (TYPE_SIZE (TREE_TYPE (exp))
11705 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11706 bc_emit_instruction (loadP);
11708 /* If packed, also return offset and size */
11709 if (DECL_BIT_FIELD (exp))
11710 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11711 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11719 bc_emit_bytecode (constP);
11720 r = output_constant_def (exp);
11721 bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
11723 #ifdef DEBUG_PRINT_CODE
11724 fputc ('\n', stderr);
11735 /* Most lvalues don't have components. */
11740 /* Emit a type code to be used by the runtime support in handling
11741 parameter passing. The type code consists of the machine mode
11742 plus the minimal alignment shifted left 8 bits. */
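/* For instance, an SImode argument with 32-bit alignment is encoded
   as (int) SImode | (32 << 8); the mode sits in the low bits and the
   alignment above them.  */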
11745 bc_runtime_type_code (type)
11750 switch (TREE_CODE (type))
11756 case ENUMERAL_TYPE:
11760 val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
11772 return build_int_2 (val, 0);
11776 /* Generate constructor label */
11779 bc_gen_constr_label ()
11781 static int label_counter;
11782 static char label[20];
11784 sprintf (label, "*LR%d", label_counter++);
11786 return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
11790 /* Evaluate constructor CONSTR and return pointer to it on level one. We
11791 expand the constructor data as static data, and push a pointer to it.
11792 The pointer is put in the pointer table and is retrieved by a constP
11793 bytecode instruction. We then loop and store each constructor member in
11794 the corresponding component. Finally, we return the original pointer on
11795 the stack.  */
11798 bc_expand_constructor (constr)
11802 HOST_WIDE_INT ptroffs;
11806 /* Literal constructors are handled as constants, whereas
11807 non-literals are evaluated and stored element by element
11808 into the data segment. */
11810 /* Allocate space in proper segment and push pointer to space on stack.  */
11813 l = bc_gen_constr_label ();
11815 if (TREE_CONSTANT (constr))
11819 bc_emit_const_labeldef (l);
11820 bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
11826 bc_emit_data_labeldef (l);
11827 bc_output_data_constructor (constr);
11831 /* Add reference to pointer table and recall pointer to stack;
11832 this code is common for both types of constructors: literals
11833 and non-literals. */
11835 ptroffs = bc_define_pointer (l);
11836 bc_emit_instruction (constP, ptroffs);
11838 /* This is all that has to be done if it's a literal. */
11839 if (TREE_CONSTANT (constr))
11843 /* At this point, we have the pointer to the structure on top of the stack.
11844 Generate sequences of store_memory calls for the constructor. */
11846 /* Constructor type is structure */
11847 if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
11851 /* If the constructor has fewer fields than the structure,
11852 clear the whole structure first. */
11854 if (list_length (CONSTRUCTOR_ELTS (constr))
11855 != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
11857 bc_emit_instruction (duplicate);
11858 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11859 bc_emit_instruction (clearBLK);
11862 /* Store each element of the constructor into the corresponding
11863 field of TARGET. */
11865 for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
11867 register tree field = TREE_PURPOSE (elt);
11868 register enum machine_mode mode;
11873 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
11874 mode = DECL_MODE (field);
11875 unsignedp = TREE_UNSIGNED (field);
11877 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
11879 bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11880 /* The alignment of TARGET is
11881 at least what its type requires. */
11883 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11884 int_size_in_bytes (TREE_TYPE (constr)));
11889 /* Constructor type is array */
11890 if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
11894 tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
11895 int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
11896 int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
11897 tree elttype = TREE_TYPE (TREE_TYPE (constr));
11899 /* If the constructor has fewer elements than the array,
11900 clear the whole array first. */
11902 if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
11904 bc_emit_instruction (duplicate);
11905 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11906 bc_emit_instruction (clearBLK);
11910 /* Store each element of the constructor into the corresponding
11911 element of TARGET, determined by counting the elements. */
11913 for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
11915 elt = TREE_CHAIN (elt), i++)
11917 register enum machine_mode mode;
11922 mode = TYPE_MODE (elttype);
11923 bitsize = GET_MODE_BITSIZE (mode);
11924 unsignedp = TREE_UNSIGNED (elttype);
11926 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
11927 /* * TYPE_SIZE_UNIT (elttype) */ );
11929 bc_store_field (elt, bitsize, bitpos, mode,
11930 TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11931 /* The alignment of TARGET is
11932 at least what its type requires. */
11934 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11935 int_size_in_bytes (TREE_TYPE (constr)));
11942 /* Store the value of EXP (an expression tree) into member FIELD of
11943 structure at address on stack, which has type TYPE, mode MODE and
11944 occupies BITSIZE bits, starting BITPOS bits from the beginning of the
11947 ALIGN is the alignment that TARGET is known to have, measured in bytes.
11948 TOTAL_SIZE is its size in bytes, or -1 if variable. */
11951 bc_store_field (field, bitsize, bitpos, mode, exp, type,
11952 value_mode, unsignedp, align, total_size)
11953 int bitsize, bitpos;
11954 enum machine_mode mode;
11955 tree field, exp, type;
11956 enum machine_mode value_mode;
11962 /* Expand expression and copy pointer */
11963 bc_expand_expr (exp);
11964 bc_emit_instruction (over);
11967 /* If the component is a bit field, we cannot use addressing to access
11968 it. Use bit-field techniques to store in it. */
11970 if (DECL_BIT_FIELD (field))
11972 bc_store_bit_field (bitpos, bitsize, unsignedp);
11976 /* Not bit field */
11978 HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;
11980 /* Advance pointer to the desired member */
11982 bc_emit_instruction (addconstPSI, offset);
11985 bc_store_memory (type, field);
11990 /* Store SI/SU in bitfield */
11993 bc_store_bit_field (offset, size, unsignedp)
11994 int offset, size, unsignedp;
11996 /* Push bitfield offset and size */
11997 bc_push_offset_and_size (offset, size);
12000 bc_emit_instruction (sstoreBI);
12004 /* Load SI/SU from bitfield */
12007 bc_load_bit_field (offset, size, unsignedp)
12008 int offset, size, unsignedp;
12010 /* Push bitfield offset and size */
12011 bc_push_offset_and_size (offset, size);
12013 /* Load: sign-extend if signed, else zero-extend */
12014 bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
12018 /* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
12019 (adjust stack pointer upwards); negative means add that number of
12020 levels (adjust the stack pointer downwards). Only positive values
12021 normally make sense. */
12024 bc_adjust_stack (nlevels)
12033 bc_emit_instruction (drop);
12036 bc_emit_instruction (drop);
12041 bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
12042 stack_depth -= nlevels;
12045 #if defined (VALIDATE_STACK_FOR_BC)
12046 VALIDATE_STACK_FOR_BC ();