1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 92, 93, 94, 95, 1996 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
29 #include "hard-reg-set.h"
32 #include "insn-flags.h"
33 #include "insn-codes.h"
35 #include "insn-config.h"
38 #include "typeclass.h"
41 #include "bc-opcode.h"
42 #include "bc-typecd.h"
47 #define CEIL(x,y) (((x) + (y) - 1) / (y))
49 /* Decide whether a function's arguments should be processed
50 from first to last or from last to first.
52 They should if the stack and args grow in opposite directions, but
53 only if we have push insns. */
57 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
58 #define PUSH_ARGS_REVERSED /* If it's last to first */
63 #ifndef STACK_PUSH_CODE
64 #ifdef STACK_GROWS_DOWNWARD
65 #define STACK_PUSH_CODE PRE_DEC
67 #define STACK_PUSH_CODE PRE_INC
71 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
72 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
74 /* If this is nonzero, we do not bother generating VOLATILE
75 around volatile memory references, and we are willing to
76 output indirect addresses. If cse is to follow, we reject
77 indirect addresses so a useful potential cse is generated;
78 if it is used only once, instruction combination will produce
79 the same indirect address eventually. */
82 /* Nonzero to generate code for all the subroutines within an
83 expression before generating the upper levels of the expression.
84 Nowadays this is never zero. */
85 int do_preexpand_calls = 1;
87 /* Number of units that we should eventually pop off the stack.
88 These are the arguments to function calls that have already returned. */
89 int pending_stack_adjust;
91 /* Nonzero means stack pops must not be deferred, and deferred stack
92 pops must not be output. It is nonzero inside a function call,
93 inside a conditional expression, inside a statement expression,
94 and in other cases as well. */
95 int inhibit_defer_pop;
97 /* A list of all cleanups which belong to the arguments of
98 function calls being expanded by expand_call. */
99 tree cleanups_this_call;
101 /* When temporaries are created by TARGET_EXPRs, they are created at
102 this level of temp_slot_level, so that they can remain allocated
103 until no longer needed. CLEANUP_POINT_EXPRs define the lifetime
105 int target_temp_slot_level;
107 /* Nonzero means __builtin_saveregs has already been done in this function.
108 The value is the pseudoreg containing the value __builtin_saveregs
110 static rtx saveregs_value;
112 /* Similarly for __builtin_apply_args. */
113 static rtx apply_args_value;
115 /* This structure is used by move_by_pieces to describe the move to
118 struct move_by_pieces
128 int explicit_inc_from;
135 /* This structure is used by clear_by_pieces to describe the clear to
138 struct clear_by_pieces
150 /* Used to generate bytecodes: keep track of size of local variables,
151 as well as depth of arithmetic stack. (Notice that variables are
152 stored on the machine's stack, not the arithmetic stack.) */
154 extern int local_vars_size;
155 extern int stack_depth;
156 extern int max_stack_depth;
157 extern struct obstack permanent_obstack;
158 extern rtx arg_pointer_save_area;
160 static rtx enqueue_insn PROTO((rtx, rtx));
161 static int queued_subexp_p PROTO((rtx));
162 static void init_queue PROTO((void));
163 static void move_by_pieces PROTO((rtx, rtx, int, int));
164 static int move_by_pieces_ninsns PROTO((unsigned int, int));
165 static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
166 struct move_by_pieces *));
167 static void clear_by_pieces PROTO((rtx, int, int));
168 static void clear_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
169 struct clear_by_pieces *));
170 static int is_zeros_p PROTO((tree));
171 static int mostly_zeros_p PROTO((tree));
172 static void store_constructor PROTO((tree, rtx, int));
173 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
174 enum machine_mode, int, int, int));
175 static int get_inner_unaligned_p PROTO((tree));
176 static tree save_noncopied_parts PROTO((tree, tree));
177 static tree init_noncopied_parts PROTO((tree, tree));
178 static int safe_from_p PROTO((rtx, tree));
179 static int fixed_type_p PROTO((tree));
180 static rtx var_rtx PROTO((tree));
181 static int get_pointer_alignment PROTO((tree, unsigned));
182 static tree string_constant PROTO((tree, tree *));
183 static tree c_strlen PROTO((tree));
184 static rtx expand_builtin PROTO((tree, rtx, rtx,
185 enum machine_mode, int));
186 static int apply_args_size PROTO((void));
187 static int apply_result_size PROTO((void));
188 static rtx result_vector PROTO((int, rtx));
189 static rtx expand_builtin_apply_args PROTO((void));
190 static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
191 static void expand_builtin_return PROTO((rtx));
192 static rtx expand_increment PROTO((tree, int, int));
193 void bc_expand_increment PROTO((struct increment_operator *, tree));
194 rtx bc_allocate_local PROTO((int, int));
195 void bc_store_memory PROTO((tree, tree));
196 tree bc_expand_component_address PROTO((tree));
197 tree bc_expand_address PROTO((tree));
198 void bc_expand_constructor PROTO((tree));
199 void bc_adjust_stack PROTO((int));
200 tree bc_canonicalize_array_ref PROTO((tree));
201 void bc_load_memory PROTO((tree, tree));
202 void bc_load_externaddr PROTO((rtx));
203 void bc_load_externaddr_id PROTO((tree, int));
204 void bc_load_localaddr PROTO((rtx));
205 void bc_load_parmaddr PROTO((rtx));
206 static void preexpand_calls PROTO((tree));
207 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
208 void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
209 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
210 static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
211 static void do_jump_for_compare PROTO((rtx, rtx, rtx));
212 static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
213 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
214 static tree defer_cleanups_to PROTO((tree));
215 extern tree truthvalue_conversion PROTO((tree));
217 /* Record for each mode whether we can move a register directly to or
218 from an object of that mode in memory. If we can't, we won't try
219 to use that mode directly when accessing a field of that mode. */
221 static char direct_load[NUM_MACHINE_MODES];
222 static char direct_store[NUM_MACHINE_MODES];
224 /* MOVE_RATIO is the number of move instructions that is better than
228 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
231 /* A value of around 6 would minimize code size; infinity would minimize
233 #define MOVE_RATIO 15
237 /* This array records the insn_code of insns to perform block moves. */
238 enum insn_code movstr_optab[NUM_MACHINE_MODES];
240 /* This array records the insn_code of insns to perform block clears. */
241 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
243 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
245 #ifndef SLOW_UNALIGNED_ACCESS
246 #define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
249 /* Register mappings for target machines without register windows. */
250 #ifndef INCOMING_REGNO
251 #define INCOMING_REGNO(OUT) (OUT)
253 #ifndef OUTGOING_REGNO
254 #define OUTGOING_REGNO(IN) (IN)
257 /* Maps used to convert modes to const, load, and store bytecodes. */
258 enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
259 enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
260 enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
262 /* Initialize maps used to convert modes to const, load, and store
266 bc_init_mode_to_opcode_maps ()
270 for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
271 mode_to_const_map[mode] =
272 mode_to_load_map[mode] =
273 mode_to_store_map[mode] = neverneverland;
275 #define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
276 mode_to_const_map[(int) SYM] = CONST; \
277 mode_to_load_map[(int) SYM] = LOAD; \
278 mode_to_store_map[(int) SYM] = STORE;
280 #include "modemap.def"
284 /* This is run once per compilation to set up which modes can be used
285 directly in memory and to initialize the block move optab. */
291 enum machine_mode mode;
292 /* Try indexing by frame ptr and try by stack ptr.
293 It is known that on the Convex the stack ptr isn't a valid index.
294 With luck, one or the other is valid on any machine. */
295 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
296 rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
299 insn = emit_insn (gen_rtx (SET, 0, 0));
300 pat = PATTERN (insn);
302 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
303 mode = (enum machine_mode) ((int) mode + 1))
309 direct_load[(int) mode] = direct_store[(int) mode] = 0;
310 PUT_MODE (mem, mode);
311 PUT_MODE (mem1, mode);
313 /* See if there is some register that can be used in this mode and
314 directly loaded or stored from memory. */
316 if (mode != VOIDmode && mode != BLKmode)
317 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
318 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
321 if (! HARD_REGNO_MODE_OK (regno, mode))
324 reg = gen_rtx (REG, mode, regno);
327 SET_DEST (pat) = reg;
328 if (recog (pat, insn, &num_clobbers) >= 0)
329 direct_load[(int) mode] = 1;
331 SET_SRC (pat) = mem1;
332 SET_DEST (pat) = reg;
333 if (recog (pat, insn, &num_clobbers) >= 0)
334 direct_load[(int) mode] = 1;
337 SET_DEST (pat) = mem;
338 if (recog (pat, insn, &num_clobbers) >= 0)
339 direct_store[(int) mode] = 1;
342 SET_DEST (pat) = mem1;
343 if (recog (pat, insn, &num_clobbers) >= 0)
344 direct_store[(int) mode] = 1;
351 /* This is run at the start of compiling a function. */
358 pending_stack_adjust = 0;
359 inhibit_defer_pop = 0;
360 cleanups_this_call = 0;
362 apply_args_value = 0;
366 /* Save all variables describing the current status into the structure *P.
367 This is used before starting a nested function. */
373 /* Instead of saving the postincrement queue, empty it. */
376 p->pending_stack_adjust = pending_stack_adjust;
377 p->inhibit_defer_pop = inhibit_defer_pop;
378 p->cleanups_this_call = cleanups_this_call;
379 p->saveregs_value = saveregs_value;
380 p->apply_args_value = apply_args_value;
381 p->forced_labels = forced_labels;
383 pending_stack_adjust = 0;
384 inhibit_defer_pop = 0;
385 cleanups_this_call = 0;
387 apply_args_value = 0;
391 /* Restore all variables describing the current status from the structure *P.
392 This is used after a nested function. */
395 restore_expr_status (p)
398 pending_stack_adjust = p->pending_stack_adjust;
399 inhibit_defer_pop = p->inhibit_defer_pop;
400 cleanups_this_call = p->cleanups_this_call;
401 saveregs_value = p->saveregs_value;
402 apply_args_value = p->apply_args_value;
403 forced_labels = p->forced_labels;
406 /* Manage the queue of increment instructions to be output
407 for POSTINCREMENT_EXPR expressions, etc. */
409 static rtx pending_chain;
411 /* Queue up to increment (or change) VAR later. BODY says how:
412 BODY should be the same thing you would pass to emit_insn
413 to increment right away. It will go to emit_insn later on.
415 The value is a QUEUED expression to be used in place of VAR
416 where you want to guarantee the pre-incrementation value of VAR. */
419 enqueue_insn (var, body)
422 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
423 var, NULL_RTX, NULL_RTX, body, pending_chain);
424 return pending_chain;
427 /* Use protect_from_queue to convert a QUEUED expression
428 into something that you can put immediately into an instruction.
429 If the queued incrementation has not happened yet,
430 protect_from_queue returns the variable itself.
431 If the incrementation has happened, protect_from_queue returns a temp
432 that contains a copy of the old value of the variable.
434 Any time an rtx which might possibly be a QUEUED is to be put
435 into an instruction, it must be passed through protect_from_queue first.
436 QUEUED expressions are not meaningful in instructions.
438 Do not pass a value through protect_from_queue and then hold
439 on to it for a while before putting it in an instruction!
440 If the queue is flushed in between, incorrect code will result. */
443 protect_from_queue (x, modify)
447 register RTX_CODE code = GET_CODE (x);
449 #if 0 /* A QUEUED can hang around after the queue is forced out. */
450 /* Shortcut for most common case. */
451 if (pending_chain == 0)
457 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
458 use of autoincrement. Make a copy of the contents of the memory
459 location rather than a copy of the address, but not if the value is
460 of mode BLKmode. Don't modify X in place since it might be
462 if (code == MEM && GET_MODE (x) != BLKmode
463 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
465 register rtx y = XEXP (x, 0);
466 register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));
468 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
469 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
470 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
474 register rtx temp = gen_reg_rtx (GET_MODE (new));
475 emit_insn_before (gen_move_insn (temp, new),
481 /* Otherwise, recursively protect the subexpressions of all
482 the kinds of rtx's that can contain a QUEUED. */
485 rtx tem = protect_from_queue (XEXP (x, 0), 0);
486 if (tem != XEXP (x, 0))
492 else if (code == PLUS || code == MULT)
494 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
495 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
496 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
505 /* If the increment has not happened, use the variable itself. */
506 if (QUEUED_INSN (x) == 0)
507 return QUEUED_VAR (x);
508 /* If the increment has happened and a pre-increment copy exists,
510 if (QUEUED_COPY (x) != 0)
511 return QUEUED_COPY (x);
512 /* The increment has happened but we haven't set up a pre-increment copy.
513 Set one up now, and use it. */
514 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
515 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
517 return QUEUED_COPY (x);
520 /* Return nonzero if X contains a QUEUED expression:
521 if it contains anything that will be altered by a queued increment.
522 We handle only combinations of MEM, PLUS, MINUS and MULT operators
523 since memory addresses generally contain only those. */
529 register enum rtx_code code = GET_CODE (x);
535 return queued_subexp_p (XEXP (x, 0));
539 return queued_subexp_p (XEXP (x, 0))
540 || queued_subexp_p (XEXP (x, 1));
545 /* Perform all the pending incrementations. */
551 while (p = pending_chain)
553 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
554 pending_chain = QUEUED_NEXT (p);
565 /* Copy data from FROM to TO, where the machine modes are not the same.
566 Both modes may be integer, or both may be floating.
567 UNSIGNEDP should be nonzero if FROM is an unsigned type.
568 This causes zero-extension instead of sign-extension. */
571 convert_move (to, from, unsignedp)
572 register rtx to, from;
575 enum machine_mode to_mode = GET_MODE (to);
576 enum machine_mode from_mode = GET_MODE (from);
577 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
578 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
582 /* rtx code for making an equivalent value. */
583 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
585 to = protect_from_queue (to, 1);
586 from = protect_from_queue (from, 0);
588 if (to_real != from_real)
591 /* If FROM is a SUBREG that indicates that we have already done at least
592 the required extension, strip it. We don't handle such SUBREGs as
595 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
596 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
597 >= GET_MODE_SIZE (to_mode))
598 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
599 from = gen_lowpart (to_mode, from), from_mode = to_mode;
601 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
604 if (to_mode == from_mode
605 || (from_mode == VOIDmode && CONSTANT_P (from)))
607 emit_move_insn (to, from);
615 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
617 /* Try converting directly if the insn is supported. */
618 if ((code = can_extend_p (to_mode, from_mode, 0))
621 emit_unop_insn (code, to, from, UNKNOWN);
626 #ifdef HAVE_trunchfqf2
627 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
629 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
633 #ifdef HAVE_truncsfqf2
634 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
636 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
640 #ifdef HAVE_truncdfqf2
641 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
643 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
647 #ifdef HAVE_truncxfqf2
648 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
650 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
654 #ifdef HAVE_trunctfqf2
655 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
657 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
662 #ifdef HAVE_trunctqfhf2
663 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
665 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
669 #ifdef HAVE_truncsfhf2
670 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
672 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
676 #ifdef HAVE_truncdfhf2
677 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
679 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
683 #ifdef HAVE_truncxfhf2
684 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
686 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
690 #ifdef HAVE_trunctfhf2
691 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
693 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
698 #ifdef HAVE_truncsftqf2
699 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
701 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
705 #ifdef HAVE_truncdftqf2
706 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
708 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
712 #ifdef HAVE_truncxftqf2
713 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
715 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
719 #ifdef HAVE_trunctftqf2
720 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
722 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
727 #ifdef HAVE_truncdfsf2
728 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
730 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
734 #ifdef HAVE_truncxfsf2
735 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
737 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
741 #ifdef HAVE_trunctfsf2
742 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
744 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
748 #ifdef HAVE_truncxfdf2
749 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
751 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
755 #ifdef HAVE_trunctfdf2
756 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
758 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
770 libcall = extendsfdf2_libfunc;
774 libcall = extendsfxf2_libfunc;
778 libcall = extendsftf2_libfunc;
787 libcall = truncdfsf2_libfunc;
791 libcall = extenddfxf2_libfunc;
795 libcall = extenddftf2_libfunc;
804 libcall = truncxfsf2_libfunc;
808 libcall = truncxfdf2_libfunc;
817 libcall = trunctfsf2_libfunc;
821 libcall = trunctfdf2_libfunc;
827 if (libcall == (rtx) 0)
828 /* This conversion is not implemented yet. */
831 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
833 emit_move_insn (to, value);
837 /* Now both modes are integers. */
839 /* Handle expanding beyond a word. */
840 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
841 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
848 enum machine_mode lowpart_mode;
849 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
851 /* Try converting directly if the insn is supported. */
852 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
855 /* If FROM is a SUBREG, put it into a register. Do this
856 so that we always generate the same set of insns for
857 better cse'ing; if an intermediate assignment occurred,
858 we won't be doing the operation directly on the SUBREG. */
859 if (optimize > 0 && GET_CODE (from) == SUBREG)
860 from = force_reg (from_mode, from);
861 emit_unop_insn (code, to, from, equiv_code);
864 /* Next, try converting via full word. */
865 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
866 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
867 != CODE_FOR_nothing))
869 if (GET_CODE (to) == REG)
870 emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
871 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
872 emit_unop_insn (code, to,
873 gen_lowpart (word_mode, to), equiv_code);
877 /* No special multiword conversion insn; do it by hand. */
880 /* Since we will turn this into a no conflict block, we must ensure
881 that the source does not overlap the target. */
883 if (reg_overlap_mentioned_p (to, from))
884 from = force_reg (from_mode, from);
886 /* Get a copy of FROM widened to a word, if necessary. */
887 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
888 lowpart_mode = word_mode;
890 lowpart_mode = from_mode;
892 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
894 lowpart = gen_lowpart (lowpart_mode, to);
895 emit_move_insn (lowpart, lowfrom);
897 /* Compute the value to put in each remaining word. */
899 fill_value = const0_rtx;
904 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
905 && STORE_FLAG_VALUE == -1)
907 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
909 fill_value = gen_reg_rtx (word_mode);
910 emit_insn (gen_slt (fill_value));
916 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
917 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
919 fill_value = convert_to_mode (word_mode, fill_value, 1);
923 /* Fill the remaining words. */
924 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
926 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
927 rtx subword = operand_subword (to, index, 1, to_mode);
932 if (fill_value != subword)
933 emit_move_insn (subword, fill_value);
936 insns = get_insns ();
939 emit_no_conflict_block (insns, to, from, NULL_RTX,
940 gen_rtx (equiv_code, to_mode, copy_rtx (from)));
944 /* Truncating multi-word to a word or less. */
945 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
946 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
948 if (!((GET_CODE (from) == MEM
949 && ! MEM_VOLATILE_P (from)
950 && direct_load[(int) to_mode]
951 && ! mode_dependent_address_p (XEXP (from, 0)))
952 || GET_CODE (from) == REG
953 || GET_CODE (from) == SUBREG))
954 from = force_reg (from_mode, from);
955 convert_move (to, gen_lowpart (word_mode, from), 0);
959 /* Handle pointer conversion */ /* SPEE 900220 */
960 if (to_mode == PSImode)
962 if (from_mode != SImode)
963 from = convert_to_mode (SImode, from, unsignedp);
965 #ifdef HAVE_truncsipsi2
966 if (HAVE_truncsipsi2)
968 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
971 #endif /* HAVE_truncsipsi2 */
975 if (from_mode == PSImode)
977 if (to_mode != SImode)
979 from = convert_to_mode (SImode, from, unsignedp);
984 #ifdef HAVE_extendpsisi2
985 if (HAVE_extendpsisi2)
987 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
990 #endif /* HAVE_extendpsisi2 */
995 if (to_mode == PDImode)
997 if (from_mode != DImode)
998 from = convert_to_mode (DImode, from, unsignedp);
1000 #ifdef HAVE_truncdipdi2
1001 if (HAVE_truncdipdi2)
1003 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1006 #endif /* HAVE_truncdipdi2 */
1010 if (from_mode == PDImode)
1012 if (to_mode != DImode)
1014 from = convert_to_mode (DImode, from, unsignedp);
1019 #ifdef HAVE_extendpdidi2
1020 if (HAVE_extendpdidi2)
1022 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1025 #endif /* HAVE_extendpdidi2 */
1030 /* Now follow all the conversions between integers
1031 no more than a word long. */
1033 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1034 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1035 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1036 GET_MODE_BITSIZE (from_mode)))
1038 if (!((GET_CODE (from) == MEM
1039 && ! MEM_VOLATILE_P (from)
1040 && direct_load[(int) to_mode]
1041 && ! mode_dependent_address_p (XEXP (from, 0)))
1042 || GET_CODE (from) == REG
1043 || GET_CODE (from) == SUBREG))
1044 from = force_reg (from_mode, from);
1045 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1046 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1047 from = copy_to_reg (from);
1048 emit_move_insn (to, gen_lowpart (to_mode, from));
1052 /* Handle extension. */
1053 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1055 /* Convert directly if that works. */
1056 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1057 != CODE_FOR_nothing)
1059 emit_unop_insn (code, to, from, equiv_code);
1064 enum machine_mode intermediate;
1066 /* Search for a mode to convert via. */
1067 for (intermediate = from_mode; intermediate != VOIDmode;
1068 intermediate = GET_MODE_WIDER_MODE (intermediate))
1069 if (((can_extend_p (to_mode, intermediate, unsignedp)
1070 != CODE_FOR_nothing)
1071 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1072 && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
1073 && (can_extend_p (intermediate, from_mode, unsignedp)
1074 != CODE_FOR_nothing))
1076 convert_move (to, convert_to_mode (intermediate, from,
1077 unsignedp), unsignedp);
1081 /* No suitable intermediate mode. */
1086 /* Support special truncate insns for certain modes. */
1088 if (from_mode == DImode && to_mode == SImode)
1090 #ifdef HAVE_truncdisi2
1091 if (HAVE_truncdisi2)
1093 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1097 convert_move (to, force_reg (from_mode, from), unsignedp);
1101 if (from_mode == DImode && to_mode == HImode)
1103 #ifdef HAVE_truncdihi2
1104 if (HAVE_truncdihi2)
1106 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1110 convert_move (to, force_reg (from_mode, from), unsignedp);
1114 if (from_mode == DImode && to_mode == QImode)
1116 #ifdef HAVE_truncdiqi2
1117 if (HAVE_truncdiqi2)
1119 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1123 convert_move (to, force_reg (from_mode, from), unsignedp);
1127 if (from_mode == SImode && to_mode == HImode)
1129 #ifdef HAVE_truncsihi2
1130 if (HAVE_truncsihi2)
1132 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1136 convert_move (to, force_reg (from_mode, from), unsignedp);
1140 if (from_mode == SImode && to_mode == QImode)
1142 #ifdef HAVE_truncsiqi2
1143 if (HAVE_truncsiqi2)
1145 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1149 convert_move (to, force_reg (from_mode, from), unsignedp);
1153 if (from_mode == HImode && to_mode == QImode)
1155 #ifdef HAVE_trunchiqi2
1156 if (HAVE_trunchiqi2)
1158 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1162 convert_move (to, force_reg (from_mode, from), unsignedp);
1166 if (from_mode == TImode && to_mode == DImode)
1168 #ifdef HAVE_trunctidi2
1169 if (HAVE_trunctidi2)
1171 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1175 convert_move (to, force_reg (from_mode, from), unsignedp);
1179 if (from_mode == TImode && to_mode == SImode)
1181 #ifdef HAVE_trunctisi2
1182 if (HAVE_trunctisi2)
1184 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1188 convert_move (to, force_reg (from_mode, from), unsignedp);
1192 if (from_mode == TImode && to_mode == HImode)
1194 #ifdef HAVE_trunctihi2
1195 if (HAVE_trunctihi2)
1197 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1201 convert_move (to, force_reg (from_mode, from), unsignedp);
1205 if (from_mode == TImode && to_mode == QImode)
1207 #ifdef HAVE_trunctiqi2
1208 if (HAVE_trunctiqi2)
1210 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1214 convert_move (to, force_reg (from_mode, from), unsignedp);
1218 /* Handle truncation of volatile memrefs, and so on;
1219 the things that couldn't be truncated directly,
1220 and for which there was no special instruction. */
1221 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1223 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1224 emit_move_insn (to, temp);
1228 /* Mode combination is not recognized. */
1232 /* Return an rtx for a value that would result
1233 from converting X to mode MODE.
1234 Both X and MODE may be floating, or both integer.
1235 UNSIGNEDP is nonzero if X is an unsigned value.
1236 This can be done by referring to a part of X in place
1237 or by copying to a new temporary with conversion.
1239 This function *must not* call protect_from_queue
1240 except when putting X into an insn (in which case convert_move does it). */
1243 convert_to_mode (mode, x, unsignedp)
1244 enum machine_mode mode;
1248 return convert_modes (mode, VOIDmode, x, unsignedp);
1251 /* Return an rtx for a value that would result
1252 from converting X from mode OLDMODE to mode MODE.
1253 Both modes may be floating, or both integer.
1254 UNSIGNEDP is nonzero if X is an unsigned value.
1256 This can be done by referring to a part of X in place
1257 or by copying to a new temporary with conversion.
1259 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1261 This function *must not* call protect_from_queue
1262 except when putting X into an insn (in which case convert_move does it). */
1265 convert_modes (mode, oldmode, x, unsignedp)
1266 enum machine_mode mode, oldmode;
1272 /* If FROM is a SUBREG that indicates that we have already done at least
1273 the required extension, strip it. */
1275 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1276 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1277 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1278 x = gen_lowpart (mode, x);
1280 if (GET_MODE (x) != VOIDmode)
1281 oldmode = GET_MODE (x);
1283 if (mode == oldmode)
1286 /* There is one case that we must handle specially: If we are converting
1287 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1288 we are to interpret the constant as unsigned, gen_lowpart will do
1289 the wrong if the constant appears negative. What we want to do is
1290 make the high-order word of the constant zero, not all ones. */
1292 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1293 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1294 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1296 HOST_WIDE_INT val = INTVAL (x);
1298 if (oldmode != VOIDmode
1299 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1301 int width = GET_MODE_BITSIZE (oldmode);
1303 /* We need to zero extend VAL. */
1304 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1307 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1310 /* We can do this with a gen_lowpart if both desired and current modes
1311 are integer, and this is either a constant integer, a register, or a
1312 non-volatile MEM. Except for the constant case where MODE is no
1313 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1315 if ((GET_CODE (x) == CONST_INT
1316 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1317 || (GET_MODE_CLASS (mode) == MODE_INT
1318 && GET_MODE_CLASS (oldmode) == MODE_INT
1319 && (GET_CODE (x) == CONST_DOUBLE
1320 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1321 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1322 && direct_load[(int) mode])
1323 || (GET_CODE (x) == REG
1324 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1325 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1327 /* ?? If we don't know OLDMODE, we have to assume here that
1328 X does not need sign- or zero-extension. This may not be
1329 the case, but it's the best we can do. */
1330 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1331 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1333 HOST_WIDE_INT val = INTVAL (x);
1334 int width = GET_MODE_BITSIZE (oldmode);
1336 /* We must sign or zero-extend in this case. Start by
1337 zero-extending, then sign extend if we need to. */
1338 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1340 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1341 val |= (HOST_WIDE_INT) (-1) << width;
1343 return GEN_INT (val);
1346 return gen_lowpart (mode, x);
1349 temp = gen_reg_rtx (mode);
1350 convert_move (temp, x, unsignedp);
1354 /* Generate several move instructions to copy LEN bytes
1355 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1356 The caller must pass FROM and TO
1357 through protect_from_queue before calling.
1358 ALIGN (in bytes) is maximum alignment we can assume. */
1361 move_by_pieces (to, from, len, align)
1365 struct move_by_pieces data;
1366 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1367 int max_size = MOVE_MAX + 1;
1370 data.to_addr = to_addr;
1371 data.from_addr = from_addr;
1375 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1376 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1378 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1379 || GET_CODE (from_addr) == POST_INC
1380 || GET_CODE (from_addr) == POST_DEC);
1382 data.explicit_inc_from = 0;
1383 data.explicit_inc_to = 0;
1385 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1386 if (data.reverse) data.offset = len;
1389 data.to_struct = MEM_IN_STRUCT_P (to);
1390 data.from_struct = MEM_IN_STRUCT_P (from);
1392 /* If copying requires more than two move insns,
1393 copy addresses to registers (to make displacements shorter)
1394 and use post-increment if available. */
1395 if (!(data.autinc_from && data.autinc_to)
1396 && move_by_pieces_ninsns (len, align) > 2)
1398 #ifdef HAVE_PRE_DECREMENT
1399 if (data.reverse && ! data.autinc_from)
1401 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1402 data.autinc_from = 1;
1403 data.explicit_inc_from = -1;
1406 #ifdef HAVE_POST_INCREMENT
1407 if (! data.autinc_from)
1409 data.from_addr = copy_addr_to_reg (from_addr);
1410 data.autinc_from = 1;
1411 data.explicit_inc_from = 1;
1414 if (!data.autinc_from && CONSTANT_P (from_addr))
1415 data.from_addr = copy_addr_to_reg (from_addr);
1416 #ifdef HAVE_PRE_DECREMENT
1417 if (data.reverse && ! data.autinc_to)
1419 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1421 data.explicit_inc_to = -1;
1424 #ifdef HAVE_POST_INCREMENT
1425 if (! data.reverse && ! data.autinc_to)
1427 data.to_addr = copy_addr_to_reg (to_addr);
1429 data.explicit_inc_to = 1;
1432 if (!data.autinc_to && CONSTANT_P (to_addr))
1433 data.to_addr = copy_addr_to_reg (to_addr);
1436 if (! SLOW_UNALIGNED_ACCESS
1437 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1440 /* First move what we can in the largest integer mode, then go to
1441 successively smaller modes. */
1443 while (max_size > 1)
1445 enum machine_mode mode = VOIDmode, tmode;
1446 enum insn_code icode;
1448 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1449 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1450 if (GET_MODE_SIZE (tmode) < max_size)
1453 if (mode == VOIDmode)
1456 icode = mov_optab->handlers[(int) mode].insn_code;
1457 if (icode != CODE_FOR_nothing
1458 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1459 GET_MODE_SIZE (mode)))
1460 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1462 max_size = GET_MODE_SIZE (mode);
1465 /* The code above should have handled everything. */
1470 /* Return number of insns required to move L bytes by pieces.
1471 ALIGN (in bytes) is maximum alignment we can assume. */
1474 move_by_pieces_ninsns (l, align)
1478 register int n_insns = 0;
1479 int max_size = MOVE_MAX + 1;
1481 if (! SLOW_UNALIGNED_ACCESS
1482 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1485 while (max_size > 1)
1487 enum machine_mode mode = VOIDmode, tmode;
1488 enum insn_code icode;
1490 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1491 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1492 if (GET_MODE_SIZE (tmode) < max_size)
1495 if (mode == VOIDmode)
1498 icode = mov_optab->handlers[(int) mode].insn_code;
1499 if (icode != CODE_FOR_nothing
1500 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1501 GET_MODE_SIZE (mode)))
1502 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1504 max_size = GET_MODE_SIZE (mode);
1510 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1511 with move instructions for mode MODE. GENFUN is the gen_... function
1512 to make a move insn for that mode. DATA has all the other info. */
1515 move_by_pieces_1 (genfun, mode, data)
1517 enum machine_mode mode;
1518 struct move_by_pieces *data;
1520 register int size = GET_MODE_SIZE (mode);
1521 register rtx to1, from1;
1523 while (data->len >= size)
1525 if (data->reverse) data->offset -= size;
1527 to1 = (data->autinc_to
1528 ? gen_rtx (MEM, mode, data->to_addr)
1529 : change_address (data->to, mode,
1530 plus_constant (data->to_addr, data->offset)));
1531 MEM_IN_STRUCT_P (to1) = data->to_struct;
1534 ? gen_rtx (MEM, mode, data->from_addr)
1535 : change_address (data->from, mode,
1536 plus_constant (data->from_addr, data->offset)));
1537 MEM_IN_STRUCT_P (from1) = data->from_struct;
1539 #ifdef HAVE_PRE_DECREMENT
1540 if (data->explicit_inc_to < 0)
1541 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1542 if (data->explicit_inc_from < 0)
1543 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1546 emit_insn ((*genfun) (to1, from1));
1547 #ifdef HAVE_POST_INCREMENT
1548 if (data->explicit_inc_to > 0)
1549 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1550 if (data->explicit_inc_from > 0)
1551 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1554 if (! data->reverse) data->offset += size;
1560 /* Emit code to move a block Y to a block X.
1561 This may be done with string-move instructions,
1562 with multiple scalar move instructions, or with a library call.
1564 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1566 SIZE is an rtx that says how long they are.
1567 ALIGN is the maximum alignment we can assume they have,
1568 measured in bytes. */
1571 emit_block_move (x, y, size, align)
1576 if (GET_MODE (x) != BLKmode)
1579 if (GET_MODE (y) != BLKmode)
1582 x = protect_from_queue (x, 1);
1583 y = protect_from_queue (y, 0);
1584 size = protect_from_queue (size, 0);
1586 if (GET_CODE (x) != MEM)
1588 if (GET_CODE (y) != MEM)
1593 if (GET_CODE (size) == CONST_INT
1594 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1595 move_by_pieces (x, y, INTVAL (size), align);
1598 /* Try the most limited insn first, because there's no point
1599 including more than one in the machine description unless
1600 the more limited one has some advantage. */
1602 rtx opalign = GEN_INT (align);
1603 enum machine_mode mode;
1605 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1606 mode = GET_MODE_WIDER_MODE (mode))
1608 enum insn_code code = movstr_optab[(int) mode];
1610 if (code != CODE_FOR_nothing
1611 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1612 here because if SIZE is less than the mode mask, as it is
1613 returned by the macro, it will definitely be less than the
1614 actual mode mask. */
1615 && ((GET_CODE (size) == CONST_INT
1616 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1617 <= GET_MODE_MASK (mode)))
1618 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1619 && (insn_operand_predicate[(int) code][0] == 0
1620 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1621 && (insn_operand_predicate[(int) code][1] == 0
1622 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1623 && (insn_operand_predicate[(int) code][3] == 0
1624 || (*insn_operand_predicate[(int) code][3]) (opalign,
1628 rtx last = get_last_insn ();
1631 op2 = convert_to_mode (mode, size, 1);
1632 if (insn_operand_predicate[(int) code][2] != 0
1633 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1634 op2 = copy_to_mode_reg (mode, op2);
1636 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1643 delete_insns_since (last);
1647 #ifdef TARGET_MEM_FUNCTIONS
1648 emit_library_call (memcpy_libfunc, 0,
1649 VOIDmode, 3, XEXP (x, 0), Pmode,
1651 convert_to_mode (TYPE_MODE (sizetype), size,
1652 TREE_UNSIGNED (sizetype)),
1653 TYPE_MODE (sizetype));
1655 emit_library_call (bcopy_libfunc, 0,
1656 VOIDmode, 3, XEXP (y, 0), Pmode,
1658 convert_to_mode (TYPE_MODE (integer_type_node), size,
1659 TREE_UNSIGNED (integer_type_node)),
1660 TYPE_MODE (integer_type_node));
1665 /* Copy all or part of a value X into registers starting at REGNO.
1666 The number of registers to be filled is NREGS. */
1669 move_block_to_reg (regno, x, nregs, mode)
1673 enum machine_mode mode;
1681 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1682 x = validize_mem (force_const_mem (mode, x));
1684 /* See if the machine can do this with a load multiple insn. */
1685 #ifdef HAVE_load_multiple
1686 if (HAVE_load_multiple)
1688 last = get_last_insn ();
1689 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1697 delete_insns_since (last);
1701 for (i = 0; i < nregs; i++)
1702 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1703 operand_subword_force (x, i, mode));
1706 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1707 The number of registers to be filled is NREGS. SIZE indicates the number
1708 of bytes in the object X. */
1712 move_block_from_reg (regno, x, nregs, size)
1721 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1722 to the left before storing to memory. */
1723 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1725 rtx tem = operand_subword (x, 0, 1, BLKmode);
1731 shift = expand_shift (LSHIFT_EXPR, word_mode,
1732 gen_rtx (REG, word_mode, regno),
1733 build_int_2 ((UNITS_PER_WORD - size)
1734 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1735 emit_move_insn (tem, shift);
1739 /* See if the machine can do this with a store multiple insn. */
1740 #ifdef HAVE_store_multiple
1741 if (HAVE_store_multiple)
1743 last = get_last_insn ();
1744 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
1752 delete_insns_since (last);
1756 for (i = 0; i < nregs; i++)
1758 rtx tem = operand_subword (x, i, 1, BLKmode);
1763 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1767 /* Emit code to move a block Y to a block X, where X is non-consecutive
1768 registers represented by a PARALLEL. */
1771 emit_group_load (x, y)
1774 rtx target_reg, source;
1777 if (GET_CODE (x) != PARALLEL)
1780 /* Check for a NULL entry, used to indicate that the parameter goes
1781 both on the stack and in registers. */
1782 if (XEXP (XVECEXP (x, 0, 0), 0))
1787 for (; i < XVECLEN (x, 0); i++)
1789 rtx element = XVECEXP (x, 0, i);
1791 target_reg = XEXP (element, 0);
1793 if (GET_CODE (y) == MEM)
1794 source = change_address (y, GET_MODE (target_reg),
1795 plus_constant (XEXP (y, 0),
1796 INTVAL (XEXP (element, 1))));
1797 else if (XEXP (element, 1) == const0_rtx)
1799 if (GET_MODE (target_reg) == GET_MODE (y))
1801 /* Allow for the target_reg to be smaller than the input register
1802 to allow for AIX with 4 DF arguments after a single SI arg. The
1803 last DF argument will only load 1 word into the integer registers,
1804 but load a DF value into the float registers. */
1805 else if (GET_MODE_SIZE (GET_MODE (target_reg))
1806 <= GET_MODE_SIZE (GET_MODE (y)))
1807 source = gen_rtx (SUBREG, GET_MODE (target_reg), y, 0);
1814 emit_move_insn (target_reg, source);
1818 /* Emit code to move a block Y to a block X, where Y is non-consecutive
1819 registers represented by a PARALLEL. */
1822 emit_group_store (x, y)
1825 rtx source_reg, target;
1828 if (GET_CODE (y) != PARALLEL)
1831 /* Check for a NULL entry, used to indicate that the parameter goes
1832 both on the stack and in registers. */
1833 if (XEXP (XVECEXP (y, 0, 0), 0))
1838 for (; i < XVECLEN (y, 0); i++)
1840 rtx element = XVECEXP (y, 0, i);
1842 source_reg = XEXP (element, 0);
1844 if (GET_CODE (x) == MEM)
1845 target = change_address (x, GET_MODE (source_reg),
1846 plus_constant (XEXP (x, 0),
1847 INTVAL (XEXP (element, 1))));
1848 else if (XEXP (element, 1) == const0_rtx)
1851 if (GET_MODE (target) != GET_MODE (source_reg))
1852 target = gen_lowpart (GET_MODE (source_reg), target);
1857 emit_move_insn (target, source_reg);
1861 /* Add a USE expression for REG to the (possibly empty) list pointed
1862 to by CALL_FUSAGE. REG must denote a hard register. */
1865 use_reg (call_fusage, reg)
1866 rtx *call_fusage, reg;
1868 if (GET_CODE (reg) != REG
1869 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
1873 = gen_rtx (EXPR_LIST, VOIDmode,
1874 gen_rtx (USE, VOIDmode, reg), *call_fusage);
1877 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1878 starting at REGNO. All of these registers must be hard registers. */
1881 use_regs (call_fusage, regno, nregs)
1888 if (regno + nregs > FIRST_PSEUDO_REGISTER)
1891 for (i = 0; i < nregs; i++)
1892 use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
1895 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
1896 PARALLEL REGS. This is for calls that pass values in multiple
1897 non-contiguous locations. The Irix 6 ABI has examples of this. */
1900 use_group_regs (call_fusage, regs)
1906 /* Check for a NULL entry, used to indicate that the parameter goes
1907 both on the stack and in registers. */
1908 if (XEXP (XVECEXP (regs, 0, 0), 0))
1913 for (; i < XVECLEN (regs, 0); i++)
1914 use_reg (call_fusage, XEXP (XVECEXP (regs, 0, i), 0));
1917 /* Generate several move instructions to clear LEN bytes of block TO.
1918 (A MEM rtx with BLKmode). The caller must pass TO through
1919 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
1923 clear_by_pieces (to, len, align)
1927 struct clear_by_pieces data;
1928 rtx to_addr = XEXP (to, 0);
1929 int max_size = MOVE_MAX + 1;
1932 data.to_addr = to_addr;
1935 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1936 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1938 data.explicit_inc_to = 0;
1940 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1941 if (data.reverse) data.offset = len;
1944 data.to_struct = MEM_IN_STRUCT_P (to);
1946 /* If copying requires more than two move insns,
1947 copy addresses to registers (to make displacements shorter)
1948 and use post-increment if available. */
1950 && move_by_pieces_ninsns (len, align) > 2)
1952 #ifdef HAVE_PRE_DECREMENT
1953 if (data.reverse && ! data.autinc_to)
1955 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1957 data.explicit_inc_to = -1;
1960 #ifdef HAVE_POST_INCREMENT
1961 if (! data.reverse && ! data.autinc_to)
1963 data.to_addr = copy_addr_to_reg (to_addr);
1965 data.explicit_inc_to = 1;
1968 if (!data.autinc_to && CONSTANT_P (to_addr))
1969 data.to_addr = copy_addr_to_reg (to_addr);
1972 if (! SLOW_UNALIGNED_ACCESS
1973 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1976 /* First move what we can in the largest integer mode, then go to
1977 successively smaller modes. */
1979 while (max_size > 1)
1981 enum machine_mode mode = VOIDmode, tmode;
1982 enum insn_code icode;
1984 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1985 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1986 if (GET_MODE_SIZE (tmode) < max_size)
1989 if (mode == VOIDmode)
1992 icode = mov_optab->handlers[(int) mode].insn_code;
1993 if (icode != CODE_FOR_nothing
1994 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1995 GET_MODE_SIZE (mode)))
1996 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
1998 max_size = GET_MODE_SIZE (mode);
2001 /* The code above should have handled everything. */
2006 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2007 with move instructions for mode MODE. GENFUN is the gen_... function
2008 to make a move insn for that mode. DATA has all the other info. */
2011 clear_by_pieces_1 (genfun, mode, data)
2013 enum machine_mode mode;
2014 struct clear_by_pieces *data;
2016 register int size = GET_MODE_SIZE (mode);
2019 while (data->len >= size)
2021 if (data->reverse) data->offset -= size;
2023 to1 = (data->autinc_to
2024 ? gen_rtx (MEM, mode, data->to_addr)
2025 : change_address (data->to, mode,
2026 plus_constant (data->to_addr, data->offset)));
2027 MEM_IN_STRUCT_P (to1) = data->to_struct;
2029 #ifdef HAVE_PRE_DECREMENT
2030 if (data->explicit_inc_to < 0)
2031 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2034 emit_insn ((*genfun) (to1, const0_rtx));
2035 #ifdef HAVE_POST_INCREMENT
2036 if (data->explicit_inc_to > 0)
2037 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2040 if (! data->reverse) data->offset += size;
2046 /* Write zeros through the storage of OBJECT.
2047 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2048 the maximum alignment we can is has, measured in bytes. */
2051 clear_storage (object, size, align)
2056 if (GET_MODE (object) == BLKmode)
2058 object = protect_from_queue (object, 1);
2059 size = protect_from_queue (size, 0);
2061 if (GET_CODE (size) == CONST_INT
2062 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
2063 clear_by_pieces (object, INTVAL (size), align);
2067 /* Try the most limited insn first, because there's no point
2068 including more than one in the machine description unless
2069 the more limited one has some advantage. */
2071 rtx opalign = GEN_INT (align);
2072 enum machine_mode mode;
2074 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2075 mode = GET_MODE_WIDER_MODE (mode))
2077 enum insn_code code = clrstr_optab[(int) mode];
2079 if (code != CODE_FOR_nothing
2080 /* We don't need MODE to be narrower than
2081 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2082 the mode mask, as it is returned by the macro, it will
2083 definitely be less than the actual mode mask. */
2084 && ((GET_CODE (size) == CONST_INT
2085 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2086 <= GET_MODE_MASK (mode)))
2087 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2088 && (insn_operand_predicate[(int) code][0] == 0
2089 || (*insn_operand_predicate[(int) code][0]) (object,
2091 && (insn_operand_predicate[(int) code][2] == 0
2092 || (*insn_operand_predicate[(int) code][2]) (opalign,
2096 rtx last = get_last_insn ();
2099 op1 = convert_to_mode (mode, size, 1);
2100 if (insn_operand_predicate[(int) code][1] != 0
2101 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2103 op1 = copy_to_mode_reg (mode, op1);
2105 pat = GEN_FCN ((int) code) (object, op1, opalign);
2112 delete_insns_since (last);
2117 #ifdef TARGET_MEM_FUNCTIONS
2118 emit_library_call (memset_libfunc, 0,
2120 XEXP (object, 0), Pmode,
2121 const0_rtx, TYPE_MODE (integer_type_node),
2122 convert_to_mode (TYPE_MODE (sizetype),
2123 size, TREE_UNSIGNED (sizetype)),
2124 TYPE_MODE (sizetype));
2126 emit_library_call (bzero_libfunc, 0,
2128 XEXP (object, 0), Pmode,
2129 convert_to_mode (TYPE_MODE (integer_type_node),
2131 TREE_UNSIGNED (integer_type_node)),
2132 TYPE_MODE (integer_type_node));
2137 emit_move_insn (object, const0_rtx);
2140 /* Generate code to copy Y into X.
2141 Both Y and X must have the same mode, except that
2142 Y can be a constant with VOIDmode.
2143 This mode cannot be BLKmode; use emit_block_move for that.
2145 Return the last instruction emitted. */
2148 emit_move_insn (x, y)
2151 enum machine_mode mode = GET_MODE (x);
2153 x = protect_from_queue (x, 1);
2154 y = protect_from_queue (y, 0);
2156 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2159 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2160 y = force_const_mem (mode, y);
2162 /* If X or Y are memory references, verify that their addresses are valid
2164 if (GET_CODE (x) == MEM
2165 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2166 && ! push_operand (x, GET_MODE (x)))
2168 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2169 x = change_address (x, VOIDmode, XEXP (x, 0));
2171 if (GET_CODE (y) == MEM
2172 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2174 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2175 y = change_address (y, VOIDmode, XEXP (y, 0));
2177 if (mode == BLKmode)
2180 return emit_move_insn_1 (x, y);
2183 /* Low level part of emit_move_insn.
2184 Called just like emit_move_insn, but assumes X and Y
2185 are basically valid. */
2188 emit_move_insn_1 (x, y)
2191 enum machine_mode mode = GET_MODE (x);
2192 enum machine_mode submode;
2193 enum mode_class class = GET_MODE_CLASS (mode);
2196 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2198 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2200 /* Expand complex moves by moving real part and imag part, if possible. */
2201 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2202 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2204 (class == MODE_COMPLEX_INT
2205 ? MODE_INT : MODE_FLOAT),
2207 && (mov_optab->handlers[(int) submode].insn_code
2208 != CODE_FOR_nothing))
2210 /* Don't split destination if it is a stack push. */
2211 int stack = push_operand (x, GET_MODE (x));
2214 /* If this is a stack, push the highpart first, so it
2215 will be in the argument order.
2217 In that case, change_address is used only to convert
2218 the mode, not to change the address. */
2221 /* Note that the real part always precedes the imag part in memory
2222 regardless of machine's endianness. */
2223 #ifdef STACK_GROWS_DOWNWARD
2224 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2225 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2226 gen_imagpart (submode, y)));
2227 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2228 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2229 gen_realpart (submode, y)));
2231 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2232 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2233 gen_realpart (submode, y)));
2234 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2235 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2236 gen_imagpart (submode, y)));
2241 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2242 (gen_realpart (submode, x), gen_realpart (submode, y)));
2243 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2244 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2247 return get_last_insn ();
2250 /* This will handle any multi-word mode that lacks a move_insn pattern.
2251 However, you will get better code if you define such patterns,
2252 even if they must turn into multiple assembler instructions. */
2253 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2258 #ifdef PUSH_ROUNDING
2260 /* If X is a push on the stack, do the push now and replace
2261 X with a reference to the stack pointer. */
2262 if (push_operand (x, GET_MODE (x)))
2264 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2265 x = change_address (x, VOIDmode, stack_pointer_rtx);
2269 /* Show the output dies here. */
2271 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
2274 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2277 rtx xpart = operand_subword (x, i, 1, mode);
2278 rtx ypart = operand_subword (y, i, 1, mode);
2280 /* If we can't get a part of Y, put Y into memory if it is a
2281 constant. Otherwise, force it into a register. If we still
2282 can't get a part of Y, abort. */
2283 if (ypart == 0 && CONSTANT_P (y))
2285 y = force_const_mem (mode, y);
2286 ypart = operand_subword (y, i, 1, mode);
2288 else if (ypart == 0)
2289 ypart = operand_subword_force (y, i, mode);
2291 if (xpart == 0 || ypart == 0)
2294 last_insn = emit_move_insn (xpart, ypart);
2303 /* Pushing data onto the stack. */
2305 /* Push a block of length SIZE (perhaps variable)
2306 and return an rtx to address the beginning of the block.
2307 Note that it is not possible for the value returned to be a QUEUED.
2308 The value may be virtual_outgoing_args_rtx.
2310 EXTRA is the number of bytes of padding to push in addition to SIZE.
2311 BELOW nonzero means this padding comes at low addresses;
2312 otherwise, the padding comes at high addresses. */
2315 push_block (size, extra, below)
2321 size = convert_modes (Pmode, ptr_mode, size, 1);
2322 if (CONSTANT_P (size))
2323 anti_adjust_stack (plus_constant (size, extra));
2324 else if (GET_CODE (size) == REG && extra == 0)
2325 anti_adjust_stack (size);
2328 rtx temp = copy_to_mode_reg (Pmode, size);
2330 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2331 temp, 0, OPTAB_LIB_WIDEN);
2332 anti_adjust_stack (temp);
2335 #ifdef STACK_GROWS_DOWNWARD
2336 temp = virtual_outgoing_args_rtx;
2337 if (extra != 0 && below)
2338 temp = plus_constant (temp, extra);
2340 if (GET_CODE (size) == CONST_INT)
2341 temp = plus_constant (virtual_outgoing_args_rtx,
2342 - INTVAL (size) - (below ? 0 : extra));
2343 else if (extra != 0 && !below)
2344 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2345 negate_rtx (Pmode, plus_constant (size, extra)));
2347 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2348 negate_rtx (Pmode, size));
2351 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2357 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2360 /* Generate code to push X onto the stack, assuming it has mode MODE and
2362 MODE is redundant except when X is a CONST_INT (since they don't
2364 SIZE is an rtx for the size of data to be copied (in bytes),
2365 needed only if X is BLKmode.
2367 ALIGN (in bytes) is maximum alignment we can assume.
2369 If PARTIAL and REG are both nonzero, then copy that many of the first
2370 words of X into registers starting with REG, and push the rest of X.
2371 The amount of space pushed is decreased by PARTIAL words,
2372 rounded *down* to a multiple of PARM_BOUNDARY.
2373 REG must be a hard register in this case.
2374 If REG is zero but PARTIAL is not, take any all others actions for an
2375 argument partially in registers, but do not actually load any
2378 EXTRA is the amount in bytes of extra space to leave next to this arg.
2379 This is ignored if an argument block has already been allocated.
2381 On a machine that lacks real push insns, ARGS_ADDR is the address of
2382 the bottom of the argument block for this call. We use indexing off there
2383 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
2384 argument block has not been preallocated.
2386 ARGS_SO_FAR is the size of args previously pushed for this call. */
2389 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2390 args_addr, args_so_far)
2392 enum machine_mode mode;
2403 enum direction stack_direction
2404 #ifdef STACK_GROWS_DOWNWARD
2410 /* Decide where to pad the argument: `downward' for below,
2411 `upward' for above, or `none' for don't pad it.
2412 Default is below for small data on big-endian machines; else above. */
2413 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2415 /* If we're placing part of X into a register and part of X onto
2416 the stack, indicate that the entire register is clobbered to
2417 keep flow from thinking the unused part of the register is live. */
2418 if (partial > 0 && reg != 0)
2419 emit_insn (gen_rtx (CLOBBER, VOIDmode, reg));
2421 /* Invert direction if stack is post-update. */
2422 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2423 if (where_pad != none)
2424 where_pad = (where_pad == downward ? upward : downward);
2426 xinner = x = protect_from_queue (x, 0);
2428 if (mode == BLKmode)
2430 /* Copy a block into the stack, entirely or partially. */
2433 int used = partial * UNITS_PER_WORD;
2434 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2442 /* USED is now the # of bytes we need not copy to the stack
2443 because registers will take care of them. */
2446 xinner = change_address (xinner, BLKmode,
2447 plus_constant (XEXP (xinner, 0), used));
2449 /* If the partial register-part of the arg counts in its stack size,
2450 skip the part of stack space corresponding to the registers.
2451 Otherwise, start copying to the beginning of the stack space,
2452 by setting SKIP to 0. */
2453 #ifndef REG_PARM_STACK_SPACE
2459 #ifdef PUSH_ROUNDING
2460 /* Do it with several push insns if that doesn't take lots of insns
2461 and if there is no difficulty with push insns that skip bytes
2462 on the stack for alignment purposes. */
2464 && GET_CODE (size) == CONST_INT
2466 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2468 /* Here we avoid the case of a structure whose weak alignment
2469 forces many pushes of a small amount of data,
2470 and such small pushes do rounding that causes trouble. */
2471 && ((! SLOW_UNALIGNED_ACCESS)
2472 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2473 || PUSH_ROUNDING (align) == align)
2474 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2476 /* Push padding now if padding above and stack grows down,
2477 or if padding below and stack grows up.
2478 But if space already allocated, this has already been done. */
2479 if (extra && args_addr == 0
2480 && where_pad != none && where_pad != stack_direction)
2481 anti_adjust_stack (GEN_INT (extra));
2483 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2484 INTVAL (size) - used, align);
2487 #endif /* PUSH_ROUNDING */
2489 /* Otherwise make space on the stack and copy the data
2490 to the address of that space. */
2492 /* Deduct words put into registers from the size we must copy. */
2495 if (GET_CODE (size) == CONST_INT)
2496 size = GEN_INT (INTVAL (size) - used);
2498 size = expand_binop (GET_MODE (size), sub_optab, size,
2499 GEN_INT (used), NULL_RTX, 0,
2503 /* Get the address of the stack space.
2504 In this case, we do not deal with EXTRA separately.
2505 A single stack adjust will do. */
2508 temp = push_block (size, extra, where_pad == downward);
2511 else if (GET_CODE (args_so_far) == CONST_INT)
2512 temp = memory_address (BLKmode,
2513 plus_constant (args_addr,
2514 skip + INTVAL (args_so_far)));
2516 temp = memory_address (BLKmode,
2517 plus_constant (gen_rtx (PLUS, Pmode,
2518 args_addr, args_so_far),
2521 /* TEMP is the address of the block. Copy the data there. */
2522 if (GET_CODE (size) == CONST_INT
2523 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2526 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2527 INTVAL (size), align);
2530 /* Try the most limited insn first, because there's no point
2531 including more than one in the machine description unless
2532 the more limited one has some advantage. */
2533 #ifdef HAVE_movstrqi
2535 && GET_CODE (size) == CONST_INT
2536 && ((unsigned) INTVAL (size)
2537 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2539 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2540 xinner, size, GEN_INT (align));
2548 #ifdef HAVE_movstrhi
2550 && GET_CODE (size) == CONST_INT
2551 && ((unsigned) INTVAL (size)
2552 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2554 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2555 xinner, size, GEN_INT (align));
2563 #ifdef HAVE_movstrsi
2566 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2567 xinner, size, GEN_INT (align));
2575 #ifdef HAVE_movstrdi
2578 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2579 xinner, size, GEN_INT (align));
2588 #ifndef ACCUMULATE_OUTGOING_ARGS
2589 /* If the source is referenced relative to the stack pointer,
2590 copy it to another register to stabilize it. We do not need
2591 to do this if we know that we won't be changing sp. */
2593 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2594 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2595 temp = copy_to_reg (temp);
2598 /* Make inhibit_defer_pop nonzero around the library call
2599 to force it to pop the bcopy-arguments right away. */
2601 #ifdef TARGET_MEM_FUNCTIONS
2602 emit_library_call (memcpy_libfunc, 0,
2603 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2604 convert_to_mode (TYPE_MODE (sizetype),
2605 size, TREE_UNSIGNED (sizetype)),
2606 TYPE_MODE (sizetype));
2608 emit_library_call (bcopy_libfunc, 0,
2609 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2610 convert_to_mode (TYPE_MODE (integer_type_node),
2612 TREE_UNSIGNED (integer_type_node)),
2613 TYPE_MODE (integer_type_node));
2618 else if (partial > 0)
2620 /* Scalar partly in registers. */
2622 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2625 /* # words of start of argument
2626 that we must make space for but need not store. */
2627 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2628 int args_offset = INTVAL (args_so_far);
2631 /* Push padding now if padding above and stack grows down,
2632 or if padding below and stack grows up.
2633 But if space already allocated, this has already been done. */
2634 if (extra && args_addr == 0
2635 && where_pad != none && where_pad != stack_direction)
2636 anti_adjust_stack (GEN_INT (extra));
2638 /* If we make space by pushing it, we might as well push
2639 the real data. Otherwise, we can leave OFFSET nonzero
2640 and leave the space uninitialized. */
2644 /* Now NOT_STACK gets the number of words that we don't need to
2645 allocate on the stack. */
2646 not_stack = partial - offset;
2648 /* If the partial register-part of the arg counts in its stack size,
2649 skip the part of stack space corresponding to the registers.
2650 Otherwise, start copying to the beginning of the stack space,
2651 by setting SKIP to 0. */
2652 #ifndef REG_PARM_STACK_SPACE
2658 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2659 x = validize_mem (force_const_mem (mode, x));
2661 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2662 SUBREGs of such registers are not allowed. */
2663 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2664 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2665 x = copy_to_reg (x);
2667 /* Loop over all the words allocated on the stack for this arg. */
2668 /* We can do it by words, because any scalar bigger than a word
2669 has a size a multiple of a word. */
2670 #ifndef PUSH_ARGS_REVERSED
2671 for (i = not_stack; i < size; i++)
2673 for (i = size - 1; i >= not_stack; i--)
2675 if (i >= not_stack + offset)
2676 emit_push_insn (operand_subword_force (x, i, mode),
2677 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2679 GEN_INT (args_offset + ((i - not_stack + skip)
2680 * UNITS_PER_WORD)));
2686 /* Push padding now if padding above and stack grows down,
2687 or if padding below and stack grows up.
2688 But if space already allocated, this has already been done. */
2689 if (extra && args_addr == 0
2690 && where_pad != none && where_pad != stack_direction)
2691 anti_adjust_stack (GEN_INT (extra));
2693 #ifdef PUSH_ROUNDING
2695 addr = gen_push_operand ();
2698 if (GET_CODE (args_so_far) == CONST_INT)
2700 = memory_address (mode,
2701 plus_constant (args_addr, INTVAL (args_so_far)));
2703 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2706 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2710 /* If part should go in registers, copy that part
2711 into the appropriate registers. Do this now, at the end,
2712 since mem-to-mem copies above may do function calls. */
2713 if (partial > 0 && reg != 0)
2715 /* Handle calls that pass values in multiple non-contiguous locations.
2716 The Irix 6 ABI has examples of this. */
2717 if (GET_CODE (reg) == PARALLEL)
2718 emit_group_load (reg, x);
2720 move_block_to_reg (REGNO (reg), x, partial, mode);
2723 if (extra && args_addr == 0 && where_pad == stack_direction)
2724 anti_adjust_stack (GEN_INT (extra));
2727 /* Expand an assignment that stores the value of FROM into TO.
2728 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2729 (This may contain a QUEUED rtx;
2730 if the value is constant, this rtx is a constant.)
2731 Otherwise, the returned value is NULL_RTX.
2733 SUGGEST_REG is no longer actually used.
2734 It used to mean, copy the value through a register
2735 and return that register, if that is possible.
2736 We now use WANT_VALUE to decide whether to do this. */
2739 expand_assignment (to, from, want_value, suggest_reg)
2744 register rtx to_rtx = 0;
2747 /* Don't crash if the lhs of the assignment was erroneous. */
2749 if (TREE_CODE (to) == ERROR_MARK)
2751 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2752 return want_value ? result : NULL_RTX;
2755 if (output_bytecode)
2757 tree dest_innermost;
2759 bc_expand_expr (from);
2760 bc_emit_instruction (duplicate);
2762 dest_innermost = bc_expand_address (to);
2764 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2765 take care of it here. */
2767 bc_store_memory (TREE_TYPE (to), dest_innermost);
2771 /* Assignment of a structure component needs special treatment
2772 if the structure component's rtx is not simply a MEM.
2773 Assignment of an array element at a constant index, and assignment of
2774 an array element in an unaligned packed structure field, has the same
2777 if (TREE_CODE (to) == COMPONENT_REF
2778 || TREE_CODE (to) == BIT_FIELD_REF
2779 || (TREE_CODE (to) == ARRAY_REF
2780 && ((TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2781 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST)
2782 || (SLOW_UNALIGNED_ACCESS && get_inner_unaligned_p (to)))))
2784 enum machine_mode mode1;
2794 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
2795 &unsignedp, &volatilep, &alignment);
2797 /* If we are going to use store_bit_field and extract_bit_field,
2798 make sure to_rtx will be safe for multiple use. */
2800 if (mode1 == VOIDmode && want_value)
2801 tem = stabilize_reference (tem);
2803 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2806 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2808 if (GET_CODE (to_rtx) != MEM)
2810 to_rtx = change_address (to_rtx, VOIDmode,
2811 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
2812 force_reg (ptr_mode, offset_rtx)));
2816 if (GET_CODE (to_rtx) == MEM)
2818 /* When the offset is zero, to_rtx is the address of the
2819 structure we are storing into, and hence may be shared.
2820 We must make a new MEM before setting the volatile bit. */
2822 to_rtx = change_address (to_rtx, VOIDmode, XEXP (to_rtx, 0));
2823 MEM_VOLATILE_P (to_rtx) = 1;
2825 #if 0 /* This was turned off because, when a field is volatile
2826 in an object which is not volatile, the object may be in a register,
2827 and then we would abort over here. */
2833 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2835 /* Spurious cast makes HPUX compiler happy. */
2836 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2839 /* Required alignment of containing datum. */
2841 int_size_in_bytes (TREE_TYPE (tem)));
2842 preserve_temp_slots (result);
2846 /* If the value is meaningful, convert RESULT to the proper mode.
2847 Otherwise, return nothing. */
2848 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2849 TYPE_MODE (TREE_TYPE (from)),
2851 TREE_UNSIGNED (TREE_TYPE (to)))
2855 /* If the rhs is a function call and its value is not an aggregate,
2856 call the function before we start to compute the lhs.
2857 This is needed for correct code for cases such as
2858 val = setjmp (buf) on machines where reference to val
2859 requires loading up part of an address in a separate insn.
2861 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2862 a promoted variable where the zero- or sign- extension needs to be done.
2863 Handling this in the normal way is safe because no computation is done
2865 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2866 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
2867 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
2872 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2874 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2876 /* Handle calls that return values in multiple non-contiguous locations.
2877 The Irix 6 ABI has examples of this. */
2878 if (GET_CODE (to_rtx) == PARALLEL)
2879 emit_group_load (to_rtx, value);
2880 else if (GET_MODE (to_rtx) == BLKmode)
2881 emit_block_move (to_rtx, value, expr_size (from),
2882 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
2884 emit_move_insn (to_rtx, value);
2885 preserve_temp_slots (to_rtx);
2888 return want_value ? to_rtx : NULL_RTX;
2891 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2892 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2895 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2897 /* Don't move directly into a return register. */
2898 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2903 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2904 emit_move_insn (to_rtx, temp);
2905 preserve_temp_slots (to_rtx);
2908 return want_value ? to_rtx : NULL_RTX;
2911 /* In case we are returning the contents of an object which overlaps
2912 the place the value is being stored, use a safe function when copying
2913 a value through a pointer into a structure value return block. */
2914 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2915 && current_function_returns_struct
2916 && !current_function_returns_pcc_struct)
2921 size = expr_size (from);
2922 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2924 #ifdef TARGET_MEM_FUNCTIONS
2925 emit_library_call (memcpy_libfunc, 0,
2926 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2927 XEXP (from_rtx, 0), Pmode,
2928 convert_to_mode (TYPE_MODE (sizetype),
2929 size, TREE_UNSIGNED (sizetype)),
2930 TYPE_MODE (sizetype));
2932 emit_library_call (bcopy_libfunc, 0,
2933 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2934 XEXP (to_rtx, 0), Pmode,
2935 convert_to_mode (TYPE_MODE (integer_type_node),
2936 size, TREE_UNSIGNED (integer_type_node)),
2937 TYPE_MODE (integer_type_node));
2940 preserve_temp_slots (to_rtx);
2943 return want_value ? to_rtx : NULL_RTX;
2946 /* Compute FROM and store the value in the rtx we got. */
2949 result = store_expr (from, to_rtx, want_value);
2950 preserve_temp_slots (result);
2953 return want_value ? result : NULL_RTX;
2956 /* Generate code for computing expression EXP,
2957 and storing the value into TARGET.
2958 TARGET may contain a QUEUED rtx.
2960 If WANT_VALUE is nonzero, return a copy of the value
2961 not in TARGET, so that we can be sure to use the proper
2962 value in a containing expression even if TARGET has something
2963 else stored in it. If possible, we copy the value through a pseudo
2964 and return that pseudo. Or, if the value is constant, we try to
2965 return the constant. In some cases, we return a pseudo
2966 copied *from* TARGET.
2968 If the mode is BLKmode then we may return TARGET itself.
2969 It turns out that in BLKmode it doesn't cause a problem.
2970 because C has no operators that could combine two different
2971 assignments into the same BLKmode object with different values
2972 with no sequence point. Will other languages need this to
2975 If WANT_VALUE is 0, we return NULL, to make sure
2976 to catch quickly any cases where the caller uses the value
2977 and fails to set WANT_VALUE. */
2980 store_expr (exp, target, want_value)
2982 register rtx target;
2986 int dont_return_target = 0;
2988 if (TREE_CODE (exp) == COMPOUND_EXPR)
2990 /* Perform first part of compound expression, then assign from second
2992 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2994 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
2996 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2998 /* For conditional expression, get safe form of the target. Then
2999 test the condition, doing the appropriate assignment on either
3000 side. This avoids the creation of unnecessary temporaries.
3001 For non-BLKmode, it is more efficient not to do this. */
3003 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3004 rtx flag = NULL_RTX;
3005 tree left_cleanups = NULL_TREE;
3006 tree right_cleanups = NULL_TREE;
3007 tree old_cleanups = cleanups_this_call;
3009 /* Used to save a pointer to the place to put the setting of
3010 the flag that indicates if this side of the conditional was
3011 taken. We backpatch the code, if we find out later that we
3012 have any conditional cleanups that need to be performed. */
3013 rtx dest_right_flag = NULL_RTX;
3014 rtx dest_left_flag = NULL_RTX;
3017 target = protect_from_queue (target, 1);
3019 do_pending_stack_adjust ();
3021 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3022 store_expr (TREE_OPERAND (exp, 1), target, 0);
3023 dest_left_flag = get_last_insn ();
3024 /* Handle conditional cleanups, if any. */
3025 left_cleanups = defer_cleanups_to (old_cleanups);
3027 emit_jump_insn (gen_jump (lab2));
3030 store_expr (TREE_OPERAND (exp, 2), target, 0);
3031 dest_right_flag = get_last_insn ();
3032 /* Handle conditional cleanups, if any. */
3033 right_cleanups = defer_cleanups_to (old_cleanups);
3038 /* Add back in any conditional cleanups. */
3039 if (left_cleanups || right_cleanups)
3045 /* Now that we know that a flag is needed, go back and add in the
3046 setting of the flag. */
3048 flag = gen_reg_rtx (word_mode);
3050 /* Do the left side flag. */
3051 last = get_last_insn ();
3052 /* Flag left cleanups as needed. */
3053 emit_move_insn (flag, const1_rtx);
3054 /* ??? deprecated, use sequences instead. */
3055 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
3057 /* Do the right side flag. */
3058 last = get_last_insn ();
3059 /* Flag left cleanups as needed. */
3060 emit_move_insn (flag, const0_rtx);
3061 /* ??? deprecated, use sequences instead. */
3062 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
3064 /* All cleanups must be on the function_obstack. */
3065 push_obstacks_nochange ();
3066 resume_temporary_allocation ();
3068 /* convert flag, which is an rtx, into a tree. */
3069 cond = make_node (RTL_EXPR);
3070 TREE_TYPE (cond) = integer_type_node;
3071 RTL_EXPR_RTL (cond) = flag;
3072 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
3073 cond = save_expr (cond);
3075 if (! left_cleanups)
3076 left_cleanups = integer_zero_node;
3077 if (! right_cleanups)
3078 right_cleanups = integer_zero_node;
3079 new_cleanups = build (COND_EXPR, void_type_node,
3080 truthvalue_conversion (cond),
3081 left_cleanups, right_cleanups);
3082 new_cleanups = fold (new_cleanups);
3086 /* Now add in the conditionalized cleanups. */
3088 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
3089 expand_eh_region_start ();
3091 return want_value ? target : NULL_RTX;
3093 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3094 && GET_MODE (target) != BLKmode)
3095 /* If target is in memory and caller wants value in a register instead,
3096 arrange that. Pass TARGET as target for expand_expr so that,
3097 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3098 We know expand_expr will not use the target in that case.
3099 Don't do this if TARGET is volatile because we are supposed
3100 to write it and then read it. */
3102 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3103 GET_MODE (target), 0);
3104 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3105 temp = copy_to_reg (temp);
3106 dont_return_target = 1;
3108 else if (queued_subexp_p (target))
3109 /* If target contains a postincrement, let's not risk
3110 using it as the place to generate the rhs. */
3112 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3114 /* Expand EXP into a new pseudo. */
3115 temp = gen_reg_rtx (GET_MODE (target));
3116 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3119 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3121 /* If target is volatile, ANSI requires accessing the value
3122 *from* the target, if it is accessed. So make that happen.
3123 In no case return the target itself. */
3124 if (! MEM_VOLATILE_P (target) && want_value)
3125 dont_return_target = 1;
3127 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3128 /* If this is an scalar in a register that is stored in a wider mode
3129 than the declared mode, compute the result into its declared mode
3130 and then convert to the wider mode. Our value is the computed
3133 /* If we don't want a value, we can do the conversion inside EXP,
3134 which will often result in some optimizations. Do the conversion
3135 in two steps: first change the signedness, if needed, then
3136 the extend. But don't do this if the type of EXP is a subtype
3137 of something else since then the conversion might involve
3138 more than just converting modes. */
3139 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3140 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3142 if (TREE_UNSIGNED (TREE_TYPE (exp))
3143 != SUBREG_PROMOTED_UNSIGNED_P (target))
3146 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3150 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3151 SUBREG_PROMOTED_UNSIGNED_P (target)),
3155 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3157 /* If TEMP is a volatile MEM and we want a result value, make
3158 the access now so it gets done only once. Likewise if
3159 it contains TARGET. */
3160 if (GET_CODE (temp) == MEM && want_value
3161 && (MEM_VOLATILE_P (temp)
3162 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3163 temp = copy_to_reg (temp);
3165 /* If TEMP is a VOIDmode constant, use convert_modes to make
3166 sure that we properly convert it. */
3167 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3168 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3169 TYPE_MODE (TREE_TYPE (exp)), temp,
3170 SUBREG_PROMOTED_UNSIGNED_P (target));
3172 convert_move (SUBREG_REG (target), temp,
3173 SUBREG_PROMOTED_UNSIGNED_P (target));
3174 return want_value ? temp : NULL_RTX;
3178 temp = expand_expr (exp, target, GET_MODE (target), 0);
3179 /* Return TARGET if it's a specified hardware register.
3180 If TARGET is a volatile mem ref, either return TARGET
3181 or return a reg copied *from* TARGET; ANSI requires this.
3183 Otherwise, if TEMP is not TARGET, return TEMP
3184 if it is constant (for efficiency),
3185 or if we really want the correct value. */
3186 if (!(target && GET_CODE (target) == REG
3187 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3188 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3190 && (CONSTANT_P (temp) || want_value))
3191 dont_return_target = 1;
3194 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3195 the same as that of TARGET, adjust the constant. This is needed, for
3196 example, in case it is a CONST_DOUBLE and we want only a word-sized
3198 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3199 && TREE_CODE (exp) != ERROR_MARK
3200 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3201 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3202 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3204 /* If value was not generated in the target, store it there.
3205 Convert the value to TARGET's type first if nec. */
3207 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
3209 target = protect_from_queue (target, 1);
3210 if (GET_MODE (temp) != GET_MODE (target)
3211 && GET_MODE (temp) != VOIDmode)
3213 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3214 if (dont_return_target)
3216 /* In this case, we will return TEMP,
3217 so make sure it has the proper mode.
3218 But don't forget to store the value into TARGET. */
3219 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3220 emit_move_insn (target, temp);
3223 convert_move (target, temp, unsignedp);
3226 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3228 /* Handle copying a string constant into an array.
3229 The string constant may be shorter than the array.
3230 So copy just the string's actual length, and clear the rest. */
3234 /* Get the size of the data type of the string,
3235 which is actually the size of the target. */
3236 size = expr_size (exp);
3237 if (GET_CODE (size) == CONST_INT
3238 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3239 emit_block_move (target, temp, size,
3240 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3243 /* Compute the size of the data to copy from the string. */
3245 = size_binop (MIN_EXPR,
3246 make_tree (sizetype, size),
3248 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3249 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3253 /* Copy that much. */
3254 emit_block_move (target, temp, copy_size_rtx,
3255 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3257 /* Figure out how much is left in TARGET that we have to clear.
3258 Do all calculations in ptr_mode. */
3260 addr = XEXP (target, 0);
3261 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3263 if (GET_CODE (copy_size_rtx) == CONST_INT)
3265 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3266 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3270 addr = force_reg (ptr_mode, addr);
3271 addr = expand_binop (ptr_mode, add_optab, addr,
3272 copy_size_rtx, NULL_RTX, 0,
3275 size = expand_binop (ptr_mode, sub_optab, size,
3276 copy_size_rtx, NULL_RTX, 0,
3279 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
3280 GET_MODE (size), 0, 0);
3281 label = gen_label_rtx ();
3282 emit_jump_insn (gen_blt (label));
3285 if (size != const0_rtx)
3287 #ifdef TARGET_MEM_FUNCTIONS
3288 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3290 const0_rtx, TYPE_MODE (integer_type_node),
3291 convert_to_mode (TYPE_MODE (sizetype),
3293 TREE_UNSIGNED (sizetype)),
3294 TYPE_MODE (sizetype));
3296 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3298 convert_to_mode (TYPE_MODE (integer_type_node),
3300 TREE_UNSIGNED (integer_type_node)),
3301 TYPE_MODE (integer_type_node));
3309 /* Handle calls that return values in multiple non-contiguous locations.
3310 The Irix 6 ABI has examples of this. */
3311 else if (GET_CODE (target) == PARALLEL)
3312 emit_group_load (target, temp);
3313 else if (GET_MODE (temp) == BLKmode)
3314 emit_block_move (target, temp, expr_size (exp),
3315 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3317 emit_move_insn (target, temp);
3320 /* If we don't want a value, return NULL_RTX. */
3324 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3325 ??? The latter test doesn't seem to make sense. */
3326 else if (dont_return_target && GET_CODE (temp) != MEM)
3329 /* Return TARGET itself if it is a hard register. */
3330 else if (want_value && GET_MODE (target) != BLKmode
3331 && ! (GET_CODE (target) == REG
3332 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3333 return copy_to_reg (target);
3339 /* Return 1 if EXP just contains zeros. */
3347 switch (TREE_CODE (exp))
3351 case NON_LVALUE_EXPR:
3352 return is_zeros_p (TREE_OPERAND (exp, 0));
3355 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3359 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3362 return REAL_VALUES_EQUAL (TREE_REAL_CST (exp), dconst0);
3365 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3366 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3367 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3368 if (! is_zeros_p (TREE_VALUE (elt)))
3377 /* Return 1 if EXP contains mostly (3/4) zeros. */
3380 mostly_zeros_p (exp)
3383 if (TREE_CODE (exp) == CONSTRUCTOR)
3385 int elts = 0, zeros = 0;
3386 tree elt = CONSTRUCTOR_ELTS (exp);
3387 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3389 /* If there are no ranges of true bits, it is all zero. */
3390 return elt == NULL_TREE;
3392 for (; elt; elt = TREE_CHAIN (elt))
3394 /* We do not handle the case where the index is a RANGE_EXPR,
3395 so the statistic will be somewhat inaccurate.
3396 We do make a more accurate count in store_constructor itself,
3397 so since this function is only used for nested array elements,
3398 this should be close enough. */
3399 if (mostly_zeros_p (TREE_VALUE (elt)))
3404 return 4 * zeros >= 3 * elts;
3407 return is_zeros_p (exp);
3410 /* Helper function for store_constructor.
3411 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3412 TYPE is the type of the CONSTRUCTOR, not the element type.
3413 CLEARED is as for store_constructor.
3415 This provides a recursive shortcut back to store_constructor when it isn't
3416 necessary to go through store_field. This is so that we can pass through
3417 the cleared field to let store_constructor know that we may not have to
3418 clear a substructure if the outer structure has already been cleared. */
3421 store_constructor_field (target, bitsize, bitpos,
3422 mode, exp, type, cleared)
3424 int bitsize, bitpos;
3425 enum machine_mode mode;
3429 if (TREE_CODE (exp) == CONSTRUCTOR
3430 && bitpos % BITS_PER_UNIT == 0
3431 /* If we have a non-zero bitpos for a register target, then we just
3432 let store_field do the bitfield handling. This is unlikely to
3433 generate unnecessary clear instructions anyways. */
3434 && (bitpos == 0 || GET_CODE (target) == MEM))
3437 target = change_address (target, VOIDmode,
3438 plus_constant (XEXP (target, 0),
3439 bitpos / BITS_PER_UNIT));
3440 store_constructor (exp, target, cleared);
3443 store_field (target, bitsize, bitpos, mode, exp,
3444 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3445 int_size_in_bytes (type));
3448 /* Store the value of constructor EXP into the rtx TARGET.
3449 TARGET is either a REG or a MEM.
3450 CLEARED is true if TARGET is known to have been zero'd. */
3453 store_constructor (exp, target, cleared)
3458 tree type = TREE_TYPE (exp);
3460 /* We know our target cannot conflict, since safe_from_p has been called. */
3462 /* Don't try copying piece by piece into a hard register
3463 since that is vulnerable to being clobbered by EXP.
3464 Instead, construct in a pseudo register and then copy it all. */
3465 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3467 rtx temp = gen_reg_rtx (GET_MODE (target));
3468 store_constructor (exp, temp, 0);
3469 emit_move_insn (target, temp);
3474 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3475 || TREE_CODE (type) == QUAL_UNION_TYPE)
3479 /* Inform later passes that the whole union value is dead. */
3480 if (TREE_CODE (type) == UNION_TYPE
3481 || TREE_CODE (type) == QUAL_UNION_TYPE)
3482 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3484 /* If we are building a static constructor into a register,
3485 set the initial value as zero so we can fold the value into
3486 a constant. But if more than one register is involved,
3487 this probably loses. */
3488 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3489 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
3492 emit_move_insn (target, const0_rtx);
3497 /* If the constructor has fewer fields than the structure
3498 or if we are initializing the structure to mostly zeros,
3499 clear the whole structure first. */
3500 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3501 != list_length (TYPE_FIELDS (type)))
3502 || mostly_zeros_p (exp))
3505 clear_storage (target, expr_size (exp),
3506 TYPE_ALIGN (type) / BITS_PER_UNIT);
3511 /* Inform later passes that the old value is dead. */
3512 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3514 /* Store each element of the constructor into
3515 the corresponding field of TARGET. */
3517 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3519 register tree field = TREE_PURPOSE (elt);
3520 register enum machine_mode mode;
3524 tree pos, constant = 0, offset = 0;
3525 rtx to_rtx = target;
3527 /* Just ignore missing fields.
3528 We cleared the whole structure, above,
3529 if any fields are missing. */
3533 if (cleared && is_zeros_p (TREE_VALUE (elt)))
3536 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3537 unsignedp = TREE_UNSIGNED (field);
3538 mode = DECL_MODE (field);
3539 if (DECL_BIT_FIELD (field))
3542 pos = DECL_FIELD_BITPOS (field);
3543 if (TREE_CODE (pos) == INTEGER_CST)
3545 else if (TREE_CODE (pos) == PLUS_EXPR
3546 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3547 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3552 bitpos = TREE_INT_CST_LOW (constant);
3558 if (contains_placeholder_p (offset))
3559 offset = build (WITH_RECORD_EXPR, sizetype,
3562 offset = size_binop (FLOOR_DIV_EXPR, offset,
3563 size_int (BITS_PER_UNIT));
3565 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3566 if (GET_CODE (to_rtx) != MEM)
3570 = change_address (to_rtx, VOIDmode,
3571 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
3572 force_reg (ptr_mode, offset_rtx)));
3574 if (TREE_READONLY (field))
3576 if (GET_CODE (to_rtx) == MEM)
3577 to_rtx = change_address (to_rtx, GET_MODE (to_rtx),
3579 RTX_UNCHANGING_P (to_rtx) = 1;
3582 store_constructor_field (to_rtx, bitsize, bitpos,
3583 mode, TREE_VALUE (elt), type, cleared);
3586 else if (TREE_CODE (type) == ARRAY_TYPE)
3591 tree domain = TYPE_DOMAIN (type);
3592 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3593 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3594 tree elttype = TREE_TYPE (type);
3596 /* If the constructor has fewer elements than the array,
3597 clear the whole array first. Similarly if this this is
3598 static constructor of a non-BLKmode object. */
3599 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3603 HOST_WIDE_INT count = 0, zero_count = 0;
3605 /* This loop is a more accurate version of the loop in
3606 mostly_zeros_p (it handles RANGE_EXPR in an index).
3607 It is also needed to check for missing elements. */
3608 for (elt = CONSTRUCTOR_ELTS (exp);
3610 elt = TREE_CHAIN (elt))
3612 tree index = TREE_PURPOSE (elt);
3613 HOST_WIDE_INT this_node_count;
3614 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3616 tree lo_index = TREE_OPERAND (index, 0);
3617 tree hi_index = TREE_OPERAND (index, 1);
3618 if (TREE_CODE (lo_index) != INTEGER_CST
3619 || TREE_CODE (hi_index) != INTEGER_CST)
3624 this_node_count = TREE_INT_CST_LOW (hi_index)
3625 - TREE_INT_CST_LOW (lo_index) + 1;
3628 this_node_count = 1;
3629 count += this_node_count;
3630 if (mostly_zeros_p (TREE_VALUE (elt)))
3631 zero_count += this_node_count;
3633 /* Clear the entire array first if there are any missing elements,
3634 or if the incidence of zero elements is >= 75%. */
3635 if (count < maxelt - minelt + 1
3636 || 4 * zero_count >= 3 * count)
3642 clear_storage (target, expr_size (exp),
3643 TYPE_ALIGN (type) / BITS_PER_UNIT);
3647 /* Inform later passes that the old value is dead. */
3648 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3650 /* Store each element of the constructor into
3651 the corresponding element of TARGET, determined
3652 by counting the elements. */
3653 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3655 elt = TREE_CHAIN (elt), i++)
3657 register enum machine_mode mode;
3661 tree value = TREE_VALUE (elt);
3662 tree index = TREE_PURPOSE (elt);
3663 rtx xtarget = target;
3665 if (cleared && is_zeros_p (value))
3668 mode = TYPE_MODE (elttype);
3669 bitsize = GET_MODE_BITSIZE (mode);
3670 unsignedp = TREE_UNSIGNED (elttype);
3672 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3674 tree lo_index = TREE_OPERAND (index, 0);
3675 tree hi_index = TREE_OPERAND (index, 1);
3676 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
3677 struct nesting *loop;
3678 HOST_WIDE_INT lo, hi, count;
3681 /* If the range is constant and "small", unroll the loop. */
3682 if (TREE_CODE (lo_index) == INTEGER_CST
3683 && TREE_CODE (hi_index) == INTEGER_CST
3684 && (lo = TREE_INT_CST_LOW (lo_index),
3685 hi = TREE_INT_CST_LOW (hi_index),
3686 count = hi - lo + 1,
3687 (GET_CODE (target) != MEM
3689 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
3690 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
3693 lo -= minelt; hi -= minelt;
3694 for (; lo <= hi; lo++)
3696 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
3697 store_constructor_field (target, bitsize, bitpos,
3698 mode, value, type, cleared);
3703 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
3704 loop_top = gen_label_rtx ();
3705 loop_end = gen_label_rtx ();
3707 unsignedp = TREE_UNSIGNED (domain);
3709 index = build_decl (VAR_DECL, NULL_TREE, domain);
3711 DECL_RTL (index) = index_r
3712 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
3715 if (TREE_CODE (value) == SAVE_EXPR
3716 && SAVE_EXPR_RTL (value) == 0)
3718 /* Make sure value gets expanded once before the
3720 expand_expr (value, const0_rtx, VOIDmode, 0);
3723 store_expr (lo_index, index_r, 0);
3724 loop = expand_start_loop (0);
3726 /* Assign value to element index. */
3727 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3728 size_int (BITS_PER_UNIT));
3729 position = size_binop (MULT_EXPR,
3730 size_binop (MINUS_EXPR, index,
3731 TYPE_MIN_VALUE (domain)),
3733 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3734 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3735 xtarget = change_address (target, mode, addr);
3736 if (TREE_CODE (value) == CONSTRUCTOR)
3737 store_constructor (value, xtarget, cleared);
3739 store_expr (value, xtarget, 0);
3741 expand_exit_loop_if_false (loop,
3742 build (LT_EXPR, integer_type_node,
3745 expand_increment (build (PREINCREMENT_EXPR,
3747 index, integer_one_node), 0, 0);
3749 emit_label (loop_end);
3751 /* Needed by stupid register allocation. to extend the
3752 lifetime of pseudo-regs used by target past the end
3754 emit_insn (gen_rtx (USE, GET_MODE (target), target));
3757 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
3758 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
3764 index = size_int (i);
3767 index = size_binop (MINUS_EXPR, index,
3768 TYPE_MIN_VALUE (domain));
3769 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3770 size_int (BITS_PER_UNIT));
3771 position = size_binop (MULT_EXPR, index, position);
3772 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3773 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3774 xtarget = change_address (target, mode, addr);
3775 store_expr (value, xtarget, 0);
3780 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
3781 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3783 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3784 store_constructor_field (target, bitsize, bitpos,
3785 mode, value, type, cleared);
3789 /* set constructor assignments */
3790 else if (TREE_CODE (type) == SET_TYPE)
3792 tree elt = CONSTRUCTOR_ELTS (exp);
3793 rtx xtarget = XEXP (target, 0);
3794 int set_word_size = TYPE_ALIGN (type);
3795 int nbytes = int_size_in_bytes (type), nbits;
3796 tree domain = TYPE_DOMAIN (type);
3797 tree domain_min, domain_max, bitlength;
3799 /* The default implementation strategy is to extract the constant
3800 parts of the constructor, use that to initialize the target,
3801 and then "or" in whatever non-constant ranges we need in addition.
3803 If a large set is all zero or all ones, it is
3804 probably better to set it using memset (if available) or bzero.
3805 Also, if a large set has just a single range, it may also be
3806 better to first clear all the first clear the set (using
3807 bzero/memset), and set the bits we want. */
3809 /* Check for all zeros. */
3810 if (elt == NULL_TREE)
3813 clear_storage (target, expr_size (exp),
3814 TYPE_ALIGN (type) / BITS_PER_UNIT);
3818 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3819 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3820 bitlength = size_binop (PLUS_EXPR,
3821 size_binop (MINUS_EXPR, domain_max, domain_min),
3824 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
3826 nbits = TREE_INT_CST_LOW (bitlength);
3828 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
3829 are "complicated" (more than one range), initialize (the
3830 constant parts) by copying from a constant. */
3831 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
3832 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
3834 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3835 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
3836 char *bit_buffer = (char *) alloca (nbits);
3837 HOST_WIDE_INT word = 0;
3840 int offset = 0; /* In bytes from beginning of set. */
3841 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
3844 if (bit_buffer[ibit])
3846 if (BYTES_BIG_ENDIAN)
3847 word |= (1 << (set_word_size - 1 - bit_pos));
3849 word |= 1 << bit_pos;
3852 if (bit_pos >= set_word_size || ibit == nbits)
3854 if (word != 0 || ! cleared)
3856 rtx datum = GEN_INT (word);
3858 /* The assumption here is that it is safe to use
3859 XEXP if the set is multi-word, but not if
3860 it's single-word. */
3861 if (GET_CODE (target) == MEM)
3863 to_rtx = plus_constant (XEXP (target, 0), offset);
3864 to_rtx = change_address (target, mode, to_rtx);
3866 else if (offset == 0)
3870 emit_move_insn (to_rtx, datum);
3876 offset += set_word_size / BITS_PER_UNIT;
3882 /* Don't bother clearing storage if the set is all ones. */
3883 if (TREE_CHAIN (elt) != NULL_TREE
3884 || (TREE_PURPOSE (elt) == NULL_TREE
3886 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
3887 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
3888 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
3889 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
3891 clear_storage (target, expr_size (exp),
3892 TYPE_ALIGN (type) / BITS_PER_UNIT);
3895 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
3897 /* start of range of element or NULL */
3898 tree startbit = TREE_PURPOSE (elt);
3899 /* end of range of element, or element value */
3900 tree endbit = TREE_VALUE (elt);
3901 HOST_WIDE_INT startb, endb;
3902 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
3904 bitlength_rtx = expand_expr (bitlength,
3905 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
3907 /* handle non-range tuple element like [ expr ] */
3908 if (startbit == NULL_TREE)
3910 startbit = save_expr (endbit);
3913 startbit = convert (sizetype, startbit);
3914 endbit = convert (sizetype, endbit);
3915 if (! integer_zerop (domain_min))
3917 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
3918 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
3920 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
3921 EXPAND_CONST_ADDRESS);
3922 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
3923 EXPAND_CONST_ADDRESS);
3927 targetx = assign_stack_temp (GET_MODE (target),
3928 GET_MODE_SIZE (GET_MODE (target)),
3930 emit_move_insn (targetx, target);
3932 else if (GET_CODE (target) == MEM)
3937 #ifdef TARGET_MEM_FUNCTIONS
3938 /* Optimization: If startbit and endbit are
3939 constants divisible by BITS_PER_UNIT,
3940 call memset instead. */
3941 if (TREE_CODE (startbit) == INTEGER_CST
3942 && TREE_CODE (endbit) == INTEGER_CST
3943 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
3944 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
3946 emit_library_call (memset_libfunc, 0,
3948 plus_constant (XEXP (targetx, 0),
3949 startb / BITS_PER_UNIT),
3951 constm1_rtx, TYPE_MODE (integer_type_node),
3952 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3953 TYPE_MODE (sizetype));
3958 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
3959 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
3960 bitlength_rtx, TYPE_MODE (sizetype),
3961 startbit_rtx, TYPE_MODE (sizetype),
3962 endbit_rtx, TYPE_MODE (sizetype));
3965 emit_move_insn (target, targetx);
3973 /* Store the value of EXP (an expression tree)
3974 into a subfield of TARGET which has mode MODE and occupies
3975 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3976 If MODE is VOIDmode, it means that we are storing into a bit-field.
3978 If VALUE_MODE is VOIDmode, return nothing in particular.
3979 UNSIGNEDP is not used in this case.
3981 Otherwise, return an rtx for the value stored. This rtx
3982 has mode VALUE_MODE if that is convenient to do.
3983 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3985 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3986 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
3989 store_field (target, bitsize, bitpos, mode, exp, value_mode,
3990 unsignedp, align, total_size)
3992 int bitsize, bitpos;
3993 enum machine_mode mode;
3995 enum machine_mode value_mode;
4000 HOST_WIDE_INT width_mask = 0;
4002 if (bitsize < HOST_BITS_PER_WIDE_INT)
4003 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4005 /* If we are storing into an unaligned field of an aligned union that is
4006 in a register, we may have the mode of TARGET being an integer mode but
4007 MODE == BLKmode. In that case, get an aligned object whose size and
4008 alignment are the same as TARGET and store TARGET into it (we can avoid
4009 the store if the field being stored is the entire width of TARGET). Then
4010 call ourselves recursively to store the field into a BLKmode version of
4011 that object. Finally, load from the object into TARGET. This is not
4012 very efficient in general, but should only be slightly more expensive
4013 than the otherwise-required unaligned accesses. Perhaps this can be
4014 cleaned up later. */
4017 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4019 rtx object = assign_stack_temp (GET_MODE (target),
4020 GET_MODE_SIZE (GET_MODE (target)), 0);
4021 rtx blk_object = copy_rtx (object);
4023 MEM_IN_STRUCT_P (object) = 1;
4024 MEM_IN_STRUCT_P (blk_object) = 1;
4025 PUT_MODE (blk_object, BLKmode);
4027 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4028 emit_move_insn (object, target);
4030 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4033 /* Even though we aren't returning target, we need to
4034 give it the updated value. */
4035 emit_move_insn (target, object);
4040 /* If the structure is in a register or if the component
4041 is a bit field, we cannot use addressing to access it.
4042 Use bit-field techniques or SUBREG to store in it. */
4044 if (mode == VOIDmode
4045 || (mode != BLKmode && ! direct_store[(int) mode])
4046 || GET_CODE (target) == REG
4047 || GET_CODE (target) == SUBREG
4048 /* If the field isn't aligned enough to store as an ordinary memref,
4049 store it as a bit field. */
4050 || (SLOW_UNALIGNED_ACCESS
4051 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4052 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4054 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4056 /* If BITSIZE is narrower than the size of the type of EXP
4057 we will be narrowing TEMP. Normally, what's wanted are the
4058 low-order bits. However, if EXP's type is a record and this is
4059 big-endian machine, we want the upper BITSIZE bits. */
4060 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4061 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4062 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4063 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4064 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4068 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4070 if (mode != VOIDmode && mode != BLKmode
4071 && mode != TYPE_MODE (TREE_TYPE (exp)))
4072 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4074 /* If the modes of TARGET and TEMP are both BLKmode, both
4075 must be in memory and BITPOS must be aligned on a byte
4076 boundary. If so, we simply do a block copy. */
4077 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4079 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4080 || bitpos % BITS_PER_UNIT != 0)
4083 target = change_address (target, VOIDmode,
4084 plus_constant (XEXP (target, 0),
4085 bitpos / BITS_PER_UNIT));
4087 emit_block_move (target, temp,
4088 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4092 return value_mode == VOIDmode ? const0_rtx : target;
4095 /* Store the value in the bitfield. */
4096 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4097 if (value_mode != VOIDmode)
4099 /* The caller wants an rtx for the value. */
4100 /* If possible, avoid refetching from the bitfield itself. */
4102 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4105 enum machine_mode tmode;
4108 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4109 tmode = GET_MODE (temp);
4110 if (tmode == VOIDmode)
4112 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4113 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4114 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4116 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4117 NULL_RTX, value_mode, 0, align,
4124 rtx addr = XEXP (target, 0);
4127 /* If a value is wanted, it must be the lhs;
4128 so make the address stable for multiple use. */
4130 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4131 && ! CONSTANT_ADDRESS_P (addr)
4132 /* A frame-pointer reference is already stable. */
4133 && ! (GET_CODE (addr) == PLUS
4134 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4135 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4136 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4137 addr = copy_to_reg (addr);
4139 /* Now build a reference to just the desired component. */
4141 to_rtx = change_address (target, mode,
4142 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
4143 MEM_IN_STRUCT_P (to_rtx) = 1;
4145 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4149 /* Return true if any object containing the innermost array is an unaligned
4150 packed structure field. */
4153 get_inner_unaligned_p (exp)
4156 int needed_alignment = TYPE_ALIGN (TREE_TYPE (exp));
4160 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4162 if (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
4166 else if (TREE_CODE (exp) != ARRAY_REF
4167 && TREE_CODE (exp) != NON_LVALUE_EXPR
4168 && ! ((TREE_CODE (exp) == NOP_EXPR
4169 || TREE_CODE (exp) == CONVERT_EXPR)
4170 && (TYPE_MODE (TREE_TYPE (exp))
4171 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4174 exp = TREE_OPERAND (exp, 0);
4180 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4181 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4182 ARRAY_REFs and find the ultimate containing object, which we return.
4184 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4185 bit position, and *PUNSIGNEDP to the signedness of the field.
4186 If the position of the field is variable, we store a tree
4187 giving the variable offset (in units) in *POFFSET.
4188 This offset is in addition to the bit position.
4189 If the position is not variable, we store 0 in *POFFSET.
4190 We set *PALIGNMENT to the alignment in bytes of the address that will be
4191 computed. This is the alignment of the thing we return if *POFFSET
4192 is zero, but can be more less strictly aligned if *POFFSET is nonzero.
4194 If any of the extraction expressions is volatile,
4195 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4197 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4198 is a mode that can be used to access the field. In that case, *PBITSIZE
4201 If the field describes a variable-sized object, *PMODE is set to
4202 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4203 this case, but the address of the object can be found. */
4206 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4207 punsignedp, pvolatilep, palignment)
4212 enum machine_mode *pmode;
4217 tree orig_exp = exp;
4219 enum machine_mode mode = VOIDmode;
4220 tree offset = integer_zero_node;
4221 int alignment = BIGGEST_ALIGNMENT;
4223 if (TREE_CODE (exp) == COMPONENT_REF)
4225 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4226 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4227 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4228 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4230 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4232 size_tree = TREE_OPERAND (exp, 1);
4233 *punsignedp = TREE_UNSIGNED (exp);
4237 mode = TYPE_MODE (TREE_TYPE (exp));
4238 *pbitsize = GET_MODE_BITSIZE (mode);
4239 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4244 if (TREE_CODE (size_tree) != INTEGER_CST)
4245 mode = BLKmode, *pbitsize = -1;
4247 *pbitsize = TREE_INT_CST_LOW (size_tree);
4250 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4251 and find the ultimate containing object. */
4257 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4259 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4260 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4261 : TREE_OPERAND (exp, 2));
4262 tree constant = integer_zero_node, var = pos;
4264 /* If this field hasn't been filled in yet, don't go
4265 past it. This should only happen when folding expressions
4266 made during type construction. */
4270 /* Assume here that the offset is a multiple of a unit.
4271 If not, there should be an explicitly added constant. */
4272 if (TREE_CODE (pos) == PLUS_EXPR
4273 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4274 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4275 else if (TREE_CODE (pos) == INTEGER_CST)
4276 constant = pos, var = integer_zero_node;
4278 *pbitpos += TREE_INT_CST_LOW (constant);
4279 offset = size_binop (PLUS_EXPR, offset,
4280 size_binop (EXACT_DIV_EXPR, var,
4281 size_int (BITS_PER_UNIT)));
4284 else if (TREE_CODE (exp) == ARRAY_REF)
4286 /* This code is based on the code in case ARRAY_REF in expand_expr
4287 below. We assume here that the size of an array element is
4288 always an integral multiple of BITS_PER_UNIT. */
4290 tree index = TREE_OPERAND (exp, 1);
4291 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4293 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4294 tree index_type = TREE_TYPE (index);
4296 if (! integer_zerop (low_bound))
4297 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
4299 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4301 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4303 index_type = TREE_TYPE (index);
4306 index = fold (build (MULT_EXPR, index_type, index,
4307 TYPE_SIZE (TREE_TYPE (exp))));
4309 if (TREE_CODE (index) == INTEGER_CST
4310 && TREE_INT_CST_HIGH (index) == 0)
4311 *pbitpos += TREE_INT_CST_LOW (index);
4313 offset = size_binop (PLUS_EXPR, offset,
4314 size_binop (FLOOR_DIV_EXPR, index,
4315 size_int (BITS_PER_UNIT)));
4317 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4318 && ! ((TREE_CODE (exp) == NOP_EXPR
4319 || TREE_CODE (exp) == CONVERT_EXPR)
4320 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4321 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4323 && (TYPE_MODE (TREE_TYPE (exp))
4324 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4327 /* If any reference in the chain is volatile, the effect is volatile. */
4328 if (TREE_THIS_VOLATILE (exp))
4331 /* If the offset is non-constant already, then we can't assume any
4332 alignment more than the alignment here. */
4333 if (! integer_zerop (offset))
4334 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4336 exp = TREE_OPERAND (exp, 0);
4339 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4340 alignment = MIN (alignment, DECL_ALIGN (exp));
4341 else if (TREE_TYPE (exp) != 0)
4342 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4344 if (integer_zerop (offset))
4347 if (offset != 0 && contains_placeholder_p (offset))
4348 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4352 *palignment = alignment / BITS_PER_UNIT;
4356 /* Given an rtx VALUE that may contain additions and multiplications,
4357 return an equivalent value that just refers to a register or memory.
4358 This is done by generating instructions to perform the arithmetic
4359 and returning a pseudo-register containing the value.
4361 The returned value may be a REG, SUBREG, MEM or constant. */
4364 force_operand (value, target)
4367 register optab binoptab = 0;
4368 /* Use a temporary to force order of execution of calls to
4372 /* Use subtarget as the target for operand 0 of a binary operation. */
4373 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4375 if (GET_CODE (value) == PLUS)
4376 binoptab = add_optab;
4377 else if (GET_CODE (value) == MINUS)
4378 binoptab = sub_optab;
4379 else if (GET_CODE (value) == MULT)
4381 op2 = XEXP (value, 1);
4382 if (!CONSTANT_P (op2)
4383 && !(GET_CODE (op2) == REG && op2 != subtarget))
4385 tmp = force_operand (XEXP (value, 0), subtarget);
4386 return expand_mult (GET_MODE (value), tmp,
4387 force_operand (op2, NULL_RTX),
4393 op2 = XEXP (value, 1);
4394 if (!CONSTANT_P (op2)
4395 && !(GET_CODE (op2) == REG && op2 != subtarget))
4397 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4399 binoptab = add_optab;
4400 op2 = negate_rtx (GET_MODE (value), op2);
4403 /* Check for an addition with OP2 a constant integer and our first
4404 operand a PLUS of a virtual register and something else. In that
4405 case, we want to emit the sum of the virtual register and the
4406 constant first and then add the other value. This allows virtual
4407 register instantiation to simply modify the constant rather than
4408 creating another one around this addition. */
4409 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4410 && GET_CODE (XEXP (value, 0)) == PLUS
4411 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4412 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4413 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4415 rtx temp = expand_binop (GET_MODE (value), binoptab,
4416 XEXP (XEXP (value, 0), 0), op2,
4417 subtarget, 0, OPTAB_LIB_WIDEN);
4418 return expand_binop (GET_MODE (value), binoptab, temp,
4419 force_operand (XEXP (XEXP (value, 0), 1), 0),
4420 target, 0, OPTAB_LIB_WIDEN);
4423 tmp = force_operand (XEXP (value, 0), subtarget);
4424 return expand_binop (GET_MODE (value), binoptab, tmp,
4425 force_operand (op2, NULL_RTX),
4426 target, 0, OPTAB_LIB_WIDEN);
4427 /* We give UNSIGNEDP = 0 to expand_binop
4428 because the only operations we are expanding here are signed ones. */
4433 /* Subroutine of expand_expr:
4434 save the non-copied parts (LIST) of an expr (LHS), and return a list
4435 which can restore these values to their previous values,
4436 should something modify their storage. */
4439 save_noncopied_parts (lhs, list)
4446 for (tail = list; tail; tail = TREE_CHAIN (tail))
4447 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4448 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
4451 tree part = TREE_VALUE (tail);
4452 tree part_type = TREE_TYPE (part);
4453 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
4454 rtx target = assign_temp (part_type, 0, 1, 1);
4455 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
4456 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
4457 parts = tree_cons (to_be_saved,
4458 build (RTL_EXPR, part_type, NULL_TREE,
4461 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
4466 /* Subroutine of expand_expr:
4467 record the non-copied parts (LIST) of an expr (LHS), and return a list
4468 which specifies the initial values of these parts. */
4471 init_noncopied_parts (lhs, list)
4478 for (tail = list; tail; tail = TREE_CHAIN (tail))
4479 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4480 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
4483 tree part = TREE_VALUE (tail);
4484 tree part_type = TREE_TYPE (part);
4485 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
4486 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
4491 /* Subroutine of expand_expr: return nonzero iff there is no way that
4492 EXP can reference X, which is being modified. */
4495 safe_from_p (x, exp)
4503 /* If EXP has varying size, we MUST use a target since we currently
4504 have no way of allocating temporaries of variable size
4505 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
4506 So we assume here that something at a higher level has prevented a
4507 clash. This is somewhat bogus, but the best we can do. Only
4508 do this when X is BLKmode. */
4509 || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4510 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
4511 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
4512 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
4513 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
4515 && GET_MODE (x) == BLKmode))
4518 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
4519 find the underlying pseudo. */
4520 if (GET_CODE (x) == SUBREG)
4523 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4527 /* If X is a location in the outgoing argument area, it is always safe. */
4528 if (GET_CODE (x) == MEM
4529 && (XEXP (x, 0) == virtual_outgoing_args_rtx
4530 || (GET_CODE (XEXP (x, 0)) == PLUS
4531 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
4534 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4537 exp_rtl = DECL_RTL (exp);
4544 if (TREE_CODE (exp) == TREE_LIST)
4545 return ((TREE_VALUE (exp) == 0
4546 || safe_from_p (x, TREE_VALUE (exp)))
4547 && (TREE_CHAIN (exp) == 0
4548 || safe_from_p (x, TREE_CHAIN (exp))));
4553 return safe_from_p (x, TREE_OPERAND (exp, 0));
4557 return (safe_from_p (x, TREE_OPERAND (exp, 0))
4558 && safe_from_p (x, TREE_OPERAND (exp, 1)));
4562 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
4563 the expression. If it is set, we conflict iff we are that rtx or
4564 both are in memory. Otherwise, we check all operands of the
4565 expression recursively. */
4567 switch (TREE_CODE (exp))
4570 return (staticp (TREE_OPERAND (exp, 0))
4571 || safe_from_p (x, TREE_OPERAND (exp, 0)));
4574 if (GET_CODE (x) == MEM)
4579 exp_rtl = CALL_EXPR_RTL (exp);
4582 /* Assume that the call will clobber all hard registers and
4584 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4585 || GET_CODE (x) == MEM)
4592 /* If a sequence exists, we would have to scan every instruction
4593 in the sequence to see if it was safe. This is probably not
4595 if (RTL_EXPR_SEQUENCE (exp))
4598 exp_rtl = RTL_EXPR_RTL (exp);
4601 case WITH_CLEANUP_EXPR:
4602 exp_rtl = RTL_EXPR_RTL (exp);
4605 case CLEANUP_POINT_EXPR:
4606 return safe_from_p (x, TREE_OPERAND (exp, 0));
4609 exp_rtl = SAVE_EXPR_RTL (exp);
4613 /* The only operand we look at is operand 1. The rest aren't
4614 part of the expression. */
4615 return safe_from_p (x, TREE_OPERAND (exp, 1));
4617 case METHOD_CALL_EXPR:
4618 /* This takes a rtx argument, but shouldn't appear here. */
4622 /* If we have an rtx, we do not need to scan our operands. */
4626 nops = tree_code_length[(int) TREE_CODE (exp)];
4627 for (i = 0; i < nops; i++)
4628 if (TREE_OPERAND (exp, i) != 0
4629 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
4633 /* If we have an rtl, find any enclosed object. Then see if we conflict
4637 if (GET_CODE (exp_rtl) == SUBREG)
4639 exp_rtl = SUBREG_REG (exp_rtl);
4640 if (GET_CODE (exp_rtl) == REG
4641 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
4645 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
4646 are memory and EXP is not readonly. */
4647 return ! (rtx_equal_p (x, exp_rtl)
4648 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
4649 && ! TREE_READONLY (exp)));
4652 /* If we reach here, it is safe. */
4656 /* Subroutine of expand_expr: return nonzero iff EXP is an
4657 expression whose type is statically determinable. */
4663 if (TREE_CODE (exp) == PARM_DECL
4664 || TREE_CODE (exp) == VAR_DECL
4665 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
4666 || TREE_CODE (exp) == COMPONENT_REF
4667 || TREE_CODE (exp) == ARRAY_REF)
4672 /* Subroutine of expand_expr: return rtx if EXP is a
4673 variable or parameter; else return 0. */
4680 switch (TREE_CODE (exp))
4684 return DECL_RTL (exp);
4690 /* expand_expr: generate code for computing expression EXP.
4691 An rtx for the computed value is returned. The value is never null.
4692 In the case of a void EXP, const0_rtx is returned.
4694 The value may be stored in TARGET if TARGET is nonzero.
4695 TARGET is just a suggestion; callers must assume that
4696 the rtx returned may not be the same as TARGET.
4698 If TARGET is CONST0_RTX, it means that the value will be ignored.
4700 If TMODE is not VOIDmode, it suggests generating the
4701 result in mode TMODE. But this is done only when convenient.
4702 Otherwise, TMODE is ignored and the value generated in its natural mode.
4703 TMODE is just a suggestion; callers must assume that
4704 the rtx returned may not have mode TMODE.
4706 Note that TARGET may have neither TMODE nor MODE. In that case, it
4707 probably will not be used.
4709 If MODIFIER is EXPAND_SUM then when EXP is an addition
4710 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4711 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4712 products as above, or REG or MEM, or constant.
4713 Ordinarily in such cases we would output mul or add instructions
4714 and then return a pseudo reg containing the sum.
4716 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4717 it also marks a label as absolutely required (it can't be dead).
4718 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
4719 This is used for outputting expressions used in initializers.
4721 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4722 with a constant address even if that address is not normally legitimate.
4723 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
4726 expand_expr (exp, target, tmode, modifier)
4729 enum machine_mode tmode;
4730 enum expand_modifier modifier;
4732 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4733 This is static so it will be accessible to our recursive callees. */
4734 static tree placeholder_list = 0;
4735 register rtx op0, op1, temp;
4736 tree type = TREE_TYPE (exp);
4737 int unsignedp = TREE_UNSIGNED (type);
4738 register enum machine_mode mode = TYPE_MODE (type);
4739 register enum tree_code code = TREE_CODE (exp);
4741 /* Use subtarget as the target for operand 0 of a binary operation. */
4742 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4743 rtx original_target = target;
4744 /* Maybe defer this until sure not doing bytecode? */
4745 int ignore = (target == const0_rtx
4746 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4747 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4748 || code == COND_EXPR)
4749 && TREE_CODE (type) == VOID_TYPE));
4753 if (output_bytecode && modifier != EXPAND_INITIALIZER)
4755 bc_expand_expr (exp);
4759 /* Don't use hard regs as subtargets, because the combiner
4760 can only handle pseudo regs. */
4761 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4763 /* Avoid subtargets inside loops,
4764 since they hide some invariant expressions. */
4765 if (preserve_subexpressions_p ())
4768 /* If we are going to ignore this result, we need only do something
4769 if there is a side-effect somewhere in the expression. If there
4770 is, short-circuit the most common cases here. Note that we must
4771 not call expand_expr with anything but const0_rtx in case this
4772 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
4776 if (! TREE_SIDE_EFFECTS (exp))
4779 /* Ensure we reference a volatile object even if value is ignored. */
4780 if (TREE_THIS_VOLATILE (exp)
4781 && TREE_CODE (exp) != FUNCTION_DECL
4782 && mode != VOIDmode && mode != BLKmode)
4784 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
4785 if (GET_CODE (temp) == MEM)
4786 temp = copy_to_reg (temp);
4790 if (TREE_CODE_CLASS (code) == '1')
4791 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4792 VOIDmode, modifier);
4793 else if (TREE_CODE_CLASS (code) == '2'
4794 || TREE_CODE_CLASS (code) == '<')
4796 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4797 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
4800 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4801 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4802 /* If the second operand has no side effects, just evaluate
4804 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4805 VOIDmode, modifier);
4810 /* If will do cse, generate all results into pseudo registers
4811 since 1) that allows cse to find more things
4812 and 2) otherwise cse could produce an insn the machine
4815 if (! cse_not_expected && mode != BLKmode && target
4816 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4823 tree function = decl_function_context (exp);
4824 /* Handle using a label in a containing function. */
4825 if (function != current_function_decl && function != 0)
4827 struct function *p = find_function_data (function);
4828 /* Allocate in the memory associated with the function
4829 that the label is in. */
4830 push_obstacks (p->function_obstack,
4831 p->function_maybepermanent_obstack);
4833 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4834 label_rtx (exp), p->forced_labels);
4837 else if (modifier == EXPAND_INITIALIZER)
4838 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4839 label_rtx (exp), forced_labels);
4840 temp = gen_rtx (MEM, FUNCTION_MODE,
4841 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
4842 if (function != current_function_decl && function != 0)
4843 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
4848 if (DECL_RTL (exp) == 0)
4850 error_with_decl (exp, "prior parameter's size depends on `%s'");
4851 return CONST0_RTX (mode);
4854 /* ... fall through ... */
4857 /* If a static var's type was incomplete when the decl was written,
4858 but the type is complete now, lay out the decl now. */
4859 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4860 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
4862 push_obstacks_nochange ();
4863 end_temporary_allocation ();
4864 layout_decl (exp, 0);
4865 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
4869 /* ... fall through ... */
4873 if (DECL_RTL (exp) == 0)
4876 /* Ensure variable marked as used even if it doesn't go through
4877 a parser. If it hasn't be used yet, write out an external
4879 if (! TREE_USED (exp))
4881 assemble_external (exp);
4882 TREE_USED (exp) = 1;
4885 /* Show we haven't gotten RTL for this yet. */
4888 /* Handle variables inherited from containing functions. */
4889 context = decl_function_context (exp);
4891 /* We treat inline_function_decl as an alias for the current function
4892 because that is the inline function whose vars, types, etc.
4893 are being merged into the current function.
4894 See expand_inline_function. */
4896 if (context != 0 && context != current_function_decl
4897 && context != inline_function_decl
4898 /* If var is static, we don't need a static chain to access it. */
4899 && ! (GET_CODE (DECL_RTL (exp)) == MEM
4900 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
4904 /* Mark as non-local and addressable. */
4905 DECL_NONLOCAL (exp) = 1;
4906 if (DECL_NO_STATIC_CHAIN (current_function_decl))
4908 mark_addressable (exp);
4909 if (GET_CODE (DECL_RTL (exp)) != MEM)
4911 addr = XEXP (DECL_RTL (exp), 0);
4912 if (GET_CODE (addr) == MEM)
4913 addr = gen_rtx (MEM, Pmode,
4914 fix_lexical_addr (XEXP (addr, 0), exp));
4916 addr = fix_lexical_addr (addr, exp);
4917 temp = change_address (DECL_RTL (exp), mode, addr);
4920 /* This is the case of an array whose size is to be determined
4921 from its initializer, while the initializer is still being parsed.
4924 else if (GET_CODE (DECL_RTL (exp)) == MEM
4925 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
4926 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
4927 XEXP (DECL_RTL (exp), 0));
4929 /* If DECL_RTL is memory, we are in the normal case and either
4930 the address is not valid or it is not a register and -fforce-addr
4931 is specified, get the address into a register. */
4933 else if (GET_CODE (DECL_RTL (exp)) == MEM
4934 && modifier != EXPAND_CONST_ADDRESS
4935 && modifier != EXPAND_SUM
4936 && modifier != EXPAND_INITIALIZER
4937 && (! memory_address_p (DECL_MODE (exp),
4938 XEXP (DECL_RTL (exp), 0))
4940 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
4941 temp = change_address (DECL_RTL (exp), VOIDmode,
4942 copy_rtx (XEXP (DECL_RTL (exp), 0)));
4944 /* If we got something, return it. But first, set the alignment
4945 the address is a register. */
4948 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
4949 mark_reg_pointer (XEXP (temp, 0),
4950 DECL_ALIGN (exp) / BITS_PER_UNIT);
4955 /* If the mode of DECL_RTL does not match that of the decl, it
4956 must be a promoted value. We return a SUBREG of the wanted mode,
4957 but mark it so that we know that it was already extended. */
4959 if (GET_CODE (DECL_RTL (exp)) == REG
4960 && GET_MODE (DECL_RTL (exp)) != mode)
4962 /* Get the signedness used for this variable. Ensure we get the
4963 same mode we got when the variable was declared. */
4964 if (GET_MODE (DECL_RTL (exp))
4965 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
4968 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
4969 SUBREG_PROMOTED_VAR_P (temp) = 1;
4970 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4974 return DECL_RTL (exp);
4977 return immed_double_const (TREE_INT_CST_LOW (exp),
4978 TREE_INT_CST_HIGH (exp),
4982 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
4985 /* If optimized, generate immediate CONST_DOUBLE
4986 which will be turned into memory by reload if necessary.
4988 We used to force a register so that loop.c could see it. But
4989 this does not allow gen_* patterns to perform optimizations with
4990 the constants. It also produces two insns in cases like "x = 1.0;".
4991 On most machines, floating-point constants are not permitted in
4992 many insns, so we'd end up copying it to a register in any case.
4994 Now, we do the copying in expand_binop, if appropriate. */
4995 return immed_real_const (exp);
4999 if (! TREE_CST_RTL (exp))
5000 output_constant_def (exp);
5002 /* TREE_CST_RTL probably contains a constant address.
5003 On RISC machines where a constant address isn't valid,
5004 make some insns to get that address into a register. */
5005 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5006 && modifier != EXPAND_CONST_ADDRESS
5007 && modifier != EXPAND_INITIALIZER
5008 && modifier != EXPAND_SUM
5009 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5011 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5012 return change_address (TREE_CST_RTL (exp), VOIDmode,
5013 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5014 return TREE_CST_RTL (exp);
5017 context = decl_function_context (exp);
5019 /* We treat inline_function_decl as an alias for the current function
5020 because that is the inline function whose vars, types, etc.
5021 are being merged into the current function.
5022 See expand_inline_function. */
5023 if (context == current_function_decl || context == inline_function_decl)
5026 /* If this is non-local, handle it. */
5029 temp = SAVE_EXPR_RTL (exp);
5030 if (temp && GET_CODE (temp) == REG)
5032 put_var_into_stack (exp);
5033 temp = SAVE_EXPR_RTL (exp);
5035 if (temp == 0 || GET_CODE (temp) != MEM)
5037 return change_address (temp, mode,
5038 fix_lexical_addr (XEXP (temp, 0), exp));
5040 if (SAVE_EXPR_RTL (exp) == 0)
5042 if (mode == VOIDmode)
5045 temp = assign_temp (type, 0, 0, 0);
5047 SAVE_EXPR_RTL (exp) = temp;
5048 if (!optimize && GET_CODE (temp) == REG)
5049 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
5052 /* If the mode of TEMP does not match that of the expression, it
5053 must be a promoted value. We pass store_expr a SUBREG of the
5054 wanted mode but mark it so that we know that it was already
5055 extended. Note that `unsignedp' was modified above in
5058 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5060 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
5061 SUBREG_PROMOTED_VAR_P (temp) = 1;
5062 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5065 if (temp == const0_rtx)
5066 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5068 store_expr (TREE_OPERAND (exp, 0), temp, 0);
5071 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5072 must be a promoted value. We return a SUBREG of the wanted mode,
5073 but mark it so that we know that it was already extended. */
5075 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5076 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5078 /* Compute the signedness and make the proper SUBREG. */
5079 promote_mode (type, mode, &unsignedp, 0);
5080 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
5081 SUBREG_PROMOTED_VAR_P (temp) = 1;
5082 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5086 return SAVE_EXPR_RTL (exp);
5091 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5092 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5096 case PLACEHOLDER_EXPR:
5097 /* If there is an object on the head of the placeholder list,
5098 see if some object in it's references is of type TYPE. For
5099 further information, see tree.def. */
5100 if (placeholder_list)
5103 tree old_list = placeholder_list;
5105 for (object = TREE_PURPOSE (placeholder_list);
5106 (TYPE_MAIN_VARIANT (TREE_TYPE (object))
5107 != TYPE_MAIN_VARIANT (type))
5108 && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
5109 || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
5110 || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
5111 || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
5112 object = TREE_OPERAND (object, 0))
5116 && (TYPE_MAIN_VARIANT (TREE_TYPE (object))
5117 == TYPE_MAIN_VARIANT (type)))
5119 /* Expand this object skipping the list entries before
5120 it was found in case it is also a PLACEHOLDER_EXPR.
5121 In that case, we want to translate it using subsequent
5123 placeholder_list = TREE_CHAIN (placeholder_list);
5124 temp = expand_expr (object, original_target, tmode, modifier);
5125 placeholder_list = old_list;
5130 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5133 case WITH_RECORD_EXPR:
5134 /* Put the object on the placeholder list, expand our first operand,
5135 and pop the list. */
5136 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5138 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
5140 placeholder_list = TREE_CHAIN (placeholder_list);
5144 expand_exit_loop_if_false (NULL_PTR,
5145 invert_truthvalue (TREE_OPERAND (exp, 0)));
5150 expand_start_loop (1);
5151 expand_expr_stmt (TREE_OPERAND (exp, 0));
5159 tree vars = TREE_OPERAND (exp, 0);
5160 int vars_need_expansion = 0;
5162 /* Need to open a binding contour here because
5163 if there are any cleanups they most be contained here. */
5164 expand_start_bindings (0);
5166 /* Mark the corresponding BLOCK for output in its proper place. */
5167 if (TREE_OPERAND (exp, 2) != 0
5168 && ! TREE_USED (TREE_OPERAND (exp, 2)))
5169 insert_block (TREE_OPERAND (exp, 2));
5171 /* If VARS have not yet been expanded, expand them now. */
5174 if (DECL_RTL (vars) == 0)
5176 vars_need_expansion = 1;
5179 expand_decl_init (vars);
5180 vars = TREE_CHAIN (vars);
5183 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
5185 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
5191 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
5193 emit_insns (RTL_EXPR_SEQUENCE (exp));
5194 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
5195 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
5196 free_temps_for_rtl_expr (exp);
5197 return RTL_EXPR_RTL (exp);
5200 /* If we don't need the result, just ensure we evaluate any
5205 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
5206 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
5210 /* All elts simple constants => refer to a constant in memory. But
5211 if this is a non-BLKmode mode, let it store a field at a time
5212 since that should make a CONST_INT or CONST_DOUBLE when we
5213 fold. Likewise, if we have a target we can use, it is best to
5214 store directly into the target unless the type is large enough
5215 that memcpy will be used. If we are making an initializer and
5216 all operands are constant, put it in memory as well. */
5217 else if ((TREE_STATIC (exp)
5218 && ((mode == BLKmode
5219 && ! (target != 0 && safe_from_p (target, exp)))
5220 || TREE_ADDRESSABLE (exp)
5221 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5222 && (move_by_pieces_ninsns
5223 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
5224 TYPE_ALIGN (type) / BITS_PER_UNIT)
5226 && ! mostly_zeros_p (exp))))
5227 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
5229 rtx constructor = output_constant_def (exp);
5230 if (modifier != EXPAND_CONST_ADDRESS
5231 && modifier != EXPAND_INITIALIZER
5232 && modifier != EXPAND_SUM
5233 && (! memory_address_p (GET_MODE (constructor),
5234 XEXP (constructor, 0))
5236 && GET_CODE (XEXP (constructor, 0)) != REG)))
5237 constructor = change_address (constructor, VOIDmode,
5238 XEXP (constructor, 0));
5244 /* Handle calls that pass values in multiple non-contiguous
5245 locations. The Irix 6 ABI has examples of this. */
5246 if (target == 0 || ! safe_from_p (target, exp)
5247 || GET_CODE (target) == PARALLEL)
5249 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
5250 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5252 target = assign_temp (type, 0, 1, 1);
5255 if (TREE_READONLY (exp))
5257 if (GET_CODE (target) == MEM)
5258 target = change_address (target, GET_MODE (target),
5260 RTX_UNCHANGING_P (target) = 1;
5263 store_constructor (exp, target, 0);
5269 tree exp1 = TREE_OPERAND (exp, 0);
5272 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
5273 op0 = memory_address (mode, op0);
5275 temp = gen_rtx (MEM, mode, op0);
5276 /* If address was computed by addition,
5277 mark this as an element of an aggregate. */
5278 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5279 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
5280 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
5281 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
5282 || (TREE_CODE (exp1) == ADDR_EXPR
5283 && (exp2 = TREE_OPERAND (exp1, 0))
5284 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
5285 MEM_IN_STRUCT_P (temp) = 1;
5286 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
5288 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5289 here, because, in C and C++, the fact that a location is accessed
5290 through a pointer to const does not mean that the value there can
5291 never change. Languages where it can never change should
5292 also set TREE_STATIC. */
5293 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
5298 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
5302 tree array = TREE_OPERAND (exp, 0);
5303 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5304 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5305 tree index = TREE_OPERAND (exp, 1);
5306 tree index_type = TREE_TYPE (index);
5309 if (TREE_CODE (low_bound) != INTEGER_CST
5310 && contains_placeholder_p (low_bound))
5311 low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
5313 /* Optimize the special-case of a zero lower bound.
5315 We convert the low_bound to sizetype to avoid some problems
5316 with constant folding. (E.g. suppose the lower bound is 1,
5317 and its mode is QI. Without the conversion, (ARRAY
5318 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5319 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5321 But sizetype isn't quite right either (especially if
5322 the lowbound is negative). FIXME */
5324 if (! integer_zerop (low_bound))
5325 index = fold (build (MINUS_EXPR, index_type, index,
5326 convert (sizetype, low_bound)));
5328 if ((TREE_CODE (index) != INTEGER_CST
5329 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5330 && (! SLOW_UNALIGNED_ACCESS || ! get_inner_unaligned_p (exp)))
5332 /* Nonconstant array index or nonconstant element size, and
5333 not an array in an unaligned (packed) structure field.
5334 Generate the tree for *(&array+index) and expand that,
5335 except do it in a language-independent way
5336 and don't complain about non-lvalue arrays.
5337 `mark_addressable' should already have been called
5338 for any array for which this case will be reached. */
5340 /* Don't forget the const or volatile flag from the array
5342 tree variant_type = build_type_variant (type,
5343 TREE_READONLY (exp),
5344 TREE_THIS_VOLATILE (exp));
5345 tree array_adr = build1 (ADDR_EXPR,
5346 build_pointer_type (variant_type), array);
5348 tree size = size_in_bytes (type);
5350 /* Convert the integer argument to a type the same size as sizetype
5351 so the multiply won't overflow spuriously. */
5352 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
5353 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
5356 if (TREE_CODE (size) != INTEGER_CST
5357 && contains_placeholder_p (size))
5358 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
5360 /* Don't think the address has side effects
5361 just because the array does.
5362 (In some cases the address might have side effects,
5363 and we fail to record that fact here. However, it should not
5364 matter, since expand_expr should not care.) */
5365 TREE_SIDE_EFFECTS (array_adr) = 0;
5369 (INDIRECT_REF, type,
5370 fold (build (PLUS_EXPR,
5371 TYPE_POINTER_TO (variant_type),
5376 TYPE_POINTER_TO (variant_type),
5377 fold (build (MULT_EXPR, TREE_TYPE (index),
5379 convert (TREE_TYPE (index),
5382 /* Volatility, etc., of new expression is same as old
5384 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
5385 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
5386 TREE_READONLY (elt) = TREE_READONLY (exp);
5388 return expand_expr (elt, target, tmode, modifier);
5391 /* Fold an expression like: "foo"[2].
5392 This is not done in fold so it won't happen inside &.
5393 Don't fold if this is for wide characters since it's too
5394 difficult to do correctly and this is a very rare case. */
5396 if (TREE_CODE (array) == STRING_CST
5397 && TREE_CODE (index) == INTEGER_CST
5398 && !TREE_INT_CST_HIGH (index)
5399 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
5400 && GET_MODE_CLASS (mode) == MODE_INT
5401 && GET_MODE_SIZE (mode) == 1)
5402 return GEN_INT (TREE_STRING_POINTER (array)[i]);
5404 /* If this is a constant index into a constant array,
5405 just get the value from the array. Handle both the cases when
5406 we have an explicit constructor and when our operand is a variable
5407 that was declared const. */
5409 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
5411 if (TREE_CODE (index) == INTEGER_CST
5412 && TREE_INT_CST_HIGH (index) == 0)
5414 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
5416 i = TREE_INT_CST_LOW (index);
5418 elem = TREE_CHAIN (elem);
5420 return expand_expr (fold (TREE_VALUE (elem)), target,
5425 else if (optimize >= 1
5426 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
5427 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
5428 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
5430 if (TREE_CODE (index) == INTEGER_CST
5431 && TREE_INT_CST_HIGH (index) == 0)
5433 tree init = DECL_INITIAL (array);
5435 i = TREE_INT_CST_LOW (index);
5436 if (TREE_CODE (init) == CONSTRUCTOR)
5438 tree elem = CONSTRUCTOR_ELTS (init);
5441 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
5442 elem = TREE_CHAIN (elem);
5444 return expand_expr (fold (TREE_VALUE (elem)), target,
5447 else if (TREE_CODE (init) == STRING_CST
5448 && i < TREE_STRING_LENGTH (init))
5449 return GEN_INT (TREE_STRING_POINTER (init)[i]);
5454 /* Treat array-ref with constant index as a component-ref. */
5458 /* If the operand is a CONSTRUCTOR, we can just extract the
5459 appropriate field if it is present. Don't do this if we have
5460 already written the data since we want to refer to that copy
5461 and varasm.c assumes that's what we'll do. */
5462 if (code != ARRAY_REF
5463 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
5464 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
5468 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
5469 elt = TREE_CHAIN (elt))
5470 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
5471 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
5475 enum machine_mode mode1;
5481 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
5482 &mode1, &unsignedp, &volatilep,
5485 /* If we got back the original object, something is wrong. Perhaps
5486 we are evaluating an expression too early. In any event, don't
5487 infinitely recurse. */
5491 /* If TEM's type is a union of variable size, pass TARGET to the inner
5492 computation, since it will need a temporary and TARGET is known
5493 to have to do. This occurs in unchecked conversion in Ada. */
5495 op0 = expand_expr (tem,
5496 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
5497 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
5499 ? target : NULL_RTX),
5501 modifier == EXPAND_INITIALIZER ? modifier : 0);
5503 /* If this is a constant, put it into a register if it is a
5504 legitimate constant and memory if it isn't. */
5505 if (CONSTANT_P (op0))
5507 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
5508 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
5509 op0 = force_reg (mode, op0);
5511 op0 = validize_mem (force_const_mem (mode, op0));
5516 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5518 if (GET_CODE (op0) != MEM)
5520 op0 = change_address (op0, VOIDmode,
5521 gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
5522 force_reg (ptr_mode, offset_rtx)));
5525 /* Don't forget about volatility even if this is a bitfield. */
5526 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
5528 op0 = copy_rtx (op0);
5529 MEM_VOLATILE_P (op0) = 1;
5532 /* In cases where an aligned union has an unaligned object
5533 as a field, we might be extracting a BLKmode value from
5534 an integer-mode (e.g., SImode) object. Handle this case
5535 by doing the extract into an object as wide as the field
5536 (which we know to be the width of a basic mode), then
5537 storing into memory, and changing the mode to BLKmode.
5538 If we ultimately want the address (EXPAND_CONST_ADDRESS or
5539 EXPAND_INITIALIZER), then we must not copy to a temporary. */
5540 if (mode1 == VOIDmode
5541 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5542 || (modifier != EXPAND_CONST_ADDRESS
5543 && modifier != EXPAND_INITIALIZER
5544 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
5545 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5546 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5547 /* If the field isn't aligned enough to fetch as a memref,
5548 fetch it as a bit field. */
5549 || (SLOW_UNALIGNED_ACCESS
5550 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
5551 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
5553 enum machine_mode ext_mode = mode;
5555 if (ext_mode == BLKmode)
5556 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
5558 if (ext_mode == BLKmode)
5560 /* In this case, BITPOS must start at a byte boundary and
5561 TARGET, if specified, must be a MEM. */
5562 if (GET_CODE (op0) != MEM
5563 || (target != 0 && GET_CODE (target) != MEM)
5564 || bitpos % BITS_PER_UNIT != 0)
5567 op0 = change_address (op0, VOIDmode,
5568 plus_constant (XEXP (op0, 0),
5569 bitpos / BITS_PER_UNIT));
5571 target = assign_temp (type, 0, 1, 1);
5573 emit_block_move (target, op0,
5574 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5581 op0 = validize_mem (op0);
5583 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
5584 mark_reg_pointer (XEXP (op0, 0), alignment);
5586 op0 = extract_bit_field (op0, bitsize, bitpos,
5587 unsignedp, target, ext_mode, ext_mode,
5589 int_size_in_bytes (TREE_TYPE (tem)));
5591 /* If the result is a record type and BITSIZE is narrower than
5592 the mode of OP0, an integral mode, and this is a big endian
5593 machine, we must put the field into the high-order bits. */
5594 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
5595 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
5596 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
5597 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
5598 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
5602 if (mode == BLKmode)
5604 rtx new = assign_stack_temp (ext_mode,
5605 bitsize / BITS_PER_UNIT, 0);
5607 emit_move_insn (new, op0);
5608 op0 = copy_rtx (new);
5609 PUT_MODE (op0, BLKmode);
5610 MEM_IN_STRUCT_P (op0) = 1;
5616 /* If the result is BLKmode, use that to access the object
5618 if (mode == BLKmode)
5621 /* Get a reference to just this component. */
5622 if (modifier == EXPAND_CONST_ADDRESS
5623 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5624 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
5625 (bitpos / BITS_PER_UNIT)));
5627 op0 = change_address (op0, mode1,
5628 plus_constant (XEXP (op0, 0),
5629 (bitpos / BITS_PER_UNIT)));
5630 if (GET_CODE (XEXP (op0, 0)) == REG)
5631 mark_reg_pointer (XEXP (op0, 0), alignment);
5633 MEM_IN_STRUCT_P (op0) = 1;
5634 MEM_VOLATILE_P (op0) |= volatilep;
5635 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
5638 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5639 convert_move (target, op0, unsignedp);
5643 /* Intended for a reference to a buffer of a file-object in Pascal.
5644 But it's not certain that a special tree code will really be
5645 necessary for these. INDIRECT_REF might work for them. */
5651 /* Pascal set IN expression.
5654 rlo = set_low - (set_low%bits_per_word);
5655 the_word = set [ (index - rlo)/bits_per_word ];
5656 bit_index = index % bits_per_word;
5657 bitmask = 1 << bit_index;
5658 return !!(the_word & bitmask); */
5660 tree set = TREE_OPERAND (exp, 0);
5661 tree index = TREE_OPERAND (exp, 1);
5662 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
5663 tree set_type = TREE_TYPE (set);
5664 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
5665 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
5666 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
5667 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
5668 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
5669 rtx setval = expand_expr (set, 0, VOIDmode, 0);
5670 rtx setaddr = XEXP (setval, 0);
5671 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
5673 rtx diff, quo, rem, addr, bit, result;
5675 preexpand_calls (exp);
5677 /* If domain is empty, answer is no. Likewise if index is constant
5678 and out of bounds. */
5679 if ((TREE_CODE (set_high_bound) == INTEGER_CST
5680 && TREE_CODE (set_low_bound) == INTEGER_CST
5681 && tree_int_cst_lt (set_high_bound, set_low_bound)
5682 || (TREE_CODE (index) == INTEGER_CST
5683 && TREE_CODE (set_low_bound) == INTEGER_CST
5684 && tree_int_cst_lt (index, set_low_bound))
5685 || (TREE_CODE (set_high_bound) == INTEGER_CST
5686 && TREE_CODE (index) == INTEGER_CST
5687 && tree_int_cst_lt (set_high_bound, index))))
5691 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5693 /* If we get here, we have to generate the code for both cases
5694 (in range and out of range). */
5696 op0 = gen_label_rtx ();
5697 op1 = gen_label_rtx ();
5699 if (! (GET_CODE (index_val) == CONST_INT
5700 && GET_CODE (lo_r) == CONST_INT))
5702 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
5703 GET_MODE (index_val), iunsignedp, 0);
5704 emit_jump_insn (gen_blt (op1));
5707 if (! (GET_CODE (index_val) == CONST_INT
5708 && GET_CODE (hi_r) == CONST_INT))
5710 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
5711 GET_MODE (index_val), iunsignedp, 0);
5712 emit_jump_insn (gen_bgt (op1));
5715 /* Calculate the element number of bit zero in the first word
5717 if (GET_CODE (lo_r) == CONST_INT)
5718 rlow = GEN_INT (INTVAL (lo_r)
5719 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
5721 rlow = expand_binop (index_mode, and_optab, lo_r,
5722 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
5723 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5725 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
5726 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5728 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
5729 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5730 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
5731 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5733 addr = memory_address (byte_mode,
5734 expand_binop (index_mode, add_optab, diff,
5735 setaddr, NULL_RTX, iunsignedp,
5738 /* Extract the bit we want to examine */
5739 bit = expand_shift (RSHIFT_EXPR, byte_mode,
5740 gen_rtx (MEM, byte_mode, addr),
5741 make_tree (TREE_TYPE (index), rem),
5743 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
5744 GET_MODE (target) == byte_mode ? target : 0,
5745 1, OPTAB_LIB_WIDEN);
5747 if (result != target)
5748 convert_move (target, result, 1);
5750 /* Output the code to handle the out-of-range case. */
5753 emit_move_insn (target, const0_rtx);
5758 case WITH_CLEANUP_EXPR:
5759 if (RTL_EXPR_RTL (exp) == 0)
5762 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5764 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
5765 /* That's it for this cleanup. */
5766 TREE_OPERAND (exp, 2) = 0;
5767 expand_eh_region_start ();
5769 return RTL_EXPR_RTL (exp);
5771 case CLEANUP_POINT_EXPR:
5773 extern int temp_slot_level;
5774 tree old_cleanups = cleanups_this_call;
5775 int old_temp_level = target_temp_slot_level;
5777 target_temp_slot_level = temp_slot_level;
5778 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5779 /* If we're going to use this value, load it up now. */
5781 op0 = force_not_mem (op0);
5782 expand_cleanups_to (old_cleanups);
5783 preserve_temp_slots (op0);
5786 target_temp_slot_level = old_temp_level;
5791 /* Check for a built-in function. */
5792 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5793 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5795 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5796 return expand_builtin (exp, target, subtarget, tmode, ignore);
5798 /* If this call was expanded already by preexpand_calls,
5799 just return the result we got. */
5800 if (CALL_EXPR_RTL (exp) != 0)
5801 return CALL_EXPR_RTL (exp);
5803 return expand_call (exp, target, ignore);
5805 case NON_LVALUE_EXPR:
5808 case REFERENCE_EXPR:
5809 if (TREE_CODE (type) == UNION_TYPE)
5811 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
5814 if (mode != BLKmode)
5815 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5817 target = assign_temp (type, 0, 1, 1);
5820 if (GET_CODE (target) == MEM)
5821 /* Store data into beginning of memory target. */
5822 store_expr (TREE_OPERAND (exp, 0),
5823 change_address (target, TYPE_MODE (valtype), 0), 0);
5825 else if (GET_CODE (target) == REG)
5826 /* Store this field into a union of the proper type. */
5827 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
5828 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
5830 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
5834 /* Return the entire union. */
5838 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5840 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
5843 /* If the signedness of the conversion differs and OP0 is
5844 a promoted SUBREG, clear that indication since we now
5845 have to do the proper extension. */
5846 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
5847 && GET_CODE (op0) == SUBREG)
5848 SUBREG_PROMOTED_VAR_P (op0) = 0;
5853 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
5854 if (GET_MODE (op0) == mode)
5857 /* If OP0 is a constant, just convert it into the proper mode. */
5858 if (CONSTANT_P (op0))
5860 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5861 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5863 if (modifier == EXPAND_INITIALIZER)
5864 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
5868 convert_to_mode (mode, op0,
5869 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5871 convert_move (target, op0,
5872 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5876 /* We come here from MINUS_EXPR when the second operand is a
5879 this_optab = add_optab;
5881 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
5882 something else, make sure we add the register to the constant and
5883 then to the other thing. This case can occur during strength
5884 reduction and doing it this way will produce better code if the
5885 frame pointer or argument pointer is eliminated.
5887 fold-const.c will ensure that the constant is always in the inner
5888 PLUS_EXPR, so the only case we need to do anything about is if
5889 sp, ap, or fp is our second argument, in which case we must swap
5890 the innermost first argument and our second argument. */
5892 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5893 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
5894 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
5895 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
5896 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
5897 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
5899 tree t = TREE_OPERAND (exp, 1);
5901 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5902 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
5905 /* If the result is to be ptr_mode and we are adding an integer to
5906 something, we might be forming a constant. So try to use
5907 plus_constant. If it produces a sum and we can't accept it,
5908 use force_operand. This allows P = &ARR[const] to generate
5909 efficient code on machines where a SYMBOL_REF is not a valid
5912 If this is an EXPAND_SUM call, always return the sum. */
5913 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
5914 || mode == ptr_mode)
5916 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
5917 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5918 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
5920 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
5922 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
5923 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5924 op1 = force_operand (op1, target);
5928 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5929 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
5930 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
5932 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
5934 if (! CONSTANT_P (op0))
5936 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5937 VOIDmode, modifier);
5938 /* Don't go to both_summands if modifier
5939 says it's not right to return a PLUS. */
5940 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5944 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
5945 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5946 op0 = force_operand (op0, target);
5951 /* No sense saving up arithmetic to be done
5952 if it's all in the wrong mode to form part of an address.
5953 And force_operand won't know whether to sign-extend or
5955 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5956 || mode != ptr_mode)
5959 preexpand_calls (exp);
5960 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5963 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
5964 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
5967 /* Make sure any term that's a sum with a constant comes last. */
5968 if (GET_CODE (op0) == PLUS
5969 && CONSTANT_P (XEXP (op0, 1)))
5975 /* If adding to a sum including a constant,
5976 associate it to put the constant outside. */
5977 if (GET_CODE (op1) == PLUS
5978 && CONSTANT_P (XEXP (op1, 1)))
5980 rtx constant_term = const0_rtx;
5982 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
5985 /* Ensure that MULT comes first if there is one. */
5986 else if (GET_CODE (op0) == MULT)
5987 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
5989 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
5991 /* Let's also eliminate constants from op0 if possible. */
5992 op0 = eliminate_constant_term (op0, &constant_term);
5994 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
5995 their sum should be a constant. Form it into OP1, since the
5996 result we want will then be OP0 + OP1. */
5998 temp = simplify_binary_operation (PLUS, mode, constant_term,
6003 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
6006 /* Put a constant term last and put a multiplication first. */
6007 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
6008 temp = op1, op1 = op0, op0 = temp;
6010 temp = simplify_binary_operation (PLUS, mode, op0, op1);
6011 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
6014 /* For initializers, we are allowed to return a MINUS of two
6015 symbolic constants. Here we handle all cases when both operands
6017 /* Handle difference of two symbolic constants,
6018 for the sake of an initializer. */
6019 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6020 && really_constant_p (TREE_OPERAND (exp, 0))
6021 && really_constant_p (TREE_OPERAND (exp, 1)))
6023 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
6024 VOIDmode, modifier);
6025 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6026 VOIDmode, modifier);
6028 /* If the last operand is a CONST_INT, use plus_constant of
6029 the negated constant. Else make the MINUS. */
6030 if (GET_CODE (op1) == CONST_INT)
6031 return plus_constant (op0, - INTVAL (op1));
6033 return gen_rtx (MINUS, mode, op0, op1);
6035 /* Convert A - const to A + (-const). */
6036 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6038 tree negated = fold (build1 (NEGATE_EXPR, type,
6039 TREE_OPERAND (exp, 1)));
6041 /* Deal with the case where we can't negate the constant
6043 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6045 tree newtype = signed_type (type);
6046 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
6047 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
6048 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
6050 if (! TREE_OVERFLOW (newneg))
6051 return expand_expr (convert (type,
6052 build (PLUS_EXPR, newtype,
6054 target, tmode, modifier);
6058 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
6062 this_optab = sub_optab;
6066 preexpand_calls (exp);
6067 /* If first operand is constant, swap them.
6068 Thus the following special case checks need only
6069 check the second operand. */
6070 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6072 register tree t1 = TREE_OPERAND (exp, 0);
6073 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
6074 TREE_OPERAND (exp, 1) = t1;
6077 /* Attempt to return something suitable for generating an
6078 indexed address, for machines that support that. */
6080 if (modifier == EXPAND_SUM && mode == ptr_mode
6081 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6082 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
6084 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
6086 /* Apply distributive law if OP0 is x+c. */
6087 if (GET_CODE (op0) == PLUS
6088 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
6089 return gen_rtx (PLUS, mode,
6090 gen_rtx (MULT, mode, XEXP (op0, 0),
6091 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
6092 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
6093 * INTVAL (XEXP (op0, 1))));
6095 if (GET_CODE (op0) != REG)
6096 op0 = force_operand (op0, NULL_RTX);
6097 if (GET_CODE (op0) != REG)
6098 op0 = copy_to_mode_reg (mode, op0);
6100 return gen_rtx (MULT, mode, op0,
6101 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
6104 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6107 /* Check for multiplying things that have been extended
6108 from a narrower type. If this machine supports multiplying
6109 in that narrower type with a result in the desired type,
6110 do it that way, and avoid the explicit type-conversion. */
6111 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
6112 && TREE_CODE (type) == INTEGER_TYPE
6113 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6114 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
6115 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6116 && int_fits_type_p (TREE_OPERAND (exp, 1),
6117 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6118 /* Don't use a widening multiply if a shift will do. */
6119 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
6120 > HOST_BITS_PER_WIDE_INT)
6121 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
6123 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6124 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6126 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
6127 /* If both operands are extended, they must either both
6128 be zero-extended or both be sign-extended. */
6129 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6131 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
6133 enum machine_mode innermode
6134 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
6135 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6136 ? smul_widen_optab : umul_widen_optab);
6137 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6138 ? umul_widen_optab : smul_widen_optab);
6139 if (mode == GET_MODE_WIDER_MODE (innermode))
6141 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
6143 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6144 NULL_RTX, VOIDmode, 0);
6145 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6146 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6149 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6150 NULL_RTX, VOIDmode, 0);
6153 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6154 && innermode == word_mode)
6157 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6158 NULL_RTX, VOIDmode, 0);
6159 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6160 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6163 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6164 NULL_RTX, VOIDmode, 0);
6165 temp = expand_binop (mode, other_optab, op0, op1, target,
6166 unsignedp, OPTAB_LIB_WIDEN);
6167 htem = expand_mult_highpart_adjust (innermode,
6168 gen_highpart (innermode, temp),
6170 gen_highpart (innermode, temp),
6172 emit_move_insn (gen_highpart (innermode, temp), htem);
6177 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6178 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6179 return expand_mult (mode, op0, op1, target, unsignedp);
6181 case TRUNC_DIV_EXPR:
6182 case FLOOR_DIV_EXPR:
6184 case ROUND_DIV_EXPR:
6185 case EXACT_DIV_EXPR:
6186 preexpand_calls (exp);
6187 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6189 /* Possible optimization: compute the dividend with EXPAND_SUM
6190 then if the divisor is constant can optimize the case
6191 where some terms of the dividend have coeffs divisible by it. */
6192 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6193 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6194 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
6197 this_optab = flodiv_optab;
6200 case TRUNC_MOD_EXPR:
6201 case FLOOR_MOD_EXPR:
6203 case ROUND_MOD_EXPR:
6204 preexpand_calls (exp);
6205 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6207 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6208 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6209 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
6211 case FIX_ROUND_EXPR:
6212 case FIX_FLOOR_EXPR:
6214 abort (); /* Not used for C. */
6216 case FIX_TRUNC_EXPR:
6217 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6219 target = gen_reg_rtx (mode);
6220 expand_fix (target, op0, unsignedp);
6224 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6226 target = gen_reg_rtx (mode);
6227 /* expand_float can't figure out what to do if FROM has VOIDmode.
6228 So give it the correct mode. With -O, cse will optimize this. */
6229 if (GET_MODE (op0) == VOIDmode)
6230 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6232 expand_float (target, op0,
6233 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6237 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6238 temp = expand_unop (mode, neg_optab, op0, target, 0);
6244 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6246 /* Handle complex values specially. */
6247 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
6248 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6249 return expand_complex_abs (mode, op0, target, unsignedp);
6251 /* Unsigned abs is simply the operand. Testing here means we don't
6252 risk generating incorrect code below. */
6253 if (TREE_UNSIGNED (type))
6256 return expand_abs (mode, op0, target, unsignedp,
6257 safe_from_p (target, TREE_OPERAND (exp, 0)));
6261 target = original_target;
6262 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
6263 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
6264 || GET_MODE (target) != mode
6265 || (GET_CODE (target) == REG
6266 && REGNO (target) < FIRST_PSEUDO_REGISTER))
6267 target = gen_reg_rtx (mode);
6268 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6269 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6271 /* First try to do it with a special MIN or MAX instruction.
6272 If that does not win, use a conditional jump to select the proper
6274 this_optab = (TREE_UNSIGNED (type)
6275 ? (code == MIN_EXPR ? umin_optab : umax_optab)
6276 : (code == MIN_EXPR ? smin_optab : smax_optab));
6278 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
6283 /* At this point, a MEM target is no longer useful; we will get better
6286 if (GET_CODE (target) == MEM)
6287 target = gen_reg_rtx (mode);
6290 emit_move_insn (target, op0);
6292 op0 = gen_label_rtx ();
6294 /* If this mode is an integer too wide to compare properly,
6295 compare word by word. Rely on cse to optimize constant cases. */
6296 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
6298 if (code == MAX_EXPR)
6299 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6300 target, op1, NULL_RTX, op0);
6302 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6303 op1, target, NULL_RTX, op0);
6304 emit_move_insn (target, op1);
6308 if (code == MAX_EXPR)
6309 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6310 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
6311 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
6313 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6314 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
6315 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
6316 if (temp == const0_rtx)
6317 emit_move_insn (target, op1);
6318 else if (temp != const_true_rtx)
6320 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
6321 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
6324 emit_move_insn (target, op1);
6331 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6332 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
6338 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6339 temp = expand_unop (mode, ffs_optab, op0, target, 1);
6344 /* ??? Can optimize bitwise operations with one arg constant.
6345 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6346 and (a bitwise1 b) bitwise2 b (etc)
6347 but that is probably not worth while. */
6349 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6350 boolean values when we want in all cases to compute both of them. In
6351 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6352 as actual zero-or-1 values and then bitwise anding. In cases where
6353 there cannot be any side effects, better code would be made by
6354 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6355 how to recognize those cases. */
6357 case TRUTH_AND_EXPR:
6359 this_optab = and_optab;
6364 this_optab = ior_optab;
6367 case TRUTH_XOR_EXPR:
6369 this_optab = xor_optab;
6376 preexpand_calls (exp);
6377 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6379 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6380 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
6383 /* Could determine the answer when only additive constants differ. Also,
6384 the addition of one can be handled by changing the condition. */
6391 preexpand_calls (exp);
6392 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
6396 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
6397 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
6399 && GET_CODE (original_target) == REG
6400 && (GET_MODE (original_target)
6401 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6403 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
6406 if (temp != original_target)
6407 temp = copy_to_reg (temp);
6409 op1 = gen_label_rtx ();
6410 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
6411 GET_MODE (temp), unsignedp, 0);
6412 emit_jump_insn (gen_beq (op1));
6413 emit_move_insn (temp, const1_rtx);
6418 /* If no set-flag instruction, must generate a conditional
6419 store into a temporary variable. Drop through
6420 and handle this like && and ||. */
6422 case TRUTH_ANDIF_EXPR:
6423 case TRUTH_ORIF_EXPR:
6425 && (target == 0 || ! safe_from_p (target, exp)
6426 /* Make sure we don't have a hard reg (such as function's return
6427 value) live across basic blocks, if not optimizing. */
6428 || (!optimize && GET_CODE (target) == REG
6429 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
6430 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6433 emit_clr_insn (target);
6435 op1 = gen_label_rtx ();
6436 jumpifnot (exp, op1);
6439 emit_0_to_1_insn (target);
6442 return ignore ? const0_rtx : target;
6444 case TRUTH_NOT_EXPR:
6445 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6446 /* The parser is careful to generate TRUTH_NOT_EXPR
6447 only with operands that are always zero or one. */
6448 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
6449 target, 1, OPTAB_LIB_WIDEN);
6455 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6457 return expand_expr (TREE_OPERAND (exp, 1),
6458 (ignore ? const0_rtx : target),
6463 rtx flag = NULL_RTX;
6464 tree left_cleanups = NULL_TREE;
6465 tree right_cleanups = NULL_TREE;
6467 /* Used to save a pointer to the place to put the setting of
6468 the flag that indicates if this side of the conditional was
6469 taken. We backpatch the code, if we find out later that we
6470 have any conditional cleanups that need to be performed. */
6471 rtx dest_right_flag = NULL_RTX;
6472 rtx dest_left_flag = NULL_RTX;
6474 /* Note that COND_EXPRs whose type is a structure or union
6475 are required to be constructed to contain assignments of
6476 a temporary variable, so that we can evaluate them here
6477 for side effect only. If type is void, we must do likewise. */
6479 /* If an arm of the branch requires a cleanup,
6480 only that cleanup is performed. */
6483 tree binary_op = 0, unary_op = 0;
6484 tree old_cleanups = cleanups_this_call;
6486 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
6487 convert it to our mode, if necessary. */
6488 if (integer_onep (TREE_OPERAND (exp, 1))
6489 && integer_zerop (TREE_OPERAND (exp, 2))
6490 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6494 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6499 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
6500 if (GET_MODE (op0) == mode)
6504 target = gen_reg_rtx (mode);
6505 convert_move (target, op0, unsignedp);
6509 /* Check for X ? A + B : A. If we have this, we can copy
6510 A to the output and conditionally add B. Similarly for unary
6511 operations. Don't do this if X has side-effects because
6512 those side effects might affect A or B and the "?" operation is
6513 a sequence point in ANSI. (We test for side effects later.) */
6515 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
6516 && operand_equal_p (TREE_OPERAND (exp, 2),
6517 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6518 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
6519 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
6520 && operand_equal_p (TREE_OPERAND (exp, 1),
6521 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6522 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
6523 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
6524 && operand_equal_p (TREE_OPERAND (exp, 2),
6525 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6526 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
6527 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
6528 && operand_equal_p (TREE_OPERAND (exp, 1),
6529 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6530 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
6532 /* If we are not to produce a result, we have no target. Otherwise,
6533 if a target was specified use it; it will not be used as an
6534 intermediate target unless it is safe. If no target, use a
6539 else if (original_target
6540 && (safe_from_p (original_target, TREE_OPERAND (exp, 0))
6541 || (singleton && GET_CODE (original_target) == REG
6542 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
6543 && original_target == var_rtx (singleton)))
6544 && GET_MODE (original_target) == mode
6545 && ! (GET_CODE (original_target) == MEM
6546 && MEM_VOLATILE_P (original_target)))
6547 temp = original_target;
6548 else if (TREE_ADDRESSABLE (type))
6551 temp = assign_temp (type, 0, 0, 1);
6553 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
6554 operation, do this as A + (X != 0). Similarly for other simple
6555 binary operators. */
6556 if (temp && singleton && binary_op
6557 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6558 && (TREE_CODE (binary_op) == PLUS_EXPR
6559 || TREE_CODE (binary_op) == MINUS_EXPR
6560 || TREE_CODE (binary_op) == BIT_IOR_EXPR
6561 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
6562 && integer_onep (TREE_OPERAND (binary_op, 1))
6563 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6566 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
6567 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
6568 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
6571 /* If we had X ? A : A + 1, do this as A + (X == 0).
6573 We have to invert the truth value here and then put it
6574 back later if do_store_flag fails. We cannot simply copy
6575 TREE_OPERAND (exp, 0) to another variable and modify that
6576 because invert_truthvalue can modify the tree pointed to
6578 if (singleton == TREE_OPERAND (exp, 1))
6579 TREE_OPERAND (exp, 0)
6580 = invert_truthvalue (TREE_OPERAND (exp, 0));
6582 result = do_store_flag (TREE_OPERAND (exp, 0),
6583 (safe_from_p (temp, singleton)
6585 mode, BRANCH_COST <= 1);
6589 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
6590 return expand_binop (mode, boptab, op1, result, temp,
6591 unsignedp, OPTAB_LIB_WIDEN);
6593 else if (singleton == TREE_OPERAND (exp, 1))
6594 TREE_OPERAND (exp, 0)
6595 = invert_truthvalue (TREE_OPERAND (exp, 0));
6598 do_pending_stack_adjust ();
6600 op0 = gen_label_rtx ();
6602 flag = gen_reg_rtx (word_mode);
6603 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
6607 /* If the target conflicts with the other operand of the
6608 binary op, we can't use it. Also, we can't use the target
6609 if it is a hard register, because evaluating the condition
6610 might clobber it. */
6612 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
6613 || (GET_CODE (temp) == REG
6614 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
6615 temp = gen_reg_rtx (mode);
6616 store_expr (singleton, temp, 0);
6619 expand_expr (singleton,
6620 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6621 dest_left_flag = get_last_insn ();
6622 if (singleton == TREE_OPERAND (exp, 1))
6623 jumpif (TREE_OPERAND (exp, 0), op0);
6625 jumpifnot (TREE_OPERAND (exp, 0), op0);
6627 /* Allows cleanups up to here. */
6628 old_cleanups = cleanups_this_call;
6629 if (binary_op && temp == 0)
6630 /* Just touch the other operand. */
6631 expand_expr (TREE_OPERAND (binary_op, 1),
6632 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6634 store_expr (build (TREE_CODE (binary_op), type,
6635 make_tree (type, temp),
6636 TREE_OPERAND (binary_op, 1)),
6639 store_expr (build1 (TREE_CODE (unary_op), type,
6640 make_tree (type, temp)),
6643 dest_right_flag = get_last_insn ();
6646 /* This is now done in jump.c and is better done there because it
6647 produces shorter register lifetimes. */
6649 /* Check for both possibilities either constants or variables
6650 in registers (but not the same as the target!). If so, can
6651 save branches by assigning one, branching, and assigning the
6653 else if (temp && GET_MODE (temp) != BLKmode
6654 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
6655 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
6656 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
6657 && DECL_RTL (TREE_OPERAND (exp, 1))
6658 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
6659 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
6660 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
6661 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
6662 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
6663 && DECL_RTL (TREE_OPERAND (exp, 2))
6664 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
6665 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
6667 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6668 temp = gen_reg_rtx (mode);
6669 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6670 dest_left_flag = get_last_insn ();
6671 jumpifnot (TREE_OPERAND (exp, 0), op0);
6673 /* Allows cleanups up to here. */
6674 old_cleanups = cleanups_this_call;
6675 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6677 dest_right_flag = get_last_insn ();
6680 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
6681 comparison operator. If we have one of these cases, set the
6682 output to A, branch on A (cse will merge these two references),
6683 then set the output to FOO. */
6685 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6686 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6687 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6688 TREE_OPERAND (exp, 1), 0)
6689 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6690 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
6692 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6693 temp = gen_reg_rtx (mode);
6694 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6695 dest_left_flag = get_last_insn ();
6696 jumpif (TREE_OPERAND (exp, 0), op0);
6698 /* Allows cleanups up to here. */
6699 old_cleanups = cleanups_this_call;
6700 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6702 dest_right_flag = get_last_insn ();
6705 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6706 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6707 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6708 TREE_OPERAND (exp, 2), 0)
6709 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6710 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
6712 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6713 temp = gen_reg_rtx (mode);
6714 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6715 dest_left_flag = get_last_insn ();
6716 jumpifnot (TREE_OPERAND (exp, 0), op0);
6718 /* Allows cleanups up to here. */
6719 old_cleanups = cleanups_this_call;
6720 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6722 dest_right_flag = get_last_insn ();
6726 op1 = gen_label_rtx ();
6727 jumpifnot (TREE_OPERAND (exp, 0), op0);
6729 /* Allows cleanups up to here. */
6730 old_cleanups = cleanups_this_call;
6732 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6734 expand_expr (TREE_OPERAND (exp, 1),
6735 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6736 dest_left_flag = get_last_insn ();
6738 /* Handle conditional cleanups, if any. */
6739 left_cleanups = defer_cleanups_to (old_cleanups);
6742 emit_jump_insn (gen_jump (op1));
6746 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6748 expand_expr (TREE_OPERAND (exp, 2),
6749 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6750 dest_right_flag = get_last_insn ();
6753 /* Handle conditional cleanups, if any. */
6754 right_cleanups = defer_cleanups_to (old_cleanups);
6760 /* Add back in, any conditional cleanups. */
6761 if (left_cleanups || right_cleanups)
6767 /* Now that we know that a flag is needed, go back and add in the
6768 setting of the flag. */
6770 /* Do the left side flag. */
6771 last = get_last_insn ();
6772 /* Flag left cleanups as needed. */
6773 emit_move_insn (flag, const1_rtx);
6774 /* ??? deprecated, use sequences instead. */
6775 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
6777 /* Do the right side flag. */
6778 last = get_last_insn ();
6779 /* Flag left cleanups as needed. */
6780 emit_move_insn (flag, const0_rtx);
6781 /* ??? deprecated, use sequences instead. */
6782 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
6784 /* All cleanups must be on the function_obstack. */
6785 push_obstacks_nochange ();
6786 resume_temporary_allocation ();
6788 /* convert flag, which is an rtx, into a tree. */
6789 cond = make_node (RTL_EXPR);
6790 TREE_TYPE (cond) = integer_type_node;
6791 RTL_EXPR_RTL (cond) = flag;
6792 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
6793 cond = save_expr (cond);
6795 if (! left_cleanups)
6796 left_cleanups = integer_zero_node;
6797 if (! right_cleanups)
6798 right_cleanups = integer_zero_node;
6799 new_cleanups = build (COND_EXPR, void_type_node,
6800 truthvalue_conversion (cond),
6801 left_cleanups, right_cleanups);
6802 new_cleanups = fold (new_cleanups);
6806 /* Now add in the conditionalized cleanups. */
6808 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
6809 expand_eh_region_start ();
6816 /* Something needs to be initialized, but we didn't know
6817 where that thing was when building the tree. For example,
6818 it could be the return value of a function, or a parameter
6819 to a function which lays down in the stack, or a temporary
6820 variable which must be passed by reference.
6822 We guarantee that the expression will either be constructed
6823 or copied into our original target. */
6825 tree slot = TREE_OPERAND (exp, 0);
6826 tree cleanups = NULL_TREE;
6830 if (TREE_CODE (slot) != VAR_DECL)
6834 target = original_target;
6838 if (DECL_RTL (slot) != 0)
6840 target = DECL_RTL (slot);
6841 /* If we have already expanded the slot, so don't do
6843 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6848 target = assign_temp (type, 2, 1, 1);
6849 /* All temp slots at this level must not conflict. */
6850 preserve_temp_slots (target);
6851 DECL_RTL (slot) = target;
6853 /* Since SLOT is not known to the called function
6854 to belong to its stack frame, we must build an explicit
6855 cleanup. This case occurs when we must build up a reference
6856 to pass the reference as an argument. In this case,
6857 it is very likely that such a reference need not be
6860 if (TREE_OPERAND (exp, 2) == 0)
6861 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
6862 cleanups = TREE_OPERAND (exp, 2);
6867 /* This case does occur, when expanding a parameter which
6868 needs to be constructed on the stack. The target
6869 is the actual stack address that we want to initialize.
6870 The function we call will perform the cleanup in this case. */
6872 /* If we have already assigned it space, use that space,
6873 not target that we were passed in, as our target
6874 parameter is only a hint. */
6875 if (DECL_RTL (slot) != 0)
6877 target = DECL_RTL (slot);
6878 /* If we have already expanded the slot, so don't do
6880 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6884 DECL_RTL (slot) = target;
6887 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
6888 /* Mark it as expanded. */
6889 TREE_OPERAND (exp, 1) = NULL_TREE;
6891 store_expr (exp1, target, 0);
6895 cleanups_this_call = tree_cons (NULL_TREE,
6897 cleanups_this_call);
6898 expand_eh_region_start ();
6906 tree lhs = TREE_OPERAND (exp, 0);
6907 tree rhs = TREE_OPERAND (exp, 1);
6908 tree noncopied_parts = 0;
6909 tree lhs_type = TREE_TYPE (lhs);
6911 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6912 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
6913 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
6914 TYPE_NONCOPIED_PARTS (lhs_type));
6915 while (noncopied_parts != 0)
6917 expand_assignment (TREE_VALUE (noncopied_parts),
6918 TREE_PURPOSE (noncopied_parts), 0, 0);
6919 noncopied_parts = TREE_CHAIN (noncopied_parts);
6926 /* If lhs is complex, expand calls in rhs before computing it.
6927 That's so we don't compute a pointer and save it over a call.
6928 If lhs is simple, compute it first so we can give it as a
6929 target if the rhs is just a call. This avoids an extra temp and copy
6930 and that prevents a partial-subsumption which makes bad code.
6931 Actually we could treat component_ref's of vars like vars. */
6933 tree lhs = TREE_OPERAND (exp, 0);
6934 tree rhs = TREE_OPERAND (exp, 1);
6935 tree noncopied_parts = 0;
6936 tree lhs_type = TREE_TYPE (lhs);
6940 if (TREE_CODE (lhs) != VAR_DECL
6941 && TREE_CODE (lhs) != RESULT_DECL
6942 && TREE_CODE (lhs) != PARM_DECL)
6943 preexpand_calls (exp);
6945 /* Check for |= or &= of a bitfield of size one into another bitfield
6946 of size 1. In this case, (unless we need the result of the
6947 assignment) we can do this more efficiently with a
6948 test followed by an assignment, if necessary.
6950 ??? At this point, we can't get a BIT_FIELD_REF here. But if
6951 things change so we do, this code should be enhanced to
6954 && TREE_CODE (lhs) == COMPONENT_REF
6955 && (TREE_CODE (rhs) == BIT_IOR_EXPR
6956 || TREE_CODE (rhs) == BIT_AND_EXPR)
6957 && TREE_OPERAND (rhs, 0) == lhs
6958 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
6959 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
6960 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
6962 rtx label = gen_label_rtx ();
6964 do_jump (TREE_OPERAND (rhs, 1),
6965 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
6966 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
6967 expand_assignment (lhs, convert (TREE_TYPE (rhs),
6968 (TREE_CODE (rhs) == BIT_IOR_EXPR
6970 : integer_zero_node)),
6972 do_pending_stack_adjust ();
6977 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
6978 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
6979 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
6980 TYPE_NONCOPIED_PARTS (lhs_type));
6982 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6983 while (noncopied_parts != 0)
6985 expand_assignment (TREE_PURPOSE (noncopied_parts),
6986 TREE_VALUE (noncopied_parts), 0, 0);
6987 noncopied_parts = TREE_CHAIN (noncopied_parts);
6992 case PREINCREMENT_EXPR:
6993 case PREDECREMENT_EXPR:
6994 return expand_increment (exp, 0, ignore);
6996 case POSTINCREMENT_EXPR:
6997 case POSTDECREMENT_EXPR:
6998 /* Faster to treat as pre-increment if result is not used. */
6999 return expand_increment (exp, ! ignore, ignore);
7002 /* If nonzero, TEMP will be set to the address of something that might
7003 be a MEM corresponding to a stack slot. */
7006 /* Are we taking the address of a nested function? */
7007 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
7008 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
7009 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0)))
7011 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7012 op0 = force_operand (op0, target);
7014 /* If we are taking the address of something erroneous, just
7016 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7020 /* We make sure to pass const0_rtx down if we came in with
7021 ignore set, to avoid doing the cleanups twice for something. */
7022 op0 = expand_expr (TREE_OPERAND (exp, 0),
7023 ignore ? const0_rtx : NULL_RTX, VOIDmode,
7024 (modifier == EXPAND_INITIALIZER
7025 ? modifier : EXPAND_CONST_ADDRESS));
7027 /* If we are going to ignore the result, OP0 will have been set
7028 to const0_rtx, so just return it. Don't get confused and
7029 think we are taking the address of the constant. */
7033 op0 = protect_from_queue (op0, 0);
7035 /* We would like the object in memory. If it is a constant,
7036 we can have it be statically allocated into memory. For
7037 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
7038 memory and store the value into it. */
7040 if (CONSTANT_P (op0))
7041 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7043 else if (GET_CODE (op0) == MEM)
7045 mark_temp_addr_taken (op0);
7046 temp = XEXP (op0, 0);
7049 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7050 || GET_CODE (op0) == CONCAT)
7052 /* If this object is in a register, it must be not
7054 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7055 rtx memloc = assign_temp (inner_type, 1, 1, 1);
7057 mark_temp_addr_taken (memloc);
7058 emit_move_insn (memloc, op0);
7062 if (GET_CODE (op0) != MEM)
7065 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7067 temp = XEXP (op0, 0);
7068 #ifdef POINTERS_EXTEND_UNSIGNED
7069 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7070 && mode == ptr_mode)
7071 temp = convert_memory_address (ptr_mode, temp);
7076 op0 = force_operand (XEXP (op0, 0), target);
7079 if (flag_force_addr && GET_CODE (op0) != REG)
7080 op0 = force_reg (Pmode, op0);
7082 if (GET_CODE (op0) == REG
7083 && ! REG_USERVAR_P (op0))
7084 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
7086 /* If we might have had a temp slot, add an equivalent address
7089 update_temp_slot_address (temp, op0);
7091 #ifdef POINTERS_EXTEND_UNSIGNED
7092 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7093 && mode == ptr_mode)
7094 op0 = convert_memory_address (ptr_mode, op0);
7099 case ENTRY_VALUE_EXPR:
7102 /* COMPLEX type for Extended Pascal & Fortran */
7105 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7108 /* Get the rtx code of the operands. */
7109 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7110 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7113 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7117 /* Move the real (op0) and imaginary (op1) parts to their location. */
7118 emit_move_insn (gen_realpart (mode, target), op0);
7119 emit_move_insn (gen_imagpart (mode, target), op1);
7121 insns = get_insns ();
7124 /* Complex construction should appear as a single unit. */
7125 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7126 each with a separate pseudo as destination.
7127 It's not correct for flow to treat them as a unit. */
7128 if (GET_CODE (target) != CONCAT)
7129 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
7137 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7138 return gen_realpart (mode, op0);
7141 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7142 return gen_imagpart (mode, op0);
7146 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7150 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7153 target = gen_reg_rtx (mode);
7157 /* Store the realpart and the negated imagpart to target. */
7158 emit_move_insn (gen_realpart (partmode, target),
7159 gen_realpart (partmode, op0));
7161 imag_t = gen_imagpart (partmode, target);
7162 temp = expand_unop (partmode, neg_optab,
7163 gen_imagpart (partmode, op0), imag_t, 0);
7165 emit_move_insn (imag_t, temp);
7167 insns = get_insns ();
7170 /* Conjugate should appear as a single unit
7171 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
7172 each with a separate pseudo as destination.
7173 It's not correct for flow to treat them as a unit. */
7174 if (GET_CODE (target) != CONCAT)
7175 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
7183 op0 = CONST0_RTX (tmode);
7189 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7192 /* Here to do an ordinary binary operator, generating an instruction
7193 from the optab already placed in `this_optab'. */
7195 preexpand_calls (exp);
7196 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
7198 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7199 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7201 temp = expand_binop (mode, this_optab, op0, op1, target,
7202 unsignedp, OPTAB_LIB_WIDEN);
7209 /* Emit bytecode to evaluate the given expression EXP to the stack. */
7212 bc_expand_expr (exp)
7215 enum tree_code code;
7218 struct binary_operator *binoptab;
7219 struct unary_operator *unoptab;
7220 struct increment_operator *incroptab;
7221 struct bc_label *lab, *lab1;
7222 enum bytecode_opcode opcode;
7225 code = TREE_CODE (exp);
7231 if (DECL_RTL (exp) == 0)
7233 error_with_decl (exp, "prior parameter's size depends on `%s'");
7237 bc_load_parmaddr (DECL_RTL (exp));
7238 bc_load_memory (TREE_TYPE (exp), exp);
7244 if (DECL_RTL (exp) == 0)
7248 if (BYTECODE_LABEL (DECL_RTL (exp)))
7249 bc_load_externaddr (DECL_RTL (exp));
7251 bc_load_localaddr (DECL_RTL (exp));
7253 if (TREE_PUBLIC (exp))
7254 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
7255 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
7257 bc_load_localaddr (DECL_RTL (exp));
7259 bc_load_memory (TREE_TYPE (exp), exp);
7264 #ifdef DEBUG_PRINT_CODE
7265 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
7267 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
7269 : TYPE_MODE (TREE_TYPE (exp)))],
7270 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
7276 #ifdef DEBUG_PRINT_CODE
7277 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
7279 /* FIX THIS: find a better way to pass real_cst's. -bson */
7280 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
7281 (double) TREE_REAL_CST (exp));
7290 /* We build a call description vector describing the type of
7291 the return value and of the arguments; this call vector,
7292 together with a pointer to a location for the return value
7293 and the base of the argument list, is passed to the low
7294 level machine dependent call subroutine, which is responsible
7295 for putting the arguments wherever real functions expect
7296 them, as well as getting the return value back. */
7298 tree calldesc = 0, arg;
7302 /* Push the evaluated args on the evaluation stack in reverse
7303 order. Also make an entry for each arg in the calldesc
7304 vector while we're at it. */
7306 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7308 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
7311 bc_expand_expr (TREE_VALUE (arg));
7313 calldesc = tree_cons ((tree) 0,
7314 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
7316 calldesc = tree_cons ((tree) 0,
7317 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
7321 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7323 /* Allocate a location for the return value and push its
7324 address on the evaluation stack. Also make an entry
7325 at the front of the calldesc for the return value type. */
7327 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7328 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
7329 bc_load_localaddr (retval);
7331 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
7332 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
7334 /* Prepend the argument count. */
7335 calldesc = tree_cons ((tree) 0,
7336 build_int_2 (nargs, 0),
7339 /* Push the address of the call description vector on the stack. */
7340 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
7341 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
7342 build_index_type (build_int_2 (nargs * 2, 0)));
7343 r = output_constant_def (calldesc);
7344 bc_load_externaddr (r);
7346 /* Push the address of the function to be called. */
7347 bc_expand_expr (TREE_OPERAND (exp, 0));
7349 /* Call the function, popping its address and the calldesc vector
7350 address off the evaluation stack in the process. */
7351 bc_emit_instruction (call);
7353 /* Pop the arguments off the stack. */
7354 bc_adjust_stack (nargs);
7356 /* Load the return value onto the stack. */
7357 bc_load_localaddr (retval);
7358 bc_load_memory (type, TREE_OPERAND (exp, 0));
7364 if (!SAVE_EXPR_RTL (exp))
7366 /* First time around: copy to local variable */
7367 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
7368 TYPE_ALIGN (TREE_TYPE(exp)));
7369 bc_expand_expr (TREE_OPERAND (exp, 0));
7370 bc_emit_instruction (duplicate);
7372 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7373 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7377 /* Consecutive reference: use saved copy */
7378 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7379 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7384 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
7385 how are they handled instead? */
7388 TREE_USED (exp) = 1;
7389 bc_expand_expr (STMT_BODY (exp));
7396 bc_expand_expr (TREE_OPERAND (exp, 0));
7397 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
7402 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
7407 bc_expand_address (TREE_OPERAND (exp, 0));
7412 bc_expand_expr (TREE_OPERAND (exp, 0));
7413 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7418 bc_expand_expr (bc_canonicalize_array_ref (exp));
7423 bc_expand_component_address (exp);
7425 /* If we have a bitfield, generate a proper load */
7426 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
7431 bc_expand_expr (TREE_OPERAND (exp, 0));
7432 bc_emit_instruction (drop);
7433 bc_expand_expr (TREE_OPERAND (exp, 1));
7438 bc_expand_expr (TREE_OPERAND (exp, 0));
7439 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7440 lab = bc_get_bytecode_label ();
7441 bc_emit_bytecode (xjumpifnot);
7442 bc_emit_bytecode_labelref (lab);
7444 #ifdef DEBUG_PRINT_CODE
7445 fputc ('\n', stderr);
7447 bc_expand_expr (TREE_OPERAND (exp, 1));
7448 lab1 = bc_get_bytecode_label ();
7449 bc_emit_bytecode (jump);
7450 bc_emit_bytecode_labelref (lab1);
7452 #ifdef DEBUG_PRINT_CODE
7453 fputc ('\n', stderr);
7456 bc_emit_bytecode_labeldef (lab);
7457 bc_expand_expr (TREE_OPERAND (exp, 2));
7458 bc_emit_bytecode_labeldef (lab1);
7461 case TRUTH_ANDIF_EXPR:
7463 opcode = xjumpifnot;
7466 case TRUTH_ORIF_EXPR:
7473 binoptab = optab_plus_expr;
7478 binoptab = optab_minus_expr;
7483 binoptab = optab_mult_expr;
7486 case TRUNC_DIV_EXPR:
7487 case FLOOR_DIV_EXPR:
7489 case ROUND_DIV_EXPR:
7490 case EXACT_DIV_EXPR:
7492 binoptab = optab_trunc_div_expr;
7495 case TRUNC_MOD_EXPR:
7496 case FLOOR_MOD_EXPR:
7498 case ROUND_MOD_EXPR:
7500 binoptab = optab_trunc_mod_expr;
7503 case FIX_ROUND_EXPR:
7504 case FIX_FLOOR_EXPR:
7506 abort (); /* Not used for C. */
7508 case FIX_TRUNC_EXPR:
7515 abort (); /* FIXME */
7519 binoptab = optab_rdiv_expr;
7524 binoptab = optab_bit_and_expr;
7529 binoptab = optab_bit_ior_expr;
7534 binoptab = optab_bit_xor_expr;
7539 binoptab = optab_lshift_expr;
7544 binoptab = optab_rshift_expr;
7547 case TRUTH_AND_EXPR:
7549 binoptab = optab_truth_and_expr;
7554 binoptab = optab_truth_or_expr;
7559 binoptab = optab_lt_expr;
7564 binoptab = optab_le_expr;
7569 binoptab = optab_ge_expr;
7574 binoptab = optab_gt_expr;
7579 binoptab = optab_eq_expr;
7584 binoptab = optab_ne_expr;
7589 unoptab = optab_negate_expr;
7594 unoptab = optab_bit_not_expr;
7597 case TRUTH_NOT_EXPR:
7599 unoptab = optab_truth_not_expr;
7602 case PREDECREMENT_EXPR:
7604 incroptab = optab_predecrement_expr;
7607 case PREINCREMENT_EXPR:
7609 incroptab = optab_preincrement_expr;
7612 case POSTDECREMENT_EXPR:
7614 incroptab = optab_postdecrement_expr;
7617 case POSTINCREMENT_EXPR:
7619 incroptab = optab_postincrement_expr;
7624 bc_expand_constructor (exp);
7634 tree vars = TREE_OPERAND (exp, 0);
7635 int vars_need_expansion = 0;
7637 /* Need to open a binding contour here because
7638 if there are any cleanups they most be contained here. */
7639 expand_start_bindings (0);
7641 /* Mark the corresponding BLOCK for output. */
7642 if (TREE_OPERAND (exp, 2) != 0)
7643 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
7645 /* If VARS have not yet been expanded, expand them now. */
7648 if (DECL_RTL (vars) == 0)
7650 vars_need_expansion = 1;
7653 expand_decl_init (vars);
7654 vars = TREE_CHAIN (vars);
7657 bc_expand_expr (TREE_OPERAND (exp, 1));
7659 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7669 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
7670 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
7676 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7682 bc_expand_expr (TREE_OPERAND (exp, 0));
7683 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7684 lab = bc_get_bytecode_label ();
7686 bc_emit_instruction (duplicate);
7687 bc_emit_bytecode (opcode);
7688 bc_emit_bytecode_labelref (lab);
7690 #ifdef DEBUG_PRINT_CODE
7691 fputc ('\n', stderr);
7694 bc_emit_instruction (drop);
7696 bc_expand_expr (TREE_OPERAND (exp, 1));
7697 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
7698 bc_emit_bytecode_labeldef (lab);
7704 type = TREE_TYPE (TREE_OPERAND (exp, 0));
7706 /* Push the quantum. */
7707 bc_expand_expr (TREE_OPERAND (exp, 1));
7709 /* Convert it to the lvalue's type. */
7710 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
7712 /* Push the address of the lvalue */
7713 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
7715 /* Perform actual increment */
7716 bc_expand_increment (incroptab, type);
7720 /* Return the alignment in bits of EXP, a pointer valued expression.
7721 But don't return more than MAX_ALIGN no matter what.
7722 The alignment returned is, by default, the alignment of the thing that
7723 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7725 Otherwise, look at the expression to see if we can do better, i.e., if the
7726 expression is actually pointing at an object whose alignment is tighter. */
7729 get_pointer_alignment (exp, max_align)
7733 unsigned align, inner;
7735 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7738 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7739 align = MIN (align, max_align);
7743 switch (TREE_CODE (exp))
7747 case NON_LVALUE_EXPR:
7748 exp = TREE_OPERAND (exp, 0);
7749 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7751 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7752 align = MIN (inner, max_align);
7756 /* If sum of pointer + int, restrict our maximum alignment to that
7757 imposed by the integer. If not, we can't do any better than
7759 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
7762 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
7767 exp = TREE_OPERAND (exp, 0);
7771 /* See what we are pointing at and look at its alignment. */
7772 exp = TREE_OPERAND (exp, 0);
7773 if (TREE_CODE (exp) == FUNCTION_DECL)
7774 align = FUNCTION_BOUNDARY;
7775 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
7776 align = DECL_ALIGN (exp);
7777 #ifdef CONSTANT_ALIGNMENT
7778 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7779 align = CONSTANT_ALIGNMENT (exp, align);
7781 return MIN (align, max_align);
7789 /* Return the tree node and offset if a given argument corresponds to
7790 a string constant. */
7793 string_constant (arg, ptr_offset)
7799 if (TREE_CODE (arg) == ADDR_EXPR
7800 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7802 *ptr_offset = integer_zero_node;
7803 return TREE_OPERAND (arg, 0);
7805 else if (TREE_CODE (arg) == PLUS_EXPR)
7807 tree arg0 = TREE_OPERAND (arg, 0);
7808 tree arg1 = TREE_OPERAND (arg, 1);
7813 if (TREE_CODE (arg0) == ADDR_EXPR
7814 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
7817 return TREE_OPERAND (arg0, 0);
7819 else if (TREE_CODE (arg1) == ADDR_EXPR
7820 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
7823 return TREE_OPERAND (arg1, 0);
7830 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
7831 way, because it could contain a zero byte in the middle.
7832 TREE_STRING_LENGTH is the size of the character array, not the string.
7834 Unfortunately, string_constant can't access the values of const char
7835 arrays with initializers, so neither can we do so here. */
7845 src = string_constant (src, &offset_node);
7848 max = TREE_STRING_LENGTH (src);
7849 ptr = TREE_STRING_POINTER (src);
7850 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
7852 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7853 compute the offset to the following null if we don't know where to
7854 start searching for it. */
7856 for (i = 0; i < max; i++)
7859 /* We don't know the starting offset, but we do know that the string
7860 has no internal zero bytes. We can assume that the offset falls
7861 within the bounds of the string; otherwise, the programmer deserves
7862 what he gets. Subtract the offset from the length of the string,
7864 /* This would perhaps not be valid if we were dealing with named
7865 arrays in addition to literal string constants. */
7866 return size_binop (MINUS_EXPR, size_int (max), offset_node);
7869 /* We have a known offset into the string. Start searching there for
7870 a null character. */
7871 if (offset_node == 0)
7875 /* Did we get a long long offset? If so, punt. */
7876 if (TREE_INT_CST_HIGH (offset_node) != 0)
7878 offset = TREE_INT_CST_LOW (offset_node);
7880 /* If the offset is known to be out of bounds, warn, and call strlen at
7882 if (offset < 0 || offset > max)
7884 warning ("offset outside bounds of constant string");
7887 /* Use strlen to search for the first zero byte. Since any strings
7888 constructed with build_string will have nulls appended, we win even
7889 if we get handed something like (char[4])"abcd".
7891 Since OFFSET is our starting index into the string, no further
7892 calculation is needed. */
7893 return size_int (strlen (ptr + offset));
7897 expand_builtin_return_addr (fndecl_code, count, tem)
7898 enum built_in_function fndecl_code;
7904 /* Some machines need special handling before we can access
7905 arbitrary frames. For example, on the sparc, we must first flush
7906 all register windows to the stack. */
7907 #ifdef SETUP_FRAME_ADDRESSES
7908 SETUP_FRAME_ADDRESSES ();
7911 /* On the sparc, the return address is not in the frame, it is in a
7912 register. There is no way to access it off of the current frame
7913 pointer, but it can be accessed off the previous frame pointer by
7914 reading the value from the register window save area. */
7915 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7916 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
7920 /* Scan back COUNT frames to the specified frame. */
7921 for (i = 0; i < count; i++)
7923 /* Assume the dynamic chain pointer is in the word that the
7924 frame address points to, unless otherwise specified. */
7925 #ifdef DYNAMIC_CHAIN_ADDRESS
7926 tem = DYNAMIC_CHAIN_ADDRESS (tem);
7928 tem = memory_address (Pmode, tem);
7929 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
7932 /* For __builtin_frame_address, return what we've got. */
7933 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
7936 /* For __builtin_return_address, Get the return address from that
7938 #ifdef RETURN_ADDR_RTX
7939 tem = RETURN_ADDR_RTX (count, tem);
7941 tem = memory_address (Pmode,
7942 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7943 tem = gen_rtx (MEM, Pmode, tem);
7948 /* Expand an expression EXP that calls a built-in function,
7949 with result going to TARGET if that's convenient
7950 (and in mode MODE if that's convenient).
7951 SUBTARGET may be used as the target for computing one of EXP's operands.
7952 IGNORE is nonzero if the value is to be ignored. */
7954 #define CALLED_AS_BUILT_IN(NODE) \
7955 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
7958 expand_builtin (exp, target, subtarget, mode, ignore)
7962 enum machine_mode mode;
7965 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7966 tree arglist = TREE_OPERAND (exp, 1);
7969 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
7970 optab builtin_optab;
7972 switch (DECL_FUNCTION_CODE (fndecl))
7977 /* build_function_call changes these into ABS_EXPR. */
7982 /* Treat these like sqrt, but only if the user asks for them. */
7983 if (! flag_fast_math)
7985 case BUILT_IN_FSQRT:
7986 /* If not optimizing, call the library function. */
7991 /* Arg could be wrong type if user redeclared this fcn wrong. */
7992 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
7995 /* Stabilize and compute the argument. */
7996 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
7997 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
7999 exp = copy_node (exp);
8000 arglist = copy_node (arglist);
8001 TREE_OPERAND (exp, 1) = arglist;
8002 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
8004 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8006 /* Make a suitable register to place result in. */
8007 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8012 switch (DECL_FUNCTION_CODE (fndecl))
8015 builtin_optab = sin_optab; break;
8017 builtin_optab = cos_optab; break;
8018 case BUILT_IN_FSQRT:
8019 builtin_optab = sqrt_optab; break;
8024 /* Compute into TARGET.
8025 Set TARGET to wherever the result comes back. */
8026 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8027 builtin_optab, op0, target, 0);
8029 /* If we were unable to expand via the builtin, stop the
8030 sequence (without outputting the insns) and break, causing
8031 a call the the library function. */
8038 /* Check the results by default. But if flag_fast_math is turned on,
8039 then assume sqrt will always be called with valid arguments. */
8041 if (! flag_fast_math)
8043 /* Don't define the builtin FP instructions
8044 if your machine is not IEEE. */
8045 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8048 lab1 = gen_label_rtx ();
8050 /* Test the result; if it is NaN, set errno=EDOM because
8051 the argument was not in the domain. */
8052 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
8053 emit_jump_insn (gen_beq (lab1));
8057 #ifdef GEN_ERRNO_RTX
8058 rtx errno_rtx = GEN_ERRNO_RTX;
8061 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
8064 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8067 /* We can't set errno=EDOM directly; let the library call do it.
8068 Pop the arguments right away in case the call gets deleted. */
8070 expand_call (exp, target, 0);
8077 /* Output the entire sequence. */
8078 insns = get_insns ();
8084 /* __builtin_apply_args returns block of memory allocated on
8085 the stack into which is stored the arg pointer, structure
8086 value address, static chain, and all the registers that might
8087 possibly be used in performing a function call. The code is
8088 moved to the start of the function so the incoming values are
8090 case BUILT_IN_APPLY_ARGS:
8091 /* Don't do __builtin_apply_args more than once in a function.
8092 Save the result of the first call and reuse it. */
8093 if (apply_args_value != 0)
8094 return apply_args_value;
8096 /* When this function is called, it means that registers must be
8097 saved on entry to this function. So we migrate the
8098 call to the first insn of this function. */
8103 temp = expand_builtin_apply_args ();
8107 apply_args_value = temp;
8109 /* Put the sequence after the NOTE that starts the function.
8110 If this is inside a SEQUENCE, make the outer-level insn
8111 chain current, so the code is placed at the start of the
8113 push_topmost_sequence ();
8114 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8115 pop_topmost_sequence ();
8119 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8120 FUNCTION with a copy of the parameters described by
8121 ARGUMENTS, and ARGSIZE. It returns a block of memory
8122 allocated on the stack into which is stored all the registers
8123 that might possibly be used for returning the result of a
8124 function. ARGUMENTS is the value returned by
8125 __builtin_apply_args. ARGSIZE is the number of bytes of
8126 arguments that must be copied. ??? How should this value be
8127 computed? We'll also need a safe worst case value for varargs
8129 case BUILT_IN_APPLY:
8131 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8132 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8133 || TREE_CHAIN (arglist) == 0
8134 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8135 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8136 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8144 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8145 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
8147 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8150 /* __builtin_return (RESULT) causes the function to return the
8151 value described by RESULT. RESULT is address of the block of
8152 memory returned by __builtin_apply. */
8153 case BUILT_IN_RETURN:
8155 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8156 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8157 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8158 NULL_RTX, VOIDmode, 0));
8161 case BUILT_IN_SAVEREGS:
8162 /* Don't do __builtin_saveregs more than once in a function.
8163 Save the result of the first call and reuse it. */
8164 if (saveregs_value != 0)
8165 return saveregs_value;
8167 /* When this function is called, it means that registers must be
8168 saved on entry to this function. So we migrate the
8169 call to the first insn of this function. */
8173 /* Now really call the function. `expand_call' does not call
8174 expand_builtin, so there is no danger of infinite recursion here. */
8177 #ifdef EXPAND_BUILTIN_SAVEREGS
8178 /* Do whatever the machine needs done in this case. */
8179 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8181 /* The register where the function returns its value
8182 is likely to have something else in it, such as an argument.
8183 So preserve that register around the call. */
8185 if (value_mode != VOIDmode)
8187 rtx valreg = hard_libcall_value (value_mode);
8188 rtx saved_valreg = gen_reg_rtx (value_mode);
8190 emit_move_insn (saved_valreg, valreg);
8191 temp = expand_call (exp, target, ignore);
8192 emit_move_insn (valreg, saved_valreg);
8195 /* Generate the call, putting the value in a pseudo. */
8196 temp = expand_call (exp, target, ignore);
8202 saveregs_value = temp;
8204 /* Put the sequence after the NOTE that starts the function.
8205 If this is inside a SEQUENCE, make the outer-level insn
8206 chain current, so the code is placed at the start of the
8208 push_topmost_sequence ();
8209 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8210 pop_topmost_sequence ();
8214 /* __builtin_args_info (N) returns word N of the arg space info
8215 for the current function. The number and meanings of words
8216 is controlled by the definition of CUMULATIVE_ARGS. */
8217 case BUILT_IN_ARGS_INFO:
8219 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
8221 int *word_ptr = (int *) ¤t_function_args_info;
8222 tree type, elts, result;
8224 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8225 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8226 __FILE__, __LINE__);
8230 tree arg = TREE_VALUE (arglist);
8231 if (TREE_CODE (arg) != INTEGER_CST)
8232 error ("argument of `__builtin_args_info' must be constant");
8235 int wordnum = TREE_INT_CST_LOW (arg);
8237 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8238 error ("argument of `__builtin_args_info' out of range");
8240 return GEN_INT (word_ptr[wordnum]);
8244 error ("missing argument in `__builtin_args_info'");
8249 for (i = 0; i < nwords; i++)
8250 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
8252 type = build_array_type (integer_type_node,
8253 build_index_type (build_int_2 (nwords, 0)));
8254 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8255 TREE_CONSTANT (result) = 1;
8256 TREE_STATIC (result) = 1;
8257 result = build (INDIRECT_REF, build_pointer_type (type), result);
8258 TREE_CONSTANT (result) = 1;
8259 return expand_expr (result, NULL_RTX, VOIDmode, 0);
8263 /* Return the address of the first anonymous stack arg. */
8264 case BUILT_IN_NEXT_ARG:
8266 tree fntype = TREE_TYPE (current_function_decl);
8268 if ((TYPE_ARG_TYPES (fntype) == 0
8269 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8271 && ! current_function_varargs)
8273 error ("`va_start' used in function with fixed args");
8279 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8280 tree arg = TREE_VALUE (arglist);
8282 /* Strip off all nops for the sake of the comparison. This
8283 is not quite the same as STRIP_NOPS. It does more.
8284 We must also strip off INDIRECT_EXPR for C++ reference
8286 while (TREE_CODE (arg) == NOP_EXPR
8287 || TREE_CODE (arg) == CONVERT_EXPR
8288 || TREE_CODE (arg) == NON_LVALUE_EXPR
8289 || TREE_CODE (arg) == INDIRECT_REF)
8290 arg = TREE_OPERAND (arg, 0);
8291 if (arg != last_parm)
8292 warning ("second parameter of `va_start' not last named argument");
8294 else if (! current_function_varargs)
8295 /* Evidently an out of date version of <stdarg.h>; can't validate
8296 va_start's second argument, but can still work as intended. */
8297 warning ("`__builtin_next_arg' called without an argument");
8300 return expand_binop (Pmode, add_optab,
8301 current_function_internal_arg_pointer,
8302 current_function_arg_offset_rtx,
8303 NULL_RTX, 0, OPTAB_LIB_WIDEN);
8305 case BUILT_IN_CLASSIFY_TYPE:
8308 tree type = TREE_TYPE (TREE_VALUE (arglist));
8309 enum tree_code code = TREE_CODE (type);
8310 if (code == VOID_TYPE)
8311 return GEN_INT (void_type_class);
8312 if (code == INTEGER_TYPE)
8313 return GEN_INT (integer_type_class);
8314 if (code == CHAR_TYPE)
8315 return GEN_INT (char_type_class);
8316 if (code == ENUMERAL_TYPE)
8317 return GEN_INT (enumeral_type_class);
8318 if (code == BOOLEAN_TYPE)
8319 return GEN_INT (boolean_type_class);
8320 if (code == POINTER_TYPE)
8321 return GEN_INT (pointer_type_class);
8322 if (code == REFERENCE_TYPE)
8323 return GEN_INT (reference_type_class);
8324 if (code == OFFSET_TYPE)
8325 return GEN_INT (offset_type_class);
8326 if (code == REAL_TYPE)
8327 return GEN_INT (real_type_class);
8328 if (code == COMPLEX_TYPE)
8329 return GEN_INT (complex_type_class);
8330 if (code == FUNCTION_TYPE)
8331 return GEN_INT (function_type_class);
8332 if (code == METHOD_TYPE)
8333 return GEN_INT (method_type_class);
8334 if (code == RECORD_TYPE)
8335 return GEN_INT (record_type_class);
8336 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8337 return GEN_INT (union_type_class);
8338 if (code == ARRAY_TYPE)
8340 if (TYPE_STRING_FLAG (type))
8341 return GEN_INT (string_type_class);
8343 return GEN_INT (array_type_class);
8345 if (code == SET_TYPE)
8346 return GEN_INT (set_type_class);
8347 if (code == FILE_TYPE)
8348 return GEN_INT (file_type_class);
8349 if (code == LANG_TYPE)
8350 return GEN_INT (lang_type_class);
8352 return GEN_INT (no_type_class);
8354 case BUILT_IN_CONSTANT_P:
8359 tree arg = TREE_VALUE (arglist);
8362 return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
8363 || (TREE_CODE (arg) == ADDR_EXPR
8364 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8365 ? const1_rtx : const0_rtx);
8368 case BUILT_IN_FRAME_ADDRESS:
8369 /* The argument must be a nonnegative integer constant.
8370 It counts the number of frames to scan up the stack.
8371 The value is the address of that frame. */
8372 case BUILT_IN_RETURN_ADDRESS:
8373 /* The argument must be a nonnegative integer constant.
8374 It counts the number of frames to scan up the stack.
8375 The value is the return address saved in that frame. */
8377 /* Warning about missing arg was already issued. */
8379 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
8381 error ("invalid arg to `__builtin_return_address'");
8384 else if (tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
8386 error ("invalid arg to `__builtin_return_address'");
8391 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
8392 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
8393 hard_frame_pointer_rtx);
8395 /* For __builtin_frame_address, return what we've got. */
8396 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8399 if (GET_CODE (tem) != REG)
8400 tem = copy_to_reg (tem);
8404 case BUILT_IN_ALLOCA:
8406 /* Arg could be non-integer if user redeclared this fcn wrong. */
8407 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8410 /* Compute the argument. */
8411 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
8413 /* Allocate the desired space. */
8414 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
8417 /* If not optimizing, call the library function. */
8418 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8422 /* Arg could be non-integer if user redeclared this fcn wrong. */
8423 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8426 /* Compute the argument. */
8427 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8428 /* Compute ffs, into TARGET if possible.
8429 Set TARGET to wherever the result comes back. */
8430 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8431 ffs_optab, op0, target, 1);
8436 case BUILT_IN_STRLEN:
8437 /* If not optimizing, call the library function. */
8438 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8442 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8443 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8447 tree src = TREE_VALUE (arglist);
8448 tree len = c_strlen (src);
8451 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8453 rtx result, src_rtx, char_rtx;
8454 enum machine_mode insn_mode = value_mode, char_mode;
8455 enum insn_code icode;
8457 /* If the length is known, just return it. */
8459 return expand_expr (len, target, mode, 0);
8461 /* If SRC is not a pointer type, don't do this operation inline. */
8465 /* Call a function if we can't compute strlen in the right mode. */
8467 while (insn_mode != VOIDmode)
8469 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
8470 if (icode != CODE_FOR_nothing)
8473 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
8475 if (insn_mode == VOIDmode)
8478 /* Make a place to write the result of the instruction. */
8481 && GET_CODE (result) == REG
8482 && GET_MODE (result) == insn_mode
8483 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8484 result = gen_reg_rtx (insn_mode);
8486 /* Make sure the operands are acceptable to the predicates. */
8488 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
8489 result = gen_reg_rtx (insn_mode);
8491 src_rtx = memory_address (BLKmode,
8492 expand_expr (src, NULL_RTX, ptr_mode,
8494 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
8495 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
8497 char_rtx = const0_rtx;
8498 char_mode = insn_operand_mode[(int)icode][2];
8499 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
8500 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
8502 emit_insn (GEN_FCN (icode) (result,
8503 gen_rtx (MEM, BLKmode, src_rtx),
8504 char_rtx, GEN_INT (align)));
8506 /* Return the value in the proper mode for this function. */
8507 if (GET_MODE (result) == value_mode)
8509 else if (target != 0)
8511 convert_move (target, result, 0);
8515 return convert_to_mode (value_mode, result, 0);
8518 case BUILT_IN_STRCPY:
8519 /* If not optimizing, call the library function. */
8520 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8524 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8525 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8526 || TREE_CHAIN (arglist) == 0
8527 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8531 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
8536 len = size_binop (PLUS_EXPR, len, integer_one_node);
8538 chainon (arglist, build_tree_list (NULL_TREE, len));
8542 case BUILT_IN_MEMCPY:
8543 /* If not optimizing, call the library function. */
8544 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8548 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8549 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8550 || TREE_CHAIN (arglist) == 0
8551 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8552 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8553 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8557 tree dest = TREE_VALUE (arglist);
8558 tree src = TREE_VALUE (TREE_CHAIN (arglist));
8559 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8563 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8565 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8566 rtx dest_rtx, dest_mem, src_mem;
8568 /* If either SRC or DEST is not a pointer type, don't do
8569 this operation in-line. */
8570 if (src_align == 0 || dest_align == 0)
8572 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
8573 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8577 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8578 dest_mem = gen_rtx (MEM, BLKmode,
8579 memory_address (BLKmode, dest_rtx));
8580 /* There could be a void* cast on top of the object. */
8581 while (TREE_CODE (dest) == NOP_EXPR)
8582 dest = TREE_OPERAND (dest, 0);
8583 type = TREE_TYPE (TREE_TYPE (dest));
8584 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8585 src_mem = gen_rtx (MEM, BLKmode,
8586 memory_address (BLKmode,
8587 expand_expr (src, NULL_RTX,
8590 /* There could be a void* cast on top of the object. */
8591 while (TREE_CODE (src) == NOP_EXPR)
8592 src = TREE_OPERAND (src, 0);
8593 type = TREE_TYPE (TREE_TYPE (src));
8594 MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
8596 /* Copy word part most expediently. */
8597 emit_block_move (dest_mem, src_mem,
8598 expand_expr (len, NULL_RTX, VOIDmode, 0),
8599 MIN (src_align, dest_align));
8600 return force_operand (dest_rtx, NULL_RTX);
8603 case BUILT_IN_MEMSET:
8604 /* If not optimizing, call the library function. */
8605 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8609 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8610 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8611 || TREE_CHAIN (arglist) == 0
8612 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
8614 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8616 != (TREE_CODE (TREE_TYPE
8618 (TREE_CHAIN (TREE_CHAIN (arglist))))))))
8622 tree dest = TREE_VALUE (arglist);
8623 tree val = TREE_VALUE (TREE_CHAIN (arglist));
8624 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8628 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8629 rtx dest_rtx, dest_mem;
8631 /* If DEST is not a pointer type, don't do this
8632 operation in-line. */
8633 if (dest_align == 0)
8636 /* If VAL is not 0, don't do this operation in-line. */
8637 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
8640 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8641 dest_mem = gen_rtx (MEM, BLKmode,
8642 memory_address (BLKmode, dest_rtx));
8643 /* There could be a void* cast on top of the object. */
8644 while (TREE_CODE (dest) == NOP_EXPR)
8645 dest = TREE_OPERAND (dest, 0);
8646 type = TREE_TYPE (TREE_TYPE (dest));
8647 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8649 clear_storage (dest_mem, expand_expr (len, NULL_RTX, VOIDmode, 0),
8652 return force_operand (dest_rtx, NULL_RTX);
8655 /* These comparison functions need an instruction that returns an actual
8656 index. An ordinary compare that just sets the condition codes
8658 #ifdef HAVE_cmpstrsi
8659 case BUILT_IN_STRCMP:
8660 /* If not optimizing, call the library function. */
8661 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8665 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8666 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8667 || TREE_CHAIN (arglist) == 0
8668 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8670 else if (!HAVE_cmpstrsi)
8673 tree arg1 = TREE_VALUE (arglist);
8674 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8678 len = c_strlen (arg1);
8680 len = size_binop (PLUS_EXPR, integer_one_node, len);
8681 len2 = c_strlen (arg2);
8683 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
8685 /* If we don't have a constant length for the first, use the length
8686 of the second, if we know it. We don't require a constant for
8687 this case; some cost analysis could be done if both are available
8688 but neither is constant. For now, assume they're equally cheap.
8690 If both strings have constant lengths, use the smaller. This
8691 could arise if optimization results in strcpy being called with
8692 two fixed strings, or if the code was machine-generated. We should
8693 add some code to the `memcmp' handler below to deal with such
8694 situations, someday. */
8695 if (!len || TREE_CODE (len) != INTEGER_CST)
8702 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
8704 if (tree_int_cst_lt (len2, len))
8708 chainon (arglist, build_tree_list (NULL_TREE, len));
8712 case BUILT_IN_MEMCMP:
8713 /* If not optimizing, call the library function. */
8714 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8718 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8719 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8720 || TREE_CHAIN (arglist) == 0
8721 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8722 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8723 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8725 else if (!HAVE_cmpstrsi)
8728 tree arg1 = TREE_VALUE (arglist);
8729 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8730 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8734 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8736 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8737 enum machine_mode insn_mode
8738 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
8740 /* If we don't have POINTER_TYPE, call the function. */
8741 if (arg1_align == 0 || arg2_align == 0)
8743 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
8744 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8748 /* Make a place to write the result of the instruction. */
8751 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
8752 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8753 result = gen_reg_rtx (insn_mode);
8755 emit_insn (gen_cmpstrsi (result,
8756 gen_rtx (MEM, BLKmode,
8757 expand_expr (arg1, NULL_RTX,
8760 gen_rtx (MEM, BLKmode,
8761 expand_expr (arg2, NULL_RTX,
8764 expand_expr (len, NULL_RTX, VOIDmode, 0),
8765 GEN_INT (MIN (arg1_align, arg2_align))));
8767 /* Return the value in the proper mode for this function. */
8768 mode = TYPE_MODE (TREE_TYPE (exp));
8769 if (GET_MODE (result) == mode)
8771 else if (target != 0)
8773 convert_move (target, result, 0);
8777 return convert_to_mode (mode, result, 0);
8780 case BUILT_IN_STRCMP:
8781 case BUILT_IN_MEMCMP:
8785 /* __builtin_setjmp is passed a pointer to an array of five words
8786 (not all will be used on all machines). It operates similarly to
8787 the C library function of the same name, but is more efficient.
8788 Much of the code below (and for longjmp) is copied from the handling
8791 NOTE: This is intended for use by GNAT and will only work in
8792 the method used by it. This code will likely NOT survive to
8793 the GCC 2.8.0 release. */
8794 case BUILT_IN_SETJMP:
8796 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8800 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
8802 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
8803 enum machine_mode sa_mode = Pmode;
8805 int old_inhibit_defer_pop = inhibit_defer_pop;
8806 int return_pops = RETURN_POPS_ARGS (get_identifier ("__dummy"),
8807 get_identifier ("__dummy"), 0);
8809 CUMULATIVE_ARGS args_so_far;
8812 #ifdef POINTERS_EXTEND_UNSIGNED
8813 buf_addr = convert_memory_address (Pmode, buf_addr);
8816 buf_addr = force_reg (Pmode, buf_addr);
8818 if (target == 0 || GET_CODE (target) != REG
8819 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8820 target = gen_reg_rtx (value_mode);
8824 CONST_CALL_P (emit_note (NULL_PTR, NOTE_INSN_SETJMP)) = 1;
8825 current_function_calls_setjmp = 1;
8827 /* We store the frame pointer and the address of lab1 in the buffer
8828 and use the rest of it for the stack save area, which is
8829 machine-dependent. */
8830 emit_move_insn (gen_rtx (MEM, Pmode, buf_addr),
8831 virtual_stack_vars_rtx);
8833 (validize_mem (gen_rtx (MEM, Pmode,
8834 plus_constant (buf_addr,
8835 GET_MODE_SIZE (Pmode)))),
8836 gen_rtx (LABEL_REF, Pmode, lab1));
8838 #ifdef HAVE_save_stack_nonlocal
8839 if (HAVE_save_stack_nonlocal)
8840 sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
8843 stack_save = gen_rtx (MEM, sa_mode,
8844 plus_constant (buf_addr,
8845 2 * GET_MODE_SIZE (Pmode)));
8846 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
8850 emit_insn (gen_setjmp ());
8853 /* Set TARGET to zero and branch around the other case. */
8854 emit_move_insn (target, const0_rtx);
8855 emit_jump_insn (gen_jump (lab2));
8859 /* Note that setjmp clobbers FP when we get here, so we have to
8860 make sure it's marked as used by this function. */
8861 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
8863 /* Mark the static chain as clobbered here so life information
8864 doesn't get messed up for it. */
8865 emit_insn (gen_rtx (CLOBBER, VOIDmode, static_chain_rtx));
8867 /* Now put in the code to restore the frame pointer, and argument
8868 pointer, if needed. The code below is from expand_end_bindings
8869 in stmt.c; see detailed documentation there. */
8870 #ifdef HAVE_nonlocal_goto
8871 if (! HAVE_nonlocal_goto)
8873 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8875 current_function_has_nonlocal_goto = 1;
8877 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8878 if (fixed_regs[ARG_POINTER_REGNUM])
8880 #ifdef ELIMINABLE_REGS
8881 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
8883 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8884 if (elim_regs[i].from == ARG_POINTER_REGNUM
8885 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8888 if (i == sizeof elim_regs / sizeof elim_regs [0])
8891 /* Now restore our arg pointer from the address at which it
8892 was saved in our stack frame.
8893 If there hasn't be space allocated for it yet, make
8895 if (arg_pointer_save_area == 0)
8896 arg_pointer_save_area
8897 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8898 emit_move_insn (virtual_incoming_args_rtx,
8899 copy_to_reg (arg_pointer_save_area));
8904 #ifdef HAVE_nonlocal_goto_receiver
8905 if (HAVE_nonlocal_goto_receiver)
8906 emit_insn (gen_nonlocal_goto_receiver ());
8908 /* The static chain pointer contains the address of dummy function.
8909 We need to call it here to handle some PIC cases of restoring
8910 a global pointer. Then return 1. */
8911 op0 = copy_to_mode_reg (Pmode, static_chain_rtx);
8913 /* We can't actually call emit_library_call here, so do everything
8914 it does, which isn't much for a libfunc with no args. */
8915 op0 = memory_address (FUNCTION_MODE, op0);
8917 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE,
8918 gen_rtx (SYMBOL_REF, Pmode, "__dummy"), 1);
8919 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1);
8921 #ifndef ACCUMULATE_OUTGOING_ARGS
8922 #ifdef HAVE_call_pop
8924 emit_call_insn (gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, op0),
8925 const0_rtx, next_arg_reg,
8926 GEN_INT (return_pops)));
8933 emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, op0),
8934 const0_rtx, next_arg_reg, const0_rtx));
8939 emit_move_insn (target, const1_rtx);
8944 /* __builtin_longjmp is passed a pointer to an array of five words
8945 and a value, which is a dummy. It's similar to the C library longjmp
8946 function but works with __builtin_setjmp above. */
8947 case BUILT_IN_LONGJMP:
8948 if (arglist == 0 || TREE_CHAIN (arglist) == 0
8949 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8953 tree dummy_id = get_identifier ("__dummy");
8954 tree dummy_type = build_function_type (void_type_node, NULL_TREE);
8955 tree dummy_decl = build_decl (FUNCTION_DECL, dummy_id, dummy_type);
8956 #ifdef POINTERS_EXTEND_UNSIGNED
8959 convert_memory_address
8961 expand_expr (TREE_VALUE (arglist),
8962 NULL_RTX, VOIDmode, 0)));
8965 = force_reg (Pmode, expand_expr (TREE_VALUE (arglist),
8969 rtx fp = gen_rtx (MEM, Pmode, buf_addr);
8970 rtx lab = gen_rtx (MEM, Pmode,
8971 plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
8972 enum machine_mode sa_mode
8973 #ifdef HAVE_save_stack_nonlocal
8974 = (HAVE_save_stack_nonlocal
8975 ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
8980 rtx stack = gen_rtx (MEM, sa_mode,
8981 plus_constant (buf_addr,
8982 2 * GET_MODE_SIZE (Pmode)));
8984 DECL_EXTERNAL (dummy_decl) = 1;
8985 TREE_PUBLIC (dummy_decl) = 1;
8986 make_decl_rtl (dummy_decl, NULL_PTR, 1);
8988 /* Expand the second expression just for side-effects. */
8989 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
8990 const0_rtx, VOIDmode, 0);
8992 assemble_external (dummy_decl);
8994 /* Pick up FP, label, and SP from the block and jump. This code is
8995 from expand_goto in stmt.c; see there for detailed comments. */
8996 #if HAVE_nonlocal_goto
8997 if (HAVE_nonlocal_goto)
8998 emit_insn (gen_nonlocal_goto (fp, lab, stack,
8999 XEXP (DECL_RTL (dummy_decl), 0)));
9003 lab = copy_to_reg (lab);
9004 emit_move_insn (hard_frame_pointer_rtx, fp);
9005 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
9007 /* Put in the static chain register the address of the dummy
9009 emit_move_insn (static_chain_rtx, XEXP (DECL_RTL (dummy_decl), 0));
9010 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
9011 emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
9012 emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
9013 emit_indirect_jump (lab);
9019 default: /* just do library call, if unknown builtin */
9020 error ("built-in function `%s' not currently supported",
9021 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9024 /* The switch statement above can drop through to cause the function
9025 to be called normally. */
9027 return expand_call (exp, target, ignore);
9030 /* Built-in functions to perform an untyped call and return. */
9032 /* For each register that may be used for calling a function, this
9033 gives a mode used to copy the register's value. VOIDmode indicates
9034 the register is not used for calling a function. If the machine
9035 has register windows, this gives only the outbound registers.
9036 INCOMING_REGNO gives the corresponding inbound register. */
9037 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
9039 /* For each register that may be used for returning values, this gives
9040 a mode used to copy the register's value. VOIDmode indicates the
9041 register is not used for returning values. If the machine has
9042 register windows, this gives only the outbound registers.
9043 INCOMING_REGNO gives the corresponding inbound register. */
9044 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
9046 /* For each register that may be used for calling a function, this
9047 gives the offset of that register into the block returned by
9048 __builtin_apply_args. 0 indicates that the register is not
9049 used for calling a function. */
9050 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
9052 /* Return the offset of register REGNO into the block returned by
9053 __builtin_apply_args. This is not declared static, since it is
9054 needed in objc-act.c. */
9057 apply_args_register_offset (regno)
9062 /* Arguments are always put in outgoing registers (in the argument
9063 block) if such make sense. */
9064 #ifdef OUTGOING_REGNO
9065 regno = OUTGOING_REGNO(regno);
9067 return apply_args_reg_offset[regno];
9070 /* Return the size required for the block returned by __builtin_apply_args,
9071 and initialize apply_args_mode. */
9076 static int size = -1;
9078 enum machine_mode mode;
9080 /* The values computed by this function never change. */
9083 /* The first value is the incoming arg-pointer. */
9084 size = GET_MODE_SIZE (Pmode);
9086 /* The second value is the structure value address unless this is
9087 passed as an "invisible" first argument. */
9088 if (struct_value_rtx)
9089 size += GET_MODE_SIZE (Pmode);
9091 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9092 if (FUNCTION_ARG_REGNO_P (regno))
9094 /* Search for the proper mode for copying this register's
9095 value. I'm not sure this is right, but it works so far. */
9096 enum machine_mode best_mode = VOIDmode;
9098 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9100 mode = GET_MODE_WIDER_MODE (mode))
9101 if (HARD_REGNO_MODE_OK (regno, mode)
9102 && HARD_REGNO_NREGS (regno, mode) == 1)
9105 if (best_mode == VOIDmode)
9106 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9108 mode = GET_MODE_WIDER_MODE (mode))
9109 if (HARD_REGNO_MODE_OK (regno, mode)
9110 && (mov_optab->handlers[(int) mode].insn_code
9111 != CODE_FOR_nothing))
9115 if (mode == VOIDmode)
9118 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9119 if (size % align != 0)
9120 size = CEIL (size, align) * align;
9121 apply_args_reg_offset[regno] = size;
9122 size += GET_MODE_SIZE (mode);
9123 apply_args_mode[regno] = mode;
9127 apply_args_mode[regno] = VOIDmode;
9128 apply_args_reg_offset[regno] = 0;
9134 /* Return the size required for the block returned by __builtin_apply,
9135 and initialize apply_result_mode. */
9138 apply_result_size ()
9140 static int size = -1;
9142 enum machine_mode mode;
9144 /* The values computed by this function never change. */
9149 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9150 if (FUNCTION_VALUE_REGNO_P (regno))
9152 /* Search for the proper mode for copying this register's
9153 value. I'm not sure this is right, but it works so far. */
9154 enum machine_mode best_mode = VOIDmode;
9156 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9158 mode = GET_MODE_WIDER_MODE (mode))
9159 if (HARD_REGNO_MODE_OK (regno, mode))
9162 if (best_mode == VOIDmode)
9163 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9165 mode = GET_MODE_WIDER_MODE (mode))
9166 if (HARD_REGNO_MODE_OK (regno, mode)
9167 && (mov_optab->handlers[(int) mode].insn_code
9168 != CODE_FOR_nothing))
9172 if (mode == VOIDmode)
9175 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9176 if (size % align != 0)
9177 size = CEIL (size, align) * align;
9178 size += GET_MODE_SIZE (mode);
9179 apply_result_mode[regno] = mode;
9182 apply_result_mode[regno] = VOIDmode;
9184 /* Allow targets that use untyped_call and untyped_return to override
9185 the size so that machine-specific information can be stored here. */
9186 #ifdef APPLY_RESULT_SIZE
9187 size = APPLY_RESULT_SIZE;
9193 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9194 /* Create a vector describing the result block RESULT. If SAVEP is true,
9195 the result block is used to save the values; otherwise it is used to
9196 restore the values. */
9199 result_vector (savep, result)
9203 int regno, size, align, nelts;
9204 enum machine_mode mode;
9206 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9209 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9210 if ((mode = apply_result_mode[regno]) != VOIDmode)
9212 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9213 if (size % align != 0)
9214 size = CEIL (size, align) * align;
9215 reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
9216 mem = change_address (result, mode,
9217 plus_constant (XEXP (result, 0), size));
9218 savevec[nelts++] = (savep
9219 ? gen_rtx (SET, VOIDmode, mem, reg)
9220 : gen_rtx (SET, VOIDmode, reg, mem));
9221 size += GET_MODE_SIZE (mode);
9223 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
9225 #endif /* HAVE_untyped_call or HAVE_untyped_return */
9227 /* Save the state required to perform an untyped call with the same
9228 arguments as were passed to the current function. */
9231 expand_builtin_apply_args ()
9234 int size, align, regno;
9235 enum machine_mode mode;
9237 /* Create a block where the arg-pointer, structure value address,
9238 and argument registers can be saved. */
9239 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
9241 /* Walk past the arg-pointer and structure value address. */
9242 size = GET_MODE_SIZE (Pmode);
9243 if (struct_value_rtx)
9244 size += GET_MODE_SIZE (Pmode);
9246 /* Save each register used in calling a function to the block. */
9247 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9248 if ((mode = apply_args_mode[regno]) != VOIDmode)
9252 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9253 if (size % align != 0)
9254 size = CEIL (size, align) * align;
9256 tem = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9259 /* For reg-stack.c's stack register household.
9260 Compare with a similar piece of code in function.c. */
9262 emit_insn (gen_rtx (USE, mode, tem));
9265 emit_move_insn (change_address (registers, mode,
9266 plus_constant (XEXP (registers, 0),
9269 size += GET_MODE_SIZE (mode);
9272 /* Save the arg pointer to the block. */
9273 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9274 copy_to_reg (virtual_incoming_args_rtx));
9275 size = GET_MODE_SIZE (Pmode);
9277 /* Save the structure value address unless this is passed as an
9278 "invisible" first argument. */
9279 if (struct_value_incoming_rtx)
9281 emit_move_insn (change_address (registers, Pmode,
9282 plus_constant (XEXP (registers, 0),
9284 copy_to_reg (struct_value_incoming_rtx));
9285 size += GET_MODE_SIZE (Pmode);
9288 /* Return the address of the block. */
9289 return copy_addr_to_reg (XEXP (registers, 0));
9292 /* Perform an untyped call and save the state required to perform an
9293 untyped return of whatever value was returned by the given function. */
9296 expand_builtin_apply (function, arguments, argsize)
9297 rtx function, arguments, argsize;
9299 int size, align, regno;
9300 enum machine_mode mode;
9301 rtx incoming_args, result, reg, dest, call_insn;
9302 rtx old_stack_level = 0;
9303 rtx call_fusage = 0;
9305 /* Create a block where the return registers can be saved. */
9306 result = assign_stack_local (BLKmode, apply_result_size (), -1);
9308 /* ??? The argsize value should be adjusted here. */
9310 /* Fetch the arg pointer from the ARGUMENTS block. */
9311 incoming_args = gen_reg_rtx (Pmode);
9312 emit_move_insn (incoming_args,
9313 gen_rtx (MEM, Pmode, arguments));
9314 #ifndef STACK_GROWS_DOWNWARD
9315 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9316 incoming_args, 0, OPTAB_LIB_WIDEN);
9319 /* Perform postincrements before actually calling the function. */
9322 /* Push a new argument block and copy the arguments. */
9323 do_pending_stack_adjust ();
9324 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
9326 /* Push a block of memory onto the stack to store the memory arguments.
9327 Save the address in a register, and copy the memory arguments. ??? I
9328 haven't figured out how the calling convention macros effect this,
9329 but it's likely that the source and/or destination addresses in
9330 the block copy will need updating in machine specific ways. */
9331 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
9332 emit_block_move (gen_rtx (MEM, BLKmode, dest),
9333 gen_rtx (MEM, BLKmode, incoming_args),
9335 PARM_BOUNDARY / BITS_PER_UNIT);
9337 /* Refer to the argument block. */
9339 arguments = gen_rtx (MEM, BLKmode, arguments);
9341 /* Walk past the arg-pointer and structure value address. */
9342 size = GET_MODE_SIZE (Pmode);
9343 if (struct_value_rtx)
9344 size += GET_MODE_SIZE (Pmode);
9346 /* Restore each of the registers previously saved. Make USE insns
9347 for each of these registers for use in making the call. */
9348 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9349 if ((mode = apply_args_mode[regno]) != VOIDmode)
9351 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9352 if (size % align != 0)
9353 size = CEIL (size, align) * align;
9354 reg = gen_rtx (REG, mode, regno);
9355 emit_move_insn (reg,
9356 change_address (arguments, mode,
9357 plus_constant (XEXP (arguments, 0),
9360 use_reg (&call_fusage, reg);
9361 size += GET_MODE_SIZE (mode);
9364 /* Restore the structure value address unless this is passed as an
9365 "invisible" first argument. */
9366 size = GET_MODE_SIZE (Pmode);
9367 if (struct_value_rtx)
9369 rtx value = gen_reg_rtx (Pmode);
9370 emit_move_insn (value,
9371 change_address (arguments, Pmode,
9372 plus_constant (XEXP (arguments, 0),
9374 emit_move_insn (struct_value_rtx, value);
9375 if (GET_CODE (struct_value_rtx) == REG)
9376 use_reg (&call_fusage, struct_value_rtx);
9377 size += GET_MODE_SIZE (Pmode);
9380 /* All arguments and registers used for the call are set up by now! */
9381 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
9383 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
9384 and we don't want to load it into a register as an optimization,
9385 because prepare_call_address already did it if it should be done. */
9386 if (GET_CODE (function) != SYMBOL_REF)
9387 function = memory_address (FUNCTION_MODE, function);
9389 /* Generate the actual call instruction and save the return value. */
9390 #ifdef HAVE_untyped_call
9391 if (HAVE_untyped_call)
9392 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
9393 result, result_vector (1, result)));
9396 #ifdef HAVE_call_value
9397 if (HAVE_call_value)
9401 /* Locate the unique return register. It is not possible to
9402 express a call that sets more than one return register using
9403 call_value; use untyped_call for that. In fact, untyped_call
9404 only needs to save the return registers in the given block. */
9405 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9406 if ((mode = apply_result_mode[regno]) != VOIDmode)
9409 abort (); /* HAVE_untyped_call required. */
9410 valreg = gen_rtx (REG, mode, regno);
9413 emit_call_insn (gen_call_value (valreg,
9414 gen_rtx (MEM, FUNCTION_MODE, function),
9415 const0_rtx, NULL_RTX, const0_rtx));
9417 emit_move_insn (change_address (result, GET_MODE (valreg),
9425 /* Find the CALL insn we just emitted. */
9426 for (call_insn = get_last_insn ();
9427 call_insn && GET_CODE (call_insn) != CALL_INSN;
9428 call_insn = PREV_INSN (call_insn))
9434 /* Put the register usage information on the CALL. If there is already
9435 some usage information, put ours at the end. */
9436 if (CALL_INSN_FUNCTION_USAGE (call_insn))
9440 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
9441 link = XEXP (link, 1))
9444 XEXP (link, 1) = call_fusage;
9447 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
9449 /* Restore the stack. */
9450 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
9452 /* Return the address of the result block. */
9453 return copy_addr_to_reg (XEXP (result, 0));
9456 /* Perform an untyped return. */
9459 expand_builtin_return (result)
9462 int size, align, regno;
9463 enum machine_mode mode;
9465 rtx call_fusage = 0;
9467 apply_result_size ();
9468 result = gen_rtx (MEM, BLKmode, result);
9470 #ifdef HAVE_untyped_return
9471 if (HAVE_untyped_return)
9473 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
9479 /* Restore the return value and note that each value is used. */
9481 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9482 if ((mode = apply_result_mode[regno]) != VOIDmode)
9484 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9485 if (size % align != 0)
9486 size = CEIL (size, align) * align;
9487 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9488 emit_move_insn (reg,
9489 change_address (result, mode,
9490 plus_constant (XEXP (result, 0),
9493 push_to_sequence (call_fusage);
9494 emit_insn (gen_rtx (USE, VOIDmode, reg));
9495 call_fusage = get_insns ();
9497 size += GET_MODE_SIZE (mode);
9500 /* Put the USE insns before the return. */
9501 emit_insns (call_fusage);
9503 /* Return whatever values was restored by jumping directly to the end
9505 expand_null_return ();
9508 /* Expand code for a post- or pre- increment or decrement
9509 and return the RTX for the result.
9510 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9513 expand_increment (exp, post, ignore)
9517 register rtx op0, op1;
9518 register rtx temp, value;
9519 register tree incremented = TREE_OPERAND (exp, 0);
9520 optab this_optab = add_optab;
9522 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9523 int op0_is_copy = 0;
9524 int single_insn = 0;
9525 /* 1 means we can't store into OP0 directly,
9526 because it is a subreg narrower than a word,
9527 and we don't dare clobber the rest of the word. */
9530 if (output_bytecode)
9532 bc_expand_expr (exp);
9536 /* Stabilize any component ref that might need to be
9537 evaluated more than once below. */
9539 || TREE_CODE (incremented) == BIT_FIELD_REF
9540 || (TREE_CODE (incremented) == COMPONENT_REF
9541 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9542 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9543 incremented = stabilize_reference (incremented);
9544 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9545 ones into save exprs so that they don't accidentally get evaluated
9546 more than once by the code below. */
9547 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9548 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9549 incremented = save_expr (incremented);
9551 /* Compute the operands as RTX.
9552 Note whether OP0 is the actual lvalue or a copy of it:
9553 I believe it is a copy iff it is a register or subreg
9554 and insns were generated in computing it. */
9556 temp = get_last_insn ();
9557 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9559 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9560 in place but instead must do sign- or zero-extension during assignment,
9561 so we copy it into a new register and let the code below use it as
9564 Note that we can safely modify this SUBREG since it is know not to be
9565 shared (it was made by the expand_expr call above). */
9567 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9570 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9574 else if (GET_CODE (op0) == SUBREG
9575 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9577 /* We cannot increment this SUBREG in place. If we are
9578 post-incrementing, get a copy of the old value. Otherwise,
9579 just mark that we cannot increment in place. */
9581 op0 = copy_to_reg (op0);
9586 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9587 && temp != get_last_insn ());
9588 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9590 /* Decide whether incrementing or decrementing. */
9591 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9592 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9593 this_optab = sub_optab;
9595 /* Convert decrement by a constant into a negative increment. */
9596 if (this_optab == sub_optab
9597 && GET_CODE (op1) == CONST_INT)
9599 op1 = GEN_INT (- INTVAL (op1));
9600 this_optab = add_optab;
9603 /* For a preincrement, see if we can do this with a single instruction. */
9606 icode = (int) this_optab->handlers[(int) mode].insn_code;
9607 if (icode != (int) CODE_FOR_nothing
9608 /* Make sure that OP0 is valid for operands 0 and 1
9609 of the insn we want to queue. */
9610 && (*insn_operand_predicate[icode][0]) (op0, mode)
9611 && (*insn_operand_predicate[icode][1]) (op0, mode)
9612 && (*insn_operand_predicate[icode][2]) (op1, mode))
9616 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9617 then we cannot just increment OP0. We must therefore contrive to
9618 increment the original value. Then, for postincrement, we can return
9619 OP0 since it is a copy of the old value. For preincrement, expand here
9620 unless we can do it with a single insn.
9622 Likewise if storing directly into OP0 would clobber high bits
9623 we need to preserve (bad_subreg). */
9624 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9626 /* This is the easiest way to increment the value wherever it is.
9627 Problems with multiple evaluation of INCREMENTED are prevented
9628 because either (1) it is a component_ref or preincrement,
9629 in which case it was stabilized above, or (2) it is an array_ref
9630 with constant index in an array in a register, which is
9631 safe to reevaluate. */
9632 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9633 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9634 ? MINUS_EXPR : PLUS_EXPR),
9637 TREE_OPERAND (exp, 1));
9639 while (TREE_CODE (incremented) == NOP_EXPR
9640 || TREE_CODE (incremented) == CONVERT_EXPR)
9642 newexp = convert (TREE_TYPE (incremented), newexp);
9643 incremented = TREE_OPERAND (incremented, 0);
9646 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9647 return post ? op0 : temp;
9652 /* We have a true reference to the value in OP0.
9653 If there is an insn to add or subtract in this mode, queue it.
9654 Queueing the increment insn avoids the register shuffling
9655 that often results if we must increment now and first save
9656 the old value for subsequent use. */
9658 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9659 op0 = stabilize (op0);
9662 icode = (int) this_optab->handlers[(int) mode].insn_code;
9663 if (icode != (int) CODE_FOR_nothing
9664 /* Make sure that OP0 is valid for operands 0 and 1
9665 of the insn we want to queue. */
9666 && (*insn_operand_predicate[icode][0]) (op0, mode)
9667 && (*insn_operand_predicate[icode][1]) (op0, mode))
9669 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9670 op1 = force_reg (mode, op1);
9672 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9674 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9676 rtx addr = force_reg (Pmode, XEXP (op0, 0));
9679 op0 = change_address (op0, VOIDmode, addr);
9680 temp = force_reg (GET_MODE (op0), op0);
9681 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9682 op1 = force_reg (mode, op1);
9684 /* The increment queue is LIFO, thus we have to `queue'
9685 the instructions in reverse order. */
9686 enqueue_insn (op0, gen_move_insn (op0, temp));
9687 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9692 /* Preincrement, or we can't increment with one simple insn. */
9694 /* Save a copy of the value before inc or dec, to return it later. */
9695 temp = value = copy_to_reg (op0);
9697 /* Arrange to return the incremented value. */
9698 /* Copy the rtx because expand_binop will protect from the queue,
9699 and the results of that would be invalid for us to return
9700 if our caller does emit_queue before using our result. */
9701 temp = copy_rtx (value = op0);
9703 /* Increment however we can. */
9704 op1 = expand_binop (mode, this_optab, value, op1, op0,
9705 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9706 /* Make sure the value is stored into OP0. */
9708 emit_move_insn (op0, op1);
9713 /* Expand all function calls contained within EXP, innermost ones first.
9714 But don't look within expressions that have sequence points.
9715 For each CALL_EXPR, record the rtx for its value
9716 in the CALL_EXPR_RTL field. */
9719 preexpand_calls (exp)
9722 register int nops, i;
9723 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9725 if (! do_preexpand_calls)
9728 /* Only expressions and references can contain calls. */
9730 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9733 switch (TREE_CODE (exp))
9736 /* Do nothing if already expanded. */
9737 if (CALL_EXPR_RTL (exp) != 0
9738 /* Do nothing if the call returns a variable-sized object. */
9739 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
9740 /* Do nothing to built-in functions. */
9741 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9742 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9744 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9747 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9752 case TRUTH_ANDIF_EXPR:
9753 case TRUTH_ORIF_EXPR:
9754 /* If we find one of these, then we can be sure
9755 the adjust will be done for it (since it makes jumps).
9756 Do it now, so that if this is inside an argument
9757 of a function, we don't get the stack adjustment
9758 after some other args have already been pushed. */
9759 do_pending_stack_adjust ();
9764 case WITH_CLEANUP_EXPR:
9765 case CLEANUP_POINT_EXPR:
9769 if (SAVE_EXPR_RTL (exp) != 0)
9773 nops = tree_code_length[(int) TREE_CODE (exp)];
9774 for (i = 0; i < nops; i++)
9775 if (TREE_OPERAND (exp, i) != 0)
9777 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9778 if (type == 'e' || type == '<' || type == '1' || type == '2'
9780 preexpand_calls (TREE_OPERAND (exp, i));
9784 /* At the start of a function, record that we have no previously-pushed
9785 arguments waiting to be popped. */
9788 init_pending_stack_adjust ()
9790 pending_stack_adjust = 0;
9793 /* When exiting from function, if safe, clear out any pending stack adjust
9794 so the adjustment won't get done. */
9797 clear_pending_stack_adjust ()
9799 #ifdef EXIT_IGNORE_STACK
9801 && ! flag_omit_frame_pointer && EXIT_IGNORE_STACK
9802 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9803 && ! flag_inline_functions)
9804 pending_stack_adjust = 0;
9808 /* Pop any previously-pushed arguments that have not been popped yet. */
9811 do_pending_stack_adjust ()
9813 if (inhibit_defer_pop == 0)
9815 if (pending_stack_adjust != 0)
9816 adjust_stack (GEN_INT (pending_stack_adjust));
9817 pending_stack_adjust = 0;
9821 /* Defer the expansion all cleanups up to OLD_CLEANUPS.
9822 Returns the cleanups to be performed. */
9825 defer_cleanups_to (old_cleanups)
9828 tree new_cleanups = NULL_TREE;
9829 tree cleanups = cleanups_this_call;
9830 tree last = NULL_TREE;
9832 while (cleanups_this_call != old_cleanups)
9834 expand_eh_region_end (TREE_VALUE (cleanups_this_call));
9835 last = cleanups_this_call;
9836 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
9841 /* Remove the list from the chain of cleanups. */
9842 TREE_CHAIN (last) = NULL_TREE;
9844 /* reverse them so that we can build them in the right order. */
9845 cleanups = nreverse (cleanups);
9847 /* All cleanups must be on the function_obstack. */
9848 push_obstacks_nochange ();
9849 resume_temporary_allocation ();
9854 new_cleanups = build (COMPOUND_EXPR, TREE_TYPE (new_cleanups),
9855 TREE_VALUE (cleanups), new_cleanups);
9857 new_cleanups = TREE_VALUE (cleanups);
9859 cleanups = TREE_CHAIN (cleanups);
9865 return new_cleanups;
9868 /* Expand all cleanups up to OLD_CLEANUPS.
9869 Needed here, and also for language-dependent calls. */
9872 expand_cleanups_to (old_cleanups)
9875 while (cleanups_this_call != old_cleanups)
9877 expand_eh_region_end (TREE_VALUE (cleanups_this_call));
9878 expand_expr (TREE_VALUE (cleanups_this_call), const0_rtx, VOIDmode, 0);
9879 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
9883 /* Expand conditional expressions. */
9885 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9886 LABEL is an rtx of code CODE_LABEL, in this function and all the
9890 jumpifnot (exp, label)
9894 do_jump (exp, label, NULL_RTX);
9897 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9904 do_jump (exp, NULL_RTX, label);
9907 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9908 the result is zero, or IF_TRUE_LABEL if the result is one.
9909 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9910 meaning fall through in that case.
9912 do_jump always does any pending stack adjust except when it does not
9913 actually perform a jump. An example where there is no jump
9914 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9916 This function is responsible for optimizing cases such as
9917 &&, || and comparison operators in EXP. */
9920 do_jump (exp, if_false_label, if_true_label)
9922 rtx if_false_label, if_true_label;
9924 register enum tree_code code = TREE_CODE (exp);
9925 /* Some cases need to create a label to jump to
9926 in order to properly fall through.
9927 These cases set DROP_THROUGH_LABEL nonzero. */
9928 rtx drop_through_label = 0;
9933 enum machine_mode mode;
9943 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9949 /* This is not true with #pragma weak */
9951 /* The address of something can never be zero. */
9953 emit_jump (if_true_label);
9958 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9959 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9960 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9963 /* If we are narrowing the operand, we have to do the compare in the
9965 if ((TYPE_PRECISION (TREE_TYPE (exp))
9966 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9968 case NON_LVALUE_EXPR:
9969 case REFERENCE_EXPR:
9974 /* These cannot change zero->non-zero or vice versa. */
9975 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9979 /* This is never less insns than evaluating the PLUS_EXPR followed by
9980 a test and can be longer if the test is eliminated. */
9982 /* Reduce to minus. */
9983 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9984 TREE_OPERAND (exp, 0),
9985 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9986 TREE_OPERAND (exp, 1))));
9987 /* Process as MINUS. */
9991 /* Non-zero iff operands of minus differ. */
9992 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
9993 TREE_OPERAND (exp, 0),
9994 TREE_OPERAND (exp, 1)),
9999 /* If we are AND'ing with a small constant, do this comparison in the
10000 smallest type that fits. If the machine doesn't have comparisons
10001 that small, it will be converted back to the wider comparison.
10002 This helps if we are testing the sign bit of a narrower object.
10003 combine can't do this for us because it can't know whether a
10004 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
10006 if (! SLOW_BYTE_ACCESS
10007 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
10008 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
10009 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
10010 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
10011 && (type = type_for_mode (mode, 1)) != 0
10012 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10013 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10014 != CODE_FOR_nothing))
10016 do_jump (convert (type, exp), if_false_label, if_true_label);
10021 case TRUTH_NOT_EXPR:
10022 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10025 case TRUTH_ANDIF_EXPR:
10028 tree cleanups, old_cleanups;
10030 if (if_false_label == 0)
10031 if_false_label = drop_through_label = gen_label_rtx ();
10033 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
10034 seq1 = get_insns ();
10037 old_cleanups = cleanups_this_call;
10039 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10040 seq2 = get_insns ();
10041 cleanups = defer_cleanups_to (old_cleanups);
10046 rtx flag = gen_reg_rtx (word_mode);
10050 /* Flag cleanups as not needed. */
10051 emit_move_insn (flag, const0_rtx);
10054 /* Flag cleanups as needed. */
10055 emit_move_insn (flag, const1_rtx);
10058 /* All cleanups must be on the function_obstack. */
10059 push_obstacks_nochange ();
10060 resume_temporary_allocation ();
10062 /* convert flag, which is an rtx, into a tree. */
10063 cond = make_node (RTL_EXPR);
10064 TREE_TYPE (cond) = integer_type_node;
10065 RTL_EXPR_RTL (cond) = flag;
10066 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
10067 cond = save_expr (cond);
10069 new_cleanups = build (COND_EXPR, void_type_node,
10070 truthvalue_conversion (cond),
10071 cleanups, integer_zero_node);
10072 new_cleanups = fold (new_cleanups);
10076 /* Now add in the conditionalized cleanups. */
10078 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
10079 expand_eh_region_start ();
10089 case TRUTH_ORIF_EXPR:
10092 tree cleanups, old_cleanups;
10094 if (if_true_label == 0)
10095 if_true_label = drop_through_label = gen_label_rtx ();
10097 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
10098 seq1 = get_insns ();
10101 old_cleanups = cleanups_this_call;
10103 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10104 seq2 = get_insns ();
10105 cleanups = defer_cleanups_to (old_cleanups);
10110 rtx flag = gen_reg_rtx (word_mode);
10114 /* Flag cleanups as not needed. */
10115 emit_move_insn (flag, const0_rtx);
10118 /* Flag cleanups as needed. */
10119 emit_move_insn (flag, const1_rtx);
10122 /* All cleanups must be on the function_obstack. */
10123 push_obstacks_nochange ();
10124 resume_temporary_allocation ();
10126 /* convert flag, which is an rtx, into a tree. */
10127 cond = make_node (RTL_EXPR);
10128 TREE_TYPE (cond) = integer_type_node;
10129 RTL_EXPR_RTL (cond) = flag;
10130 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
10131 cond = save_expr (cond);
10133 new_cleanups = build (COND_EXPR, void_type_node,
10134 truthvalue_conversion (cond),
10135 cleanups, integer_zero_node);
10136 new_cleanups = fold (new_cleanups);
10140 /* Now add in the conditionalized cleanups. */
10142 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
10143 expand_eh_region_start ();
10153 case COMPOUND_EXPR:
10154 push_temp_slots ();
10155 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
10156 preserve_temp_slots (NULL_RTX);
10157 free_temp_slots ();
10160 do_pending_stack_adjust ();
10161 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10164 case COMPONENT_REF:
10165 case BIT_FIELD_REF:
10168 int bitsize, bitpos, unsignedp;
10169 enum machine_mode mode;
10175 /* Get description of this reference. We don't actually care
10176 about the underlying object here. */
10177 get_inner_reference (exp, &bitsize, &bitpos, &offset,
10178 &mode, &unsignedp, &volatilep,
10181 type = type_for_size (bitsize, unsignedp);
10182 if (! SLOW_BYTE_ACCESS
10183 && type != 0 && bitsize >= 0
10184 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10185 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10186 != CODE_FOR_nothing))
10188 do_jump (convert (type, exp), if_false_label, if_true_label);
10195 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10196 if (integer_onep (TREE_OPERAND (exp, 1))
10197 && integer_zerop (TREE_OPERAND (exp, 2)))
10198 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10200 else if (integer_zerop (TREE_OPERAND (exp, 1))
10201 && integer_onep (TREE_OPERAND (exp, 2)))
10202 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10207 tree cleanups_left_side, cleanups_right_side, old_cleanups;
10209 register rtx label1 = gen_label_rtx ();
10210 drop_through_label = gen_label_rtx ();
10212 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10214 /* We need to save the cleanups for the lhs and rhs separately.
10215 Keep track of the cleanups seen before the lhs. */
10216 old_cleanups = cleanups_this_call;
10218 /* Now the THEN-expression. */
10219 do_jump (TREE_OPERAND (exp, 1),
10220 if_false_label ? if_false_label : drop_through_label,
10221 if_true_label ? if_true_label : drop_through_label);
10222 /* In case the do_jump just above never jumps. */
10223 do_pending_stack_adjust ();
10224 emit_label (label1);
10225 seq1 = get_insns ();
10226 /* Now grab the cleanups for the lhs. */
10227 cleanups_left_side = defer_cleanups_to (old_cleanups);
10230 /* And keep track of where we start before the rhs. */
10231 old_cleanups = cleanups_this_call;
10233 /* Now the ELSE-expression. */
10234 do_jump (TREE_OPERAND (exp, 2),
10235 if_false_label ? if_false_label : drop_through_label,
10236 if_true_label ? if_true_label : drop_through_label);
10237 seq2 = get_insns ();
10238 /* Grab the cleanups for the rhs. */
10239 cleanups_right_side = defer_cleanups_to (old_cleanups);
10242 if (cleanups_left_side || cleanups_right_side)
10244 /* Make the cleanups for the THEN and ELSE clauses
10245 conditional based on which half is executed. */
10246 rtx flag = gen_reg_rtx (word_mode);
10250 /* Set the flag to 0 so that we know we executed the lhs. */
10251 emit_move_insn (flag, const0_rtx);
10254 /* Set the flag to 1 so that we know we executed the rhs. */
10255 emit_move_insn (flag, const1_rtx);
10258 /* Make sure the cleanup lives on the function_obstack. */
10259 push_obstacks_nochange ();
10260 resume_temporary_allocation ();
10262 /* Now, build up a COND_EXPR that tests the value of the
10263 flag, and then either do the cleanups for the lhs or the
10265 cond = make_node (RTL_EXPR);
10266 TREE_TYPE (cond) = integer_type_node;
10267 RTL_EXPR_RTL (cond) = flag;
10268 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
10269 cond = save_expr (cond);
10271 new_cleanups = build (COND_EXPR, void_type_node,
10272 truthvalue_conversion (cond),
10273 cleanups_right_side, cleanups_left_side);
10274 new_cleanups = fold (new_cleanups);
10278 /* Now add in the conditionalized cleanups. */
10280 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
10281 expand_eh_region_start ();
10285 /* No cleanups were needed, so emit the two sequences
10295 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10297 if (integer_zerop (TREE_OPERAND (exp, 1)))
10298 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10299 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10300 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10303 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10304 fold (build (EQ_EXPR, TREE_TYPE (exp),
10305 fold (build1 (REALPART_EXPR,
10306 TREE_TYPE (inner_type),
10307 TREE_OPERAND (exp, 0))),
10308 fold (build1 (REALPART_EXPR,
10309 TREE_TYPE (inner_type),
10310 TREE_OPERAND (exp, 1))))),
10311 fold (build (EQ_EXPR, TREE_TYPE (exp),
10312 fold (build1 (IMAGPART_EXPR,
10313 TREE_TYPE (inner_type),
10314 TREE_OPERAND (exp, 0))),
10315 fold (build1 (IMAGPART_EXPR,
10316 TREE_TYPE (inner_type),
10317 TREE_OPERAND (exp, 1))))))),
10318 if_false_label, if_true_label);
10319 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10320 && !can_compare_p (TYPE_MODE (inner_type)))
10321 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10323 comparison = compare (exp, EQ, EQ);
10329 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10331 if (integer_zerop (TREE_OPERAND (exp, 1)))
10332 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10333 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10334 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10337 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10338 fold (build (NE_EXPR, TREE_TYPE (exp),
10339 fold (build1 (REALPART_EXPR,
10340 TREE_TYPE (inner_type),
10341 TREE_OPERAND (exp, 0))),
10342 fold (build1 (REALPART_EXPR,
10343 TREE_TYPE (inner_type),
10344 TREE_OPERAND (exp, 1))))),
10345 fold (build (NE_EXPR, TREE_TYPE (exp),
10346 fold (build1 (IMAGPART_EXPR,
10347 TREE_TYPE (inner_type),
10348 TREE_OPERAND (exp, 0))),
10349 fold (build1 (IMAGPART_EXPR,
10350 TREE_TYPE (inner_type),
10351 TREE_OPERAND (exp, 1))))))),
10352 if_false_label, if_true_label);
10353 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10354 && !can_compare_p (TYPE_MODE (inner_type)))
10355 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10357 comparison = compare (exp, NE, NE);
10362 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10364 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10365 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10367 comparison = compare (exp, LT, LTU);
10371 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10373 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10374 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10376 comparison = compare (exp, LE, LEU);
10380 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10382 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10383 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10385 comparison = compare (exp, GT, GTU);
10389 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10391 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10392 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10394 comparison = compare (exp, GE, GEU);
10399 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10401 /* This is not needed any more and causes poor code since it causes
10402 comparisons and tests from non-SI objects to have different code
10404 /* Copy to register to avoid generating bad insns by cse
10405 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10406 if (!cse_not_expected && GET_CODE (temp) == MEM)
10407 temp = copy_to_reg (temp);
10409 do_pending_stack_adjust ();
10410 if (GET_CODE (temp) == CONST_INT)
10411 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10412 else if (GET_CODE (temp) == LABEL_REF)
10413 comparison = const_true_rtx;
10414 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10415 && !can_compare_p (GET_MODE (temp)))
10416 /* Note swapping the labels gives us not-equal. */
10417 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10418 else if (GET_MODE (temp) != VOIDmode)
10419 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
10420 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10421 GET_MODE (temp), NULL_RTX, 0);
10426 /* Do any postincrements in the expression that was tested. */
10429 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10430 straight into a conditional jump instruction as the jump condition.
10431 Otherwise, all the work has been done already. */
10433 if (comparison == const_true_rtx)
10436 emit_jump (if_true_label);
10438 else if (comparison == const0_rtx)
10440 if (if_false_label)
10441 emit_jump (if_false_label);
10443 else if (comparison)
10444 do_jump_for_compare (comparison, if_false_label, if_true_label);
10446 if (drop_through_label)
10448 /* If do_jump produces code that might be jumped around,
10449 do any stack adjusts from that code, before the place
10450 where control merges in. */
10451 do_pending_stack_adjust ();
10452 emit_label (drop_through_label);
10456 /* Given a comparison expression EXP for values too wide to be compared
10457 with one insn, test the comparison and jump to the appropriate label.
10458 The code of EXP is ignored; we always test GT if SWAP is 0,
10459 and LT if SWAP is 1. */
10462 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10465 rtx if_false_label, if_true_label;
10467 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10468 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10469 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10470 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10471 rtx drop_through_label = 0;
10472 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10475 if (! if_true_label || ! if_false_label)
10476 drop_through_label = gen_label_rtx ();
10477 if (! if_true_label)
10478 if_true_label = drop_through_label;
10479 if (! if_false_label)
10480 if_false_label = drop_through_label;
10482 /* Compare a word at a time, high order first. */
10483 for (i = 0; i < nwords; i++)
10486 rtx op0_word, op1_word;
10488 if (WORDS_BIG_ENDIAN)
10490 op0_word = operand_subword_force (op0, i, mode);
10491 op1_word = operand_subword_force (op1, i, mode);
10495 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10496 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10499 /* All but high-order word must be compared as unsigned. */
10500 comp = compare_from_rtx (op0_word, op1_word,
10501 (unsignedp || i > 0) ? GTU : GT,
10502 unsignedp, word_mode, NULL_RTX, 0);
10503 if (comp == const_true_rtx)
10504 emit_jump (if_true_label);
10505 else if (comp != const0_rtx)
10506 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10508 /* Consider lower words only if these are equal. */
10509 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10511 if (comp == const_true_rtx)
10512 emit_jump (if_false_label);
10513 else if (comp != const0_rtx)
10514 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10517 if (if_false_label)
10518 emit_jump (if_false_label);
10519 if (drop_through_label)
10520 emit_label (drop_through_label);
10523 /* Compare OP0 with OP1, word at a time, in mode MODE.
10524 UNSIGNEDP says to do unsigned comparison.
10525 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10528 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10529 enum machine_mode mode;
10532 rtx if_false_label, if_true_label;
10534 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10535 rtx drop_through_label = 0;
10538 if (! if_true_label || ! if_false_label)
10539 drop_through_label = gen_label_rtx ();
10540 if (! if_true_label)
10541 if_true_label = drop_through_label;
10542 if (! if_false_label)
10543 if_false_label = drop_through_label;
10545 /* Compare a word at a time, high order first. */
10546 for (i = 0; i < nwords; i++)
10549 rtx op0_word, op1_word;
10551 if (WORDS_BIG_ENDIAN)
10553 op0_word = operand_subword_force (op0, i, mode);
10554 op1_word = operand_subword_force (op1, i, mode);
10558 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10559 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10562 /* All but high-order word must be compared as unsigned. */
10563 comp = compare_from_rtx (op0_word, op1_word,
10564 (unsignedp || i > 0) ? GTU : GT,
10565 unsignedp, word_mode, NULL_RTX, 0);
10566 if (comp == const_true_rtx)
10567 emit_jump (if_true_label);
10568 else if (comp != const0_rtx)
10569 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10571 /* Consider lower words only if these are equal. */
10572 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10574 if (comp == const_true_rtx)
10575 emit_jump (if_false_label);
10576 else if (comp != const0_rtx)
10577 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10580 if (if_false_label)
10581 emit_jump (if_false_label);
10582 if (drop_through_label)
10583 emit_label (drop_through_label);
10586 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10587 with one insn, test the comparison and jump to the appropriate label. */
10590 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10592 rtx if_false_label, if_true_label;
10594 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10595 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10596 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10597 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10599 rtx drop_through_label = 0;
10601 if (! if_false_label)
10602 drop_through_label = if_false_label = gen_label_rtx ();
10604 for (i = 0; i < nwords; i++)
10606 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
10607 operand_subword_force (op1, i, mode),
10608 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10609 word_mode, NULL_RTX, 0);
10610 if (comp == const_true_rtx)
10611 emit_jump (if_false_label);
10612 else if (comp != const0_rtx)
10613 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10617 emit_jump (if_true_label);
10618 if (drop_through_label)
10619 emit_label (drop_through_label);
10622 /* Jump according to whether OP0 is 0.
10623 We assume that OP0 has an integer mode that is too wide
10624 for the available compare insns. */
10627 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10629 rtx if_false_label, if_true_label;
10631 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10633 rtx drop_through_label = 0;
10635 if (! if_false_label)
10636 drop_through_label = if_false_label = gen_label_rtx ();
10638 for (i = 0; i < nwords; i++)
10640 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
10642 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
10643 if (comp == const_true_rtx)
10644 emit_jump (if_false_label);
10645 else if (comp != const0_rtx)
10646 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10650 emit_jump (if_true_label);
10651 if (drop_through_label)
10652 emit_label (drop_through_label);
10655 /* Given a comparison expression in rtl form, output conditional branches to
10656 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
10659 do_jump_for_compare (comparison, if_false_label, if_true_label)
10660 rtx comparison, if_false_label, if_true_label;
10664 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10665 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
10669 if (if_false_label)
10670 emit_jump (if_false_label);
10672 else if (if_false_label)
10675 rtx prev = get_last_insn ();
10678 /* Output the branch with the opposite condition. Then try to invert
10679 what is generated. If more than one insn is a branch, or if the
10680 branch is not the last insn written, abort. If we can't invert
10681 the branch, emit make a true label, redirect this jump to that,
10682 emit a jump to the false label and define the true label. */
10684 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10685 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
10689 /* Here we get the first insn that was just emitted. It used to be the
10690 case that, on some machines, emitting the branch would discard
10691 the previous compare insn and emit a replacement. This isn't
10692 done anymore, but abort if we see that PREV is deleted. */
10695 insn = get_insns ();
10696 else if (INSN_DELETED_P (prev))
10699 insn = NEXT_INSN (prev);
10701 for (; insn; insn = NEXT_INSN (insn))
10702 if (GET_CODE (insn) == JUMP_INSN)
10709 if (branch != get_last_insn ())
10712 JUMP_LABEL (branch) = if_false_label;
10713 if (! invert_jump (branch, if_false_label))
10715 if_true_label = gen_label_rtx ();
10716 redirect_jump (branch, if_true_label);
10717 emit_jump (if_false_label);
10718 emit_label (if_true_label);
10723 /* Generate code for a comparison expression EXP
10724 (including code to compute the values to be compared)
10725 and set (CC0) according to the result.
10726 SIGNED_CODE should be the rtx operation for this comparison for
10727 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10729 We force a stack adjustment unless there are currently
10730 things pushed on the stack that aren't yet used. */
10733 compare (exp, signed_code, unsigned_code)
10735 enum rtx_code signed_code, unsigned_code;
10738 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10740 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10741 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
10742 register enum machine_mode mode = TYPE_MODE (type);
10743 int unsignedp = TREE_UNSIGNED (type);
10744 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
10746 #ifdef HAVE_canonicalize_funcptr_for_compare
10747 /* If function pointers need to be "canonicalized" before they can
10748 be reliably compared, then canonicalize them. */
10749 if (HAVE_canonicalize_funcptr_for_compare
10750 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10751 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10754 rtx new_op0 = gen_reg_rtx (mode);
10756 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10760 if (HAVE_canonicalize_funcptr_for_compare
10761 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10762 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10765 rtx new_op1 = gen_reg_rtx (mode);
10767 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10772 return compare_from_rtx (op0, op1, code, unsignedp, mode,
10774 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10775 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
10778 /* Like compare but expects the values to compare as two rtx's.
10779 The decision as to signed or unsigned comparison must be made by the caller.
10781 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10784 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10785 size of MODE should be used. */
10788 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10789 register rtx op0, op1;
10790 enum rtx_code code;
10792 enum machine_mode mode;
10798 /* If one operand is constant, make it the second one. Only do this
10799 if the other operand is not constant as well. */
10801 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10802 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10807 code = swap_condition (code);
10810 if (flag_force_mem)
10812 op0 = force_not_mem (op0);
10813 op1 = force_not_mem (op1);
10816 do_pending_stack_adjust ();
10818 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10819 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10823 /* There's no need to do this now that combine.c can eliminate lots of
10824 sign extensions. This can be less efficient in certain cases on other
10827 /* If this is a signed equality comparison, we can do it as an
10828 unsigned comparison since zero-extension is cheaper than sign
10829 extension and comparisons with zero are done as unsigned. This is
10830 the case even on machines that can do fast sign extension, since
10831 zero-extension is easier to combine with other operations than
10832 sign-extension is. If we are comparing against a constant, we must
10833 convert it to what it would look like unsigned. */
10834 if ((code == EQ || code == NE) && ! unsignedp
10835 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10837 if (GET_CODE (op1) == CONST_INT
10838 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10839 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10844 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10846 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
10849 /* Generate code to calculate EXP using a store-flag instruction
10850 and return an rtx for the result. EXP is either a comparison
10851 or a TRUTH_NOT_EXPR whose operand is a comparison.
10853 If TARGET is nonzero, store the result there if convenient.
10855 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10858 Return zero if there is no suitable set-flag instruction
10859 available on this machine.
10861 Once expand_expr has been called on the arguments of the comparison,
10862 we are committed to doing the store flag, since it is not safe to
10863 re-evaluate the expression. We emit the store-flag insn by calling
10864 emit_store_flag, but only expand the arguments if we have a reason
10865 to believe that emit_store_flag will be successful. If we think that
10866 it will, but it isn't, we have to simulate the store-flag with a
10867 set/jump/set sequence. */
10870 do_store_flag (exp, target, mode, only_cheap)
10873 enum machine_mode mode;
10876 enum rtx_code code;
10877 tree arg0, arg1, type;
10879 enum machine_mode operand_mode;
10883 enum insn_code icode;
10884 rtx subtarget = target;
10885 rtx result, label, pattern, jump_pat;
10887 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10888 result at the end. We can't simply invert the test since it would
10889 have already been inverted if it were valid. This case occurs for
10890 some floating-point comparisons. */
10892 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10893 invert = 1, exp = TREE_OPERAND (exp, 0);
10895 arg0 = TREE_OPERAND (exp, 0);
10896 arg1 = TREE_OPERAND (exp, 1);
10897 type = TREE_TYPE (arg0);
10898 operand_mode = TYPE_MODE (type);
10899 unsignedp = TREE_UNSIGNED (type);
10901 /* We won't bother with BLKmode store-flag operations because it would mean
10902 passing a lot of information to emit_store_flag. */
10903 if (operand_mode == BLKmode)
10906 /* We won't bother with store-flag operations involving function pointers
10907 when function pointers must be canonicalized before comparisons. */
10908 #ifdef HAVE_canonicalize_funcptr_for_compare
10909 if (HAVE_canonicalize_funcptr_for_compare
10910 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10911 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10913 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10914 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10915 == FUNCTION_TYPE))))
10922 /* Get the rtx comparison code to use. We know that EXP is a comparison
10923 operation of some type. Some comparisons against 1 and -1 can be
10924 converted to comparisons with zero. Do so here so that the tests
10925 below will be aware that we have a comparison with zero. These
10926 tests will not catch constants in the first operand, but constants
10927 are rarely passed as the first operand. */
10929 switch (TREE_CODE (exp))
10938 if (integer_onep (arg1))
10939 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10941 code = unsignedp ? LTU : LT;
10944 if (! unsignedp && integer_all_onesp (arg1))
10945 arg1 = integer_zero_node, code = LT;
10947 code = unsignedp ? LEU : LE;
10950 if (! unsignedp && integer_all_onesp (arg1))
10951 arg1 = integer_zero_node, code = GE;
10953 code = unsignedp ? GTU : GT;
10956 if (integer_onep (arg1))
10957 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10959 code = unsignedp ? GEU : GE;
10965 /* Put a constant second. */
10966 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10968 tem = arg0; arg0 = arg1; arg1 = tem;
10969 code = swap_condition (code);
10972 /* If this is an equality or inequality test of a single bit, we can
10973 do this by shifting the bit being tested to the low-order bit and
10974 masking the result with the constant 1. If the condition was EQ,
10975 we xor it with 1. This does not require an scc insn and is faster
10976 than an scc insn even if we have it. */
10978 if ((code == NE || code == EQ)
10979 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10980 && integer_pow2p (TREE_OPERAND (arg0, 1))
10981 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
10983 tree inner = TREE_OPERAND (arg0, 0);
10988 tem = INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
10989 NULL_RTX, VOIDmode, 0));
10990 /* In this case, immed_double_const will sign extend the value to make
10991 it look the same on the host and target. We must remove the
10992 sign-extension before calling exact_log2, since exact_log2 will
10993 fail for negative values. */
10994 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT
10995 && BITS_PER_WORD == GET_MODE_BITSIZE (TYPE_MODE (type)))
10996 /* We don't use the obvious constant shift to generate the mask,
10997 because that generates compiler warnings when BITS_PER_WORD is
10998 greater than or equal to HOST_BITS_PER_WIDE_INT, even though this
10999 code is unreachable in that case. */
11000 tem = tem & GET_MODE_MASK (word_mode);
11001 bitnum = exact_log2 (tem);
11003 /* If INNER is a right shift of a constant and it plus BITNUM does
11004 not overflow, adjust BITNUM and INNER. */
11006 if (TREE_CODE (inner) == RSHIFT_EXPR
11007 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
11008 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
11009 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
11010 < TYPE_PRECISION (type)))
11012 bitnum +=TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
11013 inner = TREE_OPERAND (inner, 0);
11016 /* If we are going to be able to omit the AND below, we must do our
11017 operations as unsigned. If we must use the AND, we have a choice.
11018 Normally unsigned is faster, but for some machines signed is. */
11019 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
11020 #ifdef LOAD_EXTEND_OP
11021 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
11027 if (subtarget == 0 || GET_CODE (subtarget) != REG
11028 || GET_MODE (subtarget) != operand_mode
11029 || ! safe_from_p (subtarget, inner))
11032 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
11035 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
11036 size_int (bitnum), subtarget, ops_unsignedp);
11038 if (GET_MODE (op0) != mode)
11039 op0 = convert_to_mode (mode, op0, ops_unsignedp);
11041 if ((code == EQ && ! invert) || (code == NE && invert))
11042 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
11043 ops_unsignedp, OPTAB_LIB_WIDEN);
11045 /* Put the AND last so it can combine with more things. */
11046 if (bitnum != TYPE_PRECISION (type) - 1)
11047 op0 = expand_and (op0, const1_rtx, subtarget);
11052 /* Now see if we are likely to be able to do this. Return if not. */
11053 if (! can_compare_p (operand_mode))
11055 icode = setcc_gen_code[(int) code];
11056 if (icode == CODE_FOR_nothing
11057 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
11059 /* We can only do this if it is one of the special cases that
11060 can be handled without an scc insn. */
11061 if ((code == LT && integer_zerop (arg1))
11062 || (! only_cheap && code == GE && integer_zerop (arg1)))
11064 else if (BRANCH_COST >= 0
11065 && ! only_cheap && (code == NE || code == EQ)
11066 && TREE_CODE (type) != REAL_TYPE
11067 && ((abs_optab->handlers[(int) operand_mode].insn_code
11068 != CODE_FOR_nothing)
11069 || (ffs_optab->handlers[(int) operand_mode].insn_code
11070 != CODE_FOR_nothing)))
11076 preexpand_calls (exp);
11077 if (subtarget == 0 || GET_CODE (subtarget) != REG
11078 || GET_MODE (subtarget) != operand_mode
11079 || ! safe_from_p (subtarget, arg1))
11082 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
11083 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
11086 target = gen_reg_rtx (mode);
11088 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
11089 because, if the emit_store_flag does anything it will succeed and
11090 OP0 and OP1 will not be used subsequently. */
11092 result = emit_store_flag (target, code,
11093 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
11094 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
11095 operand_mode, unsignedp, 1);
11100 result = expand_binop (mode, xor_optab, result, const1_rtx,
11101 result, 0, OPTAB_LIB_WIDEN);
11105 /* If this failed, we have to do this with set/compare/jump/set code. */
11106 if (GET_CODE (target) != REG
11107 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
11108 target = gen_reg_rtx (GET_MODE (target));
11110 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
11111 result = compare_from_rtx (op0, op1, code, unsignedp,
11112 operand_mode, NULL_RTX, 0);
11113 if (GET_CODE (result) == CONST_INT)
11114 return (((result == const0_rtx && ! invert)
11115 || (result != const0_rtx && invert))
11116 ? const0_rtx : const1_rtx);
11118 label = gen_label_rtx ();
11119 if (bcc_gen_fctn[(int) code] == 0)
11122 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
11123 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
11124 emit_label (label);
11129 /* Generate a tablejump instruction (used for switch statements). */
11131 #ifdef HAVE_tablejump
11133 /* INDEX is the value being switched on, with the lowest value
11134 in the table already subtracted.
11135 MODE is its expected mode (needed if INDEX is constant).
11136 RANGE is the length of the jump table.
11137 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11139 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11140 index value is out of range. */
11143 do_tablejump (index, mode, range, table_label, default_label)
11144 rtx index, range, table_label, default_label;
11145 enum machine_mode mode;
11147 register rtx temp, vector;
11149 /* Do an unsigned comparison (in the proper mode) between the index
11150 expression and the value which represents the length of the range.
11151 Since we just finished subtracting the lower bound of the range
11152 from the index expression, this comparison allows us to simultaneously
11153 check that the original index expression value is both greater than
11154 or equal to the minimum value of the range and less than or equal to
11155 the maximum value of the range. */
11157 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
11158 emit_jump_insn (gen_bgtu (default_label));
11160 /* If index is in range, it must fit in Pmode.
11161 Convert to Pmode so we can index with it. */
11163 index = convert_to_mode (Pmode, index, 1);
11165 /* Don't let a MEM slip thru, because then INDEX that comes
11166 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11167 and break_out_memory_refs will go to work on it and mess it up. */
11168 #ifdef PIC_CASE_VECTOR_ADDRESS
11169 if (flag_pic && GET_CODE (index) != REG)
11170 index = copy_to_mode_reg (Pmode, index);
11173 /* If flag_force_addr were to affect this address
11174 it could interfere with the tricky assumptions made
11175 about addresses that contain label-refs,
11176 which may be valid only very near the tablejump itself. */
11177 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11178 GET_MODE_SIZE, because this indicates how large insns are. The other
11179 uses should all be Pmode, because they are addresses. This code
11180 could fail if addresses and insns are not the same size. */
11181 index = gen_rtx (PLUS, Pmode,
11182 gen_rtx (MULT, Pmode, index,
11183 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
11184 gen_rtx (LABEL_REF, Pmode, table_label));
11185 #ifdef PIC_CASE_VECTOR_ADDRESS
11187 index = PIC_CASE_VECTOR_ADDRESS (index);
11190 index = memory_address_noforce (CASE_VECTOR_MODE, index);
11191 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11192 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
11193 RTX_UNCHANGING_P (vector) = 1;
11194 convert_move (temp, vector, 0);
11196 emit_jump_insn (gen_tablejump (temp, table_label));
11198 #ifndef CASE_VECTOR_PC_RELATIVE
11199 /* If we are generating PIC code or if the table is PC-relative, the
11200 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11206 #endif /* HAVE_tablejump */
11209 /* Emit a suitable bytecode to load a value from memory, assuming a pointer
11210 to that value is on the top of the stack. The resulting type is TYPE, and
11211 the source declaration is DECL. */
11214 bc_load_memory (type, decl)
11217 enum bytecode_opcode opcode;
11220 /* Bit fields are special. We only know about signed and
11221 unsigned ints, and enums. The latter are treated as
11222 signed integers. */
11224 if (DECL_BIT_FIELD (decl))
11225 if (TREE_CODE (type) == ENUMERAL_TYPE
11226 || TREE_CODE (type) == INTEGER_TYPE)
11227 opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
11231 /* See corresponding comment in bc_store_memory(). */
11232 if (TYPE_MODE (type) == BLKmode
11233 || TYPE_MODE (type) == VOIDmode)
11236 opcode = mode_to_load_map [(int) TYPE_MODE (type)];
11238 if (opcode == neverneverland)
11241 bc_emit_bytecode (opcode);
11243 #ifdef DEBUG_PRINT_CODE
11244 fputc ('\n', stderr);
11249 /* Store the contents of the second stack slot to the address in the
11250 top stack slot. DECL is the declaration of the destination and is used
11251 to determine whether we're dealing with a bitfield. */
11254 bc_store_memory (type, decl)
11257 enum bytecode_opcode opcode;
11260 if (DECL_BIT_FIELD (decl))
11262 if (TREE_CODE (type) == ENUMERAL_TYPE
11263 || TREE_CODE (type) == INTEGER_TYPE)
11269 if (TYPE_MODE (type) == BLKmode)
11271 /* Copy structure. This expands to a block copy instruction, storeBLK.
11272 In addition to the arguments expected by the other store instructions,
11273 it also expects a type size (SImode) on top of the stack, which is the
11274 structure size in size units (usually bytes). The two first arguments
11275 are already on the stack; so we just put the size on level 1. For some
11276 other languages, the size may be variable, this is why we don't encode
11277 it as a storeBLK literal, but rather treat it as a full-fledged expression. */
11279 bc_expand_expr (TYPE_SIZE (type));
11283 opcode = mode_to_store_map [(int) TYPE_MODE (type)];
11285 if (opcode == neverneverland)
11288 bc_emit_bytecode (opcode);
11290 #ifdef DEBUG_PRINT_CODE
11291 fputc ('\n', stderr);
11296 /* Allocate local stack space sufficient to hold a value of the given
11297 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
11298 integral power of 2. A special case is locals of type VOID, which
11299 have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
11300 remapped into the corresponding attribute of SI. */
11303 bc_allocate_local (size, alignment)
11304 int size, alignment;
11307 int byte_alignment;
11312 /* Normalize size and alignment */
11314 size = UNITS_PER_WORD;
11316 if (alignment < BITS_PER_UNIT)
11317 byte_alignment = 1 << (INT_ALIGN - 1);
11320 byte_alignment = alignment / BITS_PER_UNIT;
11322 if (local_vars_size & (byte_alignment - 1))
11323 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
11325 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
11326 local_vars_size += size;
11332 /* Allocate variable-sized local array. Variable-sized arrays are
11333 actually pointers to the address in memory where they are stored. */
11336 bc_allocate_variable_array (size)
11340 const int ptralign = (1 << (PTR_ALIGN - 1));
11342 /* Align pointer */
11343 if (local_vars_size & ptralign)
11344 local_vars_size += ptralign - (local_vars_size & ptralign);
11346 /* Note down local space needed: pointer to block; also return
11349 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
11350 local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
11355 /* Push the machine address for the given external variable offset. */
11358 bc_load_externaddr (externaddr)
11361 bc_emit_bytecode (constP);
11362 bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
11363 BYTECODE_BC_LABEL (externaddr)->offset);
11365 #ifdef DEBUG_PRINT_CODE
11366 fputc ('\n', stderr);
11371 /* Like above, but expects an IDENTIFIER. */
11374 bc_load_externaddr_id (id, offset)
11378 if (!IDENTIFIER_POINTER (id))
11381 bc_emit_bytecode (constP);
11382 bc_emit_code_labelref (xstrdup (IDENTIFIER_POINTER (id)), offset);
11384 #ifdef DEBUG_PRINT_CODE
11385 fputc ('\n', stderr);
11390 /* Push the machine address for the given local variable offset. */
11393 bc_load_localaddr (localaddr)
11396 bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
11400 /* Push the machine address for the given parameter offset.
11401 NOTE: offset is in bits. */
11404 bc_load_parmaddr (parmaddr)
11407 bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
11412 /* Convert a[i] into *(a + i). */
11415 bc_canonicalize_array_ref (exp)
11418 tree type = TREE_TYPE (exp);
11419 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
11420 TREE_OPERAND (exp, 0));
11421 tree index = TREE_OPERAND (exp, 1);
11424 /* Convert the integer argument to a type the same size as a pointer
11425 so the multiply won't overflow spuriously. */
11427 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
11428 index = convert (type_for_size (POINTER_SIZE, 0), index);
11430 /* The array address isn't volatile even if the array is.
11431 (Of course this isn't terribly relevant since the bytecode
11432 translator treats nearly everything as volatile anyway.) */
11433 TREE_THIS_VOLATILE (array_adr) = 0;
11435 return build1 (INDIRECT_REF, type,
11436 fold (build (PLUS_EXPR,
11437 TYPE_POINTER_TO (type),
11439 fold (build (MULT_EXPR,
11440 TYPE_POINTER_TO (type),
11442 size_in_bytes (type))))));
11446 /* Load the address of the component referenced by the given
11447 COMPONENT_REF expression.
11449 Returns innermost lvalue. */
11452 bc_expand_component_address (exp)
11456 enum machine_mode mode;
11458 HOST_WIDE_INT SIval;
11461 tem = TREE_OPERAND (exp, 1);
11462 mode = DECL_MODE (tem);
11465 /* Compute cumulative bit offset for nested component refs
11466 and array refs, and find the ultimate containing object. */
11468 for (tem = exp;; tem = TREE_OPERAND (tem, 0))
11470 if (TREE_CODE (tem) == COMPONENT_REF)
11471 bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
11473 if (TREE_CODE (tem) == ARRAY_REF
11474 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11475 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
11477 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
11478 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
11479 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
11484 bc_expand_expr (tem);
11487 /* For bitfields also push their offset and size */
11488 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
11489 bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1)));
11491 if (SIval = bitpos / BITS_PER_UNIT)
11492 bc_emit_instruction (addconstPSI, SIval);
11494 return (TREE_OPERAND (exp, 1));
11498 /* Emit code to push two SI constants */
11501 bc_push_offset_and_size (offset, size)
11502 HOST_WIDE_INT offset, size;
11504 bc_emit_instruction (constSI, offset);
11505 bc_emit_instruction (constSI, size);
11509 /* Emit byte code to push the address of the given lvalue expression to
11510 the stack. If it's a bit field, we also push offset and size info.
11512 Returns innermost component, which allows us to determine not only
11513 its type, but also whether it's a bitfield. */
11516 bc_expand_address (exp)
11520 if (!exp || TREE_CODE (exp) == ERROR_MARK)
11524 switch (TREE_CODE (exp))
11528 return (bc_expand_address (bc_canonicalize_array_ref (exp)));
11530 case COMPONENT_REF:
11532 return (bc_expand_component_address (exp));
11536 bc_expand_expr (TREE_OPERAND (exp, 0));
11538 /* For variable-sized types: retrieve pointer. Sometimes the
11539 TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
11540 also make sure we have an operand, just in case... */
11542 if (TREE_OPERAND (exp, 0)
11543 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
11544 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
11545 bc_emit_instruction (loadP);
11547 /* If packed, also return offset and size */
11548 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
11550 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
11551 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
11553 return (TREE_OPERAND (exp, 0));
11555 case FUNCTION_DECL:
11557 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
11558 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
11563 bc_load_parmaddr (DECL_RTL (exp));
11565 /* For variable-sized types: retrieve pointer */
11566 if (TYPE_SIZE (TREE_TYPE (exp))
11567 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11568 bc_emit_instruction (loadP);
11570 /* If packed, also return offset and size */
11571 if (DECL_BIT_FIELD (exp))
11572 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11573 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11579 bc_emit_instruction (returnP);
11585 if (BYTECODE_LABEL (DECL_RTL (exp)))
11586 bc_load_externaddr (DECL_RTL (exp));
11589 if (DECL_EXTERNAL (exp))
11590 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
11591 (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
11593 bc_load_localaddr (DECL_RTL (exp));
11595 /* For variable-sized types: retrieve pointer */
11596 if (TYPE_SIZE (TREE_TYPE (exp))
11597 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11598 bc_emit_instruction (loadP);
11600 /* If packed, also return offset and size */
11601 if (DECL_BIT_FIELD (exp))
11602 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11603 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11611 bc_emit_bytecode (constP);
11612 r = output_constant_def (exp);
11613 bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
11615 #ifdef DEBUG_PRINT_CODE
11616 fputc ('\n', stderr);
11627 /* Most lvalues don't have components. */
11632 /* Emit a type code to be used by the runtime support in handling
11633 parameter passing. The type code consists of the machine mode
11634 plus the minimal alignment shifted left 8 bits. */
11637 bc_runtime_type_code (type)
11642 switch (TREE_CODE (type))
11648 case ENUMERAL_TYPE:
11652 val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
11664 return build_int_2 (val, 0);
11668 /* Generate constructor label */
11671 bc_gen_constr_label ()
11673 static int label_counter;
11674 static char label[20];
11676 sprintf (label, "*LR%d", label_counter++);
11678 return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
11682 /* Evaluate constructor CONSTR and return pointer to it on level one. We
11683 expand the constructor data as static data, and push a pointer to it.
11684 The pointer is put in the pointer table and is retrieved by a constP
11685 bytecode instruction. We then loop and store each constructor member in
11686 the corresponding component. Finally, we return the original pointer on
11690 bc_expand_constructor (constr)
11694 HOST_WIDE_INT ptroffs;
11698 /* Literal constructors are handled as constants, whereas
11699 non-literals are evaluated and stored element by element
11700 into the data segment. */
11702 /* Allocate space in proper segment and push pointer to space on stack.
11705 l = bc_gen_constr_label ();
11707 if (TREE_CONSTANT (constr))
11711 bc_emit_const_labeldef (l);
11712 bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
11718 bc_emit_data_labeldef (l);
11719 bc_output_data_constructor (constr);
11723 /* Add reference to pointer table and recall pointer to stack;
11724 this code is common for both types of constructors: literals
11725 and non-literals. */
11727 ptroffs = bc_define_pointer (l);
11728 bc_emit_instruction (constP, ptroffs);
11730 /* This is all that has to be done if it's a literal. */
11731 if (TREE_CONSTANT (constr))
11735 /* At this point, we have the pointer to the structure on top of the stack.
11736 Generate sequences of store_memory calls for the constructor. */
11738 /* constructor type is structure */
11739 if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
11743 /* If the constructor has fewer fields than the structure,
11744 clear the whole structure first. */
11746 if (list_length (CONSTRUCTOR_ELTS (constr))
11747 != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
11749 bc_emit_instruction (duplicate);
11750 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11751 bc_emit_instruction (clearBLK);
11754 /* Store each element of the constructor into the corresponding
11755 field of TARGET. */
11757 for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
11759 register tree field = TREE_PURPOSE (elt);
11760 register enum machine_mode mode;
11765 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
11766 mode = DECL_MODE (field);
11767 unsignedp = TREE_UNSIGNED (field);
11769 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
11771 bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11772 /* The alignment of TARGET is
11773 at least what its type requires. */
11775 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11776 int_size_in_bytes (TREE_TYPE (constr)));
11781 /* Constructor type is array */
11782 if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
11786 tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
11787 int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
11788 int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
11789 tree elttype = TREE_TYPE (TREE_TYPE (constr));
11791 /* If the constructor has fewer fields than the structure,
11792 clear the whole structure first. */
11794 if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
11796 bc_emit_instruction (duplicate);
11797 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11798 bc_emit_instruction (clearBLK);
11802 /* Store each element of the constructor into the corresponding
11803 element of TARGET, determined by counting the elements. */
11805 for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
11807 elt = TREE_CHAIN (elt), i++)
11809 register enum machine_mode mode;
11814 mode = TYPE_MODE (elttype);
11815 bitsize = GET_MODE_BITSIZE (mode);
11816 unsignedp = TREE_UNSIGNED (elttype);
11818 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
11819 /* * TYPE_SIZE_UNIT (elttype) */ );
11821 bc_store_field (elt, bitsize, bitpos, mode,
11822 TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11823 /* The alignment of TARGET is
11824 at least what its type requires. */
11826 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11827 int_size_in_bytes (TREE_TYPE (constr)));
11834 /* Store the value of EXP (an expression tree) into member FIELD of
11835 structure at address on stack, which has type TYPE, mode MODE and
11836 occupies BITSIZE bits, starting BITPOS bits from the beginning of the
11839 ALIGN is the alignment that TARGET is known to have, measured in bytes.
11840 TOTAL_SIZE is its size in bytes, or -1 if variable. */
11843 bc_store_field (field, bitsize, bitpos, mode, exp, type,
11844 value_mode, unsignedp, align, total_size)
11845 int bitsize, bitpos;
11846 enum machine_mode mode;
11847 tree field, exp, type;
11848 enum machine_mode value_mode;
11854 /* Expand expression and copy pointer */
11855 bc_expand_expr (exp);
11856 bc_emit_instruction (over);
11859 /* If the component is a bit field, we cannot use addressing to access
11860 it. Use bit-field techniques to store in it. */
11862 if (DECL_BIT_FIELD (field))
11864 bc_store_bit_field (bitpos, bitsize, unsignedp);
11868 /* Not bit field */
11870 HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;
11872 /* Advance pointer to the desired member */
11874 bc_emit_instruction (addconstPSI, offset);
11877 bc_store_memory (type, field);
11882 /* Store SI/SU in bitfield */
11885 bc_store_bit_field (offset, size, unsignedp)
11886 int offset, size, unsignedp;
11888 /* Push bitfield offset and size */
11889 bc_push_offset_and_size (offset, size);
11892 bc_emit_instruction (sstoreBI);
11896 /* Load SI/SU from bitfield */
11899 bc_load_bit_field (offset, size, unsignedp)
11900 int offset, size, unsignedp;
11902 /* Push bitfield offset and size */
11903 bc_push_offset_and_size (offset, size);
11905 /* Load: sign-extend if signed, else zero-extend */
11906 bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
11910 /* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
11911 (adjust stack pointer upwards), negative means add that number of
11912 levels (adjust the stack pointer downwards). Only positive values
11913 normally make sense. */
11916 bc_adjust_stack (nlevels)
11925 bc_emit_instruction (drop);
11928 bc_emit_instruction (drop);
11933 bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
11934 stack_depth -= nlevels;
11937 #if defined (VALIDATE_STACK_FOR_BC)
11938 VALIDATE_STACK_FOR_BC ();