1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 92, 93, 94, 95, 1996 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
29 #include "hard-reg-set.h"
32 #include "insn-flags.h"
33 #include "insn-codes.h"
35 #include "insn-config.h"
38 #include "typeclass.h"
41 #include "bc-opcode.h"
42 #include "bc-typecd.h"
47 #define CEIL(x,y) (((x) + (y) - 1) / (y))
49 /* Decide whether a function's arguments should be processed
50 from first to last or from last to first.
52 They should if the stack and args grow in opposite directions, but
53 only if we have push insns. */
57 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
58 #define PUSH_ARGS_REVERSED /* If it's last to first */
63 #ifndef STACK_PUSH_CODE
64 #ifdef STACK_GROWS_DOWNWARD
65 #define STACK_PUSH_CODE PRE_DEC
67 #define STACK_PUSH_CODE PRE_INC
71 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
72 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
74 /* If this is nonzero, we do not bother generating VOLATILE
75 around volatile memory references, and we are willing to
76 output indirect addresses. If cse is to follow, we reject
77 indirect addresses so a useful potential cse is generated;
78 if it is used only once, instruction combination will produce
79 the same indirect address eventually. */
82 /* Nonzero to generate code for all the subroutines within an
83 expression before generating the upper levels of the expression.
84 Nowadays this is never zero. */
85 int do_preexpand_calls = 1;
87 /* Number of units that we should eventually pop off the stack.
88 These are the arguments to function calls that have already returned. */
89 int pending_stack_adjust;
91 /* Nonzero means stack pops must not be deferred, and deferred stack
92 pops must not be output. It is nonzero inside a function call,
93 inside a conditional expression, inside a statement expression,
94 and in other cases as well. */
95 int inhibit_defer_pop;
97 /* A list of all cleanups which belong to the arguments of
98 function calls being expanded by expand_call. */
99 tree cleanups_this_call;
101 /* When temporaries are created by TARGET_EXPRs, they are created at
102 this level of temp_slot_level, so that they can remain allocated
103 until no longer needed. CLEANUP_POINT_EXPRs define the lifetime
105 int target_temp_slot_level;
107 /* Nonzero means __builtin_saveregs has already been done in this function.
108 The value is the pseudoreg containing the value __builtin_saveregs
110 static rtx saveregs_value;
112 /* Similarly for __builtin_apply_args. */
113 static rtx apply_args_value;
115 /* This structure is used by move_by_pieces to describe the move to
118 struct move_by_pieces
128 int explicit_inc_from;
135 /* This structure is used by clear_by_pieces to describe the clear to
138 struct clear_by_pieces
150 /* Used to generate bytecodes: keep track of size of local variables,
151 as well as depth of arithmetic stack. (Notice that variables are
152 stored on the machine's stack, not the arithmetic stack.) */
154 extern int local_vars_size;
155 extern int stack_depth;
156 extern int max_stack_depth;
157 extern struct obstack permanent_obstack;
158 extern rtx arg_pointer_save_area;
160 static rtx enqueue_insn PROTO((rtx, rtx));
161 static int queued_subexp_p PROTO((rtx));
162 static void init_queue PROTO((void));
163 static void move_by_pieces PROTO((rtx, rtx, int, int));
164 static int move_by_pieces_ninsns PROTO((unsigned int, int));
165 static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
166 struct move_by_pieces *));
167 static void clear_by_pieces PROTO((rtx, int, int));
168 static void clear_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
169 struct clear_by_pieces *));
170 static int is_zeros_p PROTO((tree));
171 static int mostly_zeros_p PROTO((tree));
172 static void store_constructor PROTO((tree, rtx, int));
173 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
174 enum machine_mode, int, int, int));
175 static int get_inner_unaligned_p PROTO((tree));
176 static tree save_noncopied_parts PROTO((tree, tree));
177 static tree init_noncopied_parts PROTO((tree, tree));
178 static int safe_from_p PROTO((rtx, tree));
179 static int fixed_type_p PROTO((tree));
180 static rtx var_rtx PROTO((tree));
181 static int get_pointer_alignment PROTO((tree, unsigned));
182 static tree string_constant PROTO((tree, tree *));
183 static tree c_strlen PROTO((tree));
184 static rtx expand_builtin PROTO((tree, rtx, rtx,
185 enum machine_mode, int));
186 static int apply_args_size PROTO((void));
187 static int apply_result_size PROTO((void));
188 static rtx result_vector PROTO((int, rtx));
189 static rtx expand_builtin_apply_args PROTO((void));
190 static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
191 static void expand_builtin_return PROTO((rtx));
192 static rtx expand_increment PROTO((tree, int, int));
193 void bc_expand_increment PROTO((struct increment_operator *, tree));
194 rtx bc_allocate_local PROTO((int, int));
195 void bc_store_memory PROTO((tree, tree));
196 tree bc_expand_component_address PROTO((tree));
197 tree bc_expand_address PROTO((tree));
198 void bc_expand_constructor PROTO((tree));
199 void bc_adjust_stack PROTO((int));
200 tree bc_canonicalize_array_ref PROTO((tree));
201 void bc_load_memory PROTO((tree, tree));
202 void bc_load_externaddr PROTO((rtx));
203 void bc_load_externaddr_id PROTO((tree, int));
204 void bc_load_localaddr PROTO((rtx));
205 void bc_load_parmaddr PROTO((rtx));
206 static void preexpand_calls PROTO((tree));
207 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
208 void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
209 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
210 static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
211 static void do_jump_for_compare PROTO((rtx, rtx, rtx));
212 static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
213 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
214 static tree defer_cleanups_to PROTO((tree));
215 extern tree truthvalue_conversion PROTO((tree));
217 /* Record for each mode whether we can move a register directly to or
218 from an object of that mode in memory. If we can't, we won't try
219 to use that mode directly when accessing a field of that mode. */
221 static char direct_load[NUM_MACHINE_MODES];
222 static char direct_store[NUM_MACHINE_MODES];
224 /* MOVE_RATIO is the number of move instructions that is better than
228 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
231 /* A value of around 6 would minimize code size; infinity would minimize
233 #define MOVE_RATIO 15
237 /* This array records the insn_code of insns to perform block moves. */
238 enum insn_code movstr_optab[NUM_MACHINE_MODES];
240 /* This array records the insn_code of insns to perform block clears. */
241 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
243 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
245 #ifndef SLOW_UNALIGNED_ACCESS
246 #define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
249 /* Register mappings for target machines without register windows. */
250 #ifndef INCOMING_REGNO
251 #define INCOMING_REGNO(OUT) (OUT)
253 #ifndef OUTGOING_REGNO
254 #define OUTGOING_REGNO(IN) (IN)
257 /* Maps used to convert modes to const, load, and store bytecodes. */
258 enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
259 enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
260 enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
262 /* Initialize maps used to convert modes to const, load, and store
266 bc_init_mode_to_opcode_maps ()
270 for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
271 mode_to_const_map[mode] =
272 mode_to_load_map[mode] =
273 mode_to_store_map[mode] = neverneverland;
275 #define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
276 mode_to_const_map[(int) SYM] = CONST; \
277 mode_to_load_map[(int) SYM] = LOAD; \
278 mode_to_store_map[(int) SYM] = STORE;
280 #include "modemap.def"
284 /* This is run once per compilation to set up which modes can be used
285 directly in memory and to initialize the block move optab. */
291 enum machine_mode mode;
292 /* Try indexing by frame ptr and try by stack ptr.
293 It is known that on the Convex the stack ptr isn't a valid index.
294 With luck, one or the other is valid on any machine. */
295 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
296 rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
299 insn = emit_insn (gen_rtx (SET, 0, 0));
300 pat = PATTERN (insn);
302 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
303 mode = (enum machine_mode) ((int) mode + 1))
309 direct_load[(int) mode] = direct_store[(int) mode] = 0;
310 PUT_MODE (mem, mode);
311 PUT_MODE (mem1, mode);
313 /* See if there is some register that can be used in this mode and
314 directly loaded or stored from memory. */
316 if (mode != VOIDmode && mode != BLKmode)
317 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
318 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
321 if (! HARD_REGNO_MODE_OK (regno, mode))
324 reg = gen_rtx (REG, mode, regno);
327 SET_DEST (pat) = reg;
328 if (recog (pat, insn, &num_clobbers) >= 0)
329 direct_load[(int) mode] = 1;
331 SET_SRC (pat) = mem1;
332 SET_DEST (pat) = reg;
333 if (recog (pat, insn, &num_clobbers) >= 0)
334 direct_load[(int) mode] = 1;
337 SET_DEST (pat) = mem;
338 if (recog (pat, insn, &num_clobbers) >= 0)
339 direct_store[(int) mode] = 1;
342 SET_DEST (pat) = mem1;
343 if (recog (pat, insn, &num_clobbers) >= 0)
344 direct_store[(int) mode] = 1;
351 /* This is run at the start of compiling a function. */
358 pending_stack_adjust = 0;
359 inhibit_defer_pop = 0;
360 cleanups_this_call = 0;
362 apply_args_value = 0;
366 /* Save all variables describing the current status into the structure *P.
367 This is used before starting a nested function. */
373 /* Instead of saving the postincrement queue, empty it. */
376 p->pending_stack_adjust = pending_stack_adjust;
377 p->inhibit_defer_pop = inhibit_defer_pop;
378 p->cleanups_this_call = cleanups_this_call;
379 p->saveregs_value = saveregs_value;
380 p->apply_args_value = apply_args_value;
381 p->forced_labels = forced_labels;
383 pending_stack_adjust = 0;
384 inhibit_defer_pop = 0;
385 cleanups_this_call = 0;
387 apply_args_value = 0;
391 /* Restore all variables describing the current status from the structure *P.
392 This is used after a nested function. */
395 restore_expr_status (p)
398 pending_stack_adjust = p->pending_stack_adjust;
399 inhibit_defer_pop = p->inhibit_defer_pop;
400 cleanups_this_call = p->cleanups_this_call;
401 saveregs_value = p->saveregs_value;
402 apply_args_value = p->apply_args_value;
403 forced_labels = p->forced_labels;
406 /* Manage the queue of increment instructions to be output
407 for POSTINCREMENT_EXPR expressions, etc. */
409 static rtx pending_chain;
411 /* Queue up to increment (or change) VAR later. BODY says how:
412 BODY should be the same thing you would pass to emit_insn
413 to increment right away. It will go to emit_insn later on.
415 The value is a QUEUED expression to be used in place of VAR
416 where you want to guarantee the pre-incrementation value of VAR. */
419 enqueue_insn (var, body)
422 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
423 var, NULL_RTX, NULL_RTX, body, pending_chain);
424 return pending_chain;
427 /* Use protect_from_queue to convert a QUEUED expression
428 into something that you can put immediately into an instruction.
429 If the queued incrementation has not happened yet,
430 protect_from_queue returns the variable itself.
431 If the incrementation has happened, protect_from_queue returns a temp
432 that contains a copy of the old value of the variable.
434 Any time an rtx which might possibly be a QUEUED is to be put
435 into an instruction, it must be passed through protect_from_queue first.
436 QUEUED expressions are not meaningful in instructions.
438 Do not pass a value through protect_from_queue and then hold
439 on to it for a while before putting it in an instruction!
440 If the queue is flushed in between, incorrect code will result. */
443 protect_from_queue (x, modify)
447 register RTX_CODE code = GET_CODE (x);
449 #if 0 /* A QUEUED can hang around after the queue is forced out. */
450 /* Shortcut for most common case. */
451 if (pending_chain == 0)
457 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
458 use of autoincrement. Make a copy of the contents of the memory
459 location rather than a copy of the address, but not if the value is
460 of mode BLKmode. Don't modify X in place since it might be
462 if (code == MEM && GET_MODE (x) != BLKmode
463 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
465 register rtx y = XEXP (x, 0);
466 register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));
468 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
469 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
470 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
474 register rtx temp = gen_reg_rtx (GET_MODE (new));
475 emit_insn_before (gen_move_insn (temp, new),
481 /* Otherwise, recursively protect the subexpressions of all
482 the kinds of rtx's that can contain a QUEUED. */
485 rtx tem = protect_from_queue (XEXP (x, 0), 0);
486 if (tem != XEXP (x, 0))
492 else if (code == PLUS || code == MULT)
494 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
495 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
496 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
505 /* If the increment has not happened, use the variable itself. */
506 if (QUEUED_INSN (x) == 0)
507 return QUEUED_VAR (x);
508 /* If the increment has happened and a pre-increment copy exists,
510 if (QUEUED_COPY (x) != 0)
511 return QUEUED_COPY (x);
512 /* The increment has happened but we haven't set up a pre-increment copy.
513 Set one up now, and use it. */
514 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
515 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
517 return QUEUED_COPY (x);
520 /* Return nonzero if X contains a QUEUED expression:
521 if it contains anything that will be altered by a queued increment.
522 We handle only combinations of MEM, PLUS, MINUS and MULT operators
523 since memory addresses generally contain only those. */
529 register enum rtx_code code = GET_CODE (x);
535 return queued_subexp_p (XEXP (x, 0));
539 return queued_subexp_p (XEXP (x, 0))
540 || queued_subexp_p (XEXP (x, 1));
545 /* Perform all the pending incrementations. */
551 while (p = pending_chain)
553 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
554 pending_chain = QUEUED_NEXT (p);
565 /* Copy data from FROM to TO, where the machine modes are not the same.
566 Both modes may be integer, or both may be floating.
567 UNSIGNEDP should be nonzero if FROM is an unsigned type.
568 This causes zero-extension instead of sign-extension. */
571 convert_move (to, from, unsignedp)
572 register rtx to, from;
575 enum machine_mode to_mode = GET_MODE (to);
576 enum machine_mode from_mode = GET_MODE (from);
577 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
578 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
582 /* rtx code for making an equivalent value. */
583 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
585 to = protect_from_queue (to, 1);
586 from = protect_from_queue (from, 0);
588 if (to_real != from_real)
591 /* If FROM is a SUBREG that indicates that we have already done at least
592 the required extension, strip it. We don't handle such SUBREGs as
595 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
596 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
597 >= GET_MODE_SIZE (to_mode))
598 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
599 from = gen_lowpart (to_mode, from), from_mode = to_mode;
601 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
604 if (to_mode == from_mode
605 || (from_mode == VOIDmode && CONSTANT_P (from)))
607 emit_move_insn (to, from);
615 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
617 /* Try converting directly if the insn is supported. */
618 if ((code = can_extend_p (to_mode, from_mode, 0))
621 emit_unop_insn (code, to, from, UNKNOWN);
626 #ifdef HAVE_trunchfqf2
627 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
629 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
633 #ifdef HAVE_truncsfqf2
634 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
636 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
640 #ifdef HAVE_truncdfqf2
641 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
643 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
647 #ifdef HAVE_truncxfqf2
648 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
650 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
654 #ifdef HAVE_trunctfqf2
655 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
657 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
662 #ifdef HAVE_trunctqfhf2
663 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
665 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
669 #ifdef HAVE_truncsfhf2
670 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
672 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
676 #ifdef HAVE_truncdfhf2
677 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
679 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
683 #ifdef HAVE_truncxfhf2
684 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
686 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
690 #ifdef HAVE_trunctfhf2
691 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
693 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
698 #ifdef HAVE_truncsftqf2
699 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
701 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
705 #ifdef HAVE_truncdftqf2
706 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
708 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
712 #ifdef HAVE_truncxftqf2
713 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
715 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
719 #ifdef HAVE_trunctftqf2
720 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
722 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
727 #ifdef HAVE_truncdfsf2
728 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
730 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
734 #ifdef HAVE_truncxfsf2
735 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
737 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
741 #ifdef HAVE_trunctfsf2
742 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
744 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
748 #ifdef HAVE_truncxfdf2
749 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
751 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
755 #ifdef HAVE_trunctfdf2
756 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
758 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
770 libcall = extendsfdf2_libfunc;
774 libcall = extendsfxf2_libfunc;
778 libcall = extendsftf2_libfunc;
787 libcall = truncdfsf2_libfunc;
791 libcall = extenddfxf2_libfunc;
795 libcall = extenddftf2_libfunc;
804 libcall = truncxfsf2_libfunc;
808 libcall = truncxfdf2_libfunc;
817 libcall = trunctfsf2_libfunc;
821 libcall = trunctfdf2_libfunc;
827 if (libcall == (rtx) 0)
828 /* This conversion is not implemented yet. */
831 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
833 emit_move_insn (to, value);
837 /* Now both modes are integers. */
839 /* Handle expanding beyond a word. */
840 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
841 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
848 enum machine_mode lowpart_mode;
849 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
851 /* Try converting directly if the insn is supported. */
852 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
855 /* If FROM is a SUBREG, put it into a register. Do this
856 so that we always generate the same set of insns for
857 better cse'ing; if an intermediate assignment occurred,
858 we won't be doing the operation directly on the SUBREG. */
859 if (optimize > 0 && GET_CODE (from) == SUBREG)
860 from = force_reg (from_mode, from);
861 emit_unop_insn (code, to, from, equiv_code);
864 /* Next, try converting via full word. */
865 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
866 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
867 != CODE_FOR_nothing))
869 if (GET_CODE (to) == REG)
870 emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
871 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
872 emit_unop_insn (code, to,
873 gen_lowpart (word_mode, to), equiv_code);
877 /* No special multiword conversion insn; do it by hand. */
880 /* Since we will turn this into a no conflict block, we must ensure
881 that the source does not overlap the target. */
883 if (reg_overlap_mentioned_p (to, from))
884 from = force_reg (from_mode, from);
886 /* Get a copy of FROM widened to a word, if necessary. */
887 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
888 lowpart_mode = word_mode;
890 lowpart_mode = from_mode;
892 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
894 lowpart = gen_lowpart (lowpart_mode, to);
895 emit_move_insn (lowpart, lowfrom);
897 /* Compute the value to put in each remaining word. */
899 fill_value = const0_rtx;
904 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
905 && STORE_FLAG_VALUE == -1)
907 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
909 fill_value = gen_reg_rtx (word_mode);
910 emit_insn (gen_slt (fill_value));
916 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
917 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
919 fill_value = convert_to_mode (word_mode, fill_value, 1);
923 /* Fill the remaining words. */
924 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
926 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
927 rtx subword = operand_subword (to, index, 1, to_mode);
932 if (fill_value != subword)
933 emit_move_insn (subword, fill_value);
936 insns = get_insns ();
939 emit_no_conflict_block (insns, to, from, NULL_RTX,
940 gen_rtx (equiv_code, to_mode, copy_rtx (from)));
944 /* Truncating multi-word to a word or less. */
945 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
946 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
948 if (!((GET_CODE (from) == MEM
949 && ! MEM_VOLATILE_P (from)
950 && direct_load[(int) to_mode]
951 && ! mode_dependent_address_p (XEXP (from, 0)))
952 || GET_CODE (from) == REG
953 || GET_CODE (from) == SUBREG))
954 from = force_reg (from_mode, from);
955 convert_move (to, gen_lowpart (word_mode, from), 0);
959 /* Handle pointer conversion */ /* SPEE 900220 */
960 if (to_mode == PSImode)
962 if (from_mode != SImode)
963 from = convert_to_mode (SImode, from, unsignedp);
965 #ifdef HAVE_truncsipsi2
966 if (HAVE_truncsipsi2)
968 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
971 #endif /* HAVE_truncsipsi2 */
975 if (from_mode == PSImode)
977 if (to_mode != SImode)
979 from = convert_to_mode (SImode, from, unsignedp);
984 #ifdef HAVE_extendpsisi2
985 if (HAVE_extendpsisi2)
987 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
990 #endif /* HAVE_extendpsisi2 */
995 if (to_mode == PDImode)
997 if (from_mode != DImode)
998 from = convert_to_mode (DImode, from, unsignedp);
1000 #ifdef HAVE_truncdipdi2
1001 if (HAVE_truncdipdi2)
1003 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1006 #endif /* HAVE_truncdipdi2 */
1010 if (from_mode == PDImode)
1012 if (to_mode != DImode)
1014 from = convert_to_mode (DImode, from, unsignedp);
1019 #ifdef HAVE_extendpdidi2
1020 if (HAVE_extendpdidi2)
1022 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1025 #endif /* HAVE_extendpdidi2 */
1030 /* Now follow all the conversions between integers
1031 no more than a word long. */
1033 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1034 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1035 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1036 GET_MODE_BITSIZE (from_mode)))
1038 if (!((GET_CODE (from) == MEM
1039 && ! MEM_VOLATILE_P (from)
1040 && direct_load[(int) to_mode]
1041 && ! mode_dependent_address_p (XEXP (from, 0)))
1042 || GET_CODE (from) == REG
1043 || GET_CODE (from) == SUBREG))
1044 from = force_reg (from_mode, from);
1045 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1046 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1047 from = copy_to_reg (from);
1048 emit_move_insn (to, gen_lowpart (to_mode, from));
1052 /* Handle extension. */
1053 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1055 /* Convert directly if that works. */
1056 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1057 != CODE_FOR_nothing)
1059 emit_unop_insn (code, to, from, equiv_code);
1064 enum machine_mode intermediate;
1066 /* Search for a mode to convert via. */
1067 for (intermediate = from_mode; intermediate != VOIDmode;
1068 intermediate = GET_MODE_WIDER_MODE (intermediate))
1069 if (((can_extend_p (to_mode, intermediate, unsignedp)
1070 != CODE_FOR_nothing)
1071 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1072 && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
1073 && (can_extend_p (intermediate, from_mode, unsignedp)
1074 != CODE_FOR_nothing))
1076 convert_move (to, convert_to_mode (intermediate, from,
1077 unsignedp), unsignedp);
1081 /* No suitable intermediate mode. */
1086 /* Support special truncate insns for certain modes. */
1088 if (from_mode == DImode && to_mode == SImode)
1090 #ifdef HAVE_truncdisi2
1091 if (HAVE_truncdisi2)
1093 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1097 convert_move (to, force_reg (from_mode, from), unsignedp);
1101 if (from_mode == DImode && to_mode == HImode)
1103 #ifdef HAVE_truncdihi2
1104 if (HAVE_truncdihi2)
1106 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1110 convert_move (to, force_reg (from_mode, from), unsignedp);
1114 if (from_mode == DImode && to_mode == QImode)
1116 #ifdef HAVE_truncdiqi2
1117 if (HAVE_truncdiqi2)
1119 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1123 convert_move (to, force_reg (from_mode, from), unsignedp);
1127 if (from_mode == SImode && to_mode == HImode)
1129 #ifdef HAVE_truncsihi2
1130 if (HAVE_truncsihi2)
1132 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1136 convert_move (to, force_reg (from_mode, from), unsignedp);
1140 if (from_mode == SImode && to_mode == QImode)
1142 #ifdef HAVE_truncsiqi2
1143 if (HAVE_truncsiqi2)
1145 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1149 convert_move (to, force_reg (from_mode, from), unsignedp);
1153 if (from_mode == HImode && to_mode == QImode)
1155 #ifdef HAVE_trunchiqi2
1156 if (HAVE_trunchiqi2)
1158 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1162 convert_move (to, force_reg (from_mode, from), unsignedp);
1166 if (from_mode == TImode && to_mode == DImode)
1168 #ifdef HAVE_trunctidi2
1169 if (HAVE_trunctidi2)
1171 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1175 convert_move (to, force_reg (from_mode, from), unsignedp);
1179 if (from_mode == TImode && to_mode == SImode)
1181 #ifdef HAVE_trunctisi2
1182 if (HAVE_trunctisi2)
1184 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1188 convert_move (to, force_reg (from_mode, from), unsignedp);
1192 if (from_mode == TImode && to_mode == HImode)
1194 #ifdef HAVE_trunctihi2
1195 if (HAVE_trunctihi2)
1197 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1201 convert_move (to, force_reg (from_mode, from), unsignedp);
1205 if (from_mode == TImode && to_mode == QImode)
1207 #ifdef HAVE_trunctiqi2
1208 if (HAVE_trunctiqi2)
1210 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1214 convert_move (to, force_reg (from_mode, from), unsignedp);
1218 /* Handle truncation of volatile memrefs, and so on;
1219 the things that couldn't be truncated directly,
1220 and for which there was no special instruction. */
1221 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1223 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1224 emit_move_insn (to, temp);
1228 /* Mode combination is not recognized. */
1232 /* Return an rtx for a value that would result
1233 from converting X to mode MODE.
1234 Both X and MODE may be floating, or both integer.
1235 UNSIGNEDP is nonzero if X is an unsigned value.
1236 This can be done by referring to a part of X in place
1237 or by copying to a new temporary with conversion.
1239 This function *must not* call protect_from_queue
1240 except when putting X into an insn (in which case convert_move does it). */
1243 convert_to_mode (mode, x, unsignedp)
1244 enum machine_mode mode;
1248 return convert_modes (mode, VOIDmode, x, unsignedp);
1251 /* Return an rtx for a value that would result
1252 from converting X from mode OLDMODE to mode MODE.
1253 Both modes may be floating, or both integer.
1254 UNSIGNEDP is nonzero if X is an unsigned value.
1256 This can be done by referring to a part of X in place
1257 or by copying to a new temporary with conversion.
1259 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1261 This function *must not* call protect_from_queue
1262 except when putting X into an insn (in which case convert_move does it). */
1265 convert_modes (mode, oldmode, x, unsignedp)
1266 enum machine_mode mode, oldmode;
1272 /* If FROM is a SUBREG that indicates that we have already done at least
1273 the required extension, strip it. */
1275 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1276 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1277 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1278 x = gen_lowpart (mode, x);
1280 if (GET_MODE (x) != VOIDmode)
1281 oldmode = GET_MODE (x);
1283 if (mode == oldmode)
1286 /* There is one case that we must handle specially: If we are converting
1287 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1288 we are to interpret the constant as unsigned, gen_lowpart will do
1289 the wrong if the constant appears negative. What we want to do is
1290 make the high-order word of the constant zero, not all ones. */
1292 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1293 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1294 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1295 return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
1297 /* We can do this with a gen_lowpart if both desired and current modes
1298 are integer, and this is either a constant integer, a register, or a
1299 non-volatile MEM. Except for the constant case where MODE is no
1300 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1302 if ((GET_CODE (x) == CONST_INT
1303 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1304 || (GET_MODE_CLASS (mode) == MODE_INT
1305 && GET_MODE_CLASS (oldmode) == MODE_INT
1306 && (GET_CODE (x) == CONST_DOUBLE
1307 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1308 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1309 && direct_load[(int) mode])
1310 || (GET_CODE (x) == REG
1311 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1312 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1314 /* ?? If we don't know OLDMODE, we have to assume here that
1315 X does not need sign- or zero-extension. This may not be
1316 the case, but it's the best we can do. */
1317 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1318 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1320 HOST_WIDE_INT val = INTVAL (x);
1321 int width = GET_MODE_BITSIZE (oldmode);
1323 /* We must sign or zero-extend in this case. Start by
1324 zero-extending, then sign extend if we need to. */
1325 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1327 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1328 val |= (HOST_WIDE_INT) (-1) << width;
1330 return GEN_INT (val);
1333 return gen_lowpart (mode, x);
1336 temp = gen_reg_rtx (mode);
1337 convert_move (temp, x, unsignedp);
1341 /* Generate several move instructions to copy LEN bytes
1342 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1343 The caller must pass FROM and TO
1344 through protect_from_queue before calling.
1345 ALIGN (in bytes) is maximum alignment we can assume. */
1348 move_by_pieces (to, from, len, align)
1352 struct move_by_pieces data;
1353 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1354 int max_size = MOVE_MAX + 1;
1357 data.to_addr = to_addr;
1358 data.from_addr = from_addr;
1362 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1363 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1365 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1366 || GET_CODE (from_addr) == POST_INC
1367 || GET_CODE (from_addr) == POST_DEC);
1369 data.explicit_inc_from = 0;
1370 data.explicit_inc_to = 0;
1372 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1373 if (data.reverse) data.offset = len;
1376 data.to_struct = MEM_IN_STRUCT_P (to);
1377 data.from_struct = MEM_IN_STRUCT_P (from);
1379 /* If copying requires more than two move insns,
1380 copy addresses to registers (to make displacements shorter)
1381 and use post-increment if available. */
1382 if (!(data.autinc_from && data.autinc_to)
1383 && move_by_pieces_ninsns (len, align) > 2)
1385 #ifdef HAVE_PRE_DECREMENT
1386 if (data.reverse && ! data.autinc_from)
1388 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1389 data.autinc_from = 1;
1390 data.explicit_inc_from = -1;
1393 #ifdef HAVE_POST_INCREMENT
1394 if (! data.autinc_from)
1396 data.from_addr = copy_addr_to_reg (from_addr);
1397 data.autinc_from = 1;
1398 data.explicit_inc_from = 1;
1401 if (!data.autinc_from && CONSTANT_P (from_addr))
1402 data.from_addr = copy_addr_to_reg (from_addr);
1403 #ifdef HAVE_PRE_DECREMENT
1404 if (data.reverse && ! data.autinc_to)
1406 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1408 data.explicit_inc_to = -1;
1411 #ifdef HAVE_POST_INCREMENT
1412 if (! data.reverse && ! data.autinc_to)
1414 data.to_addr = copy_addr_to_reg (to_addr);
1416 data.explicit_inc_to = 1;
1419 if (!data.autinc_to && CONSTANT_P (to_addr))
1420 data.to_addr = copy_addr_to_reg (to_addr);
1423 if (! SLOW_UNALIGNED_ACCESS
1424 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1427 /* First move what we can in the largest integer mode, then go to
1428 successively smaller modes. */
1430 while (max_size > 1)
1432 enum machine_mode mode = VOIDmode, tmode;
1433 enum insn_code icode;
1435 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1436 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1437 if (GET_MODE_SIZE (tmode) < max_size)
1440 if (mode == VOIDmode)
1443 icode = mov_optab->handlers[(int) mode].insn_code;
1444 if (icode != CODE_FOR_nothing
1445 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1446 GET_MODE_SIZE (mode)))
1447 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1449 max_size = GET_MODE_SIZE (mode);
1452 /* The code above should have handled everything. */
1457 /* Return number of insns required to move L bytes by pieces.
1458 ALIGN (in bytes) is maximum alignment we can assume. */
1461 move_by_pieces_ninsns (l, align)
1465 register int n_insns = 0;
1466 int max_size = MOVE_MAX + 1;
1468 if (! SLOW_UNALIGNED_ACCESS
1469 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1472 while (max_size > 1)
1474 enum machine_mode mode = VOIDmode, tmode;
1475 enum insn_code icode;
1477 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1478 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1479 if (GET_MODE_SIZE (tmode) < max_size)
1482 if (mode == VOIDmode)
1485 icode = mov_optab->handlers[(int) mode].insn_code;
1486 if (icode != CODE_FOR_nothing
1487 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1488 GET_MODE_SIZE (mode)))
1489 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1491 max_size = GET_MODE_SIZE (mode);
1497 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1498 with move instructions for mode MODE. GENFUN is the gen_... function
1499 to make a move insn for that mode. DATA has all the other info. */
1502 move_by_pieces_1 (genfun, mode, data)
1504 enum machine_mode mode;
1505 struct move_by_pieces *data;
1507 register int size = GET_MODE_SIZE (mode);
1508 register rtx to1, from1;
1510 while (data->len >= size)
1512 if (data->reverse) data->offset -= size;
1514 to1 = (data->autinc_to
1515 ? gen_rtx (MEM, mode, data->to_addr)
1516 : change_address (data->to, mode,
1517 plus_constant (data->to_addr, data->offset)));
1518 MEM_IN_STRUCT_P (to1) = data->to_struct;
1521 ? gen_rtx (MEM, mode, data->from_addr)
1522 : change_address (data->from, mode,
1523 plus_constant (data->from_addr, data->offset)));
1524 MEM_IN_STRUCT_P (from1) = data->from_struct;
1526 #ifdef HAVE_PRE_DECREMENT
1527 if (data->explicit_inc_to < 0)
1528 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1529 if (data->explicit_inc_from < 0)
1530 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1533 emit_insn ((*genfun) (to1, from1));
1534 #ifdef HAVE_POST_INCREMENT
1535 if (data->explicit_inc_to > 0)
1536 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1537 if (data->explicit_inc_from > 0)
1538 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1541 if (! data->reverse) data->offset += size;
1547 /* Emit code to move a block Y to a block X.
1548 This may be done with string-move instructions,
1549 with multiple scalar move instructions, or with a library call.
1551 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1553 SIZE is an rtx that says how long they are.
1554 ALIGN is the maximum alignment we can assume they have,
1555 measured in bytes. */
1558 emit_block_move (x, y, size, align)
1563 if (GET_MODE (x) != BLKmode)
1566 if (GET_MODE (y) != BLKmode)
1569 x = protect_from_queue (x, 1);
1570 y = protect_from_queue (y, 0);
1571 size = protect_from_queue (size, 0);
1573 if (GET_CODE (x) != MEM)
1575 if (GET_CODE (y) != MEM)
1580 if (GET_CODE (size) == CONST_INT
1581 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1582 move_by_pieces (x, y, INTVAL (size), align);
1585 /* Try the most limited insn first, because there's no point
1586 including more than one in the machine description unless
1587 the more limited one has some advantage. */
1589 rtx opalign = GEN_INT (align);
1590 enum machine_mode mode;
1592 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1593 mode = GET_MODE_WIDER_MODE (mode))
1595 enum insn_code code = movstr_optab[(int) mode];
1597 if (code != CODE_FOR_nothing
1598 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1599 here because if SIZE is less than the mode mask, as it is
1600 returned by the macro, it will definitely be less than the
1601 actual mode mask. */
1602 && ((GET_CODE (size) == CONST_INT
1603 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1604 <= GET_MODE_MASK (mode)))
1605 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1606 && (insn_operand_predicate[(int) code][0] == 0
1607 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1608 && (insn_operand_predicate[(int) code][1] == 0
1609 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1610 && (insn_operand_predicate[(int) code][3] == 0
1611 || (*insn_operand_predicate[(int) code][3]) (opalign,
1615 rtx last = get_last_insn ();
1618 op2 = convert_to_mode (mode, size, 1);
1619 if (insn_operand_predicate[(int) code][2] != 0
1620 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1621 op2 = copy_to_mode_reg (mode, op2);
1623 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1630 delete_insns_since (last);
1634 #ifdef TARGET_MEM_FUNCTIONS
1635 emit_library_call (memcpy_libfunc, 0,
1636 VOIDmode, 3, XEXP (x, 0), Pmode,
1638 convert_to_mode (TYPE_MODE (sizetype), size,
1639 TREE_UNSIGNED (sizetype)),
1640 TYPE_MODE (sizetype));
1642 emit_library_call (bcopy_libfunc, 0,
1643 VOIDmode, 3, XEXP (y, 0), Pmode,
1645 convert_to_mode (TYPE_MODE (integer_type_node), size,
1646 TREE_UNSIGNED (integer_type_node)),
1647 TYPE_MODE (integer_type_node));
1652 /* Copy all or part of a value X into registers starting at REGNO.
1653 The number of registers to be filled is NREGS. */
1656 move_block_to_reg (regno, x, nregs, mode)
1660 enum machine_mode mode;
1668 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1669 x = validize_mem (force_const_mem (mode, x));
1671 /* See if the machine can do this with a load multiple insn. */
1672 #ifdef HAVE_load_multiple
1673 if (HAVE_load_multiple)
1675 last = get_last_insn ();
1676 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1684 delete_insns_since (last);
1688 for (i = 0; i < nregs; i++)
1689 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1690 operand_subword_force (x, i, mode));
1693 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1694 The number of registers to be filled is NREGS. SIZE indicates the number
1695 of bytes in the object X. */
1699 move_block_from_reg (regno, x, nregs, size)
1708 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1709 to the left before storing to memory. */
1710 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1712 rtx tem = operand_subword (x, 0, 1, BLKmode);
1718 shift = expand_shift (LSHIFT_EXPR, word_mode,
1719 gen_rtx (REG, word_mode, regno),
1720 build_int_2 ((UNITS_PER_WORD - size)
1721 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1722 emit_move_insn (tem, shift);
1726 /* See if the machine can do this with a store multiple insn. */
1727 #ifdef HAVE_store_multiple
1728 if (HAVE_store_multiple)
1730 last = get_last_insn ();
1731 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
1739 delete_insns_since (last);
1743 for (i = 0; i < nregs; i++)
1745 rtx tem = operand_subword (x, i, 1, BLKmode);
1750 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1754 /* Emit code to move a block Y to a block X, where X is non-consecutive
1755 registers represented by a PARALLEL. */
1758 emit_group_load (x, y)
1761 rtx target_reg, source;
1764 if (GET_CODE (x) != PARALLEL)
1767 /* Check for a NULL entry, used to indicate that the parameter goes
1768 both on the stack and in registers. */
1769 if (XEXP (XVECEXP (x, 0, 0), 0))
1774 for (; i < XVECLEN (x, 0); i++)
1776 rtx element = XVECEXP (x, 0, i);
1778 target_reg = XEXP (element, 0);
1780 if (GET_CODE (y) == MEM)
1781 source = change_address (y, GET_MODE (target_reg),
1782 plus_constant (XEXP (y, 0),
1783 INTVAL (XEXP (element, 1))));
1784 else if (XEXP (element, 1) == const0_rtx)
1786 if (GET_MODE (target_reg) == GET_MODE (y))
1788 /* Allow for the target_reg to be smaller than the input register
1789 to allow for AIX with 4 DF arguments after a single SI arg. The
1790 last DF argument will only load 1 word into the integer registers,
1791 but load a DF value into the float registers. */
1792 else if (GET_MODE_SIZE (GET_MODE (target_reg))
1793 <= GET_MODE_SIZE (GET_MODE (y)))
1794 source = gen_rtx (SUBREG, GET_MODE (target_reg), y, 0);
1801 emit_move_insn (target_reg, source);
1805 /* Emit code to move a block Y to a block X, where Y is non-consecutive
1806 registers represented by a PARALLEL. */
1809 emit_group_store (x, y)
1812 rtx source_reg, target;
1815 if (GET_CODE (y) != PARALLEL)
1818 /* Check for a NULL entry, used to indicate that the parameter goes
1819 both on the stack and in registers. */
1820 if (XEXP (XVECEXP (y, 0, 0), 0))
1825 for (; i < XVECLEN (y, 0); i++)
1827 rtx element = XVECEXP (y, 0, i);
1829 source_reg = XEXP (element, 0);
1831 if (GET_CODE (x) == MEM)
1832 target = change_address (x, GET_MODE (source_reg),
1833 plus_constant (XEXP (x, 0),
1834 INTVAL (XEXP (element, 1))));
1835 else if (XEXP (element, 1) == const0_rtx)
1840 emit_move_insn (target, source_reg);
1844 /* Add a USE expression for REG to the (possibly empty) list pointed
1845 to by CALL_FUSAGE. REG must denote a hard register. */
1848 use_reg (call_fusage, reg)
1849 rtx *call_fusage, reg;
1851 if (GET_CODE (reg) != REG
1852 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
1856 = gen_rtx (EXPR_LIST, VOIDmode,
1857 gen_rtx (USE, VOIDmode, reg), *call_fusage);
1860 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1861 starting at REGNO. All of these registers must be hard registers. */
1864 use_regs (call_fusage, regno, nregs)
1871 if (regno + nregs > FIRST_PSEUDO_REGISTER)
1874 for (i = 0; i < nregs; i++)
1875 use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
1878 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
1879 PARALLEL REGS. This is for calls that pass values in multiple
1880 non-contiguous locations. The Irix 6 ABI has examples of this. */
1883 use_group_regs (call_fusage, regs)
1889 /* Check for a NULL entry, used to indicate that the parameter goes
1890 both on the stack and in registers. */
1891 if (XEXP (XVECEXP (regs, 0, 0), 0))
1896 for (; i < XVECLEN (regs, 0); i++)
1897 use_reg (call_fusage, XEXP (XVECEXP (regs, 0, i), 0));
1900 /* Generate several move instructions to clear LEN bytes of block TO.
1901 (A MEM rtx with BLKmode). The caller must pass TO through
1902 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
1906 clear_by_pieces (to, len, align)
1910 struct clear_by_pieces data;
1911 rtx to_addr = XEXP (to, 0);
1912 int max_size = MOVE_MAX + 1;
1915 data.to_addr = to_addr;
1918 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1919 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1921 data.explicit_inc_to = 0;
1923 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1924 if (data.reverse) data.offset = len;
1927 data.to_struct = MEM_IN_STRUCT_P (to);
1929 /* If copying requires more than two move insns,
1930 copy addresses to registers (to make displacements shorter)
1931 and use post-increment if available. */
1933 && move_by_pieces_ninsns (len, align) > 2)
1935 #ifdef HAVE_PRE_DECREMENT
1936 if (data.reverse && ! data.autinc_to)
1938 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1940 data.explicit_inc_to = -1;
1943 #ifdef HAVE_POST_INCREMENT
1944 if (! data.reverse && ! data.autinc_to)
1946 data.to_addr = copy_addr_to_reg (to_addr);
1948 data.explicit_inc_to = 1;
1951 if (!data.autinc_to && CONSTANT_P (to_addr))
1952 data.to_addr = copy_addr_to_reg (to_addr);
1955 if (! SLOW_UNALIGNED_ACCESS
1956 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1959 /* First move what we can in the largest integer mode, then go to
1960 successively smaller modes. */
1962 while (max_size > 1)
1964 enum machine_mode mode = VOIDmode, tmode;
1965 enum insn_code icode;
1967 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1968 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1969 if (GET_MODE_SIZE (tmode) < max_size)
1972 if (mode == VOIDmode)
1975 icode = mov_optab->handlers[(int) mode].insn_code;
1976 if (icode != CODE_FOR_nothing
1977 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1978 GET_MODE_SIZE (mode)))
1979 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
1981 max_size = GET_MODE_SIZE (mode);
1984 /* The code above should have handled everything. */
1989 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
1990 with move instructions for mode MODE. GENFUN is the gen_... function
1991 to make a move insn for that mode. DATA has all the other info. */
1994 clear_by_pieces_1 (genfun, mode, data)
1996 enum machine_mode mode;
1997 struct clear_by_pieces *data;
1999 register int size = GET_MODE_SIZE (mode);
2002 while (data->len >= size)
2004 if (data->reverse) data->offset -= size;
2006 to1 = (data->autinc_to
2007 ? gen_rtx (MEM, mode, data->to_addr)
2008 : change_address (data->to, mode,
2009 plus_constant (data->to_addr, data->offset)));
2010 MEM_IN_STRUCT_P (to1) = data->to_struct;
2012 #ifdef HAVE_PRE_DECREMENT
2013 if (data->explicit_inc_to < 0)
2014 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2017 emit_insn ((*genfun) (to1, const0_rtx));
2018 #ifdef HAVE_POST_INCREMENT
2019 if (data->explicit_inc_to > 0)
2020 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2023 if (! data->reverse) data->offset += size;
2029 /* Write zeros through the storage of OBJECT.
2030 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2031 the maximum alignment we can is has, measured in bytes. */
2034 clear_storage (object, size, align)
2039 if (GET_MODE (object) == BLKmode)
2041 object = protect_from_queue (object, 1);
2042 size = protect_from_queue (size, 0);
2044 if (GET_CODE (size) == CONST_INT
2045 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
2046 clear_by_pieces (object, INTVAL (size), align);
2050 /* Try the most limited insn first, because there's no point
2051 including more than one in the machine description unless
2052 the more limited one has some advantage. */
2054 rtx opalign = GEN_INT (align);
2055 enum machine_mode mode;
2057 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2058 mode = GET_MODE_WIDER_MODE (mode))
2060 enum insn_code code = clrstr_optab[(int) mode];
2062 if (code != CODE_FOR_nothing
2063 /* We don't need MODE to be narrower than
2064 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2065 the mode mask, as it is returned by the macro, it will
2066 definitely be less than the actual mode mask. */
2067 && ((GET_CODE (size) == CONST_INT
2068 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2069 <= GET_MODE_MASK (mode)))
2070 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2071 && (insn_operand_predicate[(int) code][0] == 0
2072 || (*insn_operand_predicate[(int) code][0]) (object,
2074 && (insn_operand_predicate[(int) code][2] == 0
2075 || (*insn_operand_predicate[(int) code][2]) (opalign,
2079 rtx last = get_last_insn ();
2082 op1 = convert_to_mode (mode, size, 1);
2083 if (insn_operand_predicate[(int) code][1] != 0
2084 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2086 op1 = copy_to_mode_reg (mode, op1);
2088 pat = GEN_FCN ((int) code) (object, op1, opalign);
2095 delete_insns_since (last);
2100 #ifdef TARGET_MEM_FUNCTIONS
2101 emit_library_call (memset_libfunc, 0,
2103 XEXP (object, 0), Pmode,
2104 const0_rtx, TYPE_MODE (integer_type_node),
2105 convert_to_mode (TYPE_MODE (sizetype),
2106 size, TREE_UNSIGNED (sizetype)),
2107 TYPE_MODE (sizetype));
2109 emit_library_call (bzero_libfunc, 0,
2111 XEXP (object, 0), Pmode,
2112 convert_to_mode (TYPE_MODE (integer_type_node),
2114 TREE_UNSIGNED (integer_type_node)),
2115 TYPE_MODE (integer_type_node));
2120 emit_move_insn (object, const0_rtx);
2123 /* Generate code to copy Y into X.
2124 Both Y and X must have the same mode, except that
2125 Y can be a constant with VOIDmode.
2126 This mode cannot be BLKmode; use emit_block_move for that.
2128 Return the last instruction emitted. */
2131 emit_move_insn (x, y)
2134 enum machine_mode mode = GET_MODE (x);
2136 x = protect_from_queue (x, 1);
2137 y = protect_from_queue (y, 0);
2139 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2142 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2143 y = force_const_mem (mode, y);
2145 /* If X or Y are memory references, verify that their addresses are valid
2147 if (GET_CODE (x) == MEM
2148 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2149 && ! push_operand (x, GET_MODE (x)))
2151 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2152 x = change_address (x, VOIDmode, XEXP (x, 0));
2154 if (GET_CODE (y) == MEM
2155 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2157 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2158 y = change_address (y, VOIDmode, XEXP (y, 0));
2160 if (mode == BLKmode)
2163 return emit_move_insn_1 (x, y);
2166 /* Low level part of emit_move_insn.
2167 Called just like emit_move_insn, but assumes X and Y
2168 are basically valid. */
2171 emit_move_insn_1 (x, y)
2174 enum machine_mode mode = GET_MODE (x);
2175 enum machine_mode submode;
2176 enum mode_class class = GET_MODE_CLASS (mode);
2179 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2181 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2183 /* Expand complex moves by moving real part and imag part, if possible. */
2184 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2185 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2187 (class == MODE_COMPLEX_INT
2188 ? MODE_INT : MODE_FLOAT),
2190 && (mov_optab->handlers[(int) submode].insn_code
2191 != CODE_FOR_nothing))
2193 /* Don't split destination if it is a stack push. */
2194 int stack = push_operand (x, GET_MODE (x));
2197 /* If this is a stack, push the highpart first, so it
2198 will be in the argument order.
2200 In that case, change_address is used only to convert
2201 the mode, not to change the address. */
2204 /* Note that the real part always precedes the imag part in memory
2205 regardless of machine's endianness. */
2206 #ifdef STACK_GROWS_DOWNWARD
2207 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2208 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2209 gen_imagpart (submode, y)));
2210 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2211 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2212 gen_realpart (submode, y)));
2214 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2215 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2216 gen_realpart (submode, y)));
2217 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2218 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2219 gen_imagpart (submode, y)));
2224 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2225 (gen_realpart (submode, x), gen_realpart (submode, y)));
2226 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2227 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2230 return get_last_insn ();
2233 /* This will handle any multi-word mode that lacks a move_insn pattern.
2234 However, you will get better code if you define such patterns,
2235 even if they must turn into multiple assembler instructions. */
2236 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2241 #ifdef PUSH_ROUNDING
2243 /* If X is a push on the stack, do the push now and replace
2244 X with a reference to the stack pointer. */
2245 if (push_operand (x, GET_MODE (x)))
2247 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2248 x = change_address (x, VOIDmode, stack_pointer_rtx);
2252 /* Show the output dies here. */
2254 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
2257 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2260 rtx xpart = operand_subword (x, i, 1, mode);
2261 rtx ypart = operand_subword (y, i, 1, mode);
2263 /* If we can't get a part of Y, put Y into memory if it is a
2264 constant. Otherwise, force it into a register. If we still
2265 can't get a part of Y, abort. */
2266 if (ypart == 0 && CONSTANT_P (y))
2268 y = force_const_mem (mode, y);
2269 ypart = operand_subword (y, i, 1, mode);
2271 else if (ypart == 0)
2272 ypart = operand_subword_force (y, i, mode);
2274 if (xpart == 0 || ypart == 0)
2277 last_insn = emit_move_insn (xpart, ypart);
2286 /* Pushing data onto the stack. */
2288 /* Push a block of length SIZE (perhaps variable)
2289 and return an rtx to address the beginning of the block.
2290 Note that it is not possible for the value returned to be a QUEUED.
2291 The value may be virtual_outgoing_args_rtx.
2293 EXTRA is the number of bytes of padding to push in addition to SIZE.
2294 BELOW nonzero means this padding comes at low addresses;
2295 otherwise, the padding comes at high addresses. */
2298 push_block (size, extra, below)
2304 size = convert_modes (Pmode, ptr_mode, size, 1);
2305 if (CONSTANT_P (size))
2306 anti_adjust_stack (plus_constant (size, extra));
2307 else if (GET_CODE (size) == REG && extra == 0)
2308 anti_adjust_stack (size);
2311 rtx temp = copy_to_mode_reg (Pmode, size);
2313 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2314 temp, 0, OPTAB_LIB_WIDEN);
2315 anti_adjust_stack (temp);
2318 #ifdef STACK_GROWS_DOWNWARD
2319 temp = virtual_outgoing_args_rtx;
2320 if (extra != 0 && below)
2321 temp = plus_constant (temp, extra);
2323 if (GET_CODE (size) == CONST_INT)
2324 temp = plus_constant (virtual_outgoing_args_rtx,
2325 - INTVAL (size) - (below ? 0 : extra));
2326 else if (extra != 0 && !below)
2327 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2328 negate_rtx (Pmode, plus_constant (size, extra)));
2330 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2331 negate_rtx (Pmode, size));
2334 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2340 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2343 /* Generate code to push X onto the stack, assuming it has mode MODE and
2345 MODE is redundant except when X is a CONST_INT (since they don't
2347 SIZE is an rtx for the size of data to be copied (in bytes),
2348 needed only if X is BLKmode.
2350 ALIGN (in bytes) is maximum alignment we can assume.
2352 If PARTIAL and REG are both nonzero, then copy that many of the first
2353 words of X into registers starting with REG, and push the rest of X.
2354 The amount of space pushed is decreased by PARTIAL words,
2355 rounded *down* to a multiple of PARM_BOUNDARY.
2356 REG must be a hard register in this case.
2357 If REG is zero but PARTIAL is not, take any all others actions for an
2358 argument partially in registers, but do not actually load any
2361 EXTRA is the amount in bytes of extra space to leave next to this arg.
2362 This is ignored if an argument block has already been allocated.
2364 On a machine that lacks real push insns, ARGS_ADDR is the address of
2365 the bottom of the argument block for this call. We use indexing off there
2366 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
2367 argument block has not been preallocated.
2369 ARGS_SO_FAR is the size of args previously pushed for this call. */
2372 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2373 args_addr, args_so_far)
2375 enum machine_mode mode;
2386 enum direction stack_direction
2387 #ifdef STACK_GROWS_DOWNWARD
2393 /* Decide where to pad the argument: `downward' for below,
2394 `upward' for above, or `none' for don't pad it.
2395 Default is below for small data on big-endian machines; else above. */
2396 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2398 /* If we're placing part of X into a register and part of X onto
2399 the stack, indicate that the entire register is clobbered to
2400 keep flow from thinking the unused part of the register is live. */
2401 if (partial > 0 && reg != 0)
2402 emit_insn (gen_rtx (CLOBBER, VOIDmode, reg));
2404 /* Invert direction if stack is post-update. */
2405 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2406 if (where_pad != none)
2407 where_pad = (where_pad == downward ? upward : downward);
2409 xinner = x = protect_from_queue (x, 0);
2411 if (mode == BLKmode)
2413 /* Copy a block into the stack, entirely or partially. */
2416 int used = partial * UNITS_PER_WORD;
2417 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2425 /* USED is now the # of bytes we need not copy to the stack
2426 because registers will take care of them. */
2429 xinner = change_address (xinner, BLKmode,
2430 plus_constant (XEXP (xinner, 0), used));
2432 /* If the partial register-part of the arg counts in its stack size,
2433 skip the part of stack space corresponding to the registers.
2434 Otherwise, start copying to the beginning of the stack space,
2435 by setting SKIP to 0. */
2436 #ifndef REG_PARM_STACK_SPACE
2442 #ifdef PUSH_ROUNDING
2443 /* Do it with several push insns if that doesn't take lots of insns
2444 and if there is no difficulty with push insns that skip bytes
2445 on the stack for alignment purposes. */
2447 && GET_CODE (size) == CONST_INT
2449 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2451 /* Here we avoid the case of a structure whose weak alignment
2452 forces many pushes of a small amount of data,
2453 and such small pushes do rounding that causes trouble. */
2454 && ((! SLOW_UNALIGNED_ACCESS)
2455 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2456 || PUSH_ROUNDING (align) == align)
2457 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2459 /* Push padding now if padding above and stack grows down,
2460 or if padding below and stack grows up.
2461 But if space already allocated, this has already been done. */
2462 if (extra && args_addr == 0
2463 && where_pad != none && where_pad != stack_direction)
2464 anti_adjust_stack (GEN_INT (extra));
2466 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2467 INTVAL (size) - used, align);
2470 #endif /* PUSH_ROUNDING */
2472 /* Otherwise make space on the stack and copy the data
2473 to the address of that space. */
2475 /* Deduct words put into registers from the size we must copy. */
2478 if (GET_CODE (size) == CONST_INT)
2479 size = GEN_INT (INTVAL (size) - used);
2481 size = expand_binop (GET_MODE (size), sub_optab, size,
2482 GEN_INT (used), NULL_RTX, 0,
2486 /* Get the address of the stack space.
2487 In this case, we do not deal with EXTRA separately.
2488 A single stack adjust will do. */
2491 temp = push_block (size, extra, where_pad == downward);
2494 else if (GET_CODE (args_so_far) == CONST_INT)
2495 temp = memory_address (BLKmode,
2496 plus_constant (args_addr,
2497 skip + INTVAL (args_so_far)));
2499 temp = memory_address (BLKmode,
2500 plus_constant (gen_rtx (PLUS, Pmode,
2501 args_addr, args_so_far),
2504 /* TEMP is the address of the block. Copy the data there. */
2505 if (GET_CODE (size) == CONST_INT
2506 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2509 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2510 INTVAL (size), align);
2513 /* Try the most limited insn first, because there's no point
2514 including more than one in the machine description unless
2515 the more limited one has some advantage. */
2516 #ifdef HAVE_movstrqi
2518 && GET_CODE (size) == CONST_INT
2519 && ((unsigned) INTVAL (size)
2520 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2522 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2523 xinner, size, GEN_INT (align));
2531 #ifdef HAVE_movstrhi
2533 && GET_CODE (size) == CONST_INT
2534 && ((unsigned) INTVAL (size)
2535 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2537 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2538 xinner, size, GEN_INT (align));
2546 #ifdef HAVE_movstrsi
2549 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2550 xinner, size, GEN_INT (align));
2558 #ifdef HAVE_movstrdi
2561 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2562 xinner, size, GEN_INT (align));
2571 #ifndef ACCUMULATE_OUTGOING_ARGS
2572 /* If the source is referenced relative to the stack pointer,
2573 copy it to another register to stabilize it. We do not need
2574 to do this if we know that we won't be changing sp. */
2576 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2577 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2578 temp = copy_to_reg (temp);
2581 /* Make inhibit_defer_pop nonzero around the library call
2582 to force it to pop the bcopy-arguments right away. */
2584 #ifdef TARGET_MEM_FUNCTIONS
2585 emit_library_call (memcpy_libfunc, 0,
2586 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2587 convert_to_mode (TYPE_MODE (sizetype),
2588 size, TREE_UNSIGNED (sizetype)),
2589 TYPE_MODE (sizetype));
2591 emit_library_call (bcopy_libfunc, 0,
2592 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2593 convert_to_mode (TYPE_MODE (integer_type_node),
2595 TREE_UNSIGNED (integer_type_node)),
2596 TYPE_MODE (integer_type_node));
2601 else if (partial > 0)
2603 /* Scalar partly in registers. */
2605 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2608 /* # words of start of argument
2609 that we must make space for but need not store. */
2610 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2611 int args_offset = INTVAL (args_so_far);
2614 /* Push padding now if padding above and stack grows down,
2615 or if padding below and stack grows up.
2616 But if space already allocated, this has already been done. */
2617 if (extra && args_addr == 0
2618 && where_pad != none && where_pad != stack_direction)
2619 anti_adjust_stack (GEN_INT (extra));
2621 /* If we make space by pushing it, we might as well push
2622 the real data. Otherwise, we can leave OFFSET nonzero
2623 and leave the space uninitialized. */
2627 /* Now NOT_STACK gets the number of words that we don't need to
2628 allocate on the stack. */
2629 not_stack = partial - offset;
2631 /* If the partial register-part of the arg counts in its stack size,
2632 skip the part of stack space corresponding to the registers.
2633 Otherwise, start copying to the beginning of the stack space,
2634 by setting SKIP to 0. */
2635 #ifndef REG_PARM_STACK_SPACE
2641 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2642 x = validize_mem (force_const_mem (mode, x));
2644 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2645 SUBREGs of such registers are not allowed. */
2646 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2647 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2648 x = copy_to_reg (x);
2650 /* Loop over all the words allocated on the stack for this arg. */
2651 /* We can do it by words, because any scalar bigger than a word
2652 has a size a multiple of a word. */
2653 #ifndef PUSH_ARGS_REVERSED
2654 for (i = not_stack; i < size; i++)
2656 for (i = size - 1; i >= not_stack; i--)
2658 if (i >= not_stack + offset)
2659 emit_push_insn (operand_subword_force (x, i, mode),
2660 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2662 GEN_INT (args_offset + ((i - not_stack + skip)
2663 * UNITS_PER_WORD)));
2669 /* Push padding now if padding above and stack grows down,
2670 or if padding below and stack grows up.
2671 But if space already allocated, this has already been done. */
2672 if (extra && args_addr == 0
2673 && where_pad != none && where_pad != stack_direction)
2674 anti_adjust_stack (GEN_INT (extra));
2676 #ifdef PUSH_ROUNDING
2678 addr = gen_push_operand ();
2681 if (GET_CODE (args_so_far) == CONST_INT)
2683 = memory_address (mode,
2684 plus_constant (args_addr, INTVAL (args_so_far)));
2686 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2689 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2693 /* If part should go in registers, copy that part
2694 into the appropriate registers. Do this now, at the end,
2695 since mem-to-mem copies above may do function calls. */
2696 if (partial > 0 && reg != 0)
2698 /* Handle calls that pass values in multiple non-contiguous locations.
2699 The Irix 6 ABI has examples of this. */
2700 if (GET_CODE (reg) == PARALLEL)
2701 emit_group_load (reg, x);
2703 move_block_to_reg (REGNO (reg), x, partial, mode);
2706 if (extra && args_addr == 0 && where_pad == stack_direction)
2707 anti_adjust_stack (GEN_INT (extra));
2710 /* Expand an assignment that stores the value of FROM into TO.
2711 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2712 (This may contain a QUEUED rtx;
2713 if the value is constant, this rtx is a constant.)
2714 Otherwise, the returned value is NULL_RTX.
2716 SUGGEST_REG is no longer actually used.
2717 It used to mean, copy the value through a register
2718 and return that register, if that is possible.
2719 We now use WANT_VALUE to decide whether to do this. */
2722 expand_assignment (to, from, want_value, suggest_reg)
2727 register rtx to_rtx = 0;
2730 /* Don't crash if the lhs of the assignment was erroneous. */
2732 if (TREE_CODE (to) == ERROR_MARK)
2734 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2735 return want_value ? result : NULL_RTX;
2738 if (output_bytecode)
2740 tree dest_innermost;
2742 bc_expand_expr (from);
2743 bc_emit_instruction (duplicate);
2745 dest_innermost = bc_expand_address (to);
2747 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2748 take care of it here. */
2750 bc_store_memory (TREE_TYPE (to), dest_innermost);
2754 /* Assignment of a structure component needs special treatment
2755 if the structure component's rtx is not simply a MEM.
2756 Assignment of an array element at a constant index, and assignment of
2757 an array element in an unaligned packed structure field, has the same
2760 if (TREE_CODE (to) == COMPONENT_REF
2761 || TREE_CODE (to) == BIT_FIELD_REF
2762 || (TREE_CODE (to) == ARRAY_REF
2763 && ((TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2764 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST)
2765 || (SLOW_UNALIGNED_ACCESS && get_inner_unaligned_p (to)))))
2767 enum machine_mode mode1;
2777 tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2778 &mode1, &unsignedp, &volatilep);
2780 /* If we are going to use store_bit_field and extract_bit_field,
2781 make sure to_rtx will be safe for multiple use. */
2783 if (mode1 == VOIDmode && want_value)
2784 tem = stabilize_reference (tem);
2786 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
2787 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2790 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2792 if (GET_CODE (to_rtx) != MEM)
2794 to_rtx = change_address (to_rtx, VOIDmode,
2795 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
2796 force_reg (ptr_mode, offset_rtx)));
2797 /* If we have a variable offset, the known alignment
2798 is only that of the innermost structure containing the field.
2799 (Actually, we could sometimes do better by using the
2800 align of an element of the innermost array, but no need.) */
2801 if (TREE_CODE (to) == COMPONENT_REF
2802 || TREE_CODE (to) == BIT_FIELD_REF)
2804 = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (to, 0))) / BITS_PER_UNIT;
2808 if (GET_CODE (to_rtx) == MEM)
2810 /* When the offset is zero, to_rtx is the address of the
2811 structure we are storing into, and hence may be shared.
2812 We must make a new MEM before setting the volatile bit. */
2814 to_rtx = change_address (to_rtx, VOIDmode, XEXP (to_rtx, 0));
2815 MEM_VOLATILE_P (to_rtx) = 1;
2817 #if 0 /* This was turned off because, when a field is volatile
2818 in an object which is not volatile, the object may be in a register,
2819 and then we would abort over here. */
2825 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2827 /* Spurious cast makes HPUX compiler happy. */
2828 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2831 /* Required alignment of containing datum. */
2833 int_size_in_bytes (TREE_TYPE (tem)));
2834 preserve_temp_slots (result);
2838 /* If the value is meaningful, convert RESULT to the proper mode.
2839 Otherwise, return nothing. */
2840 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2841 TYPE_MODE (TREE_TYPE (from)),
2843 TREE_UNSIGNED (TREE_TYPE (to)))
2847 /* If the rhs is a function call and its value is not an aggregate,
2848 call the function before we start to compute the lhs.
2849 This is needed for correct code for cases such as
2850 val = setjmp (buf) on machines where reference to val
2851 requires loading up part of an address in a separate insn.
2853 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2854 a promoted variable where the zero- or sign- extension needs to be done.
2855 Handling this in the normal way is safe because no computation is done
2857 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2858 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
2859 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
2864 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2866 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2868 /* Handle calls that return values in multiple non-contiguous locations.
2869 The Irix 6 ABI has examples of this. */
2870 if (GET_CODE (to_rtx) == PARALLEL)
2871 emit_group_load (to_rtx, value);
2872 else if (GET_MODE (to_rtx) == BLKmode)
2873 emit_block_move (to_rtx, value, expr_size (from),
2874 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
2876 emit_move_insn (to_rtx, value);
2877 preserve_temp_slots (to_rtx);
2880 return want_value ? to_rtx : NULL_RTX;
2883 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2884 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2887 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2889 /* Don't move directly into a return register. */
2890 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2895 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2896 emit_move_insn (to_rtx, temp);
2897 preserve_temp_slots (to_rtx);
2900 return want_value ? to_rtx : NULL_RTX;
2903 /* In case we are returning the contents of an object which overlaps
2904 the place the value is being stored, use a safe function when copying
2905 a value through a pointer into a structure value return block. */
2906 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2907 && current_function_returns_struct
2908 && !current_function_returns_pcc_struct)
2913 size = expr_size (from);
2914 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2916 #ifdef TARGET_MEM_FUNCTIONS
2917 emit_library_call (memcpy_libfunc, 0,
2918 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2919 XEXP (from_rtx, 0), Pmode,
2920 convert_to_mode (TYPE_MODE (sizetype),
2921 size, TREE_UNSIGNED (sizetype)),
2922 TYPE_MODE (sizetype));
2924 emit_library_call (bcopy_libfunc, 0,
2925 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2926 XEXP (to_rtx, 0), Pmode,
2927 convert_to_mode (TYPE_MODE (integer_type_node),
2928 size, TREE_UNSIGNED (integer_type_node)),
2929 TYPE_MODE (integer_type_node));
2932 preserve_temp_slots (to_rtx);
2935 return want_value ? to_rtx : NULL_RTX;
2938 /* Compute FROM and store the value in the rtx we got. */
2941 result = store_expr (from, to_rtx, want_value);
2942 preserve_temp_slots (result);
2945 return want_value ? result : NULL_RTX;
2948 /* Generate code for computing expression EXP,
2949 and storing the value into TARGET.
2950 TARGET may contain a QUEUED rtx.
2952 If WANT_VALUE is nonzero, return a copy of the value
2953 not in TARGET, so that we can be sure to use the proper
2954 value in a containing expression even if TARGET has something
2955 else stored in it. If possible, we copy the value through a pseudo
2956 and return that pseudo. Or, if the value is constant, we try to
2957 return the constant. In some cases, we return a pseudo
2958 copied *from* TARGET.
2960 If the mode is BLKmode then we may return TARGET itself.
2961 It turns out that in BLKmode it doesn't cause a problem.
2962 because C has no operators that could combine two different
2963 assignments into the same BLKmode object with different values
2964 with no sequence point. Will other languages need this to
2967 If WANT_VALUE is 0, we return NULL, to make sure
2968 to catch quickly any cases where the caller uses the value
2969 and fails to set WANT_VALUE. */
2972 store_expr (exp, target, want_value)
2974 register rtx target;
2978 int dont_return_target = 0;
2980 if (TREE_CODE (exp) == COMPOUND_EXPR)
2982 /* Perform first part of compound expression, then assign from second
2984 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2986 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
2988 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2990 /* For conditional expression, get safe form of the target. Then
2991 test the condition, doing the appropriate assignment on either
2992 side. This avoids the creation of unnecessary temporaries.
2993 For non-BLKmode, it is more efficient not to do this. */
2995 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2996 rtx flag = NULL_RTX;
2997 tree left_cleanups = NULL_TREE;
2998 tree right_cleanups = NULL_TREE;
2999 tree old_cleanups = cleanups_this_call;
3001 /* Used to save a pointer to the place to put the setting of
3002 the flag that indicates if this side of the conditional was
3003 taken. We backpatch the code, if we find out later that we
3004 have any conditional cleanups that need to be performed. */
3005 rtx dest_right_flag = NULL_RTX;
3006 rtx dest_left_flag = NULL_RTX;
3009 target = protect_from_queue (target, 1);
3011 do_pending_stack_adjust ();
3013 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3014 store_expr (TREE_OPERAND (exp, 1), target, 0);
3015 dest_left_flag = get_last_insn ();
3016 /* Handle conditional cleanups, if any. */
3017 left_cleanups = defer_cleanups_to (old_cleanups);
3019 emit_jump_insn (gen_jump (lab2));
3022 store_expr (TREE_OPERAND (exp, 2), target, 0);
3023 dest_right_flag = get_last_insn ();
3024 /* Handle conditional cleanups, if any. */
3025 right_cleanups = defer_cleanups_to (old_cleanups);
3030 /* Add back in any conditional cleanups. */
3031 if (left_cleanups || right_cleanups)
3037 /* Now that we know that a flag is needed, go back and add in the
3038 setting of the flag. */
3040 flag = gen_reg_rtx (word_mode);
3042 /* Do the left side flag. */
3043 last = get_last_insn ();
3044 /* Flag left cleanups as needed. */
3045 emit_move_insn (flag, const1_rtx);
3046 /* ??? deprecated, use sequences instead. */
3047 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
3049 /* Do the right side flag. */
3050 last = get_last_insn ();
3051 /* Flag left cleanups as needed. */
3052 emit_move_insn (flag, const0_rtx);
3053 /* ??? deprecated, use sequences instead. */
3054 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
3056 /* All cleanups must be on the function_obstack. */
3057 push_obstacks_nochange ();
3058 resume_temporary_allocation ();
3060 /* convert flag, which is an rtx, into a tree. */
3061 cond = make_node (RTL_EXPR);
3062 TREE_TYPE (cond) = integer_type_node;
3063 RTL_EXPR_RTL (cond) = flag;
3064 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
3065 cond = save_expr (cond);
3067 if (! left_cleanups)
3068 left_cleanups = integer_zero_node;
3069 if (! right_cleanups)
3070 right_cleanups = integer_zero_node;
3071 new_cleanups = build (COND_EXPR, void_type_node,
3072 truthvalue_conversion (cond),
3073 left_cleanups, right_cleanups);
3074 new_cleanups = fold (new_cleanups);
3078 /* Now add in the conditionalized cleanups. */
3080 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
3081 expand_eh_region_start ();
3083 return want_value ? target : NULL_RTX;
3085 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3086 && GET_MODE (target) != BLKmode)
3087 /* If target is in memory and caller wants value in a register instead,
3088 arrange that. Pass TARGET as target for expand_expr so that,
3089 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3090 We know expand_expr will not use the target in that case.
3091 Don't do this if TARGET is volatile because we are supposed
3092 to write it and then read it. */
3094 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3095 GET_MODE (target), 0);
3096 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3097 temp = copy_to_reg (temp);
3098 dont_return_target = 1;
3100 else if (queued_subexp_p (target))
3101 /* If target contains a postincrement, let's not risk
3102 using it as the place to generate the rhs. */
3104 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3106 /* Expand EXP into a new pseudo. */
3107 temp = gen_reg_rtx (GET_MODE (target));
3108 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3111 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3113 /* If target is volatile, ANSI requires accessing the value
3114 *from* the target, if it is accessed. So make that happen.
3115 In no case return the target itself. */
3116 if (! MEM_VOLATILE_P (target) && want_value)
3117 dont_return_target = 1;
3119 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3120 /* If this is an scalar in a register that is stored in a wider mode
3121 than the declared mode, compute the result into its declared mode
3122 and then convert to the wider mode. Our value is the computed
3125 /* If we don't want a value, we can do the conversion inside EXP,
3126 which will often result in some optimizations. Do the conversion
3127 in two steps: first change the signedness, if needed, then
3128 the extend. But don't do this if the type of EXP is a subtype
3129 of something else since then the conversion might involve
3130 more than just converting modes. */
3131 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3132 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3134 if (TREE_UNSIGNED (TREE_TYPE (exp))
3135 != SUBREG_PROMOTED_UNSIGNED_P (target))
3138 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3142 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3143 SUBREG_PROMOTED_UNSIGNED_P (target)),
3147 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3149 /* If TEMP is a volatile MEM and we want a result value, make
3150 the access now so it gets done only once. Likewise if
3151 it contains TARGET. */
3152 if (GET_CODE (temp) == MEM && want_value
3153 && (MEM_VOLATILE_P (temp)
3154 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3155 temp = copy_to_reg (temp);
3157 /* If TEMP is a VOIDmode constant, use convert_modes to make
3158 sure that we properly convert it. */
3159 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3160 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3161 TYPE_MODE (TREE_TYPE (exp)), temp,
3162 SUBREG_PROMOTED_UNSIGNED_P (target));
3164 convert_move (SUBREG_REG (target), temp,
3165 SUBREG_PROMOTED_UNSIGNED_P (target));
3166 return want_value ? temp : NULL_RTX;
3170 temp = expand_expr (exp, target, GET_MODE (target), 0);
3171 /* Return TARGET if it's a specified hardware register.
3172 If TARGET is a volatile mem ref, either return TARGET
3173 or return a reg copied *from* TARGET; ANSI requires this.
3175 Otherwise, if TEMP is not TARGET, return TEMP
3176 if it is constant (for efficiency),
3177 or if we really want the correct value. */
3178 if (!(target && GET_CODE (target) == REG
3179 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3180 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3182 && (CONSTANT_P (temp) || want_value))
3183 dont_return_target = 1;
3186 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3187 the same as that of TARGET, adjust the constant. This is needed, for
3188 example, in case it is a CONST_DOUBLE and we want only a word-sized
3190 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3191 && TREE_CODE (exp) != ERROR_MARK
3192 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3193 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3194 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3196 /* If value was not generated in the target, store it there.
3197 Convert the value to TARGET's type first if nec. */
3199 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
3201 target = protect_from_queue (target, 1);
3202 if (GET_MODE (temp) != GET_MODE (target)
3203 && GET_MODE (temp) != VOIDmode)
3205 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3206 if (dont_return_target)
3208 /* In this case, we will return TEMP,
3209 so make sure it has the proper mode.
3210 But don't forget to store the value into TARGET. */
3211 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3212 emit_move_insn (target, temp);
3215 convert_move (target, temp, unsignedp);
3218 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3220 /* Handle copying a string constant into an array.
3221 The string constant may be shorter than the array.
3222 So copy just the string's actual length, and clear the rest. */
3226 /* Get the size of the data type of the string,
3227 which is actually the size of the target. */
3228 size = expr_size (exp);
3229 if (GET_CODE (size) == CONST_INT
3230 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3231 emit_block_move (target, temp, size,
3232 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3235 /* Compute the size of the data to copy from the string. */
3237 = size_binop (MIN_EXPR,
3238 make_tree (sizetype, size),
3240 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3241 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3245 /* Copy that much. */
3246 emit_block_move (target, temp, copy_size_rtx,
3247 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3249 /* Figure out how much is left in TARGET that we have to clear.
3250 Do all calculations in ptr_mode. */
3252 addr = XEXP (target, 0);
3253 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3255 if (GET_CODE (copy_size_rtx) == CONST_INT)
3257 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3258 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3262 addr = force_reg (ptr_mode, addr);
3263 addr = expand_binop (ptr_mode, add_optab, addr,
3264 copy_size_rtx, NULL_RTX, 0,
3267 size = expand_binop (ptr_mode, sub_optab, size,
3268 copy_size_rtx, NULL_RTX, 0,
3271 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
3272 GET_MODE (size), 0, 0);
3273 label = gen_label_rtx ();
3274 emit_jump_insn (gen_blt (label));
3277 if (size != const0_rtx)
3279 #ifdef TARGET_MEM_FUNCTIONS
3280 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3282 const0_rtx, TYPE_MODE (integer_type_node),
3283 convert_to_mode (TYPE_MODE (sizetype),
3285 TREE_UNSIGNED (sizetype)),
3286 TYPE_MODE (sizetype));
3288 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3290 convert_to_mode (TYPE_MODE (integer_type_node),
3292 TREE_UNSIGNED (integer_type_node)),
3293 TYPE_MODE (integer_type_node));
3301 /* Handle calls that return values in multiple non-contiguous locations.
3302 The Irix 6 ABI has examples of this. */
3303 else if (GET_CODE (target) == PARALLEL)
3304 emit_group_load (target, temp);
3305 else if (GET_MODE (temp) == BLKmode)
3306 emit_block_move (target, temp, expr_size (exp),
3307 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3309 emit_move_insn (target, temp);
3312 /* If we don't want a value, return NULL_RTX. */
3316 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3317 ??? The latter test doesn't seem to make sense. */
3318 else if (dont_return_target && GET_CODE (temp) != MEM)
3321 /* Return TARGET itself if it is a hard register. */
3322 else if (want_value && GET_MODE (target) != BLKmode
3323 && ! (GET_CODE (target) == REG
3324 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3325 return copy_to_reg (target);
3331 /* Return 1 if EXP just contains zeros. */
3339 switch (TREE_CODE (exp))
3343 case NON_LVALUE_EXPR:
3344 return is_zeros_p (TREE_OPERAND (exp, 0));
3347 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3351 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3354 return REAL_VALUES_EQUAL (TREE_REAL_CST (exp), dconst0);
3357 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3358 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3359 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3360 if (! is_zeros_p (TREE_VALUE (elt)))
3369 /* Return 1 if EXP contains mostly (3/4) zeros. */
3372 mostly_zeros_p (exp)
3375 if (TREE_CODE (exp) == CONSTRUCTOR)
3377 int elts = 0, zeros = 0;
3378 tree elt = CONSTRUCTOR_ELTS (exp);
3379 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3381 /* If there are no ranges of true bits, it is all zero. */
3382 return elt == NULL_TREE;
3384 for (; elt; elt = TREE_CHAIN (elt))
3386 /* We do not handle the case where the index is a RANGE_EXPR,
3387 so the statistic will be somewhat inaccurate.
3388 We do make a more accurate count in store_constructor itself,
3389 so since this function is only used for nested array elements,
3390 this should be close enough. */
3391 if (mostly_zeros_p (TREE_VALUE (elt)))
3396 return 4 * zeros >= 3 * elts;
3399 return is_zeros_p (exp);
3402 /* Helper function for store_constructor.
3403 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3404 TYPE is the type of the CONSTRUCTOR, not the element type.
3405 CLEARED is as for store_constructor.
3407 This provides a recursive shortcut back to store_constructor when it isn't
3408 necessary to go through store_field. This is so that we can pass through
3409 the cleared field to let store_constructor know that we may not have to
3410 clear a substructure if the outer structure has already been cleared. */
3413 store_constructor_field (target, bitsize, bitpos,
3414 mode, exp, type, cleared)
3416 int bitsize, bitpos;
3417 enum machine_mode mode;
3421 if (TREE_CODE (exp) == CONSTRUCTOR
3422 && bitpos % BITS_PER_UNIT == 0
3423 /* If we have a non-zero bitpos for a register target, then we just
3424 let store_field do the bitfield handling. This is unlikely to
3425 generate unnecessary clear instructions anyways. */
3426 && (bitpos == 0 || GET_CODE (target) == MEM))
3429 target = change_address (target, VOIDmode,
3430 plus_constant (XEXP (target, 0),
3431 bitpos / BITS_PER_UNIT));
3432 store_constructor (exp, target, cleared);
3435 store_field (target, bitsize, bitpos, mode, exp,
3436 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3437 int_size_in_bytes (type));
3440 /* Store the value of constructor EXP into the rtx TARGET.
3441 TARGET is either a REG or a MEM.
3442 CLEARED is true if TARGET is known to have been zero'd. */
3445 store_constructor (exp, target, cleared)
3450 tree type = TREE_TYPE (exp);
3452 /* We know our target cannot conflict, since safe_from_p has been called. */
3454 /* Don't try copying piece by piece into a hard register
3455 since that is vulnerable to being clobbered by EXP.
3456 Instead, construct in a pseudo register and then copy it all. */
3457 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3459 rtx temp = gen_reg_rtx (GET_MODE (target));
3460 store_constructor (exp, temp, 0);
3461 emit_move_insn (target, temp);
3466 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3467 || TREE_CODE (type) == QUAL_UNION_TYPE)
3471 /* Inform later passes that the whole union value is dead. */
3472 if (TREE_CODE (type) == UNION_TYPE
3473 || TREE_CODE (type) == QUAL_UNION_TYPE)
3474 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3476 /* If we are building a static constructor into a register,
3477 set the initial value as zero so we can fold the value into
3478 a constant. But if more than one register is involved,
3479 this probably loses. */
3480 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3481 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
3484 emit_move_insn (target, const0_rtx);
3489 /* If the constructor has fewer fields than the structure
3490 or if we are initializing the structure to mostly zeros,
3491 clear the whole structure first. */
3492 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3493 != list_length (TYPE_FIELDS (type)))
3494 || mostly_zeros_p (exp))
3497 clear_storage (target, expr_size (exp),
3498 TYPE_ALIGN (type) / BITS_PER_UNIT);
3503 /* Inform later passes that the old value is dead. */
3504 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3506 /* Store each element of the constructor into
3507 the corresponding field of TARGET. */
3509 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3511 register tree field = TREE_PURPOSE (elt);
3512 register enum machine_mode mode;
3516 tree pos, constant = 0, offset = 0;
3517 rtx to_rtx = target;
3519 /* Just ignore missing fields.
3520 We cleared the whole structure, above,
3521 if any fields are missing. */
3525 if (cleared && is_zeros_p (TREE_VALUE (elt)))
3528 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3529 unsignedp = TREE_UNSIGNED (field);
3530 mode = DECL_MODE (field);
3531 if (DECL_BIT_FIELD (field))
3534 pos = DECL_FIELD_BITPOS (field);
3535 if (TREE_CODE (pos) == INTEGER_CST)
3537 else if (TREE_CODE (pos) == PLUS_EXPR
3538 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3539 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3544 bitpos = TREE_INT_CST_LOW (constant);
3550 if (contains_placeholder_p (offset))
3551 offset = build (WITH_RECORD_EXPR, sizetype,
3554 offset = size_binop (FLOOR_DIV_EXPR, offset,
3555 size_int (BITS_PER_UNIT));
3557 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3558 if (GET_CODE (to_rtx) != MEM)
3562 = change_address (to_rtx, VOIDmode,
3563 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
3564 force_reg (ptr_mode, offset_rtx)));
3566 if (TREE_READONLY (field))
3568 if (GET_CODE (to_rtx) == MEM)
3569 to_rtx = change_address (to_rtx, GET_MODE (to_rtx),
3571 RTX_UNCHANGING_P (to_rtx) = 1;
3574 store_constructor_field (to_rtx, bitsize, bitpos,
3575 mode, TREE_VALUE (elt), type, cleared);
3578 else if (TREE_CODE (type) == ARRAY_TYPE)
3583 tree domain = TYPE_DOMAIN (type);
3584 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3585 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3586 tree elttype = TREE_TYPE (type);
3588 /* If the constructor has fewer elements than the array,
3589 clear the whole array first. Similarly if this this is
3590 static constructor of a non-BLKmode object. */
3591 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3595 HOST_WIDE_INT count = 0, zero_count = 0;
3597 /* This loop is a more accurate version of the loop in
3598 mostly_zeros_p (it handles RANGE_EXPR in an index).
3599 It is also needed to check for missing elements. */
3600 for (elt = CONSTRUCTOR_ELTS (exp);
3602 elt = TREE_CHAIN (elt))
3604 tree index = TREE_PURPOSE (elt);
3605 HOST_WIDE_INT this_node_count;
3606 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3608 tree lo_index = TREE_OPERAND (index, 0);
3609 tree hi_index = TREE_OPERAND (index, 1);
3610 if (TREE_CODE (lo_index) != INTEGER_CST
3611 || TREE_CODE (hi_index) != INTEGER_CST)
3616 this_node_count = TREE_INT_CST_LOW (hi_index)
3617 - TREE_INT_CST_LOW (lo_index) + 1;
3620 this_node_count = 1;
3621 count += this_node_count;
3622 if (mostly_zeros_p (TREE_VALUE (elt)))
3623 zero_count += this_node_count;
3625 /* Clear the entire array first if there are any missing elements,
3626 or if the incidence of zero elements is >= 75%. */
3627 if (count < maxelt - minelt + 1
3628 || 4 * zero_count >= 3 * count)
3634 clear_storage (target, expr_size (exp),
3635 TYPE_ALIGN (type) / BITS_PER_UNIT);
3639 /* Inform later passes that the old value is dead. */
3640 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3642 /* Store each element of the constructor into
3643 the corresponding element of TARGET, determined
3644 by counting the elements. */
3645 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3647 elt = TREE_CHAIN (elt), i++)
3649 register enum machine_mode mode;
3653 tree value = TREE_VALUE (elt);
3654 tree index = TREE_PURPOSE (elt);
3655 rtx xtarget = target;
3657 if (cleared && is_zeros_p (value))
3660 mode = TYPE_MODE (elttype);
3661 bitsize = GET_MODE_BITSIZE (mode);
3662 unsignedp = TREE_UNSIGNED (elttype);
3664 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3666 tree lo_index = TREE_OPERAND (index, 0);
3667 tree hi_index = TREE_OPERAND (index, 1);
3668 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
3669 struct nesting *loop;
3670 HOST_WIDE_INT lo, hi, count;
3673 /* If the range is constant and "small", unroll the loop. */
3674 if (TREE_CODE (lo_index) == INTEGER_CST
3675 && TREE_CODE (hi_index) == INTEGER_CST
3676 && (lo = TREE_INT_CST_LOW (lo_index),
3677 hi = TREE_INT_CST_LOW (hi_index),
3678 count = hi - lo + 1,
3679 (GET_CODE (target) != MEM
3681 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
3682 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
3685 lo -= minelt; hi -= minelt;
3686 for (; lo <= hi; lo++)
3688 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
3689 store_constructor_field (target, bitsize, bitpos,
3690 mode, value, type, cleared);
3695 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
3696 loop_top = gen_label_rtx ();
3697 loop_end = gen_label_rtx ();
3699 unsignedp = TREE_UNSIGNED (domain);
3701 index = build_decl (VAR_DECL, NULL_TREE, domain);
3703 DECL_RTL (index) = index_r
3704 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
3707 if (TREE_CODE (value) == SAVE_EXPR
3708 && SAVE_EXPR_RTL (value) == 0)
3710 /* Make sure value gets expanded once before the
3712 expand_expr (value, const0_rtx, VOIDmode, 0);
3715 store_expr (lo_index, index_r, 0);
3716 loop = expand_start_loop (0);
3718 /* Assign value to element index. */
3719 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3720 size_int (BITS_PER_UNIT));
3721 position = size_binop (MULT_EXPR,
3722 size_binop (MINUS_EXPR, index,
3723 TYPE_MIN_VALUE (domain)),
3725 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3726 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3727 xtarget = change_address (target, mode, addr);
3728 if (TREE_CODE (value) == CONSTRUCTOR)
3729 store_constructor (value, xtarget, cleared);
3731 store_expr (value, xtarget, 0);
3733 expand_exit_loop_if_false (loop,
3734 build (LT_EXPR, integer_type_node,
3737 expand_increment (build (PREINCREMENT_EXPR,
3739 index, integer_one_node), 0, 0);
3741 emit_label (loop_end);
3743 /* Needed by stupid register allocation. to extend the
3744 lifetime of pseudo-regs used by target past the end
3746 emit_insn (gen_rtx (USE, GET_MODE (target), target));
3749 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
3750 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
3756 index = size_int (i);
3759 index = size_binop (MINUS_EXPR, index,
3760 TYPE_MIN_VALUE (domain));
3761 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3762 size_int (BITS_PER_UNIT));
3763 position = size_binop (MULT_EXPR, index, position);
3764 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3765 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3766 xtarget = change_address (target, mode, addr);
3767 store_expr (value, xtarget, 0);
3772 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
3773 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3775 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3776 store_constructor_field (target, bitsize, bitpos,
3777 mode, value, type, cleared);
3781 /* set constructor assignments */
3782 else if (TREE_CODE (type) == SET_TYPE)
3784 tree elt = CONSTRUCTOR_ELTS (exp);
3785 rtx xtarget = XEXP (target, 0);
3786 int set_word_size = TYPE_ALIGN (type);
3787 int nbytes = int_size_in_bytes (type), nbits;
3788 tree domain = TYPE_DOMAIN (type);
3789 tree domain_min, domain_max, bitlength;
3791 /* The default implementation strategy is to extract the constant
3792 parts of the constructor, use that to initialize the target,
3793 and then "or" in whatever non-constant ranges we need in addition.
3795 If a large set is all zero or all ones, it is
3796 probably better to set it using memset (if available) or bzero.
3797 Also, if a large set has just a single range, it may also be
3798 better to first clear all the first clear the set (using
3799 bzero/memset), and set the bits we want. */
3801 /* Check for all zeros. */
3802 if (elt == NULL_TREE)
3805 clear_storage (target, expr_size (exp),
3806 TYPE_ALIGN (type) / BITS_PER_UNIT);
3810 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3811 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3812 bitlength = size_binop (PLUS_EXPR,
3813 size_binop (MINUS_EXPR, domain_max, domain_min),
3816 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
3818 nbits = TREE_INT_CST_LOW (bitlength);
3820 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
3821 are "complicated" (more than one range), initialize (the
3822 constant parts) by copying from a constant. */
3823 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
3824 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
3826 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3827 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
3828 char *bit_buffer = (char *) alloca (nbits);
3829 HOST_WIDE_INT word = 0;
3832 int offset = 0; /* In bytes from beginning of set. */
3833 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
3836 if (bit_buffer[ibit])
3838 if (BYTES_BIG_ENDIAN)
3839 word |= (1 << (set_word_size - 1 - bit_pos));
3841 word |= 1 << bit_pos;
3844 if (bit_pos >= set_word_size || ibit == nbits)
3846 if (word != 0 || ! cleared)
3848 rtx datum = GEN_INT (word);
3850 /* The assumption here is that it is safe to use
3851 XEXP if the set is multi-word, but not if
3852 it's single-word. */
3853 if (GET_CODE (target) == MEM)
3855 to_rtx = plus_constant (XEXP (target, 0), offset);
3856 to_rtx = change_address (target, mode, to_rtx);
3858 else if (offset == 0)
3862 emit_move_insn (to_rtx, datum);
3868 offset += set_word_size / BITS_PER_UNIT;
3874 /* Don't bother clearing storage if the set is all ones. */
3875 if (TREE_CHAIN (elt) != NULL_TREE
3876 || (TREE_PURPOSE (elt) == NULL_TREE
3878 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
3879 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
3880 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
3881 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
3883 clear_storage (target, expr_size (exp),
3884 TYPE_ALIGN (type) / BITS_PER_UNIT);
3887 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
3889 /* start of range of element or NULL */
3890 tree startbit = TREE_PURPOSE (elt);
3891 /* end of range of element, or element value */
3892 tree endbit = TREE_VALUE (elt);
3893 HOST_WIDE_INT startb, endb;
3894 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
3896 bitlength_rtx = expand_expr (bitlength,
3897 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
3899 /* handle non-range tuple element like [ expr ] */
3900 if (startbit == NULL_TREE)
3902 startbit = save_expr (endbit);
3905 startbit = convert (sizetype, startbit);
3906 endbit = convert (sizetype, endbit);
3907 if (! integer_zerop (domain_min))
3909 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
3910 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
3912 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
3913 EXPAND_CONST_ADDRESS);
3914 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
3915 EXPAND_CONST_ADDRESS);
3919 targetx = assign_stack_temp (GET_MODE (target),
3920 GET_MODE_SIZE (GET_MODE (target)),
3922 emit_move_insn (targetx, target);
3924 else if (GET_CODE (target) == MEM)
3929 #ifdef TARGET_MEM_FUNCTIONS
3930 /* Optimization: If startbit and endbit are
3931 constants divisible by BITS_PER_UNIT,
3932 call memset instead. */
3933 if (TREE_CODE (startbit) == INTEGER_CST
3934 && TREE_CODE (endbit) == INTEGER_CST
3935 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
3936 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
3938 emit_library_call (memset_libfunc, 0,
3940 plus_constant (XEXP (targetx, 0),
3941 startb / BITS_PER_UNIT),
3943 constm1_rtx, TYPE_MODE (integer_type_node),
3944 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3945 TYPE_MODE (sizetype));
3950 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
3951 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
3952 bitlength_rtx, TYPE_MODE (sizetype),
3953 startbit_rtx, TYPE_MODE (sizetype),
3954 endbit_rtx, TYPE_MODE (sizetype));
3957 emit_move_insn (target, targetx);
3965 /* Store the value of EXP (an expression tree)
3966 into a subfield of TARGET which has mode MODE and occupies
3967 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3968 If MODE is VOIDmode, it means that we are storing into a bit-field.
3970 If VALUE_MODE is VOIDmode, return nothing in particular.
3971 UNSIGNEDP is not used in this case.
3973 Otherwise, return an rtx for the value stored. This rtx
3974 has mode VALUE_MODE if that is convenient to do.
3975 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3977 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3978 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
3981 store_field (target, bitsize, bitpos, mode, exp, value_mode,
3982 unsignedp, align, total_size)
3984 int bitsize, bitpos;
3985 enum machine_mode mode;
3987 enum machine_mode value_mode;
3992 HOST_WIDE_INT width_mask = 0;
3994 if (bitsize < HOST_BITS_PER_WIDE_INT)
3995 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
3997 /* If we are storing into an unaligned field of an aligned union that is
3998 in a register, we may have the mode of TARGET being an integer mode but
3999 MODE == BLKmode. In that case, get an aligned object whose size and
4000 alignment are the same as TARGET and store TARGET into it (we can avoid
4001 the store if the field being stored is the entire width of TARGET). Then
4002 call ourselves recursively to store the field into a BLKmode version of
4003 that object. Finally, load from the object into TARGET. This is not
4004 very efficient in general, but should only be slightly more expensive
4005 than the otherwise-required unaligned accesses. Perhaps this can be
4006 cleaned up later. */
4009 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4011 rtx object = assign_stack_temp (GET_MODE (target),
4012 GET_MODE_SIZE (GET_MODE (target)), 0);
4013 rtx blk_object = copy_rtx (object);
4015 MEM_IN_STRUCT_P (object) = 1;
4016 MEM_IN_STRUCT_P (blk_object) = 1;
4017 PUT_MODE (blk_object, BLKmode);
4019 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4020 emit_move_insn (object, target);
4022 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4025 /* Even though we aren't returning target, we need to
4026 give it the updated value. */
4027 emit_move_insn (target, object);
4032 /* If the structure is in a register or if the component
4033 is a bit field, we cannot use addressing to access it.
4034 Use bit-field techniques or SUBREG to store in it. */
4036 if (mode == VOIDmode
4037 || (mode != BLKmode && ! direct_store[(int) mode])
4038 || GET_CODE (target) == REG
4039 || GET_CODE (target) == SUBREG
4040 /* If the field isn't aligned enough to store as an ordinary memref,
4041 store it as a bit field. */
4042 || (SLOW_UNALIGNED_ACCESS
4043 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4044 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4046 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4048 /* If BITSIZE is narrower than the size of the type of EXP
4049 we will be narrowing TEMP. Normally, what's wanted are the
4050 low-order bits. However, if EXP's type is a record and this is
4051 big-endian machine, we want the upper BITSIZE bits. */
4052 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4053 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4054 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4055 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4056 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4060 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4062 if (mode != VOIDmode && mode != BLKmode
4063 && mode != TYPE_MODE (TREE_TYPE (exp)))
4064 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4066 /* If the modes of TARGET and TEMP are both BLKmode, both
4067 must be in memory and BITPOS must be aligned on a byte
4068 boundary. If so, we simply do a block copy. */
4069 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4071 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4072 || bitpos % BITS_PER_UNIT != 0)
4075 target = change_address (target, VOIDmode,
4076 plus_constant (XEXP (target, 0),
4077 bitpos / BITS_PER_UNIT));
4079 emit_block_move (target, temp,
4080 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4084 return value_mode == VOIDmode ? const0_rtx : target;
4087 /* Store the value in the bitfield. */
4088 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4089 if (value_mode != VOIDmode)
4091 /* The caller wants an rtx for the value. */
4092 /* If possible, avoid refetching from the bitfield itself. */
4094 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4097 enum machine_mode tmode;
4100 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4101 tmode = GET_MODE (temp);
4102 if (tmode == VOIDmode)
4104 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4105 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4106 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4108 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4109 NULL_RTX, value_mode, 0, align,
4116 rtx addr = XEXP (target, 0);
4119 /* If a value is wanted, it must be the lhs;
4120 so make the address stable for multiple use. */
4122 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4123 && ! CONSTANT_ADDRESS_P (addr)
4124 /* A frame-pointer reference is already stable. */
4125 && ! (GET_CODE (addr) == PLUS
4126 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4127 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4128 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4129 addr = copy_to_reg (addr);
4131 /* Now build a reference to just the desired component. */
4133 to_rtx = change_address (target, mode,
4134 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
4135 MEM_IN_STRUCT_P (to_rtx) = 1;
4137 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4141 /* Return true if any object containing the innermost array is an unaligned
4142 packed structure field. */
4145 get_inner_unaligned_p (exp)
4148 int needed_alignment = TYPE_ALIGN (TREE_TYPE (exp));
4152 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4154 if (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
4158 else if (TREE_CODE (exp) != ARRAY_REF
4159 && TREE_CODE (exp) != NON_LVALUE_EXPR
4160 && ! ((TREE_CODE (exp) == NOP_EXPR
4161 || TREE_CODE (exp) == CONVERT_EXPR)
4162 && (TYPE_MODE (TREE_TYPE (exp))
4163 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4166 exp = TREE_OPERAND (exp, 0);
4172 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4173 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4174 ARRAY_REFs and find the ultimate containing object, which we return.
4176 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4177 bit position, and *PUNSIGNEDP to the signedness of the field.
4178 If the position of the field is variable, we store a tree
4179 giving the variable offset (in units) in *POFFSET.
4180 This offset is in addition to the bit position.
4181 If the position is not variable, we store 0 in *POFFSET.
4183 If any of the extraction expressions is volatile,
4184 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4186 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4187 is a mode that can be used to access the field. In that case, *PBITSIZE
4190 If the field describes a variable-sized object, *PMODE is set to
4191 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4192 this case, but the address of the object can be found. */
4195 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4196 punsignedp, pvolatilep)
4201 enum machine_mode *pmode;
4205 tree orig_exp = exp;
4207 enum machine_mode mode = VOIDmode;
4208 tree offset = integer_zero_node;
4210 if (TREE_CODE (exp) == COMPONENT_REF)
4212 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4213 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4214 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4215 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4217 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4219 size_tree = TREE_OPERAND (exp, 1);
4220 *punsignedp = TREE_UNSIGNED (exp);
4224 mode = TYPE_MODE (TREE_TYPE (exp));
4225 *pbitsize = GET_MODE_BITSIZE (mode);
4226 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4231 if (TREE_CODE (size_tree) != INTEGER_CST)
4232 mode = BLKmode, *pbitsize = -1;
4234 *pbitsize = TREE_INT_CST_LOW (size_tree);
4237 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4238 and find the ultimate containing object. */
4244 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4246 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4247 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4248 : TREE_OPERAND (exp, 2));
4249 tree constant = integer_zero_node, var = pos;
4251 /* If this field hasn't been filled in yet, don't go
4252 past it. This should only happen when folding expressions
4253 made during type construction. */
4257 /* Assume here that the offset is a multiple of a unit.
4258 If not, there should be an explicitly added constant. */
4259 if (TREE_CODE (pos) == PLUS_EXPR
4260 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4261 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4262 else if (TREE_CODE (pos) == INTEGER_CST)
4263 constant = pos, var = integer_zero_node;
4265 *pbitpos += TREE_INT_CST_LOW (constant);
4266 offset = size_binop (PLUS_EXPR, offset,
4267 size_binop (EXACT_DIV_EXPR, var,
4268 size_int (BITS_PER_UNIT)));
4271 else if (TREE_CODE (exp) == ARRAY_REF)
4273 /* This code is based on the code in case ARRAY_REF in expand_expr
4274 below. We assume here that the size of an array element is
4275 always an integral multiple of BITS_PER_UNIT. */
4277 tree index = TREE_OPERAND (exp, 1);
4278 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4280 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4281 tree index_type = TREE_TYPE (index);
4283 if (! integer_zerop (low_bound))
4284 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
4286 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4288 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4290 index_type = TREE_TYPE (index);
4293 index = fold (build (MULT_EXPR, index_type, index,
4294 TYPE_SIZE (TREE_TYPE (exp))));
4296 if (TREE_CODE (index) == INTEGER_CST
4297 && TREE_INT_CST_HIGH (index) == 0)
4298 *pbitpos += TREE_INT_CST_LOW (index);
4300 offset = size_binop (PLUS_EXPR, offset,
4301 size_binop (FLOOR_DIV_EXPR, index,
4302 size_int (BITS_PER_UNIT)));
4304 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4305 && ! ((TREE_CODE (exp) == NOP_EXPR
4306 || TREE_CODE (exp) == CONVERT_EXPR)
4307 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4308 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4310 && (TYPE_MODE (TREE_TYPE (exp))
4311 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4314 /* If any reference in the chain is volatile, the effect is volatile. */
4315 if (TREE_THIS_VOLATILE (exp))
4317 exp = TREE_OPERAND (exp, 0);
4320 if (integer_zerop (offset))
4323 if (offset != 0 && contains_placeholder_p (offset))
4324 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4331 /* Given an rtx VALUE that may contain additions and multiplications,
4332 return an equivalent value that just refers to a register or memory.
4333 This is done by generating instructions to perform the arithmetic
4334 and returning a pseudo-register containing the value.
4336 The returned value may be a REG, SUBREG, MEM or constant. */
4339 force_operand (value, target)
4342 register optab binoptab = 0;
4343 /* Use a temporary to force order of execution of calls to
4347 /* Use subtarget as the target for operand 0 of a binary operation. */
4348 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4350 if (GET_CODE (value) == PLUS)
4351 binoptab = add_optab;
4352 else if (GET_CODE (value) == MINUS)
4353 binoptab = sub_optab;
4354 else if (GET_CODE (value) == MULT)
4356 op2 = XEXP (value, 1);
4357 if (!CONSTANT_P (op2)
4358 && !(GET_CODE (op2) == REG && op2 != subtarget))
4360 tmp = force_operand (XEXP (value, 0), subtarget);
4361 return expand_mult (GET_MODE (value), tmp,
4362 force_operand (op2, NULL_RTX),
4368 op2 = XEXP (value, 1);
4369 if (!CONSTANT_P (op2)
4370 && !(GET_CODE (op2) == REG && op2 != subtarget))
4372 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4374 binoptab = add_optab;
4375 op2 = negate_rtx (GET_MODE (value), op2);
4378 /* Check for an addition with OP2 a constant integer and our first
4379 operand a PLUS of a virtual register and something else. In that
4380 case, we want to emit the sum of the virtual register and the
4381 constant first and then add the other value. This allows virtual
4382 register instantiation to simply modify the constant rather than
4383 creating another one around this addition. */
4384 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4385 && GET_CODE (XEXP (value, 0)) == PLUS
4386 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4387 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4388 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4390 rtx temp = expand_binop (GET_MODE (value), binoptab,
4391 XEXP (XEXP (value, 0), 0), op2,
4392 subtarget, 0, OPTAB_LIB_WIDEN);
4393 return expand_binop (GET_MODE (value), binoptab, temp,
4394 force_operand (XEXP (XEXP (value, 0), 1), 0),
4395 target, 0, OPTAB_LIB_WIDEN);
4398 tmp = force_operand (XEXP (value, 0), subtarget);
4399 return expand_binop (GET_MODE (value), binoptab, tmp,
4400 force_operand (op2, NULL_RTX),
4401 target, 0, OPTAB_LIB_WIDEN);
4402 /* We give UNSIGNEDP = 0 to expand_binop
4403 because the only operations we are expanding here are signed ones. */
4408 /* Subroutine of expand_expr:
4409 save the non-copied parts (LIST) of an expr (LHS), and return a list
4410 which can restore these values to their previous values,
4411 should something modify their storage. */
4414 save_noncopied_parts (lhs, list)
4421 for (tail = list; tail; tail = TREE_CHAIN (tail))
4422 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4423 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
4426 tree part = TREE_VALUE (tail);
4427 tree part_type = TREE_TYPE (part);
4428 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
4429 rtx target = assign_temp (part_type, 0, 1, 1);
4430 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
4431 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
4432 parts = tree_cons (to_be_saved,
4433 build (RTL_EXPR, part_type, NULL_TREE,
4436 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
4441 /* Subroutine of expand_expr:
4442 record the non-copied parts (LIST) of an expr (LHS), and return a list
4443 which specifies the initial values of these parts. */
4446 init_noncopied_parts (lhs, list)
4453 for (tail = list; tail; tail = TREE_CHAIN (tail))
4454 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4455 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
4458 tree part = TREE_VALUE (tail);
4459 tree part_type = TREE_TYPE (part);
4460 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
4461 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
4466 /* Subroutine of expand_expr: return nonzero iff there is no way that
4467 EXP can reference X, which is being modified. */
4470 safe_from_p (x, exp)
4478 /* If EXP has varying size, we MUST use a target since we currently
4479 have no way of allocating temporaries of variable size
4480 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
4481 So we assume here that something at a higher level has prevented a
4482 clash. This is somewhat bogus, but the best we can do. Only
4483 do this when X is BLKmode. */
4484 || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4485 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
4486 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
4487 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
4488 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
4490 && GET_MODE (x) == BLKmode))
4493 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
4494 find the underlying pseudo. */
4495 if (GET_CODE (x) == SUBREG)
4498 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4502 /* If X is a location in the outgoing argument area, it is always safe. */
4503 if (GET_CODE (x) == MEM
4504 && (XEXP (x, 0) == virtual_outgoing_args_rtx
4505 || (GET_CODE (XEXP (x, 0)) == PLUS
4506 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
4509 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4512 exp_rtl = DECL_RTL (exp);
4519 if (TREE_CODE (exp) == TREE_LIST)
4520 return ((TREE_VALUE (exp) == 0
4521 || safe_from_p (x, TREE_VALUE (exp)))
4522 && (TREE_CHAIN (exp) == 0
4523 || safe_from_p (x, TREE_CHAIN (exp))));
4528 return safe_from_p (x, TREE_OPERAND (exp, 0));
4532 return (safe_from_p (x, TREE_OPERAND (exp, 0))
4533 && safe_from_p (x, TREE_OPERAND (exp, 1)));
4537 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
4538 the expression. If it is set, we conflict iff we are that rtx or
4539 both are in memory. Otherwise, we check all operands of the
4540 expression recursively. */
4542 switch (TREE_CODE (exp))
4545 return (staticp (TREE_OPERAND (exp, 0))
4546 || safe_from_p (x, TREE_OPERAND (exp, 0)));
4549 if (GET_CODE (x) == MEM)
4554 exp_rtl = CALL_EXPR_RTL (exp);
4557 /* Assume that the call will clobber all hard registers and
4559 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4560 || GET_CODE (x) == MEM)
4567 /* If a sequence exists, we would have to scan every instruction
4568 in the sequence to see if it was safe. This is probably not
4570 if (RTL_EXPR_SEQUENCE (exp))
4573 exp_rtl = RTL_EXPR_RTL (exp);
4576 case WITH_CLEANUP_EXPR:
4577 exp_rtl = RTL_EXPR_RTL (exp);
4580 case CLEANUP_POINT_EXPR:
4581 return safe_from_p (x, TREE_OPERAND (exp, 0));
4584 exp_rtl = SAVE_EXPR_RTL (exp);
4588 /* The only operand we look at is operand 1. The rest aren't
4589 part of the expression. */
4590 return safe_from_p (x, TREE_OPERAND (exp, 1));
4592 case METHOD_CALL_EXPR:
4593 /* This takes a rtx argument, but shouldn't appear here. */
4597 /* If we have an rtx, we do not need to scan our operands. */
4601 nops = tree_code_length[(int) TREE_CODE (exp)];
4602 for (i = 0; i < nops; i++)
4603 if (TREE_OPERAND (exp, i) != 0
4604 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
4608 /* If we have an rtl, find any enclosed object. Then see if we conflict
4612 if (GET_CODE (exp_rtl) == SUBREG)
4614 exp_rtl = SUBREG_REG (exp_rtl);
4615 if (GET_CODE (exp_rtl) == REG
4616 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
4620 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
4621 are memory and EXP is not readonly. */
4622 return ! (rtx_equal_p (x, exp_rtl)
4623 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
4624 && ! TREE_READONLY (exp)));
4627 /* If we reach here, it is safe. */
4631 /* Subroutine of expand_expr: return nonzero iff EXP is an
4632 expression whose type is statically determinable. */
4638 if (TREE_CODE (exp) == PARM_DECL
4639 || TREE_CODE (exp) == VAR_DECL
4640 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
4641 || TREE_CODE (exp) == COMPONENT_REF
4642 || TREE_CODE (exp) == ARRAY_REF)
4647 /* Subroutine of expand_expr: return rtx if EXP is a
4648 variable or parameter; else return 0. */
4655 switch (TREE_CODE (exp))
4659 return DECL_RTL (exp);
4665 /* expand_expr: generate code for computing expression EXP.
4666 An rtx for the computed value is returned. The value is never null.
4667 In the case of a void EXP, const0_rtx is returned.
4669 The value may be stored in TARGET if TARGET is nonzero.
4670 TARGET is just a suggestion; callers must assume that
4671 the rtx returned may not be the same as TARGET.
4673 If TARGET is CONST0_RTX, it means that the value will be ignored.
4675 If TMODE is not VOIDmode, it suggests generating the
4676 result in mode TMODE. But this is done only when convenient.
4677 Otherwise, TMODE is ignored and the value generated in its natural mode.
4678 TMODE is just a suggestion; callers must assume that
4679 the rtx returned may not have mode TMODE.
4681 Note that TARGET may have neither TMODE nor MODE. In that case, it
4682 probably will not be used.
4684 If MODIFIER is EXPAND_SUM then when EXP is an addition
4685 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4686 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4687 products as above, or REG or MEM, or constant.
4688 Ordinarily in such cases we would output mul or add instructions
4689 and then return a pseudo reg containing the sum.
4691 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4692 it also marks a label as absolutely required (it can't be dead).
4693 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
4694 This is used for outputting expressions used in initializers.
4696 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4697 with a constant address even if that address is not normally legitimate.
4698 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
4701 expand_expr (exp, target, tmode, modifier)
4704 enum machine_mode tmode;
4705 enum expand_modifier modifier;
4707 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4708 This is static so it will be accessible to our recursive callees. */
4709 static tree placeholder_list = 0;
4710 register rtx op0, op1, temp;
4711 tree type = TREE_TYPE (exp);
4712 int unsignedp = TREE_UNSIGNED (type);
4713 register enum machine_mode mode = TYPE_MODE (type);
4714 register enum tree_code code = TREE_CODE (exp);
4716 /* Use subtarget as the target for operand 0 of a binary operation. */
4717 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4718 rtx original_target = target;
4719 /* Maybe defer this until sure not doing bytecode? */
4720 int ignore = (target == const0_rtx
4721 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4722 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4723 || code == COND_EXPR)
4724 && TREE_CODE (type) == VOID_TYPE));
4728 if (output_bytecode && modifier != EXPAND_INITIALIZER)
4730 bc_expand_expr (exp);
4734 /* Don't use hard regs as subtargets, because the combiner
4735 can only handle pseudo regs. */
4736 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4738 /* Avoid subtargets inside loops,
4739 since they hide some invariant expressions. */
4740 if (preserve_subexpressions_p ())
4743 /* If we are going to ignore this result, we need only do something
4744 if there is a side-effect somewhere in the expression. If there
4745 is, short-circuit the most common cases here. Note that we must
4746 not call expand_expr with anything but const0_rtx in case this
4747 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
4751 if (! TREE_SIDE_EFFECTS (exp))
4754 /* Ensure we reference a volatile object even if value is ignored. */
4755 if (TREE_THIS_VOLATILE (exp)
4756 && TREE_CODE (exp) != FUNCTION_DECL
4757 && mode != VOIDmode && mode != BLKmode)
4759 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
4760 if (GET_CODE (temp) == MEM)
4761 temp = copy_to_reg (temp);
4765 if (TREE_CODE_CLASS (code) == '1')
4766 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4767 VOIDmode, modifier);
4768 else if (TREE_CODE_CLASS (code) == '2'
4769 || TREE_CODE_CLASS (code) == '<')
4771 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4772 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
4775 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4776 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4777 /* If the second operand has no side effects, just evaluate
4779 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4780 VOIDmode, modifier);
4785 /* If will do cse, generate all results into pseudo registers
4786 since 1) that allows cse to find more things
4787 and 2) otherwise cse could produce an insn the machine
4790 if (! cse_not_expected && mode != BLKmode && target
4791 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4798 tree function = decl_function_context (exp);
4799 /* Handle using a label in a containing function. */
4800 if (function != current_function_decl && function != 0)
4802 struct function *p = find_function_data (function);
4803 /* Allocate in the memory associated with the function
4804 that the label is in. */
4805 push_obstacks (p->function_obstack,
4806 p->function_maybepermanent_obstack);
4808 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4809 label_rtx (exp), p->forced_labels);
4812 else if (modifier == EXPAND_INITIALIZER)
4813 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4814 label_rtx (exp), forced_labels);
4815 temp = gen_rtx (MEM, FUNCTION_MODE,
4816 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
4817 if (function != current_function_decl && function != 0)
4818 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
4823 if (DECL_RTL (exp) == 0)
4825 error_with_decl (exp, "prior parameter's size depends on `%s'");
4826 return CONST0_RTX (mode);
4829 /* ... fall through ... */
4832 /* If a static var's type was incomplete when the decl was written,
4833 but the type is complete now, lay out the decl now. */
4834 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4835 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
4837 push_obstacks_nochange ();
4838 end_temporary_allocation ();
4839 layout_decl (exp, 0);
4840 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
4844 /* ... fall through ... */
4848 if (DECL_RTL (exp) == 0)
4851 /* Ensure variable marked as used even if it doesn't go through
4852 a parser. If it hasn't be used yet, write out an external
4854 if (! TREE_USED (exp))
4856 assemble_external (exp);
4857 TREE_USED (exp) = 1;
4860 /* Show we haven't gotten RTL for this yet. */
4863 /* Handle variables inherited from containing functions. */
4864 context = decl_function_context (exp);
4866 /* We treat inline_function_decl as an alias for the current function
4867 because that is the inline function whose vars, types, etc.
4868 are being merged into the current function.
4869 See expand_inline_function. */
4871 if (context != 0 && context != current_function_decl
4872 && context != inline_function_decl
4873 /* If var is static, we don't need a static chain to access it. */
4874 && ! (GET_CODE (DECL_RTL (exp)) == MEM
4875 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
4879 /* Mark as non-local and addressable. */
4880 DECL_NONLOCAL (exp) = 1;
4881 if (DECL_NO_STATIC_CHAIN (current_function_decl))
4883 mark_addressable (exp);
4884 if (GET_CODE (DECL_RTL (exp)) != MEM)
4886 addr = XEXP (DECL_RTL (exp), 0);
4887 if (GET_CODE (addr) == MEM)
4888 addr = gen_rtx (MEM, Pmode,
4889 fix_lexical_addr (XEXP (addr, 0), exp));
4891 addr = fix_lexical_addr (addr, exp);
4892 temp = change_address (DECL_RTL (exp), mode, addr);
4895 /* This is the case of an array whose size is to be determined
4896 from its initializer, while the initializer is still being parsed.
4899 else if (GET_CODE (DECL_RTL (exp)) == MEM
4900 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
4901 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
4902 XEXP (DECL_RTL (exp), 0));
4904 /* If DECL_RTL is memory, we are in the normal case and either
4905 the address is not valid or it is not a register and -fforce-addr
4906 is specified, get the address into a register. */
4908 else if (GET_CODE (DECL_RTL (exp)) == MEM
4909 && modifier != EXPAND_CONST_ADDRESS
4910 && modifier != EXPAND_SUM
4911 && modifier != EXPAND_INITIALIZER
4912 && (! memory_address_p (DECL_MODE (exp),
4913 XEXP (DECL_RTL (exp), 0))
4915 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
4916 temp = change_address (DECL_RTL (exp), VOIDmode,
4917 copy_rtx (XEXP (DECL_RTL (exp), 0)));
4919 /* If we got something, return it. But first, set the alignment
4920 the address is a register. */
4923 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
4924 mark_reg_pointer (XEXP (temp, 0),
4925 DECL_ALIGN (exp) / BITS_PER_UNIT);
4930 /* If the mode of DECL_RTL does not match that of the decl, it
4931 must be a promoted value. We return a SUBREG of the wanted mode,
4932 but mark it so that we know that it was already extended. */
4934 if (GET_CODE (DECL_RTL (exp)) == REG
4935 && GET_MODE (DECL_RTL (exp)) != mode)
4937 /* Get the signedness used for this variable. Ensure we get the
4938 same mode we got when the variable was declared. */
4939 if (GET_MODE (DECL_RTL (exp))
4940 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
4943 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
4944 SUBREG_PROMOTED_VAR_P (temp) = 1;
4945 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4949 return DECL_RTL (exp);
4952 return immed_double_const (TREE_INT_CST_LOW (exp),
4953 TREE_INT_CST_HIGH (exp),
4957 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
4960 /* If optimized, generate immediate CONST_DOUBLE
4961 which will be turned into memory by reload if necessary.
4963 We used to force a register so that loop.c could see it. But
4964 this does not allow gen_* patterns to perform optimizations with
4965 the constants. It also produces two insns in cases like "x = 1.0;".
4966 On most machines, floating-point constants are not permitted in
4967 many insns, so we'd end up copying it to a register in any case.
4969 Now, we do the copying in expand_binop, if appropriate. */
4970 return immed_real_const (exp);
4974 if (! TREE_CST_RTL (exp))
4975 output_constant_def (exp);
4977 /* TREE_CST_RTL probably contains a constant address.
4978 On RISC machines where a constant address isn't valid,
4979 make some insns to get that address into a register. */
4980 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
4981 && modifier != EXPAND_CONST_ADDRESS
4982 && modifier != EXPAND_INITIALIZER
4983 && modifier != EXPAND_SUM
4984 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
4986 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
4987 return change_address (TREE_CST_RTL (exp), VOIDmode,
4988 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
4989 return TREE_CST_RTL (exp);
4992 context = decl_function_context (exp);
4994 /* We treat inline_function_decl as an alias for the current function
4995 because that is the inline function whose vars, types, etc.
4996 are being merged into the current function.
4997 See expand_inline_function. */
4998 if (context == current_function_decl || context == inline_function_decl)
5001 /* If this is non-local, handle it. */
5004 temp = SAVE_EXPR_RTL (exp);
5005 if (temp && GET_CODE (temp) == REG)
5007 put_var_into_stack (exp);
5008 temp = SAVE_EXPR_RTL (exp);
5010 if (temp == 0 || GET_CODE (temp) != MEM)
5012 return change_address (temp, mode,
5013 fix_lexical_addr (XEXP (temp, 0), exp));
5015 if (SAVE_EXPR_RTL (exp) == 0)
5017 if (mode == VOIDmode)
5020 temp = assign_temp (type, 0, 0, 0);
5022 SAVE_EXPR_RTL (exp) = temp;
5023 if (!optimize && GET_CODE (temp) == REG)
5024 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
5027 /* If the mode of TEMP does not match that of the expression, it
5028 must be a promoted value. We pass store_expr a SUBREG of the
5029 wanted mode but mark it so that we know that it was already
5030 extended. Note that `unsignedp' was modified above in
5033 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5035 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
5036 SUBREG_PROMOTED_VAR_P (temp) = 1;
5037 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5040 if (temp == const0_rtx)
5041 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5043 store_expr (TREE_OPERAND (exp, 0), temp, 0);
5046 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5047 must be a promoted value. We return a SUBREG of the wanted mode,
5048 but mark it so that we know that it was already extended. */
5050 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5051 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5053 /* Compute the signedness and make the proper SUBREG. */
5054 promote_mode (type, mode, &unsignedp, 0);
5055 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
5056 SUBREG_PROMOTED_VAR_P (temp) = 1;
5057 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5061 return SAVE_EXPR_RTL (exp);
5066 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5067 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5071 case PLACEHOLDER_EXPR:
5072 /* If there is an object on the head of the placeholder list,
5073 see if some object in it's references is of type TYPE. For
5074 further information, see tree.def. */
5075 if (placeholder_list)
5078 tree old_list = placeholder_list;
5080 for (object = TREE_PURPOSE (placeholder_list);
5081 (TYPE_MAIN_VARIANT (TREE_TYPE (object))
5082 != TYPE_MAIN_VARIANT (type))
5083 && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
5084 || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
5085 || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
5086 || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
5087 object = TREE_OPERAND (object, 0))
5091 && (TYPE_MAIN_VARIANT (TREE_TYPE (object))
5092 == TYPE_MAIN_VARIANT (type)))
5094 /* Expand this object skipping the list entries before
5095 it was found in case it is also a PLACEHOLDER_EXPR.
5096 In that case, we want to translate it using subsequent
5098 placeholder_list = TREE_CHAIN (placeholder_list);
5099 temp = expand_expr (object, original_target, tmode, modifier);
5100 placeholder_list = old_list;
5105 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5108 case WITH_RECORD_EXPR:
5109 /* Put the object on the placeholder list, expand our first operand,
5110 and pop the list. */
5111 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5113 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
5115 placeholder_list = TREE_CHAIN (placeholder_list);
5119 expand_exit_loop_if_false (NULL_PTR,
5120 invert_truthvalue (TREE_OPERAND (exp, 0)));
5125 expand_start_loop (1);
5126 expand_expr_stmt (TREE_OPERAND (exp, 0));
5134 tree vars = TREE_OPERAND (exp, 0);
5135 int vars_need_expansion = 0;
5137 /* Need to open a binding contour here because
5138 if there are any cleanups they most be contained here. */
5139 expand_start_bindings (0);
5141 /* Mark the corresponding BLOCK for output in its proper place. */
5142 if (TREE_OPERAND (exp, 2) != 0
5143 && ! TREE_USED (TREE_OPERAND (exp, 2)))
5144 insert_block (TREE_OPERAND (exp, 2));
5146 /* If VARS have not yet been expanded, expand them now. */
5149 if (DECL_RTL (vars) == 0)
5151 vars_need_expansion = 1;
5154 expand_decl_init (vars);
5155 vars = TREE_CHAIN (vars);
5158 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
5160 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
5166 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
5168 emit_insns (RTL_EXPR_SEQUENCE (exp));
5169 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
5170 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
5171 free_temps_for_rtl_expr (exp);
5172 return RTL_EXPR_RTL (exp);
5175 /* If we don't need the result, just ensure we evaluate any
5180 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
5181 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
5185 /* All elts simple constants => refer to a constant in memory. But
5186 if this is a non-BLKmode mode, let it store a field at a time
5187 since that should make a CONST_INT or CONST_DOUBLE when we
5188 fold. Likewise, if we have a target we can use, it is best to
5189 store directly into the target unless the type is large enough
5190 that memcpy will be used. If we are making an initializer and
5191 all operands are constant, put it in memory as well. */
5192 else if ((TREE_STATIC (exp)
5193 && ((mode == BLKmode
5194 && ! (target != 0 && safe_from_p (target, exp)))
5195 || TREE_ADDRESSABLE (exp)
5196 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5197 && (move_by_pieces_ninsns
5198 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
5199 TYPE_ALIGN (type) / BITS_PER_UNIT)
5201 && ! mostly_zeros_p (exp))))
5202 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
5204 rtx constructor = output_constant_def (exp);
5205 if (modifier != EXPAND_CONST_ADDRESS
5206 && modifier != EXPAND_INITIALIZER
5207 && modifier != EXPAND_SUM
5208 && (! memory_address_p (GET_MODE (constructor),
5209 XEXP (constructor, 0))
5211 && GET_CODE (XEXP (constructor, 0)) != REG)))
5212 constructor = change_address (constructor, VOIDmode,
5213 XEXP (constructor, 0));
5219 if (target == 0 || ! safe_from_p (target, exp))
5221 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
5222 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5224 target = assign_temp (type, 0, 1, 1);
5227 if (TREE_READONLY (exp))
5229 if (GET_CODE (target) == MEM)
5230 target = change_address (target, GET_MODE (target),
5232 RTX_UNCHANGING_P (target) = 1;
5235 store_constructor (exp, target, 0);
5241 tree exp1 = TREE_OPERAND (exp, 0);
5244 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
5245 op0 = memory_address (mode, op0);
5247 temp = gen_rtx (MEM, mode, op0);
5248 /* If address was computed by addition,
5249 mark this as an element of an aggregate. */
5250 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5251 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
5252 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
5253 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
5254 || (TREE_CODE (exp1) == ADDR_EXPR
5255 && (exp2 = TREE_OPERAND (exp1, 0))
5256 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
5257 MEM_IN_STRUCT_P (temp) = 1;
5258 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
5260 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5261 here, because, in C and C++, the fact that a location is accessed
5262 through a pointer to const does not mean that the value there can
5263 never change. Languages where it can never change should
5264 also set TREE_STATIC. */
5265 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
5270 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
5274 tree array = TREE_OPERAND (exp, 0);
5275 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5276 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5277 tree index = TREE_OPERAND (exp, 1);
5278 tree index_type = TREE_TYPE (index);
5281 if (TREE_CODE (low_bound) != INTEGER_CST
5282 && contains_placeholder_p (low_bound))
5283 low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
5285 /* Optimize the special-case of a zero lower bound.
5287 We convert the low_bound to sizetype to avoid some problems
5288 with constant folding. (E.g. suppose the lower bound is 1,
5289 and its mode is QI. Without the conversion, (ARRAY
5290 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5291 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5293 But sizetype isn't quite right either (especially if
5294 the lowbound is negative). FIXME */
5296 if (! integer_zerop (low_bound))
5297 index = fold (build (MINUS_EXPR, index_type, index,
5298 convert (sizetype, low_bound)));
5300 if ((TREE_CODE (index) != INTEGER_CST
5301 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5302 && (! SLOW_UNALIGNED_ACCESS || ! get_inner_unaligned_p (exp)))
5304 /* Nonconstant array index or nonconstant element size, and
5305 not an array in an unaligned (packed) structure field.
5306 Generate the tree for *(&array+index) and expand that,
5307 except do it in a language-independent way
5308 and don't complain about non-lvalue arrays.
5309 `mark_addressable' should already have been called
5310 for any array for which this case will be reached. */
5312 /* Don't forget the const or volatile flag from the array
5314 tree variant_type = build_type_variant (type,
5315 TREE_READONLY (exp),
5316 TREE_THIS_VOLATILE (exp));
5317 tree array_adr = build1 (ADDR_EXPR,
5318 build_pointer_type (variant_type), array);
5320 tree size = size_in_bytes (type);
5322 /* Convert the integer argument to a type the same size as sizetype
5323 so the multiply won't overflow spuriously. */
5324 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
5325 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
5328 if (TREE_CODE (size) != INTEGER_CST
5329 && contains_placeholder_p (size))
5330 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
5332 /* Don't think the address has side effects
5333 just because the array does.
5334 (In some cases the address might have side effects,
5335 and we fail to record that fact here. However, it should not
5336 matter, since expand_expr should not care.) */
5337 TREE_SIDE_EFFECTS (array_adr) = 0;
5341 (INDIRECT_REF, type,
5342 fold (build (PLUS_EXPR,
5343 TYPE_POINTER_TO (variant_type),
5348 TYPE_POINTER_TO (variant_type),
5349 fold (build (MULT_EXPR, TREE_TYPE (index),
5351 convert (TREE_TYPE (index),
5354 /* Volatility, etc., of new expression is same as old
5356 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
5357 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
5358 TREE_READONLY (elt) = TREE_READONLY (exp);
5360 return expand_expr (elt, target, tmode, modifier);
5363 /* Fold an expression like: "foo"[2].
5364 This is not done in fold so it won't happen inside &.
5365 Don't fold if this is for wide characters since it's too
5366 difficult to do correctly and this is a very rare case. */
5368 if (TREE_CODE (array) == STRING_CST
5369 && TREE_CODE (index) == INTEGER_CST
5370 && !TREE_INT_CST_HIGH (index)
5371 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
5372 && GET_MODE_CLASS (mode) == MODE_INT
5373 && GET_MODE_SIZE (mode) == 1)
5374 return GEN_INT (TREE_STRING_POINTER (array)[i]);
5376 /* If this is a constant index into a constant array,
5377 just get the value from the array. Handle both the cases when
5378 we have an explicit constructor and when our operand is a variable
5379 that was declared const. */
5381 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
5383 if (TREE_CODE (index) == INTEGER_CST
5384 && TREE_INT_CST_HIGH (index) == 0)
5386 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
5388 i = TREE_INT_CST_LOW (index);
5390 elem = TREE_CHAIN (elem);
5392 return expand_expr (fold (TREE_VALUE (elem)), target,
5397 else if (optimize >= 1
5398 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
5399 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
5400 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
5402 if (TREE_CODE (index) == INTEGER_CST
5403 && TREE_INT_CST_HIGH (index) == 0)
5405 tree init = DECL_INITIAL (array);
5407 i = TREE_INT_CST_LOW (index);
5408 if (TREE_CODE (init) == CONSTRUCTOR)
5410 tree elem = CONSTRUCTOR_ELTS (init);
5413 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
5414 elem = TREE_CHAIN (elem);
5416 return expand_expr (fold (TREE_VALUE (elem)), target,
5419 else if (TREE_CODE (init) == STRING_CST
5420 && i < TREE_STRING_LENGTH (init))
5421 return GEN_INT (TREE_STRING_POINTER (init)[i]);
5426 /* Treat array-ref with constant index as a component-ref. */
5430 /* If the operand is a CONSTRUCTOR, we can just extract the
5431 appropriate field if it is present. Don't do this if we have
5432 already written the data since we want to refer to that copy
5433 and varasm.c assumes that's what we'll do. */
5434 if (code != ARRAY_REF
5435 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
5436 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
5440 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
5441 elt = TREE_CHAIN (elt))
5442 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
5443 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
5447 enum machine_mode mode1;
5452 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
5453 &mode1, &unsignedp, &volatilep);
5456 /* If we got back the original object, something is wrong. Perhaps
5457 we are evaluating an expression too early. In any event, don't
5458 infinitely recurse. */
5462 /* If TEM's type is a union of variable size, pass TARGET to the inner
5463 computation, since it will need a temporary and TARGET is known
5464 to have to do. This occurs in unchecked conversion in Ada. */
5466 op0 = expand_expr (tem,
5467 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
5468 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
5470 ? target : NULL_RTX),
5472 modifier == EXPAND_INITIALIZER ? modifier : 0);
5474 /* If this is a constant, put it into a register if it is a
5475 legitimate constant and memory if it isn't. */
5476 if (CONSTANT_P (op0))
5478 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
5479 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
5480 op0 = force_reg (mode, op0);
5482 op0 = validize_mem (force_const_mem (mode, op0));
5485 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
5488 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5490 if (GET_CODE (op0) != MEM)
5492 op0 = change_address (op0, VOIDmode,
5493 gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
5494 force_reg (ptr_mode, offset_rtx)));
5495 /* If we have a variable offset, the known alignment
5496 is only that of the innermost structure containing the field.
5497 (Actually, we could sometimes do better by using the
5498 size of an element of the innermost array, but no need.) */
5499 if (TREE_CODE (exp) == COMPONENT_REF
5500 || TREE_CODE (exp) == BIT_FIELD_REF)
5501 alignment = (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5505 /* Don't forget about volatility even if this is a bitfield. */
5506 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
5508 op0 = copy_rtx (op0);
5509 MEM_VOLATILE_P (op0) = 1;
5512 /* In cases where an aligned union has an unaligned object
5513 as a field, we might be extracting a BLKmode value from
5514 an integer-mode (e.g., SImode) object. Handle this case
5515 by doing the extract into an object as wide as the field
5516 (which we know to be the width of a basic mode), then
5517 storing into memory, and changing the mode to BLKmode.
5518 If we ultimately want the address (EXPAND_CONST_ADDRESS or
5519 EXPAND_INITIALIZER), then we must not copy to a temporary. */
5520 if (mode1 == VOIDmode
5521 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5522 || (modifier != EXPAND_CONST_ADDRESS
5523 && modifier != EXPAND_INITIALIZER
5524 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
5525 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5526 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5527 /* If the field isn't aligned enough to fetch as a memref,
5528 fetch it as a bit field. */
5529 || (SLOW_UNALIGNED_ACCESS
5530 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
5531 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
5533 enum machine_mode ext_mode = mode;
5535 if (ext_mode == BLKmode)
5536 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
5538 if (ext_mode == BLKmode)
5540 /* In this case, BITPOS must start at a byte boundary and
5541 TARGET, if specified, must be a MEM. */
5542 if (GET_CODE (op0) != MEM
5543 || (target != 0 && GET_CODE (target) != MEM)
5544 || bitpos % BITS_PER_UNIT != 0)
5547 op0 = change_address (op0, VOIDmode,
5548 plus_constant (XEXP (op0, 0),
5549 bitpos / BITS_PER_UNIT));
5551 target = assign_temp (type, 0, 1, 1);
5553 emit_block_move (target, op0,
5554 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5561 op0 = validize_mem (op0);
5563 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
5564 mark_reg_pointer (XEXP (op0, 0), alignment);
5566 op0 = extract_bit_field (op0, bitsize, bitpos,
5567 unsignedp, target, ext_mode, ext_mode,
5569 int_size_in_bytes (TREE_TYPE (tem)));
5571 /* If the result is a record type and BITSIZE is narrower than
5572 the mode of OP0, an integral mode, and this is a big endian
5573 machine, we must put the field into the high-order bits. */
5574 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
5575 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
5576 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
5577 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
5578 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
5582 if (mode == BLKmode)
5584 rtx new = assign_stack_temp (ext_mode,
5585 bitsize / BITS_PER_UNIT, 0);
5587 emit_move_insn (new, op0);
5588 op0 = copy_rtx (new);
5589 PUT_MODE (op0, BLKmode);
5590 MEM_IN_STRUCT_P (op0) = 1;
5596 /* If the result is BLKmode, use that to access the object
5598 if (mode == BLKmode)
5601 /* Get a reference to just this component. */
5602 if (modifier == EXPAND_CONST_ADDRESS
5603 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5604 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
5605 (bitpos / BITS_PER_UNIT)));
5607 op0 = change_address (op0, mode1,
5608 plus_constant (XEXP (op0, 0),
5609 (bitpos / BITS_PER_UNIT)));
5610 if (GET_CODE (XEXP (op0, 0)) == REG)
5611 mark_reg_pointer (XEXP (op0, 0), alignment);
5613 MEM_IN_STRUCT_P (op0) = 1;
5614 MEM_VOLATILE_P (op0) |= volatilep;
5615 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
5618 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5619 convert_move (target, op0, unsignedp);
5623 /* Intended for a reference to a buffer of a file-object in Pascal.
5624 But it's not certain that a special tree code will really be
5625 necessary for these. INDIRECT_REF might work for them. */
5631 /* Pascal set IN expression.
5634 rlo = set_low - (set_low%bits_per_word);
5635 the_word = set [ (index - rlo)/bits_per_word ];
5636 bit_index = index % bits_per_word;
5637 bitmask = 1 << bit_index;
5638 return !!(the_word & bitmask); */
5640 tree set = TREE_OPERAND (exp, 0);
5641 tree index = TREE_OPERAND (exp, 1);
5642 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
5643 tree set_type = TREE_TYPE (set);
5644 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
5645 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
5646 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
5647 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
5648 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
5649 rtx setval = expand_expr (set, 0, VOIDmode, 0);
5650 rtx setaddr = XEXP (setval, 0);
5651 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
5653 rtx diff, quo, rem, addr, bit, result;
5655 preexpand_calls (exp);
5657 /* If domain is empty, answer is no. Likewise if index is constant
5658 and out of bounds. */
5659 if ((TREE_CODE (set_high_bound) == INTEGER_CST
5660 && TREE_CODE (set_low_bound) == INTEGER_CST
5661 && tree_int_cst_lt (set_high_bound, set_low_bound)
5662 || (TREE_CODE (index) == INTEGER_CST
5663 && TREE_CODE (set_low_bound) == INTEGER_CST
5664 && tree_int_cst_lt (index, set_low_bound))
5665 || (TREE_CODE (set_high_bound) == INTEGER_CST
5666 && TREE_CODE (index) == INTEGER_CST
5667 && tree_int_cst_lt (set_high_bound, index))))
5671 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5673 /* If we get here, we have to generate the code for both cases
5674 (in range and out of range). */
5676 op0 = gen_label_rtx ();
5677 op1 = gen_label_rtx ();
5679 if (! (GET_CODE (index_val) == CONST_INT
5680 && GET_CODE (lo_r) == CONST_INT))
5682 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
5683 GET_MODE (index_val), iunsignedp, 0);
5684 emit_jump_insn (gen_blt (op1));
5687 if (! (GET_CODE (index_val) == CONST_INT
5688 && GET_CODE (hi_r) == CONST_INT))
5690 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
5691 GET_MODE (index_val), iunsignedp, 0);
5692 emit_jump_insn (gen_bgt (op1));
5695 /* Calculate the element number of bit zero in the first word
5697 if (GET_CODE (lo_r) == CONST_INT)
5698 rlow = GEN_INT (INTVAL (lo_r)
5699 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
5701 rlow = expand_binop (index_mode, and_optab, lo_r,
5702 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
5703 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5705 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
5706 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5708 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
5709 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5710 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
5711 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5713 addr = memory_address (byte_mode,
5714 expand_binop (index_mode, add_optab, diff,
5715 setaddr, NULL_RTX, iunsignedp,
5718 /* Extract the bit we want to examine */
5719 bit = expand_shift (RSHIFT_EXPR, byte_mode,
5720 gen_rtx (MEM, byte_mode, addr),
5721 make_tree (TREE_TYPE (index), rem),
5723 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
5724 GET_MODE (target) == byte_mode ? target : 0,
5725 1, OPTAB_LIB_WIDEN);
5727 if (result != target)
5728 convert_move (target, result, 1);
5730 /* Output the code to handle the out-of-range case. */
5733 emit_move_insn (target, const0_rtx);
5738 case WITH_CLEANUP_EXPR:
5739 if (RTL_EXPR_RTL (exp) == 0)
5742 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5744 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
5745 /* That's it for this cleanup. */
5746 TREE_OPERAND (exp, 2) = 0;
5747 expand_eh_region_start ();
5749 return RTL_EXPR_RTL (exp);
5751 case CLEANUP_POINT_EXPR:
5753 extern int temp_slot_level;
5754 tree old_cleanups = cleanups_this_call;
5755 int old_temp_level = target_temp_slot_level;
5757 target_temp_slot_level = temp_slot_level;
5758 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5759 /* If we're going to use this value, load it up now. */
5761 op0 = force_not_mem (op0);
5762 expand_cleanups_to (old_cleanups);
5763 preserve_temp_slots (op0);
5766 target_temp_slot_level = old_temp_level;
5771 /* Check for a built-in function. */
5772 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5773 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5775 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5776 return expand_builtin (exp, target, subtarget, tmode, ignore);
5778 /* If this call was expanded already by preexpand_calls,
5779 just return the result we got. */
5780 if (CALL_EXPR_RTL (exp) != 0)
5781 return CALL_EXPR_RTL (exp);
5783 return expand_call (exp, target, ignore);
5785 case NON_LVALUE_EXPR:
5788 case REFERENCE_EXPR:
5789 if (TREE_CODE (type) == UNION_TYPE)
5791 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
5794 if (mode != BLKmode)
5795 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5797 target = assign_temp (type, 0, 1, 1);
5800 if (GET_CODE (target) == MEM)
5801 /* Store data into beginning of memory target. */
5802 store_expr (TREE_OPERAND (exp, 0),
5803 change_address (target, TYPE_MODE (valtype), 0), 0);
5805 else if (GET_CODE (target) == REG)
5806 /* Store this field into a union of the proper type. */
5807 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
5808 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
5810 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
5814 /* Return the entire union. */
5818 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5820 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
5823 /* If the signedness of the conversion differs and OP0 is
5824 a promoted SUBREG, clear that indication since we now
5825 have to do the proper extension. */
5826 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
5827 && GET_CODE (op0) == SUBREG)
5828 SUBREG_PROMOTED_VAR_P (op0) = 0;
5833 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
5834 if (GET_MODE (op0) == mode)
5837 /* If OP0 is a constant, just convert it into the proper mode. */
5838 if (CONSTANT_P (op0))
5840 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5841 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5843 if (modifier == EXPAND_INITIALIZER)
5844 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
5848 convert_to_mode (mode, op0,
5849 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5851 convert_move (target, op0,
5852 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5856 /* We come here from MINUS_EXPR when the second operand is a
5859 this_optab = add_optab;
5861 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
5862 something else, make sure we add the register to the constant and
5863 then to the other thing. This case can occur during strength
5864 reduction and doing it this way will produce better code if the
5865 frame pointer or argument pointer is eliminated.
5867 fold-const.c will ensure that the constant is always in the inner
5868 PLUS_EXPR, so the only case we need to do anything about is if
5869 sp, ap, or fp is our second argument, in which case we must swap
5870 the innermost first argument and our second argument. */
5872 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5873 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
5874 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
5875 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
5876 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
5877 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
5879 tree t = TREE_OPERAND (exp, 1);
5881 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5882 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
5885 /* If the result is to be ptr_mode and we are adding an integer to
5886 something, we might be forming a constant. So try to use
5887 plus_constant. If it produces a sum and we can't accept it,
5888 use force_operand. This allows P = &ARR[const] to generate
5889 efficient code on machines where a SYMBOL_REF is not a valid
5892 If this is an EXPAND_SUM call, always return the sum. */
5893 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
5894 || mode == ptr_mode)
5896 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
5897 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5898 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
5900 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
5902 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
5903 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5904 op1 = force_operand (op1, target);
5908 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5909 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
5910 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
5912 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
5914 if (! CONSTANT_P (op0))
5916 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5917 VOIDmode, modifier);
5918 /* Don't go to both_summands if modifier
5919 says it's not right to return a PLUS. */
5920 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5924 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
5925 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5926 op0 = force_operand (op0, target);
5931 /* No sense saving up arithmetic to be done
5932 if it's all in the wrong mode to form part of an address.
5933 And force_operand won't know whether to sign-extend or
5935 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5936 || mode != ptr_mode)
5939 preexpand_calls (exp);
5940 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5943 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
5944 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
5947 /* Make sure any term that's a sum with a constant comes last. */
5948 if (GET_CODE (op0) == PLUS
5949 && CONSTANT_P (XEXP (op0, 1)))
5955 /* If adding to a sum including a constant,
5956 associate it to put the constant outside. */
5957 if (GET_CODE (op1) == PLUS
5958 && CONSTANT_P (XEXP (op1, 1)))
5960 rtx constant_term = const0_rtx;
5962 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
5965 /* Ensure that MULT comes first if there is one. */
5966 else if (GET_CODE (op0) == MULT)
5967 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
5969 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
5971 /* Let's also eliminate constants from op0 if possible. */
5972 op0 = eliminate_constant_term (op0, &constant_term);
5974 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
5975 their sum should be a constant. Form it into OP1, since the
5976 result we want will then be OP0 + OP1. */
5978 temp = simplify_binary_operation (PLUS, mode, constant_term,
5983 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
5986 /* Put a constant term last and put a multiplication first. */
5987 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
5988 temp = op1, op1 = op0, op0 = temp;
5990 temp = simplify_binary_operation (PLUS, mode, op0, op1);
5991 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
5994 /* For initializers, we are allowed to return a MINUS of two
5995 symbolic constants. Here we handle all cases when both operands
5997 /* Handle difference of two symbolic constants,
5998 for the sake of an initializer. */
5999 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6000 && really_constant_p (TREE_OPERAND (exp, 0))
6001 && really_constant_p (TREE_OPERAND (exp, 1)))
6003 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
6004 VOIDmode, modifier);
6005 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6006 VOIDmode, modifier);
6008 /* If the last operand is a CONST_INT, use plus_constant of
6009 the negated constant. Else make the MINUS. */
6010 if (GET_CODE (op1) == CONST_INT)
6011 return plus_constant (op0, - INTVAL (op1));
6013 return gen_rtx (MINUS, mode, op0, op1);
6015 /* Convert A - const to A + (-const). */
6016 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6018 tree negated = fold (build1 (NEGATE_EXPR, type,
6019 TREE_OPERAND (exp, 1)));
6021 /* Deal with the case where we can't negate the constant
6023 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6025 tree newtype = signed_type (type);
6026 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
6027 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
6028 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
6030 if (! TREE_OVERFLOW (newneg))
6031 return expand_expr (convert (type,
6032 build (PLUS_EXPR, newtype,
6034 target, tmode, modifier);
6038 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
6042 this_optab = sub_optab;
6046 preexpand_calls (exp);
6047 /* If first operand is constant, swap them.
6048 Thus the following special case checks need only
6049 check the second operand. */
6050 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6052 register tree t1 = TREE_OPERAND (exp, 0);
6053 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
6054 TREE_OPERAND (exp, 1) = t1;
6057 /* Attempt to return something suitable for generating an
6058 indexed address, for machines that support that. */
6060 if (modifier == EXPAND_SUM && mode == ptr_mode
6061 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6062 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
6064 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
6066 /* Apply distributive law if OP0 is x+c. */
6067 if (GET_CODE (op0) == PLUS
6068 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
6069 return gen_rtx (PLUS, mode,
6070 gen_rtx (MULT, mode, XEXP (op0, 0),
6071 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
6072 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
6073 * INTVAL (XEXP (op0, 1))));
6075 if (GET_CODE (op0) != REG)
6076 op0 = force_operand (op0, NULL_RTX);
6077 if (GET_CODE (op0) != REG)
6078 op0 = copy_to_mode_reg (mode, op0);
6080 return gen_rtx (MULT, mode, op0,
6081 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
6084 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6087 /* Check for multiplying things that have been extended
6088 from a narrower type. If this machine supports multiplying
6089 in that narrower type with a result in the desired type,
6090 do it that way, and avoid the explicit type-conversion. */
6091 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
6092 && TREE_CODE (type) == INTEGER_TYPE
6093 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6094 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
6095 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6096 && int_fits_type_p (TREE_OPERAND (exp, 1),
6097 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6098 /* Don't use a widening multiply if a shift will do. */
6099 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
6100 > HOST_BITS_PER_WIDE_INT)
6101 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
6103 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6104 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6106 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
6107 /* If both operands are extended, they must either both
6108 be zero-extended or both be sign-extended. */
6109 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6111 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
6113 enum machine_mode innermode
6114 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
6115 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6116 ? smul_widen_optab : umul_widen_optab);
6117 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6118 ? umul_widen_optab : smul_widen_optab);
6119 if (mode == GET_MODE_WIDER_MODE (innermode))
6121 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
6123 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6124 NULL_RTX, VOIDmode, 0);
6125 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6126 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6129 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6130 NULL_RTX, VOIDmode, 0);
6133 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6134 && innermode == word_mode)
6137 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6138 NULL_RTX, VOIDmode, 0);
6139 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6140 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6143 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6144 NULL_RTX, VOIDmode, 0);
6145 temp = expand_binop (mode, other_optab, op0, op1, target,
6146 unsignedp, OPTAB_LIB_WIDEN);
6147 htem = expand_mult_highpart_adjust (innermode,
6148 gen_highpart (innermode, temp),
6150 gen_highpart (innermode, temp),
6152 emit_move_insn (gen_highpart (innermode, temp), htem);
6157 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6158 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6159 return expand_mult (mode, op0, op1, target, unsignedp);
6161 case TRUNC_DIV_EXPR:
6162 case FLOOR_DIV_EXPR:
6164 case ROUND_DIV_EXPR:
6165 case EXACT_DIV_EXPR:
6166 preexpand_calls (exp);
6167 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6169 /* Possible optimization: compute the dividend with EXPAND_SUM
6170 then if the divisor is constant can optimize the case
6171 where some terms of the dividend have coeffs divisible by it. */
6172 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6173 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6174 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
6177 this_optab = flodiv_optab;
6180 case TRUNC_MOD_EXPR:
6181 case FLOOR_MOD_EXPR:
6183 case ROUND_MOD_EXPR:
6184 preexpand_calls (exp);
6185 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6187 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6188 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6189 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
6191 case FIX_ROUND_EXPR:
6192 case FIX_FLOOR_EXPR:
6194 abort (); /* Not used for C. */
6196 case FIX_TRUNC_EXPR:
6197 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6199 target = gen_reg_rtx (mode);
6200 expand_fix (target, op0, unsignedp);
6204 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6206 target = gen_reg_rtx (mode);
6207 /* expand_float can't figure out what to do if FROM has VOIDmode.
6208 So give it the correct mode. With -O, cse will optimize this. */
6209 if (GET_MODE (op0) == VOIDmode)
6210 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6212 expand_float (target, op0,
6213 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6217 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6218 temp = expand_unop (mode, neg_optab, op0, target, 0);
6224 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6226 /* Handle complex values specially. */
6227 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
6228 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6229 return expand_complex_abs (mode, op0, target, unsignedp);
6231 /* Unsigned abs is simply the operand. Testing here means we don't
6232 risk generating incorrect code below. */
6233 if (TREE_UNSIGNED (type))
6236 return expand_abs (mode, op0, target, unsignedp,
6237 safe_from_p (target, TREE_OPERAND (exp, 0)));
6241 target = original_target;
6242 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
6243 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
6244 || GET_MODE (target) != mode
6245 || (GET_CODE (target) == REG
6246 && REGNO (target) < FIRST_PSEUDO_REGISTER))
6247 target = gen_reg_rtx (mode);
6248 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6249 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6251 /* First try to do it with a special MIN or MAX instruction.
6252 If that does not win, use a conditional jump to select the proper
6254 this_optab = (TREE_UNSIGNED (type)
6255 ? (code == MIN_EXPR ? umin_optab : umax_optab)
6256 : (code == MIN_EXPR ? smin_optab : smax_optab));
6258 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
6263 /* At this point, a MEM target is no longer useful; we will get better
6266 if (GET_CODE (target) == MEM)
6267 target = gen_reg_rtx (mode);
6270 emit_move_insn (target, op0);
6272 op0 = gen_label_rtx ();
6274 /* If this mode is an integer too wide to compare properly,
6275 compare word by word. Rely on cse to optimize constant cases. */
6276 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
6278 if (code == MAX_EXPR)
6279 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6280 target, op1, NULL_RTX, op0);
6282 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6283 op1, target, NULL_RTX, op0);
6284 emit_move_insn (target, op1);
6288 if (code == MAX_EXPR)
6289 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6290 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
6291 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
6293 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6294 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
6295 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
6296 if (temp == const0_rtx)
6297 emit_move_insn (target, op1);
6298 else if (temp != const_true_rtx)
6300 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
6301 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
6304 emit_move_insn (target, op1);
6311 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6312 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
6318 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6319 temp = expand_unop (mode, ffs_optab, op0, target, 1);
6324 /* ??? Can optimize bitwise operations with one arg constant.
6325 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6326 and (a bitwise1 b) bitwise2 b (etc)
6327 but that is probably not worth while. */
6329 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6330 boolean values when we want in all cases to compute both of them. In
6331 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6332 as actual zero-or-1 values and then bitwise anding. In cases where
6333 there cannot be any side effects, better code would be made by
6334 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6335 how to recognize those cases. */
6337 case TRUTH_AND_EXPR:
6339 this_optab = and_optab;
6344 this_optab = ior_optab;
6347 case TRUTH_XOR_EXPR:
6349 this_optab = xor_optab;
6356 preexpand_calls (exp);
6357 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6359 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6360 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
6363 /* Could determine the answer when only additive constants differ. Also,
6364 the addition of one can be handled by changing the condition. */
6371 preexpand_calls (exp);
6372 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
6376 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
6377 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
6379 && GET_CODE (original_target) == REG
6380 && (GET_MODE (original_target)
6381 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6383 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
6386 if (temp != original_target)
6387 temp = copy_to_reg (temp);
6389 op1 = gen_label_rtx ();
6390 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
6391 GET_MODE (temp), unsignedp, 0);
6392 emit_jump_insn (gen_beq (op1));
6393 emit_move_insn (temp, const1_rtx);
6398 /* If no set-flag instruction, must generate a conditional
6399 store into a temporary variable. Drop through
6400 and handle this like && and ||. */
6402 case TRUTH_ANDIF_EXPR:
6403 case TRUTH_ORIF_EXPR:
6405 && (target == 0 || ! safe_from_p (target, exp)
6406 /* Make sure we don't have a hard reg (such as function's return
6407 value) live across basic blocks, if not optimizing. */
6408 || (!optimize && GET_CODE (target) == REG
6409 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
6410 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6413 emit_clr_insn (target);
6415 op1 = gen_label_rtx ();
6416 jumpifnot (exp, op1);
6419 emit_0_to_1_insn (target);
6422 return ignore ? const0_rtx : target;
6424 case TRUTH_NOT_EXPR:
6425 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6426 /* The parser is careful to generate TRUTH_NOT_EXPR
6427 only with operands that are always zero or one. */
6428 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
6429 target, 1, OPTAB_LIB_WIDEN);
6435 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6437 return expand_expr (TREE_OPERAND (exp, 1),
6438 (ignore ? const0_rtx : target),
6443 rtx flag = NULL_RTX;
6444 tree left_cleanups = NULL_TREE;
6445 tree right_cleanups = NULL_TREE;
6447 /* Used to save a pointer to the place to put the setting of
6448 the flag that indicates if this side of the conditional was
6449 taken. We backpatch the code, if we find out later that we
6450 have any conditional cleanups that need to be performed. */
6451 rtx dest_right_flag = NULL_RTX;
6452 rtx dest_left_flag = NULL_RTX;
6454 /* Note that COND_EXPRs whose type is a structure or union
6455 are required to be constructed to contain assignments of
6456 a temporary variable, so that we can evaluate them here
6457 for side effect only. If type is void, we must do likewise. */
6459 /* If an arm of the branch requires a cleanup,
6460 only that cleanup is performed. */
6463 tree binary_op = 0, unary_op = 0;
6464 tree old_cleanups = cleanups_this_call;
6466 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
6467 convert it to our mode, if necessary. */
6468 if (integer_onep (TREE_OPERAND (exp, 1))
6469 && integer_zerop (TREE_OPERAND (exp, 2))
6470 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6474 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6479 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
6480 if (GET_MODE (op0) == mode)
6484 target = gen_reg_rtx (mode);
6485 convert_move (target, op0, unsignedp);
6489 /* Check for X ? A + B : A. If we have this, we can copy
6490 A to the output and conditionally add B. Similarly for unary
6491 operations. Don't do this if X has side-effects because
6492 those side effects might affect A or B and the "?" operation is
6493 a sequence point in ANSI. (We test for side effects later.) */
6495 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
6496 && operand_equal_p (TREE_OPERAND (exp, 2),
6497 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6498 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
6499 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
6500 && operand_equal_p (TREE_OPERAND (exp, 1),
6501 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6502 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
6503 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
6504 && operand_equal_p (TREE_OPERAND (exp, 2),
6505 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6506 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
6507 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
6508 && operand_equal_p (TREE_OPERAND (exp, 1),
6509 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6510 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
6512 /* If we are not to produce a result, we have no target. Otherwise,
6513 if a target was specified use it; it will not be used as an
6514 intermediate target unless it is safe. If no target, use a
6519 else if (original_target
6520 && (safe_from_p (original_target, TREE_OPERAND (exp, 0))
6521 || (singleton && GET_CODE (original_target) == REG
6522 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
6523 && original_target == var_rtx (singleton)))
6524 && GET_MODE (original_target) == mode
6525 && ! (GET_CODE (original_target) == MEM
6526 && MEM_VOLATILE_P (original_target)))
6527 temp = original_target;
6528 else if (TREE_ADDRESSABLE (type))
6531 temp = assign_temp (type, 0, 0, 1);
6533 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
6534 operation, do this as A + (X != 0). Similarly for other simple
6535 binary operators. */
6536 if (temp && singleton && binary_op
6537 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6538 && (TREE_CODE (binary_op) == PLUS_EXPR
6539 || TREE_CODE (binary_op) == MINUS_EXPR
6540 || TREE_CODE (binary_op) == BIT_IOR_EXPR
6541 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
6542 && integer_onep (TREE_OPERAND (binary_op, 1))
6543 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6546 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
6547 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
6548 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
6551 /* If we had X ? A : A + 1, do this as A + (X == 0).
6553 We have to invert the truth value here and then put it
6554 back later if do_store_flag fails. We cannot simply copy
6555 TREE_OPERAND (exp, 0) to another variable and modify that
6556 because invert_truthvalue can modify the tree pointed to
6558 if (singleton == TREE_OPERAND (exp, 1))
6559 TREE_OPERAND (exp, 0)
6560 = invert_truthvalue (TREE_OPERAND (exp, 0));
6562 result = do_store_flag (TREE_OPERAND (exp, 0),
6563 (safe_from_p (temp, singleton)
6565 mode, BRANCH_COST <= 1);
6569 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
6570 return expand_binop (mode, boptab, op1, result, temp,
6571 unsignedp, OPTAB_LIB_WIDEN);
6573 else if (singleton == TREE_OPERAND (exp, 1))
6574 TREE_OPERAND (exp, 0)
6575 = invert_truthvalue (TREE_OPERAND (exp, 0));
6578 do_pending_stack_adjust ();
6580 op0 = gen_label_rtx ();
6582 flag = gen_reg_rtx (word_mode);
6583 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
6587 /* If the target conflicts with the other operand of the
6588 binary op, we can't use it. Also, we can't use the target
6589 if it is a hard register, because evaluating the condition
6590 might clobber it. */
6592 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
6593 || (GET_CODE (temp) == REG
6594 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
6595 temp = gen_reg_rtx (mode);
6596 store_expr (singleton, temp, 0);
6599 expand_expr (singleton,
6600 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6601 dest_left_flag = get_last_insn ();
6602 if (singleton == TREE_OPERAND (exp, 1))
6603 jumpif (TREE_OPERAND (exp, 0), op0);
6605 jumpifnot (TREE_OPERAND (exp, 0), op0);
6607 /* Allows cleanups up to here. */
6608 old_cleanups = cleanups_this_call;
6609 if (binary_op && temp == 0)
6610 /* Just touch the other operand. */
6611 expand_expr (TREE_OPERAND (binary_op, 1),
6612 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6614 store_expr (build (TREE_CODE (binary_op), type,
6615 make_tree (type, temp),
6616 TREE_OPERAND (binary_op, 1)),
6619 store_expr (build1 (TREE_CODE (unary_op), type,
6620 make_tree (type, temp)),
6623 dest_right_flag = get_last_insn ();
6626 /* This is now done in jump.c and is better done there because it
6627 produces shorter register lifetimes. */
6629 /* Check for both possibilities either constants or variables
6630 in registers (but not the same as the target!). If so, can
6631 save branches by assigning one, branching, and assigning the
6633 else if (temp && GET_MODE (temp) != BLKmode
6634 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
6635 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
6636 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
6637 && DECL_RTL (TREE_OPERAND (exp, 1))
6638 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
6639 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
6640 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
6641 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
6642 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
6643 && DECL_RTL (TREE_OPERAND (exp, 2))
6644 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
6645 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
6647 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6648 temp = gen_reg_rtx (mode);
6649 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6650 dest_left_flag = get_last_insn ();
6651 jumpifnot (TREE_OPERAND (exp, 0), op0);
6653 /* Allows cleanups up to here. */
6654 old_cleanups = cleanups_this_call;
6655 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6657 dest_right_flag = get_last_insn ();
6660 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
6661 comparison operator. If we have one of these cases, set the
6662 output to A, branch on A (cse will merge these two references),
6663 then set the output to FOO. */
6665 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6666 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6667 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6668 TREE_OPERAND (exp, 1), 0)
6669 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6670 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
6672 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6673 temp = gen_reg_rtx (mode);
6674 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6675 dest_left_flag = get_last_insn ();
6676 jumpif (TREE_OPERAND (exp, 0), op0);
6678 /* Allows cleanups up to here. */
6679 old_cleanups = cleanups_this_call;
6680 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6682 dest_right_flag = get_last_insn ();
6685 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6686 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6687 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6688 TREE_OPERAND (exp, 2), 0)
6689 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6690 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
6692 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6693 temp = gen_reg_rtx (mode);
6694 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6695 dest_left_flag = get_last_insn ();
6696 jumpifnot (TREE_OPERAND (exp, 0), op0);
6698 /* Allows cleanups up to here. */
6699 old_cleanups = cleanups_this_call;
6700 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6702 dest_right_flag = get_last_insn ();
6706 op1 = gen_label_rtx ();
6707 jumpifnot (TREE_OPERAND (exp, 0), op0);
6709 /* Allows cleanups up to here. */
6710 old_cleanups = cleanups_this_call;
6712 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6714 expand_expr (TREE_OPERAND (exp, 1),
6715 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6716 dest_left_flag = get_last_insn ();
6718 /* Handle conditional cleanups, if any. */
6719 left_cleanups = defer_cleanups_to (old_cleanups);
6722 emit_jump_insn (gen_jump (op1));
6726 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6728 expand_expr (TREE_OPERAND (exp, 2),
6729 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6730 dest_right_flag = get_last_insn ();
6733 /* Handle conditional cleanups, if any. */
6734 right_cleanups = defer_cleanups_to (old_cleanups);
6740 /* Add back in, any conditional cleanups. */
6741 if (left_cleanups || right_cleanups)
6747 /* Now that we know that a flag is needed, go back and add in the
6748 setting of the flag. */
6750 /* Do the left side flag. */
6751 last = get_last_insn ();
6752 /* Flag left cleanups as needed. */
6753 emit_move_insn (flag, const1_rtx);
6754 /* ??? deprecated, use sequences instead. */
6755 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
6757 /* Do the right side flag. */
6758 last = get_last_insn ();
6759 /* Flag left cleanups as needed. */
6760 emit_move_insn (flag, const0_rtx);
6761 /* ??? deprecated, use sequences instead. */
6762 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
6764 /* All cleanups must be on the function_obstack. */
6765 push_obstacks_nochange ();
6766 resume_temporary_allocation ();
6768 /* convert flag, which is an rtx, into a tree. */
6769 cond = make_node (RTL_EXPR);
6770 TREE_TYPE (cond) = integer_type_node;
6771 RTL_EXPR_RTL (cond) = flag;
6772 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
6773 cond = save_expr (cond);
6775 if (! left_cleanups)
6776 left_cleanups = integer_zero_node;
6777 if (! right_cleanups)
6778 right_cleanups = integer_zero_node;
6779 new_cleanups = build (COND_EXPR, void_type_node,
6780 truthvalue_conversion (cond),
6781 left_cleanups, right_cleanups);
6782 new_cleanups = fold (new_cleanups);
6786 /* Now add in the conditionalized cleanups. */
6788 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
6789 expand_eh_region_start ();
6796 /* Something needs to be initialized, but we didn't know
6797 where that thing was when building the tree. For example,
6798 it could be the return value of a function, or a parameter
6799 to a function which lays down in the stack, or a temporary
6800 variable which must be passed by reference.
6802 We guarantee that the expression will either be constructed
6803 or copied into our original target. */
6805 tree slot = TREE_OPERAND (exp, 0);
6806 tree cleanups = NULL_TREE;
6810 if (TREE_CODE (slot) != VAR_DECL)
6814 target = original_target;
6818 if (DECL_RTL (slot) != 0)
6820 target = DECL_RTL (slot);
6821 /* If we have already expanded the slot, so don't do
6823 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6828 target = assign_temp (type, 2, 1, 1);
6829 /* All temp slots at this level must not conflict. */
6830 preserve_temp_slots (target);
6831 DECL_RTL (slot) = target;
6833 /* Since SLOT is not known to the called function
6834 to belong to its stack frame, we must build an explicit
6835 cleanup. This case occurs when we must build up a reference
6836 to pass the reference as an argument. In this case,
6837 it is very likely that such a reference need not be
6840 if (TREE_OPERAND (exp, 2) == 0)
6841 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
6842 cleanups = TREE_OPERAND (exp, 2);
6847 /* This case does occur, when expanding a parameter which
6848 needs to be constructed on the stack. The target
6849 is the actual stack address that we want to initialize.
6850 The function we call will perform the cleanup in this case. */
6852 /* If we have already assigned it space, use that space,
6853 not target that we were passed in, as our target
6854 parameter is only a hint. */
6855 if (DECL_RTL (slot) != 0)
6857 target = DECL_RTL (slot);
6858 /* If we have already expanded the slot, so don't do
6860 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6864 DECL_RTL (slot) = target;
6867 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
6868 /* Mark it as expanded. */
6869 TREE_OPERAND (exp, 1) = NULL_TREE;
6871 store_expr (exp1, target, 0);
6875 cleanups_this_call = tree_cons (NULL_TREE,
6877 cleanups_this_call);
6878 expand_eh_region_start ();
6886 tree lhs = TREE_OPERAND (exp, 0);
6887 tree rhs = TREE_OPERAND (exp, 1);
6888 tree noncopied_parts = 0;
6889 tree lhs_type = TREE_TYPE (lhs);
6891 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6892 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
6893 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
6894 TYPE_NONCOPIED_PARTS (lhs_type));
6895 while (noncopied_parts != 0)
6897 expand_assignment (TREE_VALUE (noncopied_parts),
6898 TREE_PURPOSE (noncopied_parts), 0, 0);
6899 noncopied_parts = TREE_CHAIN (noncopied_parts);
6906 /* If lhs is complex, expand calls in rhs before computing it.
6907 That's so we don't compute a pointer and save it over a call.
6908 If lhs is simple, compute it first so we can give it as a
6909 target if the rhs is just a call. This avoids an extra temp and copy
6910 and that prevents a partial-subsumption which makes bad code.
6911 Actually we could treat component_ref's of vars like vars. */
6913 tree lhs = TREE_OPERAND (exp, 0);
6914 tree rhs = TREE_OPERAND (exp, 1);
6915 tree noncopied_parts = 0;
6916 tree lhs_type = TREE_TYPE (lhs);
6920 if (TREE_CODE (lhs) != VAR_DECL
6921 && TREE_CODE (lhs) != RESULT_DECL
6922 && TREE_CODE (lhs) != PARM_DECL)
6923 preexpand_calls (exp);
6925 /* Check for |= or &= of a bitfield of size one into another bitfield
6926 of size 1. In this case, (unless we need the result of the
6927 assignment) we can do this more efficiently with a
6928 test followed by an assignment, if necessary.
6930 ??? At this point, we can't get a BIT_FIELD_REF here. But if
6931 things change so we do, this code should be enhanced to
6934 && TREE_CODE (lhs) == COMPONENT_REF
6935 && (TREE_CODE (rhs) == BIT_IOR_EXPR
6936 || TREE_CODE (rhs) == BIT_AND_EXPR)
6937 && TREE_OPERAND (rhs, 0) == lhs
6938 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
6939 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
6940 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
6942 rtx label = gen_label_rtx ();
6944 do_jump (TREE_OPERAND (rhs, 1),
6945 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
6946 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
6947 expand_assignment (lhs, convert (TREE_TYPE (rhs),
6948 (TREE_CODE (rhs) == BIT_IOR_EXPR
6950 : integer_zero_node)),
6952 do_pending_stack_adjust ();
6957 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
6958 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
6959 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
6960 TYPE_NONCOPIED_PARTS (lhs_type));
6962 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6963 while (noncopied_parts != 0)
6965 expand_assignment (TREE_PURPOSE (noncopied_parts),
6966 TREE_VALUE (noncopied_parts), 0, 0);
6967 noncopied_parts = TREE_CHAIN (noncopied_parts);
6972 case PREINCREMENT_EXPR:
6973 case PREDECREMENT_EXPR:
6974 return expand_increment (exp, 0, ignore);
6976 case POSTINCREMENT_EXPR:
6977 case POSTDECREMENT_EXPR:
6978 /* Faster to treat as pre-increment if result is not used. */
6979 return expand_increment (exp, ! ignore, ignore);
6982 /* If nonzero, TEMP will be set to the address of something that might
6983 be a MEM corresponding to a stack slot. */
6986 /* Are we taking the address of a nested function? */
6987 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
6988 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
6989 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0)))
6991 op0 = trampoline_address (TREE_OPERAND (exp, 0));
6992 op0 = force_operand (op0, target);
6994 /* If we are taking the address of something erroneous, just
6996 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7000 /* We make sure to pass const0_rtx down if we came in with
7001 ignore set, to avoid doing the cleanups twice for something. */
7002 op0 = expand_expr (TREE_OPERAND (exp, 0),
7003 ignore ? const0_rtx : NULL_RTX, VOIDmode,
7004 (modifier == EXPAND_INITIALIZER
7005 ? modifier : EXPAND_CONST_ADDRESS));
7007 /* If we are going to ignore the result, OP0 will have been set
7008 to const0_rtx, so just return it. Don't get confused and
7009 think we are taking the address of the constant. */
7013 op0 = protect_from_queue (op0, 0);
7015 /* We would like the object in memory. If it is a constant,
7016 we can have it be statically allocated into memory. For
7017 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
7018 memory and store the value into it. */
7020 if (CONSTANT_P (op0))
7021 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7023 else if (GET_CODE (op0) == MEM)
7025 mark_temp_addr_taken (op0);
7026 temp = XEXP (op0, 0);
7029 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7030 || GET_CODE (op0) == CONCAT)
7032 /* If this object is in a register, it must be not
7034 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7035 rtx memloc = assign_temp (inner_type, 1, 1, 1);
7037 mark_temp_addr_taken (memloc);
7038 emit_move_insn (memloc, op0);
7042 if (GET_CODE (op0) != MEM)
7045 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7047 temp = XEXP (op0, 0);
7048 #ifdef POINTERS_EXTEND_UNSIGNED
7049 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7050 && mode == ptr_mode)
7051 temp = convert_memory_address (ptr_mode, temp);
7056 op0 = force_operand (XEXP (op0, 0), target);
7059 if (flag_force_addr && GET_CODE (op0) != REG)
7060 op0 = force_reg (Pmode, op0);
7062 if (GET_CODE (op0) == REG
7063 && ! REG_USERVAR_P (op0))
7064 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
7066 /* If we might have had a temp slot, add an equivalent address
7069 update_temp_slot_address (temp, op0);
7071 #ifdef POINTERS_EXTEND_UNSIGNED
7072 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7073 && mode == ptr_mode)
7074 op0 = convert_memory_address (ptr_mode, op0);
7079 case ENTRY_VALUE_EXPR:
7082 /* COMPLEX type for Extended Pascal & Fortran */
7085 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7088 /* Get the rtx code of the operands. */
7089 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7090 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7093 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7097 /* Move the real (op0) and imaginary (op1) parts to their location. */
7098 emit_move_insn (gen_realpart (mode, target), op0);
7099 emit_move_insn (gen_imagpart (mode, target), op1);
7101 insns = get_insns ();
7104 /* Complex construction should appear as a single unit. */
7105 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7106 each with a separate pseudo as destination.
7107 It's not correct for flow to treat them as a unit. */
7108 if (GET_CODE (target) != CONCAT)
7109 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
7117 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7118 return gen_realpart (mode, op0);
7121 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7122 return gen_imagpart (mode, op0);
7126 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7130 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7133 target = gen_reg_rtx (mode);
7137 /* Store the realpart and the negated imagpart to target. */
7138 emit_move_insn (gen_realpart (partmode, target),
7139 gen_realpart (partmode, op0));
7141 imag_t = gen_imagpart (partmode, target);
7142 temp = expand_unop (partmode, neg_optab,
7143 gen_imagpart (partmode, op0), imag_t, 0);
7145 emit_move_insn (imag_t, temp);
7147 insns = get_insns ();
7150 /* Conjugate should appear as a single unit
7151 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
7152 each with a separate pseudo as destination.
7153 It's not correct for flow to treat them as a unit. */
7154 if (GET_CODE (target) != CONCAT)
7155 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
7163 op0 = CONST0_RTX (tmode);
7169 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7172 /* Here to do an ordinary binary operator, generating an instruction
7173 from the optab already placed in `this_optab'. */
7175 preexpand_calls (exp);
7176 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
7178 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7179 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7181 temp = expand_binop (mode, this_optab, op0, op1, target,
7182 unsignedp, OPTAB_LIB_WIDEN);
7189 /* Emit bytecode to evaluate the given expression EXP to the stack. */
7192 bc_expand_expr (exp)
7195 enum tree_code code;
7198 struct binary_operator *binoptab;
7199 struct unary_operator *unoptab;
7200 struct increment_operator *incroptab;
7201 struct bc_label *lab, *lab1;
7202 enum bytecode_opcode opcode;
7205 code = TREE_CODE (exp);
7211 if (DECL_RTL (exp) == 0)
7213 error_with_decl (exp, "prior parameter's size depends on `%s'");
7217 bc_load_parmaddr (DECL_RTL (exp));
7218 bc_load_memory (TREE_TYPE (exp), exp);
7224 if (DECL_RTL (exp) == 0)
7228 if (BYTECODE_LABEL (DECL_RTL (exp)))
7229 bc_load_externaddr (DECL_RTL (exp));
7231 bc_load_localaddr (DECL_RTL (exp));
7233 if (TREE_PUBLIC (exp))
7234 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
7235 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
7237 bc_load_localaddr (DECL_RTL (exp));
7239 bc_load_memory (TREE_TYPE (exp), exp);
7244 #ifdef DEBUG_PRINT_CODE
7245 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
7247 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
7249 : TYPE_MODE (TREE_TYPE (exp)))],
7250 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
7256 #ifdef DEBUG_PRINT_CODE
7257 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
7259 /* FIX THIS: find a better way to pass real_cst's. -bson */
7260 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
7261 (double) TREE_REAL_CST (exp));
7270 /* We build a call description vector describing the type of
7271 the return value and of the arguments; this call vector,
7272 together with a pointer to a location for the return value
7273 and the base of the argument list, is passed to the low
7274 level machine dependent call subroutine, which is responsible
7275 for putting the arguments wherever real functions expect
7276 them, as well as getting the return value back. */
7278 tree calldesc = 0, arg;
7282 /* Push the evaluated args on the evaluation stack in reverse
7283 order. Also make an entry for each arg in the calldesc
7284 vector while we're at it. */
7286 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7288 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
7291 bc_expand_expr (TREE_VALUE (arg));
7293 calldesc = tree_cons ((tree) 0,
7294 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
7296 calldesc = tree_cons ((tree) 0,
7297 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
7301 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7303 /* Allocate a location for the return value and push its
7304 address on the evaluation stack. Also make an entry
7305 at the front of the calldesc for the return value type. */
7307 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7308 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
7309 bc_load_localaddr (retval);
7311 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
7312 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
7314 /* Prepend the argument count. */
7315 calldesc = tree_cons ((tree) 0,
7316 build_int_2 (nargs, 0),
7319 /* Push the address of the call description vector on the stack. */
7320 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
7321 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
7322 build_index_type (build_int_2 (nargs * 2, 0)));
7323 r = output_constant_def (calldesc);
7324 bc_load_externaddr (r);
7326 /* Push the address of the function to be called. */
7327 bc_expand_expr (TREE_OPERAND (exp, 0));
7329 /* Call the function, popping its address and the calldesc vector
7330 address off the evaluation stack in the process. */
7331 bc_emit_instruction (call);
7333 /* Pop the arguments off the stack. */
7334 bc_adjust_stack (nargs);
7336 /* Load the return value onto the stack. */
7337 bc_load_localaddr (retval);
7338 bc_load_memory (type, TREE_OPERAND (exp, 0));
7344 if (!SAVE_EXPR_RTL (exp))
7346 /* First time around: copy to local variable */
7347 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
7348 TYPE_ALIGN (TREE_TYPE(exp)));
7349 bc_expand_expr (TREE_OPERAND (exp, 0));
7350 bc_emit_instruction (duplicate);
7352 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7353 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7357 /* Consecutive reference: use saved copy */
7358 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7359 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7364 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
7365 how are they handled instead? */
7368 TREE_USED (exp) = 1;
7369 bc_expand_expr (STMT_BODY (exp));
7376 bc_expand_expr (TREE_OPERAND (exp, 0));
7377 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
7382 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
7387 bc_expand_address (TREE_OPERAND (exp, 0));
7392 bc_expand_expr (TREE_OPERAND (exp, 0));
7393 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7398 bc_expand_expr (bc_canonicalize_array_ref (exp));
7403 bc_expand_component_address (exp);
7405 /* If we have a bitfield, generate a proper load */
7406 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
7411 bc_expand_expr (TREE_OPERAND (exp, 0));
7412 bc_emit_instruction (drop);
7413 bc_expand_expr (TREE_OPERAND (exp, 1));
7418 bc_expand_expr (TREE_OPERAND (exp, 0));
7419 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7420 lab = bc_get_bytecode_label ();
7421 bc_emit_bytecode (xjumpifnot);
7422 bc_emit_bytecode_labelref (lab);
7424 #ifdef DEBUG_PRINT_CODE
7425 fputc ('\n', stderr);
7427 bc_expand_expr (TREE_OPERAND (exp, 1));
7428 lab1 = bc_get_bytecode_label ();
7429 bc_emit_bytecode (jump);
7430 bc_emit_bytecode_labelref (lab1);
7432 #ifdef DEBUG_PRINT_CODE
7433 fputc ('\n', stderr);
7436 bc_emit_bytecode_labeldef (lab);
7437 bc_expand_expr (TREE_OPERAND (exp, 2));
7438 bc_emit_bytecode_labeldef (lab1);
7441 case TRUTH_ANDIF_EXPR:
7443 opcode = xjumpifnot;
7446 case TRUTH_ORIF_EXPR:
7453 binoptab = optab_plus_expr;
7458 binoptab = optab_minus_expr;
7463 binoptab = optab_mult_expr;
7466 case TRUNC_DIV_EXPR:
7467 case FLOOR_DIV_EXPR:
7469 case ROUND_DIV_EXPR:
7470 case EXACT_DIV_EXPR:
7472 binoptab = optab_trunc_div_expr;
7475 case TRUNC_MOD_EXPR:
7476 case FLOOR_MOD_EXPR:
7478 case ROUND_MOD_EXPR:
7480 binoptab = optab_trunc_mod_expr;
7483 case FIX_ROUND_EXPR:
7484 case FIX_FLOOR_EXPR:
7486 abort (); /* Not used for C. */
7488 case FIX_TRUNC_EXPR:
7495 abort (); /* FIXME */
7499 binoptab = optab_rdiv_expr;
7504 binoptab = optab_bit_and_expr;
7509 binoptab = optab_bit_ior_expr;
7514 binoptab = optab_bit_xor_expr;
7519 binoptab = optab_lshift_expr;
7524 binoptab = optab_rshift_expr;
7527 case TRUTH_AND_EXPR:
7529 binoptab = optab_truth_and_expr;
7534 binoptab = optab_truth_or_expr;
7539 binoptab = optab_lt_expr;
7544 binoptab = optab_le_expr;
7549 binoptab = optab_ge_expr;
7554 binoptab = optab_gt_expr;
7559 binoptab = optab_eq_expr;
7564 binoptab = optab_ne_expr;
7569 unoptab = optab_negate_expr;
7574 unoptab = optab_bit_not_expr;
7577 case TRUTH_NOT_EXPR:
7579 unoptab = optab_truth_not_expr;
7582 case PREDECREMENT_EXPR:
7584 incroptab = optab_predecrement_expr;
7587 case PREINCREMENT_EXPR:
7589 incroptab = optab_preincrement_expr;
7592 case POSTDECREMENT_EXPR:
7594 incroptab = optab_postdecrement_expr;
7597 case POSTINCREMENT_EXPR:
7599 incroptab = optab_postincrement_expr;
7604 bc_expand_constructor (exp);
7614 tree vars = TREE_OPERAND (exp, 0);
7615 int vars_need_expansion = 0;
7617 /* Need to open a binding contour here because
7618 if there are any cleanups they most be contained here. */
7619 expand_start_bindings (0);
7621 /* Mark the corresponding BLOCK for output. */
7622 if (TREE_OPERAND (exp, 2) != 0)
7623 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
7625 /* If VARS have not yet been expanded, expand them now. */
7628 if (DECL_RTL (vars) == 0)
7630 vars_need_expansion = 1;
7633 expand_decl_init (vars);
7634 vars = TREE_CHAIN (vars);
7637 bc_expand_expr (TREE_OPERAND (exp, 1));
7639 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7649 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
7650 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
7656 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7662 bc_expand_expr (TREE_OPERAND (exp, 0));
7663 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7664 lab = bc_get_bytecode_label ();
7666 bc_emit_instruction (duplicate);
7667 bc_emit_bytecode (opcode);
7668 bc_emit_bytecode_labelref (lab);
7670 #ifdef DEBUG_PRINT_CODE
7671 fputc ('\n', stderr);
7674 bc_emit_instruction (drop);
7676 bc_expand_expr (TREE_OPERAND (exp, 1));
7677 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
7678 bc_emit_bytecode_labeldef (lab);
7684 type = TREE_TYPE (TREE_OPERAND (exp, 0));
7686 /* Push the quantum. */
7687 bc_expand_expr (TREE_OPERAND (exp, 1));
7689 /* Convert it to the lvalue's type. */
7690 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
7692 /* Push the address of the lvalue */
7693 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
7695 /* Perform actual increment */
7696 bc_expand_increment (incroptab, type);
7700 /* Return the alignment in bits of EXP, a pointer valued expression.
7701 But don't return more than MAX_ALIGN no matter what.
7702 The alignment returned is, by default, the alignment of the thing that
7703 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7705 Otherwise, look at the expression to see if we can do better, i.e., if the
7706 expression is actually pointing at an object whose alignment is tighter. */
7709 get_pointer_alignment (exp, max_align)
7713 unsigned align, inner;
7715 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7718 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7719 align = MIN (align, max_align);
7723 switch (TREE_CODE (exp))
7727 case NON_LVALUE_EXPR:
7728 exp = TREE_OPERAND (exp, 0);
7729 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7731 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7732 align = MIN (inner, max_align);
7736 /* If sum of pointer + int, restrict our maximum alignment to that
7737 imposed by the integer. If not, we can't do any better than
7739 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
7742 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
7747 exp = TREE_OPERAND (exp, 0);
7751 /* See what we are pointing at and look at its alignment. */
7752 exp = TREE_OPERAND (exp, 0);
7753 if (TREE_CODE (exp) == FUNCTION_DECL)
7754 align = FUNCTION_BOUNDARY;
7755 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
7756 align = DECL_ALIGN (exp);
7757 #ifdef CONSTANT_ALIGNMENT
7758 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7759 align = CONSTANT_ALIGNMENT (exp, align);
7761 return MIN (align, max_align);
7769 /* Return the tree node and offset if a given argument corresponds to
7770 a string constant. */
7773 string_constant (arg, ptr_offset)
7779 if (TREE_CODE (arg) == ADDR_EXPR
7780 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7782 *ptr_offset = integer_zero_node;
7783 return TREE_OPERAND (arg, 0);
7785 else if (TREE_CODE (arg) == PLUS_EXPR)
7787 tree arg0 = TREE_OPERAND (arg, 0);
7788 tree arg1 = TREE_OPERAND (arg, 1);
7793 if (TREE_CODE (arg0) == ADDR_EXPR
7794 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
7797 return TREE_OPERAND (arg0, 0);
7799 else if (TREE_CODE (arg1) == ADDR_EXPR
7800 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
7803 return TREE_OPERAND (arg1, 0);
7810 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
7811 way, because it could contain a zero byte in the middle.
7812 TREE_STRING_LENGTH is the size of the character array, not the string.
7814 Unfortunately, string_constant can't access the values of const char
7815 arrays with initializers, so neither can we do so here. */
7825 src = string_constant (src, &offset_node);
7828 max = TREE_STRING_LENGTH (src);
7829 ptr = TREE_STRING_POINTER (src);
7830 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
7832 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7833 compute the offset to the following null if we don't know where to
7834 start searching for it. */
7836 for (i = 0; i < max; i++)
7839 /* We don't know the starting offset, but we do know that the string
7840 has no internal zero bytes. We can assume that the offset falls
7841 within the bounds of the string; otherwise, the programmer deserves
7842 what he gets. Subtract the offset from the length of the string,
7844 /* This would perhaps not be valid if we were dealing with named
7845 arrays in addition to literal string constants. */
7846 return size_binop (MINUS_EXPR, size_int (max), offset_node);
7849 /* We have a known offset into the string. Start searching there for
7850 a null character. */
7851 if (offset_node == 0)
7855 /* Did we get a long long offset? If so, punt. */
7856 if (TREE_INT_CST_HIGH (offset_node) != 0)
7858 offset = TREE_INT_CST_LOW (offset_node);
7860 /* If the offset is known to be out of bounds, warn, and call strlen at
7862 if (offset < 0 || offset > max)
7864 warning ("offset outside bounds of constant string");
7867 /* Use strlen to search for the first zero byte. Since any strings
7868 constructed with build_string will have nulls appended, we win even
7869 if we get handed something like (char[4])"abcd".
7871 Since OFFSET is our starting index into the string, no further
7872 calculation is needed. */
7873 return size_int (strlen (ptr + offset));
7877 expand_builtin_return_addr (fndecl_code, count, tem)
7878 enum built_in_function fndecl_code;
7884 /* Some machines need special handling before we can access
7885 arbitrary frames. For example, on the sparc, we must first flush
7886 all register windows to the stack. */
7887 #ifdef SETUP_FRAME_ADDRESSES
7888 SETUP_FRAME_ADDRESSES ();
7891 /* On the sparc, the return address is not in the frame, it is in a
7892 register. There is no way to access it off of the current frame
7893 pointer, but it can be accessed off the previous frame pointer by
7894 reading the value from the register window save area. */
7895 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7896 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
7900 /* Scan back COUNT frames to the specified frame. */
7901 for (i = 0; i < count; i++)
7903 /* Assume the dynamic chain pointer is in the word that the
7904 frame address points to, unless otherwise specified. */
7905 #ifdef DYNAMIC_CHAIN_ADDRESS
7906 tem = DYNAMIC_CHAIN_ADDRESS (tem);
7908 tem = memory_address (Pmode, tem);
7909 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
7912 /* For __builtin_frame_address, return what we've got. */
7913 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
7916 /* For __builtin_return_address, Get the return address from that
7918 #ifdef RETURN_ADDR_RTX
7919 tem = RETURN_ADDR_RTX (count, tem);
7921 tem = memory_address (Pmode,
7922 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7923 tem = gen_rtx (MEM, Pmode, tem);
7928 /* Expand an expression EXP that calls a built-in function,
7929 with result going to TARGET if that's convenient
7930 (and in mode MODE if that's convenient).
7931 SUBTARGET may be used as the target for computing one of EXP's operands.
7932 IGNORE is nonzero if the value is to be ignored. */
7934 #define CALLED_AS_BUILT_IN(NODE) \
7935 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
7938 expand_builtin (exp, target, subtarget, mode, ignore)
7942 enum machine_mode mode;
7945 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7946 tree arglist = TREE_OPERAND (exp, 1);
7949 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
7950 optab builtin_optab;
7952 switch (DECL_FUNCTION_CODE (fndecl))
7957 /* build_function_call changes these into ABS_EXPR. */
7962 /* Treat these like sqrt, but only if the user asks for them. */
7963 if (! flag_fast_math)
7965 case BUILT_IN_FSQRT:
7966 /* If not optimizing, call the library function. */
7971 /* Arg could be wrong type if user redeclared this fcn wrong. */
7972 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
7975 /* Stabilize and compute the argument. */
7976 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
7977 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
7979 exp = copy_node (exp);
7980 arglist = copy_node (arglist);
7981 TREE_OPERAND (exp, 1) = arglist;
7982 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
7984 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7986 /* Make a suitable register to place result in. */
7987 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7992 switch (DECL_FUNCTION_CODE (fndecl))
7995 builtin_optab = sin_optab; break;
7997 builtin_optab = cos_optab; break;
7998 case BUILT_IN_FSQRT:
7999 builtin_optab = sqrt_optab; break;
8004 /* Compute into TARGET.
8005 Set TARGET to wherever the result comes back. */
8006 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8007 builtin_optab, op0, target, 0);
8009 /* If we were unable to expand via the builtin, stop the
8010 sequence (without outputting the insns) and break, causing
8011 a call the the library function. */
8018 /* Check the results by default. But if flag_fast_math is turned on,
8019 then assume sqrt will always be called with valid arguments. */
8021 if (! flag_fast_math)
8023 /* Don't define the builtin FP instructions
8024 if your machine is not IEEE. */
8025 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8028 lab1 = gen_label_rtx ();
8030 /* Test the result; if it is NaN, set errno=EDOM because
8031 the argument was not in the domain. */
8032 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
8033 emit_jump_insn (gen_beq (lab1));
8037 #ifdef GEN_ERRNO_RTX
8038 rtx errno_rtx = GEN_ERRNO_RTX;
8041 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
8044 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8047 /* We can't set errno=EDOM directly; let the library call do it.
8048 Pop the arguments right away in case the call gets deleted. */
8050 expand_call (exp, target, 0);
8057 /* Output the entire sequence. */
8058 insns = get_insns ();
8064 /* __builtin_apply_args returns block of memory allocated on
8065 the stack into which is stored the arg pointer, structure
8066 value address, static chain, and all the registers that might
8067 possibly be used in performing a function call. The code is
8068 moved to the start of the function so the incoming values are
8070 case BUILT_IN_APPLY_ARGS:
8071 /* Don't do __builtin_apply_args more than once in a function.
8072 Save the result of the first call and reuse it. */
8073 if (apply_args_value != 0)
8074 return apply_args_value;
8076 /* When this function is called, it means that registers must be
8077 saved on entry to this function. So we migrate the
8078 call to the first insn of this function. */
8083 temp = expand_builtin_apply_args ();
8087 apply_args_value = temp;
8089 /* Put the sequence after the NOTE that starts the function.
8090 If this is inside a SEQUENCE, make the outer-level insn
8091 chain current, so the code is placed at the start of the
8093 push_topmost_sequence ();
8094 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8095 pop_topmost_sequence ();
8099 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8100 FUNCTION with a copy of the parameters described by
8101 ARGUMENTS, and ARGSIZE. It returns a block of memory
8102 allocated on the stack into which is stored all the registers
8103 that might possibly be used for returning the result of a
8104 function. ARGUMENTS is the value returned by
8105 __builtin_apply_args. ARGSIZE is the number of bytes of
8106 arguments that must be copied. ??? How should this value be
8107 computed? We'll also need a safe worst case value for varargs
8109 case BUILT_IN_APPLY:
8111 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8112 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8113 || TREE_CHAIN (arglist) == 0
8114 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8115 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8116 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8124 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8125 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
8127 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8130 /* __builtin_return (RESULT) causes the function to return the
8131 value described by RESULT. RESULT is address of the block of
8132 memory returned by __builtin_apply. */
8133 case BUILT_IN_RETURN:
8135 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8136 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8137 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8138 NULL_RTX, VOIDmode, 0));
8141 case BUILT_IN_SAVEREGS:
8142 /* Don't do __builtin_saveregs more than once in a function.
8143 Save the result of the first call and reuse it. */
8144 if (saveregs_value != 0)
8145 return saveregs_value;
8147 /* When this function is called, it means that registers must be
8148 saved on entry to this function. So we migrate the
8149 call to the first insn of this function. */
8153 /* Now really call the function. `expand_call' does not call
8154 expand_builtin, so there is no danger of infinite recursion here. */
8157 #ifdef EXPAND_BUILTIN_SAVEREGS
8158 /* Do whatever the machine needs done in this case. */
8159 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8161 /* The register where the function returns its value
8162 is likely to have something else in it, such as an argument.
8163 So preserve that register around the call. */
8165 if (value_mode != VOIDmode)
8167 rtx valreg = hard_libcall_value (value_mode);
8168 rtx saved_valreg = gen_reg_rtx (value_mode);
8170 emit_move_insn (saved_valreg, valreg);
8171 temp = expand_call (exp, target, ignore);
8172 emit_move_insn (valreg, saved_valreg);
8175 /* Generate the call, putting the value in a pseudo. */
8176 temp = expand_call (exp, target, ignore);
8182 saveregs_value = temp;
8184 /* Put the sequence after the NOTE that starts the function.
8185 If this is inside a SEQUENCE, make the outer-level insn
8186 chain current, so the code is placed at the start of the
8188 push_topmost_sequence ();
8189 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8190 pop_topmost_sequence ();
8194 /* __builtin_args_info (N) returns word N of the arg space info
8195 for the current function. The number and meanings of words
8196 is controlled by the definition of CUMULATIVE_ARGS. */
8197 case BUILT_IN_ARGS_INFO:
8199 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
8201 int *word_ptr = (int *) ¤t_function_args_info;
8202 tree type, elts, result;
8204 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8205 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8206 __FILE__, __LINE__);
8210 tree arg = TREE_VALUE (arglist);
8211 if (TREE_CODE (arg) != INTEGER_CST)
8212 error ("argument of `__builtin_args_info' must be constant");
8215 int wordnum = TREE_INT_CST_LOW (arg);
8217 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8218 error ("argument of `__builtin_args_info' out of range");
8220 return GEN_INT (word_ptr[wordnum]);
8224 error ("missing argument in `__builtin_args_info'");
8229 for (i = 0; i < nwords; i++)
8230 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
8232 type = build_array_type (integer_type_node,
8233 build_index_type (build_int_2 (nwords, 0)));
8234 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8235 TREE_CONSTANT (result) = 1;
8236 TREE_STATIC (result) = 1;
8237 result = build (INDIRECT_REF, build_pointer_type (type), result);
8238 TREE_CONSTANT (result) = 1;
8239 return expand_expr (result, NULL_RTX, VOIDmode, 0);
8243 /* Return the address of the first anonymous stack arg. */
8244 case BUILT_IN_NEXT_ARG:
8246 tree fntype = TREE_TYPE (current_function_decl);
8248 if ((TYPE_ARG_TYPES (fntype) == 0
8249 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8251 && ! current_function_varargs)
8253 error ("`va_start' used in function with fixed args");
8259 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8260 tree arg = TREE_VALUE (arglist);
8262 /* Strip off all nops for the sake of the comparison. This
8263 is not quite the same as STRIP_NOPS. It does more.
8264 We must also strip off INDIRECT_EXPR for C++ reference
8266 while (TREE_CODE (arg) == NOP_EXPR
8267 || TREE_CODE (arg) == CONVERT_EXPR
8268 || TREE_CODE (arg) == NON_LVALUE_EXPR
8269 || TREE_CODE (arg) == INDIRECT_REF)
8270 arg = TREE_OPERAND (arg, 0);
8271 if (arg != last_parm)
8272 warning ("second parameter of `va_start' not last named argument");
8274 else if (! current_function_varargs)
8275 /* Evidently an out of date version of <stdarg.h>; can't validate
8276 va_start's second argument, but can still work as intended. */
8277 warning ("`__builtin_next_arg' called without an argument");
8280 return expand_binop (Pmode, add_optab,
8281 current_function_internal_arg_pointer,
8282 current_function_arg_offset_rtx,
8283 NULL_RTX, 0, OPTAB_LIB_WIDEN);
8285 case BUILT_IN_CLASSIFY_TYPE:
8288 tree type = TREE_TYPE (TREE_VALUE (arglist));
8289 enum tree_code code = TREE_CODE (type);
8290 if (code == VOID_TYPE)
8291 return GEN_INT (void_type_class);
8292 if (code == INTEGER_TYPE)
8293 return GEN_INT (integer_type_class);
8294 if (code == CHAR_TYPE)
8295 return GEN_INT (char_type_class);
8296 if (code == ENUMERAL_TYPE)
8297 return GEN_INT (enumeral_type_class);
8298 if (code == BOOLEAN_TYPE)
8299 return GEN_INT (boolean_type_class);
8300 if (code == POINTER_TYPE)
8301 return GEN_INT (pointer_type_class);
8302 if (code == REFERENCE_TYPE)
8303 return GEN_INT (reference_type_class);
8304 if (code == OFFSET_TYPE)
8305 return GEN_INT (offset_type_class);
8306 if (code == REAL_TYPE)
8307 return GEN_INT (real_type_class);
8308 if (code == COMPLEX_TYPE)
8309 return GEN_INT (complex_type_class);
8310 if (code == FUNCTION_TYPE)
8311 return GEN_INT (function_type_class);
8312 if (code == METHOD_TYPE)
8313 return GEN_INT (method_type_class);
8314 if (code == RECORD_TYPE)
8315 return GEN_INT (record_type_class);
8316 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8317 return GEN_INT (union_type_class);
8318 if (code == ARRAY_TYPE)
8320 if (TYPE_STRING_FLAG (type))
8321 return GEN_INT (string_type_class);
8323 return GEN_INT (array_type_class);
8325 if (code == SET_TYPE)
8326 return GEN_INT (set_type_class);
8327 if (code == FILE_TYPE)
8328 return GEN_INT (file_type_class);
8329 if (code == LANG_TYPE)
8330 return GEN_INT (lang_type_class);
8332 return GEN_INT (no_type_class);
8334 case BUILT_IN_CONSTANT_P:
8339 tree arg = TREE_VALUE (arglist);
8342 return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
8343 || (TREE_CODE (arg) == ADDR_EXPR
8344 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8345 ? const1_rtx : const0_rtx);
8348 case BUILT_IN_FRAME_ADDRESS:
8349 /* The argument must be a nonnegative integer constant.
8350 It counts the number of frames to scan up the stack.
8351 The value is the address of that frame. */
8352 case BUILT_IN_RETURN_ADDRESS:
8353 /* The argument must be a nonnegative integer constant.
8354 It counts the number of frames to scan up the stack.
8355 The value is the return address saved in that frame. */
8357 /* Warning about missing arg was already issued. */
8359 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
8361 error ("invalid arg to `__builtin_return_address'");
8364 else if (tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
8366 error ("invalid arg to `__builtin_return_address'");
8371 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
8372 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
8373 hard_frame_pointer_rtx);
8375 /* For __builtin_frame_address, return what we've got. */
8376 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8379 if (GET_CODE (tem) != REG)
8380 tem = copy_to_reg (tem);
8384 case BUILT_IN_ALLOCA:
8386 /* Arg could be non-integer if user redeclared this fcn wrong. */
8387 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8390 /* Compute the argument. */
8391 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
8393 /* Allocate the desired space. */
8394 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
8397 /* If not optimizing, call the library function. */
8398 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8402 /* Arg could be non-integer if user redeclared this fcn wrong. */
8403 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8406 /* Compute the argument. */
8407 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8408 /* Compute ffs, into TARGET if possible.
8409 Set TARGET to wherever the result comes back. */
8410 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8411 ffs_optab, op0, target, 1);
8416 case BUILT_IN_STRLEN:
8417 /* If not optimizing, call the library function. */
8418 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8422 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8423 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8427 tree src = TREE_VALUE (arglist);
8428 tree len = c_strlen (src);
8431 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8433 rtx result, src_rtx, char_rtx;
8434 enum machine_mode insn_mode = value_mode, char_mode;
8435 enum insn_code icode;
8437 /* If the length is known, just return it. */
8439 return expand_expr (len, target, mode, 0);
8441 /* If SRC is not a pointer type, don't do this operation inline. */
8445 /* Call a function if we can't compute strlen in the right mode. */
8447 while (insn_mode != VOIDmode)
8449 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
8450 if (icode != CODE_FOR_nothing)
8453 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
8455 if (insn_mode == VOIDmode)
8458 /* Make a place to write the result of the instruction. */
8461 && GET_CODE (result) == REG
8462 && GET_MODE (result) == insn_mode
8463 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8464 result = gen_reg_rtx (insn_mode);
8466 /* Make sure the operands are acceptable to the predicates. */
8468 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
8469 result = gen_reg_rtx (insn_mode);
8471 src_rtx = memory_address (BLKmode,
8472 expand_expr (src, NULL_RTX, ptr_mode,
8474 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
8475 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
8477 char_rtx = const0_rtx;
8478 char_mode = insn_operand_mode[(int)icode][2];
8479 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
8480 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
8482 emit_insn (GEN_FCN (icode) (result,
8483 gen_rtx (MEM, BLKmode, src_rtx),
8484 char_rtx, GEN_INT (align)));
8486 /* Return the value in the proper mode for this function. */
8487 if (GET_MODE (result) == value_mode)
8489 else if (target != 0)
8491 convert_move (target, result, 0);
8495 return convert_to_mode (value_mode, result, 0);
8498 case BUILT_IN_STRCPY:
8499 /* If not optimizing, call the library function. */
8500 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8504 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8505 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8506 || TREE_CHAIN (arglist) == 0
8507 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8511 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
8516 len = size_binop (PLUS_EXPR, len, integer_one_node);
8518 chainon (arglist, build_tree_list (NULL_TREE, len));
8522 case BUILT_IN_MEMCPY:
8523 /* If not optimizing, call the library function. */
8524 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8528 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8529 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8530 || TREE_CHAIN (arglist) == 0
8531 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8532 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8533 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8537 tree dest = TREE_VALUE (arglist);
8538 tree src = TREE_VALUE (TREE_CHAIN (arglist));
8539 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8543 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8545 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8546 rtx dest_rtx, dest_mem, src_mem;
8548 /* If either SRC or DEST is not a pointer type, don't do
8549 this operation in-line. */
8550 if (src_align == 0 || dest_align == 0)
8552 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
8553 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8557 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8558 dest_mem = gen_rtx (MEM, BLKmode,
8559 memory_address (BLKmode, dest_rtx));
8560 /* There could be a void* cast on top of the object. */
8561 while (TREE_CODE (dest) == NOP_EXPR)
8562 dest = TREE_OPERAND (dest, 0);
8563 type = TREE_TYPE (TREE_TYPE (dest));
8564 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8565 src_mem = gen_rtx (MEM, BLKmode,
8566 memory_address (BLKmode,
8567 expand_expr (src, NULL_RTX,
8570 /* There could be a void* cast on top of the object. */
8571 while (TREE_CODE (src) == NOP_EXPR)
8572 src = TREE_OPERAND (src, 0);
8573 type = TREE_TYPE (TREE_TYPE (src));
8574 MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
8576 /* Copy word part most expediently. */
8577 emit_block_move (dest_mem, src_mem,
8578 expand_expr (len, NULL_RTX, VOIDmode, 0),
8579 MIN (src_align, dest_align));
8580 return force_operand (dest_rtx, NULL_RTX);
8583 case BUILT_IN_MEMSET:
8584 /* If not optimizing, call the library function. */
8585 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8589 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8590 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8591 || TREE_CHAIN (arglist) == 0
8592 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
8594 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8596 != (TREE_CODE (TREE_TYPE
8598 (TREE_CHAIN (TREE_CHAIN (arglist))))))))
8602 tree dest = TREE_VALUE (arglist);
8603 tree val = TREE_VALUE (TREE_CHAIN (arglist));
8604 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8608 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8609 rtx dest_rtx, dest_mem;
8611 /* If DEST is not a pointer type, don't do this
8612 operation in-line. */
8613 if (dest_align == 0)
8616 /* If VAL is not 0, don't do this operation in-line. */
8617 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
8620 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8621 dest_mem = gen_rtx (MEM, BLKmode,
8622 memory_address (BLKmode, dest_rtx));
8623 /* There could be a void* cast on top of the object. */
8624 while (TREE_CODE (dest) == NOP_EXPR)
8625 dest = TREE_OPERAND (dest, 0);
8626 type = TREE_TYPE (TREE_TYPE (dest));
8627 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8629 clear_storage (dest_mem, expand_expr (len, NULL_RTX, VOIDmode, 0),
8632 return force_operand (dest_rtx, NULL_RTX);
8635 /* These comparison functions need an instruction that returns an actual
8636 index. An ordinary compare that just sets the condition codes
8638 #ifdef HAVE_cmpstrsi
8639 case BUILT_IN_STRCMP:
8640 /* If not optimizing, call the library function. */
8641 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8645 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8646 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8647 || TREE_CHAIN (arglist) == 0
8648 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8650 else if (!HAVE_cmpstrsi)
8653 tree arg1 = TREE_VALUE (arglist);
8654 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8658 len = c_strlen (arg1);
8660 len = size_binop (PLUS_EXPR, integer_one_node, len);
8661 len2 = c_strlen (arg2);
8663 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
8665 /* If we don't have a constant length for the first, use the length
8666 of the second, if we know it. We don't require a constant for
8667 this case; some cost analysis could be done if both are available
8668 but neither is constant. For now, assume they're equally cheap.
8670 If both strings have constant lengths, use the smaller. This
8671 could arise if optimization results in strcpy being called with
8672 two fixed strings, or if the code was machine-generated. We should
8673 add some code to the `memcmp' handler below to deal with such
8674 situations, someday. */
8675 if (!len || TREE_CODE (len) != INTEGER_CST)
8682 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
8684 if (tree_int_cst_lt (len2, len))
8688 chainon (arglist, build_tree_list (NULL_TREE, len));
8692 case BUILT_IN_MEMCMP:
8693 /* If not optimizing, call the library function. */
8694 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8698 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8699 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8700 || TREE_CHAIN (arglist) == 0
8701 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8702 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8703 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8705 else if (!HAVE_cmpstrsi)
8708 tree arg1 = TREE_VALUE (arglist);
8709 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8710 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8714 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8716 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8717 enum machine_mode insn_mode
8718 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
8720 /* If we don't have POINTER_TYPE, call the function. */
8721 if (arg1_align == 0 || arg2_align == 0)
8723 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
8724 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8728 /* Make a place to write the result of the instruction. */
8731 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
8732 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8733 result = gen_reg_rtx (insn_mode);
8735 emit_insn (gen_cmpstrsi (result,
8736 gen_rtx (MEM, BLKmode,
8737 expand_expr (arg1, NULL_RTX,
8740 gen_rtx (MEM, BLKmode,
8741 expand_expr (arg2, NULL_RTX,
8744 expand_expr (len, NULL_RTX, VOIDmode, 0),
8745 GEN_INT (MIN (arg1_align, arg2_align))));
8747 /* Return the value in the proper mode for this function. */
8748 mode = TYPE_MODE (TREE_TYPE (exp));
8749 if (GET_MODE (result) == mode)
8751 else if (target != 0)
8753 convert_move (target, result, 0);
8757 return convert_to_mode (mode, result, 0);
8760 case BUILT_IN_STRCMP:
8761 case BUILT_IN_MEMCMP:
8765 /* __builtin_setjmp is passed a pointer to an array of five words
8766 (not all will be used on all machines). It operates similarly to
8767 the C library function of the same name, but is more efficient.
8768 Much of the code below (and for longjmp) is copied from the handling
8771 NOTE: This is intended for use by GNAT and will only work in
8772 the method used by it. This code will likely NOT survive to
8773 the GCC 2.8.0 release. */
8774 case BUILT_IN_SETJMP:
8776 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8780 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
8782 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
8783 enum machine_mode sa_mode = Pmode;
8785 int old_inhibit_defer_pop = inhibit_defer_pop;
8786 int return_pops = RETURN_POPS_ARGS (get_identifier ("__dummy"),
8787 get_identifier ("__dummy"), 0);
8789 CUMULATIVE_ARGS args_so_far;
8792 #ifdef POINTERS_EXTEND_UNSIGNED
8793 buf_addr = convert_memory_address (Pmode, buf_addr);
8796 buf_addr = force_reg (Pmode, buf_addr);
8798 if (target == 0 || GET_CODE (target) != REG
8799 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8800 target = gen_reg_rtx (value_mode);
8804 CONST_CALL_P (emit_note (NULL_PTR, NOTE_INSN_SETJMP)) = 1;
8805 current_function_calls_setjmp = 1;
8807 /* We store the frame pointer and the address of lab1 in the buffer
8808 and use the rest of it for the stack save area, which is
8809 machine-dependent. */
8810 emit_move_insn (gen_rtx (MEM, Pmode, buf_addr),
8811 virtual_stack_vars_rtx);
8813 (validize_mem (gen_rtx (MEM, Pmode,
8814 plus_constant (buf_addr,
8815 GET_MODE_SIZE (Pmode)))),
8816 gen_rtx (LABEL_REF, Pmode, lab1));
8818 #ifdef HAVE_save_stack_nonlocal
8819 if (HAVE_save_stack_nonlocal)
8820 sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
8823 stack_save = gen_rtx (MEM, sa_mode,
8824 plus_constant (buf_addr,
8825 2 * GET_MODE_SIZE (Pmode)));
8826 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
8830 emit_insn (gen_setjmp ());
8833 /* Set TARGET to zero and branch around the other case. */
8834 emit_move_insn (target, const0_rtx);
8835 emit_jump_insn (gen_jump (lab2));
8839 /* Note that setjmp clobbers FP when we get here, so we have to
8840 make sure it's marked as used by this function. */
8841 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
8843 /* Mark the static chain as clobbered here so life information
8844 doesn't get messed up for it. */
8845 emit_insn (gen_rtx (CLOBBER, VOIDmode, static_chain_rtx));
8847 /* Now put in the code to restore the frame pointer, and argument
8848 pointer, if needed. The code below is from expand_end_bindings
8849 in stmt.c; see detailed documentation there. */
8850 #ifdef HAVE_nonlocal_goto
8851 if (! HAVE_nonlocal_goto)
8853 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8855 current_function_has_nonlocal_goto = 1;
8857 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8858 if (fixed_regs[ARG_POINTER_REGNUM])
8860 #ifdef ELIMINABLE_REGS
8861 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
8863 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8864 if (elim_regs[i].from == ARG_POINTER_REGNUM
8865 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8868 if (i == sizeof elim_regs / sizeof elim_regs [0])
8871 /* Now restore our arg pointer from the address at which it
8872 was saved in our stack frame.
8873 If there hasn't be space allocated for it yet, make
8875 if (arg_pointer_save_area == 0)
8876 arg_pointer_save_area
8877 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8878 emit_move_insn (virtual_incoming_args_rtx,
8879 copy_to_reg (arg_pointer_save_area));
8884 #ifdef HAVE_nonlocal_goto_receiver
8885 if (HAVE_nonlocal_goto_receiver)
8886 emit_insn (gen_nonlocal_goto_receiver ());
8888 /* The static chain pointer contains the address of dummy function.
8889 We need to call it here to handle some PIC cases of restoring
8890 a global pointer. Then return 1. */
8891 op0 = copy_to_mode_reg (Pmode, static_chain_rtx);
8893 /* We can't actually call emit_library_call here, so do everything
8894 it does, which isn't much for a libfunc with no args. */
8895 op0 = memory_address (FUNCTION_MODE, op0);
8897 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE,
8898 gen_rtx (SYMBOL_REF, Pmode, "__dummy"), 1);
8899 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1);
8901 #ifndef ACCUMULATE_OUTGOING_ARGS
8902 #ifdef HAVE_call_pop
8904 emit_call_insn (gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, op0),
8905 const0_rtx, next_arg_reg,
8906 GEN_INT (return_pops)));
8913 emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, op0),
8914 const0_rtx, next_arg_reg, const0_rtx));
8919 emit_move_insn (target, const1_rtx);
8924 /* __builtin_longjmp is passed a pointer to an array of five words
8925 and a value, which is a dummy. It's similar to the C library longjmp
8926 function but works with __builtin_setjmp above. */
8927 case BUILT_IN_LONGJMP:
8928 if (arglist == 0 || TREE_CHAIN (arglist) == 0
8929 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8933 tree dummy_id = get_identifier ("__dummy");
8934 tree dummy_type = build_function_type (void_type_node, NULL_TREE);
8935 tree dummy_decl = build_decl (FUNCTION_DECL, dummy_id, dummy_type);
8936 #ifdef POINTERS_EXTEND_UNSIGNED
8939 convert_memory_address
8941 expand_expr (TREE_VALUE (arglist),
8942 NULL_RTX, VOIDmode, 0)));
8945 = force_reg (Pmode, expand_expr (TREE_VALUE (arglist),
8949 rtx fp = gen_rtx (MEM, Pmode, buf_addr);
8950 rtx lab = gen_rtx (MEM, Pmode,
8951 plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
8952 enum machine_mode sa_mode
8953 #ifdef HAVE_save_stack_nonlocal
8954 = (HAVE_save_stack_nonlocal
8955 ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
8960 rtx stack = gen_rtx (MEM, sa_mode,
8961 plus_constant (buf_addr,
8962 2 * GET_MODE_SIZE (Pmode)));
8964 DECL_EXTERNAL (dummy_decl) = 1;
8965 TREE_PUBLIC (dummy_decl) = 1;
8966 make_decl_rtl (dummy_decl, NULL_PTR, 1);
8968 /* Expand the second expression just for side-effects. */
8969 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
8970 const0_rtx, VOIDmode, 0);
8972 assemble_external (dummy_decl);
8974 /* Pick up FP, label, and SP from the block and jump. This code is
8975 from expand_goto in stmt.c; see there for detailed comments. */
8976 #if HAVE_nonlocal_goto
8977 if (HAVE_nonlocal_goto)
8978 emit_insn (gen_nonlocal_goto (fp, lab, stack,
8979 XEXP (DECL_RTL (dummy_decl), 0)));
8983 lab = copy_to_reg (lab);
8984 emit_move_insn (hard_frame_pointer_rtx, fp);
8985 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
8987 /* Put in the static chain register the address of the dummy
8989 emit_move_insn (static_chain_rtx, XEXP (DECL_RTL (dummy_decl), 0));
8990 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
8991 emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
8992 emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
8993 emit_indirect_jump (lab);
8999 default: /* just do library call, if unknown builtin */
9000 error ("built-in function `%s' not currently supported",
9001 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9004 /* The switch statement above can drop through to cause the function
9005 to be called normally. */
9007 return expand_call (exp, target, ignore);
9010 /* Built-in functions to perform an untyped call and return. */
9012 /* For each register that may be used for calling a function, this
9013 gives a mode used to copy the register's value. VOIDmode indicates
9014 the register is not used for calling a function. If the machine
9015 has register windows, this gives only the outbound registers.
9016 INCOMING_REGNO gives the corresponding inbound register. */
9017 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
9019 /* For each register that may be used for returning values, this gives
9020 a mode used to copy the register's value. VOIDmode indicates the
9021 register is not used for returning values. If the machine has
9022 register windows, this gives only the outbound registers.
9023 INCOMING_REGNO gives the corresponding inbound register. */
9024 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
9026 /* For each register that may be used for calling a function, this
9027 gives the offset of that register into the block returned by
9028 __builtin_apply_args. 0 indicates that the register is not
9029 used for calling a function. */
9030 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
9032 /* Return the offset of register REGNO into the block returned by
9033 __builtin_apply_args. This is not declared static, since it is
9034 needed in objc-act.c. */
9037 apply_args_register_offset (regno)
9042 /* Arguments are always put in outgoing registers (in the argument
9043 block) if such make sense. */
9044 #ifdef OUTGOING_REGNO
9045 regno = OUTGOING_REGNO(regno);
9047 return apply_args_reg_offset[regno];
9050 /* Return the size required for the block returned by __builtin_apply_args,
9051 and initialize apply_args_mode. */
9056 static int size = -1;
9058 enum machine_mode mode;
9060 /* The values computed by this function never change. */
9063 /* The first value is the incoming arg-pointer. */
9064 size = GET_MODE_SIZE (Pmode);
9066 /* The second value is the structure value address unless this is
9067 passed as an "invisible" first argument. */
9068 if (struct_value_rtx)
9069 size += GET_MODE_SIZE (Pmode);
9071 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9072 if (FUNCTION_ARG_REGNO_P (regno))
9074 /* Search for the proper mode for copying this register's
9075 value. I'm not sure this is right, but it works so far. */
9076 enum machine_mode best_mode = VOIDmode;
9078 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9080 mode = GET_MODE_WIDER_MODE (mode))
9081 if (HARD_REGNO_MODE_OK (regno, mode)
9082 && HARD_REGNO_NREGS (regno, mode) == 1)
9085 if (best_mode == VOIDmode)
9086 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9088 mode = GET_MODE_WIDER_MODE (mode))
9089 if (HARD_REGNO_MODE_OK (regno, mode)
9090 && (mov_optab->handlers[(int) mode].insn_code
9091 != CODE_FOR_nothing))
9095 if (mode == VOIDmode)
9098 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9099 if (size % align != 0)
9100 size = CEIL (size, align) * align;
9101 apply_args_reg_offset[regno] = size;
9102 size += GET_MODE_SIZE (mode);
9103 apply_args_mode[regno] = mode;
9107 apply_args_mode[regno] = VOIDmode;
9108 apply_args_reg_offset[regno] = 0;
9114 /* Return the size required for the block returned by __builtin_apply,
9115 and initialize apply_result_mode. */
9118 apply_result_size ()
9120 static int size = -1;
9122 enum machine_mode mode;
9124 /* The values computed by this function never change. */
9129 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9130 if (FUNCTION_VALUE_REGNO_P (regno))
9132 /* Search for the proper mode for copying this register's
9133 value. I'm not sure this is right, but it works so far. */
9134 enum machine_mode best_mode = VOIDmode;
9136 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9138 mode = GET_MODE_WIDER_MODE (mode))
9139 if (HARD_REGNO_MODE_OK (regno, mode))
9142 if (best_mode == VOIDmode)
9143 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9145 mode = GET_MODE_WIDER_MODE (mode))
9146 if (HARD_REGNO_MODE_OK (regno, mode)
9147 && (mov_optab->handlers[(int) mode].insn_code
9148 != CODE_FOR_nothing))
9152 if (mode == VOIDmode)
9155 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9156 if (size % align != 0)
9157 size = CEIL (size, align) * align;
9158 size += GET_MODE_SIZE (mode);
9159 apply_result_mode[regno] = mode;
9162 apply_result_mode[regno] = VOIDmode;
9164 /* Allow targets that use untyped_call and untyped_return to override
9165 the size so that machine-specific information can be stored here. */
9166 #ifdef APPLY_RESULT_SIZE
9167 size = APPLY_RESULT_SIZE;
9173 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9174 /* Create a vector describing the result block RESULT. If SAVEP is true,
9175 the result block is used to save the values; otherwise it is used to
9176 restore the values. */
9179 result_vector (savep, result)
9183 int regno, size, align, nelts;
9184 enum machine_mode mode;
9186 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9189 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9190 if ((mode = apply_result_mode[regno]) != VOIDmode)
9192 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9193 if (size % align != 0)
9194 size = CEIL (size, align) * align;
9195 reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
9196 mem = change_address (result, mode,
9197 plus_constant (XEXP (result, 0), size));
9198 savevec[nelts++] = (savep
9199 ? gen_rtx (SET, VOIDmode, mem, reg)
9200 : gen_rtx (SET, VOIDmode, reg, mem));
9201 size += GET_MODE_SIZE (mode);
9203 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
9205 #endif /* HAVE_untyped_call or HAVE_untyped_return */
9207 /* Save the state required to perform an untyped call with the same
9208 arguments as were passed to the current function. */
9211 expand_builtin_apply_args ()
9214 int size, align, regno;
9215 enum machine_mode mode;
9217 /* Create a block where the arg-pointer, structure value address,
9218 and argument registers can be saved. */
9219 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
9221 /* Walk past the arg-pointer and structure value address. */
9222 size = GET_MODE_SIZE (Pmode);
9223 if (struct_value_rtx)
9224 size += GET_MODE_SIZE (Pmode);
9226 /* Save each register used in calling a function to the block. */
9227 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9228 if ((mode = apply_args_mode[regno]) != VOIDmode)
9232 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9233 if (size % align != 0)
9234 size = CEIL (size, align) * align;
9236 tem = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9239 /* For reg-stack.c's stack register household.
9240 Compare with a similar piece of code in function.c. */
9242 emit_insn (gen_rtx (USE, mode, tem));
9245 emit_move_insn (change_address (registers, mode,
9246 plus_constant (XEXP (registers, 0),
9249 size += GET_MODE_SIZE (mode);
9252 /* Save the arg pointer to the block. */
9253 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9254 copy_to_reg (virtual_incoming_args_rtx));
9255 size = GET_MODE_SIZE (Pmode);
9257 /* Save the structure value address unless this is passed as an
9258 "invisible" first argument. */
9259 if (struct_value_incoming_rtx)
9261 emit_move_insn (change_address (registers, Pmode,
9262 plus_constant (XEXP (registers, 0),
9264 copy_to_reg (struct_value_incoming_rtx));
9265 size += GET_MODE_SIZE (Pmode);
9268 /* Return the address of the block. */
9269 return copy_addr_to_reg (XEXP (registers, 0));
9272 /* Perform an untyped call and save the state required to perform an
9273 untyped return of whatever value was returned by the given function. */
9276 expand_builtin_apply (function, arguments, argsize)
9277 rtx function, arguments, argsize;
9279 int size, align, regno;
9280 enum machine_mode mode;
9281 rtx incoming_args, result, reg, dest, call_insn;
9282 rtx old_stack_level = 0;
9283 rtx call_fusage = 0;
9285 /* Create a block where the return registers can be saved. */
9286 result = assign_stack_local (BLKmode, apply_result_size (), -1);
9288 /* ??? The argsize value should be adjusted here. */
9290 /* Fetch the arg pointer from the ARGUMENTS block. */
9291 incoming_args = gen_reg_rtx (Pmode);
9292 emit_move_insn (incoming_args,
9293 gen_rtx (MEM, Pmode, arguments));
9294 #ifndef STACK_GROWS_DOWNWARD
9295 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9296 incoming_args, 0, OPTAB_LIB_WIDEN);
9299 /* Perform postincrements before actually calling the function. */
9302 /* Push a new argument block and copy the arguments. */
9303 do_pending_stack_adjust ();
9304 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
9306 /* Push a block of memory onto the stack to store the memory arguments.
9307 Save the address in a register, and copy the memory arguments. ??? I
9308 haven't figured out how the calling convention macros effect this,
9309 but it's likely that the source and/or destination addresses in
9310 the block copy will need updating in machine specific ways. */
9311 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
9312 emit_block_move (gen_rtx (MEM, BLKmode, dest),
9313 gen_rtx (MEM, BLKmode, incoming_args),
9315 PARM_BOUNDARY / BITS_PER_UNIT);
9317 /* Refer to the argument block. */
9319 arguments = gen_rtx (MEM, BLKmode, arguments);
9321 /* Walk past the arg-pointer and structure value address. */
9322 size = GET_MODE_SIZE (Pmode);
9323 if (struct_value_rtx)
9324 size += GET_MODE_SIZE (Pmode);
9326 /* Restore each of the registers previously saved. Make USE insns
9327 for each of these registers for use in making the call. */
9328 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9329 if ((mode = apply_args_mode[regno]) != VOIDmode)
9331 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9332 if (size % align != 0)
9333 size = CEIL (size, align) * align;
9334 reg = gen_rtx (REG, mode, regno);
9335 emit_move_insn (reg,
9336 change_address (arguments, mode,
9337 plus_constant (XEXP (arguments, 0),
9340 use_reg (&call_fusage, reg);
9341 size += GET_MODE_SIZE (mode);
9344 /* Restore the structure value address unless this is passed as an
9345 "invisible" first argument. */
9346 size = GET_MODE_SIZE (Pmode);
9347 if (struct_value_rtx)
9349 rtx value = gen_reg_rtx (Pmode);
9350 emit_move_insn (value,
9351 change_address (arguments, Pmode,
9352 plus_constant (XEXP (arguments, 0),
9354 emit_move_insn (struct_value_rtx, value);
9355 if (GET_CODE (struct_value_rtx) == REG)
9356 use_reg (&call_fusage, struct_value_rtx);
9357 size += GET_MODE_SIZE (Pmode);
9360 /* All arguments and registers used for the call are set up by now! */
9361 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
9363 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
9364 and we don't want to load it into a register as an optimization,
9365 because prepare_call_address already did it if it should be done. */
9366 if (GET_CODE (function) != SYMBOL_REF)
9367 function = memory_address (FUNCTION_MODE, function);
9369 /* Generate the actual call instruction and save the return value. */
9370 #ifdef HAVE_untyped_call
9371 if (HAVE_untyped_call)
9372 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
9373 result, result_vector (1, result)));
9376 #ifdef HAVE_call_value
9377 if (HAVE_call_value)
9381 /* Locate the unique return register. It is not possible to
9382 express a call that sets more than one return register using
9383 call_value; use untyped_call for that. In fact, untyped_call
9384 only needs to save the return registers in the given block. */
9385 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9386 if ((mode = apply_result_mode[regno]) != VOIDmode)
9389 abort (); /* HAVE_untyped_call required. */
9390 valreg = gen_rtx (REG, mode, regno);
9393 emit_call_insn (gen_call_value (valreg,
9394 gen_rtx (MEM, FUNCTION_MODE, function),
9395 const0_rtx, NULL_RTX, const0_rtx));
9397 emit_move_insn (change_address (result, GET_MODE (valreg),
9405 /* Find the CALL insn we just emitted. */
9406 for (call_insn = get_last_insn ();
9407 call_insn && GET_CODE (call_insn) != CALL_INSN;
9408 call_insn = PREV_INSN (call_insn))
9414 /* Put the register usage information on the CALL. If there is already
9415 some usage information, put ours at the end. */
9416 if (CALL_INSN_FUNCTION_USAGE (call_insn))
9420 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
9421 link = XEXP (link, 1))
9424 XEXP (link, 1) = call_fusage;
9427 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
9429 /* Restore the stack. */
9430 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
9432 /* Return the address of the result block. */
9433 return copy_addr_to_reg (XEXP (result, 0));
9436 /* Perform an untyped return. */
9439 expand_builtin_return (result)
9442 int size, align, regno;
9443 enum machine_mode mode;
9445 rtx call_fusage = 0;
9447 apply_result_size ();
9448 result = gen_rtx (MEM, BLKmode, result);
9450 #ifdef HAVE_untyped_return
9451 if (HAVE_untyped_return)
9453 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
9459 /* Restore the return value and note that each value is used. */
9461 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9462 if ((mode = apply_result_mode[regno]) != VOIDmode)
9464 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9465 if (size % align != 0)
9466 size = CEIL (size, align) * align;
9467 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9468 emit_move_insn (reg,
9469 change_address (result, mode,
9470 plus_constant (XEXP (result, 0),
9473 push_to_sequence (call_fusage);
9474 emit_insn (gen_rtx (USE, VOIDmode, reg));
9475 call_fusage = get_insns ();
9477 size += GET_MODE_SIZE (mode);
9480 /* Put the USE insns before the return. */
9481 emit_insns (call_fusage);
9483 /* Return whatever values was restored by jumping directly to the end
9485 expand_null_return ();
9488 /* Expand code for a post- or pre- increment or decrement
9489 and return the RTX for the result.
9490 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9493 expand_increment (exp, post, ignore)
9497 register rtx op0, op1;
9498 register rtx temp, value;
9499 register tree incremented = TREE_OPERAND (exp, 0);
9500 optab this_optab = add_optab;
9502 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9503 int op0_is_copy = 0;
9504 int single_insn = 0;
9505 /* 1 means we can't store into OP0 directly,
9506 because it is a subreg narrower than a word,
9507 and we don't dare clobber the rest of the word. */
9510 if (output_bytecode)
9512 bc_expand_expr (exp);
9516 /* Stabilize any component ref that might need to be
9517 evaluated more than once below. */
9519 || TREE_CODE (incremented) == BIT_FIELD_REF
9520 || (TREE_CODE (incremented) == COMPONENT_REF
9521 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9522 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9523 incremented = stabilize_reference (incremented);
9524 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9525 ones into save exprs so that they don't accidentally get evaluated
9526 more than once by the code below. */
9527 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9528 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9529 incremented = save_expr (incremented);
9531 /* Compute the operands as RTX.
9532 Note whether OP0 is the actual lvalue or a copy of it:
9533 I believe it is a copy iff it is a register or subreg
9534 and insns were generated in computing it. */
9536 temp = get_last_insn ();
9537 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9539 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9540 in place but instead must do sign- or zero-extension during assignment,
9541 so we copy it into a new register and let the code below use it as
9544 Note that we can safely modify this SUBREG since it is know not to be
9545 shared (it was made by the expand_expr call above). */
9547 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9550 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9554 else if (GET_CODE (op0) == SUBREG
9555 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9557 /* We cannot increment this SUBREG in place. If we are
9558 post-incrementing, get a copy of the old value. Otherwise,
9559 just mark that we cannot increment in place. */
9561 op0 = copy_to_reg (op0);
9566 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9567 && temp != get_last_insn ());
9568 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9570 /* Decide whether incrementing or decrementing. */
9571 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9572 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9573 this_optab = sub_optab;
9575 /* Convert decrement by a constant into a negative increment. */
9576 if (this_optab == sub_optab
9577 && GET_CODE (op1) == CONST_INT)
9579 op1 = GEN_INT (- INTVAL (op1));
9580 this_optab = add_optab;
9583 /* For a preincrement, see if we can do this with a single instruction. */
9586 icode = (int) this_optab->handlers[(int) mode].insn_code;
9587 if (icode != (int) CODE_FOR_nothing
9588 /* Make sure that OP0 is valid for operands 0 and 1
9589 of the insn we want to queue. */
9590 && (*insn_operand_predicate[icode][0]) (op0, mode)
9591 && (*insn_operand_predicate[icode][1]) (op0, mode)
9592 && (*insn_operand_predicate[icode][2]) (op1, mode))
9596 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9597 then we cannot just increment OP0. We must therefore contrive to
9598 increment the original value. Then, for postincrement, we can return
9599 OP0 since it is a copy of the old value. For preincrement, expand here
9600 unless we can do it with a single insn.
9602 Likewise if storing directly into OP0 would clobber high bits
9603 we need to preserve (bad_subreg). */
9604 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9606 /* This is the easiest way to increment the value wherever it is.
9607 Problems with multiple evaluation of INCREMENTED are prevented
9608 because either (1) it is a component_ref or preincrement,
9609 in which case it was stabilized above, or (2) it is an array_ref
9610 with constant index in an array in a register, which is
9611 safe to reevaluate. */
9612 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9613 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9614 ? MINUS_EXPR : PLUS_EXPR),
9617 TREE_OPERAND (exp, 1));
9619 while (TREE_CODE (incremented) == NOP_EXPR
9620 || TREE_CODE (incremented) == CONVERT_EXPR)
9622 newexp = convert (TREE_TYPE (incremented), newexp);
9623 incremented = TREE_OPERAND (incremented, 0);
9626 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9627 return post ? op0 : temp;
9632 /* We have a true reference to the value in OP0.
9633 If there is an insn to add or subtract in this mode, queue it.
9634 Queueing the increment insn avoids the register shuffling
9635 that often results if we must increment now and first save
9636 the old value for subsequent use. */
9638 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9639 op0 = stabilize (op0);
9642 icode = (int) this_optab->handlers[(int) mode].insn_code;
9643 if (icode != (int) CODE_FOR_nothing
9644 /* Make sure that OP0 is valid for operands 0 and 1
9645 of the insn we want to queue. */
9646 && (*insn_operand_predicate[icode][0]) (op0, mode)
9647 && (*insn_operand_predicate[icode][1]) (op0, mode))
9649 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9650 op1 = force_reg (mode, op1);
9652 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9654 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9656 rtx addr = force_reg (Pmode, XEXP (op0, 0));
9659 op0 = change_address (op0, VOIDmode, addr);
9660 temp = force_reg (GET_MODE (op0), op0);
9661 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9662 op1 = force_reg (mode, op1);
9664 /* The increment queue is LIFO, thus we have to `queue'
9665 the instructions in reverse order. */
9666 enqueue_insn (op0, gen_move_insn (op0, temp));
9667 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9672 /* Preincrement, or we can't increment with one simple insn. */
9674 /* Save a copy of the value before inc or dec, to return it later. */
9675 temp = value = copy_to_reg (op0);
9677 /* Arrange to return the incremented value. */
9678 /* Copy the rtx because expand_binop will protect from the queue,
9679 and the results of that would be invalid for us to return
9680 if our caller does emit_queue before using our result. */
9681 temp = copy_rtx (value = op0);
9683 /* Increment however we can. */
9684 op1 = expand_binop (mode, this_optab, value, op1, op0,
9685 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9686 /* Make sure the value is stored into OP0. */
9688 emit_move_insn (op0, op1);
9693 /* Expand all function calls contained within EXP, innermost ones first.
9694 But don't look within expressions that have sequence points.
9695 For each CALL_EXPR, record the rtx for its value
9696 in the CALL_EXPR_RTL field. */
9699 preexpand_calls (exp)
9702 register int nops, i;
9703 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9705 if (! do_preexpand_calls)
9708 /* Only expressions and references can contain calls. */
9710 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9713 switch (TREE_CODE (exp))
9716 /* Do nothing if already expanded. */
9717 if (CALL_EXPR_RTL (exp) != 0
9718 /* Do nothing if the call returns a variable-sized object. */
9719 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
9720 /* Do nothing to built-in functions. */
9721 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9722 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9724 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9727 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9732 case TRUTH_ANDIF_EXPR:
9733 case TRUTH_ORIF_EXPR:
9734 /* If we find one of these, then we can be sure
9735 the adjust will be done for it (since it makes jumps).
9736 Do it now, so that if this is inside an argument
9737 of a function, we don't get the stack adjustment
9738 after some other args have already been pushed. */
9739 do_pending_stack_adjust ();
9744 case WITH_CLEANUP_EXPR:
9745 case CLEANUP_POINT_EXPR:
9749 if (SAVE_EXPR_RTL (exp) != 0)
9753 nops = tree_code_length[(int) TREE_CODE (exp)];
9754 for (i = 0; i < nops; i++)
9755 if (TREE_OPERAND (exp, i) != 0)
9757 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9758 if (type == 'e' || type == '<' || type == '1' || type == '2'
9760 preexpand_calls (TREE_OPERAND (exp, i));
9764 /* At the start of a function, record that we have no previously-pushed
9765 arguments waiting to be popped. */
9768 init_pending_stack_adjust ()
9770 pending_stack_adjust = 0;
9773 /* When exiting from function, if safe, clear out any pending stack adjust
9774 so the adjustment won't get done. */
9777 clear_pending_stack_adjust ()
9779 #ifdef EXIT_IGNORE_STACK
9781 && ! flag_omit_frame_pointer && EXIT_IGNORE_STACK
9782 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9783 && ! flag_inline_functions)
9784 pending_stack_adjust = 0;
9788 /* Pop any previously-pushed arguments that have not been popped yet. */
9791 do_pending_stack_adjust ()
9793 if (inhibit_defer_pop == 0)
9795 if (pending_stack_adjust != 0)
9796 adjust_stack (GEN_INT (pending_stack_adjust));
9797 pending_stack_adjust = 0;
9801 /* Defer the expansion all cleanups up to OLD_CLEANUPS.
9802 Returns the cleanups to be performed. */
9805 defer_cleanups_to (old_cleanups)
9808 tree new_cleanups = NULL_TREE;
9809 tree cleanups = cleanups_this_call;
9810 tree last = NULL_TREE;
9812 while (cleanups_this_call != old_cleanups)
9814 expand_eh_region_end (TREE_VALUE (cleanups_this_call));
9815 last = cleanups_this_call;
9816 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
9821 /* Remove the list from the chain of cleanups. */
9822 TREE_CHAIN (last) = NULL_TREE;
9824 /* reverse them so that we can build them in the right order. */
9825 cleanups = nreverse (cleanups);
9827 /* All cleanups must be on the function_obstack. */
9828 push_obstacks_nochange ();
9829 resume_temporary_allocation ();
9834 new_cleanups = build (COMPOUND_EXPR, TREE_TYPE (new_cleanups),
9835 TREE_VALUE (cleanups), new_cleanups);
9837 new_cleanups = TREE_VALUE (cleanups);
9839 cleanups = TREE_CHAIN (cleanups);
9845 return new_cleanups;
9848 /* Expand all cleanups up to OLD_CLEANUPS.
9849 Needed here, and also for language-dependent calls. */
9852 expand_cleanups_to (old_cleanups)
9855 while (cleanups_this_call != old_cleanups)
9857 expand_eh_region_end (TREE_VALUE (cleanups_this_call));
9858 expand_expr (TREE_VALUE (cleanups_this_call), const0_rtx, VOIDmode, 0);
9859 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
9863 /* Expand conditional expressions. */
9865 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9866 LABEL is an rtx of code CODE_LABEL, in this function and all the
9870 jumpifnot (exp, label)
9874 do_jump (exp, label, NULL_RTX);
9877 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9884 do_jump (exp, NULL_RTX, label);
9887 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9888 the result is zero, or IF_TRUE_LABEL if the result is one.
9889 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9890 meaning fall through in that case.
9892 do_jump always does any pending stack adjust except when it does not
9893 actually perform a jump. An example where there is no jump
9894 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9896 This function is responsible for optimizing cases such as
9897 &&, || and comparison operators in EXP. */
9900 do_jump (exp, if_false_label, if_true_label)
9902 rtx if_false_label, if_true_label;
9904 register enum tree_code code = TREE_CODE (exp);
9905 /* Some cases need to create a label to jump to
9906 in order to properly fall through.
9907 These cases set DROP_THROUGH_LABEL nonzero. */
9908 rtx drop_through_label = 0;
9913 enum machine_mode mode;
9923 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9929 /* This is not true with #pragma weak */
9931 /* The address of something can never be zero. */
9933 emit_jump (if_true_label);
9938 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9939 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9940 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9943 /* If we are narrowing the operand, we have to do the compare in the
9945 if ((TYPE_PRECISION (TREE_TYPE (exp))
9946 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9948 case NON_LVALUE_EXPR:
9949 case REFERENCE_EXPR:
9954 /* These cannot change zero->non-zero or vice versa. */
9955 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9959 /* This is never less insns than evaluating the PLUS_EXPR followed by
9960 a test and can be longer if the test is eliminated. */
9962 /* Reduce to minus. */
9963 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9964 TREE_OPERAND (exp, 0),
9965 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9966 TREE_OPERAND (exp, 1))));
9967 /* Process as MINUS. */
9971 /* Non-zero iff operands of minus differ. */
9972 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
9973 TREE_OPERAND (exp, 0),
9974 TREE_OPERAND (exp, 1)),
9979 /* If we are AND'ing with a small constant, do this comparison in the
9980 smallest type that fits. If the machine doesn't have comparisons
9981 that small, it will be converted back to the wider comparison.
9982 This helps if we are testing the sign bit of a narrower object.
9983 combine can't do this for us because it can't know whether a
9984 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9986 if (! SLOW_BYTE_ACCESS
9987 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9988 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9989 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
9990 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9991 && (type = type_for_mode (mode, 1)) != 0
9992 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9993 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9994 != CODE_FOR_nothing))
9996 do_jump (convert (type, exp), if_false_label, if_true_label);
10001 case TRUTH_NOT_EXPR:
10002 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10005 case TRUTH_ANDIF_EXPR:
10008 tree cleanups, old_cleanups;
10010 if (if_false_label == 0)
10011 if_false_label = drop_through_label = gen_label_rtx ();
10013 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
10014 seq1 = get_insns ();
10017 old_cleanups = cleanups_this_call;
10019 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10020 seq2 = get_insns ();
10021 cleanups = defer_cleanups_to (old_cleanups);
10026 rtx flag = gen_reg_rtx (word_mode);
10030 /* Flag cleanups as not needed. */
10031 emit_move_insn (flag, const0_rtx);
10034 /* Flag cleanups as needed. */
10035 emit_move_insn (flag, const1_rtx);
10038 /* All cleanups must be on the function_obstack. */
10039 push_obstacks_nochange ();
10040 resume_temporary_allocation ();
10042 /* convert flag, which is an rtx, into a tree. */
10043 cond = make_node (RTL_EXPR);
10044 TREE_TYPE (cond) = integer_type_node;
10045 RTL_EXPR_RTL (cond) = flag;
10046 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
10047 cond = save_expr (cond);
10049 new_cleanups = build (COND_EXPR, void_type_node,
10050 truthvalue_conversion (cond),
10051 cleanups, integer_zero_node);
10052 new_cleanups = fold (new_cleanups);
10056 /* Now add in the conditionalized cleanups. */
10058 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
10059 expand_eh_region_start ();
10069 case TRUTH_ORIF_EXPR:
10072 tree cleanups, old_cleanups;
10074 if (if_true_label == 0)
10075 if_true_label = drop_through_label = gen_label_rtx ();
10077 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
10078 seq1 = get_insns ();
10081 old_cleanups = cleanups_this_call;
10083 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10084 seq2 = get_insns ();
10085 cleanups = defer_cleanups_to (old_cleanups);
10090 rtx flag = gen_reg_rtx (word_mode);
10094 /* Flag cleanups as not needed. */
10095 emit_move_insn (flag, const0_rtx);
10098 /* Flag cleanups as needed. */
10099 emit_move_insn (flag, const1_rtx);
10102 /* All cleanups must be on the function_obstack. */
10103 push_obstacks_nochange ();
10104 resume_temporary_allocation ();
10106 /* convert flag, which is an rtx, into a tree. */
10107 cond = make_node (RTL_EXPR);
10108 TREE_TYPE (cond) = integer_type_node;
10109 RTL_EXPR_RTL (cond) = flag;
10110 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
10111 cond = save_expr (cond);
10113 new_cleanups = build (COND_EXPR, void_type_node,
10114 truthvalue_conversion (cond),
10115 cleanups, integer_zero_node);
10116 new_cleanups = fold (new_cleanups);
10120 /* Now add in the conditionalized cleanups. */
10122 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
10123 expand_eh_region_start ();
10133 case COMPOUND_EXPR:
10134 push_temp_slots ();
10135 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
10136 preserve_temp_slots (NULL_RTX);
10137 free_temp_slots ();
10140 do_pending_stack_adjust ();
10141 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10144 case COMPONENT_REF:
10145 case BIT_FIELD_REF:
10148 int bitsize, bitpos, unsignedp;
10149 enum machine_mode mode;
10154 /* Get description of this reference. We don't actually care
10155 about the underlying object here. */
10156 get_inner_reference (exp, &bitsize, &bitpos, &offset,
10157 &mode, &unsignedp, &volatilep);
10159 type = type_for_size (bitsize, unsignedp);
10160 if (! SLOW_BYTE_ACCESS
10161 && type != 0 && bitsize >= 0
10162 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10163 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10164 != CODE_FOR_nothing))
10166 do_jump (convert (type, exp), if_false_label, if_true_label);
10173 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10174 if (integer_onep (TREE_OPERAND (exp, 1))
10175 && integer_zerop (TREE_OPERAND (exp, 2)))
10176 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10178 else if (integer_zerop (TREE_OPERAND (exp, 1))
10179 && integer_onep (TREE_OPERAND (exp, 2)))
10180 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10185 tree cleanups_left_side, cleanups_right_side, old_cleanups;
10187 register rtx label1 = gen_label_rtx ();
10188 drop_through_label = gen_label_rtx ();
10190 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10192 /* We need to save the cleanups for the lhs and rhs separately.
10193 Keep track of the cleanups seen before the lhs. */
10194 old_cleanups = cleanups_this_call;
10196 /* Now the THEN-expression. */
10197 do_jump (TREE_OPERAND (exp, 1),
10198 if_false_label ? if_false_label : drop_through_label,
10199 if_true_label ? if_true_label : drop_through_label);
10200 /* In case the do_jump just above never jumps. */
10201 do_pending_stack_adjust ();
10202 emit_label (label1);
10203 seq1 = get_insns ();
10204 /* Now grab the cleanups for the lhs. */
10205 cleanups_left_side = defer_cleanups_to (old_cleanups);
10208 /* And keep track of where we start before the rhs. */
10209 old_cleanups = cleanups_this_call;
10211 /* Now the ELSE-expression. */
10212 do_jump (TREE_OPERAND (exp, 2),
10213 if_false_label ? if_false_label : drop_through_label,
10214 if_true_label ? if_true_label : drop_through_label);
10215 seq2 = get_insns ();
10216 /* Grab the cleanups for the rhs. */
10217 cleanups_right_side = defer_cleanups_to (old_cleanups);
10220 if (cleanups_left_side || cleanups_right_side)
10222 /* Make the cleanups for the THEN and ELSE clauses
10223 conditional based on which half is executed. */
10224 rtx flag = gen_reg_rtx (word_mode);
10228 /* Set the flag to 0 so that we know we executed the lhs. */
10229 emit_move_insn (flag, const0_rtx);
10232 /* Set the flag to 1 so that we know we executed the rhs. */
10233 emit_move_insn (flag, const1_rtx);
10236 /* Make sure the cleanup lives on the function_obstack. */
10237 push_obstacks_nochange ();
10238 resume_temporary_allocation ();
10240 /* Now, build up a COND_EXPR that tests the value of the
10241 flag, and then either do the cleanups for the lhs or the
10243 cond = make_node (RTL_EXPR);
10244 TREE_TYPE (cond) = integer_type_node;
10245 RTL_EXPR_RTL (cond) = flag;
10246 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
10247 cond = save_expr (cond);
10249 new_cleanups = build (COND_EXPR, void_type_node,
10250 truthvalue_conversion (cond),
10251 cleanups_right_side, cleanups_left_side);
10252 new_cleanups = fold (new_cleanups);
10256 /* Now add in the conditionalized cleanups. */
10258 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
10259 expand_eh_region_start ();
10263 /* No cleanups were needed, so emit the two sequences
10273 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10275 if (integer_zerop (TREE_OPERAND (exp, 1)))
10276 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10277 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10278 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10281 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10282 fold (build (EQ_EXPR, TREE_TYPE (exp),
10283 fold (build1 (REALPART_EXPR,
10284 TREE_TYPE (inner_type),
10285 TREE_OPERAND (exp, 0))),
10286 fold (build1 (REALPART_EXPR,
10287 TREE_TYPE (inner_type),
10288 TREE_OPERAND (exp, 1))))),
10289 fold (build (EQ_EXPR, TREE_TYPE (exp),
10290 fold (build1 (IMAGPART_EXPR,
10291 TREE_TYPE (inner_type),
10292 TREE_OPERAND (exp, 0))),
10293 fold (build1 (IMAGPART_EXPR,
10294 TREE_TYPE (inner_type),
10295 TREE_OPERAND (exp, 1))))))),
10296 if_false_label, if_true_label);
10297 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10298 && !can_compare_p (TYPE_MODE (inner_type)))
10299 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10301 comparison = compare (exp, EQ, EQ);
10307 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10309 if (integer_zerop (TREE_OPERAND (exp, 1)))
10310 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10311 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10312 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10315 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10316 fold (build (NE_EXPR, TREE_TYPE (exp),
10317 fold (build1 (REALPART_EXPR,
10318 TREE_TYPE (inner_type),
10319 TREE_OPERAND (exp, 0))),
10320 fold (build1 (REALPART_EXPR,
10321 TREE_TYPE (inner_type),
10322 TREE_OPERAND (exp, 1))))),
10323 fold (build (NE_EXPR, TREE_TYPE (exp),
10324 fold (build1 (IMAGPART_EXPR,
10325 TREE_TYPE (inner_type),
10326 TREE_OPERAND (exp, 0))),
10327 fold (build1 (IMAGPART_EXPR,
10328 TREE_TYPE (inner_type),
10329 TREE_OPERAND (exp, 1))))))),
10330 if_false_label, if_true_label);
10331 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10332 && !can_compare_p (TYPE_MODE (inner_type)))
10333 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10335 comparison = compare (exp, NE, NE);
10340 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10342 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10343 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10345 comparison = compare (exp, LT, LTU);
10349 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10351 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10352 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10354 comparison = compare (exp, LE, LEU);
10358 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10360 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10361 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10363 comparison = compare (exp, GT, GTU);
10367 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10369 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10370 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10372 comparison = compare (exp, GE, GEU);
10377 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10379 /* This is not needed any more and causes poor code since it causes
10380 comparisons and tests from non-SI objects to have different code
10382 /* Copy to register to avoid generating bad insns by cse
10383 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10384 if (!cse_not_expected && GET_CODE (temp) == MEM)
10385 temp = copy_to_reg (temp);
10387 do_pending_stack_adjust ();
10388 if (GET_CODE (temp) == CONST_INT)
10389 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10390 else if (GET_CODE (temp) == LABEL_REF)
10391 comparison = const_true_rtx;
10392 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10393 && !can_compare_p (GET_MODE (temp)))
10394 /* Note swapping the labels gives us not-equal. */
10395 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10396 else if (GET_MODE (temp) != VOIDmode)
10397 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
10398 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10399 GET_MODE (temp), NULL_RTX, 0);
10404 /* Do any postincrements in the expression that was tested. */
10407 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10408 straight into a conditional jump instruction as the jump condition.
10409 Otherwise, all the work has been done already. */
10411 if (comparison == const_true_rtx)
10414 emit_jump (if_true_label);
10416 else if (comparison == const0_rtx)
10418 if (if_false_label)
10419 emit_jump (if_false_label);
10421 else if (comparison)
10422 do_jump_for_compare (comparison, if_false_label, if_true_label);
10424 if (drop_through_label)
10426 /* If do_jump produces code that might be jumped around,
10427 do any stack adjusts from that code, before the place
10428 where control merges in. */
10429 do_pending_stack_adjust ();
10430 emit_label (drop_through_label);
10434 /* Given a comparison expression EXP for values too wide to be compared
10435 with one insn, test the comparison and jump to the appropriate label.
10436 The code of EXP is ignored; we always test GT if SWAP is 0,
10437 and LT if SWAP is 1. */
10440 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10443 rtx if_false_label, if_true_label;
10445 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10446 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10447 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10448 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10449 rtx drop_through_label = 0;
10450 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10453 if (! if_true_label || ! if_false_label)
10454 drop_through_label = gen_label_rtx ();
10455 if (! if_true_label)
10456 if_true_label = drop_through_label;
10457 if (! if_false_label)
10458 if_false_label = drop_through_label;
10460 /* Compare a word at a time, high order first. */
10461 for (i = 0; i < nwords; i++)
10464 rtx op0_word, op1_word;
10466 if (WORDS_BIG_ENDIAN)
10468 op0_word = operand_subword_force (op0, i, mode);
10469 op1_word = operand_subword_force (op1, i, mode);
10473 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10474 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10477 /* All but high-order word must be compared as unsigned. */
10478 comp = compare_from_rtx (op0_word, op1_word,
10479 (unsignedp || i > 0) ? GTU : GT,
10480 unsignedp, word_mode, NULL_RTX, 0);
10481 if (comp == const_true_rtx)
10482 emit_jump (if_true_label);
10483 else if (comp != const0_rtx)
10484 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10486 /* Consider lower words only if these are equal. */
10487 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10489 if (comp == const_true_rtx)
10490 emit_jump (if_false_label);
10491 else if (comp != const0_rtx)
10492 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10495 if (if_false_label)
10496 emit_jump (if_false_label);
10497 if (drop_through_label)
10498 emit_label (drop_through_label);
10501 /* Compare OP0 with OP1, word at a time, in mode MODE.
10502 UNSIGNEDP says to do unsigned comparison.
10503 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10506 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10507 enum machine_mode mode;
10510 rtx if_false_label, if_true_label;
10512 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10513 rtx drop_through_label = 0;
10516 if (! if_true_label || ! if_false_label)
10517 drop_through_label = gen_label_rtx ();
10518 if (! if_true_label)
10519 if_true_label = drop_through_label;
10520 if (! if_false_label)
10521 if_false_label = drop_through_label;
10523 /* Compare a word at a time, high order first. */
10524 for (i = 0; i < nwords; i++)
10527 rtx op0_word, op1_word;
10529 if (WORDS_BIG_ENDIAN)
10531 op0_word = operand_subword_force (op0, i, mode);
10532 op1_word = operand_subword_force (op1, i, mode);
10536 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10537 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10540 /* All but high-order word must be compared as unsigned. */
10541 comp = compare_from_rtx (op0_word, op1_word,
10542 (unsignedp || i > 0) ? GTU : GT,
10543 unsignedp, word_mode, NULL_RTX, 0);
10544 if (comp == const_true_rtx)
10545 emit_jump (if_true_label);
10546 else if (comp != const0_rtx)
10547 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10549 /* Consider lower words only if these are equal. */
10550 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10552 if (comp == const_true_rtx)
10553 emit_jump (if_false_label);
10554 else if (comp != const0_rtx)
10555 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10558 if (if_false_label)
10559 emit_jump (if_false_label);
10560 if (drop_through_label)
10561 emit_label (drop_through_label);
10564 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10565 with one insn, test the comparison and jump to the appropriate label. */
10568 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10570 rtx if_false_label, if_true_label;
10572 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10573 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10574 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10575 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10577 rtx drop_through_label = 0;
10579 if (! if_false_label)
10580 drop_through_label = if_false_label = gen_label_rtx ();
10582 for (i = 0; i < nwords; i++)
10584 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
10585 operand_subword_force (op1, i, mode),
10586 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10587 word_mode, NULL_RTX, 0);
10588 if (comp == const_true_rtx)
10589 emit_jump (if_false_label);
10590 else if (comp != const0_rtx)
10591 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10595 emit_jump (if_true_label);
10596 if (drop_through_label)
10597 emit_label (drop_through_label);
10600 /* Jump according to whether OP0 is 0.
10601 We assume that OP0 has an integer mode that is too wide
10602 for the available compare insns. */
10605 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10607 rtx if_false_label, if_true_label;
10609 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10611 rtx drop_through_label = 0;
10613 if (! if_false_label)
10614 drop_through_label = if_false_label = gen_label_rtx ();
10616 for (i = 0; i < nwords; i++)
10618 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
10620 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
10621 if (comp == const_true_rtx)
10622 emit_jump (if_false_label);
10623 else if (comp != const0_rtx)
10624 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10628 emit_jump (if_true_label);
10629 if (drop_through_label)
10630 emit_label (drop_through_label);
10633 /* Given a comparison expression in rtl form, output conditional branches to
10634 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
10637 do_jump_for_compare (comparison, if_false_label, if_true_label)
10638 rtx comparison, if_false_label, if_true_label;
10642 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10643 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
10647 if (if_false_label)
10648 emit_jump (if_false_label);
10650 else if (if_false_label)
10653 rtx prev = get_last_insn ();
10656 /* Output the branch with the opposite condition. Then try to invert
10657 what is generated. If more than one insn is a branch, or if the
10658 branch is not the last insn written, abort. If we can't invert
10659 the branch, emit make a true label, redirect this jump to that,
10660 emit a jump to the false label and define the true label. */
10662 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10663 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
10667 /* Here we get the first insn that was just emitted. It used to be the
10668 case that, on some machines, emitting the branch would discard
10669 the previous compare insn and emit a replacement. This isn't
10670 done anymore, but abort if we see that PREV is deleted. */
10673 insn = get_insns ();
10674 else if (INSN_DELETED_P (prev))
10677 insn = NEXT_INSN (prev);
10679 for (; insn; insn = NEXT_INSN (insn))
10680 if (GET_CODE (insn) == JUMP_INSN)
10687 if (branch != get_last_insn ())
10690 JUMP_LABEL (branch) = if_false_label;
10691 if (! invert_jump (branch, if_false_label))
10693 if_true_label = gen_label_rtx ();
10694 redirect_jump (branch, if_true_label);
10695 emit_jump (if_false_label);
10696 emit_label (if_true_label);
10701 /* Generate code for a comparison expression EXP
10702 (including code to compute the values to be compared)
10703 and set (CC0) according to the result.
10704 SIGNED_CODE should be the rtx operation for this comparison for
10705 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10707 We force a stack adjustment unless there are currently
10708 things pushed on the stack that aren't yet used. */
10711 compare (exp, signed_code, unsigned_code)
10713 enum rtx_code signed_code, unsigned_code;
10716 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10718 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10719 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
10720 register enum machine_mode mode = TYPE_MODE (type);
10721 int unsignedp = TREE_UNSIGNED (type);
10722 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
10724 #ifdef HAVE_canonicalize_funcptr_for_compare
10725 /* If function pointers need to be "canonicalized" before they can
10726 be reliably compared, then canonicalize them. */
10727 if (HAVE_canonicalize_funcptr_for_compare
10728 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10729 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10732 rtx new_op0 = gen_reg_rtx (mode);
10734 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10738 if (HAVE_canonicalize_funcptr_for_compare
10739 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10740 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10743 rtx new_op1 = gen_reg_rtx (mode);
10745 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10750 return compare_from_rtx (op0, op1, code, unsignedp, mode,
10752 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10753 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
10756 /* Like compare but expects the values to compare as two rtx's.
10757 The decision as to signed or unsigned comparison must be made by the caller.
10759 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10762 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10763 size of MODE should be used. */
10766 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10767 register rtx op0, op1;
10768 enum rtx_code code;
10770 enum machine_mode mode;
10776 /* If one operand is constant, make it the second one. Only do this
10777 if the other operand is not constant as well. */
10779 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10780 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10785 code = swap_condition (code);
10788 if (flag_force_mem)
10790 op0 = force_not_mem (op0);
10791 op1 = force_not_mem (op1);
10794 do_pending_stack_adjust ();
10796 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10797 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10801 /* There's no need to do this now that combine.c can eliminate lots of
10802 sign extensions. This can be less efficient in certain cases on other
10805 /* If this is a signed equality comparison, we can do it as an
10806 unsigned comparison since zero-extension is cheaper than sign
10807 extension and comparisons with zero are done as unsigned. This is
10808 the case even on machines that can do fast sign extension, since
10809 zero-extension is easier to combine with other operations than
10810 sign-extension is. If we are comparing against a constant, we must
10811 convert it to what it would look like unsigned. */
10812 if ((code == EQ || code == NE) && ! unsignedp
10813 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10815 if (GET_CODE (op1) == CONST_INT
10816 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10817 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10822 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10824 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
10827 /* Generate code to calculate EXP using a store-flag instruction
10828 and return an rtx for the result. EXP is either a comparison
10829 or a TRUTH_NOT_EXPR whose operand is a comparison.
10831 If TARGET is nonzero, store the result there if convenient.
10833 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10836 Return zero if there is no suitable set-flag instruction
10837 available on this machine.
10839 Once expand_expr has been called on the arguments of the comparison,
10840 we are committed to doing the store flag, since it is not safe to
10841 re-evaluate the expression. We emit the store-flag insn by calling
10842 emit_store_flag, but only expand the arguments if we have a reason
10843 to believe that emit_store_flag will be successful. If we think that
10844 it will, but it isn't, we have to simulate the store-flag with a
10845 set/jump/set sequence. */
10848 do_store_flag (exp, target, mode, only_cheap)
10851 enum machine_mode mode;
10854 enum rtx_code code;
10855 tree arg0, arg1, type;
10857 enum machine_mode operand_mode;
10861 enum insn_code icode;
10862 rtx subtarget = target;
10863 rtx result, label, pattern, jump_pat;
10865 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10866 result at the end. We can't simply invert the test since it would
10867 have already been inverted if it were valid. This case occurs for
10868 some floating-point comparisons. */
10870 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10871 invert = 1, exp = TREE_OPERAND (exp, 0);
10873 arg0 = TREE_OPERAND (exp, 0);
10874 arg1 = TREE_OPERAND (exp, 1);
10875 type = TREE_TYPE (arg0);
10876 operand_mode = TYPE_MODE (type);
10877 unsignedp = TREE_UNSIGNED (type);
10879 /* We won't bother with BLKmode store-flag operations because it would mean
10880 passing a lot of information to emit_store_flag. */
10881 if (operand_mode == BLKmode)
10884 /* We won't bother with store-flag operations involving function pointers
10885 when function pointers must be canonicalized before comparisons. */
10886 #ifdef HAVE_canonicalize_funcptr_for_compare
10887 if (HAVE_canonicalize_funcptr_for_compare
10888 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10889 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10891 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10892 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10893 == FUNCTION_TYPE))))
10900 /* Get the rtx comparison code to use. We know that EXP is a comparison
10901 operation of some type. Some comparisons against 1 and -1 can be
10902 converted to comparisons with zero. Do so here so that the tests
10903 below will be aware that we have a comparison with zero. These
10904 tests will not catch constants in the first operand, but constants
10905 are rarely passed as the first operand. */
10907 switch (TREE_CODE (exp))
10916 if (integer_onep (arg1))
10917 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10919 code = unsignedp ? LTU : LT;
10922 if (! unsignedp && integer_all_onesp (arg1))
10923 arg1 = integer_zero_node, code = LT;
10925 code = unsignedp ? LEU : LE;
10928 if (! unsignedp && integer_all_onesp (arg1))
10929 arg1 = integer_zero_node, code = GE;
10931 code = unsignedp ? GTU : GT;
10934 if (integer_onep (arg1))
10935 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10937 code = unsignedp ? GEU : GE;
10943 /* Put a constant second. */
10944 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10946 tem = arg0; arg0 = arg1; arg1 = tem;
10947 code = swap_condition (code);
10950 /* If this is an equality or inequality test of a single bit, we can
10951 do this by shifting the bit being tested to the low-order bit and
10952 masking the result with the constant 1. If the condition was EQ,
10953 we xor it with 1. This does not require an scc insn and is faster
10954 than an scc insn even if we have it. */
10956 if ((code == NE || code == EQ)
10957 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10958 && integer_pow2p (TREE_OPERAND (arg0, 1))
10959 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
10961 tree inner = TREE_OPERAND (arg0, 0);
10966 tem = INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
10967 NULL_RTX, VOIDmode, 0));
10968 /* In this case, immed_double_const will sign extend the value to make
10969 it look the same on the host and target. We must remove the
10970 sign-extension before calling exact_log2, since exact_log2 will
10971 fail for negative values. */
10972 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT
10973 && BITS_PER_WORD == GET_MODE_BITSIZE (TYPE_MODE (type)))
10974 /* We don't use the obvious constant shift to generate the mask,
10975 because that generates compiler warnings when BITS_PER_WORD is
10976 greater than or equal to HOST_BITS_PER_WIDE_INT, even though this
10977 code is unreachable in that case. */
10978 tem = tem & GET_MODE_MASK (word_mode);
10979 bitnum = exact_log2 (tem);
10981 /* If INNER is a right shift of a constant and it plus BITNUM does
10982 not overflow, adjust BITNUM and INNER. */
10984 if (TREE_CODE (inner) == RSHIFT_EXPR
10985 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10986 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10987 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
10988 < TYPE_PRECISION (type)))
10990 bitnum +=TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10991 inner = TREE_OPERAND (inner, 0);
10994 /* If we are going to be able to omit the AND below, we must do our
10995 operations as unsigned. If we must use the AND, we have a choice.
10996 Normally unsigned is faster, but for some machines signed is. */
10997 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10998 #ifdef LOAD_EXTEND_OP
10999 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
11005 if (subtarget == 0 || GET_CODE (subtarget) != REG
11006 || GET_MODE (subtarget) != operand_mode
11007 || ! safe_from_p (subtarget, inner))
11010 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
11013 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
11014 size_int (bitnum), subtarget, ops_unsignedp);
11016 if (GET_MODE (op0) != mode)
11017 op0 = convert_to_mode (mode, op0, ops_unsignedp);
11019 if ((code == EQ && ! invert) || (code == NE && invert))
11020 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
11021 ops_unsignedp, OPTAB_LIB_WIDEN);
11023 /* Put the AND last so it can combine with more things. */
11024 if (bitnum != TYPE_PRECISION (type) - 1)
11025 op0 = expand_and (op0, const1_rtx, subtarget);
11030 /* Now see if we are likely to be able to do this. Return if not. */
11031 if (! can_compare_p (operand_mode))
11033 icode = setcc_gen_code[(int) code];
11034 if (icode == CODE_FOR_nothing
11035 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
11037 /* We can only do this if it is one of the special cases that
11038 can be handled without an scc insn. */
11039 if ((code == LT && integer_zerop (arg1))
11040 || (! only_cheap && code == GE && integer_zerop (arg1)))
11042 else if (BRANCH_COST >= 0
11043 && ! only_cheap && (code == NE || code == EQ)
11044 && TREE_CODE (type) != REAL_TYPE
11045 && ((abs_optab->handlers[(int) operand_mode].insn_code
11046 != CODE_FOR_nothing)
11047 || (ffs_optab->handlers[(int) operand_mode].insn_code
11048 != CODE_FOR_nothing)))
11054 preexpand_calls (exp);
11055 if (subtarget == 0 || GET_CODE (subtarget) != REG
11056 || GET_MODE (subtarget) != operand_mode
11057 || ! safe_from_p (subtarget, arg1))
11060 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
11061 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
11064 target = gen_reg_rtx (mode);
11066 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
11067 because, if the emit_store_flag does anything it will succeed and
11068 OP0 and OP1 will not be used subsequently. */
11070 result = emit_store_flag (target, code,
11071 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
11072 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
11073 operand_mode, unsignedp, 1);
11078 result = expand_binop (mode, xor_optab, result, const1_rtx,
11079 result, 0, OPTAB_LIB_WIDEN);
11083 /* If this failed, we have to do this with set/compare/jump/set code. */
11084 if (GET_CODE (target) != REG
11085 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
11086 target = gen_reg_rtx (GET_MODE (target));
11088 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
11089 result = compare_from_rtx (op0, op1, code, unsignedp,
11090 operand_mode, NULL_RTX, 0);
11091 if (GET_CODE (result) == CONST_INT)
11092 return (((result == const0_rtx && ! invert)
11093 || (result != const0_rtx && invert))
11094 ? const0_rtx : const1_rtx);
11096 label = gen_label_rtx ();
11097 if (bcc_gen_fctn[(int) code] == 0)
11100 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
11101 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
11102 emit_label (label);
11107 /* Generate a tablejump instruction (used for switch statements). */
11109 #ifdef HAVE_tablejump
11111 /* INDEX is the value being switched on, with the lowest value
11112 in the table already subtracted.
11113 MODE is its expected mode (needed if INDEX is constant).
11114 RANGE is the length of the jump table.
11115 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11117 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11118 index value is out of range. */
11121 do_tablejump (index, mode, range, table_label, default_label)
11122 rtx index, range, table_label, default_label;
11123 enum machine_mode mode;
11125 register rtx temp, vector;
11127 /* Do an unsigned comparison (in the proper mode) between the index
11128 expression and the value which represents the length of the range.
11129 Since we just finished subtracting the lower bound of the range
11130 from the index expression, this comparison allows us to simultaneously
11131 check that the original index expression value is both greater than
11132 or equal to the minimum value of the range and less than or equal to
11133 the maximum value of the range. */
11135 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
11136 emit_jump_insn (gen_bgtu (default_label));
11138 /* If index is in range, it must fit in Pmode.
11139 Convert to Pmode so we can index with it. */
11141 index = convert_to_mode (Pmode, index, 1);
11143 /* Don't let a MEM slip thru, because then INDEX that comes
11144 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11145 and break_out_memory_refs will go to work on it and mess it up. */
11146 #ifdef PIC_CASE_VECTOR_ADDRESS
11147 if (flag_pic && GET_CODE (index) != REG)
11148 index = copy_to_mode_reg (Pmode, index);
11151 /* If flag_force_addr were to affect this address
11152 it could interfere with the tricky assumptions made
11153 about addresses that contain label-refs,
11154 which may be valid only very near the tablejump itself. */
11155 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11156 GET_MODE_SIZE, because this indicates how large insns are. The other
11157 uses should all be Pmode, because they are addresses. This code
11158 could fail if addresses and insns are not the same size. */
11159 index = gen_rtx (PLUS, Pmode,
11160 gen_rtx (MULT, Pmode, index,
11161 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
11162 gen_rtx (LABEL_REF, Pmode, table_label));
11163 #ifdef PIC_CASE_VECTOR_ADDRESS
11165 index = PIC_CASE_VECTOR_ADDRESS (index);
11168 index = memory_address_noforce (CASE_VECTOR_MODE, index);
11169 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11170 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
11171 RTX_UNCHANGING_P (vector) = 1;
11172 convert_move (temp, vector, 0);
11174 emit_jump_insn (gen_tablejump (temp, table_label));
11176 #ifndef CASE_VECTOR_PC_RELATIVE
11177 /* If we are generating PIC code or if the table is PC-relative, the
11178 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11184 #endif /* HAVE_tablejump */
11187 /* Emit a suitable bytecode to load a value from memory, assuming a pointer
11188 to that value is on the top of the stack. The resulting type is TYPE, and
11189 the source declaration is DECL. */
11192 bc_load_memory (type, decl)
11195 enum bytecode_opcode opcode;
11198 /* Bit fields are special. We only know about signed and
11199 unsigned ints, and enums. The latter are treated as
11200 signed integers. */
11202 if (DECL_BIT_FIELD (decl))
11203 if (TREE_CODE (type) == ENUMERAL_TYPE
11204 || TREE_CODE (type) == INTEGER_TYPE)
11205 opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
11209 /* See corresponding comment in bc_store_memory(). */
11210 if (TYPE_MODE (type) == BLKmode
11211 || TYPE_MODE (type) == VOIDmode)
11214 opcode = mode_to_load_map [(int) TYPE_MODE (type)];
11216 if (opcode == neverneverland)
11219 bc_emit_bytecode (opcode);
11221 #ifdef DEBUG_PRINT_CODE
11222 fputc ('\n', stderr);
11227 /* Store the contents of the second stack slot to the address in the
11228 top stack slot. DECL is the declaration of the destination and is used
11229 to determine whether we're dealing with a bitfield. */
11232 bc_store_memory (type, decl)
11235 enum bytecode_opcode opcode;
11238 if (DECL_BIT_FIELD (decl))
11240 if (TREE_CODE (type) == ENUMERAL_TYPE
11241 || TREE_CODE (type) == INTEGER_TYPE)
11247 if (TYPE_MODE (type) == BLKmode)
11249 /* Copy structure. This expands to a block copy instruction, storeBLK.
11250 In addition to the arguments expected by the other store instructions,
11251 it also expects a type size (SImode) on top of the stack, which is the
11252 structure size in size units (usually bytes). The two first arguments
11253 are already on the stack; so we just put the size on level 1. For some
11254 other languages, the size may be variable, this is why we don't encode
11255 it as a storeBLK literal, but rather treat it as a full-fledged expression. */
11257 bc_expand_expr (TYPE_SIZE (type));
11261 opcode = mode_to_store_map [(int) TYPE_MODE (type)];
11263 if (opcode == neverneverland)
11266 bc_emit_bytecode (opcode);
11268 #ifdef DEBUG_PRINT_CODE
11269 fputc ('\n', stderr);
11274 /* Allocate local stack space sufficient to hold a value of the given
11275 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
11276 integral power of 2. A special case is locals of type VOID, which
11277 have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
11278 remapped into the corresponding attribute of SI. */
11281 bc_allocate_local (size, alignment)
11282 int size, alignment;
11285 int byte_alignment;
11290 /* Normalize size and alignment */
11292 size = UNITS_PER_WORD;
11294 if (alignment < BITS_PER_UNIT)
11295 byte_alignment = 1 << (INT_ALIGN - 1);
11298 byte_alignment = alignment / BITS_PER_UNIT;
11300 if (local_vars_size & (byte_alignment - 1))
11301 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
11303 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
11304 local_vars_size += size;
11310 /* Allocate variable-sized local array. Variable-sized arrays are
11311 actually pointers to the address in memory where they are stored. */
11314 bc_allocate_variable_array (size)
11318 const int ptralign = (1 << (PTR_ALIGN - 1));
11320 /* Align pointer */
11321 if (local_vars_size & ptralign)
11322 local_vars_size += ptralign - (local_vars_size & ptralign);
11324 /* Note down local space needed: pointer to block; also return
11327 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
11328 local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
11333 /* Push the machine address for the given external variable offset. */
11336 bc_load_externaddr (externaddr)
11339 bc_emit_bytecode (constP);
11340 bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
11341 BYTECODE_BC_LABEL (externaddr)->offset);
11343 #ifdef DEBUG_PRINT_CODE
11344 fputc ('\n', stderr);
11349 /* Like above, but expects an IDENTIFIER. */
11352 bc_load_externaddr_id (id, offset)
11356 if (!IDENTIFIER_POINTER (id))
11359 bc_emit_bytecode (constP);
11360 bc_emit_code_labelref (xstrdup (IDENTIFIER_POINTER (id)), offset);
11362 #ifdef DEBUG_PRINT_CODE
11363 fputc ('\n', stderr);
11368 /* Push the machine address for the given local variable offset. */
11371 bc_load_localaddr (localaddr)
11374 bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
11378 /* Push the machine address for the given parameter offset.
11379 NOTE: offset is in bits. */
11382 bc_load_parmaddr (parmaddr)
11385 bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
11390 /* Convert a[i] into *(a + i). */
11393 bc_canonicalize_array_ref (exp)
11396 tree type = TREE_TYPE (exp);
11397 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
11398 TREE_OPERAND (exp, 0));
11399 tree index = TREE_OPERAND (exp, 1);
11402 /* Convert the integer argument to a type the same size as a pointer
11403 so the multiply won't overflow spuriously. */
11405 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
11406 index = convert (type_for_size (POINTER_SIZE, 0), index);
11408 /* The array address isn't volatile even if the array is.
11409 (Of course this isn't terribly relevant since the bytecode
11410 translator treats nearly everything as volatile anyway.) */
11411 TREE_THIS_VOLATILE (array_adr) = 0;
11413 return build1 (INDIRECT_REF, type,
11414 fold (build (PLUS_EXPR,
11415 TYPE_POINTER_TO (type),
11417 fold (build (MULT_EXPR,
11418 TYPE_POINTER_TO (type),
11420 size_in_bytes (type))))));
11424 /* Load the address of the component referenced by the given
11425 COMPONENT_REF expression.
11427 Returns innermost lvalue. */
11430 bc_expand_component_address (exp)
11434 enum machine_mode mode;
11436 HOST_WIDE_INT SIval;
11439 tem = TREE_OPERAND (exp, 1);
11440 mode = DECL_MODE (tem);
11443 /* Compute cumulative bit offset for nested component refs
11444 and array refs, and find the ultimate containing object. */
11446 for (tem = exp;; tem = TREE_OPERAND (tem, 0))
11448 if (TREE_CODE (tem) == COMPONENT_REF)
11449 bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
11451 if (TREE_CODE (tem) == ARRAY_REF
11452 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11453 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
11455 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
11456 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
11457 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
11462 bc_expand_expr (tem);
11465 /* For bitfields also push their offset and size */
11466 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
11467 bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1)));
11469 if (SIval = bitpos / BITS_PER_UNIT)
11470 bc_emit_instruction (addconstPSI, SIval);
11472 return (TREE_OPERAND (exp, 1));
11476 /* Emit code to push two SI constants */
11479 bc_push_offset_and_size (offset, size)
11480 HOST_WIDE_INT offset, size;
11482 bc_emit_instruction (constSI, offset);
11483 bc_emit_instruction (constSI, size);
11487 /* Emit byte code to push the address of the given lvalue expression to
11488 the stack. If it's a bit field, we also push offset and size info.
11490 Returns innermost component, which allows us to determine not only
11491 its type, but also whether it's a bitfield. */
11494 bc_expand_address (exp)
11498 if (!exp || TREE_CODE (exp) == ERROR_MARK)
11502 switch (TREE_CODE (exp))
11506 return (bc_expand_address (bc_canonicalize_array_ref (exp)));
11508 case COMPONENT_REF:
11510 return (bc_expand_component_address (exp));
11514 bc_expand_expr (TREE_OPERAND (exp, 0));
11516 /* For variable-sized types: retrieve pointer. Sometimes the
11517 TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
11518 also make sure we have an operand, just in case... */
11520 if (TREE_OPERAND (exp, 0)
11521 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
11522 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
11523 bc_emit_instruction (loadP);
11525 /* If packed, also return offset and size */
11526 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
11528 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
11529 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
11531 return (TREE_OPERAND (exp, 0));
11533 case FUNCTION_DECL:
11535 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
11536 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
11541 bc_load_parmaddr (DECL_RTL (exp));
11543 /* For variable-sized types: retrieve pointer */
11544 if (TYPE_SIZE (TREE_TYPE (exp))
11545 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11546 bc_emit_instruction (loadP);
11548 /* If packed, also return offset and size */
11549 if (DECL_BIT_FIELD (exp))
11550 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11551 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11557 bc_emit_instruction (returnP);
11563 if (BYTECODE_LABEL (DECL_RTL (exp)))
11564 bc_load_externaddr (DECL_RTL (exp));
11567 if (DECL_EXTERNAL (exp))
11568 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
11569 (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
11571 bc_load_localaddr (DECL_RTL (exp));
11573 /* For variable-sized types: retrieve pointer */
11574 if (TYPE_SIZE (TREE_TYPE (exp))
11575 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11576 bc_emit_instruction (loadP);
11578 /* If packed, also return offset and size */
11579 if (DECL_BIT_FIELD (exp))
11580 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11581 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11589 bc_emit_bytecode (constP);
11590 r = output_constant_def (exp);
11591 bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
11593 #ifdef DEBUG_PRINT_CODE
11594 fputc ('\n', stderr);
11605 /* Most lvalues don't have components. */
11610 /* Emit a type code to be used by the runtime support in handling
11611 parameter passing. The type code consists of the machine mode
11612 plus the minimal alignment shifted left 8 bits. */
11615 bc_runtime_type_code (type)
11620 switch (TREE_CODE (type))
11626 case ENUMERAL_TYPE:
11630 val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
11642 return build_int_2 (val, 0);
11646 /* Generate constructor label */
11649 bc_gen_constr_label ()
11651 static int label_counter;
11652 static char label[20];
11654 sprintf (label, "*LR%d", label_counter++);
11656 return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
11660 /* Evaluate constructor CONSTR and return pointer to it on level one. We
11661 expand the constructor data as static data, and push a pointer to it.
11662 The pointer is put in the pointer table and is retrieved by a constP
11663 bytecode instruction. We then loop and store each constructor member in
11664 the corresponding component. Finally, we return the original pointer on
11668 bc_expand_constructor (constr)
11672 HOST_WIDE_INT ptroffs;
11676 /* Literal constructors are handled as constants, whereas
11677 non-literals are evaluated and stored element by element
11678 into the data segment. */
11680 /* Allocate space in proper segment and push pointer to space on stack.
11683 l = bc_gen_constr_label ();
11685 if (TREE_CONSTANT (constr))
11689 bc_emit_const_labeldef (l);
11690 bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
11696 bc_emit_data_labeldef (l);
11697 bc_output_data_constructor (constr);
11701 /* Add reference to pointer table and recall pointer to stack;
11702 this code is common for both types of constructors: literals
11703 and non-literals. */
11705 ptroffs = bc_define_pointer (l);
11706 bc_emit_instruction (constP, ptroffs);
11708 /* This is all that has to be done if it's a literal. */
11709 if (TREE_CONSTANT (constr))
11713 /* At this point, we have the pointer to the structure on top of the stack.
11714 Generate sequences of store_memory calls for the constructor. */
11716 /* constructor type is structure */
11717 if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
11721 /* If the constructor has fewer fields than the structure,
11722 clear the whole structure first. */
11724 if (list_length (CONSTRUCTOR_ELTS (constr))
11725 != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
11727 bc_emit_instruction (duplicate);
11728 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11729 bc_emit_instruction (clearBLK);
11732 /* Store each element of the constructor into the corresponding
11733 field of TARGET. */
11735 for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
11737 register tree field = TREE_PURPOSE (elt);
11738 register enum machine_mode mode;
11743 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
11744 mode = DECL_MODE (field);
11745 unsignedp = TREE_UNSIGNED (field);
11747 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
11749 bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11750 /* The alignment of TARGET is
11751 at least what its type requires. */
11753 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11754 int_size_in_bytes (TREE_TYPE (constr)));
11759 /* Constructor type is array */
11760 if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
11764 tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
11765 int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
11766 int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
11767 tree elttype = TREE_TYPE (TREE_TYPE (constr));
11769 /* If the constructor has fewer fields than the structure,
11770 clear the whole structure first. */
11772 if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
11774 bc_emit_instruction (duplicate);
11775 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11776 bc_emit_instruction (clearBLK);
11780 /* Store each element of the constructor into the corresponding
11781 element of TARGET, determined by counting the elements. */
11783 for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
11785 elt = TREE_CHAIN (elt), i++)
11787 register enum machine_mode mode;
11792 mode = TYPE_MODE (elttype);
11793 bitsize = GET_MODE_BITSIZE (mode);
11794 unsignedp = TREE_UNSIGNED (elttype);
11796 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
11797 /* * TYPE_SIZE_UNIT (elttype) */ );
11799 bc_store_field (elt, bitsize, bitpos, mode,
11800 TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11801 /* The alignment of TARGET is
11802 at least what its type requires. */
11804 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11805 int_size_in_bytes (TREE_TYPE (constr)));
11812 /* Store the value of EXP (an expression tree) into member FIELD of
11813 structure at address on stack, which has type TYPE, mode MODE and
11814 occupies BITSIZE bits, starting BITPOS bits from the beginning of the
11817 ALIGN is the alignment that TARGET is known to have, measured in bytes.
11818 TOTAL_SIZE is its size in bytes, or -1 if variable. */
11821 bc_store_field (field, bitsize, bitpos, mode, exp, type,
11822 value_mode, unsignedp, align, total_size)
11823 int bitsize, bitpos;
11824 enum machine_mode mode;
11825 tree field, exp, type;
11826 enum machine_mode value_mode;
11832 /* Expand expression and copy pointer */
11833 bc_expand_expr (exp);
11834 bc_emit_instruction (over);
11837 /* If the component is a bit field, we cannot use addressing to access
11838 it. Use bit-field techniques to store in it. */
11840 if (DECL_BIT_FIELD (field))
11842 bc_store_bit_field (bitpos, bitsize, unsignedp);
11846 /* Not bit field */
11848 HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;
11850 /* Advance pointer to the desired member */
11852 bc_emit_instruction (addconstPSI, offset);
11855 bc_store_memory (type, field);
11860 /* Store SI/SU in bitfield */
11863 bc_store_bit_field (offset, size, unsignedp)
11864 int offset, size, unsignedp;
11866 /* Push bitfield offset and size */
11867 bc_push_offset_and_size (offset, size);
11870 bc_emit_instruction (sstoreBI);
11874 /* Load SI/SU from bitfield */
11877 bc_load_bit_field (offset, size, unsignedp)
11878 int offset, size, unsignedp;
11880 /* Push bitfield offset and size */
11881 bc_push_offset_and_size (offset, size);
11883 /* Load: sign-extend if signed, else zero-extend */
11884 bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
11888 /* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
11889 (adjust stack pointer upwards), negative means add that number of
11890 levels (adjust the stack pointer downwards). Only positive values
11891 normally make sense. */
11894 bc_adjust_stack (nlevels)
11903 bc_emit_instruction (drop);
11906 bc_emit_instruction (drop);
11911 bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
11912 stack_depth -= nlevels;
11915 #if defined (VALIDATE_STACK_FOR_BC)
11916 VALIDATE_STACK_FOR_BC ();