/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92, 93, 94, 95, 96, 1997 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "hard-reg-set.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
#include "typeclass.h"
#include "bc-opcode.h"
#include "bc-typecd.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))
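
/* For example, later in this file the number of words needed to hold
   a value of mode MODE is computed as

     CEIL (GET_MODE_SIZE (mode), UNITS_PER_WORD)

   which rounds the byte size up to a whole number of words.  */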
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;
/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces

  int explicit_inc_from;

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces

/* Used to generate bytecodes: keep track of size of local variables,
   as well as depth of arithmetic stack.  (Notice that variables are
   stored on the machine's stack, not the arithmetic stack.)  */

static rtx get_push_address PROTO ((int));
extern int local_vars_size;
extern int stack_depth;
extern int max_stack_depth;
extern struct obstack permanent_obstack;
extern rtx arg_pointer_save_area;
static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue PROTO((void));
static void move_by_pieces PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
				    struct move_by_pieces *));
static void clear_by_pieces PROTO((rtx, int, int));
static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
				     struct clear_by_pieces *));
static int is_zeros_p PROTO((tree));
static int mostly_zeros_p PROTO((tree));
static void store_constructor PROTO((tree, rtx, int));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
			      enum machine_mode, int, int, int));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree));
static int fixed_type_p PROTO((tree));
static rtx var_rtx PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx,
				 enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int, int));
void bc_expand_increment PROTO((struct increment_operator *, tree));
rtx bc_allocate_local PROTO((int, int));
void bc_store_memory PROTO((tree, tree));
tree bc_expand_component_address PROTO((tree));
tree bc_expand_address PROTO((tree));
void bc_expand_constructor PROTO((tree));
void bc_adjust_stack PROTO((int));
tree bc_canonicalize_array_ref PROTO((tree));
void bc_load_memory PROTO((tree, tree));
void bc_load_externaddr PROTO((rtx));
void bc_load_externaddr_id PROTO((tree, int));
void bc_load_localaddr PROTO((rtx));
void bc_load_parmaddr PROTO((rtx));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
extern tree truthvalue_conversion PROTO((tree));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif

#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif

/* Maps used to convert modes to const, load, and store bytecodes.  */
enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
/* Initialize maps used to convert modes to const, load, and store
   bytecodes.  */

void
bc_init_mode_to_opcode_maps ()
{
  int mode;

  for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
    mode_to_const_map[mode]
      = mode_to_load_map[mode]
	= mode_to_store_map[mode] = neverneverland;

#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
  mode_to_const_map[(int) SYM] = CONST; \
  mode_to_load_map[(int) SYM] = LOAD; \
  mode_to_store_map[(int) SYM] = STORE;

#include "modemap.def"
}
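
/* For illustration only (the opcode names below are placeholders, not
   actual modemap.def entries): a line of the shape

     DEF_MODEMAP (SImode, SIcode, USIcode, constSI, loadSI, storeSI)

   in modemap.def would expand, under the definition above, to

     mode_to_const_map[(int) SImode] = constSI;
     mode_to_load_map[(int) SImode] = loadSI;
     mode_to_store_map[(int) SImode] = storeSI;

   so each line of that file fills in one mode's slot in all three maps.  */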
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            reg = gen_rtx (REG, mode, regno);

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  end_sequence ();
}
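
/* For example, on a hypothetical target whose movqi pattern accepts

     (set (reg:QI r) (mem:QI (reg:SI sp)))

   the probe above recognizes that SET, so direct_load[(int) QImode]
   becomes 1 and QImode fields may later be accessed through QImode
   memory references directly.  */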
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}

/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
			   var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));

	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
	|| queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
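
#if 0
/* A sketch of how callers are expected to use the queue; illustrative
   only.  EXP here is assumed to be some tree whose expansion yields a
   QUEUED, and TARGET some destination rtx.  */
{
  rtx op = expand_expr (exp, NULL_RTX, VOIDmode, 0);
  op = protect_from_queue (op, 0);	/* never put a QUEUED in an insn */
  emit_move_insn (target, op);		/* use the protected value */
  emit_queue ();			/* now perform the queued increments */
}
#endif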
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}
#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;
	    }
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
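
#if 0
/* Illustrative only: widen a hypothetical SImode rtx NARROW to DImode
   with zero-extension.  The result may share NARROW's storage or be a
   fresh pseudo, so callers must not rely on which.  */
rtx wide = convert_to_mode (DImode, narrow, 1);
#endif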
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
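
/* For example, on a typical 32-bit target with MOVE_MAX == 4 and
   mov patterns for SImode and HImode, moving L == 10 bytes at
   ALIGN == 4 costs two SImode moves for the first 8 bytes plus one
   HImode move for the remainder: 3 insns in all.  */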
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PROTO ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : copy_rtx (change_address (data->to, mode,
					 plus_constant (data->to_addr,
							data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

      from1
	= (data->autinc_from
	   ? gen_rtx (MEM, mode, data->from_addr)
	   : copy_rtx (change_address (data->from, mode,
				       plus_constant (data->from_addr,
						      data->offset))));
      MEM_IN_STRUCT_P (from1) = data->from_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= GET_MODE_MASK (mode)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (x, 0), Pmode,
			 XEXP (y, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
    }
}
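
#if 0
/* Illustrative only: copy 64 bytes from SRC to DEST, where DEST and
   SRC are hypothetical BLKmode MEMs assumed to be word-aligned.  */
emit_block_move (dest, src, GEN_INT (64), UNITS_PER_WORD);
#endif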
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
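
#if 0
/* Illustrative only: load the two words of a DImode value X into
   consecutive hard registers 4 and 5 (register numbers hypothetical).  */
move_block_to_reg (4, x, 2, DImode);
#endif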
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
  rtx pat, last;
  enum machine_mode mode;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
    {
      emit_move_insn (change_address (x, mode, NULL),
		      gen_rtx (REG, mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx (REG, word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}

/* Emit code to move a block Y to a block X, where X is non-consecutive
   registers represented by a PARALLEL.  */

void
emit_group_load (x, y)
     rtx x, y;
{
  rtx target_reg, source;
  int i;

  if (GET_CODE (x) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (x, 0, 0), 0))
    i = 0;
  else
    i = 1;

  for (; i < XVECLEN (x, 0); i++)
    {
      rtx element = XVECEXP (x, 0, i);

      target_reg = XEXP (element, 0);

      if (GET_CODE (y) == MEM)
	source = change_address (y, GET_MODE (target_reg),
				 plus_constant (XEXP (y, 0),
						INTVAL (XEXP (element, 1))));
      else if (XEXP (element, 1) == const0_rtx)
	{
	  if (GET_MODE (target_reg) == GET_MODE (y))
	    source = y;
	  /* Allow for the target_reg to be smaller than the input register
	     to allow for AIX with 4 DF arguments after a single SI arg.  The
	     last DF argument will only load 1 word into the integer registers,
	     but load a DF value into the float registers.  */
	  else if ((GET_MODE_SIZE (GET_MODE (target_reg))
		    <= GET_MODE_SIZE (GET_MODE (y)))
		   && GET_MODE (target_reg) == word_mode)
	    /* This might be a const_double, so we can't just use SUBREG.  */
	    source = operand_subword (y, 0, 0, VOIDmode);
	  else if (GET_MODE_SIZE (GET_MODE (target_reg))
		   == GET_MODE_SIZE (GET_MODE (y)))
	    source = gen_lowpart (GET_MODE (target_reg), y);
	  else
	    abort ();
	}
      else
	abort ();

      emit_move_insn (target_reg, source);
    }
}
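
/* The PARALLEL used by emit_group_load and emit_group_store has the
   shape (register numbers here are illustrative):

     (parallel [(expr_list (reg:SI 4) (const_int 0))
		(expr_list (reg:SI 5) (const_int 4))])

   where each EXPR_LIST pairs a register with the byte offset of the
   piece it carries within the whole value.  */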
/* Emit code to move a block Y to a block X, where Y is non-consecutive
   registers represented by a PARALLEL.  */

void
emit_group_store (x, y)
     rtx x, y;
{
  rtx source_reg, target;
  int i;

  if (GET_CODE (y) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (y, 0, 0), 0))
    i = 0;
  else
    i = 1;

  for (; i < XVECLEN (y, 0); i++)
    {
      rtx element = XVECEXP (y, 0, i);

      source_reg = XEXP (element, 0);

      if (GET_CODE (x) == MEM)
	target = change_address (x, GET_MODE (source_reg),
				 plus_constant (XEXP (x, 0),
						INTVAL (XEXP (element, 1))));
      else if (XEXP (element, 1) == const0_rtx)
	{
	  target = x;
	  if (GET_MODE (target) != GET_MODE (source_reg))
	    target = gen_lowpart (GET_MODE (source_reg), target);
	}
      else
	abort ();

      emit_move_insn (target, source_reg);
    }
}

/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx (EXPR_LIST, VOIDmode,
	       gen_rtx (USE, VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (regs, 0, 0), 0))
    i = 0;
  else
    i = 1;

  for (; i < XVECLEN (regs, 0); i++)
    use_reg (call_fusage, XEXP (XVECEXP (regs, 0, i), 0));
}
/* Generate several move instructions to clear LEN bytes of block TO.
   (A MEM rtx with BLKmode).  The caller must pass TO through
   protect_from_queue before calling.  ALIGN (in bytes) is maximum alignment
   we can assume.  */

static void
clear_by_pieces (to, len, align)
     rtx to;
     int len, align;
{
  struct clear_by_pieces data;
  rtx to_addr = XEXP (to, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.to = to;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data.autinc_to
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	clear_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}
/* Subroutine of clear_by_pieces.  Clear as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
clear_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PROTO ((rtx, ...));
     enum machine_mode mode;
     struct clear_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : copy_rtx (change_address (data->to, mode,
					 plus_constant (data->to_addr,
							data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, const0_rtx));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
   the maximum alignment we can assume it has, measured in bytes.  */

void
clear_storage (object, size, align)
     rtx object;
     rtx size;
     int align;
{
  if (GET_MODE (object) == BLKmode)
    {
      object = protect_from_queue (object, 1);
      size = protect_from_queue (size, 0);

      if (GET_CODE (size) == CONST_INT
	  && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
	clear_by_pieces (object, INTVAL (size), align);
      else
	{
	  /* Try the most limited insn first, because there's no point
	     including more than one in the machine description unless
	     the more limited one has some advantage.  */

	  rtx opalign = GEN_INT (align);
	  enum machine_mode mode;

	  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	       mode = GET_MODE_WIDER_MODE (mode))
	    {
	      enum insn_code code = clrstr_optab[(int) mode];

	      if (code != CODE_FOR_nothing
		  /* We don't need MODE to be narrower than
		     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
		     the mode mask, as it is returned by the macro, it will
		     definitely be less than the actual mode mask.  */
		  && ((GET_CODE (size) == CONST_INT
		       && ((unsigned HOST_WIDE_INT) INTVAL (size)
			   <= GET_MODE_MASK (mode)))
		      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
		  && (insn_operand_predicate[(int) code][0] == 0
		      || (*insn_operand_predicate[(int) code][0]) (object,
								   BLKmode))
		  && (insn_operand_predicate[(int) code][2] == 0
		      || (*insn_operand_predicate[(int) code][2]) (opalign,
								   VOIDmode)))
		{
		  rtx op1;
		  rtx last = get_last_insn ();
		  rtx pat;

		  op1 = convert_to_mode (mode, size, 1);
		  if (insn_operand_predicate[(int) code][1] != 0
		      && ! (*insn_operand_predicate[(int) code][1]) (op1,
								     mode))
		    op1 = copy_to_mode_reg (mode, op1);

		  pat = GEN_FCN ((int) code) (object, op1, opalign);
		  if (pat)
		    {
		      emit_insn (pat);
		      return;
		    }
		  else
		    delete_insns_since (last);
		}
	    }

#ifdef TARGET_MEM_FUNCTIONS
	  emit_library_call (memset_libfunc, 0,
			     VOIDmode, 3,
			     XEXP (object, 0), Pmode,
			     const0_rtx, TYPE_MODE (integer_type_node),
			     convert_to_mode (TYPE_MODE (sizetype),
					      size, TREE_UNSIGNED (sizetype)),
			     TYPE_MODE (sizetype));
#else
	  emit_library_call (bzero_libfunc, 0,
			     VOIDmode, 2,
			     XEXP (object, 0), Pmode,
			     convert_to_mode (TYPE_MODE (integer_type_node),
					      size,
					      TREE_UNSIGNED (integer_type_node)),
			     TYPE_MODE (integer_type_node));
#endif
	}
    }
  else
    emit_move_insn (object, const0_rtx);
}
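
#if 0
/* Illustrative only: zero a 32-byte BLKmode MEM (BLK here is
   hypothetical) assumed to be at least word-aligned.  */
clear_storage (blk, GEN_INT (32), UNITS_PER_WORD);
#endif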
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  return emit_move_insn_1 (x, y);
}
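
#if 0
/* Illustrative only: the common way to load a constant into a fresh
   SImode pseudo.  */
{
  rtx reg = gen_reg_rtx (SImode);
  emit_move_insn (reg, GEN_INT (42));
}
#endif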
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
	   && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
						    * BITS_PER_UNIT),
						   (class == MODE_COMPLEX_INT
						    ? MODE_INT : MODE_FLOAT),
						   0))
	   && (mov_optab->handlers[(int) submode].insn_code
	       != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));

      if (stack)
	{
	  /* If this is a stack, push the highpart first, so it
	     will be in the argument order.

	     In that case, change_address is used only to convert
	     the mode, not to change the address.  */

	  /* Note that the real part always precedes the imag part in memory
	     regardless of machine's endianness.  */
#ifdef STACK_GROWS_DOWNWARD
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
#else
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));
#endif
	}
      else
	{
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_realpart (submode, x), gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_imagpart (submode, x), gen_imagpart (submode, y)));
	}

      return get_last_insn ();
    }
2269 /* This will handle any multi-word mode that lacks a move_insn pattern.
2270 However, you will get better code if you define such patterns,
2271 even if they must turn into multiple assembler instructions. */
2272 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2277 #ifdef PUSH_ROUNDING
2279 /* If X is a push on the stack, do the push now and replace
2280 X with a reference to the stack pointer. */
2281 if (push_operand (x, GET_MODE (x)))
2283 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2284 x = change_address (x, VOIDmode, stack_pointer_rtx);
2288 /* Show the output dies here. */
2290 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
2293 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2296 rtx xpart = operand_subword (x, i, 1, mode);
2297 rtx ypart = operand_subword (y, i, 1, mode);
2299 /* If we can't get a part of Y, put Y into memory if it is a
2300 constant. Otherwise, force it into a register. If we still
2301 can't get a part of Y, abort. */
2302 if (ypart == 0 && CONSTANT_P (y))
2304 y = force_const_mem (mode, y);
2305 ypart = operand_subword (y, i, 1, mode);
2307 else if (ypart == 0)
2308 ypart = operand_subword_force (y, i, mode);
2310 if (xpart == 0 || ypart == 0)
2313 last_insn = emit_move_insn (xpart, ypart);
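/* Example (illustrative, assuming a 32-bit target with no movdi
   pattern): a DImode move is decomposed by the loop above into two
   SImode word moves obtained through operand_subword.  */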
2322 /* Pushing data onto the stack. */
2324 /* Push a block of length SIZE (perhaps variable)
2325 and return an rtx to address the beginning of the block.
2326 Note that it is not possible for the value returned to be a QUEUED.
2327 The value may be virtual_outgoing_args_rtx.
2329 EXTRA is the number of bytes of padding to push in addition to SIZE.
2330 BELOW nonzero means this padding comes at low addresses;
2331 otherwise, the padding comes at high addresses. */
2334 push_block (size, extra, below)
2340 size = convert_modes (Pmode, ptr_mode, size, 1);
2341 if (CONSTANT_P (size))
2342 anti_adjust_stack (plus_constant (size, extra));
2343 else if (GET_CODE (size) == REG && extra == 0)
2344 anti_adjust_stack (size);
2347 rtx temp = copy_to_mode_reg (Pmode, size);
2349 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2350 temp, 0, OPTAB_LIB_WIDEN);
2351 anti_adjust_stack (temp);
2354 #ifdef STACK_GROWS_DOWNWARD
2355 temp = virtual_outgoing_args_rtx;
2356 if (extra != 0 && below)
2357 temp = plus_constant (temp, extra);
2359 if (GET_CODE (size) == CONST_INT)
2360 temp = plus_constant (virtual_outgoing_args_rtx,
2361 - INTVAL (size) - (below ? 0 : extra));
2362 else if (extra != 0 && !below)
2363 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2364 negate_rtx (Pmode, plus_constant (size, extra)));
2366 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2367 negate_rtx (Pmode, size));
2370 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
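/* Example (illustrative, assuming STACK_GROWS_DOWNWARD and a constant
   SIZE of 16 with EXTRA == 0): the stack is adjusted down by 16 bytes
   and virtual_outgoing_args_rtx, the low end of the new block, is
   returned.  */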
2376 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
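/* Note: with STACK_PUSH_CODE == PRE_DEC the rtx built above is
   (pre_dec (reg sp)), so wrapping it in a MEM yields an address whose
   use pushes onto the stack as a side effect of the store.  */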
2379 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2380 block of SIZE bytes. */
2383 get_push_address (size)
2388 if (STACK_PUSH_CODE == POST_DEC)
2389 temp = gen_rtx (PLUS, Pmode, stack_pointer_rtx, GEN_INT (size));
2390 else if (STACK_PUSH_CODE == POST_INC)
2391 temp = gen_rtx (MINUS, Pmode, stack_pointer_rtx, GEN_INT (size));
2393 temp = stack_pointer_rtx;
2395 return force_operand (temp, NULL_RTX);
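/* Note: with a POST_DEC push the stack pointer ends up SIZE bytes
   past the data once the push completes, so the block begins at
   sp + SIZE; with a pre-modify push code the stack pointer itself
   already addresses the block, hence the plain stack_pointer_rtx
   fallback.  */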
2398 /* Generate code to push X onto the stack, assuming it has mode MODE and
2400 MODE is redundant except when X is a CONST_INT (since they don't
2402 SIZE is an rtx for the size of data to be copied (in bytes),
2403 needed only if X is BLKmode.
2405 ALIGN (in bytes) is the maximum alignment we can assume.
2407 If PARTIAL and REG are both nonzero, then copy that many of the first
2408 words of X into registers starting with REG, and push the rest of X.
2409 The amount of space pushed is decreased by PARTIAL words,
2410 rounded *down* to a multiple of PARM_BOUNDARY.
2411 REG must be a hard register in this case.
2412 If REG is zero but PARTIAL is not, take all other actions for an
2413 argument partially in registers, but do not actually load any
2416 EXTRA is the amount in bytes of extra space to leave next to this arg.
2417 This is ignored if an argument block has already been allocated.
2419 On a machine that lacks real push insns, ARGS_ADDR is the address of
2420 the bottom of the argument block for this call. We use indexing off there
2421 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2422 argument block has not been preallocated.
2424 ARGS_SO_FAR is the size of args previously pushed for this call. */
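/* Example (illustrative, hypothetical values): pushing a 24-byte
   BLKmode argument with PARTIAL == 2 and 4-byte words sends the first
   8 bytes in registers starting at REG and stores only the remaining
   16 bytes on the stack, with the stack space consumed reduced
   accordingly.  */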
2427 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2428 args_addr, args_so_far)
2430 enum machine_mode mode;
2441 enum direction stack_direction
2442 #ifdef STACK_GROWS_DOWNWARD
2448 /* Decide where to pad the argument: `downward' for below,
2449 `upward' for above, or `none' for don't pad it.
2450 Default is below for small data on big-endian machines; else above. */
2451 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2453 /* Invert direction if stack is post-update. */
2454 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2455 if (where_pad != none)
2456 where_pad = (where_pad == downward ? upward : downward);
2458 xinner = x = protect_from_queue (x, 0);
2460 if (mode == BLKmode)
2462 /* Copy a block into the stack, entirely or partially. */
2465 int used = partial * UNITS_PER_WORD;
2466 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
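/* Example (illustrative, hypothetical values): with PARTIAL == 3,
   4-byte words and PARM_BOUNDARY == 64, USED is 12 bytes and OFFSET
   is 12 % 8 == 4, the misalignment of the register part relative to
   the parameter boundary.  */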
2474 /* USED is now the # of bytes we need not copy to the stack
2475 because registers will take care of them. */
2478 xinner = change_address (xinner, BLKmode,
2479 plus_constant (XEXP (xinner, 0), used));
2481 /* If the partial register-part of the arg counts in its stack size,
2482 skip the part of stack space corresponding to the registers.
2483 Otherwise, start copying to the beginning of the stack space,
2484 by setting SKIP to 0. */
2485 #ifndef REG_PARM_STACK_SPACE
2491 #ifdef PUSH_ROUNDING
2492 /* Do it with several push insns if that doesn't take lots of insns
2493 and if there is no difficulty with push insns that skip bytes
2494 on the stack for alignment purposes. */
2496 && GET_CODE (size) == CONST_INT
2498 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2500 /* Here we avoid the case of a structure whose weak alignment
2501 forces many pushes of a small amount of data,
2502 and such small pushes do rounding that causes trouble. */
2503 && ((! SLOW_UNALIGNED_ACCESS)
2504 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2505 || PUSH_ROUNDING (align) == align)
2506 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2508 /* Push padding now if padding above and stack grows down,
2509 or if padding below and stack grows up.
2510 But if space already allocated, this has already been done. */
2511 if (extra && args_addr == 0
2512 && where_pad != none && where_pad != stack_direction)
2513 anti_adjust_stack (GEN_INT (extra));
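/* Note: padding that lies on the far side of the data, relative to
   the direction of stack growth, must be allocated before the data
   is pushed; padding on the near side is added after the data, at
   the end of this function.  */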
2515 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2516 INTVAL (size) - used, align);
2518 if (flag_check_memory_usage)
2522 temp = get_push_address (INTVAL(size) - used);
2523 if (GET_CODE (x) == MEM && AGGREGATE_TYPE_P (type))
2524 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2526 XEXP (xinner, 0), ptr_mode,
2527 GEN_INT (INTVAL(size) - used),
2528 TYPE_MODE (sizetype));
2530 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2532 GEN_INT (INTVAL(size) - used),
2533 TYPE_MODE (sizetype),
2534 GEN_INT (MEMORY_USE_RW), QImode);
2538 #endif /* PUSH_ROUNDING */
2540 /* Otherwise make space on the stack and copy the data
2541 to the address of that space. */
2543 /* Deduct words put into registers from the size we must copy. */
2546 if (GET_CODE (size) == CONST_INT)
2547 size = GEN_INT (INTVAL (size) - used);
2549 size = expand_binop (GET_MODE (size), sub_optab, size,
2550 GEN_INT (used), NULL_RTX, 0,
2554 /* Get the address of the stack space.
2555 In this case, we do not deal with EXTRA separately.
2556 A single stack adjust will do. */
2559 temp = push_block (size, extra, where_pad == downward);
2562 else if (GET_CODE (args_so_far) == CONST_INT)
2563 temp = memory_address (BLKmode,
2564 plus_constant (args_addr,
2565 skip + INTVAL (args_so_far)));
2567 temp = memory_address (BLKmode,
2568 plus_constant (gen_rtx (PLUS, Pmode,
2569 args_addr, args_so_far),
2571 if (flag_check_memory_usage)
2575 target = copy_to_reg (temp);
2576 if (GET_CODE (x) == MEM && AGGREGATE_TYPE_P (type))
2577 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2579 XEXP (xinner, 0), ptr_mode,
2580 size, TYPE_MODE (sizetype));
2582 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2584 size, TYPE_MODE (sizetype),
2585 GEN_INT (MEMORY_USE_RW), QImode);
2588 /* TEMP is the address of the block. Copy the data there. */
2589 if (GET_CODE (size) == CONST_INT
2590 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2593 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2594 INTVAL (size), align);
2597 /* Try the most limited insn first, because there's no point
2598 including more than one in the machine description unless
2599 the more limited one has some advantage. */
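/* Note: besides requiring the pattern, each test below also requires
   the constant size to be representable as a nonnegative value in the
   pattern's count mode, which is what the
   (1 << (GET_MODE_BITSIZE (...) - 1)) bound checks.  */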
2600 #ifdef HAVE_movstrqi
2602 && GET_CODE (size) == CONST_INT
2603 && ((unsigned) INTVAL (size)
2604 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2606 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2607 xinner, size, GEN_INT (align));
2615 #ifdef HAVE_movstrhi
2617 && GET_CODE (size) == CONST_INT
2618 && ((unsigned) INTVAL (size)
2619 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2621 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2622 xinner, size, GEN_INT (align));
2630 #ifdef HAVE_movstrsi
2633 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2634 xinner, size, GEN_INT (align));
2642 #ifdef HAVE_movstrdi
2645 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2646 xinner, size, GEN_INT (align));
2655 #ifndef ACCUMULATE_OUTGOING_ARGS
2656 /* If the source is referenced relative to the stack pointer,
2657 copy it to another register to stabilize it. We do not need
2658 to do this if we know that we won't be changing sp. */
2660 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2661 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2662 temp = copy_to_reg (temp);
2665 /* Make inhibit_defer_pop nonzero around the library call
2666 to force it to pop the bcopy-arguments right away. */
2668 #ifdef TARGET_MEM_FUNCTIONS
2669 emit_library_call (memcpy_libfunc, 0,
2670 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2671 convert_to_mode (TYPE_MODE (sizetype),
2672 size, TREE_UNSIGNED (sizetype)),
2673 TYPE_MODE (sizetype));
2675 emit_library_call (bcopy_libfunc, 0,
2676 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2677 convert_to_mode (TYPE_MODE (integer_type_node),
2679 TREE_UNSIGNED (integer_type_node)),
2680 TYPE_MODE (integer_type_node));
2685 else if (partial > 0)
2687 /* Scalar partly in registers. */
2689 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2692 /* # words of start of argument
2693 that we must make space for but need not store. */
2694 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2695 int args_offset = INTVAL (args_so_far);
2698 /* Push padding now if padding above and stack grows down,
2699 or if padding below and stack grows up.
2700 But if space already allocated, this has already been done. */
2701 if (extra && args_addr == 0
2702 && where_pad != none && where_pad != stack_direction)
2703 anti_adjust_stack (GEN_INT (extra));
2705 /* If we make space by pushing it, we might as well push
2706 the real data. Otherwise, we can leave OFFSET nonzero
2707 and leave the space uninitialized. */
2711 /* Now NOT_STACK gets the number of words that we don't need to
2712 allocate on the stack. */
2713 not_stack = partial - offset;
2715 /* If the partial register-part of the arg counts in its stack size,
2716 skip the part of stack space corresponding to the registers.
2717 Otherwise, start copying to the beginning of the stack space,
2718 by setting SKIP to 0. */
2719 #ifndef REG_PARM_STACK_SPACE
2725 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2726 x = validize_mem (force_const_mem (mode, x));
2728 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2729 SUBREGs of such registers are not allowed. */
2730 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2731 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2732 x = copy_to_reg (x);
2734 /* Loop over all the words allocated on the stack for this arg. */
2735 /* We can do it by words, because any scalar bigger than a word
2736 has a size that is a multiple of a word. */
2737 #ifndef PUSH_ARGS_REVERSED
2738 for (i = not_stack; i < size; i++)
2740 for (i = size - 1; i >= not_stack; i--)
2742 if (i >= not_stack + offset)
2743 emit_push_insn (operand_subword_force (x, i, mode),
2744 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2746 GEN_INT (args_offset + ((i - not_stack + skip)
2747 * UNITS_PER_WORD)));
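/* Note: the loop direction mirrors PUSH_ARGS_REVERSED so that
   successive pushes place the words in memory in the required order;
   words below NOT_STACK + OFFSET are skipped because their space is
   either covered by registers or deliberately left uninitialized in a
   preallocated argument block.  */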
2752 rtx target = NULL_RTX;
2754 /* Push padding now if padding above and stack grows down,
2755 or if padding below and stack grows up.
2756 But if space already allocated, this has already been done. */
2757 if (extra && args_addr == 0
2758 && where_pad != none && where_pad != stack_direction)
2759 anti_adjust_stack (GEN_INT (extra));
2761 #ifdef PUSH_ROUNDING
2763 addr = gen_push_operand ();
2767 if (GET_CODE (args_so_far) == CONST_INT)
2769 = memory_address (mode,
2770 plus_constant (args_addr,
2771 INTVAL (args_so_far)));
2773 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2778 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2780 if (flag_check_memory_usage)
2783 target = get_push_address (GET_MODE_SIZE (mode));
2785 if (GET_CODE (x) == MEM && AGGREGATE_TYPE_P (type))
2786 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2788 XEXP (x, 0), ptr_mode,
2789 GEN_INT (GET_MODE_SIZE (mode)),
2790 TYPE_MODE (sizetype));
2792 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2794 GEN_INT (GET_MODE_SIZE (mode)),
2795 TYPE_MODE (sizetype),
2796 GEN_INT (MEMORY_USE_RW), QImode);
2801 /* If part should go in registers, copy that part
2802 into the appropriate registers. Do this now, at the end,
2803 since mem-to-mem copies above may do function calls. */
2804 if (partial > 0 && reg != 0)
2806 /* Handle calls that pass values in multiple non-contiguous locations.
2807 The Irix 6 ABI has examples of this. */
2808 if (GET_CODE (reg) == PARALLEL)
2809 emit_group_load (reg, x);
2811 move_block_to_reg (REGNO (reg), x, partial, mode);
2814 if (extra && args_addr == 0 && where_pad == stack_direction)
2815 anti_adjust_stack (GEN_INT (extra));
2818 /* Expand an assignment that stores the value of FROM into TO.
2819 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2820 (This may contain a QUEUED rtx;
2821 if the value is constant, this rtx is a constant.)
2822 Otherwise, the returned value is NULL_RTX.
2824 SUGGEST_REG is no longer actually used.
2825 It used to mean, copy the value through a register
2826 and return that register, if that is possible.
2827 We now use WANT_VALUE to decide whether to do this. */
2830 expand_assignment (to, from, want_value, suggest_reg)
2835 register rtx to_rtx = 0;
2838 /* Don't crash if the lhs of the assignment was erroneous. */
2840 if (TREE_CODE (to) == ERROR_MARK)
2842 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2843 return want_value ? result : NULL_RTX;
2846 if (output_bytecode)
2848 tree dest_innermost;
2850 bc_expand_expr (from);
2851 bc_emit_instruction (duplicate);
2853 dest_innermost = bc_expand_address (to);
2855 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2856 take care of it here. */
2858 bc_store_memory (TREE_TYPE (to), dest_innermost);
2862 /* Assignment of a structure component needs special treatment
2863 if the structure component's rtx is not simply a MEM.
2864 Assignment of an array element at a constant index, and assignment of
2865 an array element in an unaligned packed structure field, have the same
2868 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
2869 || TREE_CODE (to) == ARRAY_REF)
2871 enum machine_mode mode1;
2881 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
2882 &unsignedp, &volatilep, &alignment);
2884 /* If we are going to use store_bit_field and extract_bit_field,
2885 make sure to_rtx will be safe for multiple use. */
2887 if (mode1 == VOIDmode && want_value)
2888 tem = stabilize_reference (tem);
2890 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
2893 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2895 if (GET_CODE (to_rtx) != MEM)
2897 to_rtx = change_address (to_rtx, VOIDmode,
2898 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
2899 force_reg (ptr_mode, offset_rtx)));
2903 if (GET_CODE (to_rtx) == MEM)
2905 /* When the offset is zero, to_rtx is the address of the
2906 structure we are storing into, and hence may be shared.
2907 We must make a new MEM before setting the volatile bit. */
2909 to_rtx = copy_rtx (to_rtx);
2911 MEM_VOLATILE_P (to_rtx) = 1;
2913 #if 0 /* This was turned off because, when a field is volatile
2914 in an object which is not volatile, the object may be in a register,
2915 and then we would abort here. */
2921 /* Check the access. */
2922 if (flag_check_memory_usage && GET_CODE (to_rtx) == MEM)
2927 enum machine_mode best_mode;
2929 best_mode = get_best_mode (bitsize, bitpos,
2930 TYPE_ALIGN (TREE_TYPE (tem)),
2932 if (best_mode == VOIDmode)
2935 best_mode_size = GET_MODE_BITSIZE (best_mode);
2936 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
2937 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
2938 size *= GET_MODE_SIZE (best_mode);
2940 /* Check the access right of the pointer. */
2941 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3, to_addr,
2942 ptr_mode, GEN_INT (size), TYPE_MODE (sizetype),
2943 GEN_INT (MEMORY_USE_WO), QImode);
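/* Example (illustrative, hypothetical values): for BITPOS == 10 and
   BITSIZE == 13 accessed in a QImode best mode, TO_ADDR is the base
   plus 1 (10 / 8) and SIZE is CEIL ((10 % 8) + 13, 8) * 1 == 2, the
   span in bytes that the checker must mark writable.  */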
2946 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2948 /* Spurious cast makes HPUX compiler happy. */
2949 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2952 /* Required alignment of containing datum. */
2954 int_size_in_bytes (TREE_TYPE (tem)));
2955 preserve_temp_slots (result);
2959 /* If the value is meaningful, convert RESULT to the proper mode.
2960 Otherwise, return nothing. */
2961 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2962 TYPE_MODE (TREE_TYPE (from)),
2964 TREE_UNSIGNED (TREE_TYPE (to)))
2968 /* If the rhs is a function call and its value is not an aggregate,
2969 call the function before we start to compute the lhs.
2970 This is needed for correct code for cases such as
2971 val = setjmp (buf) on machines where reference to val
2972 requires loading up part of an address in a separate insn.
2974 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2975 a promoted variable where the zero- or sign- extension needs to be done.
2976 Handling this in the normal way is safe because no computation is done
2978 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2979 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
2980 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
2985 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2987 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
2989 /* Handle calls that return values in multiple non-contiguous locations.
2990 The Irix 6 ABI has examples of this. */
2991 if (GET_CODE (to_rtx) == PARALLEL)
2992 emit_group_load (to_rtx, value);
2993 else if (GET_MODE (to_rtx) == BLKmode)
2994 emit_block_move (to_rtx, value, expr_size (from),
2995 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
2997 emit_move_insn (to_rtx, value);
2998 preserve_temp_slots (to_rtx);
3001 return want_value ? to_rtx : NULL_RTX;
3004 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3005 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3008 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3010 /* Don't move directly into a return register. */
3011 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3016 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3017 emit_move_insn (to_rtx, temp);
3018 preserve_temp_slots (to_rtx);
3021 return want_value ? to_rtx : NULL_RTX;
3024 /* In case we are returning the contents of an object which overlaps
3025 the place the value is being stored, use a safe function when copying
3026 a value through a pointer into a structure value return block. */
3027 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3028 && current_function_returns_struct
3029 && !current_function_returns_pcc_struct)
3034 size = expr_size (from);
3035 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3036 EXPAND_MEMORY_USE_DONT);
3038 /* Copy the rights of the bitmap. */
3039 if (flag_check_memory_usage)
3040 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3041 XEXP (to_rtx, 0), ptr_mode,
3042 XEXP (from_rtx, 0), ptr_mode,
3043 convert_to_mode (TYPE_MODE (sizetype),
3044 size, TREE_UNSIGNED (sizetype)),
3045 TYPE_MODE (sizetype));
3047 #ifdef TARGET_MEM_FUNCTIONS
3048 emit_library_call (memcpy_libfunc, 0,
3049 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3050 XEXP (from_rtx, 0), Pmode,
3051 convert_to_mode (TYPE_MODE (sizetype),
3052 size, TREE_UNSIGNED (sizetype)),
3053 TYPE_MODE (sizetype));
3055 emit_library_call (bcopy_libfunc, 0,
3056 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3057 XEXP (to_rtx, 0), Pmode,
3058 convert_to_mode (TYPE_MODE (integer_type_node),
3059 size, TREE_UNSIGNED (integer_type_node)),
3060 TYPE_MODE (integer_type_node));
3063 preserve_temp_slots (to_rtx);
3066 return want_value ? to_rtx : NULL_RTX;
3069 /* Compute FROM and store the value in the rtx we got. */
3072 result = store_expr (from, to_rtx, want_value);
3073 preserve_temp_slots (result);
3076 return want_value ? result : NULL_RTX;
3079 /* Generate code for computing expression EXP,
3080 and storing the value into TARGET.
3081 TARGET may contain a QUEUED rtx.
3083 If WANT_VALUE is nonzero, return a copy of the value
3084 not in TARGET, so that we can be sure to use the proper
3085 value in a containing expression even if TARGET has something
3086 else stored in it. If possible, we copy the value through a pseudo
3087 and return that pseudo. Or, if the value is constant, we try to
3088 return the constant. In some cases, we return a pseudo
3089 copied *from* TARGET.
3091 If the mode is BLKmode then we may return TARGET itself.
3092 It turns out that in BLKmode it doesn't cause a problem,
3093 because C has no operators that could combine two different
3094 assignments into the same BLKmode object with different values
3095 with no sequence point. Will other languages need this to be more careful?
3098 If WANT_VALUE is 0, we return NULL, to make sure
3099 to catch quickly any cases where the caller uses the value
3100 and fails to set WANT_VALUE. */
3103 store_expr (exp, target, want_value)
3105 register rtx target;
3109 int dont_return_target = 0;
3111 if (TREE_CODE (exp) == COMPOUND_EXPR)
3113 /* Perform first part of compound expression, then assign from second
3115 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3117 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3119 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3121 /* For conditional expression, get safe form of the target. Then
3122 test the condition, doing the appropriate assignment on either
3123 side. This avoids the creation of unnecessary temporaries.
3124 For non-BLKmode, it is more efficient not to do this. */
3126 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3129 target = protect_from_queue (target, 1);
3131 do_pending_stack_adjust ();
3133 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3134 start_cleanup_deferal ();
3135 store_expr (TREE_OPERAND (exp, 1), target, 0);
3136 end_cleanup_deferal ();
3138 emit_jump_insn (gen_jump (lab2));
3141 start_cleanup_deferal ();
3142 store_expr (TREE_OPERAND (exp, 2), target, 0);
3143 end_cleanup_deferal ();
3148 return want_value ? target : NULL_RTX;
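/* Sketch of the code emitted by the COND_EXPR case above:

       jumpifnot <cond> -> lab1
       <store arm 1 into TARGET>
       jump lab2
   lab1:
       <store arm 2 into TARGET>
   lab2:

   Both arms store into the same queue-protected TARGET, so no
   temporary is needed for the result.  */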
3150 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3151 && GET_MODE (target) != BLKmode)
3152 /* If target is in memory and caller wants value in a register instead,
3153 arrange that. Pass TARGET as target for expand_expr so that,
3154 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3155 We know expand_expr will not use the target in that case.
3156 Don't do this if TARGET is volatile because we are supposed
3157 to write it and then read it. */
3159 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3160 GET_MODE (target), 0);
3161 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3162 temp = copy_to_reg (temp);
3163 dont_return_target = 1;
3165 else if (queued_subexp_p (target))
3166 /* If target contains a postincrement, let's not risk
3167 using it as the place to generate the rhs. */
3169 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3171 /* Expand EXP into a new pseudo. */
3172 temp = gen_reg_rtx (GET_MODE (target));
3173 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3176 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3178 /* If target is volatile, ANSI requires accessing the value
3179 *from* the target, if it is accessed. So make that happen.
3180 In no case return the target itself. */
3181 if (! MEM_VOLATILE_P (target) && want_value)
3182 dont_return_target = 1;
3184 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3185 /* If this is a scalar in a register that is stored in a wider mode
3186 than the declared mode, compute the result into its declared mode
3187 and then convert to the wider mode. Our value is the computed
3190 /* If we don't want a value, we can do the conversion inside EXP,
3191 which will often result in some optimizations. Do the conversion
3192 in two steps: first change the signedness, if needed, then
3193 the extend. But don't do this if the type of EXP is a subtype
3194 of something else since then the conversion might involve
3195 more than just converting modes. */
3196 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3197 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3199 if (TREE_UNSIGNED (TREE_TYPE (exp))
3200 != SUBREG_PROMOTED_UNSIGNED_P (target))
3203 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3207 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3208 SUBREG_PROMOTED_UNSIGNED_P (target)),
3212 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3214 /* If TEMP is a volatile MEM and we want a result value, make
3215 the access now so it gets done only once. Likewise if
3216 it contains TARGET. */
3217 if (GET_CODE (temp) == MEM && want_value
3218 && (MEM_VOLATILE_P (temp)
3219 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3220 temp = copy_to_reg (temp);
3222 /* If TEMP is a VOIDmode constant, use convert_modes to make
3223 sure that we properly convert it. */
3224 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3225 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3226 TYPE_MODE (TREE_TYPE (exp)), temp,
3227 SUBREG_PROMOTED_UNSIGNED_P (target));
3229 convert_move (SUBREG_REG (target), temp,
3230 SUBREG_PROMOTED_UNSIGNED_P (target));
3231 return want_value ? temp : NULL_RTX;
3235 temp = expand_expr (exp, target, GET_MODE (target), 0);
3236 /* Return TARGET if it's a specified hardware register.
3237 If TARGET is a volatile mem ref, either return TARGET
3238 or return a reg copied *from* TARGET; ANSI requires this.
3240 Otherwise, if TEMP is not TARGET, return TEMP
3241 if it is constant (for efficiency),
3242 or if we really want the correct value. */
3243 if (!(target && GET_CODE (target) == REG
3244 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3245 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3246 && ! rtx_equal_p (temp, target)
3247 && (CONSTANT_P (temp) || want_value))
3248 dont_return_target = 1;
3251 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3252 the same as that of TARGET, adjust the constant. This is needed, for
3253 example, in case it is a CONST_DOUBLE and we want only a word-sized
3255 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3256 && TREE_CODE (exp) != ERROR_MARK
3257 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3258 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3259 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3261 if (flag_check_memory_usage
3262 && GET_CODE (target) == MEM
3263 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3265 if (GET_CODE (temp) == MEM)
3266 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3267 XEXP (target, 0), ptr_mode,
3268 XEXP (temp, 0), ptr_mode,
3269 expr_size (exp), TYPE_MODE (sizetype));
3271 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3272 XEXP (target, 0), ptr_mode,
3273 expr_size (exp), TYPE_MODE (sizetype),
3274 GEN_INT (MEMORY_USE_WO), QImode);
3277 /* If value was not generated in the target, store it there.
3278 Convert the value to TARGET's type first if necessary. */
3280 if (! rtx_equal_p (temp, target) && TREE_CODE (exp) != ERROR_MARK)
3282 target = protect_from_queue (target, 1);
3283 if (GET_MODE (temp) != GET_MODE (target)
3284 && GET_MODE (temp) != VOIDmode)
3286 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3287 if (dont_return_target)
3289 /* In this case, we will return TEMP,
3290 so make sure it has the proper mode.
3291 But don't forget to store the value into TARGET. */
3292 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3293 emit_move_insn (target, temp);
3296 convert_move (target, temp, unsignedp);
3299 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3301 /* Handle copying a string constant into an array.
3302 The string constant may be shorter than the array.
3303 So copy just the string's actual length, and clear the rest. */
3307 /* Get the size of the data type of the string,
3308 which is actually the size of the target. */
3309 size = expr_size (exp);
3310 if (GET_CODE (size) == CONST_INT
3311 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3312 emit_block_move (target, temp, size,
3313 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3316 /* Compute the size of the data to copy from the string. */
3318 = size_binop (MIN_EXPR,
3319 make_tree (sizetype, size),
3321 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3322 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3326 /* Copy that much. */
3327 emit_block_move (target, temp, copy_size_rtx,
3328 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3330 /* Figure out how much is left in TARGET that we have to clear.
3331 Do all calculations in ptr_mode. */
3333 addr = XEXP (target, 0);
3334 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3336 if (GET_CODE (copy_size_rtx) == CONST_INT)
3338 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3339 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3343 addr = force_reg (ptr_mode, addr);
3344 addr = expand_binop (ptr_mode, add_optab, addr,
3345 copy_size_rtx, NULL_RTX, 0,
3348 size = expand_binop (ptr_mode, sub_optab, size,
3349 copy_size_rtx, NULL_RTX, 0,
3352 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
3353 GET_MODE (size), 0, 0);
3354 label = gen_label_rtx ();
3355 emit_jump_insn (gen_blt (label));
3358 if (size != const0_rtx)
3360 /* Be sure we can write on ADDR. */
3361 if (flag_check_memory_usage)
3362 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3364 size, TYPE_MODE (sizetype),
3365 GEN_INT (MEMORY_USE_WO), QImode);
3366 #ifdef TARGET_MEM_FUNCTIONS
3367 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3369 const0_rtx, TYPE_MODE (integer_type_node),
3370 convert_to_mode (TYPE_MODE (sizetype),
3372 TREE_UNSIGNED (sizetype)),
3373 TYPE_MODE (sizetype));
3375 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3377 convert_to_mode (TYPE_MODE (integer_type_node),
3379 TREE_UNSIGNED (integer_type_node)),
3380 TYPE_MODE (integer_type_node));
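/* Example (illustrative): for char buf[10] = "abc"; the string
   constant occupies 4 bytes counting the terminator, so 4 bytes are
   block-copied and the remaining 6 are cleared by the call above.
   When the copy size is not constant, the size < 0 comparison emitted
   earlier skips the clearing code at run time.  */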
3388 /* Handle calls that return values in multiple non-contiguous locations.
3389 The Irix 6 ABI has examples of this. */
3390 else if (GET_CODE (target) == PARALLEL)
3391 emit_group_load (target, temp);
3392 else if (GET_MODE (temp) == BLKmode)
3393 emit_block_move (target, temp, expr_size (exp),
3394 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3396 emit_move_insn (target, temp);
3399 /* If we don't want a value, return NULL_RTX. */
3403 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3404 ??? The latter test doesn't seem to make sense. */
3405 else if (dont_return_target && GET_CODE (temp) != MEM)
3408 /* Return TARGET itself if it is a hard register. */
3409 else if (want_value && GET_MODE (target) != BLKmode
3410 && ! (GET_CODE (target) == REG
3411 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3412 return copy_to_reg (target);
3418 /* Return 1 if EXP just contains zeros. */
3426 switch (TREE_CODE (exp))
3430 case NON_LVALUE_EXPR:
3431 return is_zeros_p (TREE_OPERAND (exp, 0));
3434 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3438 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3441 return REAL_VALUES_EQUAL (TREE_REAL_CST (exp), dconst0);
3444 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3445 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3446 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3447 if (! is_zeros_p (TREE_VALUE (elt)))
3456 /* Return 1 if EXP contains mostly (3/4) zeros. */
3459 mostly_zeros_p (exp)
3462 if (TREE_CODE (exp) == CONSTRUCTOR)
3464 int elts = 0, zeros = 0;
3465 tree elt = CONSTRUCTOR_ELTS (exp);
3466 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3468 /* If there are no ranges of true bits, it is all zero. */
3469 return elt == NULL_TREE;
3471 for (; elt; elt = TREE_CHAIN (elt))
3473 /* We do not handle the case where the index is a RANGE_EXPR,
3474 so the statistic will be somewhat inaccurate.
3475 We do make a more accurate count in store_constructor itself,
3476 and since this function is only used for nested array elements,
3477 this should be close enough. */
3478 if (mostly_zeros_p (TREE_VALUE (elt)))
3483 return 4 * zeros >= 3 * elts;
3486 return is_zeros_p (exp);
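/* Example (illustrative): a constructor with 16 elements of which 12
   are themselves mostly zero satisfies 4 * 12 >= 3 * 16, so callers
   will clear the whole object first and store only the nonzero
   elements.  */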
3489 /* Helper function for store_constructor.
3490 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3491 TYPE is the type of the CONSTRUCTOR, not the element type.
3492 CLEARED is as for store_constructor.
3494 This provides a recursive shortcut back to store_constructor when it isn't
3495 necessary to go through store_field. This is so that we can pass through
3496 the cleared field to let store_constructor know that we may not have to
3497 clear a substructure if the outer structure has already been cleared. */
3500 store_constructor_field (target, bitsize, bitpos,
3501 mode, exp, type, cleared)
3503 int bitsize, bitpos;
3504 enum machine_mode mode;
3508 if (TREE_CODE (exp) == CONSTRUCTOR
3509 && bitpos % BITS_PER_UNIT == 0
3510 /* If we have a non-zero bitpos for a register target, then we just
3511 let store_field do the bitfield handling. This is unlikely to
3512 generate unnecessary clear instructions anyway. */
3513 && (bitpos == 0 || GET_CODE (target) == MEM))
3516 target = change_address (target, VOIDmode,
3517 plus_constant (XEXP (target, 0),
3518 bitpos / BITS_PER_UNIT));
3519 store_constructor (exp, target, cleared);
3522 store_field (target, bitsize, bitpos, mode, exp,
3523 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3524 int_size_in_bytes (type));
3527 /* Store the value of constructor EXP into the rtx TARGET.
3528 TARGET is either a REG or a MEM.
3529 CLEARED is true if TARGET is known to have been zero'd. */
3532 store_constructor (exp, target, cleared)
3537 tree type = TREE_TYPE (exp);
3539 /* We know our target cannot conflict, since safe_from_p has been called. */
3541 /* Don't try copying piece by piece into a hard register
3542 since that is vulnerable to being clobbered by EXP.
3543 Instead, construct in a pseudo register and then copy it all. */
3544 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3546 rtx temp = gen_reg_rtx (GET_MODE (target));
3547 store_constructor (exp, temp, 0);
3548 emit_move_insn (target, temp);
3553 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3554 || TREE_CODE (type) == QUAL_UNION_TYPE)
3558 /* Inform later passes that the whole union value is dead. */
3559 if (TREE_CODE (type) == UNION_TYPE
3560 || TREE_CODE (type) == QUAL_UNION_TYPE)
3561 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3563 /* If we are building a static constructor into a register,
3564 set the initial value as zero so we can fold the value into
3565 a constant. But if more than one register is involved,
3566 this probably loses. */
3567 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3568 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
3571 emit_move_insn (target, const0_rtx);
3576 /* If the constructor has fewer fields than the structure
3577 or if we are initializing the structure to mostly zeros,
3578 clear the whole structure first. */
3579 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3580 != list_length (TYPE_FIELDS (type)))
3581 || mostly_zeros_p (exp))
3584 clear_storage (target, expr_size (exp),
3585 TYPE_ALIGN (type) / BITS_PER_UNIT);
3590 /* Inform later passes that the old value is dead. */
3591 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3593 /* Store each element of the constructor into
3594 the corresponding field of TARGET. */
3596 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3598 register tree field = TREE_PURPOSE (elt);
3599 register enum machine_mode mode;
3603 tree pos, constant = 0, offset = 0;
3604 rtx to_rtx = target;
3606 /* Just ignore missing fields.
3607 We cleared the whole structure, above,
3608 if any fields are missing. */
3612 if (cleared && is_zeros_p (TREE_VALUE (elt)))
3615 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3616 unsignedp = TREE_UNSIGNED (field);
3617 mode = DECL_MODE (field);
3618 if (DECL_BIT_FIELD (field))
3621 pos = DECL_FIELD_BITPOS (field);
3622 if (TREE_CODE (pos) == INTEGER_CST)
3624 else if (TREE_CODE (pos) == PLUS_EXPR
3625 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3626 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3631 bitpos = TREE_INT_CST_LOW (constant);
3637 if (contains_placeholder_p (offset))
3638 offset = build (WITH_RECORD_EXPR, sizetype,
3641 offset = size_binop (FLOOR_DIV_EXPR, offset,
3642 size_int (BITS_PER_UNIT));
3644 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3645 if (GET_CODE (to_rtx) != MEM)
3649 = change_address (to_rtx, VOIDmode,
3650 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
3651 force_reg (ptr_mode, offset_rtx)));
3653 if (TREE_READONLY (field))
3655 if (GET_CODE (to_rtx) == MEM)
3656 to_rtx = copy_rtx (to_rtx);
3658 RTX_UNCHANGING_P (to_rtx) = 1;
3661 store_constructor_field (to_rtx, bitsize, bitpos,
3662 mode, TREE_VALUE (elt), type, cleared);
3665 else if (TREE_CODE (type) == ARRAY_TYPE)
3670 tree domain = TYPE_DOMAIN (type);
3671 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3672 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3673 tree elttype = TREE_TYPE (type);
3675 /* If the constructor has fewer elements than the array,
3676 clear the whole array first. Similarly if this is a
3677 static constructor of a non-BLKmode object. */
3678 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3682 HOST_WIDE_INT count = 0, zero_count = 0;
3684 /* This loop is a more accurate version of the loop in
3685 mostly_zeros_p (it handles RANGE_EXPR in an index).
3686 It is also needed to check for missing elements. */
3687 for (elt = CONSTRUCTOR_ELTS (exp);
3689 elt = TREE_CHAIN (elt))
3691 tree index = TREE_PURPOSE (elt);
3692 HOST_WIDE_INT this_node_count;
3693 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3695 tree lo_index = TREE_OPERAND (index, 0);
3696 tree hi_index = TREE_OPERAND (index, 1);
3697 if (TREE_CODE (lo_index) != INTEGER_CST
3698 || TREE_CODE (hi_index) != INTEGER_CST)
3703 this_node_count = TREE_INT_CST_LOW (hi_index)
3704 - TREE_INT_CST_LOW (lo_index) + 1;
3707 this_node_count = 1;
3708 count += this_node_count;
3709 if (mostly_zeros_p (TREE_VALUE (elt)))
3710 zero_count += this_node_count;
3712 /* Clear the entire array first if there are any missing elements,
3713 or if the incidence of zero elements is >= 75%. */
3714 if (count < maxelt - minelt + 1
3715 || 4 * zero_count >= 3 * count)
3721 clear_storage (target, expr_size (exp),
3722 TYPE_ALIGN (type) / BITS_PER_UNIT);
3726 /* Inform later passes that the old value is dead. */
3727 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3729 /* Store each element of the constructor into
3730 the corresponding element of TARGET, determined
3731 by counting the elements. */
3732 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3734 elt = TREE_CHAIN (elt), i++)
3736 register enum machine_mode mode;
3740 tree value = TREE_VALUE (elt);
3741 tree index = TREE_PURPOSE (elt);
3742 rtx xtarget = target;
3744 if (cleared && is_zeros_p (value))
3747 mode = TYPE_MODE (elttype);
3748 bitsize = GET_MODE_BITSIZE (mode);
3749 unsignedp = TREE_UNSIGNED (elttype);
3751 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3753 tree lo_index = TREE_OPERAND (index, 0);
3754 tree hi_index = TREE_OPERAND (index, 1);
3755 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
3756 struct nesting *loop;
3757 HOST_WIDE_INT lo, hi, count;
3760 /* If the range is constant and "small", unroll the loop. */
3761 if (TREE_CODE (lo_index) == INTEGER_CST
3762 && TREE_CODE (hi_index) == INTEGER_CST
3763 && (lo = TREE_INT_CST_LOW (lo_index),
3764 hi = TREE_INT_CST_LOW (hi_index),
3765 count = hi - lo + 1,
3766 (GET_CODE (target) != MEM
3768 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
3769 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
3772 lo -= minelt; hi -= minelt;
3773 for (; lo <= hi; lo++)
3775 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
3776 store_constructor_field (target, bitsize, bitpos,
3777 mode, value, type, cleared);
3782 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
3783 loop_top = gen_label_rtx ();
3784 loop_end = gen_label_rtx ();
3786 unsignedp = TREE_UNSIGNED (domain);
3788 index = build_decl (VAR_DECL, NULL_TREE, domain);
3790 DECL_RTL (index) = index_r
3791 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
3794 if (TREE_CODE (value) == SAVE_EXPR
3795 && SAVE_EXPR_RTL (value) == 0)
3797 /* Make sure value gets expanded once before the
3799 expand_expr (value, const0_rtx, VOIDmode, 0);
3802 store_expr (lo_index, index_r, 0);
3803 loop = expand_start_loop (0);
3805 /* Assign value to element index. */
3806 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3807 size_int (BITS_PER_UNIT));
3808 position = size_binop (MULT_EXPR,
3809 size_binop (MINUS_EXPR, index,
3810 TYPE_MIN_VALUE (domain)),
3812 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3813 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3814 xtarget = change_address (target, mode, addr);
3815 if (TREE_CODE (value) == CONSTRUCTOR)
3816 store_constructor (value, xtarget, cleared);
3818 store_expr (value, xtarget, 0);
3820 expand_exit_loop_if_false (loop,
3821 build (LT_EXPR, integer_type_node,
3824 expand_increment (build (PREINCREMENT_EXPR,
3826 index, integer_one_node), 0, 0);
3828 emit_label (loop_end);
3830 /* Needed by stupid register allocation, to extend the
3831 lifetime of pseudo-regs used by target past the end
3833 emit_insn (gen_rtx (USE, GET_MODE (target), target));
3836 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
3837 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
3843 index = size_int (i);
3846 index = size_binop (MINUS_EXPR, index,
3847 TYPE_MIN_VALUE (domain));
3848 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3849 size_int (BITS_PER_UNIT));
3850 position = size_binop (MULT_EXPR, index, position);
3851 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3852 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3853 xtarget = change_address (target, mode, addr);
3854 store_expr (value, xtarget, 0);
3859 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
3860 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3862 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3863 store_constructor_field (target, bitsize, bitpos,
3864 mode, value, type, cleared);
3868 /* Set constructor assignments. */
3869 else if (TREE_CODE (type) == SET_TYPE)
3871 tree elt = CONSTRUCTOR_ELTS (exp);
3872 rtx xtarget = XEXP (target, 0);
3873 int set_word_size = TYPE_ALIGN (type);
3874 int nbytes = int_size_in_bytes (type), nbits;
3875 tree domain = TYPE_DOMAIN (type);
3876 tree domain_min, domain_max, bitlength;
3878 /* The default implementation strategy is to extract the constant
3879 parts of the constructor, use that to initialize the target,
3880 and then "or" in whatever non-constant ranges we need in addition.
3882 If a large set is all zero or all ones, it is
3883 probably better to set it using memset (if available) or bzero.
3884 Also, if a large set has just a single range, it may also be
3885 better to first clear the set (using bzero/memset),
3886 and then set the bits we want. */
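/* Note: concretely, the constant members are folded into whole-word
   stores below, while members given by run-time ranges fall through
   to the __setbits library call, or to memset when the range is
   byte-aligned.  */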
3888 /* Check for all zeros. */
3889 if (elt == NULL_TREE)
3892 clear_storage (target, expr_size (exp),
3893 TYPE_ALIGN (type) / BITS_PER_UNIT);
3897 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3898 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3899 bitlength = size_binop (PLUS_EXPR,
3900 size_binop (MINUS_EXPR, domain_max, domain_min),
3903 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
3905 nbits = TREE_INT_CST_LOW (bitlength);
3907 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
3908 are "complicated" (more than one range), initialize (the
3909 constant parts) by copying from a constant. */
3910 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
3911 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
3913 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3914 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
3915 char *bit_buffer = (char *) alloca (nbits);
3916 HOST_WIDE_INT word = 0;
3919 int offset = 0; /* In bytes from beginning of set. */
3920 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
3923 if (bit_buffer[ibit])
3925 if (BYTES_BIG_ENDIAN)
3926 word |= (1 << (set_word_size - 1 - bit_pos));
3928 word |= 1 << bit_pos;
3931 if (bit_pos >= set_word_size || ibit == nbits)
3933 if (word != 0 || ! cleared)
3935 rtx datum = GEN_INT (word);
3937 /* The assumption here is that it is safe to use
3938 XEXP if the set is multi-word, but not if
3939 it's single-word. */
3940 if (GET_CODE (target) == MEM)
3942 to_rtx = plus_constant (XEXP (target, 0), offset);
3943 to_rtx = change_address (target, mode, to_rtx);
3945 else if (offset == 0)
3949 emit_move_insn (to_rtx, datum);
3955 offset += set_word_size / BITS_PER_UNIT;
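/* Example (illustrative, hypothetical values): for the set {1, 3}
   with a 32-bit set word and BYTES_BIG_ENDIAN false, bits 1 and 3 of
   BIT_BUFFER make WORD equal 0xA, which the emit_move_insn above then
   flushes into the first word of the target.  */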
3961 /* Don't bother clearing storage if the set is all ones. */
3962 if (TREE_CHAIN (elt) != NULL_TREE
3963 || (TREE_PURPOSE (elt) == NULL_TREE
3965 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
3966 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
3967 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
3968 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
3970 clear_storage (target, expr_size (exp),
3971 TYPE_ALIGN (type) / BITS_PER_UNIT);
3974 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
3976 /* start of range of element or NULL */
3977 tree startbit = TREE_PURPOSE (elt);
3978 /* end of range of element, or element value */
3979 tree endbit = TREE_VALUE (elt);
3980 HOST_WIDE_INT startb, endb;
3981 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
3983 bitlength_rtx = expand_expr (bitlength,
3984 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
3986 /* handle non-range tuple element like [ expr ] */
3987 if (startbit == NULL_TREE)
3989 startbit = save_expr (endbit);
3992 startbit = convert (sizetype, startbit);
3993 endbit = convert (sizetype, endbit);
3994 if (! integer_zerop (domain_min))
3996 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
3997 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
3999 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4000 EXPAND_CONST_ADDRESS);
4001 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4002 EXPAND_CONST_ADDRESS);
4006 targetx = assign_stack_temp (GET_MODE (target),
4007 GET_MODE_SIZE (GET_MODE (target)),
4009 emit_move_insn (targetx, target);
4011 else if (GET_CODE (target) == MEM)
4016 #ifdef TARGET_MEM_FUNCTIONS
4017 /* Optimization: If startbit and endbit are
4018 constants divisible by BITS_PER_UNIT,
4019 call memset instead. */
4020 if (TREE_CODE (startbit) == INTEGER_CST
4021 && TREE_CODE (endbit) == INTEGER_CST
4022 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4023 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4025 emit_library_call (memset_libfunc, 0,
4027 plus_constant (XEXP (targetx, 0),
4028 startb / BITS_PER_UNIT),
4030 constm1_rtx, TYPE_MODE (integer_type_node),
4031 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4032 TYPE_MODE (sizetype));
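/* Example (illustrative, hypothetical values): a constant range
   covering bits 8..23 gives STARTB == 8 and ENDB == 24, both multiples
   of BITS_PER_UNIT, so the two bytes starting one byte into TARGETX
   are set to all-ones by a single memset call.  */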
4037 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
4038 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4039 bitlength_rtx, TYPE_MODE (sizetype),
4040 startbit_rtx, TYPE_MODE (sizetype),
4041 endbit_rtx, TYPE_MODE (sizetype));
4044 emit_move_insn (target, targetx);
4052 /* Store the value of EXP (an expression tree)
4053 into a subfield of TARGET which has mode MODE and occupies
4054 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4055 If MODE is VOIDmode, it means that we are storing into a bit-field.
4057 If VALUE_MODE is VOIDmode, return nothing in particular.
4058 UNSIGNEDP is not used in this case.
4060 Otherwise, return an rtx for the value stored. This rtx
4061 has mode VALUE_MODE if that is convenient to do.
4062 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4064 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4065 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
4068 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4069 unsignedp, align, total_size)
4071 int bitsize, bitpos;
4072 enum machine_mode mode;
4074 enum machine_mode value_mode;
4079 HOST_WIDE_INT width_mask = 0;
4081 if (bitsize < HOST_BITS_PER_WIDE_INT)
4082 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
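/* Example (illustrative): for BITSIZE == 5, WIDTH_MASK becomes
   (1 << 5) - 1 == 0x1F, the mask of the low-order bits that actually
   belong to the field.  */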
4084 /* If we are storing into an unaligned field of an aligned union that is
4085 in a register, we may have the mode of TARGET being an integer mode but
4086 MODE == BLKmode. In that case, get an aligned object whose size and
4087 alignment are the same as TARGET and store TARGET into it (we can avoid
4088 the store if the field being stored is the entire width of TARGET). Then
4089 call ourselves recursively to store the field into a BLKmode version of
4090 that object. Finally, load from the object into TARGET. This is not
4091 very efficient in general, but should only be slightly more expensive
4092 than the otherwise-required unaligned accesses. Perhaps this can be
4093 cleaned up later. */
4096 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4098 rtx object = assign_stack_temp (GET_MODE (target),
4099 GET_MODE_SIZE (GET_MODE (target)), 0);
4100 rtx blk_object = copy_rtx (object);
4102 MEM_IN_STRUCT_P (object) = 1;
4103 MEM_IN_STRUCT_P (blk_object) = 1;
4104 PUT_MODE (blk_object, BLKmode);
4106 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4107 emit_move_insn (object, target);
4109 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4112 /* Even though we aren't returning target, we need to
4113 give it the updated value. */
4114 emit_move_insn (target, object);
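/* Note: the round trip above trades an extra pair of moves through a
   stack temporary for the ability to perform an ordinary BLKmode
   field store on a value that lives in a register.  */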
4119 /* If the structure is in a register or if the component
4120 is a bit field, we cannot use addressing to access it.
4121 Use bit-field techniques or SUBREG to store in it. */
4123 if (mode == VOIDmode
4124 || (mode != BLKmode && ! direct_store[(int) mode])
4125 || GET_CODE (target) == REG
4126 || GET_CODE (target) == SUBREG
4127 /* If the field isn't aligned enough to store as an ordinary memref,
4128 store it as a bit field. */
4129 || (SLOW_UNALIGNED_ACCESS
4130 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4131 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4133 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4135 /* If BITSIZE is narrower than the size of the type of EXP
4136 we will be narrowing TEMP. Normally, what's wanted are the
4137 low-order bits. However, if EXP's type is a record and this is
4138 a big-endian machine, we want the upper BITSIZE bits. */
4139 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4140 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4141 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4142 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4143 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4147 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4149 if (mode != VOIDmode && mode != BLKmode
4150 && mode != TYPE_MODE (TREE_TYPE (exp)))
4151 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4153 /* If the modes of TARGET and TEMP are both BLKmode, both
4154 must be in memory and BITPOS must be aligned on a byte
4155 boundary. If so, we simply do a block copy. */
4156 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4158 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4159 || bitpos % BITS_PER_UNIT != 0)
4162 target = change_address (target, VOIDmode,
4163 plus_constant (XEXP (target, 0),
4164 bitpos / BITS_PER_UNIT));
4166 emit_block_move (target, temp,
4167 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4171 return value_mode == VOIDmode ? const0_rtx : target;
4174 /* Store the value in the bitfield. */
4175 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4176 if (value_mode != VOIDmode)
4178 /* The caller wants an rtx for the value. */
4179 /* If possible, avoid refetching from the bitfield itself. */
4181 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4184 enum machine_mode tmode;
4187 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4188 tmode = GET_MODE (temp);
4189 if (tmode == VOIDmode)
4191 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4192 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4193 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
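/* Note: the shift pair above is the usual sign-extension idiom; e.g.
   a 5-bit field fetched in SImode is shifted left by 27 and then
   arithmetically right by 27 so that the field's sign bit fills the
   upper bits.  */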
4195 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4196 NULL_RTX, value_mode, 0, align,
4203 rtx addr = XEXP (target, 0);
4206 /* If a value is wanted, it must be the lhs;
4207 so make the address stable for multiple use. */
4209 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4210 && ! CONSTANT_ADDRESS_P (addr)
4211 /* A frame-pointer reference is already stable. */
4212 && ! (GET_CODE (addr) == PLUS
4213 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4214 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4215 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4216 addr = copy_to_reg (addr);
4218 /* Now build a reference to just the desired component. */
4220 to_rtx = copy_rtx (change_address (target, mode,
4221 plus_constant (addr,
4223 / BITS_PER_UNIT))));
4224 MEM_IN_STRUCT_P (to_rtx) = 1;
4226 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4230 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4231 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4232 ARRAY_REFs and find the ultimate containing object, which we return.
4234 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4235 bit position, and *PUNSIGNEDP to the signedness of the field.
4236 If the position of the field is variable, we store a tree
4237 giving the variable offset (in units) in *POFFSET.
4238 This offset is in addition to the bit position.
4239 If the position is not variable, we store 0 in *POFFSET.
4240 We set *PALIGNMENT to the alignment in bytes of the address that will be
4241 computed. This is the alignment of the thing we return if *POFFSET
4242 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4244 If any of the extraction expressions is volatile,
4245 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4247 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4248 is a mode that can be used to access the field. In that case, *PBITSIZE
4251 If the field describes a variable-sized object, *PMODE is set to
4252 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4253 this case, but the address of the object can be found. */
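/* Example (illustrative): for an access like s.a[i].b, the walk below
   peels the COMPONENT_REF and ARRAY_REF layers, accumulating the
   constant bit positions into *PBITPOS and the i-dependent part into
   *POFFSET, and finally returns the object for s itself.  */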
4256 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4257 punsignedp, pvolatilep, palignment)
4262 enum machine_mode *pmode;
4267 tree orig_exp = exp;
4269 enum machine_mode mode = VOIDmode;
4270 tree offset = integer_zero_node;
4271 int alignment = BIGGEST_ALIGNMENT;
4273 if (TREE_CODE (exp) == COMPONENT_REF)
4275 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4276 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4277 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4278 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4280 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4282 size_tree = TREE_OPERAND (exp, 1);
4283 *punsignedp = TREE_UNSIGNED (exp);
4287 mode = TYPE_MODE (TREE_TYPE (exp));
4288 *pbitsize = GET_MODE_BITSIZE (mode);
4289 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4294 if (TREE_CODE (size_tree) != INTEGER_CST)
4295 mode = BLKmode, *pbitsize = -1;
  else
    *pbitsize = TREE_INT_CST_LOW (size_tree);
4300 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4301 and find the ultimate containing object. */
4307 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4309 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4310 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4311 : TREE_OPERAND (exp, 2));
4312 tree constant = integer_zero_node, var = pos;
4314 /* If this field hasn't been filled in yet, don't go
4315 past it. This should only happen when folding expressions
4316 made during type construction. */
4320 /* Assume here that the offset is a multiple of a unit.
4321 If not, there should be an explicitly added constant. */
4322 if (TREE_CODE (pos) == PLUS_EXPR
4323 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4324 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4325 else if (TREE_CODE (pos) == INTEGER_CST)
4326 constant = pos, var = integer_zero_node;
4328 *pbitpos += TREE_INT_CST_LOW (constant);
4329 offset = size_binop (PLUS_EXPR, offset,
4330 size_binop (EXACT_DIV_EXPR, var,
4331 size_int (BITS_PER_UNIT)));
4334 else if (TREE_CODE (exp) == ARRAY_REF)
4336 /* This code is based on the code in case ARRAY_REF in expand_expr
4337 below. We assume here that the size of an array element is
4338 always an integral multiple of BITS_PER_UNIT. */
4340 tree index = TREE_OPERAND (exp, 1);
4341 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
	  tree low_bound
	    = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4344 tree index_type = TREE_TYPE (index);
4346 if (! integer_zerop (low_bound))
4347 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
4349 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
	      index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
			       index);
4353 index_type = TREE_TYPE (index);
4356 index = fold (build (MULT_EXPR, index_type, index,
4357 convert (index_type,
4358 TYPE_SIZE (TREE_TYPE (exp)))));
4360 if (TREE_CODE (index) == INTEGER_CST
4361 && TREE_INT_CST_HIGH (index) == 0)
4362 *pbitpos += TREE_INT_CST_LOW (index);
	  else
	    offset = size_binop (PLUS_EXPR, offset,
				 size_binop (FLOOR_DIV_EXPR, index,
					     size_int (BITS_PER_UNIT)));
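/* Illustrative sketch, not part of the original file: the bookkeeping
   above in scalar form.  An index already scaled to bits folds into the
   constant bit position when it is constant; otherwise it feeds the
   separate variable offset, in bytes.  BITS_PER_UNIT is taken as 8 for
   the illustration, and in the real code the non-constant part is a
   tree expression rather than a long.  */
static void
record_array_index (index_bits, is_constant, pbitpos, pbyteoffset)
     long index_bits;
     int is_constant;
     long *pbitpos, *pbyteoffset;
{
  if (is_constant)
    *pbitpos += index_bits;		/* fold into the constant bit position */
  else
    *pbyteoffset += index_bits / 8;	/* accumulate as a variable byte offset */
}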
4368 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4369 && ! ((TREE_CODE (exp) == NOP_EXPR
4370 || TREE_CODE (exp) == CONVERT_EXPR)
		     && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
			   && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
			       != UNION_TYPE))
		     && (TYPE_MODE (TREE_TYPE (exp))
			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
	break;
4378 /* If any reference in the chain is volatile, the effect is volatile. */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;
4382 /* If the offset is non-constant already, then we can't assume any
4383 alignment more than the alignment here. */
4384 if (! integer_zerop (offset))
4385 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4387 exp = TREE_OPERAND (exp, 0);
4390 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4391 alignment = MIN (alignment, DECL_ALIGN (exp));
4392 else if (TREE_TYPE (exp) != 0)
4393 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
  if (integer_zerop (offset))
    offset = 0;
4398 if (offset != 0 && contains_placeholder_p (offset))
4399 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
  *pmode = mode;
  *poffset = offset;
  *palignment = alignment / BITS_PER_UNIT;
  return exp;
}
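/* Illustrative sketch, hypothetical caller, not from this file: how the
   outputs of get_inner_reference combine.  The field's byte address is
   the containing object's address plus the variable offset plus the
   whole-byte part of the bit position; the remainder is the bit offset
   within that byte.  BITS_PER_UNIT is taken as 8.  */
static void
locate_field (object_addr, offset, bitpos, paddr, pbit)
     long object_addr, offset, bitpos;
     long *paddr, *pbit;
{
  *paddr = object_addr + offset + bitpos / 8;	/* byte address of the field */
  *pbit = bitpos % 8;				/* residual bit offset */
}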
/* Subroutine of expand_expr: compute memory_usage from modifier.  */
4408 static enum memory_use_mode
4409 get_memory_usage_from_modifier (modifier)
4410 enum expand_modifier modifier;
{
  switch (modifier)
    {
    case EXPAND_NORMAL:
      return MEMORY_USE_RO;
4417 case EXPAND_MEMORY_USE_WO:
4418 return MEMORY_USE_WO;
4420 case EXPAND_MEMORY_USE_RW:
4421 return MEMORY_USE_RW;
4423 case EXPAND_INITIALIZER:
4424 case EXPAND_MEMORY_USE_DONT:
4426 case EXPAND_CONST_ADDRESS:
4427 return MEMORY_USE_DONT;
    case EXPAND_MEMORY_USE_BAD:
    default:
      abort ();
    }
}
4434 /* Given an rtx VALUE that may contain additions and multiplications,
4435 return an equivalent value that just refers to a register or memory.
4436 This is done by generating instructions to perform the arithmetic
4437 and returning a pseudo-register containing the value.
4439 The returned value may be a REG, SUBREG, MEM or constant. */
rtx
force_operand (value, target)
     rtx value, target;
{
4445 register optab binoptab = 0;
  /* Use a temporary to force order of execution of calls to
     `force_operand'.  */
  rtx tmp;
  register rtx op2;
4450 /* Use subtarget as the target for operand 0 of a binary operation. */
4451 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4453 if (GET_CODE (value) == PLUS)
4454 binoptab = add_optab;
4455 else if (GET_CODE (value) == MINUS)
4456 binoptab = sub_optab;
4457 else if (GET_CODE (value) == MULT)
4459 op2 = XEXP (value, 1);
4460 if (!CONSTANT_P (op2)
	  && !(GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
      tmp = force_operand (XEXP (value, 0), subtarget);
4464 return expand_mult (GET_MODE (value), tmp,
4465 force_operand (op2, NULL_RTX),
4471 op2 = XEXP (value, 1);
4472 if (!CONSTANT_P (op2)
	  && !(GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
4475 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4477 binoptab = add_optab;
4478 op2 = negate_rtx (GET_MODE (value), op2);
4481 /* Check for an addition with OP2 a constant integer and our first
4482 operand a PLUS of a virtual register and something else. In that
4483 case, we want to emit the sum of the virtual register and the
4484 constant first and then add the other value. This allows virtual
4485 register instantiation to simply modify the constant rather than
4486 creating another one around this addition. */
4487 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4488 && GET_CODE (XEXP (value, 0)) == PLUS
4489 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4490 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4491 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4493 rtx temp = expand_binop (GET_MODE (value), binoptab,
4494 XEXP (XEXP (value, 0), 0), op2,
4495 subtarget, 0, OPTAB_LIB_WIDEN);
4496 return expand_binop (GET_MODE (value), binoptab, temp,
4497 force_operand (XEXP (XEXP (value, 0), 1), 0),
4498 target, 0, OPTAB_LIB_WIDEN);
4501 tmp = force_operand (XEXP (value, 0), subtarget);
4502 return expand_binop (GET_MODE (value), binoptab, tmp,
4503 force_operand (op2, NULL_RTX),
4504 target, 0, OPTAB_LIB_WIDEN);
4505 /* We give UNSIGNEDP = 0 to expand_binop
     because the only operations we are expanding here are signed ones.  */
}
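/* Illustrative sketch, not part of the original file: the special case
   above in scalar form.  Given (vreg + x) + c, summing the virtual
   register with the constant first yields a sum that virtual-register
   instantiation can rewrite by just editing the constant, instead of
   wrapping yet another addition around the result.  */
static long
anchor_virtual_plus (vreg, x, c)
     long vreg, x, c;
{
  long anchored = vreg + c;	/* the part instantiation can fold in place */
  return anchored + x;		/* then add the remaining operand */
}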
4511 /* Subroutine of expand_expr:
4512 save the non-copied parts (LIST) of an expr (LHS), and return a list
4513 which can restore these values to their previous values,
4514 should something modify their storage. */
static tree
save_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;
4524 for (tail = list; tail; tail = TREE_CHAIN (tail))
4525 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4526 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
4529 tree part = TREE_VALUE (tail);
4530 tree part_type = TREE_TYPE (part);
4531 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
4532 rtx target = assign_temp (part_type, 0, 1, 1);
4533 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
4534 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
4535 parts = tree_cons (to_be_saved,
			   build (RTL_EXPR, part_type, NULL_TREE,
				  (tree) target),
			   parts);
4539 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
4544 /* Subroutine of expand_expr:
4545 record the non-copied parts (LIST) of an expr (LHS), and return a list
4546 which specifies the initial values of these parts. */
static tree
init_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;
4556 for (tail = list; tail; tail = TREE_CHAIN (tail))
4557 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4558 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
4561 tree part = TREE_VALUE (tail);
4562 tree part_type = TREE_TYPE (part);
4563 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
4564 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
4569 /* Subroutine of expand_expr: return nonzero iff there is no way that
4570 EXP can reference X, which is being modified. */
static int
safe_from_p (x, exp)
     rtx x;
     tree exp;
{
  rtx exp_rtl = 0;
  int i, nops;

  if (x == 0
4581 /* If EXP has varying size, we MUST use a target since we currently
4582 have no way of allocating temporaries of variable size
4583 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
4584 So we assume here that something at a higher level has prevented a
4585 clash. This is somewhat bogus, but the best we can do. Only
4586 do this when X is BLKmode. */
4587 || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4588 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
4589 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
4590 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
4591 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
	      && GET_MODE (x) == BLKmode))
    return 1;
4596 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
4597 find the underlying pseudo. */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }
4605 /* If X is a location in the outgoing argument area, it is always safe. */
4606 if (GET_CODE (x) == MEM
4607 && (XEXP (x, 0) == virtual_outgoing_args_rtx
4608 || (GET_CODE (XEXP (x, 0)) == PLUS
	      && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
    return 1;
4612 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case 'd':
      exp_rtl = DECL_RTL (exp);
      break;
    case 'c':
      return 1;

    case 'x':
      if (TREE_CODE (exp) == TREE_LIST)
4623 return ((TREE_VALUE (exp) == 0
4624 || safe_from_p (x, TREE_VALUE (exp)))
4625 && (TREE_CHAIN (exp) == 0
4626 || safe_from_p (x, TREE_CHAIN (exp))));
      else
	return 0;

    case '1':
      return safe_from_p (x, TREE_OPERAND (exp, 0));
    case '2':
    case '<':
      return (safe_from_p (x, TREE_OPERAND (exp, 0))
4636 && safe_from_p (x, TREE_OPERAND (exp, 1)));
    case 'e':
    case 'r':
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
4641 the expression. If it is set, we conflict iff we are that rtx or
4642 both are in memory. Otherwise, we check all operands of the
4643 expression recursively. */
4645 switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  return (staticp (TREE_OPERAND (exp, 0))
4649 || safe_from_p (x, TREE_OPERAND (exp, 0)));
	case INDIRECT_REF:
	  if (GET_CODE (x) == MEM)
	    return 0;
	  break;
	case CALL_EXPR:
	  exp_rtl = CALL_EXPR_RTL (exp);
	      /* Assume that the call will clobber all hard registers and
		 all of memory.  */
4662 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
		  || GET_CODE (x) == MEM)
		return 0;
	    }
	  break;
	case RTL_EXPR:
	  /* If a sequence exists, we would have to scan every instruction
	     in the sequence to see if it was safe.  This is probably not
	     worthwhile.  */
	  if (RTL_EXPR_SEQUENCE (exp))
	    return 0;
	  exp_rtl = RTL_EXPR_RTL (exp);
	  break;
4679 case WITH_CLEANUP_EXPR:
	  exp_rtl = RTL_EXPR_RTL (exp);
	  break;
4683 case CLEANUP_POINT_EXPR:
4684 return safe_from_p (x, TREE_OPERAND (exp, 0));
	case SAVE_EXPR:
	  exp_rtl = SAVE_EXPR_RTL (exp);
	  break;
	case BIND_EXPR:
	  /* The only operand we look at is operand 1.  The rest aren't
4692 part of the expression. */
4693 return safe_from_p (x, TREE_OPERAND (exp, 1));
4695 case METHOD_CALL_EXPR:
	  /* This takes an rtx argument, but shouldn't appear here.  */
	  abort ();
	}
      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;
4704 nops = tree_code_length[(int) TREE_CODE (exp)];
4705 for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i)))
	  return 0;
    }
  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (GET_CODE (exp_rtl) == REG
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}
4723 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
4724 are memory and EXP is not readonly. */
4725 return ! (rtx_equal_p (x, exp_rtl)
4726 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
		    && ! TREE_READONLY (exp)));
    }
  /* If we reach here, it is safe.  */
  return 1;
}
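/* Illustrative sketch, not part of the original file: the final conflict
   test above reduced to booleans.  The target X is unsafe exactly when
   it is the very rtx the expression uses, or when both are memory
   references and the expression's object is not known read-only.  */
static int
conflict_p (same_rtx, x_in_mem, exp_in_mem, exp_readonly)
     int same_rtx, x_in_mem, exp_in_mem, exp_readonly;
{
  return same_rtx || (x_in_mem && exp_in_mem && ! exp_readonly);
}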
4734 /* Subroutine of expand_expr: return nonzero iff EXP is an
4735 expression whose type is statically determinable. */
static int
fixed_type_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == PARM_DECL
4742 || TREE_CODE (exp) == VAR_DECL
4743 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
4744 || TREE_CODE (exp) == COMPONENT_REF
      || TREE_CODE (exp) == ARRAY_REF)
    return 1;
  return 0;
}
4750 /* Subroutine of expand_expr: return rtx if EXP is a
4751 variable or parameter; else return 0. */
static rtx
var_rtx (exp)
     tree exp;
{
  STRIP_NOPS (exp);
  switch (TREE_CODE (exp))
    {
    case PARM_DECL:
    case VAR_DECL:
      return DECL_RTL (exp);
    default:
      return 0;
    }
}
4768 /* expand_expr: generate code for computing expression EXP.
4769 An rtx for the computed value is returned. The value is never null.
4770 In the case of a void EXP, const0_rtx is returned.
4772 The value may be stored in TARGET if TARGET is nonzero.
4773 TARGET is just a suggestion; callers must assume that
4774 the rtx returned may not be the same as TARGET.
4776 If TARGET is CONST0_RTX, it means that the value will be ignored.
4778 If TMODE is not VOIDmode, it suggests generating the
4779 result in mode TMODE. But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
4781 TMODE is just a suggestion; callers must assume that
4782 the rtx returned may not have mode TMODE.
4784 Note that TARGET may have neither TMODE nor MODE. In that case, it
4785 probably will not be used.
4787 If MODIFIER is EXPAND_SUM then when EXP is an addition
4788 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4789 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4790 products as above, or REG or MEM, or constant.
4791 Ordinarily in such cases we would output mul or add instructions
4792 and then return a pseudo reg containing the sum.
4794 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4795 it also marks a label as absolutely required (it can't be dead).
4796 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
4797 This is used for outputting expressions used in initializers.
4799 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4800 with a constant address even if that address is not normally legitimate.
4801 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
rtx
expand_expr (exp, target, tmode, modifier)
     register tree exp;
     rtx target;
     enum machine_mode tmode;
     enum expand_modifier modifier;
{
4810 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4811 This is static so it will be accessible to our recursive callees. */
4812 static tree placeholder_list = 0;
4813 register rtx op0, op1, temp;
4814 tree type = TREE_TYPE (exp);
4815 int unsignedp = TREE_UNSIGNED (type);
4816 register enum machine_mode mode = TYPE_MODE (type);
  register enum tree_code code = TREE_CODE (exp);
  optab this_optab;
4819 /* Use subtarget as the target for operand 0 of a binary operation. */
4820 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4821 rtx original_target = target;
4822 /* Maybe defer this until sure not doing bytecode? */
4823 int ignore = (target == const0_rtx
4824 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4825 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4826 || code == COND_EXPR)
4827 && TREE_CODE (type) == VOID_TYPE));
  tree context;
  /* Used by check-memory-usage to make modifier read only.  */
4830 enum expand_modifier ro_modifier;
4832 /* Make a read-only version of the modifier. */
4833 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
4834 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
4835 ro_modifier = modifier;
  else
    ro_modifier = EXPAND_NORMAL;
  if (output_bytecode && modifier != EXPAND_INITIALIZER)
    {
      bc_expand_expr (exp);
      return NULL;
    }
4845 /* Don't use hard regs as subtargets, because the combiner
4846 can only handle pseudo regs. */
  if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
    subtarget = 0;
4849 /* Avoid subtargets inside loops,
4850 since they hide some invariant expressions. */
  if (preserve_subexpressions_p ())
    subtarget = 0;
4854 /* If we are going to ignore this result, we need only do something
4855 if there is a side-effect somewhere in the expression. If there
4856 is, short-circuit the most common cases here. Note that we must
4857 not call expand_expr with anything but const0_rtx in case this
4858 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
      if (! TREE_SIDE_EFFECTS (exp))
	return const0_rtx;
4865 /* Ensure we reference a volatile object even if value is ignored. */
4866 if (TREE_THIS_VOLATILE (exp)
4867 && TREE_CODE (exp) != FUNCTION_DECL
4868 && mode != VOIDmode && mode != BLKmode)
4870 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
4871 if (GET_CODE (temp) == MEM)
	    temp = copy_to_reg (temp);
	  return const0_rtx;
	}
4876 if (TREE_CODE_CLASS (code) == '1')
4877 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4878 VOIDmode, ro_modifier);
4879 else if (TREE_CODE_CLASS (code) == '2'
4880 || TREE_CODE_CLASS (code) == '<')
4882 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
	  return const0_rtx;
	}
4886 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4887 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	/* If the second operand has no side effects, just evaluate
	   the first.  */
4890 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4891 VOIDmode, ro_modifier);
4896 /* If will do cse, generate all results into pseudo registers
4897 since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  */
4901 if (! cse_not_expected && mode != BLKmode && target
      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
    target = subtarget;
4909 tree function = decl_function_context (exp);
4910 /* Handle using a label in a containing function. */
4911 if (function != current_function_decl
4912 && function != inline_function_decl && function != 0)
4914 struct function *p = find_function_data (function);
4915 /* Allocate in the memory associated with the function
4916 that the label is in. */
4917 push_obstacks (p->function_obstack,
4918 p->function_maybepermanent_obstack);
4920 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4921 label_rtx (exp), p->forced_labels);
4924 else if (modifier == EXPAND_INITIALIZER)
4925 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4926 label_rtx (exp), forced_labels);
4927 temp = gen_rtx (MEM, FUNCTION_MODE,
4928 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
4929 if (function != current_function_decl
4930 && function != inline_function_decl && function != 0)
4931 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
    case PARM_DECL:
      if (DECL_RTL (exp) == 0)
	{
	  error_with_decl (exp, "prior parameter's size depends on `%s'");
	  return CONST0_RTX (mode);
	}
4942 /* ... fall through ... */
    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
4946 but the type is complete now, lay out the decl now. */
4947 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4948 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
4950 push_obstacks_nochange ();
4951 end_temporary_allocation ();
4952 layout_decl (exp, 0);
	  PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
	  pop_obstacks ();
	}
4957 /* Only check automatic variables. Currently, function arguments are
4958 not checked (this can be done at compile-time with prototypes).
4959 Aggregates are not checked. */
4960 if (flag_check_memory_usage && code == VAR_DECL
4961 && GET_CODE (DECL_RTL (exp)) == MEM
4962 && DECL_CONTEXT (exp) != NULL_TREE
4963 && ! TREE_STATIC (exp)
4964 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4966 enum memory_use_mode memory_usage;
4967 memory_usage = get_memory_usage_from_modifier (modifier);
4969 if (memory_usage != MEMORY_USE_DONT)
4970 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
4971 XEXP (DECL_RTL (exp), 0), ptr_mode,
4972 GEN_INT (int_size_in_bytes (type)),
4973 TYPE_MODE (sizetype),
4974 GEN_INT (memory_usage), QImode);
4977 /* ... fall through ... */
    case FUNCTION_DECL:
    case RESULT_DECL:
      if (DECL_RTL (exp) == 0)
	abort ();
      /* Ensure variable marked as used even if it doesn't go through
	 a parser.  If it hasn't been used yet, write out an external
	 definition.  */
4987 if (! TREE_USED (exp))
4989 assemble_external (exp);
4990 TREE_USED (exp) = 1;
      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;
4996 /* Handle variables inherited from containing functions. */
4997 context = decl_function_context (exp);
4999 /* We treat inline_function_decl as an alias for the current function
5000 because that is the inline function whose vars, types, etc.
5001 are being merged into the current function.
5002 See expand_inline_function. */
5004 if (context != 0 && context != current_function_decl
5005 && context != inline_function_decl
5006 /* If var is static, we don't need a static chain to access it. */
5007 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5008 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5012 /* Mark as non-local and addressable. */
5013 DECL_NONLOCAL (exp) = 1;
	  if (DECL_NO_STATIC_CHAIN (current_function_decl))
	    abort ();
	  mark_addressable (exp);
	  if (GET_CODE (DECL_RTL (exp)) != MEM)
	    abort ();
5019 addr = XEXP (DECL_RTL (exp), 0);
5020 if (GET_CODE (addr) == MEM)
5021 addr = gen_rtx (MEM, Pmode,
5022 fix_lexical_addr (XEXP (addr, 0), exp));
	  else
	    addr = fix_lexical_addr (addr, exp);
5025 temp = change_address (DECL_RTL (exp), mode, addr);
5028 /* This is the case of an array whose size is to be determined
5029 from its initializer, while the initializer is still being parsed.
5032 else if (GET_CODE (DECL_RTL (exp)) == MEM
5033 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5034 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5035 XEXP (DECL_RTL (exp), 0));
5037 /* If DECL_RTL is memory, we are in the normal case and either
5038 the address is not valid or it is not a register and -fforce-addr
5039 is specified, get the address into a register. */
5041 else if (GET_CODE (DECL_RTL (exp)) == MEM
5042 && modifier != EXPAND_CONST_ADDRESS
5043 && modifier != EXPAND_SUM
5044 && modifier != EXPAND_INITIALIZER
5045 && (! memory_address_p (DECL_MODE (exp),
5046 XEXP (DECL_RTL (exp), 0))
		   || (flag_force_addr
		       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5049 temp = change_address (DECL_RTL (exp), VOIDmode,
5050 copy_rtx (XEXP (DECL_RTL (exp), 0)));
      /* If we got something, return it.  But first, set the alignment
	 if the address is a register.  */
      if (temp != 0)
	{
	  if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
	    mark_reg_pointer (XEXP (temp, 0),
			      DECL_ALIGN (exp) / BITS_PER_UNIT);

	  return temp;
	}
5063 /* If the mode of DECL_RTL does not match that of the decl, it
5064 must be a promoted value. We return a SUBREG of the wanted mode,
5065 but mark it so that we know that it was already extended. */
5067 if (GET_CODE (DECL_RTL (exp)) == REG
5068 && GET_MODE (DECL_RTL (exp)) != mode)
5070 /* Get the signedness used for this variable. Ensure we get the
5071 same mode we got when the variable was declared. */
5072 if (GET_MODE (DECL_RTL (exp))
	      != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
	    abort ();
5076 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
5077 SUBREG_PROMOTED_VAR_P (temp) = 1;
5078 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5082 return DECL_RTL (exp);
    case INTEGER_CST:
      return immed_double_const (TREE_INT_CST_LOW (exp),
				 TREE_INT_CST_HIGH (exp),
				 mode);
    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5091 EXPAND_MEMORY_USE_BAD);
    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
5095 which will be turned into memory by reload if necessary.
5097 We used to force a register so that loop.c could see it. But
5098 this does not allow gen_* patterns to perform optimizations with
5099 the constants. It also produces two insns in cases like "x = 1.0;".
5100 On most machines, floating-point constants are not permitted in
5101 many insns, so we'd end up copying it to a register in any case.
5103 Now, we do the copying in expand_binop, if appropriate. */
5104 return immed_real_const (exp);
    case COMPLEX_CST:
    case STRING_CST:
      if (! TREE_CST_RTL (exp))
5109 output_constant_def (exp);
5111 /* TREE_CST_RTL probably contains a constant address.
5112 On RISC machines where a constant address isn't valid,
5113 make some insns to get that address into a register. */
5114 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5115 && modifier != EXPAND_CONST_ADDRESS
5116 && modifier != EXPAND_INITIALIZER
5117 && modifier != EXPAND_SUM
5118 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
	      || (flag_force_addr
		  && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5121 return change_address (TREE_CST_RTL (exp), VOIDmode,
5122 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5123 return TREE_CST_RTL (exp);
    case SAVE_EXPR:
      context = decl_function_context (exp);
5128 /* If this SAVE_EXPR was at global context, assume we are an
5129 initialization function and move it into our context. */
      if (context == 0)
	SAVE_EXPR_CONTEXT (exp) = current_function_decl;
5133 /* We treat inline_function_decl as an alias for the current function
5134 because that is the inline function whose vars, types, etc.
5135 are being merged into the current function.
5136 See expand_inline_function. */
      if (context == current_function_decl || context == inline_function_decl)
	context = 0;
      /* If this is non-local, handle it.  */
      if (context)
	{
5143 /* The following call just exists to abort if the context is
5144 not of a containing function. */
5145 find_function_data (context);
5147 temp = SAVE_EXPR_RTL (exp);
5148 if (temp && GET_CODE (temp) == REG)
5150 put_var_into_stack (exp);
5151 temp = SAVE_EXPR_RTL (exp);
	  if (temp == 0 || GET_CODE (temp) != MEM)
	    abort ();
5155 return change_address (temp, mode,
5156 fix_lexical_addr (XEXP (temp, 0), exp));
5158 if (SAVE_EXPR_RTL (exp) == 0)
	  if (mode == VOIDmode)
	    temp = const0_rtx;
	  else
5163 temp = assign_temp (type, 0, 0, 0);
5165 SAVE_EXPR_RTL (exp) = temp;
5166 if (!optimize && GET_CODE (temp) == REG)
	    save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
				      save_expr_regs);
5170 /* If the mode of TEMP does not match that of the expression, it
5171 must be a promoted value. We pass store_expr a SUBREG of the
5172 wanted mode but mark it so that we know that it was already
	     extended.  Note that `unsignedp' was modified above in
	     this case.  */
5176 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5178 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
5179 SUBREG_PROMOTED_VAR_P (temp) = 1;
5180 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5183 if (temp == const0_rtx)
5184 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5185 EXPAND_MEMORY_USE_BAD);
	  else
	    store_expr (TREE_OPERAND (exp, 0), temp, 0);
5190 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5191 must be a promoted value. We return a SUBREG of the wanted mode,
5192 but mark it so that we know that it was already extended. */
5194 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5195 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5197 /* Compute the signedness and make the proper SUBREG. */
5198 promote_mode (type, mode, &unsignedp, 0);
5199 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
5200 SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	  return temp;
	}
5205 return SAVE_EXPR_RTL (exp);
    case UNSAVE_EXPR:
      {
	temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
	TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
	return temp;
      }
5215 case PLACEHOLDER_EXPR:
      /* If there is an object on the head of the placeholder list,
	 see if some object in its references is of type TYPE.  For
	 further information, see tree.def.  */
5219 if (placeholder_list)
5221 tree need_type = TYPE_MAIN_VARIANT (type);
5223 tree old_list = placeholder_list;
5226 /* See if the object is the type that we want. */
5227 if ((TYPE_MAIN_VARIANT (TREE_TYPE (TREE_PURPOSE (placeholder_list)))
5229 object = TREE_PURPOSE (placeholder_list);
5231 /* Find the innermost reference that is of the type we want. */
5232 for (elt = TREE_PURPOSE (placeholder_list);
5234 && (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5235 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5236 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5237 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e');
5238 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
5239 || TREE_CODE (elt) == COND_EXPR)
5240 ? TREE_OPERAND (elt, 1) : TREE_OPERAND (elt, 0)))
5241 if (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5242 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (elt, 0)))
5245 object = TREE_OPERAND (elt, 0);
5251 /* Expand this object skipping the list entries before
5252 it was found in case it is also a PLACEHOLDER_EXPR.
	     In that case, we want to translate it using subsequent
	     entries.  */
5255 placeholder_list = TREE_CHAIN (placeholder_list);
5256 temp = expand_expr (object, original_target, tmode, ro_modifier);
5257 placeholder_list = old_list;
5262 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5265 case WITH_RECORD_EXPR:
5266 /* Put the object on the placeholder list, expand our first operand,
5267 and pop the list. */
5268 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5270 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
5271 tmode, ro_modifier);
5272 placeholder_list = TREE_CHAIN (placeholder_list);
5276 expand_exit_loop_if_false (NULL_PTR,
5277 invert_truthvalue (TREE_OPERAND (exp, 0)));
5282 expand_start_loop (1);
5283 expand_expr_stmt (TREE_OPERAND (exp, 0));
5291 tree vars = TREE_OPERAND (exp, 0);
5292 int vars_need_expansion = 0;
5294 /* Need to open a binding contour here because
5295 if there are any cleanups they must be contained here. */
5296 expand_start_bindings (0);
5298 /* Mark the corresponding BLOCK for output in its proper place. */
5299 if (TREE_OPERAND (exp, 2) != 0
5300 && ! TREE_USED (TREE_OPERAND (exp, 2)))
5301 insert_block (TREE_OPERAND (exp, 2));
5303 /* If VARS have not yet been expanded, expand them now. */
5306 if (DECL_RTL (vars) == 0)
5308 vars_need_expansion = 1;
5311 expand_decl_init (vars);
5312 vars = TREE_CHAIN (vars);
5315 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
5317 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
5323 if (RTL_EXPR_SEQUENCE (exp))
5325 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
5327 emit_insns (RTL_EXPR_SEQUENCE (exp));
5328 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
5330 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
5331 free_temps_for_rtl_expr (exp);
5332 return RTL_EXPR_RTL (exp);
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
5340 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
5341 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
5342 EXPAND_MEMORY_USE_BAD);
5346 /* All elts simple constants => refer to a constant in memory. But
5347 if this is a non-BLKmode mode, let it store a field at a time
5348 since that should make a CONST_INT or CONST_DOUBLE when we
5349 fold. Likewise, if we have a target we can use, it is best to
5350 store directly into the target unless the type is large enough
5351 that memcpy will be used. If we are making an initializer and
5352 all operands are constant, put it in memory as well. */
5353 else if ((TREE_STATIC (exp)
5354 && ((mode == BLKmode
5355 && ! (target != 0 && safe_from_p (target, exp)))
5356 || TREE_ADDRESSABLE (exp)
5357 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5358 && (move_by_pieces_ninsns
5359 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
5360 TYPE_ALIGN (type) / BITS_PER_UNIT)
5362 && ! mostly_zeros_p (exp))))
5363 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
5365 rtx constructor = output_constant_def (exp);
5366 if (modifier != EXPAND_CONST_ADDRESS
5367 && modifier != EXPAND_INITIALIZER
5368 && modifier != EXPAND_SUM
5369 && (! memory_address_p (GET_MODE (constructor),
5370 XEXP (constructor, 0))
5372 && GET_CODE (XEXP (constructor, 0)) != REG)))
5373 constructor = change_address (constructor, VOIDmode,
5374 XEXP (constructor, 0));
5380 /* Handle calls that pass values in multiple non-contiguous
5381 locations. The Irix 6 ABI has examples of this. */
5382 if (target == 0 || ! safe_from_p (target, exp)
5383 || GET_CODE (target) == PARALLEL)
5385 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
5386 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5388 target = assign_temp (type, 0, 1, 1);
5391 if (TREE_READONLY (exp))
5393 if (GET_CODE (target) == MEM)
5394 target = copy_rtx (target);
5396 RTX_UNCHANGING_P (target) = 1;
5399 store_constructor (exp, target, 0);
	tree exp1 = TREE_OPERAND (exp, 0);
	tree exp2;
5408 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
5409 op0 = memory_address (mode, op0);
5411 if (flag_check_memory_usage && !AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5413 enum memory_use_mode memory_usage;
5414 memory_usage = get_memory_usage_from_modifier (modifier);
5416 if (memory_usage != MEMORY_USE_DONT)
5417 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
			       op0, ptr_mode,
			       GEN_INT (int_size_in_bytes (type)),
5420 TYPE_MODE (sizetype),
5421 GEN_INT (memory_usage), QImode);
5424 temp = gen_rtx (MEM, mode, op0);
5425 /* If address was computed by addition,
5426 mark this as an element of an aggregate. */
5427 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5428 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
5429 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
5430 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
5431 || (TREE_CODE (exp1) == ADDR_EXPR
5432 && (exp2 = TREE_OPERAND (exp1, 0))
5433 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
5434 MEM_IN_STRUCT_P (temp) = 1;
5435 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
5437 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5438 here, because, in C and C++, the fact that a location is accessed
5439 through a pointer to const does not mean that the value there can
5440 never change. Languages where it can never change should
5441 also set TREE_STATIC. */
5442 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
	abort ();
5451 tree array = TREE_OPERAND (exp, 0);
5452 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5453 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5454 tree index = TREE_OPERAND (exp, 1);
5455 tree index_type = TREE_TYPE (index);
5458 /* Optimize the special-case of a zero lower bound.
5460 We convert the low_bound to sizetype to avoid some problems
5461 with constant folding. (E.g. suppose the lower bound is 1,
5462 and its mode is QI. Without the conversion, (ARRAY
5463 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5464 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5466 But sizetype isn't quite right either (especially if
5467 the lowbound is negative). FIXME */
5469 if (! integer_zerop (low_bound))
5470 index = fold (build (MINUS_EXPR, index_type, index,
5471 convert (sizetype, low_bound)));
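/* Illustrative sketch, not part of the original file: the wraparound the
   comment above warns about.  With a `low_bound' of 1 held in unsigned
   char, -(unsigned char) 1 is 255, so reassociating ARRAY+(INDEX-1)
   into (ARRAY+255)+INDEX would be wrong; converting the bound to the
   wider type before subtracting keeps the arithmetic exact.  */
static unsigned long
elt_offset (index, low_bound)
     unsigned long index;
     unsigned char low_bound;
{
  return index - (unsigned long) low_bound;	/* widen, then subtract */
}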
5473 /* Fold an expression like: "foo"[2].
5474 This is not done in fold so it won't happen inside &.
5475 Don't fold if this is for wide characters since it's too
5476 difficult to do correctly and this is a very rare case. */
5478 if (TREE_CODE (array) == STRING_CST
5479 && TREE_CODE (index) == INTEGER_CST
5480 && !TREE_INT_CST_HIGH (index)
5481 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
5482 && GET_MODE_CLASS (mode) == MODE_INT
5483 && GET_MODE_SIZE (mode) == 1)
5484 return GEN_INT (TREE_STRING_POINTER (array)[i]);
5486 /* If this is a constant index into a constant array,
5487 just get the value from the array. Handle both the cases when
5488 we have an explicit constructor and when our operand is a variable
5489 that was declared const. */
5491 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
5493 if (TREE_CODE (index) == INTEGER_CST
5494 && TREE_INT_CST_HIGH (index) == 0)
5496 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
5498 i = TREE_INT_CST_LOW (index);
		  while (elem && i--)
		    elem = TREE_CHAIN (elem);
		  if (elem)
5502 return expand_expr (fold (TREE_VALUE (elem)), target,
5503 tmode, ro_modifier);
5507 else if (optimize >= 1
5508 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
5509 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
5510 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
5512 if (TREE_CODE (index) == INTEGER_CST)
5514 tree init = DECL_INITIAL (array);
5516 i = TREE_INT_CST_LOW (index);
5517 if (TREE_CODE (init) == CONSTRUCTOR)
5519 tree elem = CONSTRUCTOR_ELTS (init);
			  while (elem
				 && !tree_int_cst_equal (TREE_PURPOSE (elem),
							 index))
			    elem = TREE_CHAIN (elem);
			  if (elem)
5525 return expand_expr (fold (TREE_VALUE (elem)), target,
5526 tmode, ro_modifier);
5528 else if (TREE_CODE (init) == STRING_CST
5529 && TREE_INT_CST_HIGH (index) == 0
5530 && (TREE_INT_CST_LOW (index)
5531 < TREE_STRING_LENGTH (init)))
			return (GEN_INT
				(TREE_STRING_POINTER
5534 (init)[TREE_INT_CST_LOW (index)]));
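/* Illustrative sketch, not part of the original file: the fold performed
   above.  A constant index into a string initializer needs no code at
   all; "foo"[2] simply becomes the integer value of 'o', provided the
   index is in range and the element is a one-byte integer mode, which
   mirrors the guards in the code above.  */
static int
fold_string_index (s, i, len)
     char *s;
     long i, len;
{
  return i < len ? s[i] : -1;	/* -1: out of range, expand normally */
}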
5539 /* ... fall through ... */
5543 /* If the operand is a CONSTRUCTOR, we can just extract the
5544 appropriate field if it is present. Don't do this if we have
5545 already written the data since we want to refer to that copy
5546 and varasm.c assumes that's what we'll do. */
5547 if (code != ARRAY_REF
5548 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
5549 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
5553 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
5554 elt = TREE_CHAIN (elt))
5555 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
5556 /* We can normally use the value of the field in the
5557 CONSTRUCTOR. However, if this is a bitfield in
5558 an integral mode that we can fit in a HOST_WIDE_INT,
5559 we must mask only the number of bits in the bitfield,
5560 since this is done implicitly by the constructor. If
5561 the bitfield does not meet either of those conditions,
5562 we can't do this optimization. */
5563 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
5564 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
5566 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
5567 <= HOST_BITS_PER_WIDE_INT))))
5569 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
5570 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
5572 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
5573 enum machine_mode imode
5574 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
5576 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
5578 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
5579 op0 = expand_and (op0, op1, target);
		      tree count
			= build_int_2 (GET_MODE_BITSIZE (imode) - bitsize, 0);
		      op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
					  target, 0);
		      op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
					  target, 0);
5598 enum machine_mode mode1;
5604 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					&mode1, &unsignedp, &volatilep,
					&alignment);
5608 /* If we got back the original object, something is wrong. Perhaps
5609 we are evaluating an expression too early. In any event, don't
	   infinitely recurse.  */
	if (tem == exp)
	  abort ();
5614 /* If TEM's type is a union of variable size, pass TARGET to the inner
5615 computation, since it will need a temporary and TARGET is known
5616 to have to do. This occurs in unchecked conversion in Ada. */
5618 op0 = expand_expr (tem,
5619 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
5620 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
				!= INTEGER_CST)
			    ? target : NULL_RTX),
			   VOIDmode,
5624 modifier == EXPAND_INITIALIZER ? modifier : 0);
5626 /* If this is a constant, put it into a register if it is a
5627 legitimate constant and memory if it isn't. */
5628 if (CONSTANT_P (op0))
5630 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
5631 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
5632 op0 = force_reg (mode, op0);
5634 op0 = validize_mem (force_const_mem (mode, op0));
	if (offset != 0)
	  {
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
	    if (GET_CODE (op0) != MEM)
	      abort ();
5643 op0 = change_address (op0, VOIDmode,
5644 gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
5645 force_reg (ptr_mode, offset_rtx)));
5648 /* Don't forget about volatility even if this is a bitfield. */
5649 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
5651 op0 = copy_rtx (op0);
5652 MEM_VOLATILE_P (op0) = 1;
5655 /* Check the access. */
5656 if (flag_check_memory_usage && GET_CODE (op0) == MEM)
5658 enum memory_use_mode memory_usage;
5659 memory_usage = get_memory_usage_from_modifier (modifier);
5661 if (memory_usage != MEMORY_USE_DONT)
	      {
		rtx to;
		int size;

		to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
5667 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
5669 /* Check the access right of the pointer. */
5670 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
				   to, ptr_mode,
				   GEN_INT (size / BITS_PER_UNIT),
5673 TYPE_MODE (sizetype),
5674 GEN_INT (memory_usage), QImode);
5678 /* In cases where an aligned union has an unaligned object
5679 as a field, we might be extracting a BLKmode value from
5680 an integer-mode (e.g., SImode) object. Handle this case
5681 by doing the extract into an object as wide as the field
5682 (which we know to be the width of a basic mode), then
5683 storing into memory, and changing the mode to BLKmode.
5684 If we ultimately want the address (EXPAND_CONST_ADDRESS or
5685 EXPAND_INITIALIZER), then we must not copy to a temporary. */
5686 if (mode1 == VOIDmode
5687 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5688 || (modifier != EXPAND_CONST_ADDRESS
5689 && modifier != EXPAND_INITIALIZER
5690 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
5691 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5692 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5693 /* If the field isn't aligned enough to fetch as a memref,
5694 fetch it as a bit field. */
5695 || (SLOW_UNALIGNED_ACCESS
5696 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
5697 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
5699 enum machine_mode ext_mode = mode;
5701 if (ext_mode == BLKmode)
5702 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
5704 if (ext_mode == BLKmode)
5706 /* In this case, BITPOS must start at a byte boundary and
5707 TARGET, if specified, must be a MEM. */
5708 if (GET_CODE (op0) != MEM
5709 || (target != 0 && GET_CODE (target) != MEM)
		    || bitpos % BITS_PER_UNIT != 0)
		  abort ();
5713 op0 = change_address (op0, VOIDmode,
5714 plus_constant (XEXP (op0, 0),
5715 bitpos / BITS_PER_UNIT));
		if (target == 0)
		  target = assign_temp (type, 0, 1, 1);
5719 emit_block_move (target, op0,
				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
					  / BITS_PER_UNIT),
				 1);

		return target;
	      }
5727 op0 = validize_mem (op0);
5729 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
5730 mark_reg_pointer (XEXP (op0, 0), alignment);
5732 op0 = extract_bit_field (op0, bitsize, bitpos,
5733 unsignedp, target, ext_mode, ext_mode,
				     alignment,
				     int_size_in_bytes (TREE_TYPE (tem)));
5737 /* If the result is a record type and BITSIZE is narrower than
5738 the mode of OP0, an integral mode, and this is a big endian
5739 machine, we must put the field into the high-order bits. */
5740 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
5741 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
5742 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
5743 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
					    - bitsize),
				  op0, 1);
5748 if (mode == BLKmode)
5750 rtx new = assign_stack_temp (ext_mode,
5751 bitsize / BITS_PER_UNIT, 0);
5753 emit_move_insn (new, op0);
5754 op0 = copy_rtx (new);
5755 PUT_MODE (op0, BLKmode);
5756 MEM_IN_STRUCT_P (op0) = 1;
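/* Illustrative sketch, not part of the original file: the big-endian
   adjustment above.  extract_bit_field leaves the BITSIZE-wide value in
   the low-order bits of the wider word; a big-endian target expects a
   record held in a register to occupy the high-order bits, so the value
   is shifted up.  Assumes an 8-bit byte for the illustration.  */
static unsigned long
field_to_high_bits (field, bitsize)
     unsigned long field;
     int bitsize;
{
  return field << ((int) sizeof (unsigned long) * 8 - bitsize);
}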
      /* If the result is BLKmode, use that to access the object
	 now as well.  */
      if (mode == BLKmode)
	mode1 = BLKmode;
5767 /* Get a reference to just this component. */
5768 if (modifier == EXPAND_CONST_ADDRESS
5769 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5770 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
5771 (bitpos / BITS_PER_UNIT)));
5773 op0 = change_address (op0, mode1,
5774 plus_constant (XEXP (op0, 0),
5775 (bitpos / BITS_PER_UNIT)));
5776 if (GET_CODE (XEXP (op0, 0)) == REG)
5777 mark_reg_pointer (XEXP (op0, 0), alignment);
5779 MEM_IN_STRUCT_P (op0) = 1;
5780 MEM_VOLATILE_P (op0) |= volatilep;
5781 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
5782 || modifier == EXPAND_CONST_ADDRESS
5783 || modifier == EXPAND_INITIALIZER)
5785 else if (target == 0)
5786 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5788 convert_move (target, op0, unsignedp);
5792 /* Intended for a reference to a buffer of a file-object in Pascal.
5793 But it's not certain that a special tree code will really be
5794 necessary for these. INDIRECT_REF might work for them. */
      /* Pascal set IN expression.

	 Algorithm:
	     rlo       = set_low - (set_low%bits_per_word);
5804 the_word = set [ (index - rlo)/bits_per_word ];
5805 bit_index = index % bits_per_word;
5806 bitmask = 1 << bit_index;
5807 return !!(the_word & bitmask); */
5809 tree set = TREE_OPERAND (exp, 0);
5810 tree index = TREE_OPERAND (exp, 1);
5811 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
5812 tree set_type = TREE_TYPE (set);
5813 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
5814 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
5815 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
5816 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
5817 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
5818 rtx setval = expand_expr (set, 0, VOIDmode, 0);
5819 rtx setaddr = XEXP (setval, 0);
5820 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
	rtx rlow;
	rtx diff, quo, rem, addr, bit, result;
5824 preexpand_calls (exp);
5826 /* If domain is empty, answer is no. Likewise if index is constant
5827 and out of bounds. */
5828 if ((TREE_CODE (set_high_bound) == INTEGER_CST
5829 && TREE_CODE (set_low_bound) == INTEGER_CST
5830 && tree_int_cst_lt (set_high_bound, set_low_bound)
5831 || (TREE_CODE (index) == INTEGER_CST
5832 && TREE_CODE (set_low_bound) == INTEGER_CST
5833 && tree_int_cst_lt (index, set_low_bound))
5834 || (TREE_CODE (set_high_bound) == INTEGER_CST
5835 && TREE_CODE (index) == INTEGER_CST
		 && tree_int_cst_lt (set_high_bound, index))))
	  return const0_rtx;
	if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5842 /* If we get here, we have to generate the code for both cases
5843 (in range and out of range). */
5845 op0 = gen_label_rtx ();
5846 op1 = gen_label_rtx ();
5848 if (! (GET_CODE (index_val) == CONST_INT
5849 && GET_CODE (lo_r) == CONST_INT))
5851 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
5852 GET_MODE (index_val), iunsignedp, 0);
5853 emit_jump_insn (gen_blt (op1));
5856 if (! (GET_CODE (index_val) == CONST_INT
5857 && GET_CODE (hi_r) == CONST_INT))
5859 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
5860 GET_MODE (index_val), iunsignedp, 0);
5861 emit_jump_insn (gen_bgt (op1));
	/* Calculate the element number of bit zero in the first word
	   of the set.  */
5866 if (GET_CODE (lo_r) == CONST_INT)
5867 rlow = GEN_INT (INTVAL (lo_r)
5868 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
5870 rlow = expand_binop (index_mode, and_optab, lo_r,
5871 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
5872 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5874 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
5875 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5877 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
5878 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5879 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
5880 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5882 addr = memory_address (byte_mode,
5883 expand_binop (index_mode, add_optab, diff,
5884 setaddr, NULL_RTX, iunsignedp,
5887 /* Extract the bit we want to examine */
5888 bit = expand_shift (RSHIFT_EXPR, byte_mode,
5889 gen_rtx (MEM, byte_mode, addr),
5890 make_tree (TREE_TYPE (index), rem),
5892 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
5893 GET_MODE (target) == byte_mode ? target : 0,
5894 1, OPTAB_LIB_WIDEN);
5896 if (result != target)
5897 convert_move (target, result, 1);
5899 /* Output the code to handle the out-of-range case. */
5902 emit_move_insn (target, const0_rtx);
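/* Illustrative sketch, not part of the original file: the in-range path
   above, following the algorithm comment, on a byte-addressed set with
   8 bits per unit.  `rlo' rounds the set's lower bound down to a unit
   boundary so the index splits into a byte number and a bit number.  */
static int
set_contains (set, set_low, index)
     unsigned char *set;
     long set_low, index;
{
  long rlo = set_low - set_low % 8;		/* first represented bit */
  unsigned char the_word = set[(index - rlo) / 8];
  int bit_index = (int) (index % 8);

  return (the_word >> bit_index) & 1;		/* !!(the_word & bitmask) */
}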
5907 case WITH_CLEANUP_EXPR:
5908 if (RTL_EXPR_RTL (exp) == 0)
	{
	  RTL_EXPR_RTL (exp)
	    = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
5912 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
5914 /* That's it for this cleanup. */
5915 TREE_OPERAND (exp, 2) = 0;
5917 return RTL_EXPR_RTL (exp);
5919 case CLEANUP_POINT_EXPR:
5921 extern int temp_slot_level;
5922 /* Start a new binding layer that will keep track of all cleanup
5923 actions to be performed. */
5924 expand_start_bindings (0);
5926 target_temp_slot_level = temp_slot_level;
5928 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
	/* If we're going to use this value, load it up now.  */
	if (! ignore)
	  op0 = force_not_mem (op0);
5932 preserve_temp_slots (op0);
5933 expand_end_bindings (NULL_TREE, 0, 0);
    case CALL_EXPR:
      /* Check for a built-in function.  */
5939 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5940 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5942 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5943 return expand_builtin (exp, target, subtarget, tmode, ignore);
5945 /* If this call was expanded already by preexpand_calls,
5946 just return the result we got. */
5947 if (CALL_EXPR_RTL (exp) != 0)
5948 return CALL_EXPR_RTL (exp);
5950 return expand_call (exp, target, ignore);
    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
5955 case REFERENCE_EXPR:
5956 if (TREE_CODE (type) == UNION_TYPE)
5958 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
5961 if (mode != BLKmode)
5962 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
	      else
		target = assign_temp (type, 0, 1, 1);
5967 if (GET_CODE (target) == MEM)
5968 /* Store data into beginning of memory target. */
5969 store_expr (TREE_OPERAND (exp, 0),
5970 change_address (target, TYPE_MODE (valtype), 0), 0);
5972 else if (GET_CODE (target) == REG)
5973 /* Store this field into a union of the proper type. */
5974 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
5975 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
5977 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
	  /* Return the entire union.  */
	  return target;
	}
5985 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5987 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
5990 /* If the signedness of the conversion differs and OP0 is
5991 a promoted SUBREG, clear that indication since we now
5992 have to do the proper extension. */
5993 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
5994 && GET_CODE (op0) == SUBREG)
	    SUBREG_PROMOTED_VAR_P (op0) = 0;

	  return op0;
	}
6000 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
      if (GET_MODE (op0) == mode)
	return op0;
6004 /* If OP0 is a constant, just convert it into the proper mode. */
      if (CONSTANT_P (op0))
	return
	  convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
			 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6010 if (modifier == EXPAND_INITIALIZER)
6011 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
      if (target == 0)
	return
	  convert_to_mode (mode, op0,
			   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      else
	convert_move (target, op0,
		      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
      /* We come here from MINUS_EXPR when the second operand is a
	 constant.  */
6026 this_optab = add_optab;
6028 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6029 something else, make sure we add the register to the constant and
6030 then to the other thing. This case can occur during strength
6031 reduction and doing it this way will produce better code if the
6032 frame pointer or argument pointer is eliminated.
6034 fold-const.c will ensure that the constant is always in the inner
6035 PLUS_EXPR, so the only case we need to do anything about is if
6036 sp, ap, or fp is our second argument, in which case we must swap
6037 the innermost first argument and our second argument. */
6039 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6040 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6041 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6042 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6043 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6044 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6046 tree t = TREE_OPERAND (exp, 1);
6048 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6049 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
6052 /* If the result is to be ptr_mode and we are adding an integer to
6053 something, we might be forming a constant. So try to use
6054 plus_constant. If it produces a sum and we can't accept it,
6055 use force_operand. This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.
6059 If this is an EXPAND_SUM call, always return the sum. */
6060 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
6061 || mode == ptr_mode)
6063 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
6064 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6065 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
6067 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
6069 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
6070 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6071 op1 = force_operand (op1, target);
6075 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6076 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
6077 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
6079 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6081 if (! CONSTANT_P (op0))
6083 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6084 VOIDmode, modifier);
6085 /* Don't go to both_summands if modifier
6086 says it's not right to return a PLUS. */
6087 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6091 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
6092 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6093 op0 = force_operand (op0, target);
6098 /* No sense saving up arithmetic to be done
6099 if it's all in the wrong mode to form part of an address.
     And force_operand won't know whether to sign-extend or
     zero-extend.  */
6102 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6103 || mode != ptr_mode)
6106 preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
	subtarget = 0;
6110 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
6111 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
6114 /* Make sure any term that's a sum with a constant comes last. */
6115 if (GET_CODE (op0) == PLUS
6116 && CONSTANT_P (XEXP (op0, 1)))
6122 /* If adding to a sum including a constant,
6123 associate it to put the constant outside. */
6124 if (GET_CODE (op1) == PLUS
6125 && CONSTANT_P (XEXP (op1, 1)))
6127 rtx constant_term = const0_rtx;
6129 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
6132 /* Ensure that MULT comes first if there is one. */
6133 else if (GET_CODE (op0) == MULT)
6134 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
6136 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
6138 /* Let's also eliminate constants from op0 if possible. */
6139 op0 = eliminate_constant_term (op0, &constant_term);
6141 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
6142 their sum should be a constant. Form it into OP1, since the
6143 result we want will then be OP0 + OP1. */
6145 temp = simplify_binary_operation (PLUS, mode, constant_term,
6150 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
6153 /* Put a constant term last and put a multiplication first. */
6154 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
6155 temp = op1, op1 = op0, op0 = temp;
6157 temp = simplify_binary_operation (PLUS, mode, op0, op1);
6158 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
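/* Illustrative sketch, not part of the original file: the reassociation
   above in scalar form.  Constant terms are peeled off both summands
   and folded into a single trailing constant, so the result keeps the
   sum-plus-constant shape that address formation prefers.  */
static long
assoc_plus (a, c1, b, c2)
     long a, c1, b, c2;
{
  return (a + b) + (c1 + c2);	/* the two constants fold into one term */
}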
6161 /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
6164 /* Handle difference of two symbolic constants,
6165 for the sake of an initializer. */
6166 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6167 && really_constant_p (TREE_OPERAND (exp, 0))
6168 && really_constant_p (TREE_OPERAND (exp, 1)))
6170 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
6171 VOIDmode, ro_modifier);
6172 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6173 VOIDmode, ro_modifier);
6175 /* If the last operand is a CONST_INT, use plus_constant of
6176 the negated constant. Else make the MINUS. */
6177 if (GET_CODE (op1) == CONST_INT)
6178 return plus_constant (op0, - INTVAL (op1));
6180 return gen_rtx (MINUS, mode, op0, op1);
6182 /* Convert A - const to A + (-const). */
6183 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6185 tree negated = fold (build1 (NEGATE_EXPR, type,
6186 TREE_OPERAND (exp, 1)));
	  /* Deal with the case where we can't negate the constant
	     in TYPE.  */
6190 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6192 tree newtype = signed_type (type);
6193 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
6194 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
6195 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
6197 if (! TREE_OVERFLOW (newneg))
6198 return expand_expr (convert (type,
6199 build (PLUS_EXPR, newtype,
6201 target, tmode, ro_modifier);
6205 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
      this_optab = sub_optab;
      goto binop;
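/* Illustrative sketch, not part of the original file: the rewrite above.
   A - const becomes A + (-const), but the negation is redone in the
   corresponding signed type when TYPE is unsigned or the negation
   overflowed, since e.g. -(unsigned char) 1 is 255 rather than -1.  */
static long
sub_const_as_add (a, c)
     long a, c;
{
  return a + (-c);	/* what the PLUS_EXPR path then expands */
}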
6213 preexpand_calls (exp);
6214 /* If first operand is constant, swap them.
6215 Thus the following special case checks need only
6216 check the second operand. */
6217 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6219 register tree t1 = TREE_OPERAND (exp, 0);
6220 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
6221 TREE_OPERAND (exp, 1) = t1;
6224 /* Attempt to return something suitable for generating an
6225 indexed address, for machines that support that. */
6227 if (modifier == EXPAND_SUM && mode == ptr_mode
6228 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6229 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
6231 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6234 /* Apply distributive law if OP0 is x+c. */
6235 if (GET_CODE (op0) == PLUS
6236 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
6237 return gen_rtx (PLUS, mode,
6238 gen_rtx (MULT, mode, XEXP (op0, 0),
6239 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
6240 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
6241 * INTVAL (XEXP (op0, 1))));
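/* Illustration (a sketch added for clarity, not from the original source):
with OP0 == (plus x 4) and a constant multiplier of 2, the return above
yields (plus (mult x 2) 8), again keeping the constant outermost for
indexed-address generation. */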
6243 if (GET_CODE (op0) != REG)
6244 op0 = force_operand (op0, NULL_RTX);
6245 if (GET_CODE (op0) != REG)
6246 op0 = copy_to_mode_reg (mode, op0);
6248 return gen_rtx (MULT, mode, op0,
6249 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
6252 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6255 /* Check for multiplying things that have been extended
6256 from a narrower type. If this machine supports multiplying
6257 in that narrower type with a result in the desired type,
6258 do it that way, and avoid the explicit type-conversion. */
6259 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
6260 && TREE_CODE (type) == INTEGER_TYPE
6261 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6262 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
6263 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6264 && int_fits_type_p (TREE_OPERAND (exp, 1),
6265 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6266 /* Don't use a widening multiply if a shift will do. */
6267 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
6268 > HOST_BITS_PER_WIDE_INT)
6269 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
6271 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6272 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6274 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
6275 /* If both operands are extended, they must either both
6276 be zero-extended or both be sign-extended. */
6277 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6279 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
6281 enum machine_mode innermode
6282 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
6283 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6284 ? smul_widen_optab : umul_widen_optab);
6285 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6286 ? umul_widen_optab : smul_widen_optab);
6287 if (mode == GET_MODE_WIDER_MODE (innermode))
6289 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
6291 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6292 NULL_RTX, VOIDmode, 0);
6293 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6294 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6297 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6298 NULL_RTX, VOIDmode, 0);
6301 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6302 && innermode == word_mode)
6305 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6306 NULL_RTX, VOIDmode, 0);
6307 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6308 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6311 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6312 NULL_RTX, VOIDmode, 0);
6313 temp = expand_binop (mode, other_optab, op0, op1, target,
6314 unsignedp, OPTAB_LIB_WIDEN);
6315 htem = expand_mult_highpart_adjust (innermode,
6316 gen_highpart (innermode, temp),
6318 gen_highpart (innermode, temp),
6320 emit_move_insn (gen_highpart (innermode, temp), htem);
6325 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6326 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6327 return expand_mult (mode, op0, op1, target, unsignedp);
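/* Illustration (a sketch added for clarity, not from the original source):
for a tree like (int) (short) a * (int) (short) b, the code above
prefers a short-by-short widening multiply (e.g. a mulhisi3-style
pattern, if the target provides one) over sign-extending both operands
and doing a full-width multiply. */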
6329 case TRUNC_DIV_EXPR:
6330 case FLOOR_DIV_EXPR:
6332 case ROUND_DIV_EXPR:
6333 case EXACT_DIV_EXPR:
6334 preexpand_calls (exp);
6335 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6337 /* Possible optimization: compute the dividend with EXPAND_SUM,
6338 then, if the divisor is constant, optimize the case
6339 where some terms of the dividend have coefficients divisible by it. */
6340 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6341 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6342 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
6345 this_optab = flodiv_optab;
6348 case TRUNC_MOD_EXPR:
6349 case FLOOR_MOD_EXPR:
6351 case ROUND_MOD_EXPR:
6352 preexpand_calls (exp);
6353 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6355 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6356 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6357 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
6359 case FIX_ROUND_EXPR:
6360 case FIX_FLOOR_EXPR:
6362 abort (); /* Not used for C. */
6364 case FIX_TRUNC_EXPR:
6365 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6367 target = gen_reg_rtx (mode);
6368 expand_fix (target, op0, unsignedp);
6372 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6374 target = gen_reg_rtx (mode);
6375 /* expand_float can't figure out what to do if FROM has VOIDmode.
6376 So give it the correct mode. With -O, cse will optimize this. */
6377 if (GET_MODE (op0) == VOIDmode)
6378 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6380 expand_float (target, op0,
6381 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6385 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6386 temp = expand_unop (mode, neg_optab, op0, target, 0);
6392 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6394 /* Handle complex values specially. */
6395 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
6396 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6397 return expand_complex_abs (mode, op0, target, unsignedp);
6399 /* Unsigned abs is simply the operand. Testing here means we don't
6400 risk generating incorrect code below. */
6401 if (TREE_UNSIGNED (type))
6404 return expand_abs (mode, op0, target, unsignedp,
6405 safe_from_p (target, TREE_OPERAND (exp, 0)));
6409 target = original_target;
6410 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
6411 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
6412 || GET_MODE (target) != mode
6413 || (GET_CODE (target) == REG
6414 && REGNO (target) < FIRST_PSEUDO_REGISTER))
6415 target = gen_reg_rtx (mode);
6416 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6417 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6419 /* First try to do it with a special MIN or MAX instruction.
6420 If that does not win, use a conditional jump to select the proper value. */
6422 this_optab = (TREE_UNSIGNED (type)
6423 ? (code == MIN_EXPR ? umin_optab : umax_optab)
6424 : (code == MIN_EXPR ? smin_optab : smax_optab));
6426 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
6431 /* At this point, a MEM target is no longer useful; we will get better code without it. */
6434 if (GET_CODE (target) == MEM)
6435 target = gen_reg_rtx (mode);
6438 emit_move_insn (target, op0);
6440 op0 = gen_label_rtx ();
6442 /* If this mode is an integer too wide to compare properly,
6443 compare word by word. Rely on cse to optimize constant cases. */
6444 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
6446 if (code == MAX_EXPR)
6447 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6448 target, op1, NULL_RTX, op0);
6450 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6451 op1, target, NULL_RTX, op0);
6452 emit_move_insn (target, op1);
6456 if (code == MAX_EXPR)
6457 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6458 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
6459 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
6461 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6462 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
6463 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
6464 if (temp == const0_rtx)
6465 emit_move_insn (target, op1);
6466 else if (temp != const_true_rtx)
6468 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
6469 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
6472 emit_move_insn (target, op1);
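/* Illustration (a sketch added for clarity, not from the original source):
for MAX_EXPR the fallback above emits roughly

target = op0;
if (target >= op1) goto join;    -- GE or GEU as the type requires
target = op1;
join:

where the join point is the OP0 label made above. */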
6479 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6480 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
6486 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6487 temp = expand_unop (mode, ffs_optab, op0, target, 1);
6492 /* ??? Can optimize bitwise operations with one arg constant.
6493 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6494 and (a bitwise1 b) bitwise2 b (etc)
6495 but that is probably not worth while. */
6497 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6498 boolean values when we want in all cases to compute both of them. In
6499 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6500 as actual zero-or-1 values and then bitwise anding. In cases where
6501 there cannot be any side effects, better code would be made by
6502 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6503 how to recognize those cases. */
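/* Illustration (a note added for clarity, not from the original source):
"a && b" (TRUTH_ANDIF_EXPR) must not evaluate B when A is false,
whereas TRUTH_AND_EXPR promises both operands are to be evaluated as
zero-or-1 values, so it can be expanded below as a plain bitwise AND. */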
6505 case TRUTH_AND_EXPR:
6507 this_optab = and_optab;
6512 this_optab = ior_optab;
6515 case TRUTH_XOR_EXPR:
6517 this_optab = xor_optab;
6524 preexpand_calls (exp);
6525 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6527 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6528 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
6531 /* Could determine the answer when only additive constants differ. Also,
6532 the addition of one can be handled by changing the condition. */
6539 preexpand_calls (exp);
6540 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
6544 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
6545 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
6547 && GET_CODE (original_target) == REG
6548 && (GET_MODE (original_target)
6549 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6551 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
6554 if (temp != original_target)
6555 temp = copy_to_reg (temp);
6557 op1 = gen_label_rtx ();
6558 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
6559 GET_MODE (temp), unsignedp, 0);
6560 emit_jump_insn (gen_beq (op1));
6561 emit_move_insn (temp, const1_rtx);
6566 /* If no set-flag instruction, must generate a conditional
6567 store into a temporary variable. Drop through
6568 and handle this like && and ||. */
6570 case TRUTH_ANDIF_EXPR:
6571 case TRUTH_ORIF_EXPR:
6573 && (target == 0 || ! safe_from_p (target, exp)
6574 /* Make sure we don't have a hard reg (such as function's return
6575 value) live across basic blocks, if not optimizing. */
6576 || (!optimize && GET_CODE (target) == REG
6577 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
6578 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6581 emit_clr_insn (target);
6583 op1 = gen_label_rtx ();
6584 jumpifnot (exp, op1);
6587 emit_0_to_1_insn (target);
6590 return ignore ? const0_rtx : target;
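/* Illustration (a sketch added for clarity, not from the original source):
the jump-based expansion above amounts to

target = 0;
if (! exp) goto lab;
target = 1;
lab:

where jumpifnot lays down the short-circuit evaluation of EXP. */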
6592 case TRUTH_NOT_EXPR:
6593 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6594 /* The parser is careful to generate TRUTH_NOT_EXPR
6595 only with operands that are always zero or one. */
6596 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
6597 target, 1, OPTAB_LIB_WIDEN);
6603 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6605 return expand_expr (TREE_OPERAND (exp, 1),
6606 (ignore ? const0_rtx : target),
6610 /* If we would have a "singleton" (see below) were it not for a
6611 conversion in each arm, bring that conversion back out. */
6612 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6613 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
6614 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
6615 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
6617 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
6618 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
6620 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
6621 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6622 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
6623 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
6624 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
6625 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6626 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
6627 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
6628 return expand_expr (build1 (NOP_EXPR, type,
6629 build (COND_EXPR, TREE_TYPE (true),
6630 TREE_OPERAND (exp, 0),
6632 target, tmode, modifier);
6636 /* Note that COND_EXPRs whose type is a structure or union
6637 are required to be constructed to contain assignments of
6638 a temporary variable, so that we can evaluate them here
6639 for side effect only. If type is void, we must do likewise. */
6641 /* If an arm of the branch requires a cleanup,
6642 only that cleanup is performed. */
6645 tree binary_op = 0, unary_op = 0;
6647 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
6648 convert it to our mode, if necessary. */
6649 if (integer_onep (TREE_OPERAND (exp, 1))
6650 && integer_zerop (TREE_OPERAND (exp, 2))
6651 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6655 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6660 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
6661 if (GET_MODE (op0) == mode)
6665 target = gen_reg_rtx (mode);
6666 convert_move (target, op0, unsignedp);
6670 /* Check for X ? A + B : A. If we have this, we can copy A to the
6671 output and conditionally add B. Similarly for unary operations.
6672 Don't do this if X has side-effects because those side effects
6673 might affect A or B and the "?" operation is a sequence point in
6674 ANSI. (operand_equal_p tests for side effects.) */
6676 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
6677 && operand_equal_p (TREE_OPERAND (exp, 2),
6678 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6679 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
6680 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
6681 && operand_equal_p (TREE_OPERAND (exp, 1),
6682 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6683 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
6684 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
6685 && operand_equal_p (TREE_OPERAND (exp, 2),
6686 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6687 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
6688 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
6689 && operand_equal_p (TREE_OPERAND (exp, 1),
6690 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6691 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
6693 /* If we are not to produce a result, we have no target. Otherwise,
6694 if a target was specified use it; it will not be used as an
6695 intermediate target unless it is safe. If no target, use a temporary. */
6700 else if (original_target
6701 && (safe_from_p (original_target, TREE_OPERAND (exp, 0))
6702 || (singleton && GET_CODE (original_target) == REG
6703 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
6704 && original_target == var_rtx (singleton)))
6705 && GET_MODE (original_target) == mode
6706 && ! (GET_CODE (original_target) == MEM
6707 && MEM_VOLATILE_P (original_target)))
6708 temp = original_target;
6709 else if (TREE_ADDRESSABLE (type))
6712 temp = assign_temp (type, 0, 0, 1);
6714 /* If we had X ? A + C : A, with C a constant power of 2, and we can
6715 do the test of X as a store-flag operation, do this as
6716 A + ((X != 0) << log C). Similarly for other simple binary
6717 operators. Only do for C == 1 if BRANCH_COST is low. */
6718 if (temp && singleton && binary_op
6719 && (TREE_CODE (binary_op) == PLUS_EXPR
6720 || TREE_CODE (binary_op) == MINUS_EXPR
6721 || TREE_CODE (binary_op) == BIT_IOR_EXPR
6722 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
6723 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
6724 : integer_onep (TREE_OPERAND (binary_op, 1)))
6725 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6728 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
6729 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
6730 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
6733 /* If we had X ? A : A + 1, do this as A + (X == 0).
6735 We have to invert the truth value here and then put it
6736 back later if do_store_flag fails. We cannot simply copy
6737 TREE_OPERAND (exp, 0) to another variable and modify that
6738 because invert_truthvalue can modify the tree pointed to by its argument. */
6740 if (singleton == TREE_OPERAND (exp, 1))
6741 TREE_OPERAND (exp, 0)
6742 = invert_truthvalue (TREE_OPERAND (exp, 0));
6744 result = do_store_flag (TREE_OPERAND (exp, 0),
6745 (safe_from_p (temp, singleton)
6747 mode, BRANCH_COST <= 1);
6749 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
6750 result = expand_shift (LSHIFT_EXPR, mode, result,
6751 build_int_2 (tree_log2
6755 (safe_from_p (temp, singleton)
6756 ? temp : NULL_RTX), 0);
6760 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
6761 return expand_binop (mode, boptab, op1, result, temp,
6762 unsignedp, OPTAB_LIB_WIDEN);
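/* Illustration (a sketch added for clarity, not from the original source):
for "x ? a + 4 : a" this path computes a + ((x != 0) << 2); RESULT
holds the store-flag value of X, the shift above scales it by log2 of
the constant, and the expand_binop call above adds it to A. */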
6764 else if (singleton == TREE_OPERAND (exp, 1))
6765 TREE_OPERAND (exp, 0)
6766 = invert_truthvalue (TREE_OPERAND (exp, 0));
6769 do_pending_stack_adjust ();
6771 op0 = gen_label_rtx ();
6773 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
6777 /* If the target conflicts with the other operand of the
6778 binary op, we can't use it. Also, we can't use the target
6779 if it is a hard register, because evaluating the condition
6780 might clobber it. */
6782 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
6783 || (GET_CODE (temp) == REG
6784 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
6785 temp = gen_reg_rtx (mode);
6786 store_expr (singleton, temp, 0);
6789 expand_expr (singleton,
6790 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6791 if (singleton == TREE_OPERAND (exp, 1))
6792 jumpif (TREE_OPERAND (exp, 0), op0);
6794 jumpifnot (TREE_OPERAND (exp, 0), op0);
6796 start_cleanup_deferal ();
6797 if (binary_op && temp == 0)
6798 /* Just touch the other operand. */
6799 expand_expr (TREE_OPERAND (binary_op, 1),
6800 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6802 store_expr (build (TREE_CODE (binary_op), type,
6803 make_tree (type, temp),
6804 TREE_OPERAND (binary_op, 1)),
6807 store_expr (build1 (TREE_CODE (unary_op), type,
6808 make_tree (type, temp)),
6812 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
6813 comparison operator. If we have one of these cases, set the
6814 output to A, branch on A (cse will merge these two references),
6815 then set the output to FOO. */
6817 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6818 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6819 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6820 TREE_OPERAND (exp, 1), 0)
6821 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6822 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
6824 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6825 temp = gen_reg_rtx (mode);
6826 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6827 jumpif (TREE_OPERAND (exp, 0), op0);
6829 start_cleanup_deferal ();
6830 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6834 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6835 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6836 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6837 TREE_OPERAND (exp, 2), 0)
6838 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6839 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
6841 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6842 temp = gen_reg_rtx (mode);
6843 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6844 jumpifnot (TREE_OPERAND (exp, 0), op0);
6846 start_cleanup_deferal ();
6847 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6852 op1 = gen_label_rtx ();
6853 jumpifnot (TREE_OPERAND (exp, 0), op0);
6855 start_cleanup_deferal ();
6857 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6859 expand_expr (TREE_OPERAND (exp, 1),
6860 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6861 end_cleanup_deferal ();
6863 emit_jump_insn (gen_jump (op1));
6866 start_cleanup_deferal ();
6868 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6870 expand_expr (TREE_OPERAND (exp, 2),
6871 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6874 end_cleanup_deferal ();
6885 /* Something needs to be initialized, but we didn't know
6886 where that thing was when building the tree. For example,
6887 it could be the return value of a function, or a parameter
6888 to a function which is laid down in the stack, or a temporary
6889 variable which must be passed by reference.
6891 We guarantee that the expression will either be constructed
6892 or copied into our original target. */
6894 tree slot = TREE_OPERAND (exp, 0);
6895 tree cleanups = NULL_TREE;
6899 if (TREE_CODE (slot) != VAR_DECL)
6903 target = original_target;
6907 if (DECL_RTL (slot) != 0)
6909 target = DECL_RTL (slot);
6910 /* If we have already expanded the slot, don't do it again. */
6912 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6917 target = assign_temp (type, 2, 1, 1);
6918 /* All temp slots at this level must not conflict. */
6919 preserve_temp_slots (target);
6920 DECL_RTL (slot) = target;
6922 /* Since SLOT is not known to the called function
6923 to belong to its stack frame, we must build an explicit
6924 cleanup. This case occurs when we must build up a reference
6925 to pass the reference as an argument. In this case,
6926 it is very likely that such a reference need not be built here. */
6929 if (TREE_OPERAND (exp, 2) == 0)
6930 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
6931 cleanups = TREE_OPERAND (exp, 2);
6936 /* This case does occur, when expanding a parameter which
6937 needs to be constructed on the stack. The target
6938 is the actual stack address that we want to initialize.
6939 The function we call will perform the cleanup in this case. */
6941 /* If we have already assigned it space, use that space,
6942 not the target that we were passed in, as our target
6943 parameter is only a hint. */
6944 if (DECL_RTL (slot) != 0)
6946 target = DECL_RTL (slot);
6947 /* If we have already expanded the slot, don't do it again. */
6949 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6953 DECL_RTL (slot) = target;
6956 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
6957 /* Mark it as expanded. */
6958 TREE_OPERAND (exp, 1) = NULL_TREE;
6960 store_expr (exp1, target, 0);
6962 expand_decl_cleanup (NULL_TREE, cleanups);
6969 tree lhs = TREE_OPERAND (exp, 0);
6970 tree rhs = TREE_OPERAND (exp, 1);
6971 tree noncopied_parts = 0;
6972 tree lhs_type = TREE_TYPE (lhs);
6974 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6975 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
6976 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
6977 TYPE_NONCOPIED_PARTS (lhs_type));
6978 while (noncopied_parts != 0)
6980 expand_assignment (TREE_VALUE (noncopied_parts),
6981 TREE_PURPOSE (noncopied_parts), 0, 0);
6982 noncopied_parts = TREE_CHAIN (noncopied_parts);
6989 /* If lhs is complex, expand calls in rhs before computing it.
6990 That's so we don't compute a pointer and save it over a call.
6991 If lhs is simple, compute it first so we can give it as a
6992 target if the rhs is just a call. This avoids an extra temp and copy
6993 and that prevents a partial-subsumption which makes bad code.
6994 Actually we could treat component_ref's of vars like vars. */
6996 tree lhs = TREE_OPERAND (exp, 0);
6997 tree rhs = TREE_OPERAND (exp, 1);
6998 tree noncopied_parts = 0;
6999 tree lhs_type = TREE_TYPE (lhs);
7003 if (TREE_CODE (lhs) != VAR_DECL
7004 && TREE_CODE (lhs) != RESULT_DECL
7005 && TREE_CODE (lhs) != PARM_DECL
7006 && ! (TREE_CODE (lhs) == INDIRECT_REF
7007 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
7008 preexpand_calls (exp);
7010 /* Check for |= or &= of a bitfield of size one into another bitfield
7011 of size 1. In this case, (unless we need the result of the
7012 assignment) we can do this more efficiently with a
7013 test followed by an assignment, if necessary.
7015 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7016 things change so we do, this code should be enhanced to support it. */
7019 && TREE_CODE (lhs) == COMPONENT_REF
7020 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7021 || TREE_CODE (rhs) == BIT_AND_EXPR)
7022 && TREE_OPERAND (rhs, 0) == lhs
7023 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7024 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7025 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7027 rtx label = gen_label_rtx ();
7029 do_jump (TREE_OPERAND (rhs, 1),
7030 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7031 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7032 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7033 (TREE_CODE (rhs) == BIT_IOR_EXPR
7035 : integer_zero_node)),
7037 do_pending_stack_adjust ();
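/* Illustration (a sketch added for clarity, not from the original source):
for one-bit fields, "s.a |= s.b;" becomes roughly "if (s.b) s.a = 1;"
and "s.a &= s.b;" becomes "if (! s.b) s.a = 0;", jumping past the
store whenever the right-hand bit would leave the field unchanged. */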
7042 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7043 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7044 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7045 TYPE_NONCOPIED_PARTS (lhs_type));
7047 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7048 while (noncopied_parts != 0)
7050 expand_assignment (TREE_PURPOSE (noncopied_parts),
7051 TREE_VALUE (noncopied_parts), 0, 0);
7052 noncopied_parts = TREE_CHAIN (noncopied_parts);
7057 case PREINCREMENT_EXPR:
7058 case PREDECREMENT_EXPR:
7059 return expand_increment (exp, 0, ignore);
7061 case POSTINCREMENT_EXPR:
7062 case POSTDECREMENT_EXPR:
7063 /* Faster to treat as pre-increment if result is not used. */
7064 return expand_increment (exp, ! ignore, ignore);
7067 /* If nonzero, TEMP will be set to the address of something that might
7068 be a MEM corresponding to a stack slot. */
7071 /* Are we taking the address of a nested function? */
7072 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
7073 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
7074 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0)))
7076 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7077 op0 = force_operand (op0, target);
7079 /* If we are taking the address of something erroneous, just return zero. */
7081 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7085 /* We make sure to pass const0_rtx down if we came in with
7086 ignore set, to avoid doing the cleanups twice for something. */
7087 op0 = expand_expr (TREE_OPERAND (exp, 0),
7088 ignore ? const0_rtx : NULL_RTX, VOIDmode,
7089 (modifier == EXPAND_INITIALIZER
7090 ? modifier : EXPAND_CONST_ADDRESS));
7092 /* If we are going to ignore the result, OP0 will have been set
7093 to const0_rtx, so just return it. Don't get confused and
7094 think we are taking the address of the constant. */
7098 op0 = protect_from_queue (op0, 0);
7100 /* We would like the object in memory. If it is a constant,
7101 we can have it be statically allocated into memory. For
7102 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
7103 memory and store the value into it. */
7105 if (CONSTANT_P (op0))
7106 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7108 else if (GET_CODE (op0) == MEM)
7110 mark_temp_addr_taken (op0);
7111 temp = XEXP (op0, 0);
7114 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7115 || GET_CODE (op0) == CONCAT)
7117 /* If this object is in a register, copy it into memory. */
7119 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7120 rtx memloc = assign_temp (inner_type, 1, 1, 1);
7122 mark_temp_addr_taken (memloc);
7123 emit_move_insn (memloc, op0);
7127 if (GET_CODE (op0) != MEM)
7130 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7132 temp = XEXP (op0, 0);
7133 #ifdef POINTERS_EXTEND_UNSIGNED
7134 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7135 && mode == ptr_mode)
7136 temp = convert_memory_address (ptr_mode, temp);
7141 op0 = force_operand (XEXP (op0, 0), target);
7144 if (flag_force_addr && GET_CODE (op0) != REG)
7145 op0 = force_reg (Pmode, op0);
7147 if (GET_CODE (op0) == REG
7148 && ! REG_USERVAR_P (op0))
7149 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
7151 /* If we might have had a temp slot, add an equivalent address for it. */
7154 update_temp_slot_address (temp, op0);
7156 #ifdef POINTERS_EXTEND_UNSIGNED
7157 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7158 && mode == ptr_mode)
7159 op0 = convert_memory_address (ptr_mode, op0);
7164 case ENTRY_VALUE_EXPR:
7167 /* COMPLEX type for Extended Pascal & Fortran */
7170 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7173 /* Get the rtx code of the operands. */
7174 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7175 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7178 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7182 /* Move the real (op0) and imaginary (op1) parts to their location. */
7183 emit_move_insn (gen_realpart (mode, target), op0);
7184 emit_move_insn (gen_imagpart (mode, target), op1);
7186 insns = get_insns ();
7189 /* Complex construction should appear as a single unit. */
7190 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7191 each with a separate pseudo as destination.
7192 It's not correct for flow to treat them as a unit. */
7193 if (GET_CODE (target) != CONCAT)
7194 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
7202 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7203 return gen_realpart (mode, op0);
7206 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7207 return gen_imagpart (mode, op0);
7211 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7215 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7218 target = gen_reg_rtx (mode);
7222 /* Store the realpart and the negated imagpart to target. */
7223 emit_move_insn (gen_realpart (partmode, target),
7224 gen_realpart (partmode, op0));
7226 imag_t = gen_imagpart (partmode, target);
7227 temp = expand_unop (partmode, neg_optab,
7228 gen_imagpart (partmode, op0), imag_t, 0);
7230 emit_move_insn (imag_t, temp);
7232 insns = get_insns ();
7235 /* Conjugate should appear as a single unit.
7236 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
7237 each with a separate pseudo as destination.
7238 It's not correct for flow to treat them as a unit. */
7239 if (GET_CODE (target) != CONCAT)
7240 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
7247 case TRY_CATCH_EXPR:
7249 tree handler = TREE_OPERAND (exp, 1);
7251 expand_eh_region_start ();
7253 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7255 expand_eh_region_end (handler);
7262 rtx dcc = get_dynamic_cleanup_chain ();
7263 emit_move_insn (dcc, validize_mem (gen_rtx (MEM, Pmode, dcc)));
7269 rtx dhc = get_dynamic_handler_chain ();
7270 emit_move_insn (dhc, validize_mem (gen_rtx (MEM, Pmode, dhc)));
7275 op0 = CONST0_RTX (tmode);
7281 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7284 /* Here to do an ordinary binary operator, generating an instruction
7285 from the optab already placed in `this_optab'. */
7287 preexpand_calls (exp);
7288 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
7290 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7291 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7293 temp = expand_binop (mode, this_optab, op0, op1, target,
7294 unsignedp, OPTAB_LIB_WIDEN);
7301 /* Emit bytecode to evaluate the given expression EXP to the stack. */
7304 bc_expand_expr (exp)
7307 enum tree_code code;
7310 struct binary_operator *binoptab;
7311 struct unary_operator *unoptab;
7312 struct increment_operator *incroptab;
7313 struct bc_label *lab, *lab1;
7314 enum bytecode_opcode opcode;
7317 code = TREE_CODE (exp);
7323 if (DECL_RTL (exp) == 0)
7325 error_with_decl (exp, "prior parameter's size depends on `%s'");
7329 bc_load_parmaddr (DECL_RTL (exp));
7330 bc_load_memory (TREE_TYPE (exp), exp);
7336 if (DECL_RTL (exp) == 0)
7340 if (BYTECODE_LABEL (DECL_RTL (exp)))
7341 bc_load_externaddr (DECL_RTL (exp));
7343 bc_load_localaddr (DECL_RTL (exp));
7345 if (TREE_PUBLIC (exp))
7346 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
7347 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
7349 bc_load_localaddr (DECL_RTL (exp));
7351 bc_load_memory (TREE_TYPE (exp), exp);
7356 #ifdef DEBUG_PRINT_CODE
7357 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
7359 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
7361 : TYPE_MODE (TREE_TYPE (exp)))],
7362 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
7368 #ifdef DEBUG_PRINT_CODE
7369 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
7371 /* FIX THIS: find a better way to pass real_cst's. -bson */
7372 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
7373 (double) TREE_REAL_CST (exp));
7382 /* We build a call description vector describing the type of
7383 the return value and of the arguments; this call vector,
7384 together with a pointer to a location for the return value
7385 and the base of the argument list, is passed to the low
7386 level machine dependent call subroutine, which is responsible
7387 for putting the arguments wherever real functions expect
7388 them, as well as getting the return value back. */
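/* Illustration (a note added for clarity, not from the original source):
the finished vector begins with the argument count, followed by a
(runtime type code, size in bytes) pair for the return value and then
one such pair for each argument, as built up by the tree_cons calls
below. */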
7390 tree calldesc = 0, arg;
7394 /* Push the evaluated args on the evaluation stack in reverse
7395 order. Also make an entry for each arg in the calldesc
7396 vector while we're at it. */
7398 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7400 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
7403 bc_expand_expr (TREE_VALUE (arg));
7405 calldesc = tree_cons ((tree) 0,
7406 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
7408 calldesc = tree_cons ((tree) 0,
7409 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
7413 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7415 /* Allocate a location for the return value and push its
7416 address on the evaluation stack. Also make an entry
7417 at the front of the calldesc for the return value type. */
7419 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7420 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
7421 bc_load_localaddr (retval);
7423 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
7424 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
7426 /* Prepend the argument count. */
7427 calldesc = tree_cons ((tree) 0,
7428 build_int_2 (nargs, 0),
7431 /* Push the address of the call description vector on the stack. */
7432 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
7433 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
7434 build_index_type (build_int_2 (nargs * 2, 0)));
7435 r = output_constant_def (calldesc);
7436 bc_load_externaddr (r);
7438 /* Push the address of the function to be called. */
7439 bc_expand_expr (TREE_OPERAND (exp, 0));
7441 /* Call the function, popping its address and the calldesc vector
7442 address off the evaluation stack in the process. */
7443 bc_emit_instruction (call);
7445 /* Pop the arguments off the stack. */
7446 bc_adjust_stack (nargs);
7448 /* Load the return value onto the stack. */
7449 bc_load_localaddr (retval);
7450 bc_load_memory (type, TREE_OPERAND (exp, 0));
7456 if (!SAVE_EXPR_RTL (exp))
7458 /* First time around: copy to local variable */
7459 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
7460 TYPE_ALIGN (TREE_TYPE (exp)));
7461 bc_expand_expr (TREE_OPERAND (exp, 0));
7462 bc_emit_instruction (duplicate);
7464 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7465 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7469 /* Consecutive reference: use saved copy */
7470 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7471 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7476 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
7477 how are they handled instead? */
7480 TREE_USED (exp) = 1;
7481 bc_expand_expr (STMT_BODY (exp));
7488 bc_expand_expr (TREE_OPERAND (exp, 0));
7489 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
7494 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
7499 bc_expand_address (TREE_OPERAND (exp, 0));
7504 bc_expand_expr (TREE_OPERAND (exp, 0));
7505 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7510 bc_expand_expr (bc_canonicalize_array_ref (exp));
7515 bc_expand_component_address (exp);
7517 /* If we have a bitfield, generate a proper load */
7518 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
7523 bc_expand_expr (TREE_OPERAND (exp, 0));
7524 bc_emit_instruction (drop);
7525 bc_expand_expr (TREE_OPERAND (exp, 1));
7530 bc_expand_expr (TREE_OPERAND (exp, 0));
7531 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7532 lab = bc_get_bytecode_label ();
7533 bc_emit_bytecode (xjumpifnot);
7534 bc_emit_bytecode_labelref (lab);
7536 #ifdef DEBUG_PRINT_CODE
7537 fputc ('\n', stderr);
7539 bc_expand_expr (TREE_OPERAND (exp, 1));
7540 lab1 = bc_get_bytecode_label ();
7541 bc_emit_bytecode (jump);
7542 bc_emit_bytecode_labelref (lab1);
7544 #ifdef DEBUG_PRINT_CODE
7545 fputc ('\n', stderr);
7548 bc_emit_bytecode_labeldef (lab);
7549 bc_expand_expr (TREE_OPERAND (exp, 2));
7550 bc_emit_bytecode_labeldef (lab1);
7553 case TRUTH_ANDIF_EXPR:
7555 opcode = xjumpifnot;
7558 case TRUTH_ORIF_EXPR:
7565 binoptab = optab_plus_expr;
7570 binoptab = optab_minus_expr;
7575 binoptab = optab_mult_expr;
7578 case TRUNC_DIV_EXPR:
7579 case FLOOR_DIV_EXPR:
7581 case ROUND_DIV_EXPR:
7582 case EXACT_DIV_EXPR:
7584 binoptab = optab_trunc_div_expr;
7587 case TRUNC_MOD_EXPR:
7588 case FLOOR_MOD_EXPR:
7590 case ROUND_MOD_EXPR:
7592 binoptab = optab_trunc_mod_expr;
7595 case FIX_ROUND_EXPR:
7596 case FIX_FLOOR_EXPR:
7598 abort (); /* Not used for C. */
7600 case FIX_TRUNC_EXPR:
7607 abort (); /* FIXME */
7611 binoptab = optab_rdiv_expr;
7616 binoptab = optab_bit_and_expr;
7621 binoptab = optab_bit_ior_expr;
7626 binoptab = optab_bit_xor_expr;
7631 binoptab = optab_lshift_expr;
7636 binoptab = optab_rshift_expr;
7639 case TRUTH_AND_EXPR:
7641 binoptab = optab_truth_and_expr;
7646 binoptab = optab_truth_or_expr;
7651 binoptab = optab_lt_expr;
7656 binoptab = optab_le_expr;
7661 binoptab = optab_ge_expr;
7666 binoptab = optab_gt_expr;
7671 binoptab = optab_eq_expr;
7676 binoptab = optab_ne_expr;
7681 unoptab = optab_negate_expr;
7686 unoptab = optab_bit_not_expr;
7689 case TRUTH_NOT_EXPR:
7691 unoptab = optab_truth_not_expr;
7694 case PREDECREMENT_EXPR:
7696 incroptab = optab_predecrement_expr;
7699 case PREINCREMENT_EXPR:
7701 incroptab = optab_preincrement_expr;
7704 case POSTDECREMENT_EXPR:
7706 incroptab = optab_postdecrement_expr;
7709 case POSTINCREMENT_EXPR:
7711 incroptab = optab_postincrement_expr;
7716 bc_expand_constructor (exp);
7726 tree vars = TREE_OPERAND (exp, 0);
7727 int vars_need_expansion = 0;
7729 /* Need to open a binding contour here because
7730 if there are any cleanups they must be contained here. */
7731 expand_start_bindings (0);
7733 /* Mark the corresponding BLOCK for output. */
7734 if (TREE_OPERAND (exp, 2) != 0)
7735 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
7737 /* If VARS have not yet been expanded, expand them now. */
7740 if (DECL_RTL (vars) == 0)
7742 vars_need_expansion = 1;
7745 expand_decl_init (vars);
7746 vars = TREE_CHAIN (vars);
7749 bc_expand_expr (TREE_OPERAND (exp, 1));
7751 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7761 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
7762 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
7768 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7774 bc_expand_expr (TREE_OPERAND (exp, 0));
7775 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7776 lab = bc_get_bytecode_label ();
7778 bc_emit_instruction (duplicate);
7779 bc_emit_bytecode (opcode);
7780 bc_emit_bytecode_labelref (lab);
7782 #ifdef DEBUG_PRINT_CODE
7783 fputc ('\n', stderr);
7786 bc_emit_instruction (drop);
7788 bc_expand_expr (TREE_OPERAND (exp, 1));
7789 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
7790 bc_emit_bytecode_labeldef (lab);
7796 type = TREE_TYPE (TREE_OPERAND (exp, 0));
7798 /* Push the quantum. */
7799 bc_expand_expr (TREE_OPERAND (exp, 1));
7801 /* Convert it to the lvalue's type. */
7802 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
7804 /* Push the address of the lvalue */
7805 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
7807 /* Perform actual increment */
7808 bc_expand_increment (incroptab, type);
7812 /* Return the alignment in bits of EXP, a pointer valued expression.
7813 But don't return more than MAX_ALIGN no matter what.
7814 The alignment returned is, by default, the alignment of the thing that
7815 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7817 Otherwise, look at the expression to see if we can do better, i.e., if the
7818 expression is actually pointing at an object whose alignment is tighter. */
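/* Illustration (a sketch added for clarity, not from the original source):
for an argument like "&i", where i is an int DECL with 32-bit
alignment, this returns MIN (32, MAX_ALIGN); for a pointer with no
visible provenance it can do no better than the alignment of the
pointed-to type. */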
7821 get_pointer_alignment (exp, max_align)
7825 unsigned align, inner;
7827 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7830 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7831 align = MIN (align, max_align);
7835 switch (TREE_CODE (exp))
7839 case NON_LVALUE_EXPR:
7840 exp = TREE_OPERAND (exp, 0);
7841 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7843 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7844 align = MIN (inner, max_align);
7848 /* If sum of pointer + int, restrict our maximum alignment to that
7849 imposed by the integer. If not, we can't do any better than ALIGN. */
7851 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
7854 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
7859 exp = TREE_OPERAND (exp, 0);
7863 /* See what we are pointing at and look at its alignment. */
7864 exp = TREE_OPERAND (exp, 0);
7865 if (TREE_CODE (exp) == FUNCTION_DECL)
7866 align = FUNCTION_BOUNDARY;
7867 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
7868 align = DECL_ALIGN (exp);
7869 #ifdef CONSTANT_ALIGNMENT
7870 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7871 align = CONSTANT_ALIGNMENT (exp, align);
7873 return MIN (align, max_align);
7881 /* Return the tree node and offset if a given argument corresponds to
7882 a string constant. */
7885 string_constant (arg, ptr_offset)
7891 if (TREE_CODE (arg) == ADDR_EXPR
7892 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7894 *ptr_offset = integer_zero_node;
7895 return TREE_OPERAND (arg, 0);
7897 else if (TREE_CODE (arg) == PLUS_EXPR)
7899 tree arg0 = TREE_OPERAND (arg, 0);
7900 tree arg1 = TREE_OPERAND (arg, 1);
7905 if (TREE_CODE (arg0) == ADDR_EXPR
7906 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
7909 return TREE_OPERAND (arg0, 0);
7911 else if (TREE_CODE (arg1) == ADDR_EXPR
7912 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
7915 return TREE_OPERAND (arg1, 0);
7922 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
7923 way, because it could contain a zero byte in the middle.
7924 TREE_STRING_LENGTH is the size of the character array, not the string.
7926 Unfortunately, string_constant can't access the values of const char
7927 arrays with initializers, so neither can we do so here. */
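/* Illustration (a sketch added for clarity, not from the original source):
c_strlen of "hello" + 2 folds to 3, while "foo\0bar" with a
non-constant offset fails (returns 0), since the answer would depend
on which side of the embedded null the offset lands. */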
7937 src = string_constant (src, &offset_node);
7940 max = TREE_STRING_LENGTH (src);
7941 ptr = TREE_STRING_POINTER (src);
7942 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
7944 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7945 compute the offset to the following null if we don't know where to
7946 start searching for it. */
7948 for (i = 0; i < max; i++)
7951 /* We don't know the starting offset, but we do know that the string
7952 has no internal zero bytes. We can assume that the offset falls
7953 within the bounds of the string; otherwise, the programmer deserves
7954 what he gets. Subtract the offset from the length of the string, and return that. */
7956 /* This would perhaps not be valid if we were dealing with named
7957 arrays in addition to literal string constants. */
7958 return size_binop (MINUS_EXPR, size_int (max), offset_node);
7961 /* We have a known offset into the string. Start searching there for
7962 a null character. */
7963 if (offset_node == 0)
7967 /* Did we get a long long offset? If so, punt. */
7968 if (TREE_INT_CST_HIGH (offset_node) != 0)
7970 offset = TREE_INT_CST_LOW (offset_node);
7972 /* If the offset is known to be out of bounds, warn, and call strlen at runtime. */
7974 if (offset < 0 || offset > max)
7976 warning ("offset outside bounds of constant string");
7979 /* Use strlen to search for the first zero byte. Since any strings
7980 constructed with build_string will have nulls appended, we win even
7981 if we get handed something like (char[4])"abcd".
7983 Since OFFSET is our starting index into the string, no further
7984 calculation is needed. */
7985 return size_int (strlen (ptr + offset));
7989 expand_builtin_return_addr (fndecl_code, count, tem)
7990 enum built_in_function fndecl_code;
7996 /* Some machines need special handling before we can access
7997 arbitrary frames. For example, on the sparc, we must first flush
7998 all register windows to the stack. */
7999 #ifdef SETUP_FRAME_ADDRESSES
8000 SETUP_FRAME_ADDRESSES ();
8003 /* On the sparc, the return address is not in the frame, it is in a
8004 register. There is no way to access it off of the current frame
8005 pointer, but it can be accessed off the previous frame pointer by
8006 reading the value from the register window save area. */
8007 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
8008 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
8012 /* Scan back COUNT frames to the specified frame. */
8013 for (i = 0; i < count; i++)
8015 /* Assume the dynamic chain pointer is in the word that the
8016 frame address points to, unless otherwise specified. */
8017 #ifdef DYNAMIC_CHAIN_ADDRESS
8018 tem = DYNAMIC_CHAIN_ADDRESS (tem);
8020 tem = memory_address (Pmode, tem);
8021 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
8024 /* For __builtin_frame_address, return what we've got. */
8025 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
8028 /* For __builtin_return_address, get the return address from that frame. */
8030 #ifdef RETURN_ADDR_RTX
8031 tem = RETURN_ADDR_RTX (count, tem);
8033 tem = memory_address (Pmode,
8034 plus_constant (tem, GET_MODE_SIZE (Pmode)));
8035 tem = gen_rtx (MEM, Pmode, tem);
8040 /* __builtin_setjmp is passed a pointer to an array of five words (not
8041 all will be used on all machines). It operates similarly to the C
8042 library function of the same name, but is more efficient. Much of
8043 the code below (and for longjmp) is copied from the handling of non-local gotos.
8046 NOTE: This is intended for use by GNAT and the exception handling
8047 scheme in the compiler and will only work in the method used by them. */
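/* Illustration (a note added for clarity, not from the original source):
of the five-word buffer, word 0 receives the frame pointer, word 1
the address of the receiver label, and the remaining words form the
machine-dependent stack save area, as laid out by the stores below. */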
8051 expand_builtin_setjmp (buf_addr, target)
8055 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
8056 enum machine_mode sa_mode = Pmode, value_mode;
8058 int old_inhibit_defer_pop = inhibit_defer_pop;
8060 = RETURN_POPS_ARGS (get_identifier ("__dummy"),
8061 build_function_type (void_type_node, NULL_TREE),
8064 CUMULATIVE_ARGS args_so_far;
8068 value_mode = TYPE_MODE (integer_type_node);
8070 #ifdef POINTERS_EXTEND_UNSIGNED
8071 buf_addr = convert_memory_address (Pmode, buf_addr);
8074 buf_addr = force_reg (Pmode, buf_addr);
8076 if (target == 0 || GET_CODE (target) != REG
8077 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8078 target = gen_reg_rtx (value_mode);
8082 CONST_CALL_P (emit_note (NULL_PTR, NOTE_INSN_SETJMP)) = 1;
8083 current_function_calls_setjmp = 1;
8085 /* We store the frame pointer and the address of lab1 in the buffer
8086 and use the rest of it for the stack save area, which is
8087 machine-dependent. */
8088 emit_move_insn (gen_rtx (MEM, Pmode, buf_addr),
8089 virtual_stack_vars_rtx);
8091 (validize_mem (gen_rtx (MEM, Pmode,
8092 plus_constant (buf_addr,
8093 GET_MODE_SIZE (Pmode)))),
8094 gen_rtx (LABEL_REF, Pmode, lab1));
8096 #ifdef HAVE_save_stack_nonlocal
8097 if (HAVE_save_stack_nonlocal)
8098 sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
8101 stack_save = gen_rtx (MEM, sa_mode,
8102 plus_constant (buf_addr,
8103 2 * GET_MODE_SIZE (Pmode)));
8104 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
8108 emit_insn (gen_setjmp ());
8111 /* Set TARGET to zero and branch around the other case. */
8112 emit_move_insn (target, const0_rtx);
8113 emit_jump_insn (gen_jump (lab2));
8117 /* Note that setjmp clobbers FP when we get here, so we have to make
8118 sure it's marked as used by this function. */
8119 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
8121 /* Mark the static chain as clobbered here so life information
8122 doesn't get messed up for it. */
8123 emit_insn (gen_rtx (CLOBBER, VOIDmode, static_chain_rtx));
8125 /* Now put in the code to restore the frame pointer, and argument
8126 pointer, if needed. The code below is from expand_end_bindings
8127 in stmt.c; see detailed documentation there. */
8128 #ifdef HAVE_nonlocal_goto
8129 if (! HAVE_nonlocal_goto)
8131 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8133 /* Do we need to do something like:
8135 current_function_has_nonlocal_label = 1;
8137 here? It seems like we might have to, or some subset of that
8138 functionality, but I am unsure. (mrs) */
8140 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8141 if (fixed_regs[ARG_POINTER_REGNUM])
8143 #ifdef ELIMINABLE_REGS
8144 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
8146 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8147 if (elim_regs[i].from == ARG_POINTER_REGNUM
8148 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8151 if (i == sizeof elim_regs / sizeof elim_regs [0])
8154 /* Now restore our arg pointer from the address at which it
8155 was saved in our stack frame.
8156 If there hasn't been space allocated for it yet, make some now. */
8158 if (arg_pointer_save_area == 0)
8159 arg_pointer_save_area
8160 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8161 emit_move_insn (virtual_incoming_args_rtx,
8162 copy_to_reg (arg_pointer_save_area));
8167 #ifdef HAVE_nonlocal_goto_receiver
8168 if (HAVE_nonlocal_goto_receiver)
8169 emit_insn (gen_nonlocal_goto_receiver ());
8171 /* The static chain pointer contains the address of a dummy function.
8172 We need to call it here to handle some PIC cases of restoring a
8173 global pointer. Then return 1. */
8174 op0 = copy_to_mode_reg (Pmode, static_chain_rtx);
8176 /* We can't actually call emit_library_call here, so do everything
8177 it does, which isn't much for a libfunc with no args. */
8178 op0 = memory_address (FUNCTION_MODE, op0);
8180 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE,
8181 gen_rtx (SYMBOL_REF, Pmode, "__dummy"), 1);
8182 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1);
8184 #ifndef ACCUMULATE_OUTGOING_ARGS
8185 #ifdef HAVE_call_pop
8187 emit_call_insn (gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, op0),
8188 const0_rtx, next_arg_reg,
8189 GEN_INT (return_pops)));
8196 emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, op0),
8197 const0_rtx, next_arg_reg, const0_rtx));
8202 emit_move_insn (target, const1_rtx);
8208 /* Expand an expression EXP that calls a built-in function,
8209 with result going to TARGET if that's convenient
8210 (and in mode MODE if that's convenient).
8211 SUBTARGET may be used as the target for computing one of EXP's operands.
8212 IGNORE is nonzero if the value is to be ignored. */
8214 #define CALLED_AS_BUILT_IN(NODE) \
8215 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
8218 expand_builtin (exp, target, subtarget, mode, ignore)
8222 enum machine_mode mode;
8225 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8226 tree arglist = TREE_OPERAND (exp, 1);
8229 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
8230 optab builtin_optab;
8232 switch (DECL_FUNCTION_CODE (fndecl))
8237 /* build_function_call changes these into ABS_EXPR. */
8242 /* Treat these like sqrt, but only if the user asks for them. */
8243 if (! flag_fast_math)
8245 case BUILT_IN_FSQRT:
8246 /* If not optimizing, call the library function. */
8251 /* Arg could be wrong type if user redeclared this fcn wrong. */
8252 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
8255 /* Stabilize and compute the argument. */
8256 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
8257 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
8259 exp = copy_node (exp);
8260 arglist = copy_node (arglist);
8261 TREE_OPERAND (exp, 1) = arglist;
8262 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
8264 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8266 /* Make a suitable register to place result in. */
8267 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8272 switch (DECL_FUNCTION_CODE (fndecl))
8275 builtin_optab = sin_optab; break;
8277 builtin_optab = cos_optab; break;
8278 case BUILT_IN_FSQRT:
8279 builtin_optab = sqrt_optab; break;
8284 /* Compute into TARGET.
8285 Set TARGET to wherever the result comes back. */
8286 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8287 builtin_optab, op0, target, 0);
8289 /* If we were unable to expand via the builtin, stop the
8290 sequence (without outputting the insns) and break, causing
8291 a call to the library function. */
8298 /* Check the results by default. But if flag_fast_math is turned on,
8299 then assume sqrt will always be called with valid arguments. */
8301 if (! flag_fast_math)
8303 /* Don't define the builtin FP instructions
8304 if your machine is not IEEE. */
8305 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8308 lab1 = gen_label_rtx ();
8310 /* Test the result; if it is NaN, set errno=EDOM because
8311 the argument was not in the domain. */
8312 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
8313 emit_jump_insn (gen_beq (lab1));
8317 #ifdef GEN_ERRNO_RTX
8318 rtx errno_rtx = GEN_ERRNO_RTX;
8321 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
8324 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8327 /* We can't set errno=EDOM directly; let the library call do it.
8328 Pop the arguments right away in case the call gets deleted. */
8330 expand_call (exp, target, 0);
8337 /* Output the entire sequence. */
8338 insns = get_insns ();
8344 /* __builtin_apply_args returns a block of memory allocated on
8345 the stack into which is stored the arg pointer, structure
8346 value address, static chain, and all the registers that might
8347 possibly be used in performing a function call. The code is
8348 moved to the start of the function so the incoming values are saved. */
8350 case BUILT_IN_APPLY_ARGS:
8351 /* Don't do __builtin_apply_args more than once in a function.
8352 Save the result of the first call and reuse it. */
8353 if (apply_args_value != 0)
8354 return apply_args_value;
8356 /* When this function is called, it means that registers must be
8357 saved on entry to this function. So we migrate the
8358 call to the first insn of this function. */
8363 temp = expand_builtin_apply_args ();
8367 apply_args_value = temp;
8369 /* Put the sequence after the NOTE that starts the function.
8370 If this is inside a SEQUENCE, make the outer-level insn
8371 chain current, so the code is placed at the start of the function. */
8373 push_topmost_sequence ();
8374 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8375 pop_topmost_sequence ();
8379 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8380 FUNCTION with a copy of the parameters described by
8381 ARGUMENTS, and ARGSIZE. It returns a block of memory
8382 allocated on the stack into which is stored all the registers
8383 that might possibly be used for returning the result of a
8384 function. ARGUMENTS is the value returned by
8385 __builtin_apply_args. ARGSIZE is the number of bytes of
8386 arguments that must be copied. ??? How should this value be
8387 computed? We'll also need a safe worst case value for varargs functions. */
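/* Illustration (a sketch added for clarity, not from the original source;
the name fn and the 64-byte argument size are made up):

void *args = __builtin_apply_args ();
void *result = __builtin_apply ((void (*)()) fn, args, 64);
__builtin_return (result);

forwards the current function's arguments to FN, assuming 64 bytes is
a safe upper bound on the size of the argument block. */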
8389 case BUILT_IN_APPLY:
8391 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8392 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8393 || TREE_CHAIN (arglist) == 0
8394 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8395 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8396 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8404 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8405 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
8407 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8410 /* __builtin_return (RESULT) causes the function to return the
8411 value described by RESULT. RESULT is address of the block of
8412 memory returned by __builtin_apply. */
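/* Taken together, __builtin_apply_args, __builtin_apply, and
   __builtin_return let one function forward its entire argument list
   to another function whose prototype it does not know. A sketch of
   typical user code (the function name and the worst-case argument
   size of 64 bytes are illustrative assumptions only):

	void *args = __builtin_apply_args ();
	void *result = __builtin_apply ((void (*) ()) target_fn, args, 64);
	__builtin_return (result);
*/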
8413 case BUILT_IN_RETURN:
8415 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8416 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8417 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8418 NULL_RTX, VOIDmode, 0));
8421 case BUILT_IN_SAVEREGS:
8422 /* Don't do __builtin_saveregs more than once in a function.
8423 Save the result of the first call and reuse it. */
8424 if (saveregs_value != 0)
8425 return saveregs_value;
8427 /* When this function is called, it means that registers must be
8428 saved on entry to this function. So we migrate the
8429 call to the first insn of this function. */
8433 /* Now really call the function. `expand_call' does not call
8434 expand_builtin, so there is no danger of infinite recursion here. */
8437 #ifdef EXPAND_BUILTIN_SAVEREGS
8438 /* Do whatever the machine needs done in this case. */
8439 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8441 /* The register where the function returns its value
8442 is likely to have something else in it, such as an argument.
8443 So preserve that register around the call. */
8445 if (value_mode != VOIDmode)
8447 rtx valreg = hard_libcall_value (value_mode);
8448 rtx saved_valreg = gen_reg_rtx (value_mode);
8450 emit_move_insn (saved_valreg, valreg);
8451 temp = expand_call (exp, target, ignore);
8452 emit_move_insn (valreg, saved_valreg);
8455 /* Generate the call, putting the value in a pseudo. */
8456 temp = expand_call (exp, target, ignore);
8462 saveregs_value = temp;
8464 /* Put the sequence after the NOTE that starts the function.
8465 If this is inside a SEQUENCE, make the outer-level insn
8466 chain current, so the code is placed at the start of the
8467 function. */
8468 push_topmost_sequence ();
8469 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8470 pop_topmost_sequence ();
8474 /* __builtin_args_info (N) returns word N of the arg space info
8475 for the current function. The number and meanings of words
8476 is controlled by the definition of CUMULATIVE_ARGS. */
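/* As an illustration only: on a target whose CUMULATIVE_ARGS is a
   single int counter of argument words, __builtin_args_info (0) would
   expand to that counter's current value as a compile-time constant.
   The real layout and meaning of the words are target-defined. */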
8477 case BUILT_IN_ARGS_INFO:
8479 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
8481 int *word_ptr = (int *) &current_function_args_info;
8482 tree type, elts, result;
8484 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8485 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8486 __FILE__, __LINE__);
8490 tree arg = TREE_VALUE (arglist);
8491 if (TREE_CODE (arg) != INTEGER_CST)
8492 error ("argument of `__builtin_args_info' must be constant");
8495 int wordnum = TREE_INT_CST_LOW (arg);
8497 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8498 error ("argument of `__builtin_args_info' out of range");
8500 return GEN_INT (word_ptr[wordnum]);
8504 error ("missing argument in `__builtin_args_info'");
8509 for (i = 0; i < nwords; i++)
8510 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0), elts);
8512 type = build_array_type (integer_type_node,
8513 build_index_type (build_int_2 (nwords, 0)));
8514 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8515 TREE_CONSTANT (result) = 1;
8516 TREE_STATIC (result) = 1;
8517 result = build (INDIRECT_REF, build_pointer_type (type), result);
8518 TREE_CONSTANT (result) = 1;
8519 return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
8523 /* Return the address of the first anonymous stack arg. */
8524 case BUILT_IN_NEXT_ARG:
8526 tree fntype = TREE_TYPE (current_function_decl);
8528 if ((TYPE_ARG_TYPES (fntype) == 0
8529 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8530 == void_type_node))
8531 && ! current_function_varargs)
8533 error ("`va_start' used in function with fixed args");
8539 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8540 tree arg = TREE_VALUE (arglist);
8542 /* Strip off all nops for the sake of the comparison. This
8543 is not quite the same as STRIP_NOPS. It does more.
8544 We must also strip off INDIRECT_EXPR for C++ reference
8545 parameters. */
8546 while (TREE_CODE (arg) == NOP_EXPR
8547 || TREE_CODE (arg) == CONVERT_EXPR
8548 || TREE_CODE (arg) == NON_LVALUE_EXPR
8549 || TREE_CODE (arg) == INDIRECT_REF)
8550 arg = TREE_OPERAND (arg, 0);
8551 if (arg != last_parm)
8552 warning ("second parameter of `va_start' not last named argument");
8554 else if (! current_function_varargs)
8555 /* Evidently an out of date version of <stdarg.h>; can't validate
8556 va_start's second argument, but can still work as intended. */
8557 warning ("`__builtin_next_arg' called without an argument");
8560 return expand_binop (Pmode, add_optab,
8561 current_function_internal_arg_pointer,
8562 current_function_arg_offset_rtx,
8563 NULL_RTX, 0, OPTAB_LIB_WIDEN);
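/* The sum computed above is the incoming argument pointer plus the
   offset of the first anonymous argument; a stdarg-style va_start
   typically stores exactly this address into the va_list. */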
8565 case BUILT_IN_CLASSIFY_TYPE:
8568 tree type = TREE_TYPE (TREE_VALUE (arglist));
8569 enum tree_code code = TREE_CODE (type);
8570 if (code == VOID_TYPE)
8571 return GEN_INT (void_type_class);
8572 if (code == INTEGER_TYPE)
8573 return GEN_INT (integer_type_class);
8574 if (code == CHAR_TYPE)
8575 return GEN_INT (char_type_class);
8576 if (code == ENUMERAL_TYPE)
8577 return GEN_INT (enumeral_type_class);
8578 if (code == BOOLEAN_TYPE)
8579 return GEN_INT (boolean_type_class);
8580 if (code == POINTER_TYPE)
8581 return GEN_INT (pointer_type_class);
8582 if (code == REFERENCE_TYPE)
8583 return GEN_INT (reference_type_class);
8584 if (code == OFFSET_TYPE)
8585 return GEN_INT (offset_type_class);
8586 if (code == REAL_TYPE)
8587 return GEN_INT (real_type_class);
8588 if (code == COMPLEX_TYPE)
8589 return GEN_INT (complex_type_class);
8590 if (code == FUNCTION_TYPE)
8591 return GEN_INT (function_type_class);
8592 if (code == METHOD_TYPE)
8593 return GEN_INT (method_type_class);
8594 if (code == RECORD_TYPE)
8595 return GEN_INT (record_type_class);
8596 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8597 return GEN_INT (union_type_class);
8598 if (code == ARRAY_TYPE)
8600 if (TYPE_STRING_FLAG (type))
8601 return GEN_INT (string_type_class);
8603 return GEN_INT (array_type_class);
8605 if (code == SET_TYPE)
8606 return GEN_INT (set_type_class);
8607 if (code == FILE_TYPE)
8608 return GEN_INT (file_type_class);
8609 if (code == LANG_TYPE)
8610 return GEN_INT (lang_type_class);
8612 return GEN_INT (no_type_class);
8614 case BUILT_IN_CONSTANT_P:
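/* Illustrative expansions: __builtin_constant_p (3) and
   __builtin_constant_p ("abc") both fold to 1 here, while
   __builtin_constant_p (some_variable) folds to 0. */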
8619 tree arg = TREE_VALUE (arglist);
8622 return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
8623 || (TREE_CODE (arg) == ADDR_EXPR
8624 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8625 ? const1_rtx : const0_rtx);
8628 case BUILT_IN_FRAME_ADDRESS:
8629 /* The argument must be a nonnegative integer constant.
8630 It counts the number of frames to scan up the stack.
8631 The value is the address of that frame. */
8632 case BUILT_IN_RETURN_ADDRESS:
8633 /* The argument must be a nonnegative integer constant.
8634 It counts the number of frames to scan up the stack.
8635 The value is the return address saved in that frame. */
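/* For example, __builtin_return_address (0) yields the return address
   of the current frame, and __builtin_frame_address (1) the frame
   address of the caller, when the frame chain can be followed. */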
8637 /* Warning about missing arg was already issued. */
8639 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
8640 || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
8642 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8643 error ("invalid arg to `__builtin_frame_address'");
8645 error ("invalid arg to `__builtin_return_address'");
8650 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
8651 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
8652 hard_frame_pointer_rtx);
8654 /* For __builtin_frame_address, return what we've got. */
8655 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8658 if (GET_CODE (tem) != REG)
8659 tem = copy_to_reg (tem);
8663 /* Returns the address of the area where the structure is returned.
8664 0 otherwise. */
8665 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
8666 if (arglist != 0
8667 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
8668 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
8671 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
8673 case BUILT_IN_ALLOCA:
8675 /* Arg could be non-integer if user redeclared this fcn wrong. */
8676 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8679 /* Compute the argument. */
8680 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
8682 /* Allocate the desired space. */
8683 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
8686 /* If not optimizing, call the library function. */
8687 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8691 /* Arg could be non-integer if user redeclared this fcn wrong. */
8692 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8695 /* Compute the argument. */
8696 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8697 /* Compute ffs, into TARGET if possible.
8698 Set TARGET to wherever the result comes back. */
8699 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8700 ffs_optab, op0, target, 1);
8705 case BUILT_IN_STRLEN:
8706 /* If not optimizing, call the library function. */
8707 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8711 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8712 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8716 tree src = TREE_VALUE (arglist);
8717 tree len = c_strlen (src);
8720 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8722 rtx result, src_rtx, char_rtx;
8723 enum machine_mode insn_mode = value_mode, char_mode;
8724 enum insn_code icode;
8726 /* If the length is known, just return it. */
8728 return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);
8730 /* If SRC is not a pointer type, don't do this operation inline. */
8734 /* Call a function if we can't compute strlen in the right mode. */
8736 while (insn_mode != VOIDmode)
8738 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
8739 if (icode != CODE_FOR_nothing)
8742 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
8744 if (insn_mode == VOIDmode)
8747 /* Make a place to write the result of the instruction. */
8748 result = target;
8749 if (! (result != 0
8750 && GET_CODE (result) == REG
8751 && GET_MODE (result) == insn_mode
8752 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8753 result = gen_reg_rtx (insn_mode);
8755 /* Make sure the operands are acceptable to the predicates. */
8757 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
8758 result = gen_reg_rtx (insn_mode);
8760 src_rtx = memory_address (BLKmode,
8761 expand_expr (src, NULL_RTX, ptr_mode,
8763 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
8764 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
8766 char_rtx = const0_rtx;
8767 char_mode = insn_operand_mode[(int)icode][2];
8768 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
8769 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
8771 emit_insn (GEN_FCN (icode) (result,
8772 gen_rtx (MEM, BLKmode, src_rtx),
8773 char_rtx, GEN_INT (align)));
8775 /* Return the value in the proper mode for this function. */
8776 if (GET_MODE (result) == value_mode)
8778 else if (target != 0)
8780 convert_move (target, result, 0);
8784 return convert_to_mode (value_mode, result, 0);
8787 case BUILT_IN_STRCPY:
8788 /* If not optimizing, call the library function. */
8789 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8793 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8794 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8795 || TREE_CHAIN (arglist) == 0
8796 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8800 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
8805 len = size_binop (PLUS_EXPR, len, integer_one_node);
8807 chainon (arglist, build_tree_list (NULL_TREE, len));
8811 case BUILT_IN_MEMCPY:
8812 /* If not optimizing, call the library function. */
8813 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8817 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8818 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8819 || TREE_CHAIN (arglist) == 0
8820 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8821 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8822 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8826 tree dest = TREE_VALUE (arglist);
8827 tree src = TREE_VALUE (TREE_CHAIN (arglist));
8828 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8832 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8834 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8835 rtx dest_rtx, dest_mem, src_mem;
8837 /* If either SRC or DEST is not a pointer type, don't do
8838 this operation in-line. */
8839 if (src_align == 0 || dest_align == 0)
8841 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
8842 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8846 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8847 dest_mem = gen_rtx (MEM, BLKmode,
8848 memory_address (BLKmode, dest_rtx));
8849 /* There could be a void* cast on top of the object. */
8850 while (TREE_CODE (dest) == NOP_EXPR)
8851 dest = TREE_OPERAND (dest, 0);
8852 type = TREE_TYPE (TREE_TYPE (dest));
8853 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8854 src_mem = gen_rtx (MEM, BLKmode,
8855 memory_address (BLKmode,
8856 expand_expr (src, NULL_RTX,
8859 /* There could be a void* cast on top of the object. */
8860 while (TREE_CODE (src) == NOP_EXPR)
8861 src = TREE_OPERAND (src, 0);
8862 type = TREE_TYPE (TREE_TYPE (src));
8863 MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
8865 /* Copy word part most expediently. */
8866 emit_block_move (dest_mem, src_mem,
8867 expand_expr (len, NULL_RTX, VOIDmode, 0),
8868 MIN (src_align, dest_align));
8869 return force_operand (dest_rtx, NULL_RTX);
8872 case BUILT_IN_MEMSET:
8873 /* If not optimizing, call the library function. */
8874 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8878 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8879 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8880 || TREE_CHAIN (arglist) == 0
8881 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
8883 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8885 != (TREE_CODE (TREE_TYPE
8887 (TREE_CHAIN (TREE_CHAIN (arglist))))))))
8891 tree dest = TREE_VALUE (arglist);
8892 tree val = TREE_VALUE (TREE_CHAIN (arglist));
8893 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8897 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8898 rtx dest_rtx, dest_mem;
8900 /* If DEST is not a pointer type, don't do this
8901 operation in-line. */
8902 if (dest_align == 0)
8905 /* If VAL is not 0, don't do this operation in-line. */
8906 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
8909 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8910 dest_mem = gen_rtx (MEM, BLKmode,
8911 memory_address (BLKmode, dest_rtx));
8912 /* There could be a void* cast on top of the object. */
8913 while (TREE_CODE (dest) == NOP_EXPR)
8914 dest = TREE_OPERAND (dest, 0);
8915 type = TREE_TYPE (TREE_TYPE (dest));
8916 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8918 clear_storage (dest_mem, expand_expr (len, NULL_RTX, VOIDmode, 0),
8921 return force_operand (dest_rtx, NULL_RTX);
8924 /* These comparison functions need an instruction that returns an actual
8925 index. An ordinary compare that just sets the condition codes
8926 is not enough. */
8927 #ifdef HAVE_cmpstrsi
8928 case BUILT_IN_STRCMP:
8929 /* If not optimizing, call the library function. */
8930 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8934 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8935 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8936 || TREE_CHAIN (arglist) == 0
8937 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8939 else if (!HAVE_cmpstrsi)
8942 tree arg1 = TREE_VALUE (arglist);
8943 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8947 len = c_strlen (arg1);
8948 if (len)
8949 len = size_binop (PLUS_EXPR, integer_one_node, len);
8950 len2 = c_strlen (arg2);
8951 if (len2)
8952 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
8954 /* If we don't have a constant length for the first, use the length
8955 of the second, if we know it. We don't require a constant for
8956 this case; some cost analysis could be done if both are available
8957 but neither is constant. For now, assume they're equally cheap.
8959 If both strings have constant lengths, use the smaller. This
8960 could arise if optimization results in strcpy being called with
8961 two fixed strings, or if the code was machine-generated. We should
8962 add some code to the `memcmp' handler below to deal with such
8963 situations, someday. */
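/* A small worked example of the rule above: for strcmp ("abcd", "xy")
   the first length is 4+1 and the second 2+1, so the length passed on
   to the compare is 3, the smaller of the two. */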
8964 if (!len || TREE_CODE (len) != INTEGER_CST)
8971 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
8973 if (tree_int_cst_lt (len2, len))
8977 chainon (arglist, build_tree_list (NULL_TREE, len));
8981 case BUILT_IN_MEMCMP:
8982 /* If not optimizing, call the library function. */
8983 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8987 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8988 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8989 || TREE_CHAIN (arglist) == 0
8990 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8991 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8992 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8994 else if (!HAVE_cmpstrsi)
8997 tree arg1 = TREE_VALUE (arglist);
8998 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8999 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9003 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9005 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9006 enum machine_mode insn_mode
9007 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
9009 /* If we don't have POINTER_TYPE, call the function. */
9010 if (arg1_align == 0 || arg2_align == 0)
9012 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
9013 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9017 /* Make a place to write the result of the instruction. */
9018 result = target;
9019 if (! (result != 0
9020 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
9021 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9022 result = gen_reg_rtx (insn_mode);
9024 emit_insn (gen_cmpstrsi (result,
9025 gen_rtx (MEM, BLKmode,
9026 expand_expr (arg1, NULL_RTX,
9029 gen_rtx (MEM, BLKmode,
9030 expand_expr (arg2, NULL_RTX,
9033 expand_expr (len, NULL_RTX, VOIDmode, 0),
9034 GEN_INT (MIN (arg1_align, arg2_align))));
9036 /* Return the value in the proper mode for this function. */
9037 mode = TYPE_MODE (TREE_TYPE (exp));
9038 if (GET_MODE (result) == mode)
9040 else if (target != 0)
9042 convert_move (target, result, 0);
9046 return convert_to_mode (mode, result, 0);
9049 case BUILT_IN_STRCMP:
9050 case BUILT_IN_MEMCMP:
9054 case BUILT_IN_SETJMP:
9056 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9060 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9062 return expand_builtin_setjmp (buf_addr, target);
9065 /* __builtin_longjmp is passed a pointer to an array of five words
9066 and a value, which is a dummy. It's similar to the C library longjmp
9067 function but works with __builtin_setjmp above. */
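/* As the loads below show, the assumed buffer layout is: the saved
   frame pointer at offset 0, the resume label at offset
   GET_MODE_SIZE (Pmode), and the saved stack pointer at offset
   2 * GET_MODE_SIZE (Pmode). */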
9068 case BUILT_IN_LONGJMP:
9069 if (arglist == 0 || TREE_CHAIN (arglist) == 0
9070 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9074 tree dummy_id = get_identifier ("__dummy");
9075 tree dummy_type = build_function_type (void_type_node, NULL_TREE);
9076 tree dummy_decl = build_decl (FUNCTION_DECL, dummy_id, dummy_type);
9077 #ifdef POINTERS_EXTEND_UNSIGNED
9080 convert_memory_address
9082 expand_expr (TREE_VALUE (arglist),
9083 NULL_RTX, VOIDmode, 0)));
9086 = force_reg (Pmode, expand_expr (TREE_VALUE (arglist),
9090 rtx fp = gen_rtx (MEM, Pmode, buf_addr);
9091 rtx lab = gen_rtx (MEM, Pmode,
9092 plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
9093 enum machine_mode sa_mode
9094 #ifdef HAVE_save_stack_nonlocal
9095 = (HAVE_save_stack_nonlocal
9096 ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
9101 rtx stack = gen_rtx (MEM, sa_mode,
9102 plus_constant (buf_addr,
9103 2 * GET_MODE_SIZE (Pmode)));
9105 DECL_EXTERNAL (dummy_decl) = 1;
9106 TREE_PUBLIC (dummy_decl) = 1;
9107 make_decl_rtl (dummy_decl, NULL_PTR, 1);
9109 /* Expand the second expression just for side-effects. */
9110 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
9111 const0_rtx, VOIDmode, 0);
9113 assemble_external (dummy_decl);
9115 /* Pick up FP, label, and SP from the block and jump. This code is
9116 from expand_goto in stmt.c; see there for detailed comments. */
9117 #if HAVE_nonlocal_goto
9118 if (HAVE_nonlocal_goto)
9119 emit_insn (gen_nonlocal_goto (fp, lab, stack,
9120 XEXP (DECL_RTL (dummy_decl), 0)));
9124 lab = copy_to_reg (lab);
9125 emit_move_insn (hard_frame_pointer_rtx, fp);
9126 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
9128 /* Put in the static chain register the address of the dummy
9129 function. */
9130 emit_move_insn (static_chain_rtx, XEXP (DECL_RTL (dummy_decl), 0));
9131 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
9132 emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
9133 emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
9134 emit_indirect_jump (lab);
9140 default: /* just do library call, if unknown builtin */
9141 error ("built-in function `%s' not currently supported",
9142 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9145 /* The switch statement above can drop through to cause the function
9146 to be called normally. */
9148 return expand_call (exp, target, ignore);
9151 /* Built-in functions to perform an untyped call and return. */
9153 /* For each register that may be used for calling a function, this
9154 gives a mode used to copy the register's value. VOIDmode indicates
9155 the register is not used for calling a function. If the machine
9156 has register windows, this gives only the outbound registers.
9157 INCOMING_REGNO gives the corresponding inbound register. */
9158 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
9160 /* For each register that may be used for returning values, this gives
9161 a mode used to copy the register's value. VOIDmode indicates the
9162 register is not used for returning values. If the machine has
9163 register windows, this gives only the outbound registers.
9164 INCOMING_REGNO gives the corresponding inbound register. */
9165 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
9167 /* For each register that may be used for calling a function, this
9168 gives the offset of that register into the block returned by
9169 __builtin_apply_args. 0 indicates that the register is not
9170 used for calling a function. */
9171 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
9173 /* Return the offset of register REGNO into the block returned by
9174 __builtin_apply_args. This is not declared static, since it is
9175 needed in objc-act.c. */
9178 apply_args_register_offset (regno)
9183 /* Arguments are always put in outgoing registers (in the argument
9184 block) when that makes sense. */
9185 #ifdef OUTGOING_REGNO
9186 regno = OUTGOING_REGNO(regno);
9188 return apply_args_reg_offset[regno];
9191 /* Return the size required for the block returned by __builtin_apply_args,
9192 and initialize apply_args_mode. */
9197 static int size = -1;
9199 enum machine_mode mode;
9201 /* The values computed by this function never change. */
9204 /* The first value is the incoming arg-pointer. */
9205 size = GET_MODE_SIZE (Pmode);
9207 /* The second value is the structure value address unless this is
9208 passed as an "invisible" first argument. */
9209 if (struct_value_rtx)
9210 size += GET_MODE_SIZE (Pmode);
9212 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9213 if (FUNCTION_ARG_REGNO_P (regno))
9215 /* Search for the proper mode for copying this register's
9216 value. I'm not sure this is right, but it works so far. */
9217 enum machine_mode best_mode = VOIDmode;
9219 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9221 mode = GET_MODE_WIDER_MODE (mode))
9222 if (HARD_REGNO_MODE_OK (regno, mode)
9223 && HARD_REGNO_NREGS (regno, mode) == 1)
9226 if (best_mode == VOIDmode)
9227 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9229 mode = GET_MODE_WIDER_MODE (mode))
9230 if (HARD_REGNO_MODE_OK (regno, mode)
9231 && (mov_optab->handlers[(int) mode].insn_code
9232 != CODE_FOR_nothing))
9236 if (mode == VOIDmode)
9239 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9240 if (size % align != 0)
9241 size = CEIL (size, align) * align;
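/* For instance, with size 9 and align 4, CEIL (9, 4) * 4 rounds the
   offset up to 12, so the saved register image starts on its natural
   boundary. */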
9242 apply_args_reg_offset[regno] = size;
9243 size += GET_MODE_SIZE (mode);
9244 apply_args_mode[regno] = mode;
9248 apply_args_mode[regno] = VOIDmode;
9249 apply_args_reg_offset[regno] = 0;
9255 /* Return the size required for the block returned by __builtin_apply,
9256 and initialize apply_result_mode. */
9259 apply_result_size ()
9261 static int size = -1;
9263 enum machine_mode mode;
9265 /* The values computed by this function never change. */
9270 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9271 if (FUNCTION_VALUE_REGNO_P (regno))
9273 /* Search for the proper mode for copying this register's
9274 value. I'm not sure this is right, but it works so far. */
9275 enum machine_mode best_mode = VOIDmode;
9277 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9279 mode = GET_MODE_WIDER_MODE (mode))
9280 if (HARD_REGNO_MODE_OK (regno, mode))
9283 if (best_mode == VOIDmode)
9284 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9286 mode = GET_MODE_WIDER_MODE (mode))
9287 if (HARD_REGNO_MODE_OK (regno, mode)
9288 && (mov_optab->handlers[(int) mode].insn_code
9289 != CODE_FOR_nothing))
9293 if (mode == VOIDmode)
9296 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9297 if (size % align != 0)
9298 size = CEIL (size, align) * align;
9299 size += GET_MODE_SIZE (mode);
9300 apply_result_mode[regno] = mode;
9303 apply_result_mode[regno] = VOIDmode;
9305 /* Allow targets that use untyped_call and untyped_return to override
9306 the size so that machine-specific information can be stored here. */
9307 #ifdef APPLY_RESULT_SIZE
9308 size = APPLY_RESULT_SIZE;
9314 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9315 /* Create a vector describing the result block RESULT. If SAVEP is true,
9316 the result block is used to save the values; otherwise it is used to
9317 restore the values. */
9320 result_vector (savep, result)
9324 int regno, size, align, nelts;
9325 enum machine_mode mode;
9327 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9330 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9331 if ((mode = apply_result_mode[regno]) != VOIDmode)
9333 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9334 if (size % align != 0)
9335 size = CEIL (size, align) * align;
9336 reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
9337 mem = change_address (result, mode,
9338 plus_constant (XEXP (result, 0), size));
9339 savevec[nelts++] = (savep
9340 ? gen_rtx (SET, VOIDmode, mem, reg)
9341 : gen_rtx (SET, VOIDmode, reg, mem));
9342 size += GET_MODE_SIZE (mode);
9344 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
9346 #endif /* HAVE_untyped_call or HAVE_untyped_return */
9348 /* Save the state required to perform an untyped call with the same
9349 arguments as were passed to the current function. */
9352 expand_builtin_apply_args ()
9355 int size, align, regno;
9356 enum machine_mode mode;
9358 /* Create a block where the arg-pointer, structure value address,
9359 and argument registers can be saved. */
9360 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
9362 /* Walk past the arg-pointer and structure value address. */
9363 size = GET_MODE_SIZE (Pmode);
9364 if (struct_value_rtx)
9365 size += GET_MODE_SIZE (Pmode);
9367 /* Save each register used in calling a function to the block. */
9368 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9369 if ((mode = apply_args_mode[regno]) != VOIDmode)
9373 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9374 if (size % align != 0)
9375 size = CEIL (size, align) * align;
9377 tem = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9380 /* For reg-stack.c's stack register housekeeping.
9381 Compare with a similar piece of code in function.c. */
9383 emit_insn (gen_rtx (USE, mode, tem));
9386 emit_move_insn (change_address (registers, mode,
9387 plus_constant (XEXP (registers, 0),
9390 size += GET_MODE_SIZE (mode);
9393 /* Save the arg pointer to the block. */
9394 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9395 copy_to_reg (virtual_incoming_args_rtx));
9396 size = GET_MODE_SIZE (Pmode);
9398 /* Save the structure value address unless this is passed as an
9399 "invisible" first argument. */
9400 if (struct_value_incoming_rtx)
9402 emit_move_insn (change_address (registers, Pmode,
9403 plus_constant (XEXP (registers, 0),
9405 copy_to_reg (struct_value_incoming_rtx));
9406 size += GET_MODE_SIZE (Pmode);
9409 /* Return the address of the block. */
9410 return copy_addr_to_reg (XEXP (registers, 0));
9413 /* Perform an untyped call and save the state required to perform an
9414 untyped return of whatever value was returned by the given function. */
9417 expand_builtin_apply (function, arguments, argsize)
9418 rtx function, arguments, argsize;
9420 int size, align, regno;
9421 enum machine_mode mode;
9422 rtx incoming_args, result, reg, dest, call_insn;
9423 rtx old_stack_level = 0;
9424 rtx call_fusage = 0;
9426 /* Create a block where the return registers can be saved. */
9427 result = assign_stack_local (BLKmode, apply_result_size (), -1);
9429 /* ??? The argsize value should be adjusted here. */
9431 /* Fetch the arg pointer from the ARGUMENTS block. */
9432 incoming_args = gen_reg_rtx (Pmode);
9433 emit_move_insn (incoming_args,
9434 gen_rtx (MEM, Pmode, arguments));
9435 #ifndef STACK_GROWS_DOWNWARD
9436 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9437 incoming_args, 0, OPTAB_LIB_WIDEN);
9440 /* Perform postincrements before actually calling the function. */
9443 /* Push a new argument block and copy the arguments. */
9444 do_pending_stack_adjust ();
9445 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
9447 /* Push a block of memory onto the stack to store the memory arguments.
9448 Save the address in a register, and copy the memory arguments. ??? I
9449 haven't figured out how the calling convention macros affect this,
9450 but it's likely that the source and/or destination addresses in
9451 the block copy will need updating in machine-specific ways. */
9452 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
9453 emit_block_move (gen_rtx (MEM, BLKmode, dest),
9454 gen_rtx (MEM, BLKmode, incoming_args),
9456 PARM_BOUNDARY / BITS_PER_UNIT);
9458 /* Refer to the argument block. */
9460 arguments = gen_rtx (MEM, BLKmode, arguments);
9462 /* Walk past the arg-pointer and structure value address. */
9463 size = GET_MODE_SIZE (Pmode);
9464 if (struct_value_rtx)
9465 size += GET_MODE_SIZE (Pmode);
9467 /* Restore each of the registers previously saved. Make USE insns
9468 for each of these registers for use in making the call. */
9469 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9470 if ((mode = apply_args_mode[regno]) != VOIDmode)
9472 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9473 if (size % align != 0)
9474 size = CEIL (size, align) * align;
9475 reg = gen_rtx (REG, mode, regno);
9476 emit_move_insn (reg,
9477 change_address (arguments, mode,
9478 plus_constant (XEXP (arguments, 0),
9481 use_reg (&call_fusage, reg);
9482 size += GET_MODE_SIZE (mode);
9485 /* Restore the structure value address unless this is passed as an
9486 "invisible" first argument. */
9487 size = GET_MODE_SIZE (Pmode);
9488 if (struct_value_rtx)
9490 rtx value = gen_reg_rtx (Pmode);
9491 emit_move_insn (value,
9492 change_address (arguments, Pmode,
9493 plus_constant (XEXP (arguments, 0),
9495 emit_move_insn (struct_value_rtx, value);
9496 if (GET_CODE (struct_value_rtx) == REG)
9497 use_reg (&call_fusage, struct_value_rtx);
9498 size += GET_MODE_SIZE (Pmode);
9501 /* All arguments and registers used for the call are set up by now! */
9502 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
9504 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
9505 and we don't want to load it into a register as an optimization,
9506 because prepare_call_address already did it if it should be done. */
9507 if (GET_CODE (function) != SYMBOL_REF)
9508 function = memory_address (FUNCTION_MODE, function);
9510 /* Generate the actual call instruction and save the return value. */
9511 #ifdef HAVE_untyped_call
9512 if (HAVE_untyped_call)
9513 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
9514 result, result_vector (1, result)));
9517 #ifdef HAVE_call_value
9518 if (HAVE_call_value)
9522 /* Locate the unique return register. It is not possible to
9523 express a call that sets more than one return register using
9524 call_value; use untyped_call for that. In fact, untyped_call
9525 only needs to save the return registers in the given block. */
9526 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9527 if ((mode = apply_result_mode[regno]) != VOIDmode)
9530 abort (); /* HAVE_untyped_call required. */
9531 valreg = gen_rtx (REG, mode, regno);
9534 emit_call_insn (gen_call_value (valreg,
9535 gen_rtx (MEM, FUNCTION_MODE, function),
9536 const0_rtx, NULL_RTX, const0_rtx));
9538 emit_move_insn (change_address (result, GET_MODE (valreg),
9546 /* Find the CALL insn we just emitted. */
9547 for (call_insn = get_last_insn ();
9548 call_insn && GET_CODE (call_insn) != CALL_INSN;
9549 call_insn = PREV_INSN (call_insn))
9555 /* Put the register usage information on the CALL. If there is already
9556 some usage information, put ours at the end. */
9557 if (CALL_INSN_FUNCTION_USAGE (call_insn))
9561 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
9562 link = XEXP (link, 1))
9565 XEXP (link, 1) = call_fusage;
9568 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
9570 /* Restore the stack. */
9571 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
9573 /* Return the address of the result block. */
9574 return copy_addr_to_reg (XEXP (result, 0));
9577 /* Perform an untyped return. */
9580 expand_builtin_return (result)
9583 int size, align, regno;
9584 enum machine_mode mode;
9586 rtx call_fusage = 0;
9588 apply_result_size ();
9589 result = gen_rtx (MEM, BLKmode, result);
9591 #ifdef HAVE_untyped_return
9592 if (HAVE_untyped_return)
9594 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
9600 /* Restore the return value and note that each value is used. */
9602 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9603 if ((mode = apply_result_mode[regno]) != VOIDmode)
9605 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9606 if (size % align != 0)
9607 size = CEIL (size, align) * align;
9608 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9609 emit_move_insn (reg,
9610 change_address (result, mode,
9611 plus_constant (XEXP (result, 0),
9614 push_to_sequence (call_fusage);
9615 emit_insn (gen_rtx (USE, VOIDmode, reg));
9616 call_fusage = get_insns ();
9618 size += GET_MODE_SIZE (mode);
9621 /* Put the USE insns before the return. */
9622 emit_insns (call_fusage);
9624 /* Return whatever value was restored by jumping directly to the end
9625 of the function. */
9626 expand_null_return ();
9629 /* Expand code for a post- or pre- increment or decrement
9630 and return the RTX for the result.
9631 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
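/* For instance, for `i++' (POST == 1) the value returned is a copy of
   the old `i', while for `++i' (POST == 0) it is the incremented
   value itself. */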
9634 expand_increment (exp, post, ignore)
9638 register rtx op0, op1;
9639 register rtx temp, value;
9640 register tree incremented = TREE_OPERAND (exp, 0);
9641 optab this_optab = add_optab;
9643 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9644 int op0_is_copy = 0;
9645 int single_insn = 0;
9646 /* 1 means we can't store into OP0 directly,
9647 because it is a subreg narrower than a word,
9648 and we don't dare clobber the rest of the word. */
9651 if (output_bytecode)
9653 bc_expand_expr (exp);
9657 /* Stabilize any component ref that might need to be
9658 evaluated more than once below. */
9660 || TREE_CODE (incremented) == BIT_FIELD_REF
9661 || (TREE_CODE (incremented) == COMPONENT_REF
9662 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9663 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9664 incremented = stabilize_reference (incremented);
9665 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9666 ones into save exprs so that they don't accidentally get evaluated
9667 more than once by the code below. */
9668 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9669 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9670 incremented = save_expr (incremented);
9672 /* Compute the operands as RTX.
9673 Note whether OP0 is the actual lvalue or a copy of it:
9674 I believe it is a copy iff it is a register or subreg
9675 and insns were generated in computing it. */
9677 temp = get_last_insn ();
9678 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9680 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9681 in place but instead must do sign- or zero-extension during assignment,
9682 so we copy it into a new register and let the code below use it as
9683 a copy.
9685 Note that we can safely modify this SUBREG since it is known not to be
9686 shared (it was made by the expand_expr call above). */
9688 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9691 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9695 else if (GET_CODE (op0) == SUBREG
9696 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9698 /* We cannot increment this SUBREG in place. If we are
9699 post-incrementing, get a copy of the old value. Otherwise,
9700 just mark that we cannot increment in place. */
9702 op0 = copy_to_reg (op0);
9707 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9708 && temp != get_last_insn ());
9709 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9710 EXPAND_MEMORY_USE_BAD);
9712 /* Decide whether incrementing or decrementing. */
9713 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9714 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9715 this_optab = sub_optab;
9717 /* Convert decrement by a constant into a negative increment. */
9718 if (this_optab == sub_optab
9719 && GET_CODE (op1) == CONST_INT)
9721 op1 = GEN_INT (- INTVAL (op1));
9722 this_optab = add_optab;
9725 /* For a preincrement, see if we can do this with a single instruction. */
9728 icode = (int) this_optab->handlers[(int) mode].insn_code;
9729 if (icode != (int) CODE_FOR_nothing
9730 /* Make sure that OP0 is valid for operands 0 and 1
9731 of the insn we want to queue. */
9732 && (*insn_operand_predicate[icode][0]) (op0, mode)
9733 && (*insn_operand_predicate[icode][1]) (op0, mode)
9734 && (*insn_operand_predicate[icode][2]) (op1, mode))
9738 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9739 then we cannot just increment OP0. We must therefore contrive to
9740 increment the original value. Then, for postincrement, we can return
9741 OP0 since it is a copy of the old value. For preincrement, expand here
9742 unless we can do it with a single insn.
9744 Likewise if storing directly into OP0 would clobber high bits
9745 we need to preserve (bad_subreg). */
9746 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9748 /* This is the easiest way to increment the value wherever it is.
9749 Problems with multiple evaluation of INCREMENTED are prevented
9750 because either (1) it is a component_ref or preincrement,
9751 in which case it was stabilized above, or (2) it is an array_ref
9752 with constant index in an array in a register, which is
9753 safe to reevaluate. */
9754 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9755 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9756 ? MINUS_EXPR : PLUS_EXPR),
9759 TREE_OPERAND (exp, 1));
9761 while (TREE_CODE (incremented) == NOP_EXPR
9762 || TREE_CODE (incremented) == CONVERT_EXPR)
9764 newexp = convert (TREE_TYPE (incremented), newexp);
9765 incremented = TREE_OPERAND (incremented, 0);
9768 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9769 return post ? op0 : temp;
9774 /* We have a true reference to the value in OP0.
9775 If there is an insn to add or subtract in this mode, queue it.
9776 Queueing the increment insn avoids the register shuffling
9777 that often results if we must increment now and first save
9778 the old value for subsequent use. */
9780 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9781 op0 = stabilize (op0);
9784 icode = (int) this_optab->handlers[(int) mode].insn_code;
9785 if (icode != (int) CODE_FOR_nothing
9786 /* Make sure that OP0 is valid for operands 0 and 1
9787 of the insn we want to queue. */
9788 && (*insn_operand_predicate[icode][0]) (op0, mode)
9789 && (*insn_operand_predicate[icode][1]) (op0, mode))
9791 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9792 op1 = force_reg (mode, op1);
9794 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9796 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9798 rtx addr = force_reg (Pmode, XEXP (op0, 0));
9801 op0 = change_address (op0, VOIDmode, addr);
9802 temp = force_reg (GET_MODE (op0), op0);
9803 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9804 op1 = force_reg (mode, op1);
9806 /* The increment queue is LIFO, thus we have to `queue'
9807 the instructions in reverse order. */
9808 enqueue_insn (op0, gen_move_insn (op0, temp));
9809 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9814 /* Preincrement, or we can't increment with one simple insn. */
9816 /* Save a copy of the value before inc or dec, to return it later. */
9817 temp = value = copy_to_reg (op0);
9819 /* Arrange to return the incremented value. */
9820 /* Copy the rtx because expand_binop will protect from the queue,
9821 and the results of that would be invalid for us to return
9822 if our caller does emit_queue before using our result. */
9823 temp = copy_rtx (value = op0);
9825 /* Increment however we can. */
9826 op1 = expand_binop (mode, this_optab, value, op1,
9827 flag_check_memory_usage ? NULL_RTX : op0,
9828 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9829 /* Make sure the value is stored into OP0. */
9831 emit_move_insn (op0, op1);
9836 /* Expand all function calls contained within EXP, innermost ones first.
9837 But don't look within expressions that have sequence points.
9838 For each CALL_EXPR, record the rtx for its value
9839 in the CALL_EXPR_RTL field. */
9842 preexpand_calls (exp)
9845 register int nops, i;
9846 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9848 if (! do_preexpand_calls)
9851 /* Only expressions and references can contain calls. */
9853 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9856 switch (TREE_CODE (exp))
9859 /* Do nothing if already expanded. */
9860 if (CALL_EXPR_RTL (exp) != 0
9861 /* Do nothing if the call returns a variable-sized object. */
9862 || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
9863 /* Do nothing to built-in functions. */
9864 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9865 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9867 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9870 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9875 case TRUTH_ANDIF_EXPR:
9876 case TRUTH_ORIF_EXPR:
9877 /* If we find one of these, then we can be sure
9878 the adjust will be done for it (since it makes jumps).
9879 Do it now, so that if this is inside an argument
9880 of a function, we don't get the stack adjustment
9881 after some other args have already been pushed. */
9882 do_pending_stack_adjust ();
9887 case WITH_CLEANUP_EXPR:
9888 case CLEANUP_POINT_EXPR:
9892 if (SAVE_EXPR_RTL (exp) != 0)
9896 nops = tree_code_length[(int) TREE_CODE (exp)];
9897 for (i = 0; i < nops; i++)
9898 if (TREE_OPERAND (exp, i) != 0)
9900 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9901 if (type == 'e' || type == '<' || type == '1' || type == '2'
9903 preexpand_calls (TREE_OPERAND (exp, i));
9907 /* At the start of a function, record that we have no previously-pushed
9908 arguments waiting to be popped. */
9911 init_pending_stack_adjust ()
9913 pending_stack_adjust = 0;
9916 /* When exiting from function, if safe, clear out any pending stack adjust
9917 so the adjustment won't get done. */
9920 clear_pending_stack_adjust ()
9922 #ifdef EXIT_IGNORE_STACK
9924 && ! flag_omit_frame_pointer && EXIT_IGNORE_STACK
9925 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9926 && ! flag_inline_functions)
9927 pending_stack_adjust = 0;
9931 /* Pop any previously-pushed arguments that have not been popped yet. */
9934 do_pending_stack_adjust ()
9936 if (inhibit_defer_pop == 0)
9938 if (pending_stack_adjust != 0)
9939 adjust_stack (GEN_INT (pending_stack_adjust));
9940 pending_stack_adjust = 0;
9944 /* Expand conditional expressions. */
9946 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9947 LABEL is an rtx of code CODE_LABEL, in this function and all the
9951 jumpifnot (exp, label)
9955 do_jump (exp, label, NULL_RTX);
9958 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9965 do_jump (exp, NULL_RTX, label);
9968 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9969 the result is zero, or IF_TRUE_LABEL if the result is one.
9970 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9971 meaning fall through in that case.
9973 do_jump always does any pending stack adjust except when it does not
9974 actually perform a jump. An example where there is no jump
9975 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9977 This function is responsible for optimizing cases such as
9978 &&, || and comparison operators in EXP. */
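/* For example, given `if (a && b)' the TRUTH_ANDIF_EXPR case below
   jumps to the false label as soon as `a' evaluates to zero, so `b'
   is never evaluated; `||' is handled symmetrically via
   TRUTH_ORIF_EXPR. */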
9981 do_jump (exp, if_false_label, if_true_label)
9983 rtx if_false_label, if_true_label;
9985 register enum tree_code code = TREE_CODE (exp);
9986 /* Some cases need to create a label to jump to
9987 in order to properly fall through.
9988 These cases set DROP_THROUGH_LABEL nonzero. */
9989 rtx drop_through_label = 0;
9994 enum machine_mode mode;
10004 temp = integer_zerop (exp) ? if_false_label : if_true_label;
10010 /* This is not true with #pragma weak */
10012 /* The address of something can never be zero. */
10014 emit_jump (if_true_label);
10019 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
10020 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
10021 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
10024 /* If we are narrowing the operand, we have to do the compare in the
10025 narrower mode. */
10026 if ((TYPE_PRECISION (TREE_TYPE (exp))
10027 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10029 case NON_LVALUE_EXPR:
10030 case REFERENCE_EXPR:
10035 /* These cannot change zero->non-zero or vice versa. */
10036 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10040 /* This is never less insns than evaluating the PLUS_EXPR followed by
10041 a test and can be longer if the test is eliminated. */
10043 /* Reduce to minus. */
10044 exp = build (MINUS_EXPR, TREE_TYPE (exp),
10045 TREE_OPERAND (exp, 0),
10046 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
10047 TREE_OPERAND (exp, 1))));
10048 /* Process as MINUS. */
10052 /* Non-zero iff operands of minus differ. */
10053 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
10054 TREE_OPERAND (exp, 0),
10055 TREE_OPERAND (exp, 1)),
10060 /* If we are AND'ing with a small constant, do this comparison in the
10061 smallest type that fits. If the machine doesn't have comparisons
10062 that small, it will be converted back to the wider comparison.
10063 This helps if we are testing the sign bit of a narrower object.
10064 combine can't do this for us because it can't know whether a
10065 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
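/* For instance, a test like `x & 0x80' on a 32-bit int can be narrowed
   to a QImode comparison when the machine has QImode compare insns,
   turning a word-sized sign-bit test into a byte test. */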
10067 if (! SLOW_BYTE_ACCESS
10068 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
10069 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
10070 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
10071 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
10072 && (type = type_for_mode (mode, 1)) != 0
10073 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10074 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10075 != CODE_FOR_nothing))
10077 do_jump (convert (type, exp), if_false_label, if_true_label);
10082 case TRUTH_NOT_EXPR:
10083 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10086 case TRUTH_ANDIF_EXPR:
10087 if (if_false_label == 0)
10088 if_false_label = drop_through_label = gen_label_rtx ();
10089 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
10090 start_cleanup_deferal ();
10091 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10092 end_cleanup_deferal ();
10095 case TRUTH_ORIF_EXPR:
10096 if (if_true_label == 0)
10097 if_true_label = drop_through_label = gen_label_rtx ();
10098 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
10099 start_cleanup_deferal ();
10100 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10101 end_cleanup_deferal ();
10104 case COMPOUND_EXPR:
10105 push_temp_slots ();
10106 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
10107 preserve_temp_slots (NULL_RTX);
10108 free_temp_slots ();
10111 do_pending_stack_adjust ();
10112 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10115 case COMPONENT_REF:
10116 case BIT_FIELD_REF:
10119 int bitsize, bitpos, unsignedp;
10120 enum machine_mode mode;
10126 /* Get description of this reference. We don't actually care
10127 about the underlying object here. */
10128 get_inner_reference (exp, &bitsize, &bitpos, &offset,
10129 &mode, &unsignedp, &volatilep,
10132 type = type_for_size (bitsize, unsignedp);
10133 if (! SLOW_BYTE_ACCESS
10134 && type != 0 && bitsize >= 0
10135 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10136 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10137 != CODE_FOR_nothing))
10139 do_jump (convert (type, exp), if_false_label, if_true_label);
10146 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10147 if (integer_onep (TREE_OPERAND (exp, 1))
10148 && integer_zerop (TREE_OPERAND (exp, 2)))
10149 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10151 else if (integer_zerop (TREE_OPERAND (exp, 1))
10152 && integer_onep (TREE_OPERAND (exp, 2)))
10153 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10157 register rtx label1 = gen_label_rtx ();
10158 drop_through_label = gen_label_rtx ();
10160 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10162 start_cleanup_deferal ();
10163 /* Now the THEN-expression. */
10164 do_jump (TREE_OPERAND (exp, 1),
10165 if_false_label ? if_false_label : drop_through_label,
10166 if_true_label ? if_true_label : drop_through_label);
10167 /* In case the do_jump just above never jumps. */
10168 do_pending_stack_adjust ();
10169 emit_label (label1);
10171 /* Now the ELSE-expression. */
10172 do_jump (TREE_OPERAND (exp, 2),
10173 if_false_label ? if_false_label : drop_through_label,
10174 if_true_label ? if_true_label : drop_through_label);
10175 end_cleanup_deferal ();
10181 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10183 if (integer_zerop (TREE_OPERAND (exp, 1)))
10184 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10185 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10186 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10189 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10190 fold (build (EQ_EXPR, TREE_TYPE (exp),
10191 fold (build1 (REALPART_EXPR,
10192 TREE_TYPE (inner_type),
10193 TREE_OPERAND (exp, 0))),
10194 fold (build1 (REALPART_EXPR,
10195 TREE_TYPE (inner_type),
10196 TREE_OPERAND (exp, 1))))),
10197 fold (build (EQ_EXPR, TREE_TYPE (exp),
10198 fold (build1 (IMAGPART_EXPR,
10199 TREE_TYPE (inner_type),
10200 TREE_OPERAND (exp, 0))),
10201 fold (build1 (IMAGPART_EXPR,
10202 TREE_TYPE (inner_type),
10203 TREE_OPERAND (exp, 1))))))),
10204 if_false_label, if_true_label);
10205 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10206 && !can_compare_p (TYPE_MODE (inner_type)))
10207 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10209 comparison = compare (exp, EQ, EQ);
10215 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10217 if (integer_zerop (TREE_OPERAND (exp, 1)))
10218 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10219 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10220 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10223 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10224 fold (build (NE_EXPR, TREE_TYPE (exp),
10225 fold (build1 (REALPART_EXPR,
10226 TREE_TYPE (inner_type),
10227 TREE_OPERAND (exp, 0))),
10228 fold (build1 (REALPART_EXPR,
10229 TREE_TYPE (inner_type),
10230 TREE_OPERAND (exp, 1))))),
10231 fold (build (NE_EXPR, TREE_TYPE (exp),
10232 fold (build1 (IMAGPART_EXPR,
10233 TREE_TYPE (inner_type),
10234 TREE_OPERAND (exp, 0))),
10235 fold (build1 (IMAGPART_EXPR,
10236 TREE_TYPE (inner_type),
10237 TREE_OPERAND (exp, 1))))))),
10238 if_false_label, if_true_label);
10239 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10240 && !can_compare_p (TYPE_MODE (inner_type)))
10241 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10243 comparison = compare (exp, NE, NE);
10248 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10250 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10251 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10253 comparison = compare (exp, LT, LTU);
10257 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10259 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10260 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10262 comparison = compare (exp, LE, LEU);
10266 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10268 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10269 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10271 comparison = compare (exp, GT, GTU);
10275 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10277 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10278 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10280 comparison = compare (exp, GE, GEU);
10285 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10287 /* This is not needed any more and causes poor code since it causes
10288 comparisons and tests from non-SI objects to have different code
10289 sequences. */
10290 /* Copy to register to avoid generating bad insns by cse
10291 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10292 if (!cse_not_expected && GET_CODE (temp) == MEM)
10293 temp = copy_to_reg (temp);
10295 do_pending_stack_adjust ();
10296 if (GET_CODE (temp) == CONST_INT)
10297 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10298 else if (GET_CODE (temp) == LABEL_REF)
10299 comparison = const_true_rtx;
10300 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10301 && !can_compare_p (GET_MODE (temp)))
10302 /* Note swapping the labels gives us not-equal. */
10303 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10304 else if (GET_MODE (temp) != VOIDmode)
10305 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
10306 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10307 GET_MODE (temp), NULL_RTX, 0);

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  /* If COMPARISON is nonzero here, it is an rtx that can be substituted
     straight into a conditional jump instruction as the jump condition.
     Otherwise, all the work has been done already.  */

  if (comparison == const_true_rtx)
    {
      if (if_true_label)
        emit_jump (if_true_label);
    }
  else if (comparison == const0_rtx)
    {
      if (if_false_label)
        emit_jump (if_false_label);
    }
  else if (comparison)
    do_jump_for_compare (comparison, if_false_label, if_true_label);

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
         do any stack adjusts from that code, before the place
         where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}

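/* A source-level sketch of the complex-mode NE_EXPR case above: with the
   usual REALPART/IMAGPART semantics, a test such as

        __complex__ double a, b;
        ...
        if (a != b) f ();

   is rebuilt as the folded tree for

        if (__real__ a != __real__ b || __imag__ a != __imag__ b) f ();

   and handed back to do_jump, so only scalar comparisons reach the
   word-by-word and compare-insn paths below.  */
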
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
                               (unsignedp || i > 0) ? GTU : GT,
                               unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_true_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

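/* Rough picture of what the loop above emits for a signed DImode test
   "a > b" on a 32-bit little-endian target (two words per operand):

        if ((signed) a.hi > (signed) b.hi) goto if_true_label;
        if (a.hi != b.hi)                  goto if_false_label;
        if ((unsigned) a.lo > (unsigned) b.lo) goto if_true_label;
        goto if_false_label;

   Only the most significant word carries the sign, so it alone is
   compared GT; all lower words are compared GTU.  */
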
/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

static void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
                               (unsignedp || i > 0) ? GTU : GT,
                               unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_true_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
                                   operand_subword_force (op1, i, mode),
                                   EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
                                   word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

static void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i,
                                                          GET_MODE (op0)),
                                   const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

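/* For example, testing a 64-bit OP0 against zero on a 32-bit target,
   the loop above amounts to

        if (op0.word[0] != 0) goto if_false_label;
        if (op0.word[1] != 0) goto if_false_label;
        goto if_true_label;

   i.e. any nonzero word diverts control to IF_FALSE_LABEL, and only a
   fully zero value falls through to the jump to IF_TRUE_LABEL.  */
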
/* Given a comparison expression in rtl form, output conditional branches to
   IF_TRUE_LABEL, IF_FALSE_LABEL, or both.  */

static void
do_jump_for_compare (comparison, if_false_label, if_true_label)
     rtx comparison, if_false_label, if_true_label;
{
  if (if_true_label)
    {
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
        emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
      else
        abort ();

      if (if_false_label)
        emit_jump (if_false_label);
    }
  else if (if_false_label)
    {
      rtx insn;
      rtx prev = get_last_insn ();
      rtx branch = 0;

      /* Output the branch with the opposite condition.  Then try to invert
         what is generated.  If more than one insn is a branch, or if the
         branch is not the last insn written, abort.  If we can't invert
         the branch, make a true label, redirect this jump to that,
         emit a jump to the false label and define the true label.  */

      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
        emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
      else
        abort ();

      /* Here we get the first insn that was just emitted.  It used to be the
         case that, on some machines, emitting the branch would discard
         the previous compare insn and emit a replacement.  This isn't
         done anymore, but abort if we see that PREV is deleted.  */

      if (prev == 0)
        insn = get_insns ();
      else if (INSN_DELETED_P (prev))
        abort ();
      else
        insn = NEXT_INSN (prev);

      for (; insn; insn = NEXT_INSN (insn))
        if (GET_CODE (insn) == JUMP_INSN)
          {
            if (branch)
              abort ();
            branch = insn;
          }

      if (branch != get_last_insn ())
        abort ();

      JUMP_LABEL (branch) = if_false_label;
      if (! invert_jump (branch, if_false_label))
        {
          if_true_label = gen_label_rtx ();
          redirect_jump (branch, if_true_label);
          emit_jump (if_false_label);
          emit_label (if_true_label);
        }
    }
}

/* Generate code for a comparison expression EXP
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static rtx
compare (exp, signed_code, unsigned_code)
     tree exp;
     enum rtx_code signed_code, unsigned_code;
{
  register rtx op0
    = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  register rtx op1
    = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
  register enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);
  enum rtx_code code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
          == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
          == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  return compare_from_rtx (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
}

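/* As used from do_jump above, e.g. compare (exp, LT, LTU) for a
   LT_EXPR: with unsigned operands this emits the compare insn for LTU
   and returns (ltu (cc0) (const_int 0)), which do_jump_for_compare can
   hand directly to the matching bcc_gen_fctn branch generator.  */
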
/* Like compare but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
}

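/* Note the constant-folding path above: a call along the lines of

        compare_from_rtx (GEN_INT (2), GEN_INT (3), LT, 0, SImode,
                          NULL_RTX, 0);

   never reaches emit_cmp_insn; simplify_relational_operation folds it
   and const_true_rtx comes back directly.  This is why the callers in
   this file check the result against const_true_rtx and const0_rtx
   before emitting a branch.  */
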
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label, pattern, jump_pat;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1))
      && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
    {
      tree inner = TREE_OPERAND (arg0, 0);
      HOST_WIDE_INT tem;
      int bitnum;
      int ops_unsignedp;

      tem = INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
                                 NULL_RTX, VOIDmode, 0));
      /* In this case, immed_double_const will sign extend the value to make
         it look the same on the host and target.  We must remove the
         sign-extension before calling exact_log2, since exact_log2 will
         fail for negative values.  */
      if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT
          && BITS_PER_WORD == GET_MODE_BITSIZE (TYPE_MODE (type)))
        /* We don't use the obvious constant shift to generate the mask,
           because that generates compiler warnings when BITS_PER_WORD is
           greater than or equal to HOST_BITS_PER_WIDE_INT, even though this
           code is unreachable in that case.  */
        tem = tem & GET_MODE_MASK (word_mode);
      bitnum = exact_log2 (tem);

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
              < TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (subtarget == 0 || GET_CODE (subtarget) != REG
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}

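/* The single-bit case above turns, for example,

        int f (int x) { return (x & 4) != 0; }

   into a shift and mask rather than an scc insn: X is shifted right by
   2 and ANDed with 1, the AND being dropped when the tested bit is the
   sign bit of the mode.  For EQ instead of NE, the low-order bit is
   additionally XORed with 1.  */
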
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
  emit_jump_insn (gen_bgtu (default_label));

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx (PLUS, Pmode,
                   gen_rtx (MULT, Pmode, index,
                            GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                   gen_rtx (LABEL_REF, Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

#ifndef CASE_VECTOR_PC_RELATIVE
  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! flag_pic)
    emit_barrier ();
#endif
}

#endif /* HAVE_tablejump */

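/* In source terms, the sequence above implements roughly

        if ((unsigned) index > range) goto default_label;
        goto *table[index];

   where the table entry address is table_label plus index times
   GET_MODE_SIZE (CASE_VECTOR_MODE), and the vector load is marked
   RTX_UNCHANGING_P because the jump table never changes.  */
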
/* Emit a suitable bytecode to load a value from memory, assuming a pointer
   to that value is on the top of the stack.  The resulting type is TYPE, and
   the source declaration is DECL.  */

void
bc_load_memory (type, decl)
     tree type, decl;
{
  enum bytecode_opcode opcode;

  /* Bit fields are special.  We only know about signed and
     unsigned ints, and enums.  The latter are treated as
     signed integers.  */

  if (DECL_BIT_FIELD (decl))
    if (TREE_CODE (type) == ENUMERAL_TYPE
        || TREE_CODE (type) == INTEGER_TYPE)
      opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
    else
      abort ();
  else
    /* See corresponding comment in bc_store_memory.  */
    if (TYPE_MODE (type) == BLKmode
        || TYPE_MODE (type) == VOIDmode)
      return;
    else
      opcode = mode_to_load_map [(int) TYPE_MODE (type)];

  if (opcode == neverneverland)
    abort ();

  bc_emit_bytecode (opcode);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}

/* Store the contents of the second stack slot to the address in the
   top stack slot.  DECL is the declaration of the destination and is used
   to determine whether we're dealing with a bitfield.  */

void
bc_store_memory (type, decl)
     tree type, decl;
{
  enum bytecode_opcode opcode;

  if (DECL_BIT_FIELD (decl))
    {
      if (TREE_CODE (type) == ENUMERAL_TYPE
          || TREE_CODE (type) == INTEGER_TYPE)
        opcode = sstoreBI;
      else
        abort ();
    }
  else
    if (TYPE_MODE (type) == BLKmode)
      {
        /* Copy structure.  This expands to a block copy instruction,
           storeBLK.  In addition to the arguments expected by the other
           store instructions, it also expects a type size (SImode) on
           top of the stack, which is the structure size in size units
           (usually bytes).  The first two arguments are already on the
           stack; so we just put the size on level 1.  For some other
           languages, the size may be variable, which is why we don't
           encode it as a storeBLK literal, but rather treat it as a
           full-fledged expression.  */

        bc_expand_expr (TYPE_SIZE (type));
        opcode = storeBLK;
      }
    else
      opcode = mode_to_store_map [(int) TYPE_MODE (type)];

  if (opcode == neverneverland)
    abort ();

  bc_emit_bytecode (opcode);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}

/* Allocate local stack space sufficient to hold a value of the given
   SIZE at alignment boundary ALIGNMENT bits.  ALIGNMENT must be an
   integral power of 2.  A special case is locals of type VOID, which
   have size 0 and alignment 1; any "voidish" SIZE or ALIGNMENT is
   remapped into the corresponding attribute of SI.  */

rtx
bc_allocate_local (size, alignment)
     int size, alignment;
{
  rtx retval;
  int byte_alignment;

  if (size < 0)
    abort ();

  /* Normalize size and alignment  */
  if (!size)
    size = UNITS_PER_WORD;

  if (alignment < BITS_PER_UNIT)
    byte_alignment = 1 << (INT_ALIGN - 1);
  else
    /* Align */
    byte_alignment = alignment / BITS_PER_UNIT;

  if (local_vars_size & (byte_alignment - 1))
    local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));

  retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
  local_vars_size += size;

  return retval;
}

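/* Worked example: a local of SIZE 1 byte with ALIGNMENT 32 bits gives
   byte_alignment = 4.  If local_vars_size is currently 5, it is rounded
   up to 8, the rtx returned refers to offset 8, and local_vars_size
   advances to 9 for the next allocation.  */
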
/* Allocate variable-sized local array.  Variable-sized arrays are
   actually pointers to the address in memory where they are stored.  */

rtx
bc_allocate_variable_array (size)
     tree size;
{
  rtx retval;
  const int ptralign = (1 << (PTR_ALIGN - 1));

  /* Align pointer */
  if (local_vars_size & ptralign)
    local_vars_size += ptralign - (local_vars_size & ptralign);

  /* Note down local space needed: pointer to block; also return
     dummy rtx */

  retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
  local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
  return retval;
}

/* Push the machine address for the given external variable offset.  */

void
bc_load_externaddr (externaddr)
     rtx externaddr;
{
  bc_emit_bytecode (constP);
  bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
                         BYTECODE_BC_LABEL (externaddr)->offset);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}

/* Like above, but expects an IDENTIFIER.  */

void
bc_load_externaddr_id (id, offset)
     tree id;
     int offset;
{
  if (!IDENTIFIER_POINTER (id))
    abort ();

  bc_emit_bytecode (constP);
  bc_emit_code_labelref (xstrdup (IDENTIFIER_POINTER (id)), offset);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}

/* Push the machine address for the given local variable offset.  */

void
bc_load_localaddr (localaddr)
     rtx localaddr;
{
  bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
}

/* Push the machine address for the given parameter offset.
   NOTE: offset is in bits.  */

void
bc_load_parmaddr (parmaddr)
     rtx parmaddr;
{
  bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
                              / BITS_PER_UNIT));
}

/* Convert a[i] into *(a + i).  */

tree
bc_canonicalize_array_ref (exp)
     tree exp;
{
  tree type = TREE_TYPE (exp);
  tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
                           TREE_OPERAND (exp, 0));
  tree index = TREE_OPERAND (exp, 1);

  /* Convert the integer argument to a type the same size as a pointer
     so the multiply won't overflow spuriously.  */

  if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
    index = convert (type_for_size (POINTER_SIZE, 0), index);

  /* The array address isn't volatile even if the array is.
     (Of course this isn't terribly relevant since the bytecode
     translator treats nearly everything as volatile anyway.)  */
  TREE_THIS_VOLATILE (array_adr) = 0;

  return build1 (INDIRECT_REF, type,
                 fold (build (PLUS_EXPR,
                              TYPE_POINTER_TO (type),
                              array_adr,
                              fold (build (MULT_EXPR,
                                           TYPE_POINTER_TO (type),
                                           index,
                                           size_in_bytes (type))))));
}

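/* So, assuming 4-byte ints, a reference a[i] with

        int a[10], i;

   becomes the tree for *(&a + i * 4), with the index first widened to
   the precision of a pointer so the multiplication cannot overflow
   spuriously.  */
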
/* Load the address of the component referenced by the given
   COMPONENT_REF expression.

   Returns innermost lvalue.  */

tree
bc_expand_component_address (exp)
     tree exp;
{
  tree tem;
  enum machine_mode mode;
  int bitpos = 0;
  HOST_WIDE_INT SIval;

  tem = TREE_OPERAND (exp, 1);
  mode = DECL_MODE (tem);

  /* Compute cumulative bit offset for nested component refs
     and array refs, and find the ultimate containing object.  */

  for (tem = exp;; tem = TREE_OPERAND (tem, 0))
    {
      if (TREE_CODE (tem) == COMPONENT_REF)
        bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
      else
        if (TREE_CODE (tem) == ARRAY_REF
            && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
            && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)

          bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
                     * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
                     /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);

        else
          break;
    }

  bc_expand_expr (tem);

  /* For bitfields also push their offset and size */
  if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
    bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1)));
  else
    if ((SIval = bitpos / BITS_PER_UNIT))
      bc_emit_instruction (addconstPSI, SIval);

  return (TREE_OPERAND (exp, 1));
}

/* Emit code to push two SI constants */

void
bc_push_offset_and_size (offset, size)
     HOST_WIDE_INT offset, size;
{
  bc_emit_instruction (constSI, offset);
  bc_emit_instruction (constSI, size);
}

/* Emit byte code to push the address of the given lvalue expression to
   the stack.  If it's a bit field, we also push offset and size info.

   Returns innermost component, which allows us to determine not only
   its type, but also whether it's a bitfield.  */

tree
bc_expand_address (exp)
     tree exp;
{
  /* Safeguard */
  if (!exp || TREE_CODE (exp) == ERROR_MARK)
    return (exp);

  switch (TREE_CODE (exp))
    {
    case ARRAY_REF:

      return (bc_expand_address (bc_canonicalize_array_ref (exp)));

    case COMPONENT_REF:

      return (bc_expand_component_address (exp));

    case INDIRECT_REF:

      bc_expand_expr (TREE_OPERAND (exp, 0));

      /* For variable-sized types: retrieve pointer.  Sometimes the
         TYPE_SIZE tree is NULL.  Is this a bug or a feature?  Let's
         also make sure we have an operand, just in case...  */

      if (TREE_OPERAND (exp, 0)
          && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
        bc_emit_instruction (loadP);

      /* If packed, also return offset and size */
      if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
        bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
                                 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));

      return (TREE_OPERAND (exp, 0));

    case FUNCTION_DECL:

      bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
                             BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
      break;

    case PARM_DECL:

      bc_load_parmaddr (DECL_RTL (exp));

      /* For variable-sized types: retrieve pointer */
      if (TYPE_SIZE (TREE_TYPE (exp))
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
        bc_emit_instruction (loadP);

      /* If packed, also return offset and size */
      if (DECL_BIT_FIELD (exp))
        bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
                                 TREE_INT_CST_LOW (DECL_SIZE (exp)));

      break;

    case RESULT_DECL:

      bc_emit_instruction (returnP);
      break;

    case VAR_DECL:

      if (BYTECODE_LABEL (DECL_RTL (exp)))
        bc_load_externaddr (DECL_RTL (exp));
      else
        if (DECL_EXTERNAL (exp))
          bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
                                 (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
        else
          bc_load_localaddr (DECL_RTL (exp));

      /* For variable-sized types: retrieve pointer */
      if (TYPE_SIZE (TREE_TYPE (exp))
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
        bc_emit_instruction (loadP);

      /* If packed, also return offset and size */
      if (DECL_BIT_FIELD (exp))
        bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
                                 TREE_INT_CST_LOW (DECL_SIZE (exp)));

      break;

    case STRING_CST:
      {
        rtx r;

        bc_emit_bytecode (constP);
        r = output_constant_def (exp);
        bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);

#ifdef DEBUG_PRINT_CODE
        fputc ('\n', stderr);
#endif
      }
      break;

    default:

      abort ();
    }

  /* Most lvalues don't have components.  */
  return (exp);
}

/* Emit a type code to be used by the runtime support in handling
   parameter passing.  The type code consists of the machine mode
   plus the minimal alignment shifted left 8 bits.  */

tree
bc_runtime_type_code (type)
     tree type;
{
  int val;

  switch (TREE_CODE (type))
    {
    case VOID_TYPE:
    case INTEGER_TYPE:
    case REAL_TYPE:
    case COMPLEX_TYPE:
    case ENUMERAL_TYPE:
    case POINTER_TYPE:
    case RECORD_TYPE:

      val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
      break;

    case ERROR_MARK:

      val = 0;
      break;

    default:

      abort ();
    }
  return build_int_2 (val, 0);
}

/* Generate constructor label */

char *
bc_gen_constr_label ()
{
  static int label_counter;
  static char label[20];

  sprintf (label, "*LR%d", label_counter++);

  return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
}

/* Evaluate constructor CONSTR and return pointer to it on level one.  We
   expand the constructor data as static data, and push a pointer to it.
   The pointer is put in the pointer table and is retrieved by a constP
   bytecode instruction.  We then loop and store each constructor member in
   the corresponding component.  Finally, we return the original pointer on
   the stack.  */

void
bc_expand_constructor (constr)
     tree constr;
{
  char *l;
  HOST_WIDE_INT ptroffs;

  /* Literal constructors are handled as constants, whereas
     non-literals are evaluated and stored element by element
     into the data segment.  */

  /* Allocate space in proper segment and push pointer to space on stack.  */

  l = bc_gen_constr_label ();

  if (TREE_CONSTANT (constr))
    {
      text_section ();

      bc_emit_const_labeldef (l);
      bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
    }
  else
    {
      data_section ();

      bc_emit_data_labeldef (l);
      bc_output_data_constructor (constr);
    }

  /* Add reference to pointer table and recall pointer to stack;
     this code is common for both types of constructors: literals
     and non-literals.  */

  ptroffs = bc_define_pointer (l);
  bc_emit_instruction (constP, ptroffs);

  /* This is all that has to be done if it's a literal.  */
  if (TREE_CONSTANT (constr))
    return;

  /* At this point, we have the pointer to the structure on top of the stack.
     Generate sequences of store_memory calls for the constructor.  */

  /* Constructor type is structure */
  if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
    {
      register tree elt;

      /* If the constructor has fewer fields than the structure,
         clear the whole structure first.  */

      if (list_length (CONSTRUCTOR_ELTS (constr))
          != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
        {
          bc_emit_instruction (duplicate);
          bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
          bc_emit_instruction (clearBLK);
        }

      /* Store each element of the constructor into the corresponding
         field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
        {
          register tree field = TREE_PURPOSE (elt);
          register enum machine_mode mode;
          int bitsize;
          int bitpos;
          int unsignedp;

          bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
          mode = DECL_MODE (field);
          unsignedp = TREE_UNSIGNED (field);

          bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));

          bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt),
                          TREE_TYPE (TREE_VALUE (elt)),
                          /* The alignment of TARGET is
                             at least what its type requires.  */
                          VOIDmode, 0,
                          TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
                          int_size_in_bytes (TREE_TYPE (constr)));
        }
    }
  else
    /* Constructor type is array */
    if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
      {
        register tree elt;
        register int i;
        tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
        int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
        int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
        tree elttype = TREE_TYPE (TREE_TYPE (constr));

        /* If the constructor has fewer elements than the array,
           clear the whole array first.  */

        if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
          {
            bc_emit_instruction (duplicate);
            bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
            bc_emit_instruction (clearBLK);
          }

        /* Store each element of the constructor into the corresponding
           element of TARGET, determined by counting the elements.  */

        for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
             elt;
             elt = TREE_CHAIN (elt), i++)
          {
            register enum machine_mode mode;
            int bitsize;
            int bitpos;
            int unsignedp;

            mode = TYPE_MODE (elttype);
            bitsize = GET_MODE_BITSIZE (mode);
            unsignedp = TREE_UNSIGNED (elttype);

            bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
                      /* * TYPE_SIZE_UNIT (elttype) */ );

            bc_store_field (elt, bitsize, bitpos, mode,
                            TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
                            /* The alignment of TARGET is
                               at least what its type requires.  */
                            VOIDmode, 0,
                            TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
                            int_size_in_bytes (TREE_TYPE (constr)));
          }
      }
}

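/* As an illustration, a partial initializer such as

        struct { int x, y; } p = { 1 };

   has fewer constructor elements than the type has fields, so the code
   above duplicates the structure pointer, pushes the structure size
   with constSI and clears the whole object with clearBLK, before
   storing the single value 1 into field x via bc_store_field.  */
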
/* Store the value of EXP (an expression tree) into member FIELD of
   structure at address on stack, which has type TYPE, mode MODE and
   occupies BITSIZE bits, starting BITPOS bits from the beginning of the
   structure.

   ALIGN is the alignment that TARGET is known to have, measured in bytes.
   TOTAL_SIZE is its size in bytes, or -1 if variable.  */

static void
bc_store_field (field, bitsize, bitpos, mode, exp, type,
                value_mode, unsignedp, align, total_size)
     int bitsize, bitpos;
     enum machine_mode mode;
     tree field, exp, type;
     enum machine_mode value_mode;
     int unsignedp;
     int align;
     int total_size;
{
  /* Expand expression and copy pointer */
  bc_expand_expr (exp);
  bc_emit_instruction (over);

  /* If the component is a bit field, we cannot use addressing to access
     it.  Use bit-field techniques to store in it.  */

  if (DECL_BIT_FIELD (field))
    bc_store_bit_field (bitpos, bitsize, unsignedp);
  else
    /* Not bit field */
    {
      HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;

      /* Advance pointer to the desired member */
      if (offset)
        bc_emit_instruction (addconstPSI, offset);

      /* Store */
      bc_store_memory (type, field);
    }
}

/* Store SI/SU in bitfield */

void
bc_store_bit_field (offset, size, unsignedp)
     int offset, size, unsignedp;
{
  /* Push bitfield offset and size */
  bc_push_offset_and_size (offset, size);

  /* Store */
  bc_emit_instruction (sstoreBI);
}

/* Load SI/SU from bitfield */

void
bc_load_bit_field (offset, size, unsignedp)
     int offset, size, unsignedp;
{
  /* Push bitfield offset and size */
  bc_push_offset_and_size (offset, size);

  /* Load: sign-extend if signed, else zero-extend */
  bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
}

/* Adjust interpreter stack by NLEVELS.  Positive means drop NLEVELS
   (adjust stack pointer upwards), negative means add that number of
   levels (adjust the stack pointer downwards).  Only positive values
   normally make sense.  */

void
bc_adjust_stack (nlevels)
     int nlevels;
{
  switch (nlevels)
    {
    case 0:
      break;

    case 2:
      bc_emit_instruction (drop);

    case 1:
      bc_emit_instruction (drop);
      break;

    default:

      bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
      stack_depth -= nlevels;
    }

#if defined (VALIDATE_STACK_FOR_BC)
  VALIDATE_STACK_FOR_BC ();
#endif
}

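/* Thus bc_adjust_stack (2) emits two drop instructions (case 2 falls
   through into case 1), while any larger adjustment is done with a
   single adjstackSI taking the level count as its literal operand.  */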