1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 92, 93, 94, 95, 96, 1997 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
29 #include "hard-reg-set.h"
32 #include "insn-flags.h"
33 #include "insn-codes.h"
35 #include "insn-config.h"
38 #include "typeclass.h"
42 #include "bc-opcode.h"
43 #include "bc-typecd.h"
48 #define CEIL(x,y) (((x) + (y) - 1) / (y))
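
/* For example, CEIL (11, 4) is (11 + 4 - 1) / 4 = 14 / 4 = 3: the
   number of 4-byte units needed to cover 11 bytes.  */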
50 /* Decide whether a function's arguments should be processed
51 from first to last or from last to first.
53 They should if the stack and args grow in opposite directions, but
54 only if we have push insns. */
58 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif
64 #ifndef STACK_PUSH_CODE
65 #ifdef STACK_GROWS_DOWNWARD
66 #define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
72 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
73 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
75 /* If this is nonzero, we do not bother generating VOLATILE
76 around volatile memory references, and we are willing to
77 output indirect addresses. If cse is to follow, we reject
78 indirect addresses so a useful potential cse is generated;
79 if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
83 /* Nonzero to generate code for all the subroutines within an
84 expression before generating the upper levels of the expression.
85 Nowadays this is never zero. */
86 int do_preexpand_calls = 1;
88 /* Number of units that we should eventually pop off the stack.
89 These are the arguments to function calls that have already returned. */
90 int pending_stack_adjust;
92 /* Nonzero means stack pops must not be deferred, and deferred stack
93 pops must not be output. It is nonzero inside a function call,
94 inside a conditional expression, inside a statement expression,
95 and in other cases as well. */
96 int inhibit_defer_pop;
98 /* When temporaries are created by TARGET_EXPRs, they are created at
99 this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;
104 /* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;
109 /* Similarly for __builtin_apply_args. */
110 static rtx apply_args_value;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
125 int explicit_inc_from;
/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
147 /* Used to generate bytecodes: keep track of size of local variables,
148 as well as depth of arithmetic stack. (Notice that variables are
149 stored on the machine's stack, not the arithmetic stack.) */
151 static rtx get_push_address PROTO ((int));
152 extern int local_vars_size;
153 extern int stack_depth;
154 extern int max_stack_depth;
155 extern struct obstack permanent_obstack;
156 extern rtx arg_pointer_save_area;
158 static rtx enqueue_insn PROTO((rtx, rtx));
159 static int queued_subexp_p PROTO((rtx));
160 static void init_queue PROTO((void));
161 static void move_by_pieces PROTO((rtx, rtx, int, int));
162 static int move_by_pieces_ninsns PROTO((unsigned int, int));
163 static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
164 struct move_by_pieces *));
165 static void clear_by_pieces PROTO((rtx, int, int));
166 static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
167 struct clear_by_pieces *));
168 static int is_zeros_p PROTO((tree));
169 static int mostly_zeros_p PROTO((tree));
170 static void store_constructor PROTO((tree, rtx, int));
171 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
172 enum machine_mode, int, int, int));
173 static tree save_noncopied_parts PROTO((tree, tree));
174 static tree init_noncopied_parts PROTO((tree, tree));
175 static int safe_from_p PROTO((rtx, tree));
176 static int fixed_type_p PROTO((tree));
177 static rtx var_rtx PROTO((tree));
178 static int get_pointer_alignment PROTO((tree, unsigned));
179 static tree string_constant PROTO((tree, tree *));
180 static tree c_strlen PROTO((tree));
181 static rtx expand_builtin PROTO((tree, rtx, rtx,
182 enum machine_mode, int));
183 static int apply_args_size PROTO((void));
184 static int apply_result_size PROTO((void));
185 static rtx result_vector PROTO((int, rtx));
186 static rtx expand_builtin_apply_args PROTO((void));
187 static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
188 static void expand_builtin_return PROTO((rtx));
189 static rtx expand_increment PROTO((tree, int, int));
190 void bc_expand_increment PROTO((struct increment_operator *, tree));
191 rtx bc_allocate_local PROTO((int, int));
192 void bc_store_memory PROTO((tree, tree));
193 tree bc_expand_component_address PROTO((tree));
194 tree bc_expand_address PROTO((tree));
195 void bc_expand_constructor PROTO((tree));
196 void bc_adjust_stack PROTO((int));
197 tree bc_canonicalize_array_ref PROTO((tree));
198 void bc_load_memory PROTO((tree, tree));
199 void bc_load_externaddr PROTO((rtx));
200 void bc_load_externaddr_id PROTO((tree, int));
201 void bc_load_localaddr PROTO((rtx));
202 void bc_load_parmaddr PROTO((rtx));
203 static void preexpand_calls PROTO((tree));
204 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
205 void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
206 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
207 static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
208 static void do_jump_for_compare PROTO((rtx, rtx, rtx));
209 static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
210 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
211 extern tree truthvalue_conversion PROTO((tree));
213 /* Record for each mode whether we can move a register directly to or
214 from an object of that mode in memory. If we can't, we won't try
215 to use that mode directly when accessing a field of that mode. */
217 static char direct_load[NUM_MACHINE_MODES];
218 static char direct_store[NUM_MACHINE_MODES];
/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */
#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif
233 /* This array records the insn_code of insns to perform block moves. */
234 enum insn_code movstr_optab[NUM_MACHINE_MODES];
236 /* This array records the insn_code of insns to perform block clears. */
237 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
239 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
241 #ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif
245 /* Register mappings for target machines without register windows. */
246 #ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
249 #ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif
253 /* Maps used to convert modes to const, load, and store bytecodes. */
254 enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
255 enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
256 enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
/* Initialize maps used to convert modes to const, load, and store
   bytecodes.  */

void
bc_init_mode_to_opcode_maps ()
{
  int mode;

  for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
267 mode_to_const_map[mode]
268 = mode_to_load_map[mode]
269 = mode_to_store_map[mode] = neverneverland;
271 #define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
272 mode_to_const_map[(int) SYM] = CONST; \
273 mode_to_load_map[(int) SYM] = LOAD; \
274 mode_to_store_map[(int) SYM] = STORE;
276 #include "modemap.def"
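
/* For illustration, an entry in modemap.def of the form

     DEF_MODEMAP (SImode, CODE, UCODE, constSI, loadSI, storeSI)

   (the opcode names here are invented for the example) expands to

     mode_to_const_map[(int) SImode] = constSI;
     mode_to_load_map[(int) SImode] = loadSI;
     mode_to_store_map[(int) SImode] = storeSI;  */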
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
288 /* Try indexing by frame ptr and try by stack ptr.
289 It is known that on the Convex the stack ptr isn't a valid index.
290 With luck, one or the other is valid on any machine. */
291 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
292 rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
295 insn = emit_insn (gen_rtx (SET, 0, NULL_RTX, NULL_RTX));
296 pat = PATTERN (insn);
298 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
299 mode = (enum machine_mode) ((int) mode + 1))
305 direct_load[(int) mode] = direct_store[(int) mode] = 0;
306 PUT_MODE (mem, mode);
307 PUT_MODE (mem1, mode);
309 /* See if there is some register that can be used in this mode and
310 directly loaded or stored from memory. */
312 if (mode != VOIDmode && mode != BLKmode)
313 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
314 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
317 if (! HARD_REGNO_MODE_OK (regno, mode))
320 reg = gen_rtx (REG, mode, regno);
323 SET_DEST (pat) = reg;
324 if (recog (pat, insn, &num_clobbers) >= 0)
325 direct_load[(int) mode] = 1;
327 SET_SRC (pat) = mem1;
328 SET_DEST (pat) = reg;
329 if (recog (pat, insn, &num_clobbers) >= 0)
330 direct_load[(int) mode] = 1;
333 SET_DEST (pat) = mem;
334 if (recog (pat, insn, &num_clobbers) >= 0)
335 direct_store[(int) mode] = 1;
338 SET_DEST (pat) = mem1;
339 if (recog (pat, insn, &num_clobbers) >= 0)
340 direct_store[(int) mode] = 1;
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}
361 /* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();
371 p->pending_stack_adjust = pending_stack_adjust;
372 p->inhibit_defer_pop = inhibit_defer_pop;
373 p->saveregs_value = saveregs_value;
374 p->apply_args_value = apply_args_value;
375 p->forced_labels = forced_labels;
377 pending_stack_adjust = 0;
378 inhibit_defer_pop = 0;
380 apply_args_value = 0;
384 /* Restore all variables describing the current status from the structure *P.
385 This is used after a nested function. */
void
restore_expr_status (p)
     struct function *p;
{
391 pending_stack_adjust = p->pending_stack_adjust;
392 inhibit_defer_pop = p->inhibit_defer_pop;
393 saveregs_value = p->saveregs_value;
394 apply_args_value = p->apply_args_value;
395 forced_labels = p->forced_labels;
398 /* Manage the queue of increment instructions to be output
399 for POSTINCREMENT_EXPR expressions, etc. */
401 static rtx pending_chain;
403 /* Queue up to increment (or change) VAR later. BODY says how:
404 BODY should be the same thing you would pass to emit_insn
405 to increment right away. It will go to emit_insn later on.
407 The value is a QUEUED expression to be used in place of VAR
408 where you want to guarantee the pre-incrementation value of VAR. */
static rtx
enqueue_insn (var, body)
     register rtx var, body;
{
414 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
415 var, NULL_RTX, NULL_RTX, body, pending_chain);
416 return pending_chain;
419 /* Use protect_from_queue to convert a QUEUED expression
420 into something that you can put immediately into an instruction.
421 If the queued incrementation has not happened yet,
422 protect_from_queue returns the variable itself.
423 If the incrementation has happened, protect_from_queue returns a temp
424 that contains a copy of the old value of the variable.
426 Any time an rtx which might possibly be a QUEUED is to be put
427 into an instruction, it must be passed through protect_from_queue first.
428 QUEUED expressions are not meaningful in instructions.
430 Do not pass a value through protect_from_queue and then hold
431 on to it for a while before putting it in an instruction!
432 If the queue is flushed in between, incorrect code will result. */
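
/* For illustration, the typical calling pattern is a sketch like
   this, where X is about to be stored into and Y only read:

     x = protect_from_queue (x, 1);
     y = protect_from_queue (y, 0);
     emit_insn (gen_move_insn (x, y));
     emit_queue ();

   The insn is emitted immediately after the protect_from_queue
   calls, so no queue flush can intervene.  */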
rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
439 register RTX_CODE code = GET_CODE (x);
441 #if 0 /* A QUEUED can hang around after the queue is forced out. */
442 /* Shortcut for most common case. */
  if (pending_chain == 0)
    return x;
#endif
449 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
450 use of autoincrement. Make a copy of the contents of the memory
451 location rather than a copy of the address, but not if the value is
452 of mode BLKmode. Don't modify X in place since it might be
454 if (code == MEM && GET_MODE (x) != BLKmode
455 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
457 register rtx y = XEXP (x, 0);
458 register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));
460 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
461 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
462 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
466 register rtx temp = gen_reg_rtx (GET_MODE (new));
467 emit_insn_before (gen_move_insn (temp, new),
473 /* Otherwise, recursively protect the subexpressions of all
474 the kinds of rtx's that can contain a QUEUED. */
477 rtx tem = protect_from_queue (XEXP (x, 0), 0);
478 if (tem != XEXP (x, 0))
484 else if (code == PLUS || code == MULT)
486 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
487 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
488 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
497 /* If the increment has not happened, use the variable itself. */
498 if (QUEUED_INSN (x) == 0)
499 return QUEUED_VAR (x);
500 /* If the increment has happened and a pre-increment copy exists,
502 if (QUEUED_COPY (x) != 0)
503 return QUEUED_COPY (x);
504 /* The increment has happened but we haven't set up a pre-increment copy.
505 Set one up now, and use it. */
506 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
507 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
509 return QUEUED_COPY (x);
512 /* Return nonzero if X contains a QUEUED expression:
513 if it contains anything that will be altered by a queued increment.
514 We handle only combinations of MEM, PLUS, MINUS and MULT operators
515 since memory addresses generally contain only those. */
521 register enum rtx_code code = GET_CODE (x);
527 return queued_subexp_p (XEXP (x, 0));
531 return queued_subexp_p (XEXP (x, 0))
532 || queued_subexp_p (XEXP (x, 1));
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;

  while (p = pending_chain)
545 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
546 pending_chain = QUEUED_NEXT (p);
557 /* Copy data from FROM to TO, where the machine modes are not the same.
558 Both modes may be integer, or both may be floating.
559 UNSIGNEDP should be nonzero if FROM is an unsigned type.
560 This causes zero-extension instead of sign-extension. */
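
/* For illustration, a sketch that widens a QImode pseudo QIREG (an
   assumed name) to SImode with zero-extension:  */
#if 0
  rtx sireg = gen_reg_rtx (SImode);
  convert_move (sireg, qireg, 1);	/* UNSIGNEDP = 1: zero-extend.  */
#endif
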
void
convert_move (to, from, unsignedp)
564 register rtx to, from;
567 enum machine_mode to_mode = GET_MODE (to);
568 enum machine_mode from_mode = GET_MODE (from);
569 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
570 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
574 /* rtx code for making an equivalent value. */
575 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
577 to = protect_from_queue (to, 1);
578 from = protect_from_queue (from, 0);
  if (to_real != from_real)
    abort ();
583 /* If FROM is a SUBREG that indicates that we have already done at least
584 the required extension, strip it. We don't handle such SUBREGs as
587 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
588 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
589 >= GET_MODE_SIZE (to_mode))
590 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
591 from = gen_lowpart (to_mode, from), from_mode = to_mode;
  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();
596 if (to_mode == from_mode
597 || (from_mode == VOIDmode && CONSTANT_P (from)))
599 emit_move_insn (to, from);
607 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
609 /* Try converting directly if the insn is supported. */
      if ((code = can_extend_p (to_mode, from_mode, 0))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, UNKNOWN);
	  return;
	}
618 #ifdef HAVE_trunchfqf2
619 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
621 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
625 #ifdef HAVE_trunctqfqf2
626 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
628 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
632 #ifdef HAVE_truncsfqf2
633 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
635 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
639 #ifdef HAVE_truncdfqf2
640 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
642 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
646 #ifdef HAVE_truncxfqf2
647 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
649 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
653 #ifdef HAVE_trunctfqf2
654 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
656 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
661 #ifdef HAVE_trunctqfhf2
662 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
664 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
668 #ifdef HAVE_truncsfhf2
669 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
671 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
675 #ifdef HAVE_truncdfhf2
676 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
678 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
682 #ifdef HAVE_truncxfhf2
683 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
685 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
689 #ifdef HAVE_trunctfhf2
690 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
692 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
697 #ifdef HAVE_truncsftqf2
698 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
700 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
704 #ifdef HAVE_truncdftqf2
705 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
707 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
711 #ifdef HAVE_truncxftqf2
712 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
714 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
718 #ifdef HAVE_trunctftqf2
719 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
721 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
726 #ifdef HAVE_truncdfsf2
727 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
729 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
733 #ifdef HAVE_truncxfsf2
734 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
736 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
740 #ifdef HAVE_trunctfsf2
741 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
743 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
747 #ifdef HAVE_truncxfdf2
748 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
750 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
754 #ifdef HAVE_trunctfdf2
755 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
757 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
769 libcall = extendsfdf2_libfunc;
773 libcall = extendsfxf2_libfunc;
777 libcall = extendsftf2_libfunc;
786 libcall = truncdfsf2_libfunc;
790 libcall = extenddfxf2_libfunc;
794 libcall = extenddftf2_libfunc;
803 libcall = truncxfsf2_libfunc;
807 libcall = truncxfdf2_libfunc;
816 libcall = trunctfsf2_libfunc;
820 libcall = trunctfdf2_libfunc;
826 if (libcall == (rtx) 0)
    /* This conversion is not implemented yet.  */
    abort ();
830 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
832 emit_move_insn (to, value);
836 /* Now both modes are integers. */
838 /* Handle expanding beyond a word. */
839 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
840 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
847 enum machine_mode lowpart_mode;
848 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
850 /* Try converting directly if the insn is supported. */
851 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
854 /* If FROM is a SUBREG, put it into a register. Do this
855 so that we always generate the same set of insns for
856 better cse'ing; if an intermediate assignment occurred,
857 we won't be doing the operation directly on the SUBREG. */
858 if (optimize > 0 && GET_CODE (from) == SUBREG)
859 from = force_reg (from_mode, from);
860 emit_unop_insn (code, to, from, equiv_code);
863 /* Next, try converting via full word. */
864 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
865 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
866 != CODE_FOR_nothing))
868 if (GET_CODE (to) == REG)
869 emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
870 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
871 emit_unop_insn (code, to,
872 gen_lowpart (word_mode, to), equiv_code);
876 /* No special multiword conversion insn; do it by hand. */
879 /* Since we will turn this into a no conflict block, we must ensure
880 that the source does not overlap the target. */
882 if (reg_overlap_mentioned_p (to, from))
883 from = force_reg (from_mode, from);
885 /* Get a copy of FROM widened to a word, if necessary. */
886 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
887 lowpart_mode = word_mode;
889 lowpart_mode = from_mode;
891 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
893 lowpart = gen_lowpart (lowpart_mode, to);
894 emit_move_insn (lowpart, lowfrom);
896 /* Compute the value to put in each remaining word. */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
904 && STORE_FLAG_VALUE == -1)
906 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
908 fill_value = gen_reg_rtx (word_mode);
909 emit_insn (gen_slt (fill_value));
915 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
916 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
918 fill_value = convert_to_mode (word_mode, fill_value, 1);
922 /* Fill the remaining words. */
923 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
925 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
926 rtx subword = operand_subword (to, index, 1, to_mode);
931 if (fill_value != subword)
932 emit_move_insn (subword, fill_value);
935 insns = get_insns ();
938 emit_no_conflict_block (insns, to, from, NULL_RTX,
939 gen_rtx (equiv_code, to_mode, copy_rtx (from)));
943 /* Truncating multi-word to a word or less. */
944 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
945 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
947 if (!((GET_CODE (from) == MEM
948 && ! MEM_VOLATILE_P (from)
949 && direct_load[(int) to_mode]
950 && ! mode_dependent_address_p (XEXP (from, 0)))
951 || GET_CODE (from) == REG
952 || GET_CODE (from) == SUBREG))
953 from = force_reg (from_mode, from);
954 convert_move (to, gen_lowpart (word_mode, from), 0);
958 /* Handle pointer conversion */ /* SPEE 900220 */
959 if (to_mode == PSImode)
961 if (from_mode != SImode)
962 from = convert_to_mode (SImode, from, unsignedp);
964 #ifdef HAVE_truncsipsi2
965 if (HAVE_truncsipsi2)
967 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
970 #endif /* HAVE_truncsipsi2 */
974 if (from_mode == PSImode)
976 if (to_mode != SImode)
978 from = convert_to_mode (SImode, from, unsignedp);
983 #ifdef HAVE_extendpsisi2
984 if (HAVE_extendpsisi2)
986 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
989 #endif /* HAVE_extendpsisi2 */
994 if (to_mode == PDImode)
996 if (from_mode != DImode)
997 from = convert_to_mode (DImode, from, unsignedp);
999 #ifdef HAVE_truncdipdi2
1000 if (HAVE_truncdipdi2)
1002 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1005 #endif /* HAVE_truncdipdi2 */
1009 if (from_mode == PDImode)
1011 if (to_mode != DImode)
1013 from = convert_to_mode (DImode, from, unsignedp);
1018 #ifdef HAVE_extendpdidi2
1019 if (HAVE_extendpdidi2)
1021 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1024 #endif /* HAVE_extendpdidi2 */
1029 /* Now follow all the conversions between integers
1030 no more than a word long. */
1032 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1033 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1034 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1035 GET_MODE_BITSIZE (from_mode)))
1037 if (!((GET_CODE (from) == MEM
1038 && ! MEM_VOLATILE_P (from)
1039 && direct_load[(int) to_mode]
1040 && ! mode_dependent_address_p (XEXP (from, 0)))
1041 || GET_CODE (from) == REG
1042 || GET_CODE (from) == SUBREG))
1043 from = force_reg (from_mode, from);
1044 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1045 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1046 from = copy_to_reg (from);
1047 emit_move_insn (to, gen_lowpart (to_mode, from));
1051 /* Handle extension. */
1052 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1054 /* Convert directly if that works. */
1055 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1056 != CODE_FOR_nothing)
1058 emit_unop_insn (code, to, from, equiv_code);
1063 enum machine_mode intermediate;
1065 /* Search for a mode to convert via. */
1066 for (intermediate = from_mode; intermediate != VOIDmode;
1067 intermediate = GET_MODE_WIDER_MODE (intermediate))
1068 if (((can_extend_p (to_mode, intermediate, unsignedp)
1069 != CODE_FOR_nothing)
1070 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
1072 && (can_extend_p (intermediate, from_mode, unsignedp)
1073 != CODE_FOR_nothing))
1075 convert_move (to, convert_to_mode (intermediate, from,
1076 unsignedp), unsignedp);
1080 /* No suitable intermediate mode. */
1085 /* Support special truncate insns for certain modes. */
1087 if (from_mode == DImode && to_mode == SImode)
1089 #ifdef HAVE_truncdisi2
1090 if (HAVE_truncdisi2)
1092 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1096 convert_move (to, force_reg (from_mode, from), unsignedp);
1100 if (from_mode == DImode && to_mode == HImode)
1102 #ifdef HAVE_truncdihi2
1103 if (HAVE_truncdihi2)
1105 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1109 convert_move (to, force_reg (from_mode, from), unsignedp);
1113 if (from_mode == DImode && to_mode == QImode)
1115 #ifdef HAVE_truncdiqi2
1116 if (HAVE_truncdiqi2)
1118 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1122 convert_move (to, force_reg (from_mode, from), unsignedp);
1126 if (from_mode == SImode && to_mode == HImode)
1128 #ifdef HAVE_truncsihi2
1129 if (HAVE_truncsihi2)
1131 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1135 convert_move (to, force_reg (from_mode, from), unsignedp);
1139 if (from_mode == SImode && to_mode == QImode)
1141 #ifdef HAVE_truncsiqi2
1142 if (HAVE_truncsiqi2)
1144 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1148 convert_move (to, force_reg (from_mode, from), unsignedp);
1152 if (from_mode == HImode && to_mode == QImode)
1154 #ifdef HAVE_trunchiqi2
1155 if (HAVE_trunchiqi2)
1157 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1161 convert_move (to, force_reg (from_mode, from), unsignedp);
1165 if (from_mode == TImode && to_mode == DImode)
1167 #ifdef HAVE_trunctidi2
1168 if (HAVE_trunctidi2)
1170 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1174 convert_move (to, force_reg (from_mode, from), unsignedp);
1178 if (from_mode == TImode && to_mode == SImode)
1180 #ifdef HAVE_trunctisi2
1181 if (HAVE_trunctisi2)
1183 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1187 convert_move (to, force_reg (from_mode, from), unsignedp);
1191 if (from_mode == TImode && to_mode == HImode)
1193 #ifdef HAVE_trunctihi2
1194 if (HAVE_trunctihi2)
1196 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1200 convert_move (to, force_reg (from_mode, from), unsignedp);
1204 if (from_mode == TImode && to_mode == QImode)
1206 #ifdef HAVE_trunctiqi2
1207 if (HAVE_trunctiqi2)
1209 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1213 convert_move (to, force_reg (from_mode, from), unsignedp);
1217 /* Handle truncation of volatile memrefs, and so on;
1218 the things that couldn't be truncated directly,
1219 and for which there was no special instruction. */
1220 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1222 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1223 emit_move_insn (to, temp);
1227 /* Mode combination is not recognized. */
1231 /* Return an rtx for a value that would result
1232 from converting X to mode MODE.
1233 Both X and MODE may be floating, or both integer.
1234 UNSIGNEDP is nonzero if X is an unsigned value.
1235 This can be done by referring to a part of X in place
1236 or by copying to a new temporary with conversion.
1238 This function *must not* call protect_from_queue
1239 except when putting X into an insn (in which case convert_move does it). */
rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
1250 /* Return an rtx for a value that would result
1251 from converting X from mode OLDMODE to mode MODE.
1252 Both modes may be floating, or both integer.
1253 UNSIGNEDP is nonzero if X is an unsigned value.
1255 This can be done by referring to a part of X in place
1256 or by copying to a new temporary with conversion.
1258 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1260 This function *must not* call protect_from_queue
1261 except when putting X into an insn (in which case convert_move does it). */
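
/* For illustration, a sketch that narrows an SImode value X down to
   HImode; OLDMODE could also be given as VOIDmode here, since X
   carries its own mode unless it is a CONST_INT:  */
#if 0
  rtx narrow = convert_modes (HImode, SImode, x, 0);
#endif
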
rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     register rtx x;
     int unsignedp;
{
  register rtx temp;
1271 /* If FROM is a SUBREG that indicates that we have already done at least
1272 the required extension, strip it. */
1274 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1275 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1276 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1277 x = gen_lowpart (mode, x);
1279 if (GET_MODE (x) != VOIDmode)
1280 oldmode = GET_MODE (x);
  if (mode == oldmode)
    return x;
1285 /* There is one case that we must handle specially: If we are converting
1286 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1287 we are to interpret the constant as unsigned, gen_lowpart will do
1288 the wrong if the constant appears negative. What we want to do is
1289 make the high-order word of the constant zero, not all ones. */
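
/* For example, with a 32-bit HOST_WIDE_INT and a 64-bit MODE,
   converting (const_int -1) taken as a 32-bit unsigned value must
   yield the constant 0x00000000ffffffff, not 0xffffffffffffffff.  */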
1291 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1292 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1293 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1295 HOST_WIDE_INT val = INTVAL (x);
1297 if (oldmode != VOIDmode
1298 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1300 int width = GET_MODE_BITSIZE (oldmode);
1302 /* We need to zero extend VAL. */
1303 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1306 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1309 /* We can do this with a gen_lowpart if both desired and current modes
1310 are integer, and this is either a constant integer, a register, or a
1311 non-volatile MEM. Except for the constant case where MODE is no
1312 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1314 if ((GET_CODE (x) == CONST_INT
1315 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1316 || (GET_MODE_CLASS (mode) == MODE_INT
1317 && GET_MODE_CLASS (oldmode) == MODE_INT
1318 && (GET_CODE (x) == CONST_DOUBLE
1319 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1320 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1321 && direct_load[(int) mode])
1322 || (GET_CODE (x) == REG
1323 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1324 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1326 /* ?? If we don't know OLDMODE, we have to assume here that
1327 X does not need sign- or zero-extension. This may not be
1328 the case, but it's the best we can do. */
1329 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1330 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1332 HOST_WIDE_INT val = INTVAL (x);
1333 int width = GET_MODE_BITSIZE (oldmode);
1335 /* We must sign or zero-extend in this case. Start by
1336 zero-extending, then sign extend if we need to. */
1337 val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1340 val |= (HOST_WIDE_INT) (-1) << width;
1342 return GEN_INT (val);
1345 return gen_lowpart (mode, x);
1348 temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
1353 /* Generate several move instructions to copy LEN bytes
1354 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1355 The caller must pass FROM and TO
1356 through protect_from_queue before calling.
1357 ALIGN (in bytes) is maximum alignment we can assume. */
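
/* For illustration, a sketch copying 16 bytes between two BLKmode
   MEMs TO and FROM (already protected from the queue) when at least
   4-byte alignment may be assumed:  */
#if 0
  move_by_pieces (to, from, 16, 4);
#endif
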
static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
1364 struct move_by_pieces data;
1365 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1366 int max_size = MOVE_MAX + 1;
1369 data.to_addr = to_addr;
1370 data.from_addr = from_addr;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1375 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1378 || GET_CODE (from_addr) == POST_INC
1379 || GET_CODE (from_addr) == POST_DEC);
1381 data.explicit_inc_from = 0;
1382 data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1385 if (data.reverse) data.offset = len;
1388 data.to_struct = MEM_IN_STRUCT_P (to);
1389 data.from_struct = MEM_IN_STRUCT_P (from);
1391 /* If copying requires more than two move insns,
1392 copy addresses to registers (to make displacements shorter)
1393 and use post-increment if available. */
1394 if (!(data.autinc_from && data.autinc_to)
1395 && move_by_pieces_ninsns (len, align) > 2)
1397 #ifdef HAVE_PRE_DECREMENT
1398 if (data.reverse && ! data.autinc_from)
1400 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1401 data.autinc_from = 1;
1402 data.explicit_inc_from = -1;
1405 #ifdef HAVE_POST_INCREMENT
1406 if (! data.autinc_from)
1408 data.from_addr = copy_addr_to_reg (from_addr);
1409 data.autinc_from = 1;
1410 data.explicit_inc_from = 1;
1413 if (!data.autinc_from && CONSTANT_P (from_addr))
1414 data.from_addr = copy_addr_to_reg (from_addr);
1415 #ifdef HAVE_PRE_DECREMENT
1416 if (data.reverse && ! data.autinc_to)
1418 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1420 data.explicit_inc_to = -1;
1423 #ifdef HAVE_POST_INCREMENT
1424 if (! data.reverse && ! data.autinc_to)
1426 data.to_addr = copy_addr_to_reg (to_addr);
1428 data.explicit_inc_to = 1;
1431 if (!data.autinc_to && CONSTANT_P (to_addr))
1432 data.to_addr = copy_addr_to_reg (to_addr);
1435 if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;
1439 /* First move what we can in the largest integer mode, then go to
1440 successively smaller modes. */
1442 while (max_size > 1)
1444 enum machine_mode mode = VOIDmode, tmode;
1445 enum insn_code icode;
1447 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1448 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;
      if (mode == VOIDmode)
	break;
1455 icode = mov_optab->handlers[(int) mode].insn_code;
1456 if (icode != CODE_FOR_nothing
1457 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1458 GET_MODE_SIZE (mode)))
1459 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1461 max_size = GET_MODE_SIZE (mode);
1464 /* The code above should have handled everything. */
1469 /* Return number of insns required to move L bytes by pieces.
1470 ALIGN (in bytes) is maximum alignment we can assume. */
static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
1477 register int n_insns = 0;
1478 int max_size = MOVE_MAX + 1;
1480 if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;
1484 while (max_size > 1)
1486 enum machine_mode mode = VOIDmode, tmode;
1487 enum insn_code icode;
1489 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1490 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;
      if (mode == VOIDmode)
	break;
1497 icode = mov_optab->handlers[(int) mode].insn_code;
1498 if (icode != CODE_FOR_nothing
1499 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1500 GET_MODE_SIZE (mode)))
1501 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1503 max_size = GET_MODE_SIZE (mode);
1509 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1510 with move instructions for mode MODE. GENFUN is the gen_... function
1511 to make a move insn for that mode. DATA has all the other info. */
static void
move_by_pieces_1 (genfun, mode, data)
1515 rtx (*genfun) PROTO ((rtx, ...));
1516 enum machine_mode mode;
1517 struct move_by_pieces *data;
1519 register int size = GET_MODE_SIZE (mode);
1520 register rtx to1, from1;
1522 while (data->len >= size)
1524 if (data->reverse) data->offset -= size;
1526 to1 = (data->autinc_to
1527 ? gen_rtx (MEM, mode, data->to_addr)
1528 : copy_rtx (change_address (data->to, mode,
1529 plus_constant (data->to_addr,
1531 MEM_IN_STRUCT_P (to1) = data->to_struct;
1534 = (data->autinc_from
1535 ? gen_rtx (MEM, mode, data->from_addr)
1536 : copy_rtx (change_address (data->from, mode,
1537 plus_constant (data->from_addr,
1539 MEM_IN_STRUCT_P (from1) = data->from_struct;
1541 #ifdef HAVE_PRE_DECREMENT
1542 if (data->explicit_inc_to < 0)
1543 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1544 if (data->explicit_inc_from < 0)
1545 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1548 emit_insn ((*genfun) (to1, from1));
1549 #ifdef HAVE_POST_INCREMENT
1550 if (data->explicit_inc_to > 0)
1551 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1552 if (data->explicit_inc_from > 0)
1553 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1556 if (! data->reverse) data->offset += size;
1562 /* Emit code to move a block Y to a block X.
1563 This may be done with string-move instructions,
1564 with multiple scalar move instructions, or with a library call.
1566 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1568 SIZE is an rtx that says how long they are.
1569 ALIGN is the maximum alignment we can assume they have,
1570 measured in bytes. */
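
/* For illustration, a sketch copying a 24-byte block from Y to X
   when nothing better than byte alignment can be assumed:  */
#if 0
  emit_block_move (x, y, GEN_INT (24), 1);
#endif
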
void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
1578 if (GET_MODE (x) != BLKmode)
1581 if (GET_MODE (y) != BLKmode)
1584 x = protect_from_queue (x, 1);
1585 y = protect_from_queue (y, 0);
1586 size = protect_from_queue (size, 0);
1588 if (GET_CODE (x) != MEM)
1590 if (GET_CODE (y) != MEM)
1595 if (GET_CODE (size) == CONST_INT
1596 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1597 move_by_pieces (x, y, INTVAL (size), align);
1600 /* Try the most limited insn first, because there's no point
1601 including more than one in the machine description unless
1602 the more limited one has some advantage. */
1604 rtx opalign = GEN_INT (align);
1605 enum machine_mode mode;
1607 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1608 mode = GET_MODE_WIDER_MODE (mode))
1610 enum insn_code code = movstr_optab[(int) mode];
1612 if (code != CODE_FOR_nothing
1613 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1614 here because if SIZE is less than the mode mask, as it is
1615 returned by the macro, it will definitely be less than the
1616 actual mode mask. */
1617 && ((GET_CODE (size) == CONST_INT
1618 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1619 <= GET_MODE_MASK (mode)))
1620 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1621 && (insn_operand_predicate[(int) code][0] == 0
1622 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1623 && (insn_operand_predicate[(int) code][1] == 0
1624 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1625 && (insn_operand_predicate[(int) code][3] == 0
1626 || (*insn_operand_predicate[(int) code][3]) (opalign,
1630 rtx last = get_last_insn ();
1633 op2 = convert_to_mode (mode, size, 1);
1634 if (insn_operand_predicate[(int) code][2] != 0
1635 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1636 op2 = copy_to_mode_reg (mode, op2);
1638 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1645 delete_insns_since (last);
1649 #ifdef TARGET_MEM_FUNCTIONS
1650 emit_library_call (memcpy_libfunc, 0,
1651 VOIDmode, 3, XEXP (x, 0), Pmode,
1653 convert_to_mode (TYPE_MODE (sizetype), size,
1654 TREE_UNSIGNED (sizetype)),
1655 TYPE_MODE (sizetype));
1657 emit_library_call (bcopy_libfunc, 0,
1658 VOIDmode, 3, XEXP (y, 0), Pmode,
1660 convert_to_mode (TYPE_MODE (integer_type_node), size,
1661 TREE_UNSIGNED (integer_type_node)),
1662 TYPE_MODE (integer_type_node));
1667 /* Copy all or part of a value X into registers starting at REGNO.
1668 The number of registers to be filled is NREGS. */
void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
1675 enum machine_mode mode;
1683 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1684 x = validize_mem (force_const_mem (mode, x));
1686 /* See if the machine can do this with a load multiple insn. */
1687 #ifdef HAVE_load_multiple
1688 if (HAVE_load_multiple)
1690 last = get_last_insn ();
1691 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1699 delete_insns_since (last);
1703 for (i = 0; i < nregs; i++)
1704 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1705 operand_subword_force (x, i, mode));
1708 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1709 The number of registers to be filled is NREGS. SIZE indicates the number
1710 of bytes in the object X. */
void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
1722 enum machine_mode mode;
1724 /* If SIZE is that of a mode no bigger than a word, just use that
1725 mode's store operation. */
1726 if (size <= UNITS_PER_WORD
1727 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1729 emit_move_insn (change_address (x, mode, NULL),
1730 gen_rtx (REG, mode, regno));
1734 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1735 to the left before storing to memory. Note that the previous test
1736 doesn't handle all cases (e.g. SIZE == 3). */
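/* For example, with SIZE == 3 and 32-bit words, the three significant
   bytes arrive in the low 24 bits of the register; the shift left by
   (UNITS_PER_WORD - SIZE) * BITS_PER_UNIT = 8 bits below moves them to
   the high-order end of the word, where the first three bytes of a
   word sit in memory on a big-endian machine.  */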
1737 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1739 rtx tem = operand_subword (x, 0, 1, BLKmode);
1745 shift = expand_shift (LSHIFT_EXPR, word_mode,
1746 gen_rtx (REG, word_mode, regno),
1747 build_int_2 ((UNITS_PER_WORD - size)
1748 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1749 emit_move_insn (tem, shift);
1753 /* See if the machine can do this with a store multiple insn. */
1754 #ifdef HAVE_store_multiple
1755 if (HAVE_store_multiple)
1757 last = get_last_insn ();
1758 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
1766 delete_insns_since (last);
1770 for (i = 0; i < nregs; i++)
1772 rtx tem = operand_subword (x, i, 1, BLKmode);
1777 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1781 /* Emit code to move a block Y to a block X, where X is non-consecutive
1782 registers represented by a PARALLEL. */
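
/* For illustration, such a PARALLEL might look like this for a
   structure passed in two word registers (the register numbers are
   invented for the example); the second operand of each EXPR_LIST is
   the byte offset of that piece within the whole block:

     (parallel [(expr_list (reg:SI 3) (const_int 0))
		(expr_list (reg:SI 4) (const_int 4))])  */
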
void
emit_group_load (x, y)
     rtx x, y;
{
  rtx target_reg, source;
  int i;
1791 if (GET_CODE (x) != PARALLEL)
1794 /* Check for a NULL entry, used to indicate that the parameter goes
1795 both on the stack and in registers. */
1796 if (XEXP (XVECEXP (x, 0, 0), 0))
1801 for (; i < XVECLEN (x, 0); i++)
1803 rtx element = XVECEXP (x, 0, i);
1805 target_reg = XEXP (element, 0);
1807 if (GET_CODE (y) == MEM)
1808 source = change_address (y, GET_MODE (target_reg),
1809 plus_constant (XEXP (y, 0),
1810 INTVAL (XEXP (element, 1))));
1811 else if (XEXP (element, 1) == const0_rtx)
1813 if (GET_MODE (target_reg) == GET_MODE (y))
1815 /* Allow for the target_reg to be smaller than the input register
1816 to allow for AIX with 4 DF arguments after a single SI arg. The
1817 last DF argument will only load 1 word into the integer registers,
1818 but load a DF value into the float registers. */
1819 else if ((GET_MODE_SIZE (GET_MODE (target_reg))
1820 <= GET_MODE_SIZE (GET_MODE (y)))
1821 && GET_MODE (target_reg) == word_mode)
1822 /* This might be a const_double, so we can't just use SUBREG. */
1823 source = operand_subword (y, 0, 0, VOIDmode);
1824 else if (GET_MODE_SIZE (GET_MODE (target_reg))
1825 == GET_MODE_SIZE (GET_MODE (y)))
1826 source = gen_lowpart (GET_MODE (target_reg), y);
1833 emit_move_insn (target_reg, source);
1837 /* Emit code to move a block Y to a block X, where Y is non-consecutive
1838 registers represented by a PARALLEL. */
void
emit_group_store (x, y)
     rtx x, y;
{
  rtx source_reg, target;
  int i;
1847 if (GET_CODE (y) != PARALLEL)
1850 /* Check for a NULL entry, used to indicate that the parameter goes
1851 both on the stack and in registers. */
1852 if (XEXP (XVECEXP (y, 0, 0), 0))
1857 for (; i < XVECLEN (y, 0); i++)
1859 rtx element = XVECEXP (y, 0, i);
1861 source_reg = XEXP (element, 0);
1863 if (GET_CODE (x) == MEM)
1864 target = change_address (x, GET_MODE (source_reg),
1865 plus_constant (XEXP (x, 0),
1866 INTVAL (XEXP (element, 1))));
1867 else if (XEXP (element, 1) == const0_rtx)
1870 if (GET_MODE (target) != GET_MODE (source_reg))
1871 target = gen_lowpart (GET_MODE (source_reg), target);
1876 emit_move_insn (target, source_reg);
1880 /* Add a USE expression for REG to the (possibly empty) list pointed
1881 to by CALL_FUSAGE. REG must denote a hard register. */
void
use_reg (call_fusage, reg)
1885 rtx *call_fusage, reg;
1887 if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx (EXPR_LIST, VOIDmode,
1893 gen_rtx (USE, VOIDmode, reg), *call_fusage);
1896 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1897 starting at REGNO. All of these registers must be hard registers. */
void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;
  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();
1910 for (i = 0; i < nregs; i++)
1911 use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
1914 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
1915 PARALLEL REGS. This is for calls that pass values in multiple
1916 non-contiguous locations. The Irix 6 ABI has examples of this. */
void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;
1925 /* Check for a NULL entry, used to indicate that the parameter goes
1926 both on the stack and in registers. */
1927 if (XEXP (XVECEXP (regs, 0, 0), 0))
1932 for (; i < XVECLEN (regs, 0); i++)
1933 use_reg (call_fusage, XEXP (XVECEXP (regs, 0, i), 0));
1936 /* Generate several move instructions to clear LEN bytes of block TO.
1937 (A MEM rtx with BLKmode). The caller must pass TO through
   protect_from_queue before calling.  ALIGN (in bytes) is maximum alignment
   we can assume.  */

static void
clear_by_pieces (to, len, align)
     rtx to;
     int len, align;
{
1946 struct clear_by_pieces data;
1947 rtx to_addr = XEXP (to, 0);
1948 int max_size = MOVE_MAX + 1;
1951 data.to_addr = to_addr;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1955 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1957 data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1960 if (data.reverse) data.offset = len;
1963 data.to_struct = MEM_IN_STRUCT_P (to);
1965 /* If copying requires more than two move insns,
1966 copy addresses to registers (to make displacements shorter)
1967 and use post-increment if available. */
  if (!data.autinc_to
      && move_by_pieces_ninsns (len, align) > 2)
1971 #ifdef HAVE_PRE_DECREMENT
1972 if (data.reverse && ! data.autinc_to)
1974 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1976 data.explicit_inc_to = -1;
1979 #ifdef HAVE_POST_INCREMENT
1980 if (! data.reverse && ! data.autinc_to)
1982 data.to_addr = copy_addr_to_reg (to_addr);
1984 data.explicit_inc_to = 1;
1987 if (!data.autinc_to && CONSTANT_P (to_addr))
1988 data.to_addr = copy_addr_to_reg (to_addr);
1991 if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;
1995 /* First move what we can in the largest integer mode, then go to
1996 successively smaller modes. */
1998 while (max_size > 1)
2000 enum machine_mode mode = VOIDmode, tmode;
2001 enum insn_code icode;
2003 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2004 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;
      if (mode == VOIDmode)
	break;
2011 icode = mov_optab->handlers[(int) mode].insn_code;
2012 if (icode != CODE_FOR_nothing
2013 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2014 GET_MODE_SIZE (mode)))
2015 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2017 max_size = GET_MODE_SIZE (mode);
2020 /* The code above should have handled everything. */
2025 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2026 with move instructions for mode MODE. GENFUN is the gen_... function
2027 to make a move insn for that mode. DATA has all the other info. */
static void
clear_by_pieces_1 (genfun, mode, data)
2031 rtx (*genfun) PROTO ((rtx, ...));
2032 enum machine_mode mode;
2033 struct clear_by_pieces *data;
2035 register int size = GET_MODE_SIZE (mode);
2038 while (data->len >= size)
2040 if (data->reverse) data->offset -= size;
2042 to1 = (data->autinc_to
2043 ? gen_rtx (MEM, mode, data->to_addr)
2044 : copy_rtx (change_address (data->to, mode,
2045 plus_constant (data->to_addr,
2047 MEM_IN_STRUCT_P (to1) = data->to_struct;
2049 #ifdef HAVE_PRE_DECREMENT
2050 if (data->explicit_inc_to < 0)
2051 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2054 emit_insn ((*genfun) (to1, const0_rtx));
2055 #ifdef HAVE_POST_INCREMENT
2056 if (data->explicit_inc_to > 0)
2057 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2060 if (! data->reverse) data->offset += size;
2066 /* Write zeros through the storage of OBJECT.
2067 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
   the maximum alignment we can assume it has, measured in bytes.  */
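
/* For illustration, a sketch zeroing a 16-byte BLKmode object when
   4-byte alignment may be assumed:  */
#if 0
  clear_storage (object, GEN_INT (16), 4);
#endif
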
void
clear_storage (object, size, align)
     rtx object;
     rtx size;
     int align;
{
2076 if (GET_MODE (object) == BLKmode)
2078 object = protect_from_queue (object, 1);
2079 size = protect_from_queue (size, 0);
2081 if (GET_CODE (size) == CONST_INT
2082 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
2083 clear_by_pieces (object, INTVAL (size), align);
2087 /* Try the most limited insn first, because there's no point
2088 including more than one in the machine description unless
2089 the more limited one has some advantage. */
2091 rtx opalign = GEN_INT (align);
2092 enum machine_mode mode;
2094 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2095 mode = GET_MODE_WIDER_MODE (mode))
2097 enum insn_code code = clrstr_optab[(int) mode];
2099 if (code != CODE_FOR_nothing
2100 /* We don't need MODE to be narrower than
2101 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2102 the mode mask, as it is returned by the macro, it will
2103 definitely be less than the actual mode mask. */
2104 && ((GET_CODE (size) == CONST_INT
2105 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2106 <= GET_MODE_MASK (mode)))
2107 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2108 && (insn_operand_predicate[(int) code][0] == 0
2109 || (*insn_operand_predicate[(int) code][0]) (object,
2111 && (insn_operand_predicate[(int) code][2] == 0
2112 || (*insn_operand_predicate[(int) code][2]) (opalign,
2116 rtx last = get_last_insn ();
2119 op1 = convert_to_mode (mode, size, 1);
2120 if (insn_operand_predicate[(int) code][1] != 0
2121 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2123 op1 = copy_to_mode_reg (mode, op1);
2125 pat = GEN_FCN ((int) code) (object, op1, opalign);
2132 delete_insns_since (last);
2137 #ifdef TARGET_MEM_FUNCTIONS
2138 emit_library_call (memset_libfunc, 0,
2140 XEXP (object, 0), Pmode,
2141 const0_rtx, TYPE_MODE (integer_type_node),
2142 convert_to_mode (TYPE_MODE (sizetype),
2143 size, TREE_UNSIGNED (sizetype)),
2144 TYPE_MODE (sizetype));
2146 emit_library_call (bzero_libfunc, 0,
2148 XEXP (object, 0), Pmode,
2149 convert_to_mode (TYPE_MODE (integer_type_node),
2151 TREE_UNSIGNED (integer_type_node)),
2152 TYPE_MODE (integer_type_node));
2157 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2160 /* Generate code to copy Y into X.
2161 Both Y and X must have the same mode, except that
2162 Y can be a constant with VOIDmode.
2163 This mode cannot be BLKmode; use emit_block_move for that.
2165 Return the last instruction emitted. */
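
/* For illustration, a sketch loading the constant 42 into a fresh
   SImode pseudo; a CONST_INT has VOIDmode, which is allowed here:  */
#if 0
  rtx reg = gen_reg_rtx (SImode);
  emit_move_insn (reg, GEN_INT (42));
#endif
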
rtx
emit_move_insn (x, y)
     rtx x, y;
{
2171 enum machine_mode mode = GET_MODE (x);
2173 x = protect_from_queue (x, 1);
2174 y = protect_from_queue (y, 0);
2176 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2179 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2180 y = force_const_mem (mode, y);
2182 /* If X or Y are memory references, verify that their addresses are valid
2184 if (GET_CODE (x) == MEM
2185 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2186 && ! push_operand (x, GET_MODE (x)))
2188 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2189 x = change_address (x, VOIDmode, XEXP (x, 0));
2191 if (GET_CODE (y) == MEM
2192 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2194 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2195 y = change_address (y, VOIDmode, XEXP (y, 0));
2197 if (mode == BLKmode)
2200 return emit_move_insn_1 (x, y);
2203 /* Low level part of emit_move_insn.
2204 Called just like emit_move_insn, but assumes X and Y
2205 are basically valid. */
rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
2211 enum machine_mode mode = GET_MODE (x);
2212 enum machine_mode submode;
2213 enum mode_class class = GET_MODE_CLASS (mode);
2216 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2218 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2220 /* Expand complex moves by moving real part and imag part, if possible. */
2221 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2222 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2224 (class == MODE_COMPLEX_INT
2225 ? MODE_INT : MODE_FLOAT),
2227 && (mov_optab->handlers[(int) submode].insn_code
2228 != CODE_FOR_nothing))
2230 /* Don't split destination if it is a stack push. */
2231 int stack = push_operand (x, GET_MODE (x));
2234 /* If this is a stack, push the highpart first, so it
2235 will be in the argument order.
2237 In that case, change_address is used only to convert
2238 the mode, not to change the address. */
2241 /* Note that the real part always precedes the imag part in memory
2242 regardless of machine's endianness. */
2243 #ifdef STACK_GROWS_DOWNWARD
2244 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2245 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2246 gen_imagpart (submode, y)));
2247 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2248 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2249 gen_realpart (submode, y)));
2251 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2252 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2253 gen_realpart (submode, y)));
2254 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2255 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2256 gen_imagpart (submode, y)));
2261 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2262 (gen_realpart (submode, x), gen_realpart (submode, y)));
2263 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2264 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2267 return get_last_insn ();
2270 /* This will handle any multi-word mode that lacks a move_insn pattern.
2271 However, you will get better code if you define such patterns,
2272 even if they must turn into multiple assembler instructions. */
2273 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2278 #ifdef PUSH_ROUNDING
2280 /* If X is a push on the stack, do the push now and replace
2281 X with a reference to the stack pointer. */
2282 if (push_operand (x, GET_MODE (x)))
2284 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2285 x = change_address (x, VOIDmode, stack_pointer_rtx);
2289 /* Show the output dies here. */
2291 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
2294 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2297 rtx xpart = operand_subword (x, i, 1, mode);
2298 rtx ypart = operand_subword (y, i, 1, mode);
2300 /* If we can't get a part of Y, put Y into memory if it is a
2301 constant. Otherwise, force it into a register. If we still
2302 can't get a part of Y, abort. */
2303 if (ypart == 0 && CONSTANT_P (y))
2305 y = force_const_mem (mode, y);
2306 ypart = operand_subword (y, i, 1, mode);
2308 else if (ypart == 0)
2309 ypart = operand_subword_force (y, i, mode);
2311 if (xpart == 0 || ypart == 0)
2312 abort ();
2314 last_insn = emit_move_insn (xpart, ypart);
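/* Illustrative sketch, not part of the original source: a caller of
   emit_move_insn needs only two rtx's of the same mode; the routine
   above then picks the named move pattern, the complex-mode split, or
   the word-by-word loop.  `dst' and `src' are hypothetical names.  */
#if 0
{
  rtx dst = gen_reg_rtx (DImode);
  rtx src = gen_rtx (MEM, DImode, stack_pointer_rtx);

  /* On a 32-bit target with no movdi pattern this falls through to
     the multi-word loop above.  */
  emit_move_insn (dst, src);
}
#endif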
2323 /* Pushing data onto the stack. */
2325 /* Push a block of length SIZE (perhaps variable)
2326 and return an rtx to address the beginning of the block.
2327 Note that it is not possible for the value returned to be a QUEUED.
2328 The value may be virtual_outgoing_args_rtx.
2330 EXTRA is the number of bytes of padding to push in addition to SIZE.
2331 BELOW nonzero means this padding comes at low addresses;
2332 otherwise, the padding comes at high addresses. */
2335 push_block (size, extra, below)
2341 size = convert_modes (Pmode, ptr_mode, size, 1);
2342 if (CONSTANT_P (size))
2343 anti_adjust_stack (plus_constant (size, extra));
2344 else if (GET_CODE (size) == REG && extra == 0)
2345 anti_adjust_stack (size);
2348 rtx temp = copy_to_mode_reg (Pmode, size);
2350 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2351 temp, 0, OPTAB_LIB_WIDEN);
2352 anti_adjust_stack (temp);
2355 #ifdef STACK_GROWS_DOWNWARD
2356 temp = virtual_outgoing_args_rtx;
2357 if (extra != 0 && below)
2358 temp = plus_constant (temp, extra);
2359 #else
2360 if (GET_CODE (size) == CONST_INT)
2361 temp = plus_constant (virtual_outgoing_args_rtx,
2362 - INTVAL (size) - (below ? 0 : extra));
2363 else if (extra != 0 && !below)
2364 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2365 negate_rtx (Pmode, plus_constant (size, extra)));
2366 else
2367 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2368 negate_rtx (Pmode, size));
2369 #endif
2371 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2372 }
2374 rtx
2375 gen_push_operand ()
2376 {
2377 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2378 }
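/* Illustrative sketch, not part of the original source: reserving a
   run-time-sized block on the stack with push_block and addressing
   it.  `size_reg' and `blk' are hypothetical names.  */
#if 0
{
  rtx size_reg = gen_reg_rtx (Pmode);	/* byte count computed at run time */
  rtx blk_addr = push_block (size_reg, 0, 0);
  rtx blk = gen_rtx (MEM, BLKmode, blk_addr);

  /* BLK now addresses the space just allocated; per the comment
     above, BLK_ADDR can never be a QUEUED.  */
}
#endif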
2380 /* Return an rtx for the address of the beginning of an as-if-it-were-pushed
2381 block of SIZE bytes. */
2384 get_push_address (size)
2389 if (STACK_PUSH_CODE == POST_DEC)
2390 temp = gen_rtx (PLUS, Pmode, stack_pointer_rtx, GEN_INT (size));
2391 else if (STACK_PUSH_CODE == POST_INC)
2392 temp = gen_rtx (MINUS, Pmode, stack_pointer_rtx, GEN_INT (size));
2394 temp = stack_pointer_rtx;
2396 return force_operand (temp, NULL_RTX);
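/* Worked example, not in the original source: with POST_DEC pushes
   the stack pointer is decremented only after each store, so once
   SIZE bytes have been pushed the block just written begins back at
   stack_pointer_rtx + SIZE.  With the pre-decrement default, the
   stack pointer itself already addresses the block, hence the plain
   stack_pointer_rtx case above.  */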
2399 /* Generate code to push X onto the stack, assuming it has mode MODE and
2401 MODE is redundant except when X is a CONST_INT (since they don't
2402 carry mode info).
2403 SIZE is an rtx for the size of data to be copied (in bytes),
2404 needed only if X is BLKmode.
2406 ALIGN (in bytes) is maximum alignment we can assume.
2408 If PARTIAL and REG are both nonzero, then copy that many of the first
2409 words of X into registers starting with REG, and push the rest of X.
2410 The amount of space pushed is decreased by PARTIAL words,
2411 rounded *down* to a multiple of PARM_BOUNDARY.
2412 REG must be a hard register in this case.
2413 If REG is zero but PARTIAL is not, take all other actions for an
2414 argument partially in registers, but do not actually load any
2415 registers.
2417 EXTRA is the amount in bytes of extra space to leave next to this arg.
2418 This is ignored if an argument block has already been allocated.
2420 On a machine that lacks real push insns, ARGS_ADDR is the address of
2421 the bottom of the argument block for this call. We use indexing off there
2422 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2423 argument block has not been preallocated.
2425 ARGS_SO_FAR is the size of args previously pushed for this call. */
2428 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2429 args_addr, args_so_far)
2431 enum machine_mode mode;
2442 enum direction stack_direction
2443 #ifdef STACK_GROWS_DOWNWARD
2444 = downward;
2445 #else
2446 = upward;
2447 #endif
2449 /* Decide where to pad the argument: `downward' for below,
2450 `upward' for above, or `none' for don't pad it.
2451 Default is below for small data on big-endian machines; else above. */
2452 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2454 /* Invert direction if stack is post-update. */
2455 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2456 if (where_pad != none)
2457 where_pad = (where_pad == downward ? upward : downward);
2459 xinner = x = protect_from_queue (x, 0);
2461 if (mode == BLKmode)
2463 /* Copy a block into the stack, entirely or partially. */
2466 int used = partial * UNITS_PER_WORD;
2467 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2475 /* USED is now the # of bytes we need not copy to the stack
2476 because registers will take care of them. */
2479 xinner = change_address (xinner, BLKmode,
2480 plus_constant (XEXP (xinner, 0), used));
2482 /* If the partial register-part of the arg counts in its stack size,
2483 skip the part of stack space corresponding to the registers.
2484 Otherwise, start copying to the beginning of the stack space,
2485 by setting SKIP to 0. */
2486 #ifndef REG_PARM_STACK_SPACE
2487 skip = 0;
2488 #else
2489 skip = used;
2490 #endif
2492 #ifdef PUSH_ROUNDING
2493 /* Do it with several push insns if that doesn't take lots of insns
2494 and if there is no difficulty with push insns that skip bytes
2495 on the stack for alignment purposes. */
2496 if (args_addr == 0
2497 && GET_CODE (size) == CONST_INT
2498 && skip == 0
2499 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2500 < MOVE_RATIO)
2501 /* Here we avoid the case of a structure whose weak alignment
2502 forces many pushes of a small amount of data,
2503 and such small pushes do rounding that causes trouble. */
2504 && ((! SLOW_UNALIGNED_ACCESS)
2505 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2506 || PUSH_ROUNDING (align) == align)
2507 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2509 /* Push padding now if padding above and stack grows down,
2510 or if padding below and stack grows up.
2511 But if space already allocated, this has already been done. */
2512 if (extra && args_addr == 0
2513 && where_pad != none && where_pad != stack_direction)
2514 anti_adjust_stack (GEN_INT (extra));
2516 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2517 INTVAL (size) - used, align);
2519 if (flag_check_memory_usage)
2523 temp = get_push_address (INTVAL(size) - used);
2524 if (GET_CODE (x) == MEM && AGGREGATE_TYPE_P (type))
2525 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2527 XEXP (xinner, 0), ptr_mode,
2528 GEN_INT (INTVAL(size) - used),
2529 TYPE_MODE (sizetype));
2531 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2533 GEN_INT (INTVAL(size) - used),
2534 TYPE_MODE (sizetype),
2535 GEN_INT (MEMORY_USE_RW), QImode);
2539 #endif /* PUSH_ROUNDING */
2541 /* Otherwise make space on the stack and copy the data
2542 to the address of that space. */
2544 /* Deduct words put into registers from the size we must copy. */
2547 if (GET_CODE (size) == CONST_INT)
2548 size = GEN_INT (INTVAL (size) - used);
2550 size = expand_binop (GET_MODE (size), sub_optab, size,
2551 GEN_INT (used), NULL_RTX, 0,
2555 /* Get the address of the stack space.
2556 In this case, we do not deal with EXTRA separately.
2557 A single stack adjust will do. */
2560 temp = push_block (size, extra, where_pad == downward);
2563 else if (GET_CODE (args_so_far) == CONST_INT)
2564 temp = memory_address (BLKmode,
2565 plus_constant (args_addr,
2566 skip + INTVAL (args_so_far)));
2568 temp = memory_address (BLKmode,
2569 plus_constant (gen_rtx (PLUS, Pmode,
2570 args_addr, args_so_far),
2572 if (flag_check_memory_usage)
2576 target = copy_to_reg (temp);
2577 if (GET_CODE (x) == MEM && AGGREGATE_TYPE_P (type))
2578 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2580 XEXP (xinner, 0), ptr_mode,
2581 size, TYPE_MODE (sizetype));
2583 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2585 size, TYPE_MODE (sizetype),
2586 GEN_INT (MEMORY_USE_RW), QImode);
2589 /* TEMP is the address of the block. Copy the data there. */
2590 if (GET_CODE (size) == CONST_INT
2591 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2594 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2595 INTVAL (size), align);
2598 /* Try the most limited insn first, because there's no point
2599 including more than one in the machine description unless
2600 the more limited one has some advantage. */
2601 #ifdef HAVE_movstrqi
2603 && GET_CODE (size) == CONST_INT
2604 && ((unsigned) INTVAL (size)
2605 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2607 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2608 xinner, size, GEN_INT (align));
2616 #ifdef HAVE_movstrhi
2618 && GET_CODE (size) == CONST_INT
2619 && ((unsigned) INTVAL (size)
2620 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2622 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2623 xinner, size, GEN_INT (align));
2631 #ifdef HAVE_movstrsi
2634 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2635 xinner, size, GEN_INT (align));
2643 #ifdef HAVE_movstrdi
2646 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2647 xinner, size, GEN_INT (align));
2656 #ifndef ACCUMULATE_OUTGOING_ARGS
2657 /* If the source is referenced relative to the stack pointer,
2658 copy it to another register to stabilize it. We do not need
2659 to do this if we know that we won't be changing sp. */
2661 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2662 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2663 temp = copy_to_reg (temp);
2666 /* Make inhibit_defer_pop nonzero around the library call
2667 to force it to pop the bcopy-arguments right away. */
2669 #ifdef TARGET_MEM_FUNCTIONS
2670 emit_library_call (memcpy_libfunc, 0,
2671 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2672 convert_to_mode (TYPE_MODE (sizetype),
2673 size, TREE_UNSIGNED (sizetype)),
2674 TYPE_MODE (sizetype));
2676 emit_library_call (bcopy_libfunc, 0,
2677 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2678 convert_to_mode (TYPE_MODE (integer_type_node),
2680 TREE_UNSIGNED (integer_type_node)),
2681 TYPE_MODE (integer_type_node));
2686 else if (partial > 0)
2688 /* Scalar partly in registers. */
2690 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2693 /* # words of start of argument
2694 that we must make space for but need not store. */
2695 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2696 int args_offset = INTVAL (args_so_far);
2699 /* Push padding now if padding above and stack grows down,
2700 or if padding below and stack grows up.
2701 But if space already allocated, this has already been done. */
2702 if (extra && args_addr == 0
2703 && where_pad != none && where_pad != stack_direction)
2704 anti_adjust_stack (GEN_INT (extra));
2706 /* If we make space by pushing it, we might as well push
2707 the real data. Otherwise, we can leave OFFSET nonzero
2708 and leave the space uninitialized. */
2712 /* Now NOT_STACK gets the number of words that we don't need to
2713 allocate on the stack. */
2714 not_stack = partial - offset;
2716 /* If the partial register-part of the arg counts in its stack size,
2717 skip the part of stack space corresponding to the registers.
2718 Otherwise, start copying to the beginning of the stack space,
2719 by setting SKIP to 0. */
2720 #ifndef REG_PARM_STACK_SPACE
2721 skip = 0;
2722 #else
2723 skip = not_stack;
2724 #endif
2726 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2727 x = validize_mem (force_const_mem (mode, x));
2729 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2730 SUBREGs of such registers are not allowed. */
2731 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2732 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2733 x = copy_to_reg (x);
2735 /* Loop over all the words allocated on the stack for this arg. */
2736 /* We can do it by words, because any scalar bigger than a word
2737 has a size a multiple of a word. */
2738 #ifndef PUSH_ARGS_REVERSED
2739 for (i = not_stack; i < size; i++)
2740 #else
2741 for (i = size - 1; i >= not_stack; i--)
2742 #endif
2743 if (i >= not_stack + offset)
2744 emit_push_insn (operand_subword_force (x, i, mode),
2745 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2746 0, args_addr,
2747 GEN_INT (args_offset + ((i - not_stack + skip)
2748 * UNITS_PER_WORD)));
2753 rtx target = NULL_RTX;
2755 /* Push padding now if padding above and stack grows down,
2756 or if padding below and stack grows up.
2757 But if space already allocated, this has already been done. */
2758 if (extra && args_addr == 0
2759 && where_pad != none && where_pad != stack_direction)
2760 anti_adjust_stack (GEN_INT (extra));
2762 #ifdef PUSH_ROUNDING
2764 addr = gen_push_operand ();
2768 if (GET_CODE (args_so_far) == CONST_INT)
2769 addr
2770 = memory_address (mode,
2771 plus_constant (args_addr,
2772 INTVAL (args_so_far)));
2773 else
2774 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2775 args_so_far));
2779 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2781 if (flag_check_memory_usage)
2784 target = get_push_address (GET_MODE_SIZE (mode));
2786 if (GET_CODE (x) == MEM && AGGREGATE_TYPE_P (type))
2787 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2789 XEXP (x, 0), ptr_mode,
2790 GEN_INT (GET_MODE_SIZE (mode)),
2791 TYPE_MODE (sizetype));
2793 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2795 GEN_INT (GET_MODE_SIZE (mode)),
2796 TYPE_MODE (sizetype),
2797 GEN_INT (MEMORY_USE_RW), QImode);
2802 /* If part should go in registers, copy that part
2803 into the appropriate registers. Do this now, at the end,
2804 since mem-to-mem copies above may do function calls. */
2805 if (partial > 0 && reg != 0)
2807 /* Handle calls that pass values in multiple non-contiguous locations.
2808 The Irix 6 ABI has examples of this. */
2809 if (GET_CODE (reg) == PARALLEL)
2810 emit_group_load (reg, x);
2812 move_block_to_reg (REGNO (reg), x, partial, mode);
2815 if (extra && args_addr == 0 && where_pad == stack_direction)
2816 anti_adjust_stack (GEN_INT (extra));
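/* Illustrative sketch, not part of the original source: pushing one
   word-mode argument with genuine push insns (ARGS_ADDR == 0), with
   nothing passed in registers and no padding.  `arg' is hypothetical. */
#if 0
{
  rtx arg = gen_reg_rtx (word_mode);

  /* x, mode, type, size, align, partial, reg, extra, args_addr,
     args_so_far -- in the order documented above.  */
  emit_push_insn (arg, word_mode, NULL_TREE, NULL_RTX,
                  UNITS_PER_WORD, 0, NULL_RTX, 0, NULL_RTX,
                  GEN_INT (0));
}
#endif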
2819 /* Expand an assignment that stores the value of FROM into TO.
2820 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2821 (This may contain a QUEUED rtx;
2822 if the value is constant, this rtx is a constant.)
2823 Otherwise, the returned value is NULL_RTX.
2825 SUGGEST_REG is no longer actually used.
2826 It used to mean, copy the value through a register
2827 and return that register, if that is possible.
2828 We now use WANT_VALUE to decide whether to do this. */
2831 expand_assignment (to, from, want_value, suggest_reg)
2836 register rtx to_rtx = 0;
2839 /* Don't crash if the lhs of the assignment was erroneous. */
2841 if (TREE_CODE (to) == ERROR_MARK)
2843 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2844 return want_value ? result : NULL_RTX;
2847 if (output_bytecode)
2849 tree dest_innermost;
2851 bc_expand_expr (from);
2852 bc_emit_instruction (duplicate);
2854 dest_innermost = bc_expand_address (to);
2856 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2857 take care of it here. */
2859 bc_store_memory (TREE_TYPE (to), dest_innermost);
2863 /* Assignment of a structure component needs special treatment
2864 if the structure component's rtx is not simply a MEM.
2865 Assignment of an array element at a constant index, and assignment of
2866 an array element in an unaligned packed structure field, has the same
2867 problem. */
2869 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
2870 || TREE_CODE (to) == ARRAY_REF)
2872 enum machine_mode mode1;
2882 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
2883 &unsignedp, &volatilep, &alignment);
2885 /* If we are going to use store_bit_field and extract_bit_field,
2886 make sure to_rtx will be safe for multiple use. */
2888 if (mode1 == VOIDmode && want_value)
2889 tem = stabilize_reference (tem);
2891 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
2894 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2896 if (GET_CODE (to_rtx) != MEM)
2898 to_rtx = change_address (to_rtx, VOIDmode,
2899 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
2900 force_reg (ptr_mode, offset_rtx)));
2904 if (GET_CODE (to_rtx) == MEM)
2906 /* When the offset is zero, to_rtx is the address of the
2907 structure we are storing into, and hence may be shared.
2908 We must make a new MEM before setting the volatile bit. */
2910 to_rtx = copy_rtx (to_rtx);
2912 MEM_VOLATILE_P (to_rtx) = 1;
2914 #if 0 /* This was turned off because, when a field is volatile
2915 in an object which is not volatile, the object may be in a register,
2916 and then we would abort over here. */
2922 /* Check the access. */
2923 if (flag_check_memory_usage && GET_CODE (to_rtx) == MEM)
2928 enum machine_mode best_mode;
2930 best_mode = get_best_mode (bitsize, bitpos,
2931 TYPE_ALIGN (TREE_TYPE (tem)),
2933 if (best_mode == VOIDmode)
2936 best_mode_size = GET_MODE_BITSIZE (best_mode);
2937 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
2938 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
2939 size *= GET_MODE_SIZE (best_mode);
2941 /* Check the access right of the pointer. */
2942 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3, to_addr,
2943 ptr_mode, GEN_INT (size), TYPE_MODE (sizetype),
2944 GEN_INT (MEMORY_USE_WO), QImode);
2947 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2949 /* Spurious cast makes HPUX compiler happy. */
2950 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2953 /* Required alignment of containing datum. */
2955 int_size_in_bytes (TREE_TYPE (tem)));
2956 preserve_temp_slots (result);
2960 /* If the value is meaningful, convert RESULT to the proper mode.
2961 Otherwise, return nothing. */
2962 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2963 TYPE_MODE (TREE_TYPE (from)),
2965 TREE_UNSIGNED (TREE_TYPE (to)))
2969 /* If the rhs is a function call and its value is not an aggregate,
2970 call the function before we start to compute the lhs.
2971 This is needed for correct code for cases such as
2972 val = setjmp (buf) on machines where reference to val
2973 requires loading up part of an address in a separate insn.
2975 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2976 a promoted variable where the zero- or sign- extension needs to be done.
2977 Handling this in the normal way is safe because no computation is done
2978 before the assignment. */
2979 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2980 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
2981 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
2986 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2988 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
2990 /* Handle calls that return values in multiple non-contiguous locations.
2991 The Irix 6 ABI has examples of this. */
2992 if (GET_CODE (to_rtx) == PARALLEL)
2993 emit_group_load (to_rtx, value);
2994 else if (GET_MODE (to_rtx) == BLKmode)
2995 emit_block_move (to_rtx, value, expr_size (from),
2996 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
2998 emit_move_insn (to_rtx, value);
2999 preserve_temp_slots (to_rtx);
3002 return want_value ? to_rtx : NULL_RTX;
3005 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3006 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3009 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3011 /* Don't move directly into a return register. */
3012 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3017 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3018 emit_move_insn (to_rtx, temp);
3019 preserve_temp_slots (to_rtx);
3022 return want_value ? to_rtx : NULL_RTX;
3025 /* In case we are returning the contents of an object which overlaps
3026 the place the value is being stored, use a safe function when copying
3027 a value through a pointer into a structure value return block. */
3028 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3029 && current_function_returns_struct
3030 && !current_function_returns_pcc_struct)
3035 size = expr_size (from);
3036 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3037 EXPAND_MEMORY_USE_DONT);
3039 /* Copy the rights of the bitmap. */
3040 if (flag_check_memory_usage)
3041 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3042 XEXP (to_rtx, 0), ptr_mode,
3043 XEXP (from_rtx, 0), ptr_mode,
3044 convert_to_mode (TYPE_MODE (sizetype),
3045 size, TREE_UNSIGNED (sizetype)),
3046 TYPE_MODE (sizetype));
3048 #ifdef TARGET_MEM_FUNCTIONS
3049 emit_library_call (memcpy_libfunc, 0,
3050 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3051 XEXP (from_rtx, 0), Pmode,
3052 convert_to_mode (TYPE_MODE (sizetype),
3053 size, TREE_UNSIGNED (sizetype)),
3054 TYPE_MODE (sizetype));
3056 emit_library_call (bcopy_libfunc, 0,
3057 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3058 XEXP (to_rtx, 0), Pmode,
3059 convert_to_mode (TYPE_MODE (integer_type_node),
3060 size, TREE_UNSIGNED (integer_type_node)),
3061 TYPE_MODE (integer_type_node));
3064 preserve_temp_slots (to_rtx);
3067 return want_value ? to_rtx : NULL_RTX;
3070 /* Compute FROM and store the value in the rtx we got. */
3073 result = store_expr (from, to_rtx, want_value);
3074 preserve_temp_slots (result);
3077 return want_value ? result : NULL_RTX;
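/* Illustrative sketch, not part of the original source: how a front
   end might call expand_assignment; `lhs' and `rhs' stand for trees
   it has already built.  */
#if 0
{
  tree lhs, rhs;
  rtx val;

  /* Statement context, value unused: returns NULL_RTX.  */
  expand_assignment (lhs, rhs, 0, 0);

  /* Value context, e.g. `a = (b = c)': ask for an rtx back.  */
  val = expand_assignment (lhs, rhs, 1, 0);
}
#endif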
3080 /* Generate code for computing expression EXP,
3081 and storing the value into TARGET.
3082 TARGET may contain a QUEUED rtx.
3084 If WANT_VALUE is nonzero, return a copy of the value
3085 not in TARGET, so that we can be sure to use the proper
3086 value in a containing expression even if TARGET has something
3087 else stored in it. If possible, we copy the value through a pseudo
3088 and return that pseudo. Or, if the value is constant, we try to
3089 return the constant. In some cases, we return a pseudo
3090 copied *from* TARGET.
3092 If the mode is BLKmode then we may return TARGET itself.
3093 It turns out that in BLKmode it doesn't cause a problem,
3094 because C has no operators that could combine two different
3095 assignments into the same BLKmode object with different values
3096 with no sequence point. Will other languages need this to
3097 be more careful?
3099 If WANT_VALUE is 0, we return NULL, to make sure
3100 to catch quickly any cases where the caller uses the value
3101 and fails to set WANT_VALUE. */
3104 store_expr (exp, target, want_value)
3106 register rtx target;
3110 int dont_return_target = 0;
3112 if (TREE_CODE (exp) == COMPOUND_EXPR)
3114 /* Perform first part of compound expression, then assign from second
3115 part. */
3116 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3118 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3120 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3122 /* For conditional expression, get safe form of the target. Then
3123 test the condition, doing the appropriate assignment on either
3124 side. This avoids the creation of unnecessary temporaries.
3125 For non-BLKmode, it is more efficient not to do this. */
3127 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3130 target = protect_from_queue (target, 1);
3132 do_pending_stack_adjust ();
3134 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3135 start_cleanup_deferal ();
3136 store_expr (TREE_OPERAND (exp, 1), target, 0);
3137 end_cleanup_deferal ();
3139 emit_jump_insn (gen_jump (lab2));
3142 start_cleanup_deferal ();
3143 store_expr (TREE_OPERAND (exp, 2), target, 0);
3144 end_cleanup_deferal ();
3149 return want_value ? target : NULL_RTX;
3151 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3152 && GET_MODE (target) != BLKmode)
3153 /* If target is in memory and caller wants value in a register instead,
3154 arrange that. Pass TARGET as target for expand_expr so that,
3155 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3156 We know expand_expr will not use the target in that case.
3157 Don't do this if TARGET is volatile because we are supposed
3158 to write it and then read it. */
3160 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3161 GET_MODE (target), 0);
3162 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3163 temp = copy_to_reg (temp);
3164 dont_return_target = 1;
3166 else if (queued_subexp_p (target))
3167 /* If target contains a postincrement, let's not risk
3168 using it as the place to generate the rhs. */
3170 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3172 /* Expand EXP into a new pseudo. */
3173 temp = gen_reg_rtx (GET_MODE (target));
3174 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3177 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3179 /* If target is volatile, ANSI requires accessing the value
3180 *from* the target, if it is accessed. So make that happen.
3181 In no case return the target itself. */
3182 if (! MEM_VOLATILE_P (target) && want_value)
3183 dont_return_target = 1;
3185 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3186 /* If this is a scalar in a register that is stored in a wider mode
3187 than the declared mode, compute the result into its declared mode
3188 and then convert to the wider mode. Our value is the computed
3189 expression. */
3191 /* If we don't want a value, we can do the conversion inside EXP,
3192 which will often result in some optimizations. Do the conversion
3193 in two steps: first change the signedness, if needed, then do
3194 the extension. But don't do this if the type of EXP is a subtype
3195 of something else since then the conversion might involve
3196 more than just converting modes. */
3197 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3198 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3200 if (TREE_UNSIGNED (TREE_TYPE (exp))
3201 != SUBREG_PROMOTED_UNSIGNED_P (target))
3204 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3208 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3209 SUBREG_PROMOTED_UNSIGNED_P (target)),
3213 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3215 /* If TEMP is a volatile MEM and we want a result value, make
3216 the access now so it gets done only once. Likewise if
3217 it contains TARGET. */
3218 if (GET_CODE (temp) == MEM && want_value
3219 && (MEM_VOLATILE_P (temp)
3220 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3221 temp = copy_to_reg (temp);
3223 /* If TEMP is a VOIDmode constant, use convert_modes to make
3224 sure that we properly convert it. */
3225 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3226 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3227 TYPE_MODE (TREE_TYPE (exp)), temp,
3228 SUBREG_PROMOTED_UNSIGNED_P (target));
3230 convert_move (SUBREG_REG (target), temp,
3231 SUBREG_PROMOTED_UNSIGNED_P (target));
3232 return want_value ? temp : NULL_RTX;
3236 temp = expand_expr (exp, target, GET_MODE (target), 0);
3237 /* Return TARGET if it's a specified hardware register.
3238 If TARGET is a volatile mem ref, either return TARGET
3239 or return a reg copied *from* TARGET; ANSI requires this.
3241 Otherwise, if TEMP is not TARGET, return TEMP
3242 if it is constant (for efficiency),
3243 or if we really want the correct value. */
3244 if (!(target && GET_CODE (target) == REG
3245 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3246 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3247 && ! rtx_equal_p (temp, target)
3248 && (CONSTANT_P (temp) || want_value))
3249 dont_return_target = 1;
3252 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3253 the same as that of TARGET, adjust the constant. This is needed, for
3254 example, in case it is a CONST_DOUBLE and we want only a word-sized
3255 value. */
3256 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3257 && TREE_CODE (exp) != ERROR_MARK
3258 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3259 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3260 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3262 if (flag_check_memory_usage
3263 && GET_CODE (target) == MEM
3264 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3266 if (GET_CODE (temp) == MEM)
3267 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3268 XEXP (target, 0), ptr_mode,
3269 XEXP (temp, 0), ptr_mode,
3270 expr_size (exp), TYPE_MODE (sizetype));
3272 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3273 XEXP (target, 0), ptr_mode,
3274 expr_size (exp), TYPE_MODE (sizetype),
3275 GEN_INT (MEMORY_USE_WO), QImode);
3278 /* If value was not generated in the target, store it there.
3279 Convert the value to TARGET's type first if necessary. */
3281 if (! rtx_equal_p (temp, target) && TREE_CODE (exp) != ERROR_MARK)
3283 target = protect_from_queue (target, 1);
3284 if (GET_MODE (temp) != GET_MODE (target)
3285 && GET_MODE (temp) != VOIDmode)
3287 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3288 if (dont_return_target)
3290 /* In this case, we will return TEMP,
3291 so make sure it has the proper mode.
3292 But don't forget to store the value into TARGET. */
3293 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3294 emit_move_insn (target, temp);
3297 convert_move (target, temp, unsignedp);
3300 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3302 /* Handle copying a string constant into an array.
3303 The string constant may be shorter than the array.
3304 So copy just the string's actual length, and clear the rest. */
3308 /* Get the size of the data type of the string,
3309 which is actually the size of the target. */
3310 size = expr_size (exp);
3311 if (GET_CODE (size) == CONST_INT
3312 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3313 emit_block_move (target, temp, size,
3314 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3317 /* Compute the size of the data to copy from the string. */
3319 = size_binop (MIN_EXPR,
3320 make_tree (sizetype, size),
3322 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3323 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3327 /* Copy that much. */
3328 emit_block_move (target, temp, copy_size_rtx,
3329 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3331 /* Figure out how much is left in TARGET that we have to clear.
3332 Do all calculations in ptr_mode. */
3334 addr = XEXP (target, 0);
3335 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3337 if (GET_CODE (copy_size_rtx) == CONST_INT)
3339 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3340 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3344 addr = force_reg (ptr_mode, addr);
3345 addr = expand_binop (ptr_mode, add_optab, addr,
3346 copy_size_rtx, NULL_RTX, 0,
3349 size = expand_binop (ptr_mode, sub_optab, size,
3350 copy_size_rtx, NULL_RTX, 0,
3353 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
3354 GET_MODE (size), 0, 0);
3355 label = gen_label_rtx ();
3356 emit_jump_insn (gen_blt (label));
3359 if (size != const0_rtx)
3361 /* Be sure we can write on ADDR. */
3362 if (flag_check_memory_usage)
3363 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3365 size, TYPE_MODE (sizetype),
3366 GEN_INT (MEMORY_USE_WO), QImode);
3367 #ifdef TARGET_MEM_FUNCTIONS
3368 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3370 const0_rtx, TYPE_MODE (integer_type_node),
3371 convert_to_mode (TYPE_MODE (sizetype),
3373 TREE_UNSIGNED (sizetype)),
3374 TYPE_MODE (sizetype));
3376 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3378 convert_to_mode (TYPE_MODE (integer_type_node),
3380 TREE_UNSIGNED (integer_type_node)),
3381 TYPE_MODE (integer_type_node));
3389 /* Handle calls that return values in multiple non-contiguous locations.
3390 The Irix 6 ABI has examples of this. */
3391 else if (GET_CODE (target) == PARALLEL)
3392 emit_group_load (target, temp);
3393 else if (GET_MODE (temp) == BLKmode)
3394 emit_block_move (target, temp, expr_size (exp),
3395 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3397 emit_move_insn (target, temp);
3400 /* If we don't want a value, return NULL_RTX. */
3404 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3405 ??? The latter test doesn't seem to make sense. */
3406 else if (dont_return_target && GET_CODE (temp) != MEM)
3409 /* Return TARGET itself if it is a hard register. */
3410 else if (want_value && GET_MODE (target) != BLKmode
3411 && ! (GET_CODE (target) == REG
3412 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3413 return copy_to_reg (target);
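/* Illustrative sketch, not part of the original source: computing an
   expression straight into a known target and discarding the value;
   `exp' and `target' are hypothetical.  */
#if 0
{
  tree exp;	/* the expression to compute */
  rtx target;	/* REG or MEM that must receive the value */

  /* WANT_VALUE == 0, so NULL_RTX comes back and no copy is made.  */
  store_expr (exp, target, 0);
}
#endif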
3419 /* Return 1 if EXP just contains zeros. */
3427 switch (TREE_CODE (exp))
3431 case NON_LVALUE_EXPR:
3432 return is_zeros_p (TREE_OPERAND (exp, 0));
3435 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3439 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3442 return REAL_VALUES_EQUAL (TREE_REAL_CST (exp), dconst0);
3445 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3446 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3447 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3448 if (! is_zeros_p (TREE_VALUE (elt)))
3457 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
3460 mostly_zeros_p (exp)
3463 if (TREE_CODE (exp) == CONSTRUCTOR)
3465 int elts = 0, zeros = 0;
3466 tree elt = CONSTRUCTOR_ELTS (exp);
3467 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3469 /* If there are no ranges of true bits, it is all zero. */
3470 return elt == NULL_TREE;
3472 for (; elt; elt = TREE_CHAIN (elt))
3474 /* We do not handle the case where the index is a RANGE_EXPR,
3475 so the statistic will be somewhat inaccurate.
3476 We do make a more accurate count in store_constructor itself,
3477 and since this function is only used for nested array elements,
3478 this should be close enough. */
3479 if (mostly_zeros_p (TREE_VALUE (elt)))
3484 return 4 * zeros >= 3 * elts;
3487 return is_zeros_p (exp);
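/* Worked example, not in the original source: a CONSTRUCTOR with
   elements {0, 0, 0, 7} gives zeros == 3 and elts == 4, and
   4 * 3 >= 3 * 4 holds, so it counts as mostly zero; {0, 0, 7, 7}
   gives 4 * 2 < 3 * 4 and does not.  */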
3490 /* Helper function for store_constructor.
3491 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3492 TYPE is the type of the CONSTRUCTOR, not the element type.
3493 CLEARED is as for store_constructor.
3495 This provides a recursive shortcut back to store_constructor when it isn't
3496 necessary to go through store_field. This is so that we can pass through
3497 the cleared field to let store_constructor know that we may not have to
3498 clear a substructure if the outer structure has already been cleared. */
3501 store_constructor_field (target, bitsize, bitpos,
3502 mode, exp, type, cleared)
3504 int bitsize, bitpos;
3505 enum machine_mode mode;
3509 if (TREE_CODE (exp) == CONSTRUCTOR
3510 && bitpos % BITS_PER_UNIT == 0
3511 /* If we have a non-zero bitpos for a register target, then we just
3512 let store_field do the bitfield handling. This is unlikely to
3513 generate unnecessary clear instructions anyway. */
3514 && (bitpos == 0 || GET_CODE (target) == MEM))
3517 target = change_address (target, VOIDmode,
3518 plus_constant (XEXP (target, 0),
3519 bitpos / BITS_PER_UNIT));
3520 store_constructor (exp, target, cleared);
3523 store_field (target, bitsize, bitpos, mode, exp,
3524 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3525 int_size_in_bytes (type));
3528 /* Store the value of constructor EXP into the rtx TARGET.
3529 TARGET is either a REG or a MEM.
3530 CLEARED is true if TARGET is known to have been zero'd. */
3533 store_constructor (exp, target, cleared)
3538 tree type = TREE_TYPE (exp);
3540 /* We know our target cannot conflict, since safe_from_p has been called. */
3542 /* Don't try copying piece by piece into a hard register
3543 since that is vulnerable to being clobbered by EXP.
3544 Instead, construct in a pseudo register and then copy it all. */
3545 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3547 rtx temp = gen_reg_rtx (GET_MODE (target));
3548 store_constructor (exp, temp, 0);
3549 emit_move_insn (target, temp);
3554 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3555 || TREE_CODE (type) == QUAL_UNION_TYPE)
3559 /* Inform later passes that the whole union value is dead. */
3560 if (TREE_CODE (type) == UNION_TYPE
3561 || TREE_CODE (type) == QUAL_UNION_TYPE)
3562 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3564 /* If we are building a static constructor into a register,
3565 set the initial value as zero so we can fold the value into
3566 a constant. But if more than one register is involved,
3567 this probably loses. */
3568 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3569 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
3572 emit_move_insn (target, const0_rtx);
3577 /* If the constructor has fewer fields than the structure
3578 or if we are initializing the structure to mostly zeros,
3579 clear the whole structure first. */
3580 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3581 != list_length (TYPE_FIELDS (type)))
3582 || mostly_zeros_p (exp))
3585 clear_storage (target, expr_size (exp),
3586 TYPE_ALIGN (type) / BITS_PER_UNIT);
3591 /* Inform later passes that the old value is dead. */
3592 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3594 /* Store each element of the constructor into
3595 the corresponding field of TARGET. */
3597 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3599 register tree field = TREE_PURPOSE (elt);
3600 register enum machine_mode mode;
3604 tree pos, constant = 0, offset = 0;
3605 rtx to_rtx = target;
3607 /* Just ignore missing fields.
3608 We cleared the whole structure, above,
3609 if any fields are missing. */
3613 if (cleared && is_zeros_p (TREE_VALUE (elt)))
3616 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3617 unsignedp = TREE_UNSIGNED (field);
3618 mode = DECL_MODE (field);
3619 if (DECL_BIT_FIELD (field))
3622 pos = DECL_FIELD_BITPOS (field);
3623 if (TREE_CODE (pos) == INTEGER_CST)
3625 else if (TREE_CODE (pos) == PLUS_EXPR
3626 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3627 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3632 bitpos = TREE_INT_CST_LOW (constant);
3638 if (contains_placeholder_p (offset))
3639 offset = build (WITH_RECORD_EXPR, sizetype,
3642 offset = size_binop (FLOOR_DIV_EXPR, offset,
3643 size_int (BITS_PER_UNIT));
3645 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3646 if (GET_CODE (to_rtx) != MEM)
3650 = change_address (to_rtx, VOIDmode,
3651 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
3652 force_reg (ptr_mode, offset_rtx)));
3654 if (TREE_READONLY (field))
3656 if (GET_CODE (to_rtx) == MEM)
3657 to_rtx = copy_rtx (to_rtx);
3659 RTX_UNCHANGING_P (to_rtx) = 1;
3662 store_constructor_field (to_rtx, bitsize, bitpos,
3663 mode, TREE_VALUE (elt), type, cleared);
3666 else if (TREE_CODE (type) == ARRAY_TYPE)
3671 tree domain = TYPE_DOMAIN (type);
3672 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3673 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3674 tree elttype = TREE_TYPE (type);
3676 /* If the constructor has fewer elements than the array,
3677 clear the whole array first. Similarly if this is a
3678 static constructor of a non-BLKmode object. */
3679 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3683 HOST_WIDE_INT count = 0, zero_count = 0;
3685 /* This loop is a more accurate version of the loop in
3686 mostly_zeros_p (it handles RANGE_EXPR in an index).
3687 It is also needed to check for missing elements. */
3688 for (elt = CONSTRUCTOR_ELTS (exp);
3690 elt = TREE_CHAIN (elt))
3692 tree index = TREE_PURPOSE (elt);
3693 HOST_WIDE_INT this_node_count;
3694 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3696 tree lo_index = TREE_OPERAND (index, 0);
3697 tree hi_index = TREE_OPERAND (index, 1);
3698 if (TREE_CODE (lo_index) != INTEGER_CST
3699 || TREE_CODE (hi_index) != INTEGER_CST)
3704 this_node_count = TREE_INT_CST_LOW (hi_index)
3705 - TREE_INT_CST_LOW (lo_index) + 1;
3708 this_node_count = 1;
3709 count += this_node_count;
3710 if (mostly_zeros_p (TREE_VALUE (elt)))
3711 zero_count += this_node_count;
3713 /* Clear the entire array first if there are any missing elements,
3714 or if the incidence of zero elements is >= 75%. */
3715 if (count < maxelt - minelt + 1
3716 || 4 * zero_count >= 3 * count)
3722 clear_storage (target, expr_size (exp),
3723 TYPE_ALIGN (type) / BITS_PER_UNIT);
3727 /* Inform later passes that the old value is dead. */
3728 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3730 /* Store each element of the constructor into
3731 the corresponding element of TARGET, determined
3732 by counting the elements. */
3733 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3735 elt = TREE_CHAIN (elt), i++)
3737 register enum machine_mode mode;
3741 tree value = TREE_VALUE (elt);
3742 tree index = TREE_PURPOSE (elt);
3743 rtx xtarget = target;
3745 if (cleared && is_zeros_p (value))
3748 mode = TYPE_MODE (elttype);
3749 bitsize = GET_MODE_BITSIZE (mode);
3750 unsignedp = TREE_UNSIGNED (elttype);
3752 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3754 tree lo_index = TREE_OPERAND (index, 0);
3755 tree hi_index = TREE_OPERAND (index, 1);
3756 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
3757 struct nesting *loop;
3758 HOST_WIDE_INT lo, hi, count;
3761 /* If the range is constant and "small", unroll the loop. */
3762 if (TREE_CODE (lo_index) == INTEGER_CST
3763 && TREE_CODE (hi_index) == INTEGER_CST
3764 && (lo = TREE_INT_CST_LOW (lo_index),
3765 hi = TREE_INT_CST_LOW (hi_index),
3766 count = hi - lo + 1,
3767 (GET_CODE (target) != MEM
3769 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
3770 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
3773 lo -= minelt; hi -= minelt;
3774 for (; lo <= hi; lo++)
3776 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
3777 store_constructor_field (target, bitsize, bitpos,
3778 mode, value, type, cleared);
3783 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
3784 loop_top = gen_label_rtx ();
3785 loop_end = gen_label_rtx ();
3787 unsignedp = TREE_UNSIGNED (domain);
3789 index = build_decl (VAR_DECL, NULL_TREE, domain);
3791 DECL_RTL (index) = index_r
3792 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
3795 if (TREE_CODE (value) == SAVE_EXPR
3796 && SAVE_EXPR_RTL (value) == 0)
3798 /* Make sure value gets expanded once before the
3799 loop. */
3800 expand_expr (value, const0_rtx, VOIDmode, 0);
3803 store_expr (lo_index, index_r, 0);
3804 loop = expand_start_loop (0);
3806 /* Assign value to element index. */
3807 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3808 size_int (BITS_PER_UNIT));
3809 position = size_binop (MULT_EXPR,
3810 size_binop (MINUS_EXPR, index,
3811 TYPE_MIN_VALUE (domain)),
3813 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3814 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3815 xtarget = change_address (target, mode, addr);
3816 if (TREE_CODE (value) == CONSTRUCTOR)
3817 store_constructor (value, xtarget, cleared);
3819 store_expr (value, xtarget, 0);
3821 expand_exit_loop_if_false (loop,
3822 build (LT_EXPR, integer_type_node,
3825 expand_increment (build (PREINCREMENT_EXPR,
3827 index, integer_one_node), 0, 0);
3829 emit_label (loop_end);
3831 /* Needed by stupid register allocation, to extend the
3832 lifetime of pseudo-regs used by target past the end
3833 of the loop. */
3834 emit_insn (gen_rtx (USE, GET_MODE (target), target));
3837 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
3838 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
3844 index = size_int (i);
3847 index = size_binop (MINUS_EXPR, index,
3848 TYPE_MIN_VALUE (domain));
3849 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3850 size_int (BITS_PER_UNIT));
3851 position = size_binop (MULT_EXPR, index, position);
3852 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3853 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3854 xtarget = change_address (target, mode, addr);
3855 store_expr (value, xtarget, 0);
3860 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
3861 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3863 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3864 store_constructor_field (target, bitsize, bitpos,
3865 mode, value, type, cleared);
3869 /* set constructor assignments */
3870 else if (TREE_CODE (type) == SET_TYPE)
3872 tree elt = CONSTRUCTOR_ELTS (exp);
3873 rtx xtarget = XEXP (target, 0);
3874 int set_word_size = TYPE_ALIGN (type);
3875 int nbytes = int_size_in_bytes (type), nbits;
3876 tree domain = TYPE_DOMAIN (type);
3877 tree domain_min, domain_max, bitlength;
3879 /* The default implementation strategy is to extract the constant
3880 parts of the constructor, use that to initialize the target,
3881 and then "or" in whatever non-constant ranges we need in addition.
3883 If a large set is all zero or all ones, it is
3884 probably better to set it using memset (if available) or bzero.
3885 Also, if a large set has just a single range, it may be better
3886 to first clear the whole set (using bzero/memset) and then set
3887 the bits we want. */
3889 /* Check for all zeros. */
3890 if (elt == NULL_TREE)
3893 clear_storage (target, expr_size (exp),
3894 TYPE_ALIGN (type) / BITS_PER_UNIT);
3898 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3899 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3900 bitlength = size_binop (PLUS_EXPR,
3901 size_binop (MINUS_EXPR, domain_max, domain_min),
3904 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
3906 nbits = TREE_INT_CST_LOW (bitlength);
3908 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
3909 are "complicated" (more than one range), initialize (the
3910 constant parts) by copying from a constant. */
3911 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
3912 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
3914 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3915 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
3916 char *bit_buffer = (char *) alloca (nbits);
3917 HOST_WIDE_INT word = 0;
3920 int offset = 0; /* In bytes from beginning of set. */
3921 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
3924 if (bit_buffer[ibit])
3926 if (BYTES_BIG_ENDIAN)
3927 word |= (1 << (set_word_size - 1 - bit_pos));
3929 word |= 1 << bit_pos;
3932 if (bit_pos >= set_word_size || ibit == nbits)
3934 if (word != 0 || ! cleared)
3936 rtx datum = GEN_INT (word);
3938 /* The assumption here is that it is safe to use
3939 XEXP if the set is multi-word, but not if
3940 it's single-word. */
3941 if (GET_CODE (target) == MEM)
3943 to_rtx = plus_constant (XEXP (target, 0), offset);
3944 to_rtx = change_address (target, mode, to_rtx);
3946 else if (offset == 0)
3950 emit_move_insn (to_rtx, datum);
3956 offset += set_word_size / BITS_PER_UNIT;
3962 /* Don't bother clearing storage if the set is all ones. */
3963 if (TREE_CHAIN (elt) != NULL_TREE
3964 || (TREE_PURPOSE (elt) == NULL_TREE
3966 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
3967 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
3968 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
3969 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
3971 clear_storage (target, expr_size (exp),
3972 TYPE_ALIGN (type) / BITS_PER_UNIT);
3975 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
3977 /* start of range of element or NULL */
3978 tree startbit = TREE_PURPOSE (elt);
3979 /* end of range of element, or element value */
3980 tree endbit = TREE_VALUE (elt);
3981 HOST_WIDE_INT startb, endb;
3982 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
3984 bitlength_rtx = expand_expr (bitlength,
3985 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
3987 /* handle non-range tuple element like [ expr ] */
3988 if (startbit == NULL_TREE)
3990 startbit = save_expr (endbit);
3993 startbit = convert (sizetype, startbit);
3994 endbit = convert (sizetype, endbit);
3995 if (! integer_zerop (domain_min))
3997 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
3998 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4000 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4001 EXPAND_CONST_ADDRESS);
4002 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4003 EXPAND_CONST_ADDRESS);
4007 targetx = assign_stack_temp (GET_MODE (target),
4008 GET_MODE_SIZE (GET_MODE (target)),
4010 emit_move_insn (targetx, target);
4012 else if (GET_CODE (target) == MEM)
4017 #ifdef TARGET_MEM_FUNCTIONS
4018 /* Optimization: If startbit and endbit are
4019 constants divisible by BITS_PER_UNIT,
4020 call memset instead. */
4021 if (TREE_CODE (startbit) == INTEGER_CST
4022 && TREE_CODE (endbit) == INTEGER_CST
4023 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4024 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4026 emit_library_call (memset_libfunc, 0,
4028 plus_constant (XEXP (targetx, 0),
4029 startb / BITS_PER_UNIT),
4031 constm1_rtx, TYPE_MODE (integer_type_node),
4032 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4033 TYPE_MODE (sizetype));
4038 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
4039 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4040 bitlength_rtx, TYPE_MODE (sizetype),
4041 startbit_rtx, TYPE_MODE (sizetype),
4042 endbit_rtx, TYPE_MODE (sizetype));
4045 emit_move_insn (target, targetx);
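/* Illustrative sketch, not part of the original source: expanding an
   initializer such as `struct s v = { 1, 0, 0, 0 };'.  `ctor' is the
   CONSTRUCTOR tree and `v_rtx' the DECL_RTL of `v'; both names are
   hypothetical.  */
#if 0
{
  tree ctor;
  rtx v_rtx;

  /* CLEARED == 0: the routine decides for itself (via mostly_zeros_p)
     whether to clear_storage the whole object first.  */
  store_constructor (ctor, v_rtx, 0);
}
#endif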
4053 /* Store the value of EXP (an expression tree)
4054 into a subfield of TARGET which has mode MODE and occupies
4055 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4056 If MODE is VOIDmode, it means that we are storing into a bit-field.
4058 If VALUE_MODE is VOIDmode, return nothing in particular.
4059 UNSIGNEDP is not used in this case.
4061 Otherwise, return an rtx for the value stored. This rtx
4062 has mode VALUE_MODE if that is convenient to do.
4063 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4065 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4066 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
4069 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4070 unsignedp, align, total_size)
4072 int bitsize, bitpos;
4073 enum machine_mode mode;
4075 enum machine_mode value_mode;
4080 HOST_WIDE_INT width_mask = 0;
4082 if (bitsize < HOST_BITS_PER_WIDE_INT)
4083 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4085 /* If we are storing into an unaligned field of an aligned union that is
4086 in a register, we may have the mode of TARGET being an integer mode but
4087 MODE == BLKmode. In that case, get an aligned object whose size and
4088 alignment are the same as TARGET and store TARGET into it (we can avoid
4089 the store if the field being stored is the entire width of TARGET). Then
4090 call ourselves recursively to store the field into a BLKmode version of
4091 that object. Finally, load from the object into TARGET. This is not
4092 very efficient in general, but should only be slightly more expensive
4093 than the otherwise-required unaligned accesses. Perhaps this can be
4094 cleaned up later. */
4097 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4099 rtx object = assign_stack_temp (GET_MODE (target),
4100 GET_MODE_SIZE (GET_MODE (target)), 0);
4101 rtx blk_object = copy_rtx (object);
4103 MEM_IN_STRUCT_P (object) = 1;
4104 MEM_IN_STRUCT_P (blk_object) = 1;
4105 PUT_MODE (blk_object, BLKmode);
4107 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4108 emit_move_insn (object, target);
4110 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4113 /* Even though we aren't returning target, we need to
4114 give it the updated value. */
4115 emit_move_insn (target, object);
4120 /* If the structure is in a register or if the component
4121 is a bit field, we cannot use addressing to access it.
4122 Use bit-field techniques or SUBREG to store in it. */
4124 if (mode == VOIDmode
4125 || (mode != BLKmode && ! direct_store[(int) mode])
4126 || GET_CODE (target) == REG
4127 || GET_CODE (target) == SUBREG
4128 /* If the field isn't aligned enough to store as an ordinary memref,
4129 store it as a bit field. */
4130 || (SLOW_UNALIGNED_ACCESS
4131 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4132 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4134 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4136 /* If BITSIZE is narrower than the size of the type of EXP
4137 we will be narrowing TEMP. Normally, what's wanted are the
4138 low-order bits. However, if EXP's type is a record and this is
4139 a big-endian machine, we want the upper BITSIZE bits. */
4140 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4141 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4142 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4143 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4144 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4148 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4149 MODE. */
4150 if (mode != VOIDmode && mode != BLKmode
4151 && mode != TYPE_MODE (TREE_TYPE (exp)))
4152 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4154 /* If the modes of TARGET and TEMP are both BLKmode, both
4155 must be in memory and BITPOS must be aligned on a byte
4156 boundary. If so, we simply do a block copy. */
4157 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4159 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4160 || bitpos % BITS_PER_UNIT != 0)
4163 target = change_address (target, VOIDmode,
4164 plus_constant (XEXP (target, 0),
4165 bitpos / BITS_PER_UNIT));
4167 emit_block_move (target, temp,
4168 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4172 return value_mode == VOIDmode ? const0_rtx : target;
4175 /* Store the value in the bitfield. */
4176 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4177 if (value_mode != VOIDmode)
4179 /* The caller wants an rtx for the value. */
4180 /* If possible, avoid refetching from the bitfield itself. */
4182 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4185 enum machine_mode tmode;
4188 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4189 tmode = GET_MODE (temp);
4190 if (tmode == VOIDmode)
4192 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4193 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4194 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4196 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4197 NULL_RTX, value_mode, 0, align,
4204 rtx addr = XEXP (target, 0);
4207 /* If a value is wanted, it must be the lhs;
4208 so make the address stable for multiple use. */
4210 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4211 && ! CONSTANT_ADDRESS_P (addr)
4212 /* A frame-pointer reference is already stable. */
4213 && ! (GET_CODE (addr) == PLUS
4214 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4215 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4216 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4217 addr = copy_to_reg (addr);
4219 /* Now build a reference to just the desired component. */
4221 to_rtx = copy_rtx (change_address (target, mode,
4222 plus_constant (addr,
4224 / BITS_PER_UNIT))));
4225 MEM_IN_STRUCT_P (to_rtx) = 1;
4227 return store_expr (exp, to_rtx, value_mode != VOIDmode);
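/* Illustrative sketch, not part of the original source: storing a
   value into a 5-bit field that begins 3 bits into a 4-byte,
   4-byte-aligned structure.  `struct_rtx' and `val' are hypothetical. */
#if 0
{
  rtx struct_rtx;	/* MEM for the containing structure */
  tree val;		/* value to store */

  /* MODE == VOIDmode marks a bit-field store; VALUE_MODE == VOIDmode
     says no rtx result is wanted.  */
  store_field (struct_rtx, 5, 3, VOIDmode, val, VOIDmode, 0, 4, 4);
}
#endif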
4231 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4232 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4233 ARRAY_REFs and find the ultimate containing object, which we return.
4235 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4236 bit position, and *PUNSIGNEDP to the signedness of the field.
4237 If the position of the field is variable, we store a tree
4238 giving the variable offset (in units) in *POFFSET.
4239 This offset is in addition to the bit position.
4240 If the position is not variable, we store 0 in *POFFSET.
4241 We set *PALIGNMENT to the alignment in bytes of the address that will be
4242 computed. This is the alignment of the thing we return if *POFFSET
4243 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4245 If any of the extraction expressions is volatile,
4246 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4248 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4249 is a mode that can be used to access the field. In that case, *PBITSIZE
4250 is redundant.
4252 If the field describes a variable-sized object, *PMODE is set to
4253 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4254 this case, but the address of the object can be found. */
4257 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4258 punsignedp, pvolatilep, palignment)
4263 enum machine_mode *pmode;
4268 tree orig_exp = exp;
4270 enum machine_mode mode = VOIDmode;
4271 tree offset = integer_zero_node;
4272 int alignment = BIGGEST_ALIGNMENT;
4274 if (TREE_CODE (exp) == COMPONENT_REF)
4276 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4277 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4278 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4279 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4281 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4283 size_tree = TREE_OPERAND (exp, 1);
4284 *punsignedp = TREE_UNSIGNED (exp);
4288 mode = TYPE_MODE (TREE_TYPE (exp));
4289 *pbitsize = GET_MODE_BITSIZE (mode);
4290 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4295 if (TREE_CODE (size_tree) != INTEGER_CST)
4296 mode = BLKmode, *pbitsize = -1;
4297 else
4298 *pbitsize = TREE_INT_CST_LOW (size_tree);
4301 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4302 and find the ultimate containing object. */
4304 while (1)
4308 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4310 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4311 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4312 : TREE_OPERAND (exp, 2));
4313 tree constant = integer_zero_node, var = pos;
4315 /* If this field hasn't been filled in yet, don't go
4316 past it. This should only happen when folding expressions
4317 made during type construction. */
4321 /* Assume here that the offset is a multiple of a unit.
4322 If not, there should be an explicitly added constant. */
4323 if (TREE_CODE (pos) == PLUS_EXPR
4324 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4325 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4326 else if (TREE_CODE (pos) == INTEGER_CST)
4327 constant = pos, var = integer_zero_node;
4329 *pbitpos += TREE_INT_CST_LOW (constant);
4330 offset = size_binop (PLUS_EXPR, offset,
4331 size_binop (EXACT_DIV_EXPR, var,
4332 size_int (BITS_PER_UNIT)));
4335 else if (TREE_CODE (exp) == ARRAY_REF)
4337 /* This code is based on the code in case ARRAY_REF in expand_expr
4338 below. We assume here that the size of an array element is
4339 always an integral multiple of BITS_PER_UNIT. */
4341 tree index = TREE_OPERAND (exp, 1);
4342 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4343 tree low_bound
4344 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4345 tree index_type = TREE_TYPE (index);
4347 if (! integer_zerop (low_bound))
4348 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
4350 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4352 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4353 index);
4354 index_type = TREE_TYPE (index);
4357 index = fold (build (MULT_EXPR, index_type, index,
4358 convert (index_type,
4359 TYPE_SIZE (TREE_TYPE (exp)))));
4361 if (TREE_CODE (index) == INTEGER_CST
4362 && TREE_INT_CST_HIGH (index) == 0)
4363 *pbitpos += TREE_INT_CST_LOW (index);
4364 else
4365 offset = size_binop (PLUS_EXPR, offset,
4366 size_binop (FLOOR_DIV_EXPR, index,
4367 size_int (BITS_PER_UNIT)));
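/* Worked example (illustrative, added): for `a[5]' with 4-byte array
   elements, the MULT_EXPR above folds to the constant 160 bits, so it
   is added into *PBITPOS; for a variable subscript `a[i]' the scaled
   index is not an INTEGER_CST and instead accumulates into OFFSET, in
   units (bytes), via the FLOOR_DIV_EXPR by BITS_PER_UNIT.  */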
4369 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4370 && ! ((TREE_CODE (exp) == NOP_EXPR
4371 || TREE_CODE (exp) == CONVERT_EXPR)
4372 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4373 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4374 != UNION_TYPE)
4375 && (TYPE_MODE (TREE_TYPE (exp))
4376 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4377 break;
4379 /* If any reference in the chain is volatile, the effect is volatile. */
4380 if (TREE_THIS_VOLATILE (exp))
4381 *pvolatilep = 1;
4383 /* If the offset is non-constant already, then we can't assume any
4384 alignment more than the alignment here. */
4385 if (! integer_zerop (offset))
4386 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4388 exp = TREE_OPERAND (exp, 0);
4391 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4392 alignment = MIN (alignment, DECL_ALIGN (exp));
4393 else if (TREE_TYPE (exp) != 0)
4394 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4396 if (integer_zerop (offset))
4397 offset = 0;
4399 if (offset != 0 && contains_placeholder_p (offset))
4400 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4404 *palignment = alignment / BITS_PER_UNIT;
4405 return exp;
4408 /* Subroutine of expand_expr: compute memory_usage from modifier. */
4409 static enum memory_use_mode
4410 get_memory_usage_from_modifier (modifier)
4411 enum expand_modifier modifier;
4416 return MEMORY_USE_RO;
4418 case EXPAND_MEMORY_USE_WO:
4419 return MEMORY_USE_WO;
4421 case EXPAND_MEMORY_USE_RW:
4422 return MEMORY_USE_RW;
4424 case EXPAND_INITIALIZER:
4425 case EXPAND_MEMORY_USE_DONT:
4427 case EXPAND_CONST_ADDRESS:
4428 return MEMORY_USE_DONT;
4429 case EXPAND_MEMORY_USE_BAD:
4430 default:
4431 abort ();
4435 /* Given an rtx VALUE that may contain additions and multiplications,
4436 return an equivalent value that just refers to a register or memory.
4437 This is done by generating instructions to perform the arithmetic
4438 and returning a pseudo-register containing the value.
4440 The returned value may be a REG, SUBREG, MEM or constant. */
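/* Illustrative example (added): given VALUE == (plus (reg 100)
   (const_int 4)) and TARGET == 0, force_operand emits an add insn and
   returns a pseudo register holding the sum, so the caller may use the
   result wherever a general operand is required.  */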
4442 rtx
4443 force_operand (value, target)
4444 register rtx value, target;
4446 register optab binoptab = 0;
4447 /* Use a temporary to force order of execution of calls to
4448 `force_operand'. */
4449 rtx tmp;
4450 register rtx op2;
4451 /* Use subtarget as the target for operand 0 of a binary operation. */
4452 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4454 if (GET_CODE (value) == PLUS)
4455 binoptab = add_optab;
4456 else if (GET_CODE (value) == MINUS)
4457 binoptab = sub_optab;
4458 else if (GET_CODE (value) == MULT)
4460 op2 = XEXP (value, 1);
4461 if (!CONSTANT_P (op2)
4462 && !(GET_CODE (op2) == REG && op2 != subtarget))
4463 subtarget = 0;
4464 tmp = force_operand (XEXP (value, 0), subtarget);
4465 return expand_mult (GET_MODE (value), tmp,
4466 force_operand (op2, NULL_RTX),
4472 op2 = XEXP (value, 1);
4473 if (!CONSTANT_P (op2)
4474 && !(GET_CODE (op2) == REG && op2 != subtarget))
4475 subtarget = 0;
4476 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4478 binoptab = add_optab;
4479 op2 = negate_rtx (GET_MODE (value), op2);
4482 /* Check for an addition with OP2 a constant integer and our first
4483 operand a PLUS of a virtual register and something else. In that
4484 case, we want to emit the sum of the virtual register and the
4485 constant first and then add the other value. This allows virtual
4486 register instantiation to simply modify the constant rather than
4487 creating another one around this addition. */
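/* For instance (illustrative, added): with VALUE ==
   (plus (plus (reg virtual-stack-vars) (reg 101)) (const_int 4))
   we emit virtual-stack-vars + 4 first and then add (reg 101), so
   instantiation can later fold the 4 into its own offset instead of
   needing a separate add.  */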
4488 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4489 && GET_CODE (XEXP (value, 0)) == PLUS
4490 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4491 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4492 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4494 rtx temp = expand_binop (GET_MODE (value), binoptab,
4495 XEXP (XEXP (value, 0), 0), op2,
4496 subtarget, 0, OPTAB_LIB_WIDEN);
4497 return expand_binop (GET_MODE (value), binoptab, temp,
4498 force_operand (XEXP (XEXP (value, 0), 1), 0),
4499 target, 0, OPTAB_LIB_WIDEN);
4502 tmp = force_operand (XEXP (value, 0), subtarget);
4503 return expand_binop (GET_MODE (value), binoptab, tmp,
4504 force_operand (op2, NULL_RTX),
4505 target, 0, OPTAB_LIB_WIDEN);
4506 /* We give UNSIGNEDP = 0 to expand_binop
4507 because the only operations we are expanding here are signed ones. */
4512 /* Subroutine of expand_expr:
4513 save the non-copied parts (LIST) of an expr (LHS), and return a list
4514 which can restore these values to their previous values,
4515 should something modify their storage. */
4517 static tree
4518 save_noncopied_parts (lhs, list)
4519 tree lhs;
4520 tree list;
4522 tree tail;
4523 tree parts = 0;
4525 for (tail = list; tail; tail = TREE_CHAIN (tail))
4526 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4527 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
4528 else
4530 tree part = TREE_VALUE (tail);
4531 tree part_type = TREE_TYPE (part);
4532 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
4533 rtx target = assign_temp (part_type, 0, 1, 1);
4534 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
4535 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
4536 parts = tree_cons (to_be_saved,
4537 build (RTL_EXPR, part_type, NULL_TREE,
4538 (tree) target),
4539 parts);
4540 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
4542 return parts;
4545 /* Subroutine of expand_expr:
4546 record the non-copied parts (LIST) of an expr (LHS), and return a list
4547 which specifies the initial values of these parts. */
4549 static tree
4550 init_noncopied_parts (lhs, list)
4551 tree lhs;
4552 tree list;
4554 tree tail;
4555 tree parts = 0;
4557 for (tail = list; tail; tail = TREE_CHAIN (tail))
4558 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4559 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
4560 else
4562 tree part = TREE_VALUE (tail);
4563 tree part_type = TREE_TYPE (part);
4564 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
4565 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
4567 return parts;
4570 /* Subroutine of expand_expr: return nonzero iff there is no way that
4571 EXP can reference X, which is being modified. */
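/* Example (added commentary): when expanding `x = x + f (y)' into a
   target, the caller asks safe_from_p (target, <CALL_EXPR f>); the
   CALL_EXPR case below answers 0 whenever X is a MEM or a hard
   register, since the call may clobber either.  */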
4573 static int
4574 safe_from_p (x, exp)
4575 rtx x;
4576 tree exp;
4578 rtx exp_rtl = 0;
4579 int i, nops;
4581 if (x == 0
4582 /* If EXP has varying size, we MUST use a target since we currently
4583 have no way of allocating temporaries of variable size
4584 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
4585 So we assume here that something at a higher level has prevented a
4586 clash. This is somewhat bogus, but the best we can do. Only
4587 do this when X is BLKmode. */
4588 || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4589 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
4590 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
4591 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
4592 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
4593 != INTEGER_CST)
4594 && GET_MODE (x) == BLKmode))
4595 return 1;
4597 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
4598 find the underlying pseudo. */
4599 if (GET_CODE (x) == SUBREG)
4601 x = SUBREG_REG (x);
4602 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4603 return 0;
4606 /* If X is a location in the outgoing argument area, it is always safe. */
4607 if (GET_CODE (x) == MEM
4608 && (XEXP (x, 0) == virtual_outgoing_args_rtx
4609 || (GET_CODE (XEXP (x, 0)) == PLUS
4610 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
4611 return 1;
4613 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4615 case 'd':
4616 exp_rtl = DECL_RTL (exp);
4617 break;
4622 case 'x':
4623 if (TREE_CODE (exp) == TREE_LIST)
4624 return ((TREE_VALUE (exp) == 0
4625 || safe_from_p (x, TREE_VALUE (exp)))
4626 && (TREE_CHAIN (exp) == 0
4627 || safe_from_p (x, TREE_CHAIN (exp))));
4628 else
4629 return 0;
4631 case '1':
4632 return safe_from_p (x, TREE_OPERAND (exp, 0));
4634 case '2':
4635 case '<':
4636 return (safe_from_p (x, TREE_OPERAND (exp, 0))
4637 && safe_from_p (x, TREE_OPERAND (exp, 1)));
4639 case 'e':
4640 case 'r':
4641 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
4642 the expression. If it is set, we conflict iff we are that rtx or
4643 both are in memory. Otherwise, we check all operands of the
4644 expression recursively. */
4646 switch (TREE_CODE (exp))
4648 case ADDR_EXPR:
4649 return (staticp (TREE_OPERAND (exp, 0))
4650 || safe_from_p (x, TREE_OPERAND (exp, 0)));
4652 case INDIRECT_REF:
4653 if (GET_CODE (x) == MEM)
4654 return 0;
4657 case CALL_EXPR:
4658 exp_rtl = CALL_EXPR_RTL (exp);
4659 if (exp_rtl == 0)
4661 /* Assume that the call will clobber all hard registers and
4662 all of memory. */
4663 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4664 || GET_CODE (x) == MEM)
4665 return 0;
4670 case RTL_EXPR:
4671 /* If a sequence exists, we would have to scan every instruction
4672 in the sequence to see if it was safe. This is probably not
4673 worthwhile. */
4674 if (RTL_EXPR_SEQUENCE (exp))
4675 return 0;
4677 exp_rtl = RTL_EXPR_RTL (exp);
4680 case WITH_CLEANUP_EXPR:
4681 exp_rtl = RTL_EXPR_RTL (exp);
4684 case CLEANUP_POINT_EXPR:
4685 return safe_from_p (x, TREE_OPERAND (exp, 0));
4687 case SAVE_EXPR:
4688 exp_rtl = SAVE_EXPR_RTL (exp);
4689 break;
4691 case BIND_EXPR:
4692 /* The only operand we look at is operand 1. The rest aren't
4693 part of the expression. */
4694 return safe_from_p (x, TREE_OPERAND (exp, 1));
4696 case METHOD_CALL_EXPR:
4697 /* This takes an rtx argument, but shouldn't appear here. */
4698 abort ();
4701 /* If we have an rtx, we do not need to scan our operands. */
4705 nops = tree_code_length[(int) TREE_CODE (exp)];
4706 for (i = 0; i < nops; i++)
4707 if (TREE_OPERAND (exp, i) != 0
4708 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
4709 return 0;
4712 /* If we have an rtl, find any enclosed object. Then see if we conflict
4713 with it. */
4716 if (GET_CODE (exp_rtl) == SUBREG)
4718 exp_rtl = SUBREG_REG (exp_rtl);
4719 if (GET_CODE (exp_rtl) == REG
4720 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
4721 return 0;
4724 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
4725 are memory and EXP is not readonly. */
4726 return ! (rtx_equal_p (x, exp_rtl)
4727 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
4728 && ! TREE_READONLY (exp)));
4731 /* If we reach here, it is safe. */
4732 return 1;
4735 /* Subroutine of expand_expr: return nonzero iff EXP is an
4736 expression whose type is statically determinable. */
4738 static int
4739 fixed_type_p (exp)
4740 tree exp;
4742 if (TREE_CODE (exp) == PARM_DECL
4743 || TREE_CODE (exp) == VAR_DECL
4744 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
4745 || TREE_CODE (exp) == COMPONENT_REF
4746 || TREE_CODE (exp) == ARRAY_REF)
4747 return 1;
4748 return 0;
4751 /* Subroutine of expand_expr: return rtx if EXP is a
4752 variable or parameter; else return 0. */
4754 static rtx
4755 var_rtx (exp)
4756 tree exp;
4759 switch (TREE_CODE (exp))
4761 case PARM_DECL:
4762 case VAR_DECL:
4763 return DECL_RTL (exp);
4764 default:
4765 return 0;
4769 /* expand_expr: generate code for computing expression EXP.
4770 An rtx for the computed value is returned. The value is never null.
4771 In the case of a void EXP, const0_rtx is returned.
4773 The value may be stored in TARGET if TARGET is nonzero.
4774 TARGET is just a suggestion; callers must assume that
4775 the rtx returned may not be the same as TARGET.
4777 If TARGET is CONST0_RTX, it means that the value will be ignored.
4779 If TMODE is not VOIDmode, it suggests generating the
4780 result in mode TMODE. But this is done only when convenient.
4781 Otherwise, TMODE is ignored and the value is generated in its natural mode.
4782 TMODE is just a suggestion; callers must assume that
4783 the rtx returned may not have mode TMODE.
4785 Note that TARGET may have neither TMODE nor MODE. In that case, it
4786 probably will not be used.
4788 If MODIFIER is EXPAND_SUM then when EXP is an addition
4789 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4790 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4791 products as above, or REG or MEM, or constant.
4792 Ordinarily in such cases we would output mul or add instructions
4793 and then return a pseudo reg containing the sum.
4795 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4796 it also marks a label as absolutely required (it can't be dead).
4797 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
4798 This is used for outputting expressions used in initializers.
4800 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4801 with a constant address even if that address is not normally legitimate.
4802 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
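/* Example (added): expanding `p + 4' with modifier EXPAND_SUM may
   simply return (plus (reg P) (const_int 4)) for use inside an
   address, whereas the normal modifier would emit the addition and
   return a pseudo register holding the result.  */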
4805 expand_expr (exp, target, tmode, modifier)
4806 register tree exp;
4807 rtx target;
4808 enum machine_mode tmode;
4809 enum expand_modifier modifier;
4811 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4812 This is static so it will be accessible to our recursive callees. */
4813 static tree placeholder_list = 0;
4814 register rtx op0, op1, temp;
4815 tree type = TREE_TYPE (exp);
4816 int unsignedp = TREE_UNSIGNED (type);
4817 register enum machine_mode mode = TYPE_MODE (type);
4818 register enum tree_code code = TREE_CODE (exp);
4820 /* Use subtarget as the target for operand 0 of a binary operation. */
4821 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4822 rtx original_target = target;
4823 /* Maybe defer this until sure not doing bytecode? */
4824 int ignore = (target == const0_rtx
4825 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4826 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4827 || code == COND_EXPR)
4828 && TREE_CODE (type) == VOID_TYPE));
4830 /* Used by check-memory-usage to make modifier read only. */
4831 enum expand_modifier ro_modifier;
4833 /* Make a read-only version of the modifier. */
4834 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
4835 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
4836 ro_modifier = modifier;
4838 ro_modifier = EXPAND_NORMAL;
4840 if (output_bytecode && modifier != EXPAND_INITIALIZER)
4842 bc_expand_expr (exp);
4846 /* Don't use hard regs as subtargets, because the combiner
4847 can only handle pseudo regs. */
4848 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4849 subtarget = 0;
4850 /* Avoid subtargets inside loops,
4851 since they hide some invariant expressions. */
4852 if (preserve_subexpressions_p ())
4853 subtarget = 0;
4855 /* If we are going to ignore this result, we need only do something
4856 if there is a side-effect somewhere in the expression. If there
4857 is, short-circuit the most common cases here. Note that we must
4858 not call expand_expr with anything but const0_rtx in case this
4859 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
4861 if (ignore)
4863 if (! TREE_SIDE_EFFECTS (exp))
4864 return const0_rtx;
4866 /* Ensure we reference a volatile object even if value is ignored. */
4867 if (TREE_THIS_VOLATILE (exp)
4868 && TREE_CODE (exp) != FUNCTION_DECL
4869 && mode != VOIDmode && mode != BLKmode)
4871 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
4872 if (GET_CODE (temp) == MEM)
4873 temp = copy_to_reg (temp);
4874 return const0_rtx;
4877 if (TREE_CODE_CLASS (code) == '1')
4878 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4879 VOIDmode, ro_modifier);
4880 else if (TREE_CODE_CLASS (code) == '2'
4881 || TREE_CODE_CLASS (code) == '<')
4883 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
4884 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
4885 return const0_rtx;
4887 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4888 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4889 /* If the second operand has no side effects, just evaluate
4890 the first. */
4891 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4892 VOIDmode, ro_modifier);
4897 /* If we will do cse, generate all results into pseudo registers
4898 since 1) that allows cse to find more things
4899 and 2) otherwise cse could produce an insn the machine
4900 cannot support. */
4902 if (! cse_not_expected && mode != BLKmode && target
4903 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4904 target = subtarget;
4906 switch (code)
4908 case LABEL_DECL:
4910 tree function = decl_function_context (exp);
4911 /* Handle using a label in a containing function. */
4912 if (function != current_function_decl
4913 && function != inline_function_decl && function != 0)
4915 struct function *p = find_function_data (function);
4916 /* Allocate in the memory associated with the function
4917 that the label is in. */
4918 push_obstacks (p->function_obstack,
4919 p->function_maybepermanent_obstack);
4921 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4922 label_rtx (exp), p->forced_labels);
4925 else if (modifier == EXPAND_INITIALIZER)
4926 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4927 label_rtx (exp), forced_labels);
4928 temp = gen_rtx (MEM, FUNCTION_MODE,
4929 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
4930 if (function != current_function_decl
4931 && function != inline_function_decl && function != 0)
4932 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
4933 return temp;
4935 case PARM_DECL:
4937 if (DECL_RTL (exp) == 0)
4939 error_with_decl (exp, "prior parameter's size depends on `%s'");
4940 return CONST0_RTX (mode);
4943 /* ... fall through ... */
4945 case VAR_DECL:
4946 /* If a static var's type was incomplete when the decl was written,
4947 but the type is complete now, lay out the decl now. */
4948 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4949 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
4951 push_obstacks_nochange ();
4952 end_temporary_allocation ();
4953 layout_decl (exp, 0);
4954 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
4958 /* Only check automatic variables. Currently, function arguments are
4959 not checked (this can be done at compile-time with prototypes).
4960 Aggregates are not checked. */
4961 if (flag_check_memory_usage && code == VAR_DECL
4962 && GET_CODE (DECL_RTL (exp)) == MEM
4963 && DECL_CONTEXT (exp) != NULL_TREE
4964 && ! TREE_STATIC (exp)
4965 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4967 enum memory_use_mode memory_usage;
4968 memory_usage = get_memory_usage_from_modifier (modifier);
4970 if (memory_usage != MEMORY_USE_DONT)
4971 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
4972 XEXP (DECL_RTL (exp), 0), ptr_mode,
4973 GEN_INT (int_size_in_bytes (type)),
4974 TYPE_MODE (sizetype),
4975 GEN_INT (memory_usage), QImode);
4978 /* ... fall through ... */
4980 case FUNCTION_DECL:
4981 case RESULT_DECL:
4982 if (DECL_RTL (exp) == 0)
4983 abort ();
4985 /* Ensure variable marked as used even if it doesn't go through
4986 a parser. If it hasn't been used yet, write out an external
4987 definition. */
4988 if (! TREE_USED (exp))
4990 assemble_external (exp);
4991 TREE_USED (exp) = 1;
4994 /* Show we haven't gotten RTL for this yet. */
4995 temp = 0;
4997 /* Handle variables inherited from containing functions. */
4998 context = decl_function_context (exp);
5000 /* We treat inline_function_decl as an alias for the current function
5001 because that is the inline function whose vars, types, etc.
5002 are being merged into the current function.
5003 See expand_inline_function. */
5005 if (context != 0 && context != current_function_decl
5006 && context != inline_function_decl
5007 /* If var is static, we don't need a static chain to access it. */
5008 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5009 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5013 /* Mark as non-local and addressable. */
5014 DECL_NONLOCAL (exp) = 1;
5015 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5016 abort ();
5017 mark_addressable (exp);
5018 if (GET_CODE (DECL_RTL (exp)) != MEM)
5019 abort ();
5020 addr = XEXP (DECL_RTL (exp), 0);
5021 if (GET_CODE (addr) == MEM)
5022 addr = gen_rtx (MEM, Pmode,
5023 fix_lexical_addr (XEXP (addr, 0), exp));
5024 else
5025 addr = fix_lexical_addr (addr, exp);
5026 temp = change_address (DECL_RTL (exp), mode, addr);
5029 /* This is the case of an array whose size is to be determined
5030 from its initializer, while the initializer is still being parsed.
5031 See expand_decl. */
5033 else if (GET_CODE (DECL_RTL (exp)) == MEM
5034 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5035 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5036 XEXP (DECL_RTL (exp), 0));
5038 /* If DECL_RTL is memory, we are in the normal case and either
5039 the address is not valid or it is not a register and -fforce-addr
5040 is specified, get the address into a register. */
5042 else if (GET_CODE (DECL_RTL (exp)) == MEM
5043 && modifier != EXPAND_CONST_ADDRESS
5044 && modifier != EXPAND_SUM
5045 && modifier != EXPAND_INITIALIZER
5046 && (! memory_address_p (DECL_MODE (exp),
5047 XEXP (DECL_RTL (exp), 0))
5049 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5050 temp = change_address (DECL_RTL (exp), VOIDmode,
5051 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5053 /* If we got something, return it. But first, set the alignment
5054 if the address is a register. */
5056 if (temp != 0)
5057 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5058 mark_reg_pointer (XEXP (temp, 0),
5059 DECL_ALIGN (exp) / BITS_PER_UNIT);
5061 return temp;
5064 /* If the mode of DECL_RTL does not match that of the decl, it
5065 must be a promoted value. We return a SUBREG of the wanted mode,
5066 but mark it so that we know that it was already extended. */
5068 if (GET_CODE (DECL_RTL (exp)) == REG
5069 && GET_MODE (DECL_RTL (exp)) != mode)
5071 /* Get the signedness used for this variable. Ensure we get the
5072 same mode we got when the variable was declared. */
5073 if (GET_MODE (DECL_RTL (exp))
5074 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5075 abort ();
5077 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
5078 SUBREG_PROMOTED_VAR_P (temp) = 1;
5079 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5080 return temp;
5083 return DECL_RTL (exp);
5085 case INTEGER_CST:
5086 return immed_double_const (TREE_INT_CST_LOW (exp),
5087 TREE_INT_CST_HIGH (exp),
5088 mode);
5090 case CONST_DECL:
5091 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5092 EXPAND_MEMORY_USE_BAD);
5094 case REAL_CST:
5095 /* If optimized, generate immediate CONST_DOUBLE
5096 which will be turned into memory by reload if necessary.
5098 We used to force a register so that loop.c could see it. But
5099 this does not allow gen_* patterns to perform optimizations with
5100 the constants. It also produces two insns in cases like "x = 1.0;".
5101 On most machines, floating-point constants are not permitted in
5102 many insns, so we'd end up copying it to a register in any case.
5104 Now, we do the copying in expand_binop, if appropriate. */
5105 return immed_real_const (exp);
5109 if (! TREE_CST_RTL (exp))
5110 output_constant_def (exp);
5112 /* TREE_CST_RTL probably contains a constant address.
5113 On RISC machines where a constant address isn't valid,
5114 make some insns to get that address into a register. */
5115 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5116 && modifier != EXPAND_CONST_ADDRESS
5117 && modifier != EXPAND_INITIALIZER
5118 && modifier != EXPAND_SUM
5119 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5121 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5122 return change_address (TREE_CST_RTL (exp), VOIDmode,
5123 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5124 return TREE_CST_RTL (exp);
5126 case SAVE_EXPR:
5127 context = decl_function_context (exp);
5129 /* If this SAVE_EXPR was at global context, assume we are an
5130 initialization function and move it into our context. */
5131 if (context == 0)
5132 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
5134 /* We treat inline_function_decl as an alias for the current function
5135 because that is the inline function whose vars, types, etc.
5136 are being merged into the current function.
5137 See expand_inline_function. */
5138 if (context == current_function_decl || context == inline_function_decl)
5139 context = 0;
5141 /* If this is non-local, handle it. */
5142 if (context)
5144 /* The following call just exists to abort if the context is
5145 not of a containing function. */
5146 find_function_data (context);
5148 temp = SAVE_EXPR_RTL (exp);
5149 if (temp && GET_CODE (temp) == REG)
5151 put_var_into_stack (exp);
5152 temp = SAVE_EXPR_RTL (exp);
5154 if (temp == 0 || GET_CODE (temp) != MEM)
5155 abort ();
5156 return change_address (temp, mode,
5157 fix_lexical_addr (XEXP (temp, 0), exp));
5159 if (SAVE_EXPR_RTL (exp) == 0)
5161 if (mode == VOIDmode)
5162 temp = const0_rtx;
5163 else
5164 temp = assign_temp (type, 0, 0, 0);
5166 SAVE_EXPR_RTL (exp) = temp;
5167 if (!optimize && GET_CODE (temp) == REG)
5168 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
5169 save_expr_regs);
5171 /* If the mode of TEMP does not match that of the expression, it
5172 must be a promoted value. We pass store_expr a SUBREG of the
5173 wanted mode but mark it so that we know that it was already
5174 extended. Note that `unsignedp' was modified above in
5175 this case. */
5177 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5179 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
5180 SUBREG_PROMOTED_VAR_P (temp) = 1;
5181 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5184 if (temp == const0_rtx)
5185 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5186 EXPAND_MEMORY_USE_BAD);
5187 else
5188 store_expr (TREE_OPERAND (exp, 0), temp, 0);
5191 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5192 must be a promoted value. We return a SUBREG of the wanted mode,
5193 but mark it so that we know that it was already extended. */
5195 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5196 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5198 /* Compute the signedness and make the proper SUBREG. */
5199 promote_mode (type, mode, &unsignedp, 0);
5200 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
5201 SUBREG_PROMOTED_VAR_P (temp) = 1;
5202 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5203 return temp;
5206 return SAVE_EXPR_RTL (exp);
5211 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5212 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5216 case PLACEHOLDER_EXPR:
5217 /* If there is an object on the head of the placeholder list,
5218 see if some object in its references is of type TYPE. For
5219 further information, see tree.def. */
5220 if (placeholder_list)
5222 tree need_type = TYPE_MAIN_VARIANT (type);
5223 tree object = 0;
5224 tree old_list = placeholder_list;
5225 tree elt;
5227 /* See if the object is the type that we want. */
5228 if ((TYPE_MAIN_VARIANT (TREE_TYPE (TREE_PURPOSE (placeholder_list)))
5229 == need_type))
5230 object = TREE_PURPOSE (placeholder_list);
5232 /* Find the innermost reference that is of the type we want. */
5233 for (elt = TREE_PURPOSE (placeholder_list);
5234 elt != 0
5235 && (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5236 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5237 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5238 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e');
5239 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
5240 || TREE_CODE (elt) == COND_EXPR)
5241 ? TREE_OPERAND (elt, 1) : TREE_OPERAND (elt, 0)))
5242 if (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5243 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (elt, 0)))
5244 == need_type))
5246 object = TREE_OPERAND (elt, 0);
5247 break;
5249 if (object != 0)
5252 /* Expand this object skipping the list entries before
5253 it was found in case it is also a PLACEHOLDER_EXPR.
5254 In that case, we want to translate it using subsequent
5255 entries. */
5256 placeholder_list = TREE_CHAIN (placeholder_list);
5257 temp = expand_expr (object, original_target, tmode, ro_modifier);
5258 placeholder_list = old_list;
5259 return temp;
5263 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5264 abort ();
5266 case WITH_RECORD_EXPR:
5267 /* Put the object on the placeholder list, expand our first operand,
5268 and pop the list. */
5269 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5270 placeholder_list);
5271 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
5272 tmode, ro_modifier);
5273 placeholder_list = TREE_CHAIN (placeholder_list);
5274 return target;
5276 case EXIT_EXPR:
5277 expand_exit_loop_if_false (NULL_PTR,
5278 invert_truthvalue (TREE_OPERAND (exp, 0)));
5279 return const0_rtx;
5282 case LOOP_EXPR:
5283 expand_start_loop (1);
5284 expand_expr_stmt (TREE_OPERAND (exp, 0));
5285 expand_end_loop ();
5287 return const0_rtx;
5290 case BIND_EXPR:
5292 tree vars = TREE_OPERAND (exp, 0);
5293 int vars_need_expansion = 0;
5295 /* Need to open a binding contour here because
5296 if there are any cleanups they must be contained here. */
5297 expand_start_bindings (0);
5299 /* Mark the corresponding BLOCK for output in its proper place. */
5300 if (TREE_OPERAND (exp, 2) != 0
5301 && ! TREE_USED (TREE_OPERAND (exp, 2)))
5302 insert_block (TREE_OPERAND (exp, 2));
5304 /* If VARS have not yet been expanded, expand them now. */
5305 while (vars)
5307 if (DECL_RTL (vars) == 0)
5309 vars_need_expansion = 1;
5310 expand_decl (vars);
5312 expand_decl_init (vars);
5313 vars = TREE_CHAIN (vars);
5316 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
5318 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
5320 return temp;
5323 case RTL_EXPR:
5324 if (RTL_EXPR_SEQUENCE (exp))
5326 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
5327 abort ();
5328 emit_insns (RTL_EXPR_SEQUENCE (exp));
5329 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
5331 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
5332 free_temps_for_rtl_expr (exp);
5333 return RTL_EXPR_RTL (exp);
5335 case CONSTRUCTOR:
5336 /* If we don't need the result, just ensure we evaluate any
5337 subexpressions. */
5338 if (ignore)
5340 tree elt;
5341 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
5342 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
5343 EXPAND_MEMORY_USE_BAD);
5344 return const0_rtx;
5347 /* All elts simple constants => refer to a constant in memory. But
5348 if this is a non-BLKmode mode, let it store a field at a time
5349 since that should make a CONST_INT or CONST_DOUBLE when we
5350 fold. Likewise, if we have a target we can use, it is best to
5351 store directly into the target unless the type is large enough
5352 that memcpy will be used. If we are making an initializer and
5353 all operands are constant, put it in memory as well. */
5354 else if ((TREE_STATIC (exp)
5355 && ((mode == BLKmode
5356 && ! (target != 0 && safe_from_p (target, exp)))
5357 || TREE_ADDRESSABLE (exp)
5358 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5359 && (move_by_pieces_ninsns
5360 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
5361 TYPE_ALIGN (type) / BITS_PER_UNIT)
5362 > MOVE_RATIO)
5363 && ! mostly_zeros_p (exp))))
5364 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
5366 rtx constructor = output_constant_def (exp);
5367 if (modifier != EXPAND_CONST_ADDRESS
5368 && modifier != EXPAND_INITIALIZER
5369 && modifier != EXPAND_SUM
5370 && (! memory_address_p (GET_MODE (constructor),
5371 XEXP (constructor, 0))
5373 && GET_CODE (XEXP (constructor, 0)) != REG)))
5374 constructor = change_address (constructor, VOIDmode,
5375 XEXP (constructor, 0));
5376 return constructor;
5381 /* Handle calls that pass values in multiple non-contiguous
5382 locations. The Irix 6 ABI has examples of this. */
5383 if (target == 0 || ! safe_from_p (target, exp)
5384 || GET_CODE (target) == PARALLEL)
5386 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
5387 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5388 else
5389 target = assign_temp (type, 0, 1, 1);
5392 if (TREE_READONLY (exp))
5394 if (GET_CODE (target) == MEM)
5395 target = copy_rtx (target);
5397 RTX_UNCHANGING_P (target) = 1;
5400 store_constructor (exp, target, 0);
5401 return target;
5404 case INDIRECT_REF:
5406 tree exp1 = TREE_OPERAND (exp, 0);
5407 tree exp2;
5409 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
5410 op0 = memory_address (mode, op0);
5412 if (flag_check_memory_usage && !AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5414 enum memory_use_mode memory_usage;
5415 memory_usage = get_memory_usage_from_modifier (modifier);
5417 if (memory_usage != MEMORY_USE_DONT)
5418 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5419 op0, ptr_mode,
5420 GEN_INT (int_size_in_bytes (type)),
5421 TYPE_MODE (sizetype),
5422 GEN_INT (memory_usage), QImode);
5425 temp = gen_rtx (MEM, mode, op0);
5426 /* If address was computed by addition,
5427 mark this as an element of an aggregate. */
5428 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5429 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
5430 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
5431 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
5432 || (TREE_CODE (exp1) == ADDR_EXPR
5433 && (exp2 = TREE_OPERAND (exp1, 0))
5434 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
5435 MEM_IN_STRUCT_P (temp) = 1;
5436 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
5438 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5439 here, because, in C and C++, the fact that a location is accessed
5440 through a pointer to const does not mean that the value there can
5441 never change. Languages where it can never change should
5442 also set TREE_STATIC. */
5443 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
5444 return temp;
5447 case ARRAY_REF:
5448 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
5449 abort ();
5452 tree array = TREE_OPERAND (exp, 0);
5453 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5454 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5455 tree index = TREE_OPERAND (exp, 1);
5456 tree index_type = TREE_TYPE (index);
5459 /* Optimize the special case of a zero lower bound.
5461 We convert the low_bound to sizetype to avoid some problems
5462 with constant folding. (E.g. suppose the lower bound is 1,
5463 and its mode is QI. Without the conversion, (ARRAY
5464 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5465 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5467 But sizetype isn't quite right either (especially if
5468 the lowbound is negative). FIXME */
5470 if (! integer_zerop (low_bound))
5471 index = fold (build (MINUS_EXPR, index_type, index,
5472 convert (sizetype, low_bound)));
5474 /* Fold an expression like: "foo"[2].
5475 This is not done in fold so it won't happen inside &.
5476 Don't fold if this is for wide characters since it's too
5477 difficult to do correctly and this is a very rare case. */
5479 if (TREE_CODE (array) == STRING_CST
5480 && TREE_CODE (index) == INTEGER_CST
5481 && !TREE_INT_CST_HIGH (index)
5482 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
5483 && GET_MODE_CLASS (mode) == MODE_INT
5484 && GET_MODE_SIZE (mode) == 1)
5485 return GEN_INT (TREE_STRING_POINTER (array)[i]);
5487 /* If this is a constant index into a constant array,
5488 just get the value from the array. Handle both the cases when
5489 we have an explicit constructor and when our operand is a variable
5490 that was declared const. */
5492 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
5494 if (TREE_CODE (index) == INTEGER_CST
5495 && TREE_INT_CST_HIGH (index) == 0)
5497 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
5499 i = TREE_INT_CST_LOW (index);
5500 while (elem && i--)
5501 elem = TREE_CHAIN (elem);
5502 if (elem)
5503 return expand_expr (fold (TREE_VALUE (elem)), target,
5504 tmode, ro_modifier);
5508 else if (optimize >= 1
5509 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
5510 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
5511 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
5513 if (TREE_CODE (index) == INTEGER_CST)
5515 tree init = DECL_INITIAL (array);
5517 i = TREE_INT_CST_LOW (index);
5518 if (TREE_CODE (init) == CONSTRUCTOR)
5520 tree elem = CONSTRUCTOR_ELTS (init);
5522 while (elem
5523 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
5524 elem = TREE_CHAIN (elem);
5525 if (elem)
5526 return expand_expr (fold (TREE_VALUE (elem)), target,
5527 tmode, ro_modifier);
5529 else if (TREE_CODE (init) == STRING_CST
5530 && TREE_INT_CST_HIGH (index) == 0
5531 && (TREE_INT_CST_LOW (index)
5532 < TREE_STRING_LENGTH (init)))
5533 return GEN_INT
5534 (TREE_STRING_POINTER
5535 (init)[TREE_INT_CST_LOW (index)]));
5540 /* ... fall through ... */
5542 case COMPONENT_REF:
5543 case BIT_FIELD_REF:
5544 /* If the operand is a CONSTRUCTOR, we can just extract the
5545 appropriate field if it is present. Don't do this if we have
5546 already written the data since we want to refer to that copy
5547 and varasm.c assumes that's what we'll do. */
5548 if (code != ARRAY_REF
5549 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
5550 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
5554 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
5555 elt = TREE_CHAIN (elt))
5556 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
5557 /* We can normally use the value of the field in the
5558 CONSTRUCTOR. However, if this is a bitfield in
5559 an integral mode that we can fit in a HOST_WIDE_INT,
5560 we must mask only the number of bits in the bitfield,
5561 since this is done implicitly by the constructor. If
5562 the bitfield does not meet either of those conditions,
5563 we can't do this optimization. */
5564 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
5565 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
5567 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
5568 <= HOST_BITS_PER_WIDE_INT))))
5570 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
5571 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
5573 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
5574 enum machine_mode imode
5575 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
5577 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
5579 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
5580 op0 = expand_and (op0, op1, target);
5582 else
5584 tree count
5585 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize, 0);
5587 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
5588 target, 0);
5589 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
5590 target, 0);
5592 return op0;
5599 enum machine_mode mode1;
5600 int bitsize;
5601 int bitpos;
5602 tree offset;
5603 int volatilep = 0;
5604 int alignment;
5605 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
5606 &mode1, &unsignedp, &volatilep,
5607 &alignment);
5609 /* If we got back the original object, something is wrong. Perhaps
5610 we are evaluating an expression too early. In any event, don't
5611 infinitely recurse. */
5612 if (tem == exp)
5613 abort ();
5615 /* If TEM's type is a union of variable size, pass TARGET to the inner
5616 computation, since it will need a temporary and TARGET is known
5617 to be safe to use. This occurs in unchecked conversion in Ada. */
5619 op0 = expand_expr (tem,
5620 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
5621 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
5622 != INTEGER_CST)
5623 ? target : NULL_RTX),
5624 VOIDmode,
5625 modifier == EXPAND_INITIALIZER ? modifier : 0);
5627 /* If this is a constant, put it into a register if it is a
5628 legitimate constant and memory if it isn't. */
5629 if (CONSTANT_P (op0))
5631 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
5632 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
5633 op0 = force_reg (mode, op0);
5635 op0 = validize_mem (force_const_mem (mode, op0));
5638 if (offset != 0)
5640 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5642 if (GET_CODE (op0) != MEM)
5643 abort ();
5644 op0 = change_address (op0, VOIDmode,
5645 gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
5646 force_reg (ptr_mode, offset_rtx)));
5649 /* Don't forget about volatility even if this is a bitfield. */
5650 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
5652 op0 = copy_rtx (op0);
5653 MEM_VOLATILE_P (op0) = 1;
5656 /* Check the access. */
5657 if (flag_check_memory_usage && GET_CODE (op0) == MEM)
5659 enum memory_use_mode memory_usage;
5660 memory_usage = get_memory_usage_from_modifier (modifier);
5662 if (memory_usage != MEMORY_USE_DONT)
5664 rtx to;
5665 int size;
5667 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
5668 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
5670 /* Check the access right of the pointer. */
5671 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5672 to, ptr_mode,
5673 GEN_INT (size / BITS_PER_UNIT),
5674 TYPE_MODE (sizetype),
5675 GEN_INT (memory_usage), QImode);
5679 /* In cases where an aligned union has an unaligned object
5680 as a field, we might be extracting a BLKmode value from
5681 an integer-mode (e.g., SImode) object. Handle this case
5682 by doing the extract into an object as wide as the field
5683 (which we know to be the width of a basic mode), then
5684 storing into memory, and changing the mode to BLKmode.
5685 If we ultimately want the address (EXPAND_CONST_ADDRESS or
5686 EXPAND_INITIALIZER), then we must not copy to a temporary. */
5687 if (mode1 == VOIDmode
5688 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5689 || (modifier != EXPAND_CONST_ADDRESS
5690 && modifier != EXPAND_INITIALIZER
5691 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
5692 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5693 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5694 /* If the field isn't aligned enough to fetch as a memref,
5695 fetch it as a bit field. */
5696 || (SLOW_UNALIGNED_ACCESS
5697 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
5698 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
5700 enum machine_mode ext_mode = mode;
5702 if (ext_mode == BLKmode)
5703 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
5705 if (ext_mode == BLKmode)
5707 /* In this case, BITPOS must start at a byte boundary and
5708 TARGET, if specified, must be a MEM. */
5709 if (GET_CODE (op0) != MEM
5710 || (target != 0 && GET_CODE (target) != MEM)
5711 || bitpos % BITS_PER_UNIT != 0)
5712 abort ();
5714 op0 = change_address (op0, VOIDmode,
5715 plus_constant (XEXP (op0, 0),
5716 bitpos / BITS_PER_UNIT));
5717 if (target == 0)
5718 target = assign_temp (type, 0, 1, 1);
5720 emit_block_move (target, op0,
5721 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5722 / BITS_PER_UNIT), 1);
5724 return target;
5728 op0 = validize_mem (op0);
5730 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
5731 mark_reg_pointer (XEXP (op0, 0), alignment);
5733 op0 = extract_bit_field (op0, bitsize, bitpos,
5734 unsignedp, target, ext_mode, ext_mode,
5735 alignment,
5736 int_size_in_bytes (TREE_TYPE (tem)));
5738 /* If the result is a record type and BITSIZE is narrower than
5739 the mode of OP0, an integral mode, and this is a big endian
5740 machine, we must put the field into the high-order bits. */
5741 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
5742 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
5743 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
5744 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
5745 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
5746 - bitsize),
5747 op0, 1);
5749 if (mode == BLKmode)
5751 rtx new = assign_stack_temp (ext_mode,
5752 bitsize / BITS_PER_UNIT, 0);
5754 emit_move_insn (new, op0);
5755 op0 = copy_rtx (new);
5756 PUT_MODE (op0, BLKmode);
5757 MEM_IN_STRUCT_P (op0) = 1;
5759 return op0;
5763 /* If the result is BLKmode, use that to access the object
5764 also. */
5765 if (mode == BLKmode)
5766 mode1 = BLKmode;
5768 /* Get a reference to just this component. */
5769 if (modifier == EXPAND_CONST_ADDRESS
5770 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5771 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
5772 (bitpos / BITS_PER_UNIT)));
5774 op0 = change_address (op0, mode1,
5775 plus_constant (XEXP (op0, 0),
5776 (bitpos / BITS_PER_UNIT)));
5777 if (GET_CODE (XEXP (op0, 0)) == REG)
5778 mark_reg_pointer (XEXP (op0, 0), alignment);
5780 MEM_IN_STRUCT_P (op0) = 1;
5781 MEM_VOLATILE_P (op0) |= volatilep;
5782 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
5783 || modifier == EXPAND_CONST_ADDRESS
5784 || modifier == EXPAND_INITIALIZER)
5785 return op0;
5786 else if (target == 0)
5787 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5789 convert_move (target, op0, unsignedp);
5790 return target;
5793 /* Intended for a reference to a buffer of a file-object in Pascal.
5794 But it's not certain that a special tree code will really be
5795 necessary for these. INDIRECT_REF might work for them. */
5796 case BUFFER_REF:
5797 abort ();
5799 case IN_EXPR:
5801 /* Pascal set IN expression.
5803 Algorithm:
5804 rlo = set_low - (set_low%bits_per_word);
5805 the_word = set [ (index - rlo)/bits_per_word ];
5806 bit_index = index % bits_per_word;
5807 bitmask = 1 << bit_index;
5808 return !!(the_word & bitmask); */
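/* Worked example (illustrative, added): with a lower bound of zero and
   8-bit units, `100 IN set' reads the byte at offset 100/8 == 12 from
   the set's address and tests bit 100%8 == 4 of that byte.  */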
5810 tree set = TREE_OPERAND (exp, 0);
5811 tree index = TREE_OPERAND (exp, 1);
5812 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
5813 tree set_type = TREE_TYPE (set);
5814 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
5815 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
5816 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
5817 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
5818 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
5819 rtx setval = expand_expr (set, 0, VOIDmode, 0);
5820 rtx setaddr = XEXP (setval, 0);
5821 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
5822 rtx rlow;
5823 rtx diff, quo, rem, addr, bit, result;
5825 preexpand_calls (exp);
5827 /* If domain is empty, answer is no. Likewise if index is constant
5828 and out of bounds. */
5829 if ((TREE_CODE (set_high_bound) == INTEGER_CST
5830 && TREE_CODE (set_low_bound) == INTEGER_CST
5831 && tree_int_cst_lt (set_high_bound, set_low_bound)
5832 || (TREE_CODE (index) == INTEGER_CST
5833 && TREE_CODE (set_low_bound) == INTEGER_CST
5834 && tree_int_cst_lt (index, set_low_bound))
5835 || (TREE_CODE (set_high_bound) == INTEGER_CST
5836 && TREE_CODE (index) == INTEGER_CST
5837 && tree_int_cst_lt (set_high_bound, index))))
5838 return const0_rtx;
5840 if (target == 0)
5841 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5843 /* If we get here, we have to generate the code for both cases
5844 (in range and out of range). */
5846 op0 = gen_label_rtx ();
5847 op1 = gen_label_rtx ();
5849 if (! (GET_CODE (index_val) == CONST_INT
5850 && GET_CODE (lo_r) == CONST_INT))
5852 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
5853 GET_MODE (index_val), iunsignedp, 0);
5854 emit_jump_insn (gen_blt (op1));
5857 if (! (GET_CODE (index_val) == CONST_INT
5858 && GET_CODE (hi_r) == CONST_INT))
5860 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
5861 GET_MODE (index_val), iunsignedp, 0);
5862 emit_jump_insn (gen_bgt (op1));
5865 /* Calculate the element number of bit zero in the first word
5866 of the set. */
5867 if (GET_CODE (lo_r) == CONST_INT)
5868 rlow = GEN_INT (INTVAL (lo_r)
5869 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
5871 rlow = expand_binop (index_mode, and_optab, lo_r,
5872 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
5873 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5875 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
5876 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5878 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
5879 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5880 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
5881 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5883 addr = memory_address (byte_mode,
5884 expand_binop (index_mode, add_optab, diff,
5885 setaddr, NULL_RTX, iunsignedp,
5888 /* Extract the bit we want to examine. */
5889 bit = expand_shift (RSHIFT_EXPR, byte_mode,
5890 gen_rtx (MEM, byte_mode, addr),
5891 make_tree (TREE_TYPE (index), rem),
5893 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
5894 GET_MODE (target) == byte_mode ? target : 0,
5895 1, OPTAB_LIB_WIDEN);
5897 if (result != target)
5898 convert_move (target, result, 1);
5900 /* Output the code to handle the out-of-range case. */
5901 emit_jump (op0);
5902 emit_label (op1);
5903 emit_move_insn (target, const0_rtx);
5904 emit_label (op0);
5905 return target;
5908 case WITH_CLEANUP_EXPR:
5909 if (RTL_EXPR_RTL (exp) == 0)
5911 RTL_EXPR_RTL (exp)
5912 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
5913 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
5915 /* That's it for this cleanup. */
5916 TREE_OPERAND (exp, 2) = 0;
5918 return RTL_EXPR_RTL (exp);
5920 case CLEANUP_POINT_EXPR:
5922 extern int temp_slot_level;
5923 /* Start a new binding layer that will keep track of all cleanup
5924 actions to be performed. */
5925 expand_start_bindings (0);
5927 target_temp_slot_level = temp_slot_level;
5929 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
5930 /* If we're going to use this value, load it up now. */
5931 if (! ignore)
5932 op0 = force_not_mem (op0);
5933 preserve_temp_slots (op0);
5934 expand_end_bindings (NULL_TREE, 0, 0);
5936 return op0;
5938 case CALL_EXPR:
5939 /* Check for a built-in function. */
5940 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5941 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5942 == FUNCTION_DECL)
5943 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5944 return expand_builtin (exp, target, subtarget, tmode, ignore);
5946 /* If this call was expanded already by preexpand_calls,
5947 just return the result we got. */
5948 if (CALL_EXPR_RTL (exp) != 0)
5949 return CALL_EXPR_RTL (exp);
5951 return expand_call (exp, target, ignore);
5953 case NON_LVALUE_EXPR:
5954 case NOP_EXPR:
5955 case CONVERT_EXPR:
5956 case REFERENCE_EXPR:
5957 if (TREE_CODE (type) == UNION_TYPE)
5959 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
5961 if (target == 0)
5962 if (mode != BLKmode)
5963 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5964 else
5965 target = assign_temp (type, 0, 1, 1);
5968 if (GET_CODE (target) == MEM)
5969 /* Store data into beginning of memory target. */
5970 store_expr (TREE_OPERAND (exp, 0),
5971 change_address (target, TYPE_MODE (valtype), 0), 0);
5973 else if (GET_CODE (target) == REG)
5974 /* Store this field into a union of the proper type. */
5975 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
5976 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
5977 VOIDmode, 0, 1,
5978 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
5979 else
5980 abort ();
5982 /* Return the entire union. */
5983 return target;
5986 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5988 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
5989 ro_modifier);
5991 /* If the signedness of the conversion differs and OP0 is
5992 a promoted SUBREG, clear that indication since we now
5993 have to do the proper extension. */
5994 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
5995 && GET_CODE (op0) == SUBREG)
5996 SUBREG_PROMOTED_VAR_P (op0) = 0;
5997 return op0;
6001 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
6002 if (GET_MODE (op0) == mode)
6003 return op0;
6005 /* If OP0 is a constant, just convert it into the proper mode. */
6006 if (CONSTANT_P (op0))
6007 return
6008 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6009 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6011 if (modifier == EXPAND_INITIALIZER)
6012 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
6014 if (target == 0)
6015 return
6016 convert_to_mode (mode, op0,
6017 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6018 else
6019 convert_move (target, op0,
6020 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6021 return target;
6023 case PLUS_EXPR:
6024 /* We come here from MINUS_EXPR when the second operand is a
6025 constant. */
6026 plus_expr:
6027 this_optab = add_optab;
6029 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6030 something else, make sure we add the register to the constant and
6031 then to the other thing. This case can occur during strength
6032 reduction and doing it this way will produce better code if the
6033 frame pointer or argument pointer is eliminated.
6035 fold-const.c will ensure that the constant is always in the inner
6036 PLUS_EXPR, so the only case we need to do anything about is if
6037 sp, ap, or fp is our second argument, in which case we must swap
6038 the innermost first argument and our second argument. */
6040 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6041 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6042 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6043 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6044 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6045 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6047 tree t = TREE_OPERAND (exp, 1);
6049 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6050 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
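/* Illustrative example (added): for `(x + 4) + fp', fold-const keeps
   the constant in the inner PLUS_EXPR, so the swap above rewrites the
   tree to expand `fp + 4' first and add `x' afterwards, letting
   pointer elimination simply adjust the 4.  */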
6053 /* If the result is to be ptr_mode and we are adding an integer to
6054 something, we might be forming a constant. So try to use
6055 plus_constant. If it produces a sum and we can't accept it,
6056 use force_operand. This allows P = &ARR[const] to generate
6057 efficient code on machines where a SYMBOL_REF is not a valid
6058 address.
6060 If this is an EXPAND_SUM call, always return the sum. */
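/* Illustrative sketch (added): for `P = &ARR[5]' with 4-byte elements,
   plus_constant can fold the address to (plus (symbol_ref ARR)
   (const_int 20)); if that is not a legitimate address and the
   modifier does not ask for a sum, force_operand below copies it into
   a register.  */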
6061 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
6062 || mode == ptr_mode)
6064 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
6065 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6066 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
6068 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
6069 EXPAND_SUM);
6070 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
6071 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6072 op1 = force_operand (op1, target);
6073 return op1;
6076 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6077 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
6078 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
6080 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6081 EXPAND_SUM);
6082 if (! CONSTANT_P (op0))
6084 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6085 VOIDmode, modifier);
6086 /* Don't go to both_summands if modifier
6087 says it's not right to return a PLUS. */
6088 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6089 goto binop2;
6090 goto both_summands;
6092 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
6093 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6094 op0 = force_operand (op0, target);
6095 return op0;
6099 /* No sense saving up arithmetic to be done
6100 if it's all in the wrong mode to form part of an address.
6101 And force_operand won't know whether to sign-extend or
6102 zero-extend. */
6103 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6104 || mode != ptr_mode)
6105 goto binop;
6107 preexpand_calls (exp);
6108 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6109 subtarget = 0;
6111 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
6112 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
6114 both_summands:
6115 /* Make sure any term that's a sum with a constant comes last. */
6116 if (GET_CODE (op0) == PLUS
6117 && CONSTANT_P (XEXP (op0, 1)))
6119 temp = op0, op0 = op1, op1 = temp;
6123 /* If adding to a sum including a constant,
6124 associate it to put the constant outside. */
6125 if (GET_CODE (op1) == PLUS
6126 && CONSTANT_P (XEXP (op1, 1)))
6128 rtx constant_term = const0_rtx;
6130 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
6131 if (temp != 0)
6132 op0 = temp;
6133 /* Ensure that MULT comes first if there is one. */
6134 else if (GET_CODE (op0) == MULT)
6135 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
6137 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
6139 /* Let's also eliminate constants from op0 if possible. */
6140 op0 = eliminate_constant_term (op0, &constant_term);
6142 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
6143 their sum should be a constant. Form it into OP1, since the
6144 result we want will then be OP0 + OP1. */
6146 temp = simplify_binary_operation (PLUS, mode, constant_term,
6147 XEXP (op1, 1));
6148 if (temp != 0)
6149 op1 = temp;
6150 else
6151 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
6154 /* Put a constant term last and put a multiplication first. */
6155 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
6156 temp = op1, op1 = op0, op0 = temp;
6158 temp = simplify_binary_operation (PLUS, mode, op0, op1);
6159 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
6161 case MINUS_EXPR:
6162 /* For initializers, we are allowed to return a MINUS of two
6163 symbolic constants. Here we handle all cases when both operands
6164 are constant. */
6165 /* Handle difference of two symbolic constants,
6166 for the sake of an initializer. */
6167 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6168 && really_constant_p (TREE_OPERAND (exp, 0))
6169 && really_constant_p (TREE_OPERAND (exp, 1)))
6171 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
6172 VOIDmode, ro_modifier);
6173 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6174 VOIDmode, ro_modifier);
6176 /* If the last operand is a CONST_INT, use plus_constant of
6177 the negated constant. Else make the MINUS. */
6178 if (GET_CODE (op1) == CONST_INT)
6179 return plus_constant (op0, - INTVAL (op1));
6181 return gen_rtx (MINUS, mode, op0, op1);
6183 /* Convert A - const to A + (-const). */
6184 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6186 tree negated = fold (build1 (NEGATE_EXPR, type,
6187 TREE_OPERAND (exp, 1)));
6189 /* Deal with the case where we can't negate the constant
6190 in TYPE. */
6191 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6193 tree newtype = signed_type (type);
6194 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
6195 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
6196 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
6198 if (! TREE_OVERFLOW (newneg))
6199 return expand_expr (convert (type,
6200 build (PLUS_EXPR, newtype,
6201 newop0, newneg)),
6202 target, tmode, ro_modifier);
6204 else
6206 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
6207 goto plus_expr;
6210 this_optab = sub_optab;
6211 goto binop;
6213 case MULT_EXPR:
6214 preexpand_calls (exp);
6215 /* If first operand is constant, swap them.
6216 Thus the following special case checks need only
6217 check the second operand. */
6218 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6220 register tree t1 = TREE_OPERAND (exp, 0);
6221 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
6222 TREE_OPERAND (exp, 1) = t1;
6225 /* Attempt to return something suitable for generating an
6226 indexed address, for machines that support that. */
6228 if (modifier == EXPAND_SUM && mode == ptr_mode
6229 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6230 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
6232 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6235 /* Apply distributive law if OP0 is x+c. */
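/* Editorial illustration (not in the original source): if OP0 came out
   as (plus X (const_int 8)) and the multiplier is 4, the rewrite below
   yields

       (X + 8) * 4   ==>   X*4 + 32

   i.e. (plus (mult X 4) (const_int 32)), a form that maps directly onto
   base + index*scale + displacement addressing.  */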
6236 if (GET_CODE (op0) == PLUS
6237 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
6238 return gen_rtx (PLUS, mode,
6239 gen_rtx (MULT, mode, XEXP (op0, 0),
6240 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
6241 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
6242 * INTVAL (XEXP (op0, 1))));
6244 if (GET_CODE (op0) != REG)
6245 op0 = force_operand (op0, NULL_RTX);
6246 if (GET_CODE (op0) != REG)
6247 op0 = copy_to_mode_reg (mode, op0);
6249 return gen_rtx (MULT, mode, op0,
6250 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
6253 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6256 /* Check for multiplying things that have been extended
6257 from a narrower type. If this machine supports multiplying
6258 in that narrower type with a result in the desired type,
6259 do it that way, and avoid the explicit type-conversion. */
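/* Editorial illustration (not in the original source): given 32-bit
   ints a and b on a machine with a 32x32->64 widening multiply pattern,

       (long long) a * (long long) b

   can be emitted as one widening multiply instead of sign-extending
   both operands and performing a full 64-bit multiply.  */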
6260 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
6261 && TREE_CODE (type) == INTEGER_TYPE
6262 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6263 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
6264 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6265 && int_fits_type_p (TREE_OPERAND (exp, 1),
6266 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6267 /* Don't use a widening multiply if a shift will do. */
6268 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
6269 > HOST_BITS_PER_WIDE_INT)
6270 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
6272 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6273 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6275 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
6276 /* If both operands are extended, they must either both
6277 be zero-extended or both be sign-extended. */
6278 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6280 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
6282 enum machine_mode innermode
6283 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
6284 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6285 ? smul_widen_optab : umul_widen_optab);
6286 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6287 ? umul_widen_optab : smul_widen_optab);
6288 if (mode == GET_MODE_WIDER_MODE (innermode))
6290 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
6292 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6293 NULL_RTX, VOIDmode, 0);
6294 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6295 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6298 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6299 NULL_RTX, VOIDmode, 0);
6302 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6303 && innermode == word_mode)
6306 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6307 NULL_RTX, VOIDmode, 0);
6308 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6309 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6312 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6313 NULL_RTX, VOIDmode, 0);
6314 temp = expand_binop (mode, other_optab, op0, op1, target,
6315 unsignedp, OPTAB_LIB_WIDEN);
6316 htem = expand_mult_highpart_adjust (innermode,
6317 gen_highpart (innermode, temp),
6319 gen_highpart (innermode, temp),
6321 emit_move_insn (gen_highpart (innermode, temp), htem);
6326 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6327 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6328 return expand_mult (mode, op0, op1, target, unsignedp);
6330 case TRUNC_DIV_EXPR:
6331 case FLOOR_DIV_EXPR:
6333 case ROUND_DIV_EXPR:
6334 case EXACT_DIV_EXPR:
6335 preexpand_calls (exp);
6336 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6338 /* Possible optimization: compute the dividend with EXPAND_SUM;
6339 then, if the divisor is constant, we can optimize the case
6340 where some terms of the dividend have coefficients divisible by it. */
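/* Editorial illustration (not in the original source) of the
   optimization sketched above: ignoring overflow,

       (x*8 + y*24) / 8   ==>   x + y*3

   since every coefficient of the dividend is divisible by the constant
   divisor.  */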
6341 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6342 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6343 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
6346 this_optab = flodiv_optab;
6349 case TRUNC_MOD_EXPR:
6350 case FLOOR_MOD_EXPR:
6352 case ROUND_MOD_EXPR:
6353 preexpand_calls (exp);
6354 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6356 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6357 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6358 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
6360 case FIX_ROUND_EXPR:
6361 case FIX_FLOOR_EXPR:
6363 abort (); /* Not used for C. */
6365 case FIX_TRUNC_EXPR:
6366 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6368 target = gen_reg_rtx (mode);
6369 expand_fix (target, op0, unsignedp);
6373 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6375 target = gen_reg_rtx (mode);
6376 /* expand_float can't figure out what to do if FROM has VOIDmode.
6377 So give it the correct mode. With -O, cse will optimize this. */
6378 if (GET_MODE (op0) == VOIDmode)
6379 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6381 expand_float (target, op0,
6382 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6386 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6387 temp = expand_unop (mode, neg_optab, op0, target, 0);
6393 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6395 /* Handle complex values specially. */
6396 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
6397 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6398 return expand_complex_abs (mode, op0, target, unsignedp);
6400 /* Unsigned abs is simply the operand. Testing here means we don't
6401 risk generating incorrect code below. */
6402 if (TREE_UNSIGNED (type))
6405 return expand_abs (mode, op0, target, unsignedp,
6406 safe_from_p (target, TREE_OPERAND (exp, 0)));
6410 target = original_target;
6411 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
6412 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
6413 || GET_MODE (target) != mode
6414 || (GET_CODE (target) == REG
6415 && REGNO (target) < FIRST_PSEUDO_REGISTER))
6416 target = gen_reg_rtx (mode);
6417 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6418 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6420 /* First try to do it with a special MIN or MAX instruction.
6421 If that does not win, use a conditional jump to select the proper value. */
6423 this_optab = (TREE_UNSIGNED (type)
6424 ? (code == MIN_EXPR ? umin_optab : umax_optab)
6425 : (code == MIN_EXPR ? smin_optab : smax_optab));
6427 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
6432 /* At this point, a MEM target is no longer useful; we will get better code without it. */
6435 if (GET_CODE (target) == MEM)
6436 target = gen_reg_rtx (mode);
6439 emit_move_insn (target, op0);
6441 op0 = gen_label_rtx ();
6443 /* If this mode is an integer too wide to compare properly,
6444 compare word by word. Rely on cse to optimize constant cases. */
6445 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
6447 if (code == MAX_EXPR)
6448 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6449 target, op1, NULL_RTX, op0);
6451 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6452 op1, target, NULL_RTX, op0);
6453 emit_move_insn (target, op1);
6457 if (code == MAX_EXPR)
6458 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6459 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
6460 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
6462 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6463 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
6464 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
6465 if (temp == const0_rtx)
6466 emit_move_insn (target, op1);
6467 else if (temp != const_true_rtx)
6469 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
6470 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
6473 emit_move_insn (target, op1);
6480 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6481 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
6487 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6488 temp = expand_unop (mode, ffs_optab, op0, target, 1);
6493 /* ??? Can optimize bitwise operations with one arg constant.
6494 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6495 and (a bitwise1 b) bitwise2 b (etc)
6496 but that is probably not worthwhile. */
6498 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6499 boolean values when we want in all cases to compute both of them. In
6500 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6501 as actual zero-or-1 values and then bitwise anding. In cases where
6502 there cannot be any side effects, better code would be made by
6503 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6504 how to recognize those cases. */
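/* Editorial illustration (not in the original source): TRUTH_ANDIF_EXPR
   for `f() && g()' must branch so that g() is skipped when f() is zero,
   whereas TRUTH_AND_EXPR may be compiled branch-free as

       t1 = (a != 0);  t2 = (b != 0);  result = t1 & t2;

   because both operands are evaluated unconditionally anyway.  */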
6506 case TRUTH_AND_EXPR:
6508 this_optab = and_optab;
6513 this_optab = ior_optab;
6516 case TRUTH_XOR_EXPR:
6518 this_optab = xor_optab;
6525 preexpand_calls (exp);
6526 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6528 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6529 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
6532 /* Could determine the answer when only additive constants differ. Also,
6533 the addition of one can be handled by changing the condition. */
6540 preexpand_calls (exp);
6541 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
6545 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
6546 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
6548 && GET_CODE (original_target) == REG
6549 && (GET_MODE (original_target)
6550 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6552 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
6555 if (temp != original_target)
6556 temp = copy_to_reg (temp);
6558 op1 = gen_label_rtx ();
6559 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
6560 GET_MODE (temp), unsignedp, 0);
6561 emit_jump_insn (gen_beq (op1));
6562 emit_move_insn (temp, const1_rtx);
6567 /* If no set-flag instruction, must generate a conditional
6568 store into a temporary variable. Drop through
6569 and handle this like && and ||. */
6571 case TRUTH_ANDIF_EXPR:
6572 case TRUTH_ORIF_EXPR:
6574 && (target == 0 || ! safe_from_p (target, exp)
6575 /* Make sure we don't have a hard reg (such as function's return
6576 value) live across basic blocks, if not optimizing. */
6577 || (!optimize && GET_CODE (target) == REG
6578 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
6579 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6582 emit_clr_insn (target);
6584 op1 = gen_label_rtx ();
6585 jumpifnot (exp, op1);
6588 emit_0_to_1_insn (target);
6591 return ignore ? const0_rtx : target;
6593 case TRUTH_NOT_EXPR:
6594 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6595 /* The parser is careful to generate TRUTH_NOT_EXPR
6596 only with operands that are always zero or one. */
6597 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
6598 target, 1, OPTAB_LIB_WIDEN);
6604 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6606 return expand_expr (TREE_OPERAND (exp, 1),
6607 (ignore ? const0_rtx : target),
6611 /* If we would have a "singleton" (see below) were it not for a
6612 conversion in each arm, bring that conversion back out. */
6613 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6614 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
6615 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
6616 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
6618 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
6619 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
6621 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
6622 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6623 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
6624 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
6625 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
6626 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6627 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
6628 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
6629 return expand_expr (build1 (NOP_EXPR, type,
6630 build (COND_EXPR, TREE_TYPE (true),
6631 TREE_OPERAND (exp, 0),
6633 target, tmode, modifier);
6637 /* Note that COND_EXPRs whose type is a structure or union
6638 are required to be constructed to contain assignments of
6639 a temporary variable, so that we can evaluate them here
6640 for side effect only. If type is void, we must do likewise. */
6642 /* If an arm of the branch requires a cleanup,
6643 only that cleanup is performed. */
6646 tree binary_op = 0, unary_op = 0;
6648 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
6649 convert it to our mode, if necessary. */
6650 if (integer_onep (TREE_OPERAND (exp, 1))
6651 && integer_zerop (TREE_OPERAND (exp, 2))
6652 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6656 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6661 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
6662 if (GET_MODE (op0) == mode)
6666 target = gen_reg_rtx (mode);
6667 convert_move (target, op0, unsignedp);
6671 /* Check for X ? A + B : A. If we have this, we can copy A to the
6672 output and conditionally add B. Similarly for unary operations.
6673 Don't do this if X has side-effects because those side effects
6674 might affect A or B and the "?" operation is a sequence point in
6675 ANSI. (operand_equal_p tests for side effects.) */
6677 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
6678 && operand_equal_p (TREE_OPERAND (exp, 2),
6679 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6680 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
6681 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
6682 && operand_equal_p (TREE_OPERAND (exp, 1),
6683 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6684 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
6685 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
6686 && operand_equal_p (TREE_OPERAND (exp, 2),
6687 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6688 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
6689 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
6690 && operand_equal_p (TREE_OPERAND (exp, 1),
6691 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6692 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
6694 /* If we are not to produce a result, we have no target. Otherwise,
6695 if a target was specified use it; it will not be used as an
6696 intermediate target unless it is safe. If no target, use a temporary. */
6701 else if (original_target
6702 && (safe_from_p (original_target, TREE_OPERAND (exp, 0))
6703 || (singleton && GET_CODE (original_target) == REG
6704 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
6705 && original_target == var_rtx (singleton)))
6706 && GET_MODE (original_target) == mode
6707 && ! (GET_CODE (original_target) == MEM
6708 && MEM_VOLATILE_P (original_target)))
6709 temp = original_target;
6710 else if (TREE_ADDRESSABLE (type))
6713 temp = assign_temp (type, 0, 0, 1);
6715 /* If we had X ? A + C : A, with C a constant power of 2, and we can
6716 do the test of X as a store-flag operation, do this as
6717 A + ((X != 0) << log C). Similarly for other simple binary
6718 operators. If BRANCH_COST is low, do this only for C == 1. */
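/* Editorial illustration (not in the original source) of the transform
   described above:

       x ? a + 4 : a   ==>   a + ((x != 0) << 2)

   a store-flag plus a shift and an add, with no conditional branch.  */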
6719 if (temp && singleton && binary_op
6720 && (TREE_CODE (binary_op) == PLUS_EXPR
6721 || TREE_CODE (binary_op) == MINUS_EXPR
6722 || TREE_CODE (binary_op) == BIT_IOR_EXPR
6723 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
6724 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
6725 : integer_onep (TREE_OPERAND (binary_op, 1)))
6726 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6729 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
6730 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
6731 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
6734 /* If we had X ? A : A + 1, do this as A + (X == 0).
6736 We have to invert the truth value here and then put it
6737 back later if do_store_flag fails. We cannot simply copy
6738 TREE_OPERAND (exp, 0) to another variable and modify that
6739 because invert_truthvalue can modify the tree pointed to by its argument. */
6741 if (singleton == TREE_OPERAND (exp, 1))
6742 TREE_OPERAND (exp, 0)
6743 = invert_truthvalue (TREE_OPERAND (exp, 0));
6745 result = do_store_flag (TREE_OPERAND (exp, 0),
6746 (safe_from_p (temp, singleton)
6748 mode, BRANCH_COST <= 1);
6750 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
6751 result = expand_shift (LSHIFT_EXPR, mode, result,
6752 build_int_2 (tree_log2
6756 (safe_from_p (temp, singleton)
6757 ? temp : NULL_RTX), 0);
6761 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
6762 return expand_binop (mode, boptab, op1, result, temp,
6763 unsignedp, OPTAB_LIB_WIDEN);
6765 else if (singleton == TREE_OPERAND (exp, 1))
6766 TREE_OPERAND (exp, 0)
6767 = invert_truthvalue (TREE_OPERAND (exp, 0));
6770 do_pending_stack_adjust ();
6772 op0 = gen_label_rtx ();
6774 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
6778 /* If the target conflicts with the other operand of the
6779 binary op, we can't use it. Also, we can't use the target
6780 if it is a hard register, because evaluating the condition
6781 might clobber it. */
6783 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
6784 || (GET_CODE (temp) == REG
6785 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
6786 temp = gen_reg_rtx (mode);
6787 store_expr (singleton, temp, 0);
6790 expand_expr (singleton,
6791 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6792 if (singleton == TREE_OPERAND (exp, 1))
6793 jumpif (TREE_OPERAND (exp, 0), op0);
6795 jumpifnot (TREE_OPERAND (exp, 0), op0);
6797 start_cleanup_deferal ();
6798 if (binary_op && temp == 0)
6799 /* Just touch the other operand. */
6800 expand_expr (TREE_OPERAND (binary_op, 1),
6801 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6803 store_expr (build (TREE_CODE (binary_op), type,
6804 make_tree (type, temp),
6805 TREE_OPERAND (binary_op, 1)),
6808 store_expr (build1 (TREE_CODE (unary_op), type,
6809 make_tree (type, temp)),
6813 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
6814 comparison operator. If we have one of these cases, set the
6815 output to A, branch on A (cse will merge these two references),
6816 then set the output to FOO. */
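/* Editorial illustration (not in the original source): for

       a > 0 ? a : b

   the code below emits, in effect,

       temp = a;  if (a > 0) goto done;  temp = b;  done:

   relying on cse to merge the two evaluations of A.  */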
6818 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6819 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6820 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6821 TREE_OPERAND (exp, 1), 0)
6822 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6823 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
6825 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6826 temp = gen_reg_rtx (mode);
6827 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6828 jumpif (TREE_OPERAND (exp, 0), op0);
6830 start_cleanup_deferal ();
6831 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6835 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6836 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6837 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6838 TREE_OPERAND (exp, 2), 0)
6839 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6840 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
6842 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6843 temp = gen_reg_rtx (mode);
6844 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6845 jumpifnot (TREE_OPERAND (exp, 0), op0);
6847 start_cleanup_deferal ();
6848 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6853 op1 = gen_label_rtx ();
6854 jumpifnot (TREE_OPERAND (exp, 0), op0);
6856 start_cleanup_deferal ();
6858 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6860 expand_expr (TREE_OPERAND (exp, 1),
6861 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6862 end_cleanup_deferal ();
6864 emit_jump_insn (gen_jump (op1));
6867 start_cleanup_deferal ();
6869 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6871 expand_expr (TREE_OPERAND (exp, 2),
6872 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6875 end_cleanup_deferal ();
6886 /* Something needs to be initialized, but we didn't know
6887 where that thing was when building the tree. For example,
6888 it could be the return value of a function, or a parameter
6889 to a function which is laid out on the stack, or a temporary
6890 variable which must be passed by reference.
6892 We guarantee that the expression will either be constructed
6893 or copied into our original target. */
6895 tree slot = TREE_OPERAND (exp, 0);
6896 tree cleanups = NULL_TREE;
6900 if (TREE_CODE (slot) != VAR_DECL)
6904 target = original_target;
6908 if (DECL_RTL (slot) != 0)
6910 target = DECL_RTL (slot);
6911 /* If we have already expanded the slot, don't do it again. */
6913 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6918 target = assign_temp (type, 2, 1, 1);
6919 /* All temp slots at this level must not conflict. */
6920 preserve_temp_slots (target);
6921 DECL_RTL (slot) = target;
6923 /* Since SLOT is not known to the called function
6924 to belong to its stack frame, we must build an explicit
6925 cleanup. This case occurs when we must build up a reference
6926 to pass the reference as an argument. In this case,
6927 it is very likely that such a reference need not be built here. */
6930 if (TREE_OPERAND (exp, 2) == 0)
6931 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
6932 cleanups = TREE_OPERAND (exp, 2);
6937 /* This case does occur when expanding a parameter which
6938 needs to be constructed on the stack. The target
6939 is the actual stack address that we want to initialize.
6940 The function we call will perform the cleanup in this case. */
6942 /* If we have already assigned it space, use that space,
6943 not the target that we were passed in, as our target
6944 parameter is only a hint. */
6945 if (DECL_RTL (slot) != 0)
6947 target = DECL_RTL (slot);
6948 /* If we have already expanded the slot, don't do it again. */
6950 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6955 DECL_RTL (slot) = target;
6956 /* If we must have an addressable slot, then make sure that
6957 the RTL that we just stored in slot is OK. */
6958 if (TREE_ADDRESSABLE (slot))
6960 TREE_ADDRESSABLE (slot) = 0;
6961 mark_addressable (slot);
6966 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
6967 /* Mark it as expanded. */
6968 TREE_OPERAND (exp, 1) = NULL_TREE;
6970 store_expr (exp1, target, 0);
6972 expand_decl_cleanup (NULL_TREE, cleanups);
6979 tree lhs = TREE_OPERAND (exp, 0);
6980 tree rhs = TREE_OPERAND (exp, 1);
6981 tree noncopied_parts = 0;
6982 tree lhs_type = TREE_TYPE (lhs);
6984 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6985 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
6986 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
6987 TYPE_NONCOPIED_PARTS (lhs_type));
6988 while (noncopied_parts != 0)
6990 expand_assignment (TREE_VALUE (noncopied_parts),
6991 TREE_PURPOSE (noncopied_parts), 0, 0);
6992 noncopied_parts = TREE_CHAIN (noncopied_parts);
6999 /* If lhs is complex, expand calls in rhs before computing it.
7000 That's so we don't compute a pointer and save it over a call.
7001 If lhs is simple, compute it first so we can give it as a
7002 target if the rhs is just a call. This avoids an extra temp and copy
7003 and that prevents a partial-subsumption which makes bad code.
7004 Actually we could treat component_ref's of vars like vars. */
7006 tree lhs = TREE_OPERAND (exp, 0);
7007 tree rhs = TREE_OPERAND (exp, 1);
7008 tree noncopied_parts = 0;
7009 tree lhs_type = TREE_TYPE (lhs);
7013 if (TREE_CODE (lhs) != VAR_DECL
7014 && TREE_CODE (lhs) != RESULT_DECL
7015 && TREE_CODE (lhs) != PARM_DECL
7016 && ! (TREE_CODE (lhs) == INDIRECT_REF
7017 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
7018 preexpand_calls (exp);
7020 /* Check for |= or &= of a bitfield of size one into another bitfield
7021 of size 1. In this case, (unless we need the result of the
7022 assignment) we can do this more efficiently with a
7023 test followed by an assignment, if necessary.
7025 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7026 things change so we do, this code should be enhanced to support it. */
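/* Editorial illustration (not in the original source): for one-bit
   fields, an ignored-result statement such as

       s.flag |= t.flag;

   becomes, in effect, "if (t.flag) s.flag = 1;" -- a jump on the
   source bit plus a constant store, instead of a load/or/store
   sequence.  */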
7029 && TREE_CODE (lhs) == COMPONENT_REF
7030 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7031 || TREE_CODE (rhs) == BIT_AND_EXPR)
7032 && TREE_OPERAND (rhs, 0) == lhs
7033 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7034 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7035 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7037 rtx label = gen_label_rtx ();
7039 do_jump (TREE_OPERAND (rhs, 1),
7040 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7041 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7042 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7043 (TREE_CODE (rhs) == BIT_IOR_EXPR
7045 : integer_zero_node)),
7047 do_pending_stack_adjust ();
7052 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7053 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7054 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7055 TYPE_NONCOPIED_PARTS (lhs_type));
7057 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7058 while (noncopied_parts != 0)
7060 expand_assignment (TREE_PURPOSE (noncopied_parts),
7061 TREE_VALUE (noncopied_parts), 0, 0);
7062 noncopied_parts = TREE_CHAIN (noncopied_parts);
7067 case PREINCREMENT_EXPR:
7068 case PREDECREMENT_EXPR:
7069 return expand_increment (exp, 0, ignore);
7071 case POSTINCREMENT_EXPR:
7072 case POSTDECREMENT_EXPR:
7073 /* Faster to treat as pre-increment if result is not used. */
7074 return expand_increment (exp, ! ignore, ignore);
7077 /* If nonzero, TEMP will be set to the address of something that might
7078 be a MEM corresponding to a stack slot. */
7081 /* Are we taking the address of a nested function? */
7082 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
7083 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
7084 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0)))
7086 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7087 op0 = force_operand (op0, target);
7089 /* If we are taking the address of something erroneous, just use zero. */
7091 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7095 /* We make sure to pass const0_rtx down if we came in with
7096 ignore set, to avoid doing the cleanups twice for something. */
7097 op0 = expand_expr (TREE_OPERAND (exp, 0),
7098 ignore ? const0_rtx : NULL_RTX, VOIDmode,
7099 (modifier == EXPAND_INITIALIZER
7100 ? modifier : EXPAND_CONST_ADDRESS));
7102 /* If we are going to ignore the result, OP0 will have been set
7103 to const0_rtx, so just return it. Don't get confused and
7104 think we are taking the address of the constant. */
7108 op0 = protect_from_queue (op0, 0);
7110 /* We would like the object in memory. If it is a constant,
7111 we can have it be statically allocated into memory. For
7112 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
7113 memory and store the value into it. */
7115 if (CONSTANT_P (op0))
7116 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7118 else if (GET_CODE (op0) == MEM)
7120 mark_temp_addr_taken (op0);
7121 temp = XEXP (op0, 0);
7124 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7125 || GET_CODE (op0) == CONCAT)
7127 /* If this object is in a register, it cannot be BLKmode; copy it into a memory temporary so we can take its address. */
7129 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7130 rtx memloc = assign_temp (inner_type, 1, 1, 1);
7132 mark_temp_addr_taken (memloc);
7133 emit_move_insn (memloc, op0);
7137 if (GET_CODE (op0) != MEM)
7140 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7142 temp = XEXP (op0, 0);
7143 #ifdef POINTERS_EXTEND_UNSIGNED
7144 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7145 && mode == ptr_mode)
7146 temp = convert_memory_address (ptr_mode, temp);
7151 op0 = force_operand (XEXP (op0, 0), target);
7154 if (flag_force_addr && GET_CODE (op0) != REG)
7155 op0 = force_reg (Pmode, op0);
7157 if (GET_CODE (op0) == REG
7158 && ! REG_USERVAR_P (op0))
7159 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
7161 /* If we might have had a temp slot, add an equivalent address for it. */
7164 update_temp_slot_address (temp, op0);
7166 #ifdef POINTERS_EXTEND_UNSIGNED
7167 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7168 && mode == ptr_mode)
7169 op0 = convert_memory_address (ptr_mode, op0);
7174 case ENTRY_VALUE_EXPR:
7177 /* COMPLEX type for Extended Pascal & Fortran */
7180 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7183 /* Get the rtx code of the operands. */
7184 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7185 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7188 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7192 /* Move the real (op0) and imaginary (op1) parts to their location. */
7193 emit_move_insn (gen_realpart (mode, target), op0);
7194 emit_move_insn (gen_imagpart (mode, target), op1);
7196 insns = get_insns ();
7199 /* Complex construction should appear as a single unit. */
7200 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7201 each with a separate pseudo as destination.
7202 It's not correct for flow to treat them as a unit. */
7203 if (GET_CODE (target) != CONCAT)
7204 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
7212 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7213 return gen_realpart (mode, op0);
7216 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7217 return gen_imagpart (mode, op0);
7221 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7225 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7228 target = gen_reg_rtx (mode);
7232 /* Store the realpart and the negated imagpart to target. */
7233 emit_move_insn (gen_realpart (partmode, target),
7234 gen_realpart (partmode, op0));
7236 imag_t = gen_imagpart (partmode, target);
7237 temp = expand_unop (partmode, neg_optab,
7238 gen_imagpart (partmode, op0), imag_t, 0);
7240 emit_move_insn (imag_t, temp);
7242 insns = get_insns ();
7245 /* Conjugate should appear as a single unit.
7246 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
7247 each with a separate pseudo as destination.
7248 It's not correct for flow to treat them as a unit. */
7249 if (GET_CODE (target) != CONCAT)
7250 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
7257 case TRY_CATCH_EXPR:
7259 tree handler = TREE_OPERAND (exp, 1);
7261 expand_eh_region_start ();
7263 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7265 expand_eh_region_end (handler);
7272 rtx dcc = get_dynamic_cleanup_chain ();
7273 emit_move_insn (dcc, validize_mem (gen_rtx (MEM, Pmode, dcc)));
7279 rtx dhc = get_dynamic_handler_chain ();
7280 emit_move_insn (dhc, validize_mem (gen_rtx (MEM, Pmode, dhc)));
7285 op0 = CONST0_RTX (tmode);
7291 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7294 /* Here to do an ordinary binary operator, generating an instruction
7295 from the optab already placed in `this_optab'. */
7297 preexpand_calls (exp);
7298 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
7300 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7301 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7303 temp = expand_binop (mode, this_optab, op0, op1, target,
7304 unsignedp, OPTAB_LIB_WIDEN);
7311 /* Emit bytecode to evaluate the given expression EXP to the stack. */
7314 bc_expand_expr (exp)
7317 enum tree_code code;
7320 struct binary_operator *binoptab;
7321 struct unary_operator *unoptab;
7322 struct increment_operator *incroptab;
7323 struct bc_label *lab, *lab1;
7324 enum bytecode_opcode opcode;
7327 code = TREE_CODE (exp);
7333 if (DECL_RTL (exp) == 0)
7335 error_with_decl (exp, "prior parameter's size depends on `%s'");
7339 bc_load_parmaddr (DECL_RTL (exp));
7340 bc_load_memory (TREE_TYPE (exp), exp);
7346 if (DECL_RTL (exp) == 0)
7350 if (BYTECODE_LABEL (DECL_RTL (exp)))
7351 bc_load_externaddr (DECL_RTL (exp));
7353 bc_load_localaddr (DECL_RTL (exp));
7355 if (TREE_PUBLIC (exp))
7356 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
7357 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
7359 bc_load_localaddr (DECL_RTL (exp));
7361 bc_load_memory (TREE_TYPE (exp), exp);
7366 #ifdef DEBUG_PRINT_CODE
7367 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
7369 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
7371 : TYPE_MODE (TREE_TYPE (exp)))],
7372 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
7378 #ifdef DEBUG_PRINT_CODE
7379 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
7381 /* FIX THIS: find a better way to pass real_cst's. -bson */
7382 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
7383 (double) TREE_REAL_CST (exp));
7392 /* We build a call description vector describing the type of
7393 the return value and of the arguments; this call vector,
7394 together with a pointer to a location for the return value
7395 and the base of the argument list, is passed to the low
7396 level machine dependent call subroutine, which is responsible
7397 for putting the arguments wherever real functions expect
7398 them, as well as getting the return value back. */
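/* Editorial sketch (not in the original source): conceptually the
   descriptor built below records the argument count, then the size and
   runtime type code of the return value, then a size/type-code pair
   for each argument -- e.g. for `int f (char, double)' the count 2
   followed by three such pairs.  The exact in-memory order falls out
   of the tree_cons calls below.  */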
7400 tree calldesc = 0, arg;
7404 /* Push the evaluated args on the evaluation stack in reverse
7405 order. Also make an entry for each arg in the calldesc
7406 vector while we're at it. */
7408 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7410 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
7413 bc_expand_expr (TREE_VALUE (arg));
7415 calldesc = tree_cons ((tree) 0,
7416 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
7418 calldesc = tree_cons ((tree) 0,
7419 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
7423 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7425 /* Allocate a location for the return value and push its
7426 address on the evaluation stack. Also make an entry
7427 at the front of the calldesc for the return value type. */
7429 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7430 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
7431 bc_load_localaddr (retval);
7433 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
7434 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
7436 /* Prepend the argument count. */
7437 calldesc = tree_cons ((tree) 0,
7438 build_int_2 (nargs, 0),
7441 /* Push the address of the call description vector on the stack. */
7442 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
7443 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
7444 build_index_type (build_int_2 (nargs * 2, 0)));
7445 r = output_constant_def (calldesc);
7446 bc_load_externaddr (r);
7448 /* Push the address of the function to be called. */
7449 bc_expand_expr (TREE_OPERAND (exp, 0));
7451 /* Call the function, popping its address and the calldesc vector
7452 address off the evaluation stack in the process. */
7453 bc_emit_instruction (call);
7455 /* Pop the arguments off the stack. */
7456 bc_adjust_stack (nargs);
7458 /* Load the return value onto the stack. */
7459 bc_load_localaddr (retval);
7460 bc_load_memory (type, TREE_OPERAND (exp, 0));
7466 if (!SAVE_EXPR_RTL (exp))
7468 /* First time around: copy to local variable */
7469 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
7470 TYPE_ALIGN (TREE_TYPE(exp)));
7471 bc_expand_expr (TREE_OPERAND (exp, 0));
7472 bc_emit_instruction (duplicate);
7474 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7475 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7479 /* Consecutive reference: use saved copy */
7480 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7481 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7486 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
7487 how are they handled instead? */
7490 TREE_USED (exp) = 1;
7491 bc_expand_expr (STMT_BODY (exp));
7498 bc_expand_expr (TREE_OPERAND (exp, 0));
7499 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
7504 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
7509 bc_expand_address (TREE_OPERAND (exp, 0));
7514 bc_expand_expr (TREE_OPERAND (exp, 0));
7515 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7520 bc_expand_expr (bc_canonicalize_array_ref (exp));
7525 bc_expand_component_address (exp);
7527 /* If we have a bitfield, generate a proper load */
7528 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
7533 bc_expand_expr (TREE_OPERAND (exp, 0));
7534 bc_emit_instruction (drop);
7535 bc_expand_expr (TREE_OPERAND (exp, 1));
7540 bc_expand_expr (TREE_OPERAND (exp, 0));
7541 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7542 lab = bc_get_bytecode_label ();
7543 bc_emit_bytecode (xjumpifnot);
7544 bc_emit_bytecode_labelref (lab);
7546 #ifdef DEBUG_PRINT_CODE
7547 fputc ('\n', stderr);
7549 bc_expand_expr (TREE_OPERAND (exp, 1));
7550 lab1 = bc_get_bytecode_label ();
7551 bc_emit_bytecode (jump);
7552 bc_emit_bytecode_labelref (lab1);
7554 #ifdef DEBUG_PRINT_CODE
7555 fputc ('\n', stderr);
7558 bc_emit_bytecode_labeldef (lab);
7559 bc_expand_expr (TREE_OPERAND (exp, 2));
7560 bc_emit_bytecode_labeldef (lab1);
7563 case TRUTH_ANDIF_EXPR:
7565 opcode = xjumpifnot;
7568 case TRUTH_ORIF_EXPR:
7575 binoptab = optab_plus_expr;
7580 binoptab = optab_minus_expr;
7585 binoptab = optab_mult_expr;
7588 case TRUNC_DIV_EXPR:
7589 case FLOOR_DIV_EXPR:
7591 case ROUND_DIV_EXPR:
7592 case EXACT_DIV_EXPR:
7594 binoptab = optab_trunc_div_expr;
7597 case TRUNC_MOD_EXPR:
7598 case FLOOR_MOD_EXPR:
7600 case ROUND_MOD_EXPR:
7602 binoptab = optab_trunc_mod_expr;
7605 case FIX_ROUND_EXPR:
7606 case FIX_FLOOR_EXPR:
7608 abort (); /* Not used for C. */
7610 case FIX_TRUNC_EXPR:
7617 abort (); /* FIXME */
7621 binoptab = optab_rdiv_expr;
7626 binoptab = optab_bit_and_expr;
7631 binoptab = optab_bit_ior_expr;
7636 binoptab = optab_bit_xor_expr;
7641 binoptab = optab_lshift_expr;
7646 binoptab = optab_rshift_expr;
7649 case TRUTH_AND_EXPR:
7651 binoptab = optab_truth_and_expr;
7656 binoptab = optab_truth_or_expr;
7661 binoptab = optab_lt_expr;
7666 binoptab = optab_le_expr;
7671 binoptab = optab_ge_expr;
7676 binoptab = optab_gt_expr;
7681 binoptab = optab_eq_expr;
7686 binoptab = optab_ne_expr;
7691 unoptab = optab_negate_expr;
7696 unoptab = optab_bit_not_expr;
7699 case TRUTH_NOT_EXPR:
7701 unoptab = optab_truth_not_expr;
7704 case PREDECREMENT_EXPR:
7706 incroptab = optab_predecrement_expr;
7709 case PREINCREMENT_EXPR:
7711 incroptab = optab_preincrement_expr;
7714 case POSTDECREMENT_EXPR:
7716 incroptab = optab_postdecrement_expr;
7719 case POSTINCREMENT_EXPR:
7721 incroptab = optab_postincrement_expr;
7726 bc_expand_constructor (exp);
7736 tree vars = TREE_OPERAND (exp, 0);
7737 int vars_need_expansion = 0;
7739 /* Need to open a binding contour here because
7740 if there are any cleanups they must be contained here. */
7741 expand_start_bindings (0);
7743 /* Mark the corresponding BLOCK for output. */
7744 if (TREE_OPERAND (exp, 2) != 0)
7745 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
7747 /* If VARS have not yet been expanded, expand them now. */
7750 if (DECL_RTL (vars) == 0)
7752 vars_need_expansion = 1;
7755 expand_decl_init (vars);
7756 vars = TREE_CHAIN (vars);
7759 bc_expand_expr (TREE_OPERAND (exp, 1));
7761 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7771 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
7772 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
7778 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7784 bc_expand_expr (TREE_OPERAND (exp, 0));
7785 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7786 lab = bc_get_bytecode_label ();
7788 bc_emit_instruction (duplicate);
7789 bc_emit_bytecode (opcode);
7790 bc_emit_bytecode_labelref (lab);
7792 #ifdef DEBUG_PRINT_CODE
7793 fputc ('\n', stderr);
7796 bc_emit_instruction (drop);
7798 bc_expand_expr (TREE_OPERAND (exp, 1));
7799 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
7800 bc_emit_bytecode_labeldef (lab);
7806 type = TREE_TYPE (TREE_OPERAND (exp, 0));
7808 /* Push the quantum. */
7809 bc_expand_expr (TREE_OPERAND (exp, 1));
7811 /* Convert it to the lvalue's type. */
7812 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
7814 /* Push the address of the lvalue */
7815 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
7817 /* Perform actual increment */
7818 bc_expand_increment (incroptab, type);
7822 /* Return the alignment in bits of EXP, a pointer valued expression.
7823 But don't return more than MAX_ALIGN no matter what.
7824 The alignment returned is, by default, the alignment of the thing that
7825 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7827 Otherwise, look at the expression to see if we can do better, i.e., if the
7828 expression is actually pointing at an object whose alignment is tighter. */
7831 get_pointer_alignment (exp, max_align)
7835 unsigned align, inner;
7837 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7840 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7841 align = MIN (align, max_align);
7845 switch (TREE_CODE (exp))
7849 case NON_LVALUE_EXPR:
7850 exp = TREE_OPERAND (exp, 0);
7851 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7853 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7854 align = MIN (inner, max_align);
7858 /* If sum of pointer + int, restrict our maximum alignment to that
7859 imposed by the integer. If not, we can't do any better than ALIGN. */
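/* Editorial illustration (not in the original source): for `p + 3'
   with `int *p' known to be 32-bit aligned, the 3-byte offset limits
   the provable alignment of the sum to 8 bits, which is what the
   halving loop below computes.  */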
7861 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
7864 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
7869 exp = TREE_OPERAND (exp, 0);
7873 /* See what we are pointing at and look at its alignment. */
7874 exp = TREE_OPERAND (exp, 0);
7875 if (TREE_CODE (exp) == FUNCTION_DECL)
7876 align = FUNCTION_BOUNDARY;
7877 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
7878 align = DECL_ALIGN (exp);
7879 #ifdef CONSTANT_ALIGNMENT
7880 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7881 align = CONSTANT_ALIGNMENT (exp, align);
7883 return MIN (align, max_align);
7891 /* Return the tree node and offset if a given argument corresponds to
7892 a string constant. */
7895 string_constant (arg, ptr_offset)
7901 if (TREE_CODE (arg) == ADDR_EXPR
7902 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7904 *ptr_offset = integer_zero_node;
7905 return TREE_OPERAND (arg, 0);
7907 else if (TREE_CODE (arg) == PLUS_EXPR)
7909 tree arg0 = TREE_OPERAND (arg, 0);
7910 tree arg1 = TREE_OPERAND (arg, 1);
7915 if (TREE_CODE (arg0) == ADDR_EXPR
7916 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
7919 return TREE_OPERAND (arg0, 0);
7921 else if (TREE_CODE (arg1) == ADDR_EXPR
7922 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
7925 return TREE_OPERAND (arg1, 0);
7932 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
7933 way, because it could contain a zero byte in the middle.
7934 TREE_STRING_LENGTH is the size of the character array, not the string.
7936 Unfortunately, string_constant can't access the values of const char
7937 arrays with initializers, so neither can we here. */
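/* Editorial illustration (not in the original source): for an argument
   equivalent to "hello" + 2 this returns size_int (3); for a string
   with an embedded zero, such as "foo\0bar", and a non-constant offset,
   it must give up and let strlen be called at run time.  */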
7947 src = string_constant (src, &offset_node);
7950 max = TREE_STRING_LENGTH (src);
7951 ptr = TREE_STRING_POINTER (src);
7952 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
7954 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7955 compute the offset to the following null if we don't know where to
7956 start searching for it. */
7958 for (i = 0; i < max; i++)
7961 /* We don't know the starting offset, but we do know that the string
7962 has no internal zero bytes. We can assume that the offset falls
7963 within the bounds of the string; otherwise, the programmer deserves
7964 what he gets. Subtract the offset from the length of the string, and return that. */
7966 /* This would perhaps not be valid if we were dealing with named
7967 arrays in addition to literal string constants. */
7968 return size_binop (MINUS_EXPR, size_int (max), offset_node);
7971 /* We have a known offset into the string. Start searching there for
7972 a null character. */
7973 if (offset_node == 0)
7977 /* Did we get a long long offset? If so, punt. */
7978 if (TREE_INT_CST_HIGH (offset_node) != 0)
7980 offset = TREE_INT_CST_LOW (offset_node);
7982 /* If the offset is known to be out of bounds, warn, and call strlen at runtime. */
7984 if (offset < 0 || offset > max)
7986 warning ("offset outside bounds of constant string");
7989 /* Use strlen to search for the first zero byte. Since any strings
7990 constructed with build_string will have nulls appended, we win even
7991 if we get handed something like (char[4])"abcd".
7993 Since OFFSET is our starting index into the string, no further
7994 calculation is needed. */
7995 return size_int (strlen (ptr + offset));
7999 expand_builtin_return_addr (fndecl_code, count, tem)
8000 enum built_in_function fndecl_code;
8006 /* Some machines need special handling before we can access
8007 arbitrary frames. For example, on the sparc, we must first flush
8008 all register windows to the stack. */
8009 #ifdef SETUP_FRAME_ADDRESSES
8011 SETUP_FRAME_ADDRESSES ();
8014 /* On the sparc, the return address is not in the frame, it is in a
8015 register. There is no way to access it off of the current frame
8016 pointer, but it can be accessed off the previous frame pointer by
8017 reading the value from the register window save area. */
8018 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
8019 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
8023 /* Scan back COUNT frames to the specified frame. */
8024 for (i = 0; i < count; i++)
8026 /* Assume the dynamic chain pointer is in the word that the
8027 frame address points to, unless otherwise specified. */
8028 #ifdef DYNAMIC_CHAIN_ADDRESS
8029 tem = DYNAMIC_CHAIN_ADDRESS (tem);
8031 tem = memory_address (Pmode, tem);
8032 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
8035 /* For __builtin_frame_address, return what we've got. */
8036 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
8039 /* For __builtin_return_address, get the return address from that frame. */
8041 #ifdef RETURN_ADDR_RTX
8042 tem = RETURN_ADDR_RTX (count, tem);
8044 tem = memory_address (Pmode,
8045 plus_constant (tem, GET_MODE_SIZE (Pmode)));
8046 tem = gen_rtx (MEM, Pmode, tem);
8051 /* __builtin_setjmp is passed a pointer to an array of five words (not
8052 all will be used on all machines). It operates similarly to the C
8053 library function of the same name, but is more efficient. Much of
8054 the code below (and for longjmp) is copied from the handling of nonlocal gotos.
8057 NOTE: This is intended for use by GNAT and the exception handling
8058 scheme in the compiler and will only work in the method used by them. */
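/* Editorial sketch (not in the original source) of the buffer layout
   the code below assumes, one pointer-sized word per slot:

       buf[0]          frame pointer (virtual_stack_vars_rtx)
       buf[1]          resume address (the label lab1)
       buf[2] onward   stack save area, in SA_MODE

   The longjmp side uses these saved values to restore the environment
   before jumping back to lab1.  */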
8062 expand_builtin_setjmp (buf_addr, target)
8066 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
8067 enum machine_mode sa_mode = Pmode, value_mode;
8069 int old_inhibit_defer_pop = inhibit_defer_pop;
8071 = RETURN_POPS_ARGS (get_identifier ("__dummy"),
8072 build_function_type (void_type_node, NULL_TREE),
8075 CUMULATIVE_ARGS args_so_far;
8079 value_mode = TYPE_MODE (integer_type_node);
8081 #ifdef POINTERS_EXTEND_UNSIGNED
8082 buf_addr = convert_memory_address (Pmode, buf_addr);
8085 buf_addr = force_reg (Pmode, buf_addr);
8087 if (target == 0 || GET_CODE (target) != REG
8088 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8089 target = gen_reg_rtx (value_mode);
8093 CONST_CALL_P (emit_note (NULL_PTR, NOTE_INSN_SETJMP)) = 1;
8094 current_function_calls_setjmp = 1;
8096 /* We store the frame pointer and the address of lab1 in the buffer
8097 and use the rest of it for the stack save area, which is
8098 machine-dependent. */
8099 emit_move_insn (gen_rtx (MEM, Pmode, buf_addr),
8100 virtual_stack_vars_rtx);
8102 (validize_mem (gen_rtx (MEM, Pmode,
8103 plus_constant (buf_addr,
8104 GET_MODE_SIZE (Pmode)))),
8105 gen_rtx (LABEL_REF, Pmode, lab1));
8107 #ifdef HAVE_save_stack_nonlocal
8108 if (HAVE_save_stack_nonlocal)
8109 sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
8112 stack_save = gen_rtx (MEM, sa_mode,
8113 plus_constant (buf_addr,
8114 2 * GET_MODE_SIZE (Pmode)));
8115 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
8119 emit_insn (gen_setjmp ());
8122 /* Set TARGET to zero and branch around the other case. */
8123 emit_move_insn (target, const0_rtx);
8124 emit_jump_insn (gen_jump (lab2));
8128 /* Note that setjmp clobbers FP when we get here, so we have to make
8129 sure it's marked as used by this function. */
8130 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
8132 /* Mark the static chain as clobbered here so life information
8133 doesn't get messed up for it. */
8134 emit_insn (gen_rtx (CLOBBER, VOIDmode, static_chain_rtx));
8136 /* Now put in the code to restore the frame pointer, and argument
8137 pointer, if needed. The code below is from expand_end_bindings
8138 in stmt.c; see detailed documentation there. */
8139 #ifdef HAVE_nonlocal_goto
8140 if (! HAVE_nonlocal_goto)
8142 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8144 /* Do we need to do something like:
8146 current_function_has_nonlocal_label = 1;
8148 here? It seems like we might have to, or some subset of that
8149 functionality, but I am unsure. (mrs) */
8151 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8152 if (fixed_regs[ARG_POINTER_REGNUM])
8154 #ifdef ELIMINABLE_REGS
8155 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
8157 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8158 if (elim_regs[i].from == ARG_POINTER_REGNUM
8159 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8162 if (i == sizeof elim_regs / sizeof elim_regs [0])
8165 /* Now restore our arg pointer from the address at which it
8166 was saved in our stack frame.
8167 If space hasn't been allocated for it yet, make some now. */
8169 if (arg_pointer_save_area == 0)
8170 arg_pointer_save_area
8171 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8172 emit_move_insn (virtual_incoming_args_rtx,
8173 copy_to_reg (arg_pointer_save_area));
8178 #ifdef HAVE_nonlocal_goto_receiver
8179 if (HAVE_nonlocal_goto_receiver)
8180 emit_insn (gen_nonlocal_goto_receiver ());
8182 /* The static chain pointer contains the address of the dummy function.
8183 We need to call it here to handle some PIC cases of restoring a
8184 global pointer. Then return 1. */
8185 op0 = copy_to_mode_reg (Pmode, static_chain_rtx);
8187 /* We can't actually call emit_library_call here, so do everything
8188 it does, which isn't much for a libfunc with no args. */
8189 op0 = memory_address (FUNCTION_MODE, op0);
8191 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE,
8192 gen_rtx (SYMBOL_REF, Pmode, "__dummy"), 1);
8193 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1);
8195 #ifndef ACCUMULATE_OUTGOING_ARGS
8196 #ifdef HAVE_call_pop
8198 emit_call_insn (gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, op0),
8199 const0_rtx, next_arg_reg,
8200 GEN_INT (return_pops)));
8207 emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, op0),
8208 const0_rtx, next_arg_reg, const0_rtx));
8213 emit_move_insn (target, const1_rtx);
8219 /* Expand an expression EXP that calls a built-in function,
8220 with result going to TARGET if that's convenient
8221 (and in mode MODE if that's convenient).
8222 SUBTARGET may be used as the target for computing one of EXP's operands.
8223 IGNORE is nonzero if the value is to be ignored. */
8225 #define CALLED_AS_BUILT_IN(NODE) \
8226 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
8229 expand_builtin (exp, target, subtarget, mode, ignore)
8233 enum machine_mode mode;
8236 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8237 tree arglist = TREE_OPERAND (exp, 1);
8240 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
8241 optab builtin_optab;
8243 switch (DECL_FUNCTION_CODE (fndecl))
8248 /* build_function_call changes these into ABS_EXPR. */
8253 /* Treat these like sqrt, but only if the user asks for them. */
8254 if (! flag_fast_math)
8256 case BUILT_IN_FSQRT:
8257 /* If not optimizing, call the library function. */
8262 /* Arg could be wrong type if user redeclared this fcn wrong. */
8263 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
8266 /* Stabilize and compute the argument. */
8267 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
8268 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
8270 exp = copy_node (exp);
8271 arglist = copy_node (arglist);
8272 TREE_OPERAND (exp, 1) = arglist;
8273 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
8275 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8277 /* Make a suitable register to place result in. */
8278 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8283 switch (DECL_FUNCTION_CODE (fndecl))
8286 builtin_optab = sin_optab; break;
8288 builtin_optab = cos_optab; break;
8289 case BUILT_IN_FSQRT:
8290 builtin_optab = sqrt_optab; break;
8295 /* Compute into TARGET.
8296 Set TARGET to wherever the result comes back. */
8297 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8298 builtin_optab, op0, target, 0);
8300 /* If we were unable to expand via the builtin, stop the
8301 sequence (without outputting the insns) and break, causing
8302 a call to the library function. */
8309 /* Check the results by default. But if flag_fast_math is turned on,
8310 then assume sqrt will always be called with valid arguments. */
8312 if (! flag_fast_math)
8314 /* Don't define the builtin FP instructions
8315 if your machine is not IEEE. */
8316 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8319 lab1 = gen_label_rtx ();
8321 /* Test the result; if it is NaN, set errno=EDOM because
8322 the argument was not in the domain. */
8323 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
8324 emit_jump_insn (gen_beq (lab1));
8328 #ifdef GEN_ERRNO_RTX
8329 rtx errno_rtx = GEN_ERRNO_RTX;
8332 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
8335 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8338 /* We can't set errno=EDOM directly; let the library call do it.
8339 Pop the arguments right away in case the call gets deleted. */
8341 expand_call (exp, target, 0);
8348 /* Output the entire sequence. */
8349 insns = get_insns ();
8355 /* __builtin_apply_args returns a block of memory allocated on
8356 the stack into which is stored the arg pointer, structure
8357 value address, static chain, and all the registers that might
8358 possibly be used in performing a function call. The code is
8359 moved to the start of the function so the incoming values are available. */
8361 case BUILT_IN_APPLY_ARGS:
8362 /* Don't do __builtin_apply_args more than once in a function.
8363 Save the result of the first call and reuse it. */
8364 if (apply_args_value != 0)
8365 return apply_args_value;
8367 /* When this function is called, it means that registers must be
8368 saved on entry to this function. So we migrate the
8369 call to the first insn of this function. */
8374 temp = expand_builtin_apply_args ();
8378 apply_args_value = temp;
8380 /* Put the sequence after the NOTE that starts the function.
8381 If this is inside a SEQUENCE, make the outer-level insn
8382 chain current, so the code is placed at the start of the function. */
8384 push_topmost_sequence ();
8385 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8386 pop_topmost_sequence ();
8390 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8391 FUNCTION with a copy of the parameters described by
8392 ARGUMENTS, and ARGSIZE. It returns a block of memory
8393 allocated on the stack into which is stored all the registers
8394 that might possibly be used for returning the result of a
8395 function. ARGUMENTS is the value returned by
8396 __builtin_apply_args. ARGSIZE is the number of bytes of
8397 arguments that must be copied. ??? How should this value be
8398 computed? We'll also need a safe worst case value for varargs functions. */
8400 case BUILT_IN_APPLY:
8402 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8403 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8404 || TREE_CHAIN (arglist) == 0
8405 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8406 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8407 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8415 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8416 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
8418 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8421 /* __builtin_return (RESULT) causes the function to return the
8422 value described by RESULT. RESULT is address of the block of
8423 memory returned by __builtin_apply. */
8424 case BUILT_IN_RETURN:
8426 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8427 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8428 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8429 NULL_RTX, VOIDmode, 0));
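/* These three builtins are meant to be used together to write an
   untyped call forwarder.  A minimal user-level sketch; `wrappee' and
   the 64-byte argument-block size are assumptions for illustration,
   since (per the ??? above) there is no portable way to compute the
   size:

       void *forward ()
       {
         void *args = __builtin_apply_args ();
         void *res = __builtin_apply ((void (*) ()) wrappee, args, 64);
         __builtin_return (res);
       }
 */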
8432 case BUILT_IN_SAVEREGS:
8433 /* Don't do __builtin_saveregs more than once in a function.
8434 Save the result of the first call and reuse it. */
8435 if (saveregs_value != 0)
8436 return saveregs_value;
8438 /* When this function is called, it means that registers must be
8439 saved on entry to this function. So we migrate the
8440 call to the first insn of this function. */
8444 /* Now really call the function. `expand_call' does not call
8445 expand_builtin, so there is no danger of infinite recursion here. */
8448 #ifdef EXPAND_BUILTIN_SAVEREGS
8449 /* Do whatever the machine needs done in this case. */
8450 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8452 /* The register where the function returns its value
8453 is likely to have something else in it, such as an argument.
8454 So preserve that register around the call. */
8456 if (value_mode != VOIDmode)
8458 rtx valreg = hard_libcall_value (value_mode);
8459 rtx saved_valreg = gen_reg_rtx (value_mode);
8461 emit_move_insn (saved_valreg, valreg);
8462 temp = expand_call (exp, target, ignore);
8463 emit_move_insn (valreg, saved_valreg);
8466 /* Generate the call, putting the value in a pseudo. */
8467 temp = expand_call (exp, target, ignore);
8473 saveregs_value = temp;
8475 /* Put the sequence after the NOTE that starts the function.
8476 If this is inside a SEQUENCE, make the outer-level insn
8477 chain current, so the code is placed at the start of the
8479 push_topmost_sequence ();
8480 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8481 pop_topmost_sequence ();
8485 /* __builtin_args_info (N) returns word N of the arg space info
8486 for the current function. The number and meanings of words
8487 is controlled by the definition of CUMULATIVE_ARGS. */
8488 case BUILT_IN_ARGS_INFO:
8490 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
8492 int *word_ptr = (int *) &current_function_args_info;
8493 tree type, elts, result;
8495 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8496 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8497 __FILE__, __LINE__);
8501 tree arg = TREE_VALUE (arglist);
8502 if (TREE_CODE (arg) != INTEGER_CST)
8503 error ("argument of `__builtin_args_info' must be constant");
8506 int wordnum = TREE_INT_CST_LOW (arg);
8508 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8509 error ("argument of `__builtin_args_info' out of range");
8511 return GEN_INT (word_ptr[wordnum]);
8515 error ("missing argument in `__builtin_args_info'");
8520 for (i = 0; i < nwords; i++)
8521 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0), elts);
8523 type = build_array_type (integer_type_node,
8524 build_index_type (build_int_2 (nwords, 0)));
8525 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8526 TREE_CONSTANT (result) = 1;
8527 TREE_STATIC (result) = 1;
8528 result = build (INDIRECT_REF, build_pointer_type (type), result);
8529 TREE_CONSTANT (result) = 1;
8530 return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
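/* Usage sketch: with a constant N the builtin folds to word N of the
   target's CUMULATIVE_ARGS block, and with no argument it yields the
   whole block as a constant array:

       int w0 = __builtin_args_info (0);

   What each word means is entirely up to the target's definition of
   CUMULATIVE_ARGS.  */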
8534 /* Return the address of the first anonymous stack arg. */
8535 case BUILT_IN_NEXT_ARG:
8537 tree fntype = TREE_TYPE (current_function_decl);
8539 if ((TYPE_ARG_TYPES (fntype) == 0
8540 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype))) == void_type_node))
8542 && ! current_function_varargs)
8544 error ("`va_start' used in function with fixed args");
8550 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8551 tree arg = TREE_VALUE (arglist);
8553 /* Strip off all nops for the sake of the comparison. This
8554 is not quite the same as STRIP_NOPS. It does more.
8555 We must also strip off INDIRECT_EXPR for C++ reference parameters. */
8557 while (TREE_CODE (arg) == NOP_EXPR
8558 || TREE_CODE (arg) == CONVERT_EXPR
8559 || TREE_CODE (arg) == NON_LVALUE_EXPR
8560 || TREE_CODE (arg) == INDIRECT_REF)
8561 arg = TREE_OPERAND (arg, 0);
8562 if (arg != last_parm)
8563 warning ("second parameter of `va_start' not last named argument");
8565 else if (! current_function_varargs)
8566 /* Evidently an out of date version of <stdarg.h>; can't validate
8567 va_start's second argument, but can still work as intended. */
8568 warning ("`__builtin_next_arg' called without an argument");
8571 return expand_binop (Pmode, add_optab,
8572 current_function_internal_arg_pointer,
8573 current_function_arg_offset_rtx,
8574 NULL_RTX, 0, OPTAB_LIB_WIDEN);
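/* <stdarg.h> of this era defines va_start in terms of this builtin,
   which is what makes the last-named-parameter check above possible.
   A sketch of such a definition (not the literal header text):

       #define va_start(ap, last) \
         ((ap) = (char *) __builtin_next_arg (last))
 */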
8576 case BUILT_IN_CLASSIFY_TYPE:
8579 tree type = TREE_TYPE (TREE_VALUE (arglist));
8580 enum tree_code code = TREE_CODE (type);
8581 if (code == VOID_TYPE)
8582 return GEN_INT (void_type_class);
8583 if (code == INTEGER_TYPE)
8584 return GEN_INT (integer_type_class);
8585 if (code == CHAR_TYPE)
8586 return GEN_INT (char_type_class);
8587 if (code == ENUMERAL_TYPE)
8588 return GEN_INT (enumeral_type_class);
8589 if (code == BOOLEAN_TYPE)
8590 return GEN_INT (boolean_type_class);
8591 if (code == POINTER_TYPE)
8592 return GEN_INT (pointer_type_class);
8593 if (code == REFERENCE_TYPE)
8594 return GEN_INT (reference_type_class);
8595 if (code == OFFSET_TYPE)
8596 return GEN_INT (offset_type_class);
8597 if (code == REAL_TYPE)
8598 return GEN_INT (real_type_class);
8599 if (code == COMPLEX_TYPE)
8600 return GEN_INT (complex_type_class);
8601 if (code == FUNCTION_TYPE)
8602 return GEN_INT (function_type_class);
8603 if (code == METHOD_TYPE)
8604 return GEN_INT (method_type_class);
8605 if (code == RECORD_TYPE)
8606 return GEN_INT (record_type_class);
8607 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8608 return GEN_INT (union_type_class);
8609 if (code == ARRAY_TYPE)
8611 if (TYPE_STRING_FLAG (type))
8612 return GEN_INT (string_type_class);
8614 return GEN_INT (array_type_class);
8616 if (code == SET_TYPE)
8617 return GEN_INT (set_type_class);
8618 if (code == FILE_TYPE)
8619 return GEN_INT (file_type_class);
8620 if (code == LANG_TYPE)
8621 return GEN_INT (lang_type_class);
8623 return GEN_INT (no_type_class);
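/* So `__builtin_classify_type' folds to one of the small constants
   from typeclass.h at compile time; user-level sketch:

       __builtin_classify_type (1)     ... integer_type_class
       __builtin_classify_type (1.0)   ... real_type_class
       __builtin_classify_type (&i)    ... pointer_type_class
 */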
8625 case BUILT_IN_CONSTANT_P:
8630 tree arg = TREE_VALUE (arglist);
8633 return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
8634 || (TREE_CODE (arg) == ADDR_EXPR
8635 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8636 ? const1_rtx : const0_rtx);
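/* Only literal constants and addresses of string constants qualify
   at this point; user-level sketch:

       __builtin_constant_p (4 * 1024)   ... 1 (folded to a constant)
       __builtin_constant_p ("abc")      ... 1
       __builtin_constant_p (some_var)   ... 0
 */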
8639 case BUILT_IN_FRAME_ADDRESS:
8640 /* The argument must be a nonnegative integer constant.
8641 It counts the number of frames to scan up the stack.
8642 The value is the address of that frame. */
8643 case BUILT_IN_RETURN_ADDRESS:
8644 /* The argument must be a nonnegative integer constant.
8645 It counts the number of frames to scan up the stack.
8646 The value is the return address saved in that frame. */
8648 /* Warning about missing arg was already issued. */
8650 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
8651 || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
8653 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8654 error ("invalid arg to `__builtin_frame_address'");
8656 error ("invalid arg to `__builtin_return_address'");
8661 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
8662 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
8663 hard_frame_pointer_rtx);
8665 /* For __builtin_frame_address, return what we've got. */
8666 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8669 if (GET_CODE (tem) != REG)
8670 tem = copy_to_reg (tem);
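/* Typical uses (user-level sketch): a count of 0 names the current
   frame, and each larger count walks one saved frame up the stack,
   which is only as trustworthy as the target's frame chain:

       void *ra = __builtin_return_address (0);   ... our return address
       void *fp = __builtin_frame_address (1);    ... caller's frame
 */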
8674 /* Returns the address of the area where the structure is returned.
8676 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
8678 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
8679 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
8682 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
8684 case BUILT_IN_ALLOCA:
8686 /* Arg could be non-integer if user redeclared this fcn wrong. */
8687 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8690 /* Compute the argument. */
8691 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
8693 /* Allocate the desired space. */
8694 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
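/* E.g. `__builtin_alloca (n)' becomes a stack-pointer adjustment in
   the current frame, so the storage vanishes on return; user-level
   sketch:

       char *buf = __builtin_alloca (len + 1);
       memcpy (buf, s, len);
       buf[len] = '\0';
 */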
8697 /* If not optimizing, call the library function. */
8698 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8702 /* Arg could be non-integer if user redeclared this fcn wrong. */
8703 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8706 /* Compute the argument. */
8707 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8708 /* Compute ffs, into TARGET if possible.
8709 Set TARGET to wherever the result comes back. */
8710 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8711 ffs_optab, op0, target, 1);
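/* The ffs_optab insn must honor the library contract: one plus the
   index of the least significant 1-bit, or zero for a zero argument.
   For example:

       ffs (0)     ... 0
       ffs (1)     ... 1
       ffs (0x18)  ... 4, since the lowest set bit is bit 3
 */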
8716 case BUILT_IN_STRLEN:
8717 /* If not optimizing, call the library function. */
8718 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8722 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8723 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8727 tree src = TREE_VALUE (arglist);
8728 tree len = c_strlen (src);
8731 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8733 rtx result, src_rtx, char_rtx;
8734 enum machine_mode insn_mode = value_mode, char_mode;
8735 enum insn_code icode;
8737 /* If the length is known, just return it. */
8739 return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);
8741 /* If SRC is not a pointer type, don't do this operation inline. */
8745 /* Call a function if we can't compute strlen in the right mode. */
8747 while (insn_mode != VOIDmode)
8749 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
8750 if (icode != CODE_FOR_nothing)
8753 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
8755 if (insn_mode == VOIDmode)
8758 /* Make a place to write the result of the instruction. */
8761 && GET_CODE (result) == REG
8762 && GET_MODE (result) == insn_mode
8763 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8764 result = gen_reg_rtx (insn_mode);
8766 /* Make sure the operands are acceptable to the predicates. */
8768 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
8769 result = gen_reg_rtx (insn_mode);
8771 src_rtx = memory_address (BLKmode,
8772 expand_expr (src, NULL_RTX, ptr_mode,
8774 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
8775 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
8777 char_rtx = const0_rtx;
8778 char_mode = insn_operand_mode[(int)icode][2];
8779 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
8780 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
8782 emit_insn (GEN_FCN (icode) (result,
8783 gen_rtx (MEM, BLKmode, src_rtx),
8784 char_rtx, GEN_INT (align)));
8786 /* Return the value in the proper mode for this function. */
8787 if (GET_MODE (result) == value_mode)
8789 else if (target != 0)
8791 convert_move (target, result, 0);
8795 return convert_to_mode (value_mode, result, 0);
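/* Net effect: `strlen ("hello")' folds to the constant 5 via c_strlen
   above, while `strlen (p)' for unknown P uses the target's strlen
   insn when there is one, else the library call.  User-level sketch:

       int n = __builtin_strlen ("hello");   ... compiles to n = 5
 */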
8798 case BUILT_IN_STRCPY:
8799 /* If not optimizing, call the library function. */
8800 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8804 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8805 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8806 || TREE_CHAIN (arglist) == 0
8807 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8811 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
8816 len = size_binop (PLUS_EXPR, len, integer_one_node);
8818 chainon (arglist, build_tree_list (NULL_TREE, len));
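/* That is, when the source length is known at compile time,
   `strcpy (d, "abc")' is rewritten as if the user had written

       memcpy (d, "abc", 4);   ... three characters plus the NUL

   and control drops through to the BUILT_IN_MEMCPY expansion
   below.  */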
8822 case BUILT_IN_MEMCPY:
8823 /* If not optimizing, call the library function. */
8824 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8828 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8829 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8830 || TREE_CHAIN (arglist) == 0
8831 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8832 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8833 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8837 tree dest = TREE_VALUE (arglist);
8838 tree src = TREE_VALUE (TREE_CHAIN (arglist));
8839 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8843 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8845 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8846 rtx dest_rtx, dest_mem, src_mem;
8848 /* If either SRC or DEST is not a pointer type, don't do
8849 this operation in-line. */
8850 if (src_align == 0 || dest_align == 0)
8852 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
8853 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8857 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8858 dest_mem = gen_rtx (MEM, BLKmode,
8859 memory_address (BLKmode, dest_rtx));
8860 /* There could be a void* cast on top of the object. */
8861 while (TREE_CODE (dest) == NOP_EXPR)
8862 dest = TREE_OPERAND (dest, 0);
8863 type = TREE_TYPE (TREE_TYPE (dest));
8864 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8865 src_mem = gen_rtx (MEM, BLKmode,
8866 memory_address (BLKmode,
8867 expand_expr (src, NULL_RTX,
8870 /* There could be a void* cast on top of the object. */
8871 while (TREE_CODE (src) == NOP_EXPR)
8872 src = TREE_OPERAND (src, 0);
8873 type = TREE_TYPE (TREE_TYPE (src));
8874 MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
8876 /* Copy word part most expediently. */
8877 emit_block_move (dest_mem, src_mem,
8878 expand_expr (len, NULL_RTX, VOIDmode, 0),
8879 MIN (src_align, dest_align));
8880 return force_operand (dest_rtx, NULL_RTX);
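/* MIN (src_align, dest_align) above bounds the chunk size: when both
   pointers are known to be word aligned, emit_block_move can copy a
   word at a time, otherwise it must use narrower pieces.  User-level
   sketch:

       struct s a, b;
       __builtin_memcpy (&a, &b, sizeof a);   ... word-at-a-time copy
 */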
8883 case BUILT_IN_MEMSET:
8884 /* If not optimizing, call the library function. */
8885 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8889 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8890 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8891 || TREE_CHAIN (arglist) == 0
8892 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != INTEGER_TYPE)
8894 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8896 || (INTEGER_TYPE != (TREE_CODE (TREE_TYPE (TREE_VALUE
8898 (TREE_CHAIN (TREE_CHAIN (arglist))))))))
8902 tree dest = TREE_VALUE (arglist);
8903 tree val = TREE_VALUE (TREE_CHAIN (arglist));
8904 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8908 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8909 rtx dest_rtx, dest_mem;
8911 /* If DEST is not a pointer type, don't do this
8912 operation in-line. */
8913 if (dest_align == 0)
8916 /* If VAL is not 0, don't do this operation in-line. */
8917 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
8920 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8921 dest_mem = gen_rtx (MEM, BLKmode,
8922 memory_address (BLKmode, dest_rtx));
8923 /* There could be a void* cast on top of the object. */
8924 while (TREE_CODE (dest) == NOP_EXPR)
8925 dest = TREE_OPERAND (dest, 0);
8926 type = TREE_TYPE (TREE_TYPE (dest));
8927 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8929 clear_storage (dest_mem, expand_expr (len, NULL_RTX, VOIDmode, 0),
8932 return force_operand (dest_rtx, NULL_RTX);
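/* Only the zero-fill case is expanded inline; any nonzero fill byte
   takes the library path above.  User-level sketch:

       __builtin_memset (buf, 0, sizeof buf);   ... inline clear_storage
       __builtin_memset (buf, 0xff, 16);        ... library call
 */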
8935 /* These comparison functions need an instruction that returns an actual
8936 index. An ordinary compare that just sets the condition codes is not enough. */
8938 #ifdef HAVE_cmpstrsi
8939 case BUILT_IN_STRCMP:
8940 /* If not optimizing, call the library function. */
8941 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8945 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8946 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8947 || TREE_CHAIN (arglist) == 0
8948 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8950 else if (!HAVE_cmpstrsi)
8953 tree arg1 = TREE_VALUE (arglist);
8954 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8958 len = c_strlen (arg1);
8960 len = size_binop (PLUS_EXPR, integer_one_node, len);
8961 len2 = c_strlen (arg2);
8963 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
8965 /* If we don't have a constant length for the first, use the length
8966 of the second, if we know it. We don't require a constant for
8967 this case; some cost analysis could be done if both are available
8968 but neither is constant. For now, assume they're equally cheap.
8970 If both strings have constant lengths, use the smaller. This
8971 could arise if optimization results in strcpy being called with
8972 two fixed strings, or if the code was machine-generated. We should
8973 add some code to the `memcmp' handler below to deal with such
8974 situations, someday. */
8975 if (!len || TREE_CODE (len) != INTEGER_CST)
8982 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
8984 if (tree_int_cst_lt (len2, len))
8988 chainon (arglist, build_tree_list (NULL_TREE, len));
8992 case BUILT_IN_MEMCMP:
8993 /* If not optimizing, call the library function. */
8994 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8998 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8999 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9000 || TREE_CHAIN (arglist) == 0
9001 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
9002 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9003 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
9005 else if (!HAVE_cmpstrsi)
9008 tree arg1 = TREE_VALUE (arglist);
9009 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9010 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9014 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9016 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9017 enum machine_mode insn_mode
9018 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
9020 /* If we don't have POINTER_TYPE, call the function. */
9021 if (arg1_align == 0 || arg2_align == 0)
9023 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
9024 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9028 /* Make a place to write the result of the instruction. */
9031 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
9032 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9033 result = gen_reg_rtx (insn_mode);
9035 emit_insn (gen_cmpstrsi (result,
9036 gen_rtx (MEM, BLKmode,
9037 expand_expr (arg1, NULL_RTX,
9040 gen_rtx (MEM, BLKmode,
9041 expand_expr (arg2, NULL_RTX,
9044 expand_expr (len, NULL_RTX, VOIDmode, 0),
9045 GEN_INT (MIN (arg1_align, arg2_align))));
9047 /* Return the value in the proper mode for this function. */
9048 mode = TYPE_MODE (TREE_TYPE (exp));
9049 if (GET_MODE (result) == mode)
9051 else if (target != 0)
9053 convert_move (target, result, 0);
9057 return convert_to_mode (mode, result, 0);
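/* Both strcmp and memcmp funnel into the cmpstrsi pattern because it
   yields an actual signed index that can be used as a value, not just
   condition codes.  User-level sketch:

       int r = __builtin_memcmp (a, b, 16);   ... r < 0, == 0, or > 0
 */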
9060 case BUILT_IN_STRCMP:
9061 case BUILT_IN_MEMCMP:
9065 case BUILT_IN_SETJMP:
9067 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9071 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9073 return expand_builtin_setjmp (buf_addr, target);
9076 /* __builtin_longjmp is passed a pointer to an array of five words
9077 and a value, which is a dummy. It's similar to the C library longjmp
9078 function but works with __builtin_setjmp above. */
9079 case BUILT_IN_LONGJMP:
9080 if (arglist == 0 || TREE_CHAIN (arglist) == 0
9081 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9085 tree dummy_id = get_identifier ("__dummy");
9086 tree dummy_type = build_function_type (void_type_node, NULL_TREE);
9087 tree dummy_decl = build_decl (FUNCTION_DECL, dummy_id, dummy_type);
9088 #ifdef POINTERS_EXTEND_UNSIGNED
9091 convert_memory_address
9093 expand_expr (TREE_VALUE (arglist),
9094 NULL_RTX, VOIDmode, 0)));
9097 = force_reg (Pmode, expand_expr (TREE_VALUE (arglist),
9101 rtx fp = gen_rtx (MEM, Pmode, buf_addr);
9102 rtx lab = gen_rtx (MEM, Pmode,
9103 plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
9104 enum machine_mode sa_mode
9105 #ifdef HAVE_save_stack_nonlocal
9106 = (HAVE_save_stack_nonlocal
9107 ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
9112 rtx stack = gen_rtx (MEM, sa_mode,
9113 plus_constant (buf_addr,
9114 2 * GET_MODE_SIZE (Pmode)));
9116 DECL_EXTERNAL (dummy_decl) = 1;
9117 TREE_PUBLIC (dummy_decl) = 1;
9118 make_decl_rtl (dummy_decl, NULL_PTR, 1);
9120 /* Expand the second expression just for side-effects. */
9121 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
9122 const0_rtx, VOIDmode, 0);
9124 assemble_external (dummy_decl);
9126 /* Pick up FP, label, and SP from the block and jump. This code is
9127 from expand_goto in stmt.c; see there for detailed comments. */
9128 #if HAVE_nonlocal_goto
9129 if (HAVE_nonlocal_goto)
9130 emit_insn (gen_nonlocal_goto (fp, lab, stack,
9131 XEXP (DECL_RTL (dummy_decl), 0)));
9135 lab = copy_to_reg (lab);
9136 emit_move_insn (hard_frame_pointer_rtx, fp);
9137 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
9139 /* Put in the static chain register the address of the dummy
9141 emit_move_insn (static_chain_rtx, XEXP (DECL_RTL (dummy_decl), 0));
9142 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
9143 emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
9144 emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
9145 emit_indirect_jump (lab);
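/* The three loads above (fp, lab, stack) read back the layout the
   setjmp expander stores into the five-word buffer: frame pointer at
   offset 0, resume label at GET_MODE_SIZE (Pmode), saved stack
   pointer at 2 * GET_MODE_SIZE (Pmode).  User-level sketch of the
   pairing:

       void *buf[5];
       if (__builtin_setjmp (buf) == 0)
         __builtin_longjmp (buf, 1);   ... the value is a dummy
 */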
9151 /* Various hooks for the DWARF 2 __throw routine. */
9152 case BUILT_IN_UNWIND_INIT:
9153 expand_builtin_unwind_init ();
9156 return frame_pointer_rtx;
9158 return stack_pointer_rtx;
9159 #ifdef DWARF2_UNWIND_INFO
9160 case BUILT_IN_DWARF_FP_REGNUM:
9161 return expand_builtin_dwarf_fp_regnum ();
9162 case BUILT_IN_DWARF_REG_SIZE:
9163 return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
9165 case BUILT_IN_FROB_RETURN_ADDR:
9166 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
9167 case BUILT_IN_EXTRACT_RETURN_ADDR:
9168 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
9169 case BUILT_IN_SET_RETURN_ADDR_REG:
9170 expand_builtin_set_return_addr_reg (TREE_VALUE (arglist));
9172 case BUILT_IN_EH_STUB:
9173 return expand_builtin_eh_stub ();
9174 case BUILT_IN_SET_EH_REGS:
9175 expand_builtin_set_eh_regs (TREE_VALUE (arglist),
9176 TREE_VALUE (TREE_CHAIN (arglist)));
9179 default: /* just do library call, if unknown builtin */
9180 error ("built-in function `%s' not currently supported",
9181 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9184 /* The switch statement above can drop through to cause the function
9185 to be called normally. */
9187 return expand_call (exp, target, ignore);
9190 /* Built-in functions to perform an untyped call and return. */
9192 /* For each register that may be used for calling a function, this
9193 gives a mode used to copy the register's value. VOIDmode indicates
9194 the register is not used for calling a function. If the machine
9195 has register windows, this gives only the outbound registers.
9196 INCOMING_REGNO gives the corresponding inbound register. */
9197 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
9199 /* For each register that may be used for returning values, this gives
9200 a mode used to copy the register's value. VOIDmode indicates the
9201 register is not used for returning values. If the machine has
9202 register windows, this gives only the outbound registers.
9203 INCOMING_REGNO gives the corresponding inbound register. */
9204 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
9206 /* For each register that may be used for calling a function, this
9207 gives the offset of that register into the block returned by
9208 __builtin_apply_args. 0 indicates that the register is not
9209 used for calling a function. */
9210 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
9212 /* Return the offset of register REGNO into the block returned by
9213 __builtin_apply_args. This is not declared static, since it is
9214 needed in objc-act.c. */
9217 apply_args_register_offset (regno)
9222 /* Arguments are always put in outgoing registers (in the argument
9223 block) if such make sense. */
9224 #ifdef OUTGOING_REGNO
9225 regno = OUTGOING_REGNO(regno);
9227 return apply_args_reg_offset[regno];
9230 /* Return the size required for the block returned by __builtin_apply_args,
9231 and initialize apply_args_mode. */
9236 static int size = -1;
9238 enum machine_mode mode;
9240 /* The values computed by this function never change. */
9243 /* The first value is the incoming arg-pointer. */
9244 size = GET_MODE_SIZE (Pmode);
9246 /* The second value is the structure value address unless this is
9247 passed as an "invisible" first argument. */
9248 if (struct_value_rtx)
9249 size += GET_MODE_SIZE (Pmode);
9251 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9252 if (FUNCTION_ARG_REGNO_P (regno))
9254 /* Search for the proper mode for copying this register's
9255 value. I'm not sure this is right, but it works so far. */
9256 enum machine_mode best_mode = VOIDmode;
9258 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9260 mode = GET_MODE_WIDER_MODE (mode))
9261 if (HARD_REGNO_MODE_OK (regno, mode)
9262 && HARD_REGNO_NREGS (regno, mode) == 1)
9265 if (best_mode == VOIDmode)
9266 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9268 mode = GET_MODE_WIDER_MODE (mode))
9269 if (HARD_REGNO_MODE_OK (regno, mode)
9270 && (mov_optab->handlers[(int) mode].insn_code
9271 != CODE_FOR_nothing))
9275 if (mode == VOIDmode)
9278 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9279 if (size % align != 0)
9280 size = CEIL (size, align) * align;
9281 apply_args_reg_offset[regno] = size;
9282 size += GET_MODE_SIZE (mode);
9283 apply_args_mode[regno] = mode;
9287 apply_args_mode[regno] = VOIDmode;
9288 apply_args_reg_offset[regno] = 0;
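/* Worked example of the rounding above: if SIZE is currently 6 and
   the next register's mode needs 4-byte alignment, then
   CEIL (6, 4) * 4 = 8, so two bytes of padding are skipped before
   that register's slot in the block.  */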
9294 /* Return the size required for the block returned by __builtin_apply,
9295 and initialize apply_result_mode. */
9298 apply_result_size ()
9300 static int size = -1;
9302 enum machine_mode mode;
9304 /* The values computed by this function never change. */
9309 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9310 if (FUNCTION_VALUE_REGNO_P (regno))
9312 /* Search for the proper mode for copying this register's
9313 value. I'm not sure this is right, but it works so far. */
9314 enum machine_mode best_mode = VOIDmode;
9316 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9318 mode = GET_MODE_WIDER_MODE (mode))
9319 if (HARD_REGNO_MODE_OK (regno, mode))
9322 if (best_mode == VOIDmode)
9323 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9325 mode = GET_MODE_WIDER_MODE (mode))
9326 if (HARD_REGNO_MODE_OK (regno, mode)
9327 && (mov_optab->handlers[(int) mode].insn_code
9328 != CODE_FOR_nothing))
9332 if (mode == VOIDmode)
9335 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9336 if (size % align != 0)
9337 size = CEIL (size, align) * align;
9338 size += GET_MODE_SIZE (mode);
9339 apply_result_mode[regno] = mode;
9342 apply_result_mode[regno] = VOIDmode;
9344 /* Allow targets that use untyped_call and untyped_return to override
9345 the size so that machine-specific information can be stored here. */
9346 #ifdef APPLY_RESULT_SIZE
9347 size = APPLY_RESULT_SIZE;
9353 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9354 /* Create a vector describing the result block RESULT. If SAVEP is true,
9355 the result block is used to save the values; otherwise it is used to
9356 restore the values. */
9359 result_vector (savep, result)
9363 int regno, size, align, nelts;
9364 enum machine_mode mode;
9366 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9369 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9370 if ((mode = apply_result_mode[regno]) != VOIDmode)
9372 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9373 if (size % align != 0)
9374 size = CEIL (size, align) * align;
9375 reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
9376 mem = change_address (result, mode,
9377 plus_constant (XEXP (result, 0), size));
9378 savevec[nelts++] = (savep
9379 ? gen_rtx (SET, VOIDmode, mem, reg)
9380 : gen_rtx (SET, VOIDmode, reg, mem));
9381 size += GET_MODE_SIZE (mode);
9383 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
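/* For a hypothetical target whose only value-return register is r0
   in SImode, the PARALLEL built here would hold a single SET in each
   direction, roughly:

       (parallel [(set (mem:SI <result>) (reg:SI r0))])   ... savep
       (parallel [(set (reg:SI r0) (mem:SI <result>))])   ... restore
 */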
9385 #endif /* HAVE_untyped_call or HAVE_untyped_return */
9387 /* Save the state required to perform an untyped call with the same
9388 arguments as were passed to the current function. */
9391 expand_builtin_apply_args ()
9394 int size, align, regno;
9395 enum machine_mode mode;
9397 /* Create a block where the arg-pointer, structure value address,
9398 and argument registers can be saved. */
9399 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
9401 /* Walk past the arg-pointer and structure value address. */
9402 size = GET_MODE_SIZE (Pmode);
9403 if (struct_value_rtx)
9404 size += GET_MODE_SIZE (Pmode);
9406 /* Save each register used in calling a function to the block. */
9407 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9408 if ((mode = apply_args_mode[regno]) != VOIDmode)
9412 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9413 if (size % align != 0)
9414 size = CEIL (size, align) * align;
9416 tem = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9419 /* For reg-stack.c's stack register household.
9420 Compare with a similar piece of code in function.c. */
9422 emit_insn (gen_rtx (USE, mode, tem));
9425 emit_move_insn (change_address (registers, mode,
9426 plus_constant (XEXP (registers, 0),
9429 size += GET_MODE_SIZE (mode);
9432 /* Save the arg pointer to the block. */
9433 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9434 copy_to_reg (virtual_incoming_args_rtx));
9435 size = GET_MODE_SIZE (Pmode);
9437 /* Save the structure value address unless this is passed as an
9438 "invisible" first argument. */
9439 if (struct_value_incoming_rtx)
9441 emit_move_insn (change_address (registers, Pmode,
9442 plus_constant (XEXP (registers, 0),
9444 copy_to_reg (struct_value_incoming_rtx));
9445 size += GET_MODE_SIZE (Pmode);
9448 /* Return the address of the block. */
9449 return copy_addr_to_reg (XEXP (registers, 0));
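/* Layout of the block just built: the incoming arg pointer at offset
   0, then (when struct_value_incoming_rtx is set) the structure value
   address, then each argument register at the offset recorded in
   apply_args_reg_offset by apply_args_size above.  */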
9452 /* Perform an untyped call and save the state required to perform an
9453 untyped return of whatever value was returned by the given function. */
9456 expand_builtin_apply (function, arguments, argsize)
9457 rtx function, arguments, argsize;
9459 int size, align, regno;
9460 enum machine_mode mode;
9461 rtx incoming_args, result, reg, dest, call_insn;
9462 rtx old_stack_level = 0;
9463 rtx call_fusage = 0;
9465 /* Create a block where the return registers can be saved. */
9466 result = assign_stack_local (BLKmode, apply_result_size (), -1);
9468 /* ??? The argsize value should be adjusted here. */
9470 /* Fetch the arg pointer from the ARGUMENTS block. */
9471 incoming_args = gen_reg_rtx (Pmode);
9472 emit_move_insn (incoming_args,
9473 gen_rtx (MEM, Pmode, arguments));
9474 #ifndef STACK_GROWS_DOWNWARD
9475 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9476 incoming_args, 0, OPTAB_LIB_WIDEN);
9479 /* Perform postincrements before actually calling the function. */
9482 /* Push a new argument block and copy the arguments. */
9483 do_pending_stack_adjust ();
9484 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
9486 /* Push a block of memory onto the stack to store the memory arguments.
9487 Save the address in a register, and copy the memory arguments. ??? I
9488 haven't figured out how the calling convention macros affect this,
9489 but it's likely that the source and/or destination addresses in
9490 the block copy will need updating in machine specific ways. */
9491 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
9492 emit_block_move (gen_rtx (MEM, BLKmode, dest),
9493 gen_rtx (MEM, BLKmode, incoming_args),
9495 PARM_BOUNDARY / BITS_PER_UNIT);
9497 /* Refer to the argument block. */
9499 arguments = gen_rtx (MEM, BLKmode, arguments);
9501 /* Walk past the arg-pointer and structure value address. */
9502 size = GET_MODE_SIZE (Pmode);
9503 if (struct_value_rtx)
9504 size += GET_MODE_SIZE (Pmode);
9506 /* Restore each of the registers previously saved. Make USE insns
9507 for each of these registers for use in making the call. */
9508 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9509 if ((mode = apply_args_mode[regno]) != VOIDmode)
9511 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9512 if (size % align != 0)
9513 size = CEIL (size, align) * align;
9514 reg = gen_rtx (REG, mode, regno);
9515 emit_move_insn (reg,
9516 change_address (arguments, mode,
9517 plus_constant (XEXP (arguments, 0),
9520 use_reg (&call_fusage, reg);
9521 size += GET_MODE_SIZE (mode);
9524 /* Restore the structure value address unless this is passed as an
9525 "invisible" first argument. */
9526 size = GET_MODE_SIZE (Pmode);
9527 if (struct_value_rtx)
9529 rtx value = gen_reg_rtx (Pmode);
9530 emit_move_insn (value,
9531 change_address (arguments, Pmode,
9532 plus_constant (XEXP (arguments, 0),
9534 emit_move_insn (struct_value_rtx, value);
9535 if (GET_CODE (struct_value_rtx) == REG)
9536 use_reg (&call_fusage, struct_value_rtx);
9537 size += GET_MODE_SIZE (Pmode);
9540 /* All arguments and registers used for the call are set up by now! */
9541 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
9543 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
9544 and we don't want to load it into a register as an optimization,
9545 because prepare_call_address already did it if it should be done. */
9546 if (GET_CODE (function) != SYMBOL_REF)
9547 function = memory_address (FUNCTION_MODE, function);
9549 /* Generate the actual call instruction and save the return value. */
9550 #ifdef HAVE_untyped_call
9551 if (HAVE_untyped_call)
9552 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
9553 result, result_vector (1, result)));
9556 #ifdef HAVE_call_value
9557 if (HAVE_call_value)
9561 /* Locate the unique return register. It is not possible to
9562 express a call that sets more than one return register using
9563 call_value; use untyped_call for that. In fact, untyped_call
9564 only needs to save the return registers in the given block. */
9565 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9566 if ((mode = apply_result_mode[regno]) != VOIDmode)
9569 abort (); /* HAVE_untyped_call required. */
9570 valreg = gen_rtx (REG, mode, regno);
9573 emit_call_insn (gen_call_value (valreg,
9574 gen_rtx (MEM, FUNCTION_MODE, function),
9575 const0_rtx, NULL_RTX, const0_rtx));
9577 emit_move_insn (change_address (result, GET_MODE (valreg),
9585 /* Find the CALL insn we just emitted. */
9586 for (call_insn = get_last_insn ();
9587 call_insn && GET_CODE (call_insn) != CALL_INSN;
9588 call_insn = PREV_INSN (call_insn))
9594 /* Put the register usage information on the CALL. If there is already
9595 some usage information, put ours at the end. */
9596 if (CALL_INSN_FUNCTION_USAGE (call_insn))
9600 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
9601 link = XEXP (link, 1))
9604 XEXP (link, 1) = call_fusage;
9607 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
9609 /* Restore the stack. */
9610 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
9612 /* Return the address of the result block. */
9613 return copy_addr_to_reg (XEXP (result, 0));
9616 /* Perform an untyped return. */
9619 expand_builtin_return (result)
9622 int size, align, regno;
9623 enum machine_mode mode;
9625 rtx call_fusage = 0;
9627 apply_result_size ();
9628 result = gen_rtx (MEM, BLKmode, result);
9630 #ifdef HAVE_untyped_return
9631 if (HAVE_untyped_return)
9633 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
9639 /* Restore the return value and note that each value is used. */
9641 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9642 if ((mode = apply_result_mode[regno]) != VOIDmode)
9644 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9645 if (size % align != 0)
9646 size = CEIL (size, align) * align;
9647 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9648 emit_move_insn (reg,
9649 change_address (result, mode,
9650 plus_constant (XEXP (result, 0),
9653 push_to_sequence (call_fusage);
9654 emit_insn (gen_rtx (USE, VOIDmode, reg));
9655 call_fusage = get_insns ();
9657 size += GET_MODE_SIZE (mode);
9660 /* Put the USE insns before the return. */
9661 emit_insns (call_fusage);
9663 /* Return whatever value was restored by jumping directly to the end of the function. */
9665 expand_null_return ();
9668 /* Expand code for a post- or pre- increment or decrement
9669 and return the RTX for the result.
9670 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9673 expand_increment (exp, post, ignore)
9677 register rtx op0, op1;
9678 register rtx temp, value;
9679 register tree incremented = TREE_OPERAND (exp, 0);
9680 optab this_optab = add_optab;
9682 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9683 int op0_is_copy = 0;
9684 int single_insn = 0;
9685 /* 1 means we can't store into OP0 directly,
9686 because it is a subreg narrower than a word,
9687 and we don't dare clobber the rest of the word. */
9690 if (output_bytecode)
9692 bc_expand_expr (exp);
9696 /* Stabilize any component ref that might need to be
9697 evaluated more than once below. */
9699 || TREE_CODE (incremented) == BIT_FIELD_REF
9700 || (TREE_CODE (incremented) == COMPONENT_REF
9701 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9702 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9703 incremented = stabilize_reference (incremented);
9704 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9705 ones into save exprs so that they don't accidentally get evaluated
9706 more than once by the code below. */
9707 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9708 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9709 incremented = save_expr (incremented);
9711 /* Compute the operands as RTX.
9712 Note whether OP0 is the actual lvalue or a copy of it:
9713 I believe it is a copy iff it is a register or subreg
9714 and insns were generated in computing it. */
9716 temp = get_last_insn ();
9717 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9719 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9720 in place but instead must do sign- or zero-extension during assignment,
9721 so we copy it into a new register and let the code below use it as
9724 Note that we can safely modify this SUBREG since it is known not to be
9725 shared (it was made by the expand_expr call above). */
9727 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9730 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9734 else if (GET_CODE (op0) == SUBREG
9735 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9737 /* We cannot increment this SUBREG in place. If we are
9738 post-incrementing, get a copy of the old value. Otherwise,
9739 just mark that we cannot increment in place. */
9741 op0 = copy_to_reg (op0);
9746 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9747 && temp != get_last_insn ());
9748 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9749 EXPAND_MEMORY_USE_BAD);
9751 /* Decide whether incrementing or decrementing. */
9752 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9753 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9754 this_optab = sub_optab;
9756 /* Convert decrement by a constant into a negative increment. */
9757 if (this_optab == sub_optab
9758 && GET_CODE (op1) == CONST_INT)
9760 op1 = GEN_INT (- INTVAL (op1));
9761 this_optab = add_optab;
9764 /* For a preincrement, see if we can do this with a single instruction. */
9767 icode = (int) this_optab->handlers[(int) mode].insn_code;
9768 if (icode != (int) CODE_FOR_nothing
9769 /* Make sure that OP0 is valid for operands 0 and 1
9770 of the insn we want to queue. */
9771 && (*insn_operand_predicate[icode][0]) (op0, mode)
9772 && (*insn_operand_predicate[icode][1]) (op0, mode)
9773 && (*insn_operand_predicate[icode][2]) (op1, mode))
9777 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9778 then we cannot just increment OP0. We must therefore contrive to
9779 increment the original value. Then, for postincrement, we can return
9780 OP0 since it is a copy of the old value. For preincrement, expand here
9781 unless we can do it with a single insn.
9783 Likewise if storing directly into OP0 would clobber high bits
9784 we need to preserve (bad_subreg). */
9785 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9787 /* This is the easiest way to increment the value wherever it is.
9788 Problems with multiple evaluation of INCREMENTED are prevented
9789 because either (1) it is a component_ref or preincrement,
9790 in which case it was stabilized above, or (2) it is an array_ref
9791 with constant index in an array in a register, which is
9792 safe to reevaluate. */
9793 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9794 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9795 ? MINUS_EXPR : PLUS_EXPR),
9798 TREE_OPERAND (exp, 1));
9800 while (TREE_CODE (incremented) == NOP_EXPR
9801 || TREE_CODE (incremented) == CONVERT_EXPR)
9803 newexp = convert (TREE_TYPE (incremented), newexp);
9804 incremented = TREE_OPERAND (incremented, 0);
9807 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9808 return post ? op0 : temp;
9813 /* We have a true reference to the value in OP0.
9814 If there is an insn to add or subtract in this mode, queue it.
9815 Queueing the increment insn avoids the register shuffling
9816 that often results if we must increment now and first save
9817 the old value for subsequent use. */
9819 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9820 op0 = stabilize (op0);
9823 icode = (int) this_optab->handlers[(int) mode].insn_code;
9824 if (icode != (int) CODE_FOR_nothing
9825 /* Make sure that OP0 is valid for operands 0 and 1
9826 of the insn we want to queue. */
9827 && (*insn_operand_predicate[icode][0]) (op0, mode)
9828 && (*insn_operand_predicate[icode][1]) (op0, mode))
9830 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9831 op1 = force_reg (mode, op1);
9833 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9835 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9837 rtx addr = force_reg (Pmode, XEXP (op0, 0));
9840 op0 = change_address (op0, VOIDmode, addr);
9841 temp = force_reg (GET_MODE (op0), op0);
9842 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9843 op1 = force_reg (mode, op1);
9845 /* The increment queue is LIFO, thus we have to `queue'
9846 the instructions in reverse order. */
9847 enqueue_insn (op0, gen_move_insn (op0, temp));
9848 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9853 /* Preincrement, or we can't increment with one simple insn. */
9855 /* Save a copy of the value before inc or dec, to return it later. */
9856 temp = value = copy_to_reg (op0);
9858 /* Arrange to return the incremented value. */
9859 /* Copy the rtx because expand_binop will protect from the queue,
9860 and the results of that would be invalid for us to return
9861 if our caller does emit_queue before using our result. */
9862 temp = copy_rtx (value = op0);
9864 /* Increment however we can. */
9865 op1 = expand_binop (mode, this_optab, value, op1,
9866 flag_check_memory_usage ? NULL_RTX : op0,
9867 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9868 /* Make sure the value is stored into OP0. */
9870 emit_move_insn (op0, op1);
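/* Net effect of the two paths (user-level sketch):

       y = x++;   ... y receives the old value of x
       y = ++x;   ... x is updated first, y receives the new value

   which is why the post case saves a copy before the add and the pre
   case returns the updated lvalue itself.  */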
9875 /* Expand all function calls contained within EXP, innermost ones first.
9876 But don't look within expressions that have sequence points.
9877 For each CALL_EXPR, record the rtx for its value
9878 in the CALL_EXPR_RTL field. */
9881 preexpand_calls (exp)
9884 register int nops, i;
9885 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9887 if (! do_preexpand_calls)
9890 /* Only expressions and references can contain calls. */
9892 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9895 switch (TREE_CODE (exp))
9898 /* Do nothing if already expanded. */
9899 if (CALL_EXPR_RTL (exp) != 0
9900 /* Do nothing if the call returns a variable-sized object. */
9901 || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
9902 /* Do nothing to built-in functions. */
9903 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9904 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9906 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9909 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9914 case TRUTH_ANDIF_EXPR:
9915 case TRUTH_ORIF_EXPR:
9916 /* If we find one of these, then we can be sure
9917 the adjust will be done for it (since it makes jumps).
9918 Do it now, so that if this is inside an argument
9919 of a function, we don't get the stack adjustment
9920 after some other args have already been pushed. */
9921 do_pending_stack_adjust ();
9926 case WITH_CLEANUP_EXPR:
9927 case CLEANUP_POINT_EXPR:
9931 if (SAVE_EXPR_RTL (exp) != 0)
9935 nops = tree_code_length[(int) TREE_CODE (exp)];
9936 for (i = 0; i < nops; i++)
9937 if (TREE_OPERAND (exp, i) != 0)
9939 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9940 if (type == 'e' || type == '<' || type == '1' || type == '2'
9942 preexpand_calls (TREE_OPERAND (exp, i));
9946 /* At the start of a function, record that we have no previously-pushed
9947 arguments waiting to be popped. */
9950 init_pending_stack_adjust ()
9952 pending_stack_adjust = 0;
9955 /* When exiting from function, if safe, clear out any pending stack adjust
9956 so the adjustment won't get done. */
9959 clear_pending_stack_adjust ()
9961 #ifdef EXIT_IGNORE_STACK
9963 && ! flag_omit_frame_pointer && EXIT_IGNORE_STACK
9964 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9965 && ! flag_inline_functions)
9966 pending_stack_adjust = 0;
9970 /* Pop any previously-pushed arguments that have not been popped yet. */
9973 do_pending_stack_adjust ()
9975 if (inhibit_defer_pop == 0)
9977 if (pending_stack_adjust != 0)
9978 adjust_stack (GEN_INT (pending_stack_adjust));
9979 pending_stack_adjust = 0;
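/* E.g. after `f (1); g (2);' with deferred pops enabled, the argument
   bytes pushed for both calls accumulate in pending_stack_adjust and
   are popped by the single adjust_stack above, rather than once per
   call.  */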
9983 /* Expand conditional expressions. */
9985 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9986 LABEL is an rtx of code CODE_LABEL, in this function and all the
9990 jumpifnot (exp, label)
9994 do_jump (exp, label, NULL_RTX);
9997 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
10000 jumpif (exp, label)
10004 do_jump (exp, NULL_RTX, label);
10007 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
10008 the result is zero, or IF_TRUE_LABEL if the result is one.
10009 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
10010 meaning fall through in that case.
10012 do_jump always does any pending stack adjust except when it does not
10013 actually perform a jump. An example where there is no jump
10014 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
10016 This function is responsible for optimizing cases such as
10017 &&, || and comparison operators in EXP. */
10020 do_jump (exp, if_false_label, if_true_label)
10022 rtx if_false_label, if_true_label;
10024 register enum tree_code code = TREE_CODE (exp);
10025 /* Some cases need to create a label to jump to
10026 in order to properly fall through.
10027 These cases set DROP_THROUGH_LABEL nonzero. */
10028 rtx drop_through_label = 0;
10030 rtx comparison = 0;
10033 enum machine_mode mode;
10043 temp = integer_zerop (exp) ? if_false_label : if_true_label;
10049 /* This is not true with #pragma weak */
10051 /* The address of something can never be zero. */
10053 emit_jump (if_true_label);
10058 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
10059 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
10060 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
10063 /* If we are narrowing the operand, we have to do the compare in the
10065 if ((TYPE_PRECISION (TREE_TYPE (exp))
10066 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10068 case NON_LVALUE_EXPR:
10069 case REFERENCE_EXPR:
10074 /* These cannot change zero->non-zero or vice versa. */
10075 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10079 /* This is never less insns than evaluating the PLUS_EXPR followed by
10080 a test and can be longer if the test is eliminated. */
10082 /* Reduce to minus. */
10083 exp = build (MINUS_EXPR, TREE_TYPE (exp),
10084 TREE_OPERAND (exp, 0),
10085 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
10086 TREE_OPERAND (exp, 1))));
10087 /* Process as MINUS. */
10091 /* Non-zero iff operands of minus differ. */
10092 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
10093 TREE_OPERAND (exp, 0),
10094 TREE_OPERAND (exp, 1)),
10099 /* If we are AND'ing with a small constant, do this comparison in the
10100 smallest type that fits. If the machine doesn't have comparisons
10101 that small, it will be converted back to the wider comparison.
10102 This helps if we are testing the sign bit of a narrower object.
10103 combine can't do this for us because it can't know whether a
10104 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
10106 if (! SLOW_BYTE_ACCESS
10107 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
10108 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
10109 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
10110 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
10111 && (type = type_for_mode (mode, 1)) != 0
10112 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10113 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10114 != CODE_FOR_nothing))
10116 do_jump (convert (type, exp), if_false_label, if_true_label);
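/* E.g. for `if (i & 0x80)' with int I, only bit 7 matters, so the
   convert above narrows the test to a QImode comparison of the low
   byte on targets that have QImode compares.  */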
10121 case TRUTH_NOT_EXPR:
10122 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10125 case TRUTH_ANDIF_EXPR:
10126 if (if_false_label == 0)
10127 if_false_label = drop_through_label = gen_label_rtx ();
10128 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
10129 start_cleanup_deferal ();
10130 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10131 end_cleanup_deferal ();
10134 case TRUTH_ORIF_EXPR:
10135 if (if_true_label == 0)
10136 if_true_label = drop_through_label = gen_label_rtx ();
10137 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
10138 start_cleanup_deferal ();
10139 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10140 end_cleanup_deferal ();
10143 case COMPOUND_EXPR:
10144 push_temp_slots ();
10145 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
10146 preserve_temp_slots (NULL_RTX);
10147 free_temp_slots ();
10150 do_pending_stack_adjust ();
10151 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10154 case COMPONENT_REF:
10155 case BIT_FIELD_REF:
10158 int bitsize, bitpos, unsignedp;
10159 enum machine_mode mode;
10165 /* Get description of this reference. We don't actually care
10166 about the underlying object here. */
10167 get_inner_reference (exp, &bitsize, &bitpos, &offset,
10168 &mode, &unsignedp, &volatilep,
10171 type = type_for_size (bitsize, unsignedp);
10172 if (! SLOW_BYTE_ACCESS
10173 && type != 0 && bitsize >= 0
10174 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10175 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10176 != CODE_FOR_nothing))
10178 do_jump (convert (type, exp), if_false_label, if_true_label);
10185 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10186 if (integer_onep (TREE_OPERAND (exp, 1))
10187 && integer_zerop (TREE_OPERAND (exp, 2)))
10188 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10190 else if (integer_zerop (TREE_OPERAND (exp, 1))
10191 && integer_onep (TREE_OPERAND (exp, 2)))
10192 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10196 register rtx label1 = gen_label_rtx ();
10197 drop_through_label = gen_label_rtx ();
10199 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10201 start_cleanup_deferal ();
10202 /* Now the THEN-expression. */
10203 do_jump (TREE_OPERAND (exp, 1),
10204 if_false_label ? if_false_label : drop_through_label,
10205 if_true_label ? if_true_label : drop_through_label);
10206 /* In case the do_jump just above never jumps. */
10207 do_pending_stack_adjust ();
10208 emit_label (label1);
10210 /* Now the ELSE-expression. */
10211 do_jump (TREE_OPERAND (exp, 2),
10212 if_false_label ? if_false_label : drop_through_label,
10213 if_true_label ? if_true_label : drop_through_label);
10214 end_cleanup_deferal ();
10220 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10222 if (integer_zerop (TREE_OPERAND (exp, 1)))
10223 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10224 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10225 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10228 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10229 fold (build (EQ_EXPR, TREE_TYPE (exp),
10230 fold (build1 (REALPART_EXPR,
10231 TREE_TYPE (inner_type),
10232 TREE_OPERAND (exp, 0))),
10233 fold (build1 (REALPART_EXPR,
10234 TREE_TYPE (inner_type),
10235 TREE_OPERAND (exp, 1))))),
10236 fold (build (EQ_EXPR, TREE_TYPE (exp),
10237 fold (build1 (IMAGPART_EXPR,
10238 TREE_TYPE (inner_type),
10239 TREE_OPERAND (exp, 0))),
10240 fold (build1 (IMAGPART_EXPR,
10241 TREE_TYPE (inner_type),
10242 TREE_OPERAND (exp, 1))))))),
10243 if_false_label, if_true_label);
10244 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10245 && !can_compare_p (TYPE_MODE (inner_type)))
10246 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10248 comparison = compare (exp, EQ, EQ);
10254 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10256 if (integer_zerop (TREE_OPERAND (exp, 1)))
10257 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10258 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10259 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10262 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10263 fold (build (NE_EXPR, TREE_TYPE (exp),
10264 fold (build1 (REALPART_EXPR,
10265 TREE_TYPE (inner_type),
10266 TREE_OPERAND (exp, 0))),
10267 fold (build1 (REALPART_EXPR,
10268 TREE_TYPE (inner_type),
10269 TREE_OPERAND (exp, 1))))),
10270 fold (build (NE_EXPR, TREE_TYPE (exp),
10271 fold (build1 (IMAGPART_EXPR,
10272 TREE_TYPE (inner_type),
10273 TREE_OPERAND (exp, 0))),
10274 fold (build1 (IMAGPART_EXPR,
10275 TREE_TYPE (inner_type),
10276 TREE_OPERAND (exp, 1))))))),
10277 if_false_label, if_true_label);
10278 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10279 && !can_compare_p (TYPE_MODE (inner_type)))
10280 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10282 comparison = compare (exp, NE, NE);
10287 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10289 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10290 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10292 comparison = compare (exp, LT, LTU);
10296 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10298 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10299 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10301 comparison = compare (exp, LE, LEU);
10305 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10307 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10308 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10310 comparison = compare (exp, GT, GTU);
10314 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10316 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10317 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10319 comparison = compare (exp, GE, GEU);
    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
	 comparisons and tests from non-SI objects to have different code
	 sequences.  */
      /* Copy to register to avoid generating bad insns by cse
	 from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
	temp = copy_to_reg (temp);
#endif
10334 do_pending_stack_adjust ();
10335 if (GET_CODE (temp) == CONST_INT)
10336 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10337 else if (GET_CODE (temp) == LABEL_REF)
10338 comparison = const_true_rtx;
10339 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10340 && !can_compare_p (GET_MODE (temp)))
10341 /* Note swapping the labels gives us not-equal. */
10342 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10343 else if (GET_MODE (temp) != VOIDmode)
10344 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
10345 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
				       GET_MODE (temp), NULL_RTX, 0);
      break;
    }
  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();
10354 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10355 straight into a conditional jump instruction as the jump condition.
10356 Otherwise, all the work has been done already. */
  if (comparison == const_true_rtx)
    {
      if (if_true_label)
	emit_jump (if_true_label);
    }
  else if (comparison == const0_rtx)
    {
      if (if_false_label)
	emit_jump (if_false_label);
    }
  else if (comparison)
    do_jump_for_compare (comparison, if_false_label, if_true_label);
  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
	 do any stack adjusts from that code, before the place
	 where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
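/* Illustration only -- not part of the original file.  The complex EQ/NE
   decomposition built in do_jump above, restated in plain host C terms;
   "complex_eq" and its parameters are invented names.  */
#if 0
static int
complex_eq (double re0, double im0, double re1, double im1)
{
  /* EQ on a complex value is the TRUTH_ANDIF of the two part
     comparisons; NE is the TRUTH_ORIF of the negated ones.  */
  return re0 == re1 && im0 == im1;
}
#endif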
10381 /* Given a comparison expression EXP for values too wide to be compared
10382 with one insn, test the comparison and jump to the appropriate label.
10383 The code of EXP is ignored; we always test GT if SWAP is 0,
10384 and LT if SWAP is 1. */
static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int i;
10400 if (! if_true_label || ! if_false_label)
10401 drop_through_label = gen_label_rtx ();
10402 if (! if_true_label)
10403 if_true_label = drop_through_label;
10404 if (! if_false_label)
10405 if_false_label = drop_through_label;
  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}

      /* All but the high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
			       (unsignedp || i > 0) ? GTU : GT,
			       unsignedp, word_mode, NULL_RTX, 0);
10428 if (comp == const_true_rtx)
10429 emit_jump (if_true_label);
10430 else if (comp != const0_rtx)
10431 do_jump_for_compare (comp, NULL_RTX, if_true_label);
      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }
10442 if (if_false_label)
10443 emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
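/* Illustration only -- not part of the original file.  The loop above is
   the standard word-at-a-time comparison.  A minimal host-side sketch of
   the same idea for an unsigned two-word value stored high word first;
   "wide_gtu" and its parameters are invented names.  */
#if 0
static int
wide_gtu (unsigned long hi0, unsigned long lo0,
	  unsigned long hi1, unsigned long lo1)
{
  if (hi0 != hi1)	/* Unequal high words decide the comparison.  */
    return hi0 > hi1;
  return lo0 > lo1;	/* Lower words matter only when the high words
			   are equal, and are compared unsigned.  */
}
#endif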
10448 /* Compare OP0 with OP1, word at a time, in mode MODE.
10449 UNSIGNEDP says to do unsigned comparison.
10450 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
static void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1,
			      if_false_label, if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;
10463 if (! if_true_label || ! if_false_label)
10464 drop_through_label = gen_label_rtx ();
10465 if (! if_true_label)
10466 if_true_label = drop_through_label;
10467 if (! if_false_label)
10468 if_false_label = drop_through_label;
  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}

      /* All but the high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
			       (unsignedp || i > 0) ? GTU : GT,
			       unsignedp, word_mode, NULL_RTX, 0);
10491 if (comp == const_true_rtx)
10492 emit_jump (if_true_label);
10493 else if (comp != const0_rtx)
10494 do_jump_for_compare (comp, NULL_RTX, if_true_label);
      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }
10505 if (if_false_label)
10506 emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
10511 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10512 with one insn, test the comparison and jump to the appropriate label. */
static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;
10526 if (! if_false_label)
10527 drop_through_label = if_false_label = gen_label_rtx ();
  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
				   operand_subword_force (op1, i, mode),
				   EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
				   word_mode, NULL_RTX, 0);
10535 if (comp == const_true_rtx)
10536 emit_jump (if_false_label);
10537 else if (comp != const0_rtx)
10538 do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
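/* Illustration only -- not part of the original file.  Equality by parts
   simply requires every word pair to be equal; any differing pair decides
   "not equal" immediately.  "wide_eq" is an invented name.  */
#if 0
static int
wide_eq (unsigned long *op0, unsigned long *op1, int nwords)
{
  int i;

  for (i = 0; i < nwords; i++)
    if (op0[i] != op1[i])	/* One unequal word pair settles it.  */
      return 0;
  return 1;
}
#endif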
10547 /* Jump according to whether OP0 is 0.
10548 We assume that OP0 has an integer mode that is too wide
10549 for the available compare insns. */
static void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  int i;
  rtx drop_through_label = 0;
10560 if (! if_false_label)
10561 drop_through_label = if_false_label = gen_label_rtx ();
  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i,
							  GET_MODE (op0)),
				   const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
10568 if (comp == const_true_rtx)
10569 emit_jump (if_false_label);
10570 else if (comp != const0_rtx)
10571 do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
10580 /* Given a comparison expression in rtl form, output conditional branches to
10581 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
static void
do_jump_for_compare (comparison, if_false_label, if_true_label)
     rtx comparison, if_false_label, if_true_label;
{
  if (if_true_label)
    {
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
      else
	abort ();

      if (if_false_label)
	emit_jump (if_false_label);
    }
  else if (if_false_label)
    {
      rtx insn;
      rtx prev = get_last_insn ();
      rtx branch = 0;
      /* Output the branch with the opposite condition.  Then try to invert
	 what is generated.  If more than one insn is a branch, or if the
	 branch is not the last insn written, abort.  If we can't invert
	 the branch, make a true label, redirect this jump to that,
	 emit a jump to the false label, and define the true label.  */
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
      else
	abort ();
10614 /* Here we get the first insn that was just emitted. It used to be the
10615 case that, on some machines, emitting the branch would discard
10616 the previous compare insn and emit a replacement. This isn't
10617 done anymore, but abort if we see that PREV is deleted. */
      if (prev == 0)
	insn = get_insns ();
      else if (INSN_DELETED_P (prev))
	abort ();
      else
	insn = NEXT_INSN (prev);
      for (; insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == JUMP_INSN)
	  {
	    if (branch)
	      abort ();
	    branch = insn;
	  }

      if (branch != get_last_insn ())
	abort ();

      JUMP_LABEL (branch) = if_false_label;
      if (! invert_jump (branch, if_false_label))
	{
	  if_true_label = gen_label_rtx ();
	  redirect_jump (branch, if_true_label);
	  emit_jump (if_false_label);
	  emit_label (if_true_label);
	}
    }
}
10648 /* Generate code for a comparison expression EXP
10649 (including code to compute the values to be compared)
10650 and set (CC0) according to the result.
10651 SIGNED_CODE should be the rtx operation for this comparison for
10652 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10654 We force a stack adjustment unless there are currently
10655 things pushed on the stack that aren't yet used. */
static rtx
compare (exp, signed_code, unsigned_code)
     tree exp;
     enum rtx_code signed_code, unsigned_code;
{
  register rtx op0
    = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  register rtx op1
    = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10666 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
10667 register enum machine_mode mode = TYPE_MODE (type);
10668 int unsignedp = TREE_UNSIGNED (type);
10669 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
10671 #ifdef HAVE_canonicalize_funcptr_for_compare
10672 /* If function pointers need to be "canonicalized" before they can
10673 be reliably compared, then canonicalize them. */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
	  == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif
  return compare_from_rtx (op0, op1, code, unsignedp, mode,
			   ((mode == BLKmode)
			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
			   TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
}
10703 /* Like compare but expects the values to compare as two rtx's.
10704 The decision as to signed or unsigned comparison must be made by the caller.
   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */
rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
{
  rtx tem;
10723 /* If one operand is constant, make it the second one. Only do this
10724 if the other operand is not constant as well. */
  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }
  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }
10741 do_pending_stack_adjust ();
  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;
#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));

      unsignedp = 1;
    }
#endif
10769 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
  return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
}
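/* Illustration only -- an invented example, not from the original source.
   The masking in the disabled block above is what lets a signed equality
   test run unsigned: for a hypothetical 8-bit mode on a wider host,
   comparing X == -1 signed is the same as comparing the zero-extended X
   against the masked constant (-1 & 0xff).  */
#if 0
static int
eq_as_unsigned (signed char x)
{
  return (unsigned char) x == ((-1) & 0xff);	/* 0xff after masking */
}
#endif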
10774 /* Generate code to calculate EXP using a store-flag instruction
10775 and return an rtx for the result. EXP is either a comparison
10776 or a TRUTH_NOT_EXPR whose operand is a comparison.
10778 If TARGET is nonzero, store the result there if convenient.
   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.
10783 Return zero if there is no suitable set-flag instruction
10784 available on this machine.
10786 Once expand_expr has been called on the arguments of the comparison,
10787 we are committed to doing the store flag, since it is not safe to
10788 re-evaluate the expression. We emit the store-flag insn by calling
10789 emit_store_flag, but only expand the arguments if we have a reason
10790 to believe that emit_store_flag will be successful. If we think that
10791 it will, but it isn't, we have to simulate the store-flag with a
10792 set/jump/set sequence. */
static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label, pattern, jump_pat;
10812 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10813 result at the end. We can't simply invert the test since it would
10814 have already been inverted if it were valid. This case occurs for
10815 some floating-point comparisons. */
10817 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10818 invert = 1, exp = TREE_OPERAND (exp, 0);
10820 arg0 = TREE_OPERAND (exp, 0);
10821 arg1 = TREE_OPERAND (exp, 1);
10822 type = TREE_TYPE (arg0);
10823 operand_mode = TYPE_MODE (type);
10824 unsignedp = TREE_UNSIGNED (type);
10826 /* We won't bother with BLKmode store-flag operations because it would mean
10827 passing a lot of information to emit_store_flag. */
  if (operand_mode == BLKmode)
    return 0;
10831 /* We won't bother with store-flag operations involving function pointers
10832 when function pointers must be canonicalized before comparisons. */
10833 #ifdef HAVE_canonicalize_funcptr_for_compare
10834 if (HAVE_canonicalize_funcptr_for_compare
10835 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10836 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10838 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10839 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
	      == FUNCTION_TYPE))))
    return 0;
#endif
10847 /* Get the rtx comparison code to use. We know that EXP is a comparison
10848 operation of some type. Some comparisons against 1 and -1 can be
10849 converted to comparisons with zero. Do so here so that the tests
10850 below will be aware that we have a comparison with zero. These
10851 tests will not catch constants in the first operand, but constants
10852 are rarely passed as the first operand. */
  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }
10890 /* Put a constant second. */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }
10897 /* If this is an equality or inequality test of a single bit, we can
10898 do this by shifting the bit being tested to the low-order bit and
10899 masking the result with the constant 1. If the condition was EQ,
10900 we xor it with 1. This does not require an scc insn and is faster
10901 than an scc insn even if we have it. */
10903 if ((code == NE || code == EQ)
10904 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10905 && integer_pow2p (TREE_OPERAND (arg0, 1))
10906 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
    {
      tree inner = TREE_OPERAND (arg0, 0);
      HOST_WIDE_INT tem;
      int bitnum;
      int ops_unsignedp;
10913 tem = INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
10914 NULL_RTX, VOIDmode, 0));
10915 /* In this case, immed_double_const will sign extend the value to make
10916 it look the same on the host and target. We must remove the
10917 sign-extension before calling exact_log2, since exact_log2 will
10918 fail for negative values. */
10919 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT
10920 && BITS_PER_WORD == GET_MODE_BITSIZE (TYPE_MODE (type)))
10921 /* We don't use the obvious constant shift to generate the mask,
10922 because that generates compiler warnings when BITS_PER_WORD is
10923 greater than or equal to HOST_BITS_PER_WIDE_INT, even though this
10924 code is unreachable in that case. */
10925 tem = tem & GET_MODE_MASK (word_mode);
10926 bitnum = exact_log2 (tem);
10928 /* If INNER is a right shift of a constant and it plus BITNUM does
10929 not overflow, adjust BITNUM and INNER. */
10931 if (TREE_CODE (inner) == RSHIFT_EXPR
10932 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10933 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10934 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
10935 < TYPE_PRECISION (type)))
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}
10941 /* If we are going to be able to omit the AND below, we must do our
10942 operations as unsigned. If we must use the AND, we have a choice.
10943 Normally unsigned is faster, but for some machines signed is. */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
		       : 1
#endif
		       );
      if (subtarget == 0 || GET_CODE (subtarget) != REG
	  || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, inner))
	subtarget = 0;
10957 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10960 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10961 size_int (bitnum), subtarget, ops_unsignedp);
10963 if (GET_MODE (op0) != mode)
10964 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10966 if ((code == EQ && ! invert) || (code == NE && invert))
10967 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10968 ops_unsignedp, OPTAB_LIB_WIDEN);
10970 /* Put the AND last so it can combine with more things. */
10971 if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }
10977 /* Now see if we are likely to be able to do this. Return if not. */
  if (! can_compare_p (operand_mode))
    return 0;
10980 icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }
11001 preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1))
    subtarget = 0;
11007 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
11008 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  if (target == 0)
    target = gen_reg_rtx (mode);
  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if emit_store_flag does anything, it will succeed and
     OP0 and OP1 will not be used subsequently.  */
11017 result = emit_store_flag (target, code,
11018 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
11019 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
11020 operand_mode, unsignedp, 1);
  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }
11030 /* If this failed, we have to do this with set/compare/jump/set code. */
11031 if (GET_CODE (target) != REG
11032 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
11033 target = gen_reg_rtx (GET_MODE (target));
11035 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
11036 result = compare_from_rtx (op0, op1, code, unsignedp,
11037 operand_mode, NULL_RTX, 0);
11038 if (GET_CODE (result) == CONST_INT)
11039 return (((result == const0_rtx && ! invert)
11040 || (result != const0_rtx && invert))
11041 ? const0_rtx : const1_rtx);
11043 label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();
11047 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
11048 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
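/* Illustration only -- not part of the original file.  The single-bit
   special case above stores the flag as ((X >> BITNUM) & 1), xor'ed with
   1 when the tree code was EQ.  A host-side sketch; "store_flag_bit" and
   its parameters are invented names.  */
#if 0
static unsigned int
store_flag_bit (unsigned int x, int bitnum, int test_eq)
{
  unsigned int t = (x >> bitnum) & 1;	/* shift bit to LSB, mask to one bit */
  return test_eq ? t ^ 1 : t;		/* EQ inverts the NE result */
}
#endif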
11054 /* Generate a tablejump instruction (used for switch statements). */
11056 #ifdef HAVE_tablejump
11058 /* INDEX is the value being switched on, with the lowest value
11059 in the table already subtracted.
11060 MODE is its expected mode (needed if INDEX is constant).
11061 RANGE is the length of the jump table.
11062 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11064 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11065 index value is out of range. */
void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;
11074 /* Do an unsigned comparison (in the proper mode) between the index
11075 expression and the value which represents the length of the range.
11076 Since we just finished subtracting the lower bound of the range
11077 from the index expression, this comparison allows us to simultaneously
11078 check that the original index expression value is both greater than
11079 or equal to the minimum value of the range and less than or equal to
11080 the maximum value of the range. */
11082 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
11083 emit_jump_insn (gen_bgtu (default_label));
11085 /* If index is in range, it must fit in Pmode.
11086 Convert to Pmode so we can index with it. */
11088 index = convert_to_mode (Pmode, index, 1);
11090 /* Don't let a MEM slip thru, because then INDEX that comes
11091 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11092 and break_out_memory_refs will go to work on it and mess it up. */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif
11098 /* If flag_force_addr were to affect this address
11099 it could interfere with the tricky assumptions made
11100 about addresses that contain label-refs,
11101 which may be valid only very near the tablejump itself. */
11102 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11103 GET_MODE_SIZE, because this indicates how large insns are. The other
11104 uses should all be Pmode, because they are addresses. This code
11105 could fail if addresses and insns are not the same size. */
11106 index = gen_rtx (PLUS, Pmode,
11107 gen_rtx (MULT, Pmode, index,
11108 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
11109 gen_rtx (LABEL_REF, Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
11116 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11117 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
11118 RTX_UNCHANGING_P (vector) = 1;
11119 convert_move (temp, vector, 0);
11121 emit_jump_insn (gen_tablejump (temp, table_label));
#ifndef CASE_VECTOR_PC_RELATIVE
  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! flag_pic)
    emit_barrier ();
#endif
}
11131 #endif /* HAVE_tablejump */
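/* Illustration only -- not from the original source.  The dispatch above
   in host C terms: because the lower bound was already subtracted from
   INDEX, a single unsigned comparison rejects both values below the
   minimum and above the maximum.  "dispatch" and its parameters are
   invented names, and the table is assumed to hold absolute addresses.  */
#if 0
static void *
dispatch (void **table, unsigned long index, unsigned long range,
	  void *default_target)
{
  if (index > range)		/* unsigned: also catches index < minimum */
    return default_target;
  return table[index];		/* load entry at table + index * entry size */
}
#endif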
11134 /* Emit a suitable bytecode to load a value from memory, assuming a pointer
11135 to that value is on the top of the stack. The resulting type is TYPE, and
11136 the source declaration is DECL. */
void
bc_load_memory (type, decl)
     tree type, decl;
{
  enum bytecode_opcode opcode;
11145 /* Bit fields are special. We only know about signed and
11146 unsigned ints, and enums. The latter are treated as
11147 signed integers. */
  if (DECL_BIT_FIELD (decl))
    if (TREE_CODE (type) == ENUMERAL_TYPE
	|| TREE_CODE (type) == INTEGER_TYPE)
      opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
    else
      abort ();
  else
11156 /* See corresponding comment in bc_store_memory. */
    if (TYPE_MODE (type) == BLKmode
	|| TYPE_MODE (type) == VOIDmode)
      abort ();
    else
      opcode = mode_to_load_map [(int) TYPE_MODE (type)];

  if (opcode == neverneverland)
    abort ();
11166 bc_emit_bytecode (opcode);
11168 #ifdef DEBUG_PRINT_CODE
11169 fputc ('\n', stderr);
11174 /* Store the contents of the second stack slot to the address in the
11175 top stack slot. DECL is the declaration of the destination and is used
11176 to determine whether we're dealing with a bitfield. */
void
bc_store_memory (type, decl)
     tree type, decl;
{
  enum bytecode_opcode opcode;
  if (DECL_BIT_FIELD (decl))
    {
      if (TREE_CODE (type) == ENUMERAL_TYPE
	  || TREE_CODE (type) == INTEGER_TYPE)
	opcode = sstoreBI;
      else
	abort ();
    }
  else if (TYPE_MODE (type) == BLKmode)
    {
      /* Copy structure.  This expands to a block copy instruction, storeBLK.
	 In addition to the arguments expected by the other store instructions,
	 it also expects a type size (SImode) on top of the stack, which is the
	 structure size in size units (usually bytes).  The first two arguments
	 are already on the stack, so we just put the size on level 1.  For some
	 other languages, the size may be variable; this is why we don't encode
	 it as a storeBLK literal, but rather treat it as a full-fledged expression.  */
      bc_expand_expr (TYPE_SIZE (type));
      opcode = storeBLK;
    }
  else
    opcode = mode_to_store_map [(int) TYPE_MODE (type)];

  if (opcode == neverneverland)
    abort ();
11213 bc_emit_bytecode (opcode);
11215 #ifdef DEBUG_PRINT_CODE
11216 fputc ('\n', stderr);
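/* Illustration only -- an invented sketch, not from the original source.
   The storeBLK instruction described above behaves like a block copy
   whose destination, source, and byte count are taken from the
   arithmetic stack.  In plain C terms: */
#if 0
static void
block_store (char *dest, const char *src, unsigned long size)
{
  while (size-- > 0)		/* copy SIZE bytes, lowest address first */
    *dest++ = *src++;
}
#endif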
11221 /* Allocate local stack space sufficient to hold a value of the given
11222 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
11223 integral power of 2. A special case is locals of type VOID, which
11224 have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
11225 remapped into the corresponding attribute of SI. */
rtx
bc_allocate_local (size, alignment)
     int size, alignment;
{
  rtx retval;
  int byte_alignment;
  /* Normalize size and alignment.  */
  if (!size)
    size = UNITS_PER_WORD;
11241 if (alignment < BITS_PER_UNIT)
11242 byte_alignment = 1 << (INT_ALIGN - 1);
11245 byte_alignment = alignment / BITS_PER_UNIT;
11247 if (local_vars_size & (byte_alignment - 1))
11248 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
11250 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
  local_vars_size += size;

  return retval;
}
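/* Illustration only -- not part of the original file.  The adjustment
   above is the usual power-of-two round-up of an offset to an alignment
   boundary; "round_up" is an invented name.  */
#if 0
static int
round_up (int offset, int byte_alignment)	/* byte_alignment: power of 2 */
{
  if (offset & (byte_alignment - 1))
    offset += byte_alignment - (offset & (byte_alignment - 1));
  return offset;
}
#endif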
11257 /* Allocate variable-sized local array. Variable-sized arrays are
11258 actually pointers to the address in memory where they are stored. */
rtx
bc_allocate_variable_array (size)
     tree size;
{
  rtx retval;
  const int ptralign = (1 << (PTR_ALIGN - 1));
11267 /* Align pointer */
11268 if (local_vars_size & ptralign)
11269 local_vars_size += ptralign - (local_vars_size & ptralign);
  /* Note down local space needed: pointer to block; also return
     an rtx for the pointer's location.  */
11274 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
  local_vars_size += POINTER_SIZE / BITS_PER_UNIT;

  return retval;
}
11280 /* Push the machine address for the given external variable offset. */
void
bc_load_externaddr (externaddr)
     rtx externaddr;
{
11286 bc_emit_bytecode (constP);
11287 bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
11288 BYTECODE_BC_LABEL (externaddr)->offset);
11290 #ifdef DEBUG_PRINT_CODE
11291 fputc ('\n', stderr);
11296 /* Like above, but expects an IDENTIFIER. */
void
bc_load_externaddr_id (id, offset)
     tree id;
     int offset;
{
  if (!IDENTIFIER_POINTER (id))
    abort ();
11306 bc_emit_bytecode (constP);
11307 bc_emit_code_labelref (xstrdup (IDENTIFIER_POINTER (id)), offset);
11309 #ifdef DEBUG_PRINT_CODE
11310 fputc ('\n', stderr);
11315 /* Push the machine address for the given local variable offset. */
void
bc_load_localaddr (localaddr)
     rtx localaddr;
{
  bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
}
11325 /* Push the machine address for the given parameter offset.
11326 NOTE: offset is in bits. */
void
bc_load_parmaddr (parmaddr)
     rtx parmaddr;
{
  bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
			      / BITS_PER_UNIT));
}
11337 /* Convert a[i] into *(a + i). */
tree
bc_canonicalize_array_ref (exp)
     tree exp;
{
11343 tree type = TREE_TYPE (exp);
11344 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
11345 TREE_OPERAND (exp, 0));
11346 tree index = TREE_OPERAND (exp, 1);
11349 /* Convert the integer argument to a type the same size as a pointer
11350 so the multiply won't overflow spuriously. */
11352 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
11353 index = convert (type_for_size (POINTER_SIZE, 0), index);
11355 /* The array address isn't volatile even if the array is.
11356 (Of course this isn't terribly relevant since the bytecode
11357 translator treats nearly everything as volatile anyway.) */
11358 TREE_THIS_VOLATILE (array_adr) = 0;
  return build1 (INDIRECT_REF, type,
		 fold (build (PLUS_EXPR,
			      TYPE_POINTER_TO (type),
			      array_adr,
			      fold (build (MULT_EXPR,
					   TYPE_POINTER_TO (type),
					   index,
					   size_in_bytes (type))))));
}
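/* Illustration only -- an invented example, not from the original source.
   For an int array, the rewrite above corresponds to the familiar
   pointer-arithmetic identity: */
#if 0
static int
array_ref_lowered (int *a, long i)
{
  /* a[i]  ==>  *(a + i), i.e. *(base + i * sizeof (element))  */
  return *(int *) ((char *) a + i * (long) sizeof (int));
}
#endif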
11371 /* Load the address of the component referenced by the given
11372 COMPONENT_REF expression.
11374 Returns innermost lvalue. */
static tree
bc_expand_component_address (exp)
     tree exp;
{
  tree tem;
  enum machine_mode mode;
  int bitpos = 0;
  HOST_WIDE_INT SIval;
11386 tem = TREE_OPERAND (exp, 1);
11387 mode = DECL_MODE (tem);
11390 /* Compute cumulative bit offset for nested component refs
11391 and array refs, and find the ultimate containing object. */
  for (tem = exp;; tem = TREE_OPERAND (tem, 0))
    {
      if (TREE_CODE (tem) == COMPONENT_REF)
	bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
      else if (TREE_CODE (tem) == ARRAY_REF
	       && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
	       && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
	bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
		   * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
		   /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
      else
	break;
    }
11409 bc_expand_expr (tem);
11412 /* For bitfields also push their offset and size */
  if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
    bc_push_offset_and_size (bitpos,
			     TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 1))));
  else if ((SIval = bitpos / BITS_PER_UNIT) != 0)
    bc_emit_instruction (addconstPSI, SIval);
  return (TREE_OPERAND (exp, 1));
}
11423 /* Emit code to push two SI constants */
static void
bc_push_offset_and_size (offset, size)
     HOST_WIDE_INT offset, size;
{
11429 bc_emit_instruction (constSI, offset);
  bc_emit_instruction (constSI, size);
}
11434 /* Emit byte code to push the address of the given lvalue expression to
11435 the stack. If it's a bit field, we also push offset and size info.
11437 Returns innermost component, which allows us to determine not only
11438 its type, but also whether it's a bitfield. */
tree
bc_expand_address (exp)
     tree exp;
{
  /* Safeguard */
  if (!exp || TREE_CODE (exp) == ERROR_MARK)
    return (exp);
  switch (TREE_CODE (exp))
    {
    case ARRAY_REF:

      return (bc_expand_address (bc_canonicalize_array_ref (exp)));

    case COMPONENT_REF:

      return (bc_expand_component_address (exp));

    case INDIRECT_REF:

      bc_expand_expr (TREE_OPERAND (exp, 0));
11463 /* For variable-sized types: retrieve pointer. Sometimes the
11464 TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
11465 also make sure we have an operand, just in case... */
11467 if (TREE_OPERAND (exp, 0)
11468 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
11469 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
11470 bc_emit_instruction (loadP);
11472 /* If packed, also return offset and size */
11473 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
11475 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
11476 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
11478 return (TREE_OPERAND (exp, 0));
11480 case FUNCTION_DECL:
11482 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
			     BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
      break;

    case PARM_DECL:

      bc_load_parmaddr (DECL_RTL (exp));
11490 /* For variable-sized types: retrieve pointer */
11491 if (TYPE_SIZE (TREE_TYPE (exp))
11492 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11493 bc_emit_instruction (loadP);
11495 /* If packed, also return offset and size */
11496 if (DECL_BIT_FIELD (exp))
11497 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11498 TREE_INT_CST_LOW (DECL_SIZE (exp)));
      break;

    case RESULT_DECL:

      bc_emit_instruction (returnP);
      break;

    case VAR_DECL:

      if (BYTECODE_LABEL (DECL_RTL (exp)))
	bc_load_externaddr (DECL_RTL (exp));
      else if (DECL_EXTERNAL (exp))
	bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
			       (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
      else
	bc_load_localaddr (DECL_RTL (exp));
11520 /* For variable-sized types: retrieve pointer */
11521 if (TYPE_SIZE (TREE_TYPE (exp))
11522 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11523 bc_emit_instruction (loadP);
11525 /* If packed, also return offset and size */
11526 if (DECL_BIT_FIELD (exp))
11527 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11528 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11536 bc_emit_bytecode (constP);
11537 r = output_constant_def (exp);
11538 bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
11540 #ifdef DEBUG_PRINT_CODE
11541 fputc ('\n', stderr);
  /* Most lvalues don't have components.  */
  return (exp);
}
11557 /* Emit a type code to be used by the runtime support in handling
11558 parameter passing. The type code consists of the machine mode
11559 plus the minimal alignment shifted left 8 bits. */
tree
bc_runtime_type_code (type)
     tree type;
{
  int val;

  switch (TREE_CODE (type))
    {
11573 case ENUMERAL_TYPE:
      val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
      break;
    }

  return build_int_2 (val, 0);
}
11593 /* Generate constructor label */
static char *
bc_gen_constr_label ()
{
11598 static int label_counter;
11599 static char label[20];
11601 sprintf (label, "*LR%d", label_counter++);
  return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
}
11607 /* Evaluate constructor CONSTR and return pointer to it on level one. We
11608 expand the constructor data as static data, and push a pointer to it.
11609 The pointer is put in the pointer table and is retrieved by a constP
11610 bytecode instruction. We then loop and store each constructor member in
   the corresponding component.  Finally, we return the original pointer on
   the stack.  */
void
bc_expand_constructor (constr)
     tree constr;
{
  char *l;
  HOST_WIDE_INT ptroffs;
11623 /* Literal constructors are handled as constants, whereas
11624 non-literals are evaluated and stored element by element
11625 into the data segment. */
  /* Allocate space in proper segment and push pointer to space on stack.  */
11630 l = bc_gen_constr_label ();
  if (TREE_CONSTANT (constr))
    {
      bc_emit_const_labeldef (l);
      bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
    }
  else
    {
      bc_emit_data_labeldef (l);
      bc_output_data_constructor (constr);
    }
11648 /* Add reference to pointer table and recall pointer to stack;
11649 this code is common for both types of constructors: literals
11650 and non-literals. */
11652 ptroffs = bc_define_pointer (l);
11653 bc_emit_instruction (constP, ptroffs);
11655 /* This is all that has to be done if it's a literal. */
  if (TREE_CONSTANT (constr))
    return;
11660 /* At this point, we have the pointer to the structure on top of the stack.
11661 Generate sequences of store_memory calls for the constructor. */
11663 /* constructor type is structure */
  if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
    {
      register tree elt;

      /* If the constructor has fewer fields than the structure,
	 clear the whole structure first.  */

      if (list_length (CONSTRUCTOR_ELTS (constr))
	  != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
	{
	  bc_emit_instruction (duplicate);
	  bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
	  bc_emit_instruction (clearBLK);
	}
11679 /* Store each element of the constructor into the corresponding
11680 field of TARGET. */
      for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
	{
	  register tree field = TREE_PURPOSE (elt);
	  register enum machine_mode mode;
	  int bitsize;
	  int bitpos;
	  int unsignedp;

	  bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
	  mode = DECL_MODE (field);
	  unsignedp = TREE_UNSIGNED (field);

	  bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
	  bc_store_field (elt, bitsize, bitpos, mode,
			  TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
			  /* The alignment of TARGET is
			     at least what its type requires.  */
			  VOIDmode, 0,
			  TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
			  int_size_in_bytes (TREE_TYPE (constr)));
	}
    }
11706 /* Constructor type is array */
  if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
    {
      register tree elt;
      register int i;
      tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
      int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
      int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
      tree elttype = TREE_TYPE (TREE_TYPE (constr));

      /* If the constructor has fewer elements than the array,
	 clear the whole array first.  */

      if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
	{
	  bc_emit_instruction (duplicate);
	  bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
	  bc_emit_instruction (clearBLK);
	}
11727 /* Store each element of the constructor into the corresponding
11728 element of TARGET, determined by counting the elements. */
      for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
	   elt;
	   elt = TREE_CHAIN (elt), i++)
	{
	  register enum machine_mode mode;
	  int bitsize;
	  int bitpos;
	  int unsignedp;
11739 mode = TYPE_MODE (elttype);
11740 bitsize = GET_MODE_BITSIZE (mode);
11741 unsignedp = TREE_UNSIGNED (elttype);
11743 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
11744 /* * TYPE_SIZE_UNIT (elttype) */ );
	  bc_store_field (elt, bitsize, bitpos, mode,
			  TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
			  /* The alignment of TARGET is
			     at least what its type requires.  */
			  VOIDmode, 0,
			  TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
			  int_size_in_bytes (TREE_TYPE (constr)));
	}
    }
}
11759 /* Store the value of EXP (an expression tree) into member FIELD of
11760 structure at address on stack, which has type TYPE, mode MODE and
11761 occupies BITSIZE bits, starting BITPOS bits from the beginning of the
11764 ALIGN is the alignment that TARGET is known to have, measured in bytes.
11765 TOTAL_SIZE is its size in bytes, or -1 if variable. */
static void
bc_store_field (field, bitsize, bitpos, mode, exp, type,
		value_mode, unsignedp, align, total_size)
     int bitsize, bitpos;
     enum machine_mode mode;
     tree field, exp, type;
     enum machine_mode value_mode;
     int unsignedp;
     int align;
     int total_size;
{
11779 /* Expand expression and copy pointer */
11780 bc_expand_expr (exp);
11781 bc_emit_instruction (over);
11784 /* If the component is a bit field, we cannot use addressing to access
11785 it. Use bit-field techniques to store in it. */
  if (DECL_BIT_FIELD (field))
    bc_store_bit_field (bitpos, bitsize, unsignedp);
  else
    /* Not bit field */
    {
      HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;

      /* Advance pointer to the desired member */
      bc_emit_instruction (addconstPSI, offset);

      bc_store_memory (type, field);
    }
}
11807 /* Store SI/SU in bitfield */
static void
bc_store_bit_field (offset, size, unsignedp)
     int offset, size, unsignedp;
{
11813 /* Push bitfield offset and size */
11814 bc_push_offset_and_size (offset, size);
  bc_emit_instruction (sstoreBI);
}
11821 /* Load SI/SU from bitfield */
static void
bc_load_bit_field (offset, size, unsignedp)
     int offset, size, unsignedp;
{
11827 /* Push bitfield offset and size */
11828 bc_push_offset_and_size (offset, size);
11830 /* Load: sign-extend if signed, else zero-extend */
  bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
}
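/* Illustration only -- not from the original source.  What zxloadBI and
   sxloadBI do with the pushed OFFSET and SIZE, sketched on a single host
   word.  Assumes 0 < size < number of bits in long; "extract_bit_field"
   and its parameters are invented names.  */
#if 0
static long
extract_bit_field (unsigned long word, int offset, int size, int unsignedp)
{
  unsigned long v = (word >> offset) & ((1UL << size) - 1);	/* extract */

  if (!unsignedp && (v & (1UL << (size - 1))))	/* sign bit set? */
    v |= ~0UL << size;				/* sign-extend */
  return (long) v;
}
#endif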
11835 /* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
11836 (adjust stack pointer upwards), negative means add that number of
11837 levels (adjust the stack pointer downwards). Only positive values
11838 normally make sense. */
11841 bc_adjust_stack (nlevels)
11850 bc_emit_instruction (drop);
11853 bc_emit_instruction (drop);
11858 bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
11859 stack_depth -= nlevels;
11862 #if defined (VALIDATE_STACK_FOR_BC)
11863 VALIDATE_STACK_FOR_BC ();