/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92, 93, 94, 95, 96, 1997 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "expr.h"
#include "insn-config.h"
#include "recog.h"
#include "typeclass.h"

#include "bc-opcode.h"
#include "bc-typecd.h"
#define CEIL(x,y) (((x) + (y) - 1) / (y))
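/* A worked example (illustrative only): CEIL (10, 4) expands to
   ((10) + (4) - 1) / (4), i.e. 13 / 4 == 3, the quotient of 10/4
   rounded up rather than truncated toward zero.  */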
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */

int cse_not_expected;
/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */

int do_preexpand_calls = 1;
/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */

int pending_stack_adjust;
/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */

int inhibit_defer_pop;
/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */

int target_temp_slot_level;
/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */

static rtx saveregs_value;
/* Similarly for __builtin_apply_args.  */

static rtx apply_args_value;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to, to_addr;
  int autinc_to, explicit_inc_to, to_struct;
  rtx from, from_addr;
  int autinc_from, explicit_inc_from, from_struct;
  int len, offset, reverse;
};
/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to, to_addr;
  int autinc_to, explicit_inc_to, to_struct;
  int len, offset, reverse;
};
/* Used to generate bytecodes: keep track of size of local variables,
   as well as depth of arithmetic stack.  (Notice that variables are
   stored on the machine's stack, not the arithmetic stack.)  */

static rtx get_push_address PROTO ((int));
extern int local_vars_size;
extern int stack_depth;
extern int max_stack_depth;
extern struct obstack permanent_obstack;
extern rtx arg_pointer_save_area;
static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue PROTO((void));
static void move_by_pieces PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
				    struct move_by_pieces *));
static void clear_by_pieces PROTO((rtx, int, int));
static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
				     struct clear_by_pieces *));
static int is_zeros_p PROTO((tree));
static int mostly_zeros_p PROTO((tree));
static void store_constructor PROTO((tree, rtx, int));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
			      enum machine_mode, int, int, int));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree));
static int fixed_type_p PROTO((tree));
static rtx var_rtx PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx,
				 enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int, int));
void bc_expand_increment PROTO((struct increment_operator *, tree));
rtx bc_allocate_local PROTO((int, int));
void bc_store_memory PROTO((tree, tree));
tree bc_expand_component_address PROTO((tree));
tree bc_expand_address PROTO((tree));
void bc_expand_constructor PROTO((tree));
void bc_adjust_stack PROTO((int));
tree bc_canonicalize_array_ref PROTO((tree));
void bc_load_memory PROTO((tree, tree));
void bc_load_externaddr PROTO((rtx));
void bc_load_externaddr_id PROTO((tree, int));
void bc_load_localaddr PROTO((rtx));
void bc_load_parmaddr PROTO((rtx));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
extern tree truthvalue_conversion PROTO((tree));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];
/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif
/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif

#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif
/* Maps used to convert modes to const, load, and store bytecodes.  */
enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
/* Initialize maps used to convert modes to const, load, and store
   bytecodes.  */

void
bc_init_mode_to_opcode_maps ()
{
  int mode;

  for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
    mode_to_const_map[mode]
      = mode_to_load_map[mode]
	= mode_to_store_map[mode] = neverneverland;

#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
  mode_to_const_map[(int) SYM] = CONST; \
  mode_to_load_map[(int) SYM] = LOAD; \
  mode_to_store_map[(int) SYM] = STORE;

#include "modemap.def"
#undef DEF_MODEMAP
}
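/* As an illustrative sketch (the opcode names below are hypothetical,
   not taken from modemap.def): an entry such as

     DEF_MODEMAP (SImode, constSI, constSI, constSI, loadSI, storeSI)

   expands, via the macro above, into the three assignments

     mode_to_const_map[(int) SImode] = constSI;
     mode_to_load_map[(int) SImode] = loadSI;
     mode_to_store_map[(int) SImode] = storeSI;  */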
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx (REG, mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}
/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}
/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;
/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
			   var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));

	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
	|| queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
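/* A minimal sketch of the queue discipline (illustrative only; VAR
   and INC are hypothetical rtx's):

     rtx q = enqueue_insn (var, gen_move_insn (var, inc));
     rtx old = protect_from_queue (q, 0);
     ...
     emit_queue ();

   OLD names the pre-increment value; any insn that consumes it must
   be emitted before the queue is flushed, as the comment above
   protect_from_queue warns.  */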
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }
  if (to_real)
    {
      rtx value;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}
#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;
	    }
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}
      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);
      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}
      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion */			/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;
  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;
  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }
  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
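/* A minimal usage sketch (illustrative; REG is a hypothetical QImode
   rtx, not a name from this file): widening its value into a fresh
   SImode rtx with zero-extension would be written

     rtx wide = convert_to_mode (SImode, reg, 1);

   where the final argument says the value is unsigned, so
   convert_move performs zero- rather than sign-extension.  */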
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */
static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);
  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }
  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
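/* A worked example (illustrative; assumes a 32-bit target where
   MOVE_MAX is 4 and the copy is fully aligned): for L == 7 the loop
   counts one SImode move (7 / 4, leaving L == 3), one HImode move
   (3 / 2, leaving L == 1), and one QImode move, so the result is 3.  */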
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PROTO ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : copy_rtx (change_address (data->to, mode,
					 plus_constant (data->to_addr,
							data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

      from1
	= (data->autinc_from
	   ? gen_rtx (MEM, mode, data->from_addr)
	   : copy_rtx (change_address (data->from, mode,
				       plus_constant (data->from_addr,
						      data->offset))));
      MEM_IN_STRUCT_P (from1) = data->from_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */
void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= GET_MODE_MASK (mode)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return;
		}
	      else
		delete_insns_since (last);
	    }
	}
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (x, 0), Pmode,
			 XEXP (y, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
    }
}
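/* A minimal usage sketch (illustrative; DEST, SRC and N are
   hypothetical): copying an N-byte BLKmode object between two MEMs
   whose addresses are known to be word-aligned would be

     emit_block_move (dest, src, GEN_INT (n), UNITS_PER_WORD);

   which then picks move_by_pieces, a movstr pattern, or the library
   fallback, as described above.  */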
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
  rtx pat, last;
  enum machine_mode mode;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
    {
      emit_move_insn (change_address (x, mode, NULL),
		      gen_rtx (REG, mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx (REG, word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}
/* Emit code to move a block Y to a block X, where X is non-consecutive
   registers represented by a PARALLEL.  */

void
emit_group_load (x, y)
     rtx x, y;
{
  rtx target_reg, source;
  int i;

  if (GET_CODE (x) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (x, 0, 0), 0))
    i = 0;
  else
    i = 1;

  for (; i < XVECLEN (x, 0); i++)
    {
      rtx element = XVECEXP (x, 0, i);

      target_reg = XEXP (element, 0);

      if (GET_CODE (y) == MEM)
	source = change_address (y, GET_MODE (target_reg),
				 plus_constant (XEXP (y, 0),
						INTVAL (XEXP (element, 1))));
      else if (XEXP (element, 1) == const0_rtx)
	{
	  if (GET_MODE (target_reg) == GET_MODE (y))
	    source = y;
	  /* Allow for the target_reg to be smaller than the input register
	     to allow for AIX with 4 DF arguments after a single SI arg.  The
	     last DF argument will only load 1 word into the integer registers,
	     but load a DF value into the float registers.  */
	  else if ((GET_MODE_SIZE (GET_MODE (target_reg))
		    <= GET_MODE_SIZE (GET_MODE (y)))
		   && GET_MODE (target_reg) == word_mode)
	    /* This might be a const_double, so we can't just use SUBREG.  */
	    source = operand_subword (y, 0, 0, VOIDmode);
	  else if (GET_MODE_SIZE (GET_MODE (target_reg))
		   == GET_MODE_SIZE (GET_MODE (y)))
	    source = gen_lowpart (GET_MODE (target_reg), y);
	  else
	    abort ();
	}
      else
	abort ();

      emit_move_insn (target_reg, source);
    }
}
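/* As an illustrative sketch of the operand shape these routines
   expect (register numbers here are hypothetical): a value passed
   half in a float register and half in an integer register might be
   described by

     (parallel [(expr_list (reg:DF 32) (const_int 0))
		(expr_list (reg:DI 3) (const_int 8))])

   where each EXPR_LIST pairs a register with the byte offset of the
   piece it carries within the whole value.  */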
/* Emit code to move a block Y to a block X, where Y is non-consecutive
   registers represented by a PARALLEL.  */

void
emit_group_store (x, y)
     rtx x, y;
{
  rtx source_reg, target;
  int i;

  if (GET_CODE (y) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (y, 0, 0), 0))
    i = 0;
  else
    i = 1;

  for (; i < XVECLEN (y, 0); i++)
    {
      rtx element = XVECEXP (y, 0, i);

      source_reg = XEXP (element, 0);

      if (GET_CODE (x) == MEM)
	target = change_address (x, GET_MODE (source_reg),
				 plus_constant (XEXP (x, 0),
						INTVAL (XEXP (element, 1))));
      else if (XEXP (element, 1) == const0_rtx)
	{
	  target = x;
	  if (GET_MODE (target) != GET_MODE (source_reg))
	    target = gen_lowpart (GET_MODE (source_reg), target);
	}
      else
	abort ();

      emit_move_insn (target, source_reg);
    }
}
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx (EXPR_LIST, VOIDmode,
	       gen_rtx (USE, VOIDmode, reg), *call_fusage);
}
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
}
/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg && GET_CODE (reg) == REG)
	use_reg (call_fusage, reg);
    }
}
/* Generate several move instructions to clear LEN bytes of block TO.
   (A MEM rtx with BLKmode).  The caller must pass TO through
   protect_from_queue before calling.  ALIGN (in bytes) is maximum alignment
   we can assume.  */

static void
clear_by_pieces (to, len, align)
     rtx to;
     int len, align;
{
  struct clear_by_pieces data;
  rtx to_addr = XEXP (to, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.to = to;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data.autinc_to
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	clear_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}
/* Subroutine of clear_by_pieces.  Clear as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
clear_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PROTO ((rtx, ...));
     enum machine_mode mode;
     struct clear_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : copy_rtx (change_address (data->to, mode,
					 plus_constant (data->to_addr,
							data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, const0_rtx));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
   the maximum alignment we can assume it has, measured in bytes.  */

void
clear_storage (object, size, align)
     rtx object;
     rtx size;
     int align;
{
  if (GET_MODE (object) == BLKmode)
    {
      object = protect_from_queue (object, 1);
      size = protect_from_queue (size, 0);

      if (GET_CODE (size) == CONST_INT
	  && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
	clear_by_pieces (object, INTVAL (size), align);
      else
	{
	  /* Try the most limited insn first, because there's no point
	     including more than one in the machine description unless
	     the more limited one has some advantage.  */

	  rtx opalign = GEN_INT (align);
	  enum machine_mode mode;

	  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	       mode = GET_MODE_WIDER_MODE (mode))
	    {
	      enum insn_code code = clrstr_optab[(int) mode];

	      if (code != CODE_FOR_nothing
		  /* We don't need MODE to be narrower than
		     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
		     the mode mask, as it is returned by the macro, it will
		     definitely be less than the actual mode mask.  */
		  && ((GET_CODE (size) == CONST_INT
		       && ((unsigned HOST_WIDE_INT) INTVAL (size)
			   <= GET_MODE_MASK (mode)))
		      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
		  && (insn_operand_predicate[(int) code][0] == 0
		      || (*insn_operand_predicate[(int) code][0]) (object,
								   BLKmode))
		  && (insn_operand_predicate[(int) code][2] == 0
		      || (*insn_operand_predicate[(int) code][2]) (opalign,
								   VOIDmode)))
		{
		  rtx op1;
		  rtx last = get_last_insn ();
		  rtx pat;

		  op1 = convert_to_mode (mode, size, 1);
		  if (insn_operand_predicate[(int) code][1] != 0
		      && ! (*insn_operand_predicate[(int) code][1]) (op1,
								     mode))
		    op1 = copy_to_mode_reg (mode, op1);

		  pat = GEN_FCN ((int) code) (object, op1, opalign);
		  if (pat)
		    {
		      emit_insn (pat);
		      return;
		    }
		  else
		    delete_insns_since (last);
		}
	    }
#ifdef TARGET_MEM_FUNCTIONS
	  emit_library_call (memset_libfunc, 0,
			     VOIDmode, 3,
			     XEXP (object, 0), Pmode,
			     const0_rtx, TYPE_MODE (integer_type_node),
			     convert_to_mode (TYPE_MODE (sizetype),
					      size, TREE_UNSIGNED (sizetype)),
			     TYPE_MODE (sizetype));
#else
	  emit_library_call (bzero_libfunc, 0,
			     VOIDmode, 2,
			     XEXP (object, 0), Pmode,
			     convert_to_mode (TYPE_MODE (integer_type_node),
					      size,
					      TREE_UNSIGNED (integer_type_node)),
			     TYPE_MODE (integer_type_node));
#endif
	}
    }
  else
    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
}
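/* A minimal usage sketch (illustrative; OBJ and N are hypothetical):
   zeroing an N-byte BLKmode object about which we only know byte
   alignment would be

     clear_storage (obj, GEN_INT (n), 1);

   A non-BLKmode object instead falls through to a single move of
   CONST0_RTX in the object's own mode.  */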
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */
rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  return emit_move_insn_1 (x, y);
}
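/* A minimal usage sketch (illustrative; the pseudo-register is
   hypothetical): loading the constant 42 into a fresh SImode pseudo
   would be

     rtx tmp = gen_reg_rtx (SImode);
     emit_move_insn (tmp, GEN_INT (42));

   emit_move_insn validates and legitimizes the operands, then hands
   the actual emission to emit_move_insn_1 below.  */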
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
	   && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
						    * BITS_PER_UNIT),
						   (class == MODE_COMPLEX_INT
						    ? MODE_INT : MODE_FLOAT),
						   0))
	   && (mov_optab->handlers[(int) submode].insn_code
	       != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));
      if (stack)
	{
	  /* If this is a stack, push the highpart first, so it
	     will be in the argument order.

	     In that case, change_address is used only to convert
	     the mode, not to change the address.  */

	  /* Note that the real part always precedes the imag part in memory
	     regardless of machine's endianness.  */
#ifdef STACK_GROWS_DOWNWARD
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
#else
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));
#endif
	}
      else
	{
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_realpart (submode, x), gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_imagpart (submode, x), gen_imagpart (submode, y)));
	}

      return get_last_insn ();
    }
2271 /* This will handle any multi-word mode that lacks a move_insn pattern.
2272 However, you will get better code if you define such patterns,
2273 even if they must turn into multiple assembler instructions. */
2274 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2279 #ifdef PUSH_ROUNDING
2281 /* If X is a push on the stack, do the push now and replace
2282 X with a reference to the stack pointer. */
2283 if (push_operand (x, GET_MODE (x)))
2285 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2286 x = change_address (x, VOIDmode, stack_pointer_rtx);
2290 /* Show the output dies here. */
2292 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
2295 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2298 rtx xpart = operand_subword (x, i, 1, mode);
2299 rtx ypart = operand_subword (y, i, 1, mode);
2301 /* If we can't get a part of Y, put Y into memory if it is a
2302 constant. Otherwise, force it into a register. If we still
2303 can't get a part of Y, abort. */
2304 if (ypart == 0 && CONSTANT_P (y))
2306 y = force_const_mem (mode, y);
2307 ypart = operand_subword (y, i, 1, mode);
2309 else if (ypart == 0)
2310 ypart = operand_subword_force (y, i, mode);
2312 if (xpart == 0 || ypart == 0)
2315 last_insn = emit_move_insn (xpart, ypart);
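/* Illustrative sketch, not part of the original file: the word-by-word
   fallback above behaves like the standalone loop below, where the 4-byte
   word size is an assumption of the sketch and the real code works on rtl
   operands via operand_subword rather than on raw memory.  */
#if 0
#include <string.h>

static void
move_by_words (unsigned char *dst, const unsigned char *src, int size)
{
  const int units_per_word = 4;	/* assumed word size for the sketch */
  int nwords = (size + units_per_word - 1) / units_per_word;
  int i;

  for (i = 0; i < nwords; i++)
    {
      int chunk = size - i * units_per_word;
      if (chunk > units_per_word)
        chunk = units_per_word;
      memcpy (dst + i * units_per_word, src + i * units_per_word, chunk);
    }
}
#endif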
2324 /* Pushing data onto the stack. */
2326 /* Push a block of length SIZE (perhaps variable)
2327 and return an rtx to address the beginning of the block.
2328 Note that it is not possible for the value returned to be a QUEUED.
2329 The value may be virtual_outgoing_args_rtx.
2331 EXTRA is the number of bytes of padding to push in addition to SIZE.
2332 BELOW nonzero means this padding comes at low addresses;
2333 otherwise, the padding comes at high addresses. */
2336 push_block (size, extra, below)
2342 size = convert_modes (Pmode, ptr_mode, size, 1);
2343 if (CONSTANT_P (size))
2344 anti_adjust_stack (plus_constant (size, extra));
2345 else if (GET_CODE (size) == REG && extra == 0)
2346 anti_adjust_stack (size);
2349 rtx temp = copy_to_mode_reg (Pmode, size);
2351 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2352 temp, 0, OPTAB_LIB_WIDEN);
2353 anti_adjust_stack (temp);
2356 #ifdef STACK_GROWS_DOWNWARD
2357 temp = virtual_outgoing_args_rtx;
2358 if (extra != 0 && below)
2359 temp = plus_constant (temp, extra);
2361 if (GET_CODE (size) == CONST_INT)
2362 temp = plus_constant (virtual_outgoing_args_rtx,
2363 - INTVAL (size) - (below ? 0 : extra));
2364 else if (extra != 0 && !below)
2365 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2366 negate_rtx (Pmode, plus_constant (size, extra)));
2368 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2369 negate_rtx (Pmode, size));
2372 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2378 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
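/* Illustrative sketch, not part of the original file: the push operand
   built above acts like one of the two helpers below, depending on which
   way the stack grows.  The byte pointer `sp' and the 4-byte item size
   are assumptions of the sketch.  */
#if 0
#include <string.h>

static unsigned char *sp;	/* stand-in for the stack pointer */

static void
push_word_pre_dec (const void *src)	/* PRE_DEC: stack grows downward */
{
  sp -= 4;			/* adjust first, then store at the new sp */
  memcpy (sp, src, 4);
}

static void
push_word_pre_inc (const void *src)	/* PRE_INC: stack grows upward */
{
  sp += 4;			/* adjust first, then store at the new sp */
  memcpy (sp, src, 4);
}
#endif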
2381 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2382 block of SIZE bytes. */
2385 get_push_address (size)
2390 if (STACK_PUSH_CODE == POST_DEC)
2391 temp = gen_rtx (PLUS, Pmode, stack_pointer_rtx, GEN_INT (size));
2392 else if (STACK_PUSH_CODE == POST_INC)
2393 temp = gen_rtx (MINUS, Pmode, stack_pointer_rtx, GEN_INT (size));
2395 temp = stack_pointer_rtx;
2397 return force_operand (temp, NULL_RTX);
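/* Worked example (illustrative, with made-up numbers): under POST_DEC the
   stack pointer moves *after* each store, so once SIZE bytes have been
   pushed the block begins at sp + SIZE, which is exactly the PLUS built
   above.  E.g. with sp == 0x1000 before an 8-byte push, the data is
   stored at 0x1000..0x1007 and sp ends up at 0x0ff8; the block address is
   then sp + 8 == 0x1000.  POST_INC is the mirror image, hence the MINUS.  */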
2400 /* Generate code to push X onto the stack, assuming it has mode MODE and
2402 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
2404 SIZE is an rtx for the size of data to be copied (in bytes),
2405 needed only if X is BLKmode.
2407 ALIGN (in bytes) is maximum alignment we can assume.
2409 If PARTIAL and REG are both nonzero, then copy that many of the first
2410 words of X into registers starting with REG, and push the rest of X.
2411 The amount of space pushed is decreased by PARTIAL words,
2412 rounded *down* to a multiple of PARM_BOUNDARY.
2413 REG must be a hard register in this case.
2414 If REG is zero but PARTIAL is not, take all other actions for an
2415 argument partially in registers, but do not actually load any registers.
2418 EXTRA is the amount in bytes of extra space to leave next to this arg.
2419 This is ignored if an argument block has already been allocated.
2421 On a machine that lacks real push insns, ARGS_ADDR is the address of
2422 the bottom of the argument block for this call. We use indexing off there
2423 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2424 argument block has not been preallocated.
2426 ARGS_SO_FAR is the size of args previously pushed for this call. */
2429 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2430 args_addr, args_so_far)
2432 enum machine_mode mode;
2443 enum direction stack_direction
2444 #ifdef STACK_GROWS_DOWNWARD
2450 /* Decide where to pad the argument: `downward' for below,
2451 `upward' for above, or `none' for don't pad it.
2452 Default is below for small data on big-endian machines; else above. */
2453 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2455 /* Invert direction if stack is post-update. */
2456 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2457 if (where_pad != none)
2458 where_pad = (where_pad == downward ? upward : downward);
2460 xinner = x = protect_from_queue (x, 0);
2462 if (mode == BLKmode)
2464 /* Copy a block into the stack, entirely or partially. */
2467 int used = partial * UNITS_PER_WORD;
2468 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2476 /* USED is now the # of bytes we need not copy to the stack
2477 because registers will take care of them. */
2480 xinner = change_address (xinner, BLKmode,
2481 plus_constant (XEXP (xinner, 0), used));
2483 /* If the partial register-part of the arg counts in its stack size,
2484 skip the part of stack space corresponding to the registers.
2485 Otherwise, start copying to the beginning of the stack space,
2486 by setting SKIP to 0. */
2487 #ifndef REG_PARM_STACK_SPACE
2493 #ifdef PUSH_ROUNDING
2494 /* Do it with several push insns if that doesn't take lots of insns
2495 and if there is no difficulty with push insns that skip bytes
2496 on the stack for alignment purposes. */
2498 && GET_CODE (size) == CONST_INT
2500 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2502 /* Here we avoid the case of a structure whose weak alignment
2503 forces many pushes of a small amount of data,
2504 and such small pushes do rounding that causes trouble. */
2505 && ((! SLOW_UNALIGNED_ACCESS)
2506 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2507 || PUSH_ROUNDING (align) == align)
2508 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2510 /* Push padding now if padding above and stack grows down,
2511 or if padding below and stack grows up.
2512 But if space already allocated, this has already been done. */
2513 if (extra && args_addr == 0
2514 && where_pad != none && where_pad != stack_direction)
2515 anti_adjust_stack (GEN_INT (extra));
2517 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2518 INTVAL (size) - used, align);
2520 if (flag_check_memory_usage)
2524 temp = get_push_address (INTVAL(size) - used);
2525 if (GET_CODE (x) == MEM && AGGREGATE_TYPE_P (type))
2526 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2528 XEXP (xinner, 0), ptr_mode,
2529 GEN_INT (INTVAL(size) - used),
2530 TYPE_MODE (sizetype));
2532 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2534 GEN_INT (INTVAL(size) - used),
2535 TYPE_MODE (sizetype),
2536 GEN_INT (MEMORY_USE_RW), QImode);
2540 #endif /* PUSH_ROUNDING */
2542 /* Otherwise make space on the stack and copy the data
2543 to the address of that space. */
2545 /* Deduct words put into registers from the size we must copy. */
2548 if (GET_CODE (size) == CONST_INT)
2549 size = GEN_INT (INTVAL (size) - used);
2551 size = expand_binop (GET_MODE (size), sub_optab, size,
2552 GEN_INT (used), NULL_RTX, 0,
2556 /* Get the address of the stack space.
2557 In this case, we do not deal with EXTRA separately.
2558 A single stack adjust will do. */
2561 temp = push_block (size, extra, where_pad == downward);
2564 else if (GET_CODE (args_so_far) == CONST_INT)
2565 temp = memory_address (BLKmode,
2566 plus_constant (args_addr,
2567 skip + INTVAL (args_so_far)));
2569 temp = memory_address (BLKmode,
2570 plus_constant (gen_rtx (PLUS, Pmode,
2571 args_addr, args_so_far),
2573 if (flag_check_memory_usage)
2577 target = copy_to_reg (temp);
2578 if (GET_CODE (x) == MEM && AGGREGATE_TYPE_P (type))
2579 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2581 XEXP (xinner, 0), ptr_mode,
2582 size, TYPE_MODE (sizetype));
2584 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2586 size, TYPE_MODE (sizetype),
2587 GEN_INT (MEMORY_USE_RW), QImode);
2590 /* TEMP is the address of the block. Copy the data there. */
2591 if (GET_CODE (size) == CONST_INT
2592 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2595 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2596 INTVAL (size), align);
2599 /* Try the most limited insn first, because there's no point
2600 including more than one in the machine description unless
2601 the more limited one has some advantage. */
2602 #ifdef HAVE_movstrqi
2604 && GET_CODE (size) == CONST_INT
2605 && ((unsigned) INTVAL (size)
2606 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2608 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2609 xinner, size, GEN_INT (align));
2617 #ifdef HAVE_movstrhi
2619 && GET_CODE (size) == CONST_INT
2620 && ((unsigned) INTVAL (size)
2621 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2623 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2624 xinner, size, GEN_INT (align));
2632 #ifdef HAVE_movstrsi
2635 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2636 xinner, size, GEN_INT (align));
2644 #ifdef HAVE_movstrdi
2647 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2648 xinner, size, GEN_INT (align));
2657 #ifndef ACCUMULATE_OUTGOING_ARGS
2658 /* If the source is referenced relative to the stack pointer,
2659 copy it to another register to stabilize it. We do not need
2660 to do this if we know that we won't be changing sp. */
2662 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2663 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2664 temp = copy_to_reg (temp);
2667 /* Make inhibit_defer_pop nonzero around the library call
2668 to force it to pop the bcopy-arguments right away. */
2670 #ifdef TARGET_MEM_FUNCTIONS
2671 emit_library_call (memcpy_libfunc, 0,
2672 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2673 convert_to_mode (TYPE_MODE (sizetype),
2674 size, TREE_UNSIGNED (sizetype)),
2675 TYPE_MODE (sizetype));
2677 emit_library_call (bcopy_libfunc, 0,
2678 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2679 convert_to_mode (TYPE_MODE (integer_type_node),
2681 TREE_UNSIGNED (integer_type_node)),
2682 TYPE_MODE (integer_type_node));
2687 else if (partial > 0)
2689 /* Scalar partly in registers. */
2691 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2694 /* # words of start of argument
2695 that we must make space for but need not store. */
2696 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2697 int args_offset = INTVAL (args_so_far);
2700 /* Push padding now if padding above and stack grows down,
2701 or if padding below and stack grows up.
2702 But if space already allocated, this has already been done. */
2703 if (extra && args_addr == 0
2704 && where_pad != none && where_pad != stack_direction)
2705 anti_adjust_stack (GEN_INT (extra));
2707 /* If we make space by pushing it, we might as well push
2708 the real data. Otherwise, we can leave OFFSET nonzero
2709 and leave the space uninitialized. */
2713 /* Now NOT_STACK gets the number of words that we don't need to
2714 allocate on the stack. */
2715 not_stack = partial - offset;
2717 /* If the partial register-part of the arg counts in its stack size,
2718 skip the part of stack space corresponding to the registers.
2719 Otherwise, start copying to the beginning of the stack space,
2720 by setting SKIP to 0. */
2721 #ifndef REG_PARM_STACK_SPACE
2727 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2728 x = validize_mem (force_const_mem (mode, x));
2730 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2731 SUBREGs of such registers are not allowed. */
2732 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2733 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2734 x = copy_to_reg (x);
2736 /* Loop over all the words allocated on the stack for this arg. */
2737 /* We can do it by words, because any scalar bigger than a word
2738 has a size a multiple of a word. */
2739 #ifndef PUSH_ARGS_REVERSED
2740 for (i = not_stack; i < size; i++)
2742 for (i = size - 1; i >= not_stack; i--)
2744 if (i >= not_stack + offset)
2745 emit_push_insn (operand_subword_force (x, i, mode),
2746 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2748 GEN_INT (args_offset + ((i - not_stack + skip)
2749 * UNITS_PER_WORD)));
2754 rtx target = NULL_RTX;
2756 /* Push padding now if padding above and stack grows down,
2757 or if padding below and stack grows up.
2758 But if space already allocated, this has already been done. */
2759 if (extra && args_addr == 0
2760 && where_pad != none && where_pad != stack_direction)
2761 anti_adjust_stack (GEN_INT (extra));
2763 #ifdef PUSH_ROUNDING
2765 addr = gen_push_operand ();
2769 if (GET_CODE (args_so_far) == CONST_INT)
2771 = memory_address (mode,
2772 plus_constant (args_addr,
2773 INTVAL (args_so_far)));
2775 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2780 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2782 if (flag_check_memory_usage)
2785 target = get_push_address (GET_MODE_SIZE (mode));
2787 if (GET_CODE (x) == MEM && AGGREGATE_TYPE_P (type))
2788 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2790 XEXP (x, 0), ptr_mode,
2791 GEN_INT (GET_MODE_SIZE (mode)),
2792 TYPE_MODE (sizetype));
2794 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2796 GEN_INT (GET_MODE_SIZE (mode)),
2797 TYPE_MODE (sizetype),
2798 GEN_INT (MEMORY_USE_RW), QImode);
2803 /* If part should go in registers, copy that part
2804 into the appropriate registers. Do this now, at the end,
2805 since mem-to-mem copies above may do function calls. */
2806 if (partial > 0 && reg != 0)
2808 /* Handle calls that pass values in multiple non-contiguous locations.
2809 The Irix 6 ABI has examples of this. */
2810 if (GET_CODE (reg) == PARALLEL)
2811 emit_group_load (reg, x);
2813 move_block_to_reg (REGNO (reg), x, partial, mode);
2816 if (extra && args_addr == 0 && where_pad == stack_direction)
2817 anti_adjust_stack (GEN_INT (extra));
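/* Distilled form of the padding rule applied at both ends of
   emit_push_insn above (an illustrative sketch, not compiler code):
   padding is pushed before the data exactly when the pad direction
   opposes the direction of stack growth, so the extra bytes always land
   on the intended side of the argument.  */
#if 0
static int
pad_before_data_p (enum direction where_pad, enum direction stack_direction)
{
  /* Mirrors the `extra && args_addr == 0 && ...' tests above.  */
  return where_pad != none && where_pad != stack_direction;
}
#endif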
2820 /* Expand an assignment that stores the value of FROM into TO.
2821 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2822 (This may contain a QUEUED rtx;
2823 if the value is constant, this rtx is a constant.)
2824 Otherwise, the returned value is NULL_RTX.
2826 SUGGEST_REG is no longer actually used.
2827 It used to mean, copy the value through a register
2828 and return that register, if that is possible.
2829 We now use WANT_VALUE to decide whether to do this. */
2832 expand_assignment (to, from, want_value, suggest_reg)
2837 register rtx to_rtx = 0;
2840 /* Don't crash if the lhs of the assignment was erroneous. */
2842 if (TREE_CODE (to) == ERROR_MARK)
2844 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2845 return want_value ? result : NULL_RTX;
2848 if (output_bytecode)
2850 tree dest_innermost;
2852 bc_expand_expr (from);
2853 bc_emit_instruction (duplicate);
2855 dest_innermost = bc_expand_address (to);
2857 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2858 take care of it here. */
2860 bc_store_memory (TREE_TYPE (to), dest_innermost);
2864 /* Assignment of a structure component needs special treatment
2865 if the structure component's rtx is not simply a MEM.
2866 Assignment of an array element at a constant index, and assignment of
2867 an array element in an unaligned packed structure field, have the same problem. */
2870 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
2871 || TREE_CODE (to) == ARRAY_REF)
2873 enum machine_mode mode1;
2883 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
2884 &unsignedp, &volatilep, &alignment);
2886 /* If we are going to use store_bit_field and extract_bit_field,
2887 make sure to_rtx will be safe for multiple use. */
2889 if (mode1 == VOIDmode && want_value)
2890 tem = stabilize_reference (tem);
2892 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
2895 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2897 if (GET_CODE (to_rtx) != MEM)
2899 to_rtx = change_address (to_rtx, VOIDmode,
2900 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
2901 force_reg (ptr_mode, offset_rtx)));
2905 if (GET_CODE (to_rtx) == MEM)
2907 /* When the offset is zero, to_rtx is the address of the
2908 structure we are storing into, and hence may be shared.
2909 We must make a new MEM before setting the volatile bit. */
2911 to_rtx = copy_rtx (to_rtx);
2913 MEM_VOLATILE_P (to_rtx) = 1;
2915 #if 0 /* This was turned off because, when a field is volatile
2916 in an object which is not volatile, the object may be in a register,
2917 and then we would abort over here. */
2923 /* Check the access. */
2924 if (flag_check_memory_usage && GET_CODE (to_rtx) == MEM)
2929 enum machine_mode best_mode;
2931 best_mode = get_best_mode (bitsize, bitpos,
2932 TYPE_ALIGN (TREE_TYPE (tem)),
2934 if (best_mode == VOIDmode)
2937 best_mode_size = GET_MODE_BITSIZE (best_mode);
2938 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
2939 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
2940 size *= GET_MODE_SIZE (best_mode);
2942 /* Check the access right of the pointer. */
2943 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3, to_addr,
2944 ptr_mode, GEN_INT (size), TYPE_MODE (sizetype),
2945 GEN_INT (MEMORY_USE_WO), QImode);
2948 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2950 /* Spurious cast makes HPUX compiler happy. */
2951 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2954 /* Required alignment of containing datum. */
2956 int_size_in_bytes (TREE_TYPE (tem)));
2957 preserve_temp_slots (result);
2961 /* If the value is meaningful, convert RESULT to the proper mode.
2962 Otherwise, return nothing. */
2963 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2964 TYPE_MODE (TREE_TYPE (from)),
2966 TREE_UNSIGNED (TREE_TYPE (to)))
2970 /* If the rhs is a function call and its value is not an aggregate,
2971 call the function before we start to compute the lhs.
2972 This is needed for correct code for cases such as
2973 val = setjmp (buf) on machines where reference to val
2974 requires loading up part of an address in a separate insn.
2976 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2977 a promoted variable where the zero- or sign- extension needs to be done.
2978 Handling this in the normal way is safe because no computation is done before the call. */
2980 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2981 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
2982 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
2987 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2989 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
2991 /* Handle calls that return values in multiple non-contiguous locations.
2992 The Irix 6 ABI has examples of this. */
2993 if (GET_CODE (to_rtx) == PARALLEL)
2994 emit_group_load (to_rtx, value);
2995 else if (GET_MODE (to_rtx) == BLKmode)
2996 emit_block_move (to_rtx, value, expr_size (from),
2997 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
2999 emit_move_insn (to_rtx, value);
3000 preserve_temp_slots (to_rtx);
3003 return want_value ? to_rtx : NULL_RTX;
3006 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3007 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3010 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3012 /* Don't move directly into a return register. */
3013 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3018 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3019 emit_move_insn (to_rtx, temp);
3020 preserve_temp_slots (to_rtx);
3023 return want_value ? to_rtx : NULL_RTX;
3026 /* In case we are returning the contents of an object which overlaps
3027 the place the value is being stored, use a safe function when copying
3028 a value through a pointer into a structure value return block. */
3029 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3030 && current_function_returns_struct
3031 && !current_function_returns_pcc_struct)
3036 size = expr_size (from);
3037 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3038 EXPAND_MEMORY_USE_DONT);
3040 /* Copy the rights of the bitmap. */
3041 if (flag_check_memory_usage)
3042 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3043 XEXP (to_rtx, 0), ptr_mode,
3044 XEXP (from_rtx, 0), ptr_mode,
3045 convert_to_mode (TYPE_MODE (sizetype),
3046 size, TREE_UNSIGNED (sizetype)),
3047 TYPE_MODE (sizetype));
3049 #ifdef TARGET_MEM_FUNCTIONS
3050 emit_library_call (memcpy_libfunc, 0,
3051 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3052 XEXP (from_rtx, 0), Pmode,
3053 convert_to_mode (TYPE_MODE (sizetype),
3054 size, TREE_UNSIGNED (sizetype)),
3055 TYPE_MODE (sizetype));
3057 emit_library_call (bcopy_libfunc, 0,
3058 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3059 XEXP (to_rtx, 0), Pmode,
3060 convert_to_mode (TYPE_MODE (integer_type_node),
3061 size, TREE_UNSIGNED (integer_type_node)),
3062 TYPE_MODE (integer_type_node));
3065 preserve_temp_slots (to_rtx);
3068 return want_value ? to_rtx : NULL_RTX;
3071 /* Compute FROM and store the value in the rtx we got. */
3074 result = store_expr (from, to_rtx, want_value);
3075 preserve_temp_slots (result);
3078 return want_value ? result : NULL_RTX;
3081 /* Generate code for computing expression EXP,
3082 and storing the value into TARGET.
3083 TARGET may contain a QUEUED rtx.
3085 If WANT_VALUE is nonzero, return a copy of the value
3086 not in TARGET, so that we can be sure to use the proper
3087 value in a containing expression even if TARGET has something
3088 else stored in it. If possible, we copy the value through a pseudo
3089 and return that pseudo. Or, if the value is constant, we try to
3090 return the constant. In some cases, we return a pseudo
3091 copied *from* TARGET.
3093 If the mode is BLKmode then we may return TARGET itself.
3094 It turns out that in BLKmode it doesn't cause a problem,
3095 because C has no operators that could combine two different
3096 assignments into the same BLKmode object with different values
3097 with no sequence point. Will other languages need this to be more thorough?
3100 If WANT_VALUE is 0, we return NULL, to make sure
3101 to catch quickly any cases where the caller uses the value
3102 and fails to set WANT_VALUE. */
3105 store_expr (exp, target, want_value)
3107 register rtx target;
3111 int dont_return_target = 0;
3113 if (TREE_CODE (exp) == COMPOUND_EXPR)
3115 /* Perform first part of compound expression, then assign from second part. */
3117 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3119 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3121 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3123 /* For conditional expression, get safe form of the target. Then
3124 test the condition, doing the appropriate assignment on either
3125 side. This avoids the creation of unnecessary temporaries.
3126 For non-BLKmode, it is more efficient not to do this. */
3128 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3131 target = protect_from_queue (target, 1);
3133 do_pending_stack_adjust ();
3135 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3136 start_cleanup_deferal ();
3137 store_expr (TREE_OPERAND (exp, 1), target, 0);
3138 end_cleanup_deferal ();
3140 emit_jump_insn (gen_jump (lab2));
3143 start_cleanup_deferal ();
3144 store_expr (TREE_OPERAND (exp, 2), target, 0);
3145 end_cleanup_deferal ();
3150 return want_value ? target : NULL_RTX;
3152 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3153 && GET_MODE (target) != BLKmode)
3154 /* If target is in memory and caller wants value in a register instead,
3155 arrange that. Pass TARGET as target for expand_expr so that,
3156 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3157 We know expand_expr will not use the target in that case.
3158 Don't do this if TARGET is volatile because we are supposed
3159 to write it and then read it. */
3161 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3162 GET_MODE (target), 0);
3163 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3164 temp = copy_to_reg (temp);
3165 dont_return_target = 1;
3167 else if (queued_subexp_p (target))
3168 /* If target contains a postincrement, let's not risk
3169 using it as the place to generate the rhs. */
3171 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3173 /* Expand EXP into a new pseudo. */
3174 temp = gen_reg_rtx (GET_MODE (target));
3175 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3178 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3180 /* If target is volatile, ANSI requires accessing the value
3181 *from* the target, if it is accessed. So make that happen.
3182 In no case return the target itself. */
3183 if (! MEM_VOLATILE_P (target) && want_value)
3184 dont_return_target = 1;
3186 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3187 /* If this is a scalar in a register that is stored in a wider mode
3188 than the declared mode, compute the result into its declared mode
3189 and then convert to the wider mode. Our value is the computed expression. */
3192 /* If we don't want a value, we can do the conversion inside EXP,
3193 which will often result in some optimizations. Do the conversion
3194 in two steps: first change the signedness, if needed, then
3195 the extend. But don't do this if the type of EXP is a subtype
3196 of something else since then the conversion might involve
3197 more than just converting modes. */
3198 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3199 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3201 if (TREE_UNSIGNED (TREE_TYPE (exp))
3202 != SUBREG_PROMOTED_UNSIGNED_P (target))
3205 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3209 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3210 SUBREG_PROMOTED_UNSIGNED_P (target)),
3214 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3216 /* If TEMP is a volatile MEM and we want a result value, make
3217 the access now so it gets done only once. Likewise if
3218 it contains TARGET. */
3219 if (GET_CODE (temp) == MEM && want_value
3220 && (MEM_VOLATILE_P (temp)
3221 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3222 temp = copy_to_reg (temp);
3224 /* If TEMP is a VOIDmode constant, use convert_modes to make
3225 sure that we properly convert it. */
3226 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3227 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3228 TYPE_MODE (TREE_TYPE (exp)), temp,
3229 SUBREG_PROMOTED_UNSIGNED_P (target));
3231 convert_move (SUBREG_REG (target), temp,
3232 SUBREG_PROMOTED_UNSIGNED_P (target));
3233 return want_value ? temp : NULL_RTX;
3237 temp = expand_expr (exp, target, GET_MODE (target), 0);
3238 /* Return TARGET if it's a specified hardware register.
3239 If TARGET is a volatile mem ref, either return TARGET
3240 or return a reg copied *from* TARGET; ANSI requires this.
3242 Otherwise, if TEMP is not TARGET, return TEMP
3243 if it is constant (for efficiency),
3244 or if we really want the correct value. */
3245 if (!(target && GET_CODE (target) == REG
3246 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3247 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3248 && ! rtx_equal_p (temp, target)
3249 && (CONSTANT_P (temp) || want_value))
3250 dont_return_target = 1;
3253 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3254 the same as that of TARGET, adjust the constant. This is needed, for
3255 example, in case it is a CONST_DOUBLE and we want only a word-sized value. */
3257 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3258 && TREE_CODE (exp) != ERROR_MARK
3259 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3260 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3261 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3263 if (flag_check_memory_usage
3264 && GET_CODE (target) == MEM
3265 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3267 if (GET_CODE (temp) == MEM)
3268 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3269 XEXP (target, 0), ptr_mode,
3270 XEXP (temp, 0), ptr_mode,
3271 expr_size (exp), TYPE_MODE (sizetype));
3273 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3274 XEXP (target, 0), ptr_mode,
3275 expr_size (exp), TYPE_MODE (sizetype),
3276 GEN_INT (MEMORY_USE_WO), QImode);
3279 /* If value was not generated in the target, store it there.
3280 Convert the value to TARGET's type first if necessary. */
3282 if (! rtx_equal_p (temp, target) && TREE_CODE (exp) != ERROR_MARK)
3284 target = protect_from_queue (target, 1);
3285 if (GET_MODE (temp) != GET_MODE (target)
3286 && GET_MODE (temp) != VOIDmode)
3288 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3289 if (dont_return_target)
3291 /* In this case, we will return TEMP,
3292 so make sure it has the proper mode.
3293 But don't forget to store the value into TARGET. */
3294 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3295 emit_move_insn (target, temp);
3298 convert_move (target, temp, unsignedp);
3301 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3303 /* Handle copying a string constant into an array.
3304 The string constant may be shorter than the array.
3305 So copy just the string's actual length, and clear the rest. */
3309 /* Get the size of the data type of the string,
3310 which is actually the size of the target. */
3311 size = expr_size (exp);
3312 if (GET_CODE (size) == CONST_INT
3313 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3314 emit_block_move (target, temp, size,
3315 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3318 /* Compute the size of the data to copy from the string. */
3320 = size_binop (MIN_EXPR,
3321 make_tree (sizetype, size),
3323 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3324 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3328 /* Copy that much. */
3329 emit_block_move (target, temp, copy_size_rtx,
3330 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3332 /* Figure out how much is left in TARGET that we have to clear.
3333 Do all calculations in ptr_mode. */
3335 addr = XEXP (target, 0);
3336 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3338 if (GET_CODE (copy_size_rtx) == CONST_INT)
3340 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3341 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3345 addr = force_reg (ptr_mode, addr);
3346 addr = expand_binop (ptr_mode, add_optab, addr,
3347 copy_size_rtx, NULL_RTX, 0,
3350 size = expand_binop (ptr_mode, sub_optab, size,
3351 copy_size_rtx, NULL_RTX, 0,
3354 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
3355 GET_MODE (size), 0, 0);
3356 label = gen_label_rtx ();
3357 emit_jump_insn (gen_blt (label));
3360 if (size != const0_rtx)
3362 /* Be sure we can write on ADDR. */
3363 if (flag_check_memory_usage)
3364 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3366 size, TYPE_MODE (sizetype),
3367 GEN_INT (MEMORY_USE_WO), QImode);
3368 #ifdef TARGET_MEM_FUNCTIONS
3369 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3371 const0_rtx, TYPE_MODE (integer_type_node),
3372 convert_to_mode (TYPE_MODE (sizetype),
3374 TREE_UNSIGNED (sizetype)),
3375 TYPE_MODE (sizetype));
3377 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3379 convert_to_mode (TYPE_MODE (integer_type_node),
3381 TREE_UNSIGNED (integer_type_node)),
3382 TYPE_MODE (integer_type_node));
3390 /* Handle calls that return values in multiple non-contiguous locations.
3391 The Irix 6 ABI has examples of this. */
3392 else if (GET_CODE (target) == PARALLEL)
3393 emit_group_load (target, temp);
3394 else if (GET_MODE (temp) == BLKmode)
3395 emit_block_move (target, temp, expr_size (exp),
3396 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3398 emit_move_insn (target, temp);
3401 /* If we don't want a value, return NULL_RTX. */
3405 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3406 ??? The latter test doesn't seem to make sense. */
3407 else if (dont_return_target && GET_CODE (temp) != MEM)
3410 /* Return TARGET itself if it is a hard register. */
3411 else if (want_value && GET_MODE (target) != BLKmode
3412 && ! (GET_CODE (target) == REG
3413 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3414 return copy_to_reg (target);
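/* What the STRING_CST branch of store_expr above amounts to at the C
   level (an illustrative equivalence, not compiler code).  For

       char a[8] = "hi";

   the constant is 3 bytes counting the terminating null, shorter than
   the array, so the generated code behaves like

       memcpy (a, "hi", 3);        -- copy TREE_STRING_LENGTH bytes
       memset (a + 3, 0, 8 - 3);   -- clear the rest of the target

   with the memset skipped when the copy already fills the target.  */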
3420 /* Return 1 if EXP just contains zeros. */
3428 switch (TREE_CODE (exp))
3432 case NON_LVALUE_EXPR:
3433 return is_zeros_p (TREE_OPERAND (exp, 0));
3436 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3440 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3443 return REAL_VALUES_EQUAL (TREE_REAL_CST (exp), dconst0);
3446 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3447 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3448 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3449 if (! is_zeros_p (TREE_VALUE (elt)))
3458 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
3461 mostly_zeros_p (exp)
3464 if (TREE_CODE (exp) == CONSTRUCTOR)
3466 int elts = 0, zeros = 0;
3467 tree elt = CONSTRUCTOR_ELTS (exp);
3468 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3470 /* If there are no ranges of true bits, it is all zero. */
3471 return elt == NULL_TREE;
3473 for (; elt; elt = TREE_CHAIN (elt))
3475 /* We do not handle the case where the index is a RANGE_EXPR,
3476 so the statistic will be somewhat inaccurate.
3477 We do make a more accurate count in store_constructor itself,
3478 and since this function is only used for nested array elements,
3479 this should be close enough. */
3480 if (mostly_zeros_p (TREE_VALUE (elt)))
3485 return 4 * zeros >= 3 * elts;
3488 return is_zeros_p (exp);
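/* Worked example for the 3/4 threshold above: a constructor with 8
   elements of which 6 are mostly zero gives 4 * 6 >= 3 * 8, i.e.
   24 >= 24, so it counts as mostly zero; with only 5 such elements,
   20 >= 24 fails.  The scaled integer comparison is exact and avoids
   floating point: 4 * zeros >= 3 * elts is precisely zeros/elts >= 3/4.  */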
3491 /* Helper function for store_constructor.
3492 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3493 TYPE is the type of the CONSTRUCTOR, not the element type.
3494 CLEARED is as for store_constructor.
3496 This provides a recursive shortcut back to store_constructor when it isn't
3497 necessary to go through store_field. This is so that we can pass through
3498 the cleared field to let store_constructor know that we may not have to
3499 clear a substructure if the outer structure has already been cleared. */
3502 store_constructor_field (target, bitsize, bitpos,
3503 mode, exp, type, cleared)
3505 int bitsize, bitpos;
3506 enum machine_mode mode;
3510 if (TREE_CODE (exp) == CONSTRUCTOR
3511 && bitpos % BITS_PER_UNIT == 0
3512 /* If we have a non-zero bitpos for a register target, then we just
3513 let store_field do the bitfield handling. This is unlikely to
3514 generate unnecessary clear instructions anyway. */
3515 && (bitpos == 0 || GET_CODE (target) == MEM))
3518 target = change_address (target, VOIDmode,
3519 plus_constant (XEXP (target, 0),
3520 bitpos / BITS_PER_UNIT));
3521 store_constructor (exp, target, cleared);
3524 store_field (target, bitsize, bitpos, mode, exp,
3525 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3526 int_size_in_bytes (type));
3529 /* Store the value of constructor EXP into the rtx TARGET.
3530 TARGET is either a REG or a MEM.
3531 CLEARED is true if TARGET is known to have been zero'd. */
3534 store_constructor (exp, target, cleared)
3539 tree type = TREE_TYPE (exp);
3541 /* We know our target cannot conflict, since safe_from_p has been called. */
3543 /* Don't try copying piece by piece into a hard register
3544 since that is vulnerable to being clobbered by EXP.
3545 Instead, construct in a pseudo register and then copy it all. */
3546 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3548 rtx temp = gen_reg_rtx (GET_MODE (target));
3549 store_constructor (exp, temp, 0);
3550 emit_move_insn (target, temp);
3555 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3556 || TREE_CODE (type) == QUAL_UNION_TYPE)
3560 /* Inform later passes that the whole union value is dead. */
3561 if (TREE_CODE (type) == UNION_TYPE
3562 || TREE_CODE (type) == QUAL_UNION_TYPE)
3563 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3565 /* If we are building a static constructor into a register,
3566 set the initial value as zero so we can fold the value into
3567 a constant. But if more than one register is involved,
3568 this probably loses. */
3569 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3570 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
3573 emit_move_insn (target, const0_rtx);
3578 /* If the constructor has fewer fields than the structure
3579 or if we are initializing the structure to mostly zeros,
3580 clear the whole structure first. */
3581 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3582 != list_length (TYPE_FIELDS (type)))
3583 || mostly_zeros_p (exp))
3586 clear_storage (target, expr_size (exp),
3587 TYPE_ALIGN (type) / BITS_PER_UNIT);
3592 /* Inform later passes that the old value is dead. */
3593 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3595 /* Store each element of the constructor into
3596 the corresponding field of TARGET. */
3598 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3600 register tree field = TREE_PURPOSE (elt);
3601 register enum machine_mode mode;
3605 tree pos, constant = 0, offset = 0;
3606 rtx to_rtx = target;
3608 /* Just ignore missing fields.
3609 We cleared the whole structure, above,
3610 if any fields are missing. */
3614 if (cleared && is_zeros_p (TREE_VALUE (elt)))
3617 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3618 unsignedp = TREE_UNSIGNED (field);
3619 mode = DECL_MODE (field);
3620 if (DECL_BIT_FIELD (field))
3623 pos = DECL_FIELD_BITPOS (field);
3624 if (TREE_CODE (pos) == INTEGER_CST)
3626 else if (TREE_CODE (pos) == PLUS_EXPR
3627 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3628 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3633 bitpos = TREE_INT_CST_LOW (constant);
3639 if (contains_placeholder_p (offset))
3640 offset = build (WITH_RECORD_EXPR, sizetype,
3643 offset = size_binop (FLOOR_DIV_EXPR, offset,
3644 size_int (BITS_PER_UNIT));
3646 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3647 if (GET_CODE (to_rtx) != MEM)
3651 = change_address (to_rtx, VOIDmode,
3652 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
3653 force_reg (ptr_mode, offset_rtx)));
3655 if (TREE_READONLY (field))
3657 if (GET_CODE (to_rtx) == MEM)
3658 to_rtx = copy_rtx (to_rtx);
3660 RTX_UNCHANGING_P (to_rtx) = 1;
3663 store_constructor_field (to_rtx, bitsize, bitpos,
3664 mode, TREE_VALUE (elt), type, cleared);
3667 else if (TREE_CODE (type) == ARRAY_TYPE)
3672 tree domain = TYPE_DOMAIN (type);
3673 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3674 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3675 tree elttype = TREE_TYPE (type);
3677 /* If the constructor has fewer elements than the array,
3678 clear the whole array first. Similarly if this is a
3679 static constructor of a non-BLKmode object. */
3680 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3684 HOST_WIDE_INT count = 0, zero_count = 0;
3686 /* This loop is a more accurate version of the loop in
3687 mostly_zeros_p (it handles RANGE_EXPR in an index).
3688 It is also needed to check for missing elements. */
3689 for (elt = CONSTRUCTOR_ELTS (exp);
3691 elt = TREE_CHAIN (elt))
3693 tree index = TREE_PURPOSE (elt);
3694 HOST_WIDE_INT this_node_count;
3695 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3697 tree lo_index = TREE_OPERAND (index, 0);
3698 tree hi_index = TREE_OPERAND (index, 1);
3699 if (TREE_CODE (lo_index) != INTEGER_CST
3700 || TREE_CODE (hi_index) != INTEGER_CST)
3705 this_node_count = TREE_INT_CST_LOW (hi_index)
3706 - TREE_INT_CST_LOW (lo_index) + 1;
3709 this_node_count = 1;
3710 count += this_node_count;
3711 if (mostly_zeros_p (TREE_VALUE (elt)))
3712 zero_count += this_node_count;
3714 /* Clear the entire array first if there are any missing elements,
3715 or if the incidence of zero elements is >= 75%. */
3716 if (count < maxelt - minelt + 1
3717 || 4 * zero_count >= 3 * count)
3723 clear_storage (target, expr_size (exp),
3724 TYPE_ALIGN (type) / BITS_PER_UNIT);
3728 /* Inform later passes that the old value is dead. */
3729 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3731 /* Store each element of the constructor into
3732 the corresponding element of TARGET, determined
3733 by counting the elements. */
3734 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3736 elt = TREE_CHAIN (elt), i++)
3738 register enum machine_mode mode;
3742 tree value = TREE_VALUE (elt);
3743 tree index = TREE_PURPOSE (elt);
3744 rtx xtarget = target;
3746 if (cleared && is_zeros_p (value))
3749 mode = TYPE_MODE (elttype);
3750 bitsize = GET_MODE_BITSIZE (mode);
3751 unsignedp = TREE_UNSIGNED (elttype);
3753 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3755 tree lo_index = TREE_OPERAND (index, 0);
3756 tree hi_index = TREE_OPERAND (index, 1);
3757 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
3758 struct nesting *loop;
3759 HOST_WIDE_INT lo, hi, count;
3762 /* If the range is constant and "small", unroll the loop. */
3763 if (TREE_CODE (lo_index) == INTEGER_CST
3764 && TREE_CODE (hi_index) == INTEGER_CST
3765 && (lo = TREE_INT_CST_LOW (lo_index),
3766 hi = TREE_INT_CST_LOW (hi_index),
3767 count = hi - lo + 1,
3768 (GET_CODE (target) != MEM
3770 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
3771 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
3774 lo -= minelt; hi -= minelt;
3775 for (; lo <= hi; lo++)
3777 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
3778 store_constructor_field (target, bitsize, bitpos,
3779 mode, value, type, cleared);
3784 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
3785 loop_top = gen_label_rtx ();
3786 loop_end = gen_label_rtx ();
3788 unsignedp = TREE_UNSIGNED (domain);
3790 index = build_decl (VAR_DECL, NULL_TREE, domain);
3792 DECL_RTL (index) = index_r
3793 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
3796 if (TREE_CODE (value) == SAVE_EXPR
3797 && SAVE_EXPR_RTL (value) == 0)
3799 /* Make sure value gets expanded once before the loop. */
3801 expand_expr (value, const0_rtx, VOIDmode, 0);
3804 store_expr (lo_index, index_r, 0);
3805 loop = expand_start_loop (0);
3807 /* Assign value to element index. */
3808 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3809 size_int (BITS_PER_UNIT));
3810 position = size_binop (MULT_EXPR,
3811 size_binop (MINUS_EXPR, index,
3812 TYPE_MIN_VALUE (domain)),
3814 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3815 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3816 xtarget = change_address (target, mode, addr);
3817 if (TREE_CODE (value) == CONSTRUCTOR)
3818 store_constructor (value, xtarget, cleared);
3820 store_expr (value, xtarget, 0);
3822 expand_exit_loop_if_false (loop,
3823 build (LT_EXPR, integer_type_node,
3826 expand_increment (build (PREINCREMENT_EXPR,
3828 index, integer_one_node), 0, 0);
3830 emit_label (loop_end);
3832 /* Needed by stupid register allocation, to extend the
3833 lifetime of pseudo-regs used by target past the end
3835 emit_insn (gen_rtx (USE, GET_MODE (target), target));
3838 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
3839 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
3845 index = size_int (i);
3848 index = size_binop (MINUS_EXPR, index,
3849 TYPE_MIN_VALUE (domain));
3850 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3851 size_int (BITS_PER_UNIT));
3852 position = size_binop (MULT_EXPR, index, position);
3853 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3854 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3855 xtarget = change_address (target, mode, addr);
3856 store_expr (value, xtarget, 0);
3861 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
3862 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3864 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3865 store_constructor_field (target, bitsize, bitpos,
3866 mode, value, type, cleared);
3870 /* set constructor assignments */
3871 else if (TREE_CODE (type) == SET_TYPE)
3873 tree elt = CONSTRUCTOR_ELTS (exp);
3874 rtx xtarget = XEXP (target, 0);
3875 int set_word_size = TYPE_ALIGN (type);
3876 int nbytes = int_size_in_bytes (type), nbits;
3877 tree domain = TYPE_DOMAIN (type);
3878 tree domain_min, domain_max, bitlength;
3880 /* The default implementation strategy is to extract the constant
3881 parts of the constructor, use that to initialize the target,
3882 and then "or" in whatever non-constant ranges we need in addition.
3884 If a large set is all zero or all ones, it is
3885 probably better to set it using memset (if available) or bzero.
3886 Also, if a large set has just a single range, it may be
3887 better to first clear the whole set (using bzero/memset)
3888 and then set the bits we want. */
3890 /* Check for all zeros. */
3891 if (elt == NULL_TREE)
3894 clear_storage (target, expr_size (exp),
3895 TYPE_ALIGN (type) / BITS_PER_UNIT);
3899 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3900 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3901 bitlength = size_binop (PLUS_EXPR,
3902 size_binop (MINUS_EXPR, domain_max, domain_min),
3905 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
3907 nbits = TREE_INT_CST_LOW (bitlength);
3909 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
3910 are "complicated" (more than one range), initialize (the
3911 constant parts) by copying from a constant. */
3912 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
3913 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
3915 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3916 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
3917 char *bit_buffer = (char *) alloca (nbits);
3918 HOST_WIDE_INT word = 0;
3921 int offset = 0; /* In bytes from beginning of set. */
3922 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
3925 if (bit_buffer[ibit])
3927 if (BYTES_BIG_ENDIAN)
3928 word |= (1 << (set_word_size - 1 - bit_pos));
3930 word |= 1 << bit_pos;
3933 if (bit_pos >= set_word_size || ibit == nbits)
3935 if (word != 0 || ! cleared)
3937 rtx datum = GEN_INT (word);
3939 /* The assumption here is that it is safe to use
3940 XEXP if the set is multi-word, but not if
3941 it's single-word. */
3942 if (GET_CODE (target) == MEM)
3944 to_rtx = plus_constant (XEXP (target, 0), offset);
3945 to_rtx = change_address (target, mode, to_rtx);
3947 else if (offset == 0)
3951 emit_move_insn (to_rtx, datum);
3957 offset += set_word_size / BITS_PER_UNIT;
3963 /* Don't bother clearing storage if the set is all ones. */
3964 if (TREE_CHAIN (elt) != NULL_TREE
3965 || (TREE_PURPOSE (elt) == NULL_TREE
3967 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
3968 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
3969 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
3970 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
3972 clear_storage (target, expr_size (exp),
3973 TYPE_ALIGN (type) / BITS_PER_UNIT);
3976 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
3978 /* start of range of element or NULL */
3979 tree startbit = TREE_PURPOSE (elt);
3980 /* end of range of element, or element value */
3981 tree endbit = TREE_VALUE (elt);
3982 HOST_WIDE_INT startb, endb;
3983 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
3985 bitlength_rtx = expand_expr (bitlength,
3986 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
3988 /* handle non-range tuple element like [ expr ] */
3989 if (startbit == NULL_TREE)
3991 startbit = save_expr (endbit);
3994 startbit = convert (sizetype, startbit);
3995 endbit = convert (sizetype, endbit);
3996 if (! integer_zerop (domain_min))
3998 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
3999 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4001 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4002 EXPAND_CONST_ADDRESS);
4003 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4004 EXPAND_CONST_ADDRESS);
4008 targetx = assign_stack_temp (GET_MODE (target),
4009 GET_MODE_SIZE (GET_MODE (target)),
4011 emit_move_insn (targetx, target);
4013 else if (GET_CODE (target) == MEM)
4018 #ifdef TARGET_MEM_FUNCTIONS
4019 /* Optimization: If startbit and endbit are
4020 constants divisible by BITS_PER_UNIT,
4021 call memset instead. */
4022 if (TREE_CODE (startbit) == INTEGER_CST
4023 && TREE_CODE (endbit) == INTEGER_CST
4024 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4025 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4027 emit_library_call (memset_libfunc, 0,
4029 plus_constant (XEXP (targetx, 0),
4030 startb / BITS_PER_UNIT),
4032 constm1_rtx, TYPE_MODE (integer_type_node),
4033 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4034 TYPE_MODE (sizetype));
4039 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
4040 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4041 bitlength_rtx, TYPE_MODE (sizetype),
4042 startbit_rtx, TYPE_MODE (sizetype),
4043 endbit_rtx, TYPE_MODE (sizetype));
4046 emit_move_insn (target, targetx);
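/* Standalone sketch of the constant word-packing loop above (not from
   the original file; the 32-bit word size, the `out' array, and the 0/1
   contents of bit_buffer as produced by get_set_constructor_bits are all
   assumptions of the sketch).  Bits fill each word from the low end on a
   little-endian machine and from the high end on a big-endian one.  */
#if 0
static void
pack_set_bits (const char *bit_buffer, int nbits, int big_endian,
               unsigned long *out)
{
  const int set_word_size = 32;	/* assumed word size in bits */
  unsigned long word = 0;
  int ibit, bit_pos = 0, windex = 0;

  for (ibit = 0; ibit < nbits; ibit++)
    {
      if (bit_buffer[ibit])
        word |= big_endian
                ? 1UL << (set_word_size - 1 - bit_pos)
                : 1UL << bit_pos;
      if (++bit_pos == set_word_size)
        {
          out[windex++] = word;	/* flush a completed word */
          word = 0;
          bit_pos = 0;
        }
    }
  if (bit_pos != 0)
    out[windex] = word;		/* final, partially filled word */
}
#endif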
4054 /* Store the value of EXP (an expression tree)
4055 into a subfield of TARGET which has mode MODE and occupies
4056 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4057 If MODE is VOIDmode, it means that we are storing into a bit-field.
4059 If VALUE_MODE is VOIDmode, return nothing in particular.
4060 UNSIGNEDP is not used in this case.
4062 Otherwise, return an rtx for the value stored. This rtx
4063 has mode VALUE_MODE if that is convenient to do.
4064 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4066 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4067 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
4070 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4071 unsignedp, align, total_size)
4073 int bitsize, bitpos;
4074 enum machine_mode mode;
4076 enum machine_mode value_mode;
4081 HOST_WIDE_INT width_mask = 0;
4083 if (bitsize < HOST_BITS_PER_WIDE_INT)
4084 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4086 /* If we are storing into an unaligned field of an aligned union that is
4087 in a register, we may have the mode of TARGET being an integer mode but
4088 MODE == BLKmode. In that case, get an aligned object whose size and
4089 alignment are the same as TARGET and store TARGET into it (we can avoid
4090 the store if the field being stored is the entire width of TARGET). Then
4091 call ourselves recursively to store the field into a BLKmode version of
4092 that object. Finally, load from the object into TARGET. This is not
4093 very efficient in general, but should only be slightly more expensive
4094 than the otherwise-required unaligned accesses. Perhaps this can be
4095 cleaned up later. */
4098 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4100 rtx object = assign_stack_temp (GET_MODE (target),
4101 GET_MODE_SIZE (GET_MODE (target)), 0);
4102 rtx blk_object = copy_rtx (object);
4104 MEM_IN_STRUCT_P (object) = 1;
4105 MEM_IN_STRUCT_P (blk_object) = 1;
4106 PUT_MODE (blk_object, BLKmode);
4108 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4109 emit_move_insn (object, target);
4111 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4114 /* Even though we aren't returning target, we need to
4115 give it the updated value. */
4116 emit_move_insn (target, object);
4121 /* If the structure is in a register or if the component
4122 is a bit field, we cannot use addressing to access it.
4123 Use bit-field techniques or SUBREG to store in it. */
4125 if (mode == VOIDmode
4126 || (mode != BLKmode && ! direct_store[(int) mode])
4127 || GET_CODE (target) == REG
4128 || GET_CODE (target) == SUBREG
4129 /* If the field isn't aligned enough to store as an ordinary memref,
4130 store it as a bit field. */
4131 || (SLOW_UNALIGNED_ACCESS
4132 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4133 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4135 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4137 /* If BITSIZE is narrower than the size of the type of EXP
4138 we will be narrowing TEMP. Normally, what's wanted are the
4139 low-order bits. However, if EXP's type is a record and this is a
4140 big-endian machine, we want the upper BITSIZE bits. */
4141 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4142 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4143 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4144 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4145 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4149 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
4151 if (mode != VOIDmode && mode != BLKmode
4152 && mode != TYPE_MODE (TREE_TYPE (exp)))
4153 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4155 /* If the modes of TARGET and TEMP are both BLKmode, both
4156 must be in memory and BITPOS must be aligned on a byte
4157 boundary. If so, we simply do a block copy. */
4158 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4160 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4161 || bitpos % BITS_PER_UNIT != 0)
4164 target = change_address (target, VOIDmode,
4165 plus_constant (XEXP (target, 0),
4166 bitpos / BITS_PER_UNIT));
4168 emit_block_move (target, temp,
4169 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4173 return value_mode == VOIDmode ? const0_rtx : target;
4176 /* Store the value in the bitfield. */
4177 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4178 if (value_mode != VOIDmode)
4180 /* The caller wants an rtx for the value. */
4181 /* If possible, avoid refetching from the bitfield itself. */
4183 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4186 enum machine_mode tmode;
4189 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4190 tmode = GET_MODE (temp);
4191 if (tmode == VOIDmode)
4193 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4194 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4195 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4197 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4198 NULL_RTX, value_mode, 0, align,
4205 rtx addr = XEXP (target, 0);
4208 /* If a value is wanted, it must be the lhs;
4209 so make the address stable for multiple use. */
4211 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4212 && ! CONSTANT_ADDRESS_P (addr)
4213 /* A frame-pointer reference is already stable. */
4214 && ! (GET_CODE (addr) == PLUS
4215 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4216 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4217 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4218 addr = copy_to_reg (addr);
4220 /* Now build a reference to just the desired component. */
4222 to_rtx = copy_rtx (change_address (target, mode,
4223 plus_constant (addr,
4225 / BITS_PER_UNIT))));
4226 MEM_IN_STRUCT_P (to_rtx) = 1;
4228 return store_expr (exp, to_rtx, value_mode != VOIDmode);
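/* Worked example for the shift pair used above when refetching a signed
   bit-field (illustrative; it relies on arithmetic right shifts of
   signed values, which ISO C leaves implementation-defined).  For a
   5-bit field in a 32-bit word, count = 32 - 5 = 27:

       int v = raw;              -- field in bits 0..4, e.g. raw = 0x1d
       v = (v << 27) >> 27;      -- now v == -3, since 0x1d is -3 in
                                 -- 5-bit two's complement

   The unsigned case instead masks with width_mask = (1 << 5) - 1.  */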
4232 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4233 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4234 ARRAY_REFs and find the ultimate containing object, which we return.
4236 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4237 bit position, and *PUNSIGNEDP to the signedness of the field.
4238 If the position of the field is variable, we store a tree
4239 giving the variable offset (in units) in *POFFSET.
4240 This offset is in addition to the bit position.
4241 If the position is not variable, we store 0 in *POFFSET.
4242 We set *PALIGNMENT to the alignment in bytes of the address that will be
4243 computed. This is the alignment of the thing we return if *POFFSET
4244 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4246 If any of the extraction expressions is volatile,
4247 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4249 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4250 is a mode that can be used to access the field. In that case, *PBITSIZE is redundant.
4253 If the field describes a variable-sized object, *PMODE is set to
4254 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4255 this case, but the address of the object can be found. */
4258 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4259 punsignedp, pvolatilep, palignment)
4264 enum machine_mode *pmode;
4269 tree orig_exp = exp;
4271 enum machine_mode mode = VOIDmode;
4272 tree offset = integer_zero_node;
4273 int alignment = BIGGEST_ALIGNMENT;
4275 if (TREE_CODE (exp) == COMPONENT_REF)
4277 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4278 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4279 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4280 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4282 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4284 size_tree = TREE_OPERAND (exp, 1);
4285 *punsignedp = TREE_UNSIGNED (exp);
4289 mode = TYPE_MODE (TREE_TYPE (exp));
4290 *pbitsize = GET_MODE_BITSIZE (mode);
4291 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4296 if (TREE_CODE (size_tree) != INTEGER_CST)
4297 mode = BLKmode, *pbitsize = -1;
4298 else
4299 *pbitsize = TREE_INT_CST_LOW (size_tree);
4302 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4303 and find the ultimate containing object. */
4309 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4311 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4312 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4313 : TREE_OPERAND (exp, 2));
4314 tree constant = integer_zero_node, var = pos;
4316 /* If this field hasn't been filled in yet, don't go
4317 past it. This should only happen when folding expressions
4318 made during type construction. */
4322 /* Assume here that the offset is a multiple of a unit.
4323 If not, there should be an explicitly added constant. */
4324 if (TREE_CODE (pos) == PLUS_EXPR
4325 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4326 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4327 else if (TREE_CODE (pos) == INTEGER_CST)
4328 constant = pos, var = integer_zero_node;
4330 *pbitpos += TREE_INT_CST_LOW (constant);
4331 offset = size_binop (PLUS_EXPR, offset,
4332 size_binop (EXACT_DIV_EXPR, var,
4333 size_int (BITS_PER_UNIT)));
4336 else if (TREE_CODE (exp) == ARRAY_REF)
4338 /* This code is based on the code in case ARRAY_REF in expand_expr
4339 below. We assume here that the size of an array element is
4340 always an integral multiple of BITS_PER_UNIT. */
4342 tree index = TREE_OPERAND (exp, 1);
4343 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4345 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4346 tree index_type = TREE_TYPE (index);
4348 if (! integer_zerop (low_bound))
4349 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
4351 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4353 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4355 index_type = TREE_TYPE (index);
4358 index = fold (build (MULT_EXPR, index_type, index,
4359 convert (index_type,
4360 TYPE_SIZE (TREE_TYPE (exp)))));
4362 if (TREE_CODE (index) == INTEGER_CST
4363 && TREE_INT_CST_HIGH (index) == 0)
4364 *pbitpos += TREE_INT_CST_LOW (index);
4366 offset = size_binop (PLUS_EXPR, offset,
4367 size_binop (FLOOR_DIV_EXPR, index,
4368 size_int (BITS_PER_UNIT)));
4370 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4371 && ! ((TREE_CODE (exp) == NOP_EXPR
4372 || TREE_CODE (exp) == CONVERT_EXPR)
4373 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4374 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4376 && (TYPE_MODE (TREE_TYPE (exp))
4377 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4380 /* If any reference in the chain is volatile, the effect is volatile. */
4381 if (TREE_THIS_VOLATILE (exp))
4384 /* If the offset is non-constant already, then we can't assume any
4385 alignment more than the alignment here. */
4386 if (! integer_zerop (offset))
4387 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4389 exp = TREE_OPERAND (exp, 0);
4392 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4393 alignment = MIN (alignment, DECL_ALIGN (exp));
4394 else if (TREE_TYPE (exp) != 0)
4395 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4397 if (integer_zerop (offset))
4400 if (offset != 0 && contains_placeholder_p (offset))
4401 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4405 *palignment = alignment / BITS_PER_UNIT;
4409 /* Subroutine of expand_expr: compute memory_usage from modifier. */
4410 static enum memory_use_mode
4411 get_memory_usage_from_modifier (modifier)
4412 enum expand_modifier modifier;
4417 return MEMORY_USE_RO;
4419 case EXPAND_MEMORY_USE_WO:
4420 return MEMORY_USE_WO;
4422 case EXPAND_MEMORY_USE_RW:
4423 return MEMORY_USE_RW;
4425 case EXPAND_INITIALIZER:
4426 case EXPAND_MEMORY_USE_DONT:
4428 case EXPAND_CONST_ADDRESS:
4429 return MEMORY_USE_DONT;
4430 case EXPAND_MEMORY_USE_BAD:
4436 /* Given an rtx VALUE that may contain additions and multiplications,
4437 return an equivalent value that just refers to a register or memory.
4438 This is done by generating instructions to perform the arithmetic
4439 and returning a pseudo-register containing the value.
4441 The returned value may be a REG, SUBREG, MEM or constant. */
4444 force_operand (value, target)
4447 register optab binoptab = 0;
4448 /* Use a temporary to force order of execution of calls to force_operand. */
4452 /* Use subtarget as the target for operand 0 of a binary operation. */
4453 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4455 if (GET_CODE (value) == PLUS)
4456 binoptab = add_optab;
4457 else if (GET_CODE (value) == MINUS)
4458 binoptab = sub_optab;
4459 else if (GET_CODE (value) == MULT)
4461 op2 = XEXP (value, 1);
4462 if (!CONSTANT_P (op2)
4463 && !(GET_CODE (op2) == REG && op2 != subtarget))
4465 tmp = force_operand (XEXP (value, 0), subtarget);
4466 return expand_mult (GET_MODE (value), tmp,
4467 force_operand (op2, NULL_RTX),
4473 op2 = XEXP (value, 1);
4474 if (!CONSTANT_P (op2)
4475 && !(GET_CODE (op2) == REG && op2 != subtarget))
4477 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4479 binoptab = add_optab;
4480 op2 = negate_rtx (GET_MODE (value), op2);
4483 /* Check for an addition with OP2 a constant integer and our first
4484 operand a PLUS of a virtual register and something else. In that
4485 case, we want to emit the sum of the virtual register and the
4486 constant first and then add the other value. This allows virtual
4487 register instantiation to simply modify the constant rather than
4488 creating another one around this addition. */
4489 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4490 && GET_CODE (XEXP (value, 0)) == PLUS
4491 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4492 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4493 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4495 rtx temp = expand_binop (GET_MODE (value), binoptab,
4496 XEXP (XEXP (value, 0), 0), op2,
4497 subtarget, 0, OPTAB_LIB_WIDEN);
4498 return expand_binop (GET_MODE (value), binoptab, temp,
4499 force_operand (XEXP (XEXP (value, 0), 1), 0),
4500 target, 0, OPTAB_LIB_WIDEN);
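/* E.g. for VALUE = (plus (plus (reg virtual-stack-vars) (reg 66))
   (const_int 4)) the code above first forms virtual-stack-vars + 4,
   so that instantiation can fold its elimination offset into the
   constant, and only then adds (reg 66).  (Illustrative shapes, not
   taken from this file.)  */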
4503 tmp = force_operand (XEXP (value, 0), subtarget);
4504 return expand_binop (GET_MODE (value), binoptab, tmp,
4505 force_operand (op2, NULL_RTX),
4506 target, 0, OPTAB_LIB_WIDEN);
4507 /* We give UNSIGNEDP = 0 to expand_binop
4508 because the only operations we are expanding here are signed ones. */
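/* Illustrative use of force_operand (assumed shapes; SOME_REG stands
   for a hypothetical SImode pseudo):  */
#if 0
  rtx value = gen_rtx (PLUS, SImode,
                       gen_rtx (MULT, SImode, some_reg, GEN_INT (4)),
                       GEN_INT (8));
  rtx result = force_operand (value, NULL_RTX);
  /* The multiply and add have now been emitted as insns; RESULT is a
     pseudo REG (or something simpler) holding SOME_REG * 4 + 8.  */
#endif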
4513 /* Subroutine of expand_expr:
4514 save the non-copied parts (LIST) of an expr (LHS), and return a list
4515 which can restore these values to their previous values,
4516 should something modify their storage. */
4519 save_noncopied_parts (lhs, list)
4526 for (tail = list; tail; tail = TREE_CHAIN (tail))
4527 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4528 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
4531 tree part = TREE_VALUE (tail);
4532 tree part_type = TREE_TYPE (part);
4533 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
4534 rtx target = assign_temp (part_type, 0, 1, 1);
4535 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
4536 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
4537 parts = tree_cons (to_be_saved,
4538 build (RTL_EXPR, part_type, NULL_TREE,
4541 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
4546 /* Subroutine of expand_expr:
4547 record the non-copied parts (LIST) of an expr (LHS), and return a list
4548 which specifies the initial values of these parts. */
4551 init_noncopied_parts (lhs, list)
4558 for (tail = list; tail; tail = TREE_CHAIN (tail))
4559 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4560 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
4563 tree part = TREE_VALUE (tail);
4564 tree part_type = TREE_TYPE (part);
4565 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
4566 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
4571 /* Subroutine of expand_expr: return nonzero iff there is no way that
4572 EXP can reference X, which is being modified. */
4575 safe_from_p (x, exp)
4583 /* If EXP has varying size, we MUST use a target since we currently
4584 have no way of allocating temporaries of variable size
4585 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
4586 So we assume here that something at a higher level has prevented a
4587 clash. This is somewhat bogus, but the best we can do. Only
4588 do this when X is BLKmode. */
4589 || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4590 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
4591 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
4592 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
4593 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
4595 && GET_MODE (x) == BLKmode))
4598 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
4599 find the underlying pseudo. */
4600 if (GET_CODE (x) == SUBREG)
4603 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4607 /* If X is a location in the outgoing argument area, it is always safe. */
4608 if (GET_CODE (x) == MEM
4609 && (XEXP (x, 0) == virtual_outgoing_args_rtx
4610 || (GET_CODE (XEXP (x, 0)) == PLUS
4611 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
4614 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4617 exp_rtl = DECL_RTL (exp);
4624 if (TREE_CODE (exp) == TREE_LIST)
4625 return ((TREE_VALUE (exp) == 0
4626 || safe_from_p (x, TREE_VALUE (exp)))
4627 && (TREE_CHAIN (exp) == 0
4628 || safe_from_p (x, TREE_CHAIN (exp))));
4633 return safe_from_p (x, TREE_OPERAND (exp, 0));
4637 return (safe_from_p (x, TREE_OPERAND (exp, 0))
4638 && safe_from_p (x, TREE_OPERAND (exp, 1)));
4642 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
4643 the expression. If it is set, we conflict iff we are that rtx or
4644 both are in memory. Otherwise, we check all operands of the
4645 expression recursively. */
4647 switch (TREE_CODE (exp))
4650 return (staticp (TREE_OPERAND (exp, 0))
4651 || safe_from_p (x, TREE_OPERAND (exp, 0)));
4654 if (GET_CODE (x) == MEM)
4659 exp_rtl = CALL_EXPR_RTL (exp);
4662 /* Assume that the call will clobber all hard registers and all of memory. */
4664 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4665 || GET_CODE (x) == MEM)
4672 /* If a sequence exists, we would have to scan every instruction
4673 in the sequence to see if it was safe. This is probably not worthwhile. */
4675 if (RTL_EXPR_SEQUENCE (exp))
4678 exp_rtl = RTL_EXPR_RTL (exp);
4681 case WITH_CLEANUP_EXPR:
4682 exp_rtl = RTL_EXPR_RTL (exp);
4685 case CLEANUP_POINT_EXPR:
4686 return safe_from_p (x, TREE_OPERAND (exp, 0));
4689 exp_rtl = SAVE_EXPR_RTL (exp);
4693 /* The only operand we look at is operand 1. The rest aren't
4694 part of the expression. */
4695 return safe_from_p (x, TREE_OPERAND (exp, 1));
4697 case METHOD_CALL_EXPR:
4698 /* This takes a rtx argument, but shouldn't appear here. */
4702 /* If we have an rtx, we do not need to scan our operands. */
4706 nops = tree_code_length[(int) TREE_CODE (exp)];
4707 for (i = 0; i < nops; i++)
4708 if (TREE_OPERAND (exp, i) != 0
4709 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
4713 /* If we have an rtl, find any enclosed object. Then see if we conflict with it. */
4717 if (GET_CODE (exp_rtl) == SUBREG)
4719 exp_rtl = SUBREG_REG (exp_rtl);
4720 if (GET_CODE (exp_rtl) == REG
4721 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
4725 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
4726 are memory and EXP is not readonly. */
4727 return ! (rtx_equal_p (x, exp_rtl)
4728 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
4729 && ! TREE_READONLY (exp)));
4732 /* If we reach here, it is safe. */
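/* Typical caller pattern (illustrative; compare the CONSTRUCTOR case
   below): before reusing TARGET while evaluating EXP, check

       if (target == 0 || ! safe_from_p (target, exp))
         target = gen_reg_rtx (mode);

   and fall back to a fresh pseudo whenever a conflict is possible.  */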
4736 /* Subroutine of expand_expr: return nonzero iff EXP is an
4737 expression whose type is statically determinable. */
4743 if (TREE_CODE (exp) == PARM_DECL
4744 || TREE_CODE (exp) == VAR_DECL
4745 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
4746 || TREE_CODE (exp) == COMPONENT_REF
4747 || TREE_CODE (exp) == ARRAY_REF)
4752 /* Subroutine of expand_expr: return rtx if EXP is a
4753 variable or parameter; else return 0. */
4760 switch (TREE_CODE (exp))
4764 return DECL_RTL (exp);
4770 /* expand_expr: generate code for computing expression EXP.
4771 An rtx for the computed value is returned. The value is never null.
4772 In the case of a void EXP, const0_rtx is returned.
4774 The value may be stored in TARGET if TARGET is nonzero.
4775 TARGET is just a suggestion; callers must assume that
4776 the rtx returned may not be the same as TARGET.
4778 If TARGET is CONST0_RTX, it means that the value will be ignored.
4780 If TMODE is not VOIDmode, it suggests generating the
4781 result in mode TMODE. But this is done only when convenient.
4782 Otherwise, TMODE is ignored and the value is generated in its natural mode.
4783 TMODE is just a suggestion; callers must assume that
4784 the rtx returned may not have mode TMODE.
4786 Note that TARGET may have neither TMODE nor MODE. In that case, it
4787 probably will not be used.
4789 If MODIFIER is EXPAND_SUM then when EXP is an addition
4790 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4791 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4792 products as above, or REG or MEM, or constant.
4793 Ordinarily in such cases we would output mul or add instructions
4794 and then return a pseudo reg containing the sum.
4796 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4797 it also marks a label as absolutely required (it can't be dead).
4798 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
4799 This is used for outputting expressions used in initializers.
4801 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4802 with a constant address even if that address is not normally legitimate.
4803 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
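/* Illustrative call patterns (assumed typical uses of the conventions
   above; not code from this file):

       op0 = expand_expr (exp, NULL_RTX, VOIDmode, 0);
           evaluate EXP anywhere convenient, in its natural mode;
       expand_expr (exp, const0_rtx, VOIDmode, 0);
           evaluate EXP only for its side effects;
       op0 = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_SUM);
           form an address, possibly returning a (PLUS ...) nest.  */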
4806 expand_expr (exp, target, tmode, modifier)
4809 enum machine_mode tmode;
4810 enum expand_modifier modifier;
4812 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4813 This is static so it will be accessible to our recursive callees. */
4814 static tree placeholder_list = 0;
4815 register rtx op0, op1, temp;
4816 tree type = TREE_TYPE (exp);
4817 int unsignedp = TREE_UNSIGNED (type);
4818 register enum machine_mode mode = TYPE_MODE (type);
4819 register enum tree_code code = TREE_CODE (exp);
4821 /* Use subtarget as the target for operand 0 of a binary operation. */
4822 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4823 rtx original_target = target;
4824 /* Maybe defer this until sure not doing bytecode? */
4825 int ignore = (target == const0_rtx
4826 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4827 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4828 || code == COND_EXPR)
4829 && TREE_CODE (type) == VOID_TYPE));
4831 /* Used by check-memory-usage to make modifier read only. */
4832 enum expand_modifier ro_modifier;
4834 /* Make a read-only version of the modifier. */
4835 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
4836 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
4837 ro_modifier = modifier;
4839 ro_modifier = EXPAND_NORMAL;
4841 if (output_bytecode && modifier != EXPAND_INITIALIZER)
4843 bc_expand_expr (exp);
4847 /* Don't use hard regs as subtargets, because the combiner
4848 can only handle pseudo regs. */
4849 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4851 /* Avoid subtargets inside loops,
4852 since they hide some invariant expressions. */
4853 if (preserve_subexpressions_p ())
4856 /* If we are going to ignore this result, we need only do something
4857 if there is a side-effect somewhere in the expression. If there
4858 is, short-circuit the most common cases here. Note that we must
4859 not call expand_expr with anything but const0_rtx in case this
4860 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
4864 if (! TREE_SIDE_EFFECTS (exp))
4867 /* Ensure we reference a volatile object even if value is ignored. */
4868 if (TREE_THIS_VOLATILE (exp)
4869 && TREE_CODE (exp) != FUNCTION_DECL
4870 && mode != VOIDmode && mode != BLKmode)
4872 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
4873 if (GET_CODE (temp) == MEM)
4874 temp = copy_to_reg (temp);
4878 if (TREE_CODE_CLASS (code) == '1')
4879 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4880 VOIDmode, ro_modifier);
4881 else if (TREE_CODE_CLASS (code) == '2'
4882 || TREE_CODE_CLASS (code) == '<')
4884 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
4885 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
4888 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4889 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4890 /* If the second operand has no side effects, just evaluate the first. */
4892 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4893 VOIDmode, ro_modifier);
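/* E.g. a statement such as `(void) (f () + g ());' reaches the '2'
   class case above: both calls are expanded with const0_rtx as the
   target, purely for their side effects.  (Illustrative example.)  */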
4898 /* If we will do cse, generate all results into pseudo registers
4899 since 1) that allows cse to find more things
4900 and 2) otherwise cse could produce an insn the machine cannot support. */
4903 if (! cse_not_expected && mode != BLKmode && target
4904 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4911 tree function = decl_function_context (exp);
4912 /* Handle using a label in a containing function. */
4913 if (function != current_function_decl
4914 && function != inline_function_decl && function != 0)
4916 struct function *p = find_function_data (function);
4917 /* Allocate in the memory associated with the function
4918 that the label is in. */
4919 push_obstacks (p->function_obstack,
4920 p->function_maybepermanent_obstack);
4922 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4923 label_rtx (exp), p->forced_labels);
4926 else if (modifier == EXPAND_INITIALIZER)
4927 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4928 label_rtx (exp), forced_labels);
4929 temp = gen_rtx (MEM, FUNCTION_MODE,
4930 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
4931 if (function != current_function_decl
4932 && function != inline_function_decl && function != 0)
4933 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
4938 if (DECL_RTL (exp) == 0)
4940 error_with_decl (exp, "prior parameter's size depends on `%s'");
4941 return CONST0_RTX (mode);
4944 /* ... fall through ... */
4947 /* If a static var's type was incomplete when the decl was written,
4948 but the type is complete now, lay out the decl now. */
4949 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4950 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
4952 push_obstacks_nochange ();
4953 end_temporary_allocation ();
4954 layout_decl (exp, 0);
4955 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
4959 /* Only check automatic variables. Currently, function arguments are
4960 not checked (this can be done at compile-time with prototypes).
4961 Aggregates are not checked. */
4962 if (flag_check_memory_usage && code == VAR_DECL
4963 && GET_CODE (DECL_RTL (exp)) == MEM
4964 && DECL_CONTEXT (exp) != NULL_TREE
4965 && ! TREE_STATIC (exp)
4966 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4968 enum memory_use_mode memory_usage;
4969 memory_usage = get_memory_usage_from_modifier (modifier);
4971 if (memory_usage != MEMORY_USE_DONT)
4972 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
4973 XEXP (DECL_RTL (exp), 0), ptr_mode,
4974 GEN_INT (int_size_in_bytes (type)),
4975 TYPE_MODE (sizetype),
4976 GEN_INT (memory_usage), QImode);
4979 /* ... fall through ... */
4983 if (DECL_RTL (exp) == 0)
4986 /* Ensure variable marked as used even if it doesn't go through
4987 a parser. If it hasn't been used yet, write out an external definition. */
4989 if (! TREE_USED (exp))
4991 assemble_external (exp);
4992 TREE_USED (exp) = 1;
4995 /* Show we haven't gotten RTL for this yet. */
4998 /* Handle variables inherited from containing functions. */
4999 context = decl_function_context (exp);
5001 /* We treat inline_function_decl as an alias for the current function
5002 because that is the inline function whose vars, types, etc.
5003 are being merged into the current function.
5004 See expand_inline_function. */
5006 if (context != 0 && context != current_function_decl
5007 && context != inline_function_decl
5008 /* If var is static, we don't need a static chain to access it. */
5009 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5010 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5014 /* Mark as non-local and addressable. */
5015 DECL_NONLOCAL (exp) = 1;
5016 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5018 mark_addressable (exp);
5019 if (GET_CODE (DECL_RTL (exp)) != MEM)
5021 addr = XEXP (DECL_RTL (exp), 0);
5022 if (GET_CODE (addr) == MEM)
5023 addr = gen_rtx (MEM, Pmode,
5024 fix_lexical_addr (XEXP (addr, 0), exp));
5026 addr = fix_lexical_addr (addr, exp);
5027 temp = change_address (DECL_RTL (exp), mode, addr);
5030 /* This is the case of an array whose size is to be determined
5031 from its initializer, while the initializer is still being parsed.
5034 else if (GET_CODE (DECL_RTL (exp)) == MEM
5035 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5036 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5037 XEXP (DECL_RTL (exp), 0));
5039 /* If DECL_RTL is memory, we are in the normal case and either
5040 the address is not valid or it is not a register and -fforce-addr
5041 is specified, get the address into a register. */
5043 else if (GET_CODE (DECL_RTL (exp)) == MEM
5044 && modifier != EXPAND_CONST_ADDRESS
5045 && modifier != EXPAND_SUM
5046 && modifier != EXPAND_INITIALIZER
5047 && (! memory_address_p (DECL_MODE (exp),
5048 XEXP (DECL_RTL (exp), 0))
5050 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5051 temp = change_address (DECL_RTL (exp), VOIDmode,
5052 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5054 /* If we got something, return it. But first, set the alignment
5055 if the address is a register. */
5058 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5059 mark_reg_pointer (XEXP (temp, 0),
5060 DECL_ALIGN (exp) / BITS_PER_UNIT);
5065 /* If the mode of DECL_RTL does not match that of the decl, it
5066 must be a promoted value. We return a SUBREG of the wanted mode,
5067 but mark it so that we know that it was already extended. */
5069 if (GET_CODE (DECL_RTL (exp)) == REG
5070 && GET_MODE (DECL_RTL (exp)) != mode)
5072 /* Get the signedness used for this variable. Ensure we get the
5073 same mode we got when the variable was declared. */
5074 if (GET_MODE (DECL_RTL (exp))
5075 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5078 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
5079 SUBREG_PROMOTED_VAR_P (temp) = 1;
5080 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5084 return DECL_RTL (exp);
5087 return immed_double_const (TREE_INT_CST_LOW (exp),
5088 TREE_INT_CST_HIGH (exp),
5092 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5093 EXPAND_MEMORY_USE_BAD);
5096 /* If optimized, generate immediate CONST_DOUBLE
5097 which will be turned into memory by reload if necessary.
5099 We used to force a register so that loop.c could see it. But
5100 this does not allow gen_* patterns to perform optimizations with
5101 the constants. It also produces two insns in cases like "x = 1.0;".
5102 On most machines, floating-point constants are not permitted in
5103 many insns, so we'd end up copying it to a register in any case.
5105 Now, we do the copying in expand_binop, if appropriate. */
5106 return immed_real_const (exp);
5110 if (! TREE_CST_RTL (exp))
5111 output_constant_def (exp);
5113 /* TREE_CST_RTL probably contains a constant address.
5114 On RISC machines where a constant address isn't valid,
5115 make some insns to get that address into a register. */
5116 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5117 && modifier != EXPAND_CONST_ADDRESS
5118 && modifier != EXPAND_INITIALIZER
5119 && modifier != EXPAND_SUM
5120 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5122 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5123 return change_address (TREE_CST_RTL (exp), VOIDmode,
5124 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5125 return TREE_CST_RTL (exp);
5128 context = decl_function_context (exp);
5130 /* If this SAVE_EXPR was at global context, assume we are an
5131 initialization function and move it into our context. */
5133 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
5135 /* We treat inline_function_decl as an alias for the current function
5136 because that is the inline function whose vars, types, etc.
5137 are being merged into the current function.
5138 See expand_inline_function. */
5139 if (context == current_function_decl || context == inline_function_decl)
5142 /* If this is non-local, handle it. */
5145 /* The following call just exists to abort if the context is
5146 not of a containing function. */
5147 find_function_data (context);
5149 temp = SAVE_EXPR_RTL (exp);
5150 if (temp && GET_CODE (temp) == REG)
5152 put_var_into_stack (exp);
5153 temp = SAVE_EXPR_RTL (exp);
5155 if (temp == 0 || GET_CODE (temp) != MEM)
5157 return change_address (temp, mode,
5158 fix_lexical_addr (XEXP (temp, 0), exp));
5160 if (SAVE_EXPR_RTL (exp) == 0)
5162 if (mode == VOIDmode)
5165 temp = assign_temp (type, 0, 0, 0);
5167 SAVE_EXPR_RTL (exp) = temp;
5168 if (!optimize && GET_CODE (temp) == REG)
5169 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
5172 /* If the mode of TEMP does not match that of the expression, it
5173 must be a promoted value. We pass store_expr a SUBREG of the
5174 wanted mode but mark it so that we know that it was already
5175 extended. Note that `unsignedp' was modified above in this case. */
5178 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5180 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
5181 SUBREG_PROMOTED_VAR_P (temp) = 1;
5182 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5185 if (temp == const0_rtx)
5186 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5187 EXPAND_MEMORY_USE_BAD);
5189 store_expr (TREE_OPERAND (exp, 0), temp, 0);
5192 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5193 must be a promoted value. We return a SUBREG of the wanted mode,
5194 but mark it so that we know that it was already extended. */
5196 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5197 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5199 /* Compute the signedness and make the proper SUBREG. */
5200 promote_mode (type, mode, &unsignedp, 0);
5201 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
5202 SUBREG_PROMOTED_VAR_P (temp) = 1;
5203 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5207 return SAVE_EXPR_RTL (exp);
5212 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5213 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5217 case PLACEHOLDER_EXPR:
5218 /* If there is an object on the head of the placeholder list,
5219 see if some object in its references is of type TYPE. For
5220 further information, see tree.def. */
5221 if (placeholder_list)
5223 tree need_type = TYPE_MAIN_VARIANT (type);
5225 tree old_list = placeholder_list;
5228 /* See if the object is of the type that we want. */
5229 if ((TYPE_MAIN_VARIANT (TREE_TYPE (TREE_PURPOSE (placeholder_list)))
5231 object = TREE_PURPOSE (placeholder_list);
5233 /* Find the innermost reference that is of the type we want. */
5234 for (elt = TREE_PURPOSE (placeholder_list);
5236 && (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5237 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5238 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5239 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e');
5240 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
5241 || TREE_CODE (elt) == COND_EXPR)
5242 ? TREE_OPERAND (elt, 1) : TREE_OPERAND (elt, 0)))
5243 if (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5244 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (elt, 0)))
5247 object = TREE_OPERAND (elt, 0);
5253 /* Expand this object skipping the list entries before
5254 it was found in case it is also a PLACEHOLDER_EXPR.
5255 In that case, we want to translate it using subsequent entries. */
5257 placeholder_list = TREE_CHAIN (placeholder_list);
5258 temp = expand_expr (object, original_target, tmode, ro_modifier);
5259 placeholder_list = old_list;
5264 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5267 case WITH_RECORD_EXPR:
5268 /* Put the object on the placeholder list, expand our first operand,
5269 and pop the list. */
5270 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5272 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
5273 tmode, ro_modifier);
5274 placeholder_list = TREE_CHAIN (placeholder_list);
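/* Illustrative shape (assumed, in the spirit of tree.def): a
   variable-sized field may have a size expression like

       (MULT_EXPR (COMPONENT_REF (PLACEHOLDER_EXPR) len_field) 8)

   and wrapping its use as (WITH_RECORD_EXPR size OBJ) pushes OBJ onto
   PLACEHOLDER_LIST here, so the PLACEHOLDER_EXPR case above expands it
   as a reference to OBJ's len_field.  */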
5278 expand_exit_loop_if_false (NULL_PTR,
5279 invert_truthvalue (TREE_OPERAND (exp, 0)));
5284 expand_start_loop (1);
5285 expand_expr_stmt (TREE_OPERAND (exp, 0));
5293 tree vars = TREE_OPERAND (exp, 0);
5294 int vars_need_expansion = 0;
5296 /* Need to open a binding contour here because
5297 if there are any cleanups they must be contained here. */
5298 expand_start_bindings (0);
5300 /* Mark the corresponding BLOCK for output in its proper place. */
5301 if (TREE_OPERAND (exp, 2) != 0
5302 && ! TREE_USED (TREE_OPERAND (exp, 2)))
5303 insert_block (TREE_OPERAND (exp, 2));
5305 /* If VARS have not yet been expanded, expand them now. */
5308 if (DECL_RTL (vars) == 0)
5310 vars_need_expansion = 1;
5313 expand_decl_init (vars);
5314 vars = TREE_CHAIN (vars);
5317 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
5319 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
5325 if (RTL_EXPR_SEQUENCE (exp))
5327 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
5329 emit_insns (RTL_EXPR_SEQUENCE (exp));
5330 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
5332 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
5333 free_temps_for_rtl_expr (exp);
5334 return RTL_EXPR_RTL (exp);
5337 /* If we don't need the result, just ensure we evaluate any subexpressions. */
5342 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
5343 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
5344 EXPAND_MEMORY_USE_BAD);
5348 /* All elts simple constants => refer to a constant in memory. But
5349 if this is a non-BLKmode mode, let it store a field at a time
5350 since that should make a CONST_INT or CONST_DOUBLE when we
5351 fold. Likewise, if we have a target we can use, it is best to
5352 store directly into the target unless the type is large enough
5353 that memcpy will be used. If we are making an initializer and
5354 all operands are constant, put it in memory as well. */
5355 else if ((TREE_STATIC (exp)
5356 && ((mode == BLKmode
5357 && ! (target != 0 && safe_from_p (target, exp)))
5358 || TREE_ADDRESSABLE (exp)
5359 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5360 && (move_by_pieces_ninsns
5361 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
5362 TYPE_ALIGN (type) / BITS_PER_UNIT)
5364 && ! mostly_zeros_p (exp))))
5365 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
5367 rtx constructor = output_constant_def (exp);
5368 if (modifier != EXPAND_CONST_ADDRESS
5369 && modifier != EXPAND_INITIALIZER
5370 && modifier != EXPAND_SUM
5371 && (! memory_address_p (GET_MODE (constructor),
5372 XEXP (constructor, 0))
5374 && GET_CODE (XEXP (constructor, 0)) != REG)))
5375 constructor = change_address (constructor, VOIDmode,
5376 XEXP (constructor, 0));
5382 /* Handle calls that pass values in multiple non-contiguous
5383 locations. The Irix 6 ABI has examples of this. */
5384 if (target == 0 || ! safe_from_p (target, exp)
5385 || GET_CODE (target) == PARALLEL)
5387 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
5388 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5390 target = assign_temp (type, 0, 1, 1);
5393 if (TREE_READONLY (exp))
5395 if (GET_CODE (target) == MEM)
5396 target = copy_rtx (target);
5398 RTX_UNCHANGING_P (target) = 1;
5401 store_constructor (exp, target, 0);
5407 tree exp1 = TREE_OPERAND (exp, 0);
5410 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
5411 op0 = memory_address (mode, op0);
5413 if (flag_check_memory_usage && !AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5415 enum memory_use_mode memory_usage;
5416 memory_usage = get_memory_usage_from_modifier (modifier);
5418 if (memory_usage != MEMORY_USE_DONT)
5419 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5421 GEN_INT (int_size_in_bytes (type)),
5422 TYPE_MODE (sizetype),
5423 GEN_INT (memory_usage), QImode);
5426 temp = gen_rtx (MEM, mode, op0);
5427 /* If address was computed by addition,
5428 mark this as an element of an aggregate. */
5429 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5430 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
5431 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
5432 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
5433 || (TREE_CODE (exp1) == ADDR_EXPR
5434 && (exp2 = TREE_OPERAND (exp1, 0))
5435 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
5436 MEM_IN_STRUCT_P (temp) = 1;
5437 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
5439 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5440 here, because, in C and C++, the fact that a location is accessed
5441 through a pointer to const does not mean that the value there can
5442 never change. Languages where it can never change should
5443 also set TREE_STATIC. */
5444 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
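/* Source-level illustration of the comment above (an assumed example,
   not code from this file):  */
#if 0
  int x = 0;
  const int *p = &x;    /* the access path is pointer-to-const ...  */
  x = 1;                /* ... yet the location still changes, so a
                           load of *p must not be marked unchanging.  */
#endif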
5449 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
5453 tree array = TREE_OPERAND (exp, 0);
5454 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5455 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5456 tree index = TREE_OPERAND (exp, 1);
5457 tree index_type = TREE_TYPE (index);
5460 /* Optimize the special-case of a zero lower bound.
5462 We convert the low_bound to sizetype to avoid some problems
5463 with constant folding. (E.g. suppose the lower bound is 1,
5464 and its mode is QI. Without the conversion, (ARRAY
5465 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5466 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5468 But sizetype isn't quite right either (especially if
5469 the lowbound is negative). FIXME */
5471 if (! integer_zerop (low_bound))
5472 index = fold (build (MINUS_EXPR, index_type, index,
5473 convert (sizetype, low_bound)));
5475 /* Fold an expression like: "foo"[2].
5476 This is not done in fold so it won't happen inside &.
5477 Don't fold if this is for wide characters since it's too
5478 difficult to do correctly and this is a very rare case. */
5480 if (TREE_CODE (array) == STRING_CST
5481 && TREE_CODE (index) == INTEGER_CST
5482 && !TREE_INT_CST_HIGH (index)
5483 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
5484 && GET_MODE_CLASS (mode) == MODE_INT
5485 && GET_MODE_SIZE (mode) == 1)
5486 return GEN_INT (TREE_STRING_POINTER (array)[i]);
5488 /* If this is a constant index into a constant array,
5489 just get the value from the array. Handle both the cases when
5490 we have an explicit constructor and when our operand is a variable
5491 that was declared const. */
5493 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
5495 if (TREE_CODE (index) == INTEGER_CST
5496 && TREE_INT_CST_HIGH (index) == 0)
5498 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
5500 i = TREE_INT_CST_LOW (index);
5502 elem = TREE_CHAIN (elem);
5504 return expand_expr (fold (TREE_VALUE (elem)), target,
5505 tmode, ro_modifier);
5509 else if (optimize >= 1
5510 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
5511 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
5512 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
5514 if (TREE_CODE (index) == INTEGER_CST)
5516 tree init = DECL_INITIAL (array);
5518 i = TREE_INT_CST_LOW (index);
5519 if (TREE_CODE (init) == CONSTRUCTOR)
5521 tree elem = CONSTRUCTOR_ELTS (init);
5524 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
5525 elem = TREE_CHAIN (elem);
5527 return expand_expr (fold (TREE_VALUE (elem)), target,
5528 tmode, ro_modifier);
5530 else if (TREE_CODE (init) == STRING_CST
5531 && TREE_INT_CST_HIGH (index) == 0
5532 && (TREE_INT_CST_LOW (index)
5533 < TREE_STRING_LENGTH (init)))
5535 (TREE_STRING_POINTER
5536 (init)[TREE_INT_CST_LOW (index)]));
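/* Source-level illustration of the folds above (assumed example; the
   VAR_DECL path requires -O1 or better):

       static const char tab[] = "abc";
       char c = tab[1];

   expands directly to the constant 'b', with no memory reference.  */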
5541 /* ... fall through ... */
5545 /* If the operand is a CONSTRUCTOR, we can just extract the
5546 appropriate field if it is present. Don't do this if we have
5547 already written the data since we want to refer to that copy
5548 and varasm.c assumes that's what we'll do. */
5549 if (code != ARRAY_REF
5550 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
5551 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
5555 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
5556 elt = TREE_CHAIN (elt))
5557 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
5558 /* We can normally use the value of the field in the
5559 CONSTRUCTOR. However, if this is a bitfield in
5560 an integral mode that we can fit in a HOST_WIDE_INT,
5561 we must mask only the number of bits in the bitfield,
5562 since this is done implicitly by the constructor. If
5563 the bitfield does not meet either of those conditions,
5564 we can't do this optimization. */
5565 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
5566 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
5568 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
5569 <= HOST_BITS_PER_WIDE_INT))))
5571 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
5572 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
5574 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
5575 enum machine_mode imode
5576 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
5578 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
5580 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
5581 op0 = expand_and (op0, op1, target);
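/* Worked example of the masking above (assumed values): for an
   unsigned 3-bit field whose CONSTRUCTOR value is 13, the mask is
   (1 << 3) - 1 = 7, and 13 & 7 = 5 is returned, matching what storing
   and refetching the bitfield would produce.  */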
5586 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize, 0);
5588 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
5590 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
5600 enum machine_mode mode1;
5606 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
5607 &mode1, &unsignedp, &volatilep,
5610 /* If we got back the original object, something is wrong. Perhaps
5611 we are evaluating an expression too early. In any event, don't
5612 infinitely recurse. */
5616 /* If TEM's type is a union of variable size, pass TARGET to the inner
5617 computation, since it will need a temporary and TARGET is known
5618 to suffice. This occurs in unchecked conversion in Ada. */
5620 op0 = expand_expr (tem,
5621 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
5622 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
5624 ? target : NULL_RTX),
5626 modifier == EXPAND_INITIALIZER ? modifier : 0);
5628 /* If this is a constant, put it into a register if it is a
5629 legitimate constant and memory if it isn't. */
5630 if (CONSTANT_P (op0))
5632 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
5633 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
5634 op0 = force_reg (mode, op0);
5636 op0 = validize_mem (force_const_mem (mode, op0));
5641 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5643 if (GET_CODE (op0) != MEM)
5645 op0 = change_address (op0, VOIDmode,
5646 gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
5647 force_reg (ptr_mode, offset_rtx)));
5650 /* Don't forget about volatility even if this is a bitfield. */
5651 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
5653 op0 = copy_rtx (op0);
5654 MEM_VOLATILE_P (op0) = 1;
5657 /* Check the access. */
5658 if (flag_check_memory_usage && GET_CODE (op0) == MEM)
5660 enum memory_use_mode memory_usage;
5661 memory_usage = get_memory_usage_from_modifier (modifier);
5663 if (memory_usage != MEMORY_USE_DONT)
5668 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
5669 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
5671 /* Check the access right of the pointer. */
5672 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5674 GEN_INT (size / BITS_PER_UNIT),
5675 TYPE_MODE (sizetype),
5676 GEN_INT (memory_usage), QImode);
5680 /* In cases where an aligned union has an unaligned object
5681 as a field, we might be extracting a BLKmode value from
5682 an integer-mode (e.g., SImode) object. Handle this case
5683 by doing the extract into an object as wide as the field
5684 (which we know to be the width of a basic mode), then
5685 storing into memory, and changing the mode to BLKmode.
5686 If we ultimately want the address (EXPAND_CONST_ADDRESS or
5687 EXPAND_INITIALIZER), then we must not copy to a temporary. */
5688 if (mode1 == VOIDmode
5689 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5690 || (modifier != EXPAND_CONST_ADDRESS
5691 && modifier != EXPAND_INITIALIZER
5692 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
5693 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5694 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5695 /* If the field isn't aligned enough to fetch as a memref,
5696 fetch it as a bit field. */
5697 || (SLOW_UNALIGNED_ACCESS
5698 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
5699 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
5701 enum machine_mode ext_mode = mode;
5703 if (ext_mode == BLKmode)
5704 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
5706 if (ext_mode == BLKmode)
5708 /* In this case, BITPOS must start at a byte boundary and
5709 TARGET, if specified, must be a MEM. */
5710 if (GET_CODE (op0) != MEM
5711 || (target != 0 && GET_CODE (target) != MEM)
5712 || bitpos % BITS_PER_UNIT != 0)
5715 op0 = change_address (op0, VOIDmode,
5716 plus_constant (XEXP (op0, 0),
5717 bitpos / BITS_PER_UNIT));
5719 target = assign_temp (type, 0, 1, 1);
5721 emit_block_move (target, op0,
5722 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5729 op0 = validize_mem (op0);
5731 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
5732 mark_reg_pointer (XEXP (op0, 0), alignment);
5734 op0 = extract_bit_field (op0, bitsize, bitpos,
5735 unsignedp, target, ext_mode, ext_mode,
5737 int_size_in_bytes (TREE_TYPE (tem)));
5739 /* If the result is a record type and BITSIZE is narrower than
5740 the mode of OP0, an integral mode, and this is a big endian
5741 machine, we must put the field into the high-order bits. */
5742 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
5743 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
5744 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
5745 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
5746 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
5750 if (mode == BLKmode)
5752 rtx new = assign_stack_temp (ext_mode,
5753 bitsize / BITS_PER_UNIT, 0);
5755 emit_move_insn (new, op0);
5756 op0 = copy_rtx (new);
5757 PUT_MODE (op0, BLKmode);
5758 MEM_IN_STRUCT_P (op0) = 1;
5764 /* If the result is BLKmode, use that to access the object now as well. */
5766 if (mode == BLKmode)
5769 /* Get a reference to just this component. */
5770 if (modifier == EXPAND_CONST_ADDRESS
5771 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5772 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
5773 (bitpos / BITS_PER_UNIT)));
5775 op0 = change_address (op0, mode1,
5776 plus_constant (XEXP (op0, 0),
5777 (bitpos / BITS_PER_UNIT)));
5778 if (GET_CODE (XEXP (op0, 0)) == REG)
5779 mark_reg_pointer (XEXP (op0, 0), alignment);
5781 MEM_IN_STRUCT_P (op0) = 1;
5782 MEM_VOLATILE_P (op0) |= volatilep;
5783 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
5784 || modifier == EXPAND_CONST_ADDRESS
5785 || modifier == EXPAND_INITIALIZER)
5787 else if (target == 0)
5788 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5790 convert_move (target, op0, unsignedp);
5794 /* Intended for a reference to a buffer of a file-object in Pascal.
5795 But it's not certain that a special tree code will really be
5796 necessary for these. INDIRECT_REF might work for them. */
5802 /* Pascal set IN expression.
5805 rlo = set_low - (set_low%bits_per_word);
5806 the_word = set [ (index - rlo)/bits_per_word ];
5807 bit_index = index % bits_per_word;
5808 bitmask = 1 << bit_index;
5809 return !!(the_word & bitmask); */
5811 tree set = TREE_OPERAND (exp, 0);
5812 tree index = TREE_OPERAND (exp, 1);
5813 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
5814 tree set_type = TREE_TYPE (set);
5815 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
5816 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
5817 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
5818 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
5819 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
5820 rtx setval = expand_expr (set, 0, VOIDmode, 0);
5821 rtx setaddr = XEXP (setval, 0);
5822 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
5824 rtx diff, quo, rem, addr, bit, result;
5826 preexpand_calls (exp);
5828 /* If domain is empty, answer is no. Likewise if index is constant
5829 and out of bounds. */
5830 if ((TREE_CODE (set_high_bound) == INTEGER_CST
5831 && TREE_CODE (set_low_bound) == INTEGER_CST
5832 && tree_int_cst_lt (set_high_bound, set_low_bound)
5833 || (TREE_CODE (index) == INTEGER_CST
5834 && TREE_CODE (set_low_bound) == INTEGER_CST
5835 && tree_int_cst_lt (index, set_low_bound))
5836 || (TREE_CODE (set_high_bound) == INTEGER_CST
5837 && TREE_CODE (index) == INTEGER_CST
5838 && tree_int_cst_lt (set_high_bound, index))))
5842 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5844 /* If we get here, we have to generate the code for both cases
5845 (in range and out of range). */
5847 op0 = gen_label_rtx ();
5848 op1 = gen_label_rtx ();
5850 if (! (GET_CODE (index_val) == CONST_INT
5851 && GET_CODE (lo_r) == CONST_INT))
5853 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
5854 GET_MODE (index_val), iunsignedp, 0);
5855 emit_jump_insn (gen_blt (op1));
5858 if (! (GET_CODE (index_val) == CONST_INT
5859 && GET_CODE (hi_r) == CONST_INT))
5861 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
5862 GET_MODE (index_val), iunsignedp, 0);
5863 emit_jump_insn (gen_bgt (op1));
5866 /* Calculate the element number of bit zero in the first word of the set. */
5868 if (GET_CODE (lo_r) == CONST_INT)
5869 rlow = GEN_INT (INTVAL (lo_r)
5870 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
5872 rlow = expand_binop (index_mode, and_optab, lo_r,
5873 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
5874 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5876 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
5877 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5879 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
5880 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5881 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
5882 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5884 addr = memory_address (byte_mode,
5885 expand_binop (index_mode, add_optab, diff,
5886 setaddr, NULL_RTX, iunsignedp,
5889 /* Extract the bit we want to examine. */
5890 bit = expand_shift (RSHIFT_EXPR, byte_mode,
5891 gen_rtx (MEM, byte_mode, addr),
5892 make_tree (TREE_TYPE (index), rem),
5894 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
5895 GET_MODE (target) == byte_mode ? target : 0,
5896 1, OPTAB_LIB_WIDEN);
5898 if (result != target)
5899 convert_move (target, result, 1);
5901 /* Output the code to handle the out-of-range case. */
5904 emit_move_insn (target, const0_rtx);
5909 case WITH_CLEANUP_EXPR:
5910 if (RTL_EXPR_RTL (exp) == 0)
5913 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
5914 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
5916 /* That's it for this cleanup. */
5917 TREE_OPERAND (exp, 2) = 0;
5919 return RTL_EXPR_RTL (exp);
5921 case CLEANUP_POINT_EXPR:
5923 extern int temp_slot_level;
5924 /* Start a new binding layer that will keep track of all cleanup
5925 actions to be performed. */
5926 expand_start_bindings (0);
5928 target_temp_slot_level = temp_slot_level;
5930 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
5931 /* If we're going to use this value, load it up now. */
5933 op0 = force_not_mem (op0);
5934 preserve_temp_slots (op0);
5935 expand_end_bindings (NULL_TREE, 0, 0);
5940 /* Check for a built-in function. */
5941 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5942 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5944 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5945 return expand_builtin (exp, target, subtarget, tmode, ignore);
5947 /* If this call was expanded already by preexpand_calls,
5948 just return the result we got. */
5949 if (CALL_EXPR_RTL (exp) != 0)
5950 return CALL_EXPR_RTL (exp);
5952 return expand_call (exp, target, ignore);
5954 case NON_LVALUE_EXPR:
5957 case REFERENCE_EXPR:
5958 if (TREE_CODE (type) == UNION_TYPE)
5960 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
5963 if (mode != BLKmode)
5964 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5966 target = assign_temp (type, 0, 1, 1);
5969 if (GET_CODE (target) == MEM)
5970 /* Store data into beginning of memory target. */
5971 store_expr (TREE_OPERAND (exp, 0),
5972 change_address (target, TYPE_MODE (valtype), 0), 0);
5974 else if (GET_CODE (target) == REG)
5975 /* Store this field into a union of the proper type. */
5976 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
5977 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
5979 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
5983 /* Return the entire union. */
5987 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5989 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
5992 /* If the signedness of the conversion differs and OP0 is
5993 a promoted SUBREG, clear that indication since we now
5994 have to do the proper extension. */
5995 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
5996 && GET_CODE (op0) == SUBREG)
5997 SUBREG_PROMOTED_VAR_P (op0) = 0;
6002 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
6003 if (GET_MODE (op0) == mode)
6006 /* If OP0 is a constant, just convert it into the proper mode. */
6007 if (CONSTANT_P (op0))
6009 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6010 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6012 if (modifier == EXPAND_INITIALIZER)
6013 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
6017 convert_to_mode (mode, op0,
6018 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6020 convert_move (target, op0,
6021 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6025 /* We come here from MINUS_EXPR when the second operand is a constant. */
6028 this_optab = add_optab;
6030 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6031 something else, make sure we add the register to the constant and
6032 then to the other thing. This case can occur during strength
6033 reduction and doing it this way will produce better code if the
6034 frame pointer or argument pointer is eliminated.
6036 fold-const.c will ensure that the constant is always in the inner
6037 PLUS_EXPR, so the only case we need to do anything about is if
6038 sp, ap, or fp is our second argument, in which case we must swap
6039 the innermost first argument and our second argument. */
6041 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6042 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6043 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6044 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6045 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6046 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6048 tree t = TREE_OPERAND (exp, 1);
6050 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6051 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
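/* E.g. (EXP0 + 4) + FP is rearranged here to (FP + 4) + EXP0, so that
   eliminating the frame pointer merely rewrites the inner constant.
   (Illustrative operand shapes.)  */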
6054 /* If the result is to be ptr_mode and we are adding an integer to
6055 something, we might be forming a constant. So try to use
6056 plus_constant. If it produces a sum and we can't accept it,
6057 use force_operand. This allows P = &ARR[const] to generate
6058 efficient code on machines where a SYMBOL_REF is not a valid address.
6061 If this is an EXPAND_SUM call, always return the sum. */
6062 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
6063 || mode == ptr_mode)
6065 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
6066 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6067 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
6069 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
6071 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
6072 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6073 op1 = force_operand (op1, target);
6077 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6078 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
6079 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
6081 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6083 if (! CONSTANT_P (op0))
6085 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6086 VOIDmode, modifier);
6087 /* Don't go to both_summands if modifier
6088 says it's not right to return a PLUS. */
6089 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6093 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
6094 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6095 op0 = force_operand (op0, target);
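/* Source-level illustration (assumed example, 4-byte ints):

       int arr[10];
       int *p = &arr[3];

   The path above folds the address to the compile-time constant
   arr + 12 via plus_constant instead of emitting an add insn.  */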
6100 /* No sense saving up arithmetic to be done
6101 if it's all in the wrong mode to form part of an address.
6102 And force_operand won't know whether to sign-extend or zero-extend. */
6104 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6105 || mode != ptr_mode)
6108 preexpand_calls (exp);
6109 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6112 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
6113 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
6116 /* Make sure any term that's a sum with a constant comes last. */
6117 if (GET_CODE (op0) == PLUS
6118 && CONSTANT_P (XEXP (op0, 1)))
6124 /* If adding to a sum including a constant,
6125 associate it to put the constant outside. */
6126 if (GET_CODE (op1) == PLUS
6127 && CONSTANT_P (XEXP (op1, 1)))
6129 rtx constant_term = const0_rtx;
6131 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
6134 /* Ensure that MULT comes first if there is one. */
6135 else if (GET_CODE (op0) == MULT)
6136 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
6138 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
6140 /* Let's also eliminate constants from op0 if possible. */
6141 op0 = eliminate_constant_term (op0, &constant_term);
6143 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
6144 their sum should be a constant. Form it into OP1, since the
6145 result we want will then be OP0 + OP1. */
6147 temp = simplify_binary_operation (PLUS, mode, constant_term,
6152 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
6155 /* Put a constant term last and put a multiplication first. */
6156 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
6157 temp = op1, op1 = op0, op0 = temp;
6159 temp = simplify_binary_operation (PLUS, mode, op0, op1);
6160 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
6163 /* For initializers, we are allowed to return a MINUS of two
6164 symbolic constants. Here we handle all cases when both operands are constant. */
6166 /* Handle difference of two symbolic constants,
6167 for the sake of an initializer. */
6168 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6169 && really_constant_p (TREE_OPERAND (exp, 0))
6170 && really_constant_p (TREE_OPERAND (exp, 1)))
6172 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
6173 VOIDmode, ro_modifier);
6174 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6175 VOIDmode, ro_modifier);
6177 /* If the last operand is a CONST_INT, use plus_constant of
6178 the negated constant. Else make the MINUS. */
6179 if (GET_CODE (op1) == CONST_INT)
6180 return plus_constant (op0, - INTVAL (op1));
6182 return gen_rtx (MINUS, mode, op0, op1);
6184 /* Convert A - const to A + (-const). */
6185 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6187 tree negated = fold (build1 (NEGATE_EXPR, type,
6188 TREE_OPERAND (exp, 1)));
6190 /* Deal with the case where we can't negate the constant in TYPE. */
6192 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6194 tree newtype = signed_type (type);
6195 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
6196 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
6197 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
6199 if (! TREE_OVERFLOW (newneg))
6200 return expand_expr (convert (type,
6201 build (PLUS_EXPR, newtype,
6203 target, tmode, ro_modifier);
6207 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
6211 this_optab = sub_optab;
6215 preexpand_calls (exp);
6216 /* If first operand is constant, swap them.
6217 Thus the following special case checks need only
6218 check the second operand. */
6219 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6221 register tree t1 = TREE_OPERAND (exp, 0);
6222 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
6223 TREE_OPERAND (exp, 1) = t1;
6226 /* Attempt to return something suitable for generating an
6227 indexed address, for machines that support that. */
6229 if (modifier == EXPAND_SUM && mode == ptr_mode
6230 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6231 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
6233 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6236 /* Apply distributive law if OP0 is x+c. */
6237 if (GET_CODE (op0) == PLUS
6238 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
6239 return gen_rtx (PLUS, mode,
6240 gen_rtx (MULT, mode, XEXP (op0, 0),
6241 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
6242 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
6243 * INTVAL (XEXP (op0, 1))));
6245 if (GET_CODE (op0) != REG)
6246 op0 = force_operand (op0, NULL_RTX);
6247 if (GET_CODE (op0) != REG)
6248 op0 = copy_to_mode_reg (mode, op0);
6250 return gen_rtx (MULT, mode, op0,
6251 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
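/* Illustrative sketch: under EXPAND_SUM with a constant multiplier,
   `(x + 8) * 4' is distributed above into

       (plus (mult (reg x) (const_int 4)) (const_int 32))

   so that an enclosing address computation can fold the constant
   into a displacement instead of emitting a real multiply.  */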
6254 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6255 subtarget = 0;
6257 /* Check for multiplying things that have been extended
6258 from a narrower type. If this machine supports multiplying
6259 in that narrower type with a result in the desired type,
6260 do it that way, and avoid the explicit type-conversion. */
6261 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
6262 && TREE_CODE (type) == INTEGER_TYPE
6263 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6264 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
6265 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6266 && int_fits_type_p (TREE_OPERAND (exp, 1),
6267 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6268 /* Don't use a widening multiply if a shift will do. */
6269 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
6270 > HOST_BITS_PER_WIDE_INT)
6271 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
6273 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6274 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6276 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
6277 /* If both operands are extended, they must either both
6278 be zero-extended or both be sign-extended. */
6279 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6281 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
6283 enum machine_mode innermode
6284 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
6285 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6286 ? smul_widen_optab : umul_widen_optab);
6287 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6288 ? umul_widen_optab : smul_widen_optab);
6289 if (mode == GET_MODE_WIDER_MODE (innermode))
6291 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
6293 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6294 NULL_RTX, VOIDmode, 0);
6295 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6296 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6299 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6300 NULL_RTX, VOIDmode, 0);
6303 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6304 && innermode == word_mode)
6307 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6308 NULL_RTX, VOIDmode, 0);
6309 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6310 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6313 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6314 NULL_RTX, VOIDmode, 0);
6315 temp = expand_binop (mode, other_optab, op0, op1, target,
6316 unsignedp, OPTAB_LIB_WIDEN);
6317 htem = expand_mult_highpart_adjust (innermode,
6318 gen_highpart (innermode, temp),
6320 gen_highpart (innermode, temp),
6322 emit_move_insn (gen_highpart (innermode, temp), htem);
6327 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6328 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6329 return expand_mult (mode, op0, op1, target, unsignedp);
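/* Hedged example of the widening-multiply case above: on a 32-bit
   target, `(long long) (int) a * (long long) (int) b' has both
   operands extended from SImode, so a single signed widening multiply
   (smul_widen_optab, e.g. a mulsidi3 pattern) yields the DImode
   product directly instead of extending both operands first.  */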
6331 case TRUNC_DIV_EXPR:
6332 case FLOOR_DIV_EXPR:
6333 case CEIL_DIV_EXPR:
6334 case ROUND_DIV_EXPR:
6335 case EXACT_DIV_EXPR:
6336 preexpand_calls (exp);
6337 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6338 subtarget = 0;
6339 /* Possible optimization: compute the dividend with EXPAND_SUM;
6340 then, if the divisor is constant, we can optimize the case
6341 where some terms of the dividend have coefficients divisible by it.  */
6342 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6343 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6344 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
6346 case RDIV_EXPR:
6347 this_optab = flodiv_optab;
6348 goto binop;
6350 case TRUNC_MOD_EXPR:
6351 case FLOOR_MOD_EXPR:
6352 case CEIL_MOD_EXPR:
6353 case ROUND_MOD_EXPR:
6354 preexpand_calls (exp);
6355 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6356 subtarget = 0;
6357 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6358 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6359 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
6361 case FIX_ROUND_EXPR:
6362 case FIX_FLOOR_EXPR:
6363 case FIX_CEIL_EXPR:
6364 abort (); /* Not used for C. */
6366 case FIX_TRUNC_EXPR:
6367 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6368 if (target == 0)
6369 target = gen_reg_rtx (mode);
6370 expand_fix (target, op0, unsignedp);
6373 case FLOAT_EXPR:
6374 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6375 if (target == 0)
6376 target = gen_reg_rtx (mode);
6377 /* expand_float can't figure out what to do if FROM has VOIDmode.
6378 So give it the correct mode. With -O, cse will optimize this. */
6379 if (GET_MODE (op0) == VOIDmode)
6380 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6381 op0);
6382 expand_float (target, op0,
6383 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6384 return target;
6386 case NEGATE_EXPR:
6387 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6388 temp = expand_unop (mode, neg_optab, op0, target, 0);
6389 if (temp == 0)
6390 abort ();
6391 return temp;
6393 case ABS_EXPR:
6394 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6396 /* Handle complex values specially. */
6397 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
6398 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6399 return expand_complex_abs (mode, op0, target, unsignedp);
6401 /* Unsigned abs is simply the operand. Testing here means we don't
6402 risk generating incorrect code below. */
6403 if (TREE_UNSIGNED (type))
6404 return op0;
6406 return expand_abs (mode, op0, target, unsignedp,
6407 safe_from_p (target, TREE_OPERAND (exp, 0)));
6409 case MAX_EXPR:
6410 case MIN_EXPR:
6411 target = original_target;
6412 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
6413 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
6414 || GET_MODE (target) != mode
6415 || (GET_CODE (target) == REG
6416 && REGNO (target) < FIRST_PSEUDO_REGISTER))
6417 target = gen_reg_rtx (mode);
6418 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6419 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6421 /* First try to do it with a special MIN or MAX instruction.
6422 If that does not win, use a conditional jump to select the proper
6423 value.  */
6424 this_optab = (TREE_UNSIGNED (type)
6425 ? (code == MIN_EXPR ? umin_optab : umax_optab)
6426 : (code == MIN_EXPR ? smin_optab : smax_optab));
6428 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
6429 OPTAB_WIDEN);
6430 if (temp != 0)
6431 return temp;
6433 /* At this point, a MEM target is no longer useful; we will get better
6434 code without it.  */
6436 if (GET_CODE (target) == MEM)
6437 target = gen_reg_rtx (mode);
6439 if (target != op0)
6440 emit_move_insn (target, op0);
6442 op0 = gen_label_rtx ();
6444 /* If this mode is an integer too wide to compare properly,
6445 compare word by word. Rely on cse to optimize constant cases. */
6446 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
6448 if (code == MAX_EXPR)
6449 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6450 target, op1, NULL_RTX, op0);
6451 else
6452 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6453 op1, target, NULL_RTX, op0);
6454 emit_move_insn (target, op1);
6458 if (code == MAX_EXPR)
6459 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6460 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
6461 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
6463 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6464 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
6465 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
6466 if (temp == const0_rtx)
6467 emit_move_insn (target, op1);
6468 else if (temp != const_true_rtx)
6470 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
6471 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
6474 emit_move_insn (target, op1);
6476 emit_label (op0);
6477 return target;
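/* Illustrative sketch: when no min/max instruction wins, MAX_EXPR is
   emitted as a compare-and-branch sequence, roughly

       target = op0;
       if (target >= op1) goto done;
       target = op1;
     done:

   using a word-by-word comparison when the integer mode is too wide
   for can_compare_p.  */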
6480 case BIT_NOT_EXPR:
6481 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6482 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
6483 if (temp == 0)
6484 abort ();
6485 return temp;
6487 case FFS_EXPR:
6488 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6489 temp = expand_unop (mode, ffs_optab, op0, target, 1);
6490 if (temp == 0)
6491 abort ();
6492 return temp;
6494 /* ??? Can optimize bitwise operations with one arg constant.
6495 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6496 and (a bitwise1 b) bitwise2 b (etc)
6497 but that is probably not worthwhile.  */
6499 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6500 boolean values when we want in all cases to compute both of them. In
6501 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6502 as actual zero-or-1 values and then bitwise anding. In cases where
6503 there cannot be any side effects, better code would be made by
6504 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6505 how to recognize those cases. */
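/* Hedged example: `f () && g ()' (TRUTH_ANDIF_EXPR) must not evaluate
   g () when f () is false, whereas TRUTH_AND_EXPR may compute both
   operands as 0-or-1 values and AND them -- which is exactly the
   and_optab binop path taken below.  */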
6507 case TRUTH_AND_EXPR:
6508 case BIT_AND_EXPR:
6509 this_optab = and_optab;
6510 goto binop;
6512 case TRUTH_OR_EXPR:
6513 case BIT_IOR_EXPR:
6514 this_optab = ior_optab;
6515 goto binop;
6517 case TRUTH_XOR_EXPR:
6518 case BIT_XOR_EXPR:
6519 this_optab = xor_optab;
6520 goto binop;
6522 case LSHIFT_EXPR:
6523 case RSHIFT_EXPR:
6524 case LROTATE_EXPR:
6525 case RROTATE_EXPR:
6526 preexpand_calls (exp);
6527 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6528 subtarget = 0;
6529 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6530 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
6533 /* Could determine the answer when only additive constants differ. Also,
6534 the addition of one can be handled by changing the condition. */
6535 case LT_EXPR:
6536 case LE_EXPR:
6537 case GT_EXPR:
6538 case GE_EXPR:
6539 case EQ_EXPR:
6540 case NE_EXPR:
6541 preexpand_calls (exp);
6542 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
6546 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
6547 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
6548 && original_target
6549 && GET_CODE (original_target) == REG
6550 && (GET_MODE (original_target)
6551 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6553 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
6554 VOIDmode, 0);
6556 if (temp != original_target)
6557 temp = copy_to_reg (temp);
6559 op1 = gen_label_rtx ();
6560 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
6561 GET_MODE (temp), unsignedp, 0);
6562 emit_jump_insn (gen_beq (op1));
6563 emit_move_insn (temp, const1_rtx);
6564 emit_label (op1);
6565 return temp;
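/* Illustrative sketch of the sequence above: for `r = (foo != 0)'
   with a suitable register target, we emit roughly

       r = foo;
       if (r == 0) goto L;
       r = 1;
     L:

   rather than a general store-flag sequence.  */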
6568 /* If no set-flag instruction, must generate a conditional
6569 store into a temporary variable. Drop through
6570 and handle this like && and ||. */
6572 case TRUTH_ANDIF_EXPR:
6573 case TRUTH_ORIF_EXPR:
6575 && (target == 0 || ! safe_from_p (target, exp)
6576 /* Make sure we don't have a hard reg (such as function's return
6577 value) live across basic blocks, if not optimizing. */
6578 || (!optimize && GET_CODE (target) == REG
6579 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
6580 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6583 emit_clr_insn (target);
6585 op1 = gen_label_rtx ();
6586 jumpifnot (exp, op1);
6589 emit_0_to_1_insn (target);
6592 return ignore ? const0_rtx : target;
6594 case TRUTH_NOT_EXPR:
6595 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6596 /* The parser is careful to generate TRUTH_NOT_EXPR
6597 only with operands that are always zero or one. */
6598 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
6599 target, 1, OPTAB_LIB_WIDEN);
6600 if (temp == 0)
6601 abort ();
6602 return temp;
6604 case COMPOUND_EXPR:
6605 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6606 emit_queue ();
6607 return expand_expr (TREE_OPERAND (exp, 1),
6608 (ignore ? const0_rtx : target),
6609 VOIDmode, ro_modifier);
6611 case COND_EXPR:
6612 /* If we would have a "singleton" (see below) were it not for a
6613 conversion in each arm, bring that conversion back out. */
6614 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6615 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
6616 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
6617 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
6619 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
6620 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
6622 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
6623 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6624 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
6625 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
6626 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
6627 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6628 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
6629 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
6630 return expand_expr (build1 (NOP_EXPR, type,
6631 build (COND_EXPR, TREE_TYPE (true),
6632 TREE_OPERAND (exp, 0),
6633 true, false)),
6634 target, tmode, modifier);
6638 /* Note that COND_EXPRs whose type is a structure or union
6639 are required to be constructed to contain assignments of
6640 a temporary variable, so that we can evaluate them here
6641 for side effect only. If type is void, we must do likewise. */
6643 /* If an arm of the branch requires a cleanup,
6644 only that cleanup is performed. */
6647 tree binary_op = 0, unary_op = 0;
6649 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
6650 convert it to our mode, if necessary. */
6651 if (integer_onep (TREE_OPERAND (exp, 1))
6652 && integer_zerop (TREE_OPERAND (exp, 2))
6653 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6657 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6658 ro_modifier);
6659 return const0_rtx;
6662 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
6663 if (GET_MODE (op0) == mode)
6664 return op0;
6666 if (target == 0)
6667 target = gen_reg_rtx (mode);
6668 convert_move (target, op0, unsignedp);
6669 return target;
6672 /* Check for X ? A + B : A. If we have this, we can copy A to the
6673 output and conditionally add B. Similarly for unary operations.
6674 Don't do this if X has side-effects because those side effects
6675 might affect A or B and the "?" operation is a sequence point in
6676 ANSI. (operand_equal_p tests for side effects.) */
6678 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
6679 && operand_equal_p (TREE_OPERAND (exp, 2),
6680 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6681 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
6682 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
6683 && operand_equal_p (TREE_OPERAND (exp, 1),
6684 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6685 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
6686 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
6687 && operand_equal_p (TREE_OPERAND (exp, 2),
6688 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6689 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
6690 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
6691 && operand_equal_p (TREE_OPERAND (exp, 1),
6692 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6693 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
6695 /* If we are not to produce a result, we have no target. Otherwise,
6696 if a target was specified use it; it will not be used as an
6697 intermediate target unless it is safe.  If no target, use a
6698 temporary and preserve it.  */
6702 else if (original_target
6703 && (safe_from_p (original_target, TREE_OPERAND (exp, 0))
6704 || (singleton && GET_CODE (original_target) == REG
6705 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
6706 && original_target == var_rtx (singleton)))
6707 && GET_MODE (original_target) == mode
6708 && ! (GET_CODE (original_target) == MEM
6709 && MEM_VOLATILE_P (original_target)))
6710 temp = original_target;
6711 else if (TREE_ADDRESSABLE (type))
6714 temp = assign_temp (type, 0, 0, 1);
6716 /* If we had X ? A + C : A, with C a constant power of 2, and we can
6717 do the test of X as a store-flag operation, do this as
6718 A + ((X != 0) << log C). Similarly for other simple binary
6719 operators. Only do for C == 1 if BRANCH_COST is low. */
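/* Hedged example of the store-flag rewrite described above: with
   C == 4, `x ? a + 4 : a' becomes `a + ((x != 0) << 2)', trading the
   branch for an scc instruction and a shift when BRANCH_COST makes
   that profitable.  */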
6720 if (temp && singleton && binary_op
6721 && (TREE_CODE (binary_op) == PLUS_EXPR
6722 || TREE_CODE (binary_op) == MINUS_EXPR
6723 || TREE_CODE (binary_op) == BIT_IOR_EXPR
6724 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
6725 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
6726 : integer_onep (TREE_OPERAND (binary_op, 1)))
6727 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6730 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
6731 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
6732 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
6735 /* If we had X ? A : A + 1, do this as A + (X == 0).
6737 We have to invert the truth value here and then put it
6738 back later if do_store_flag fails. We cannot simply copy
6739 TREE_OPERAND (exp, 0) to another variable and modify that
6740 because invert_truthvalue can modify the tree pointed to
6741 by its argument.  */
6742 if (singleton == TREE_OPERAND (exp, 1))
6743 TREE_OPERAND (exp, 0)
6744 = invert_truthvalue (TREE_OPERAND (exp, 0));
6746 result = do_store_flag (TREE_OPERAND (exp, 0),
6747 (safe_from_p (temp, singleton)
6749 mode, BRANCH_COST <= 1);
6751 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
6752 result = expand_shift (LSHIFT_EXPR, mode, result,
6753 build_int_2 (tree_log2
6757 (safe_from_p (temp, singleton)
6758 ? temp : NULL_RTX), 0);
6762 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
6763 return expand_binop (mode, boptab, op1, result, temp,
6764 unsignedp, OPTAB_LIB_WIDEN);
6766 else if (singleton == TREE_OPERAND (exp, 1))
6767 TREE_OPERAND (exp, 0)
6768 = invert_truthvalue (TREE_OPERAND (exp, 0));
6771 do_pending_stack_adjust ();
6773 op0 = gen_label_rtx ();
6775 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
6779 /* If the target conflicts with the other operand of the
6780 binary op, we can't use it. Also, we can't use the target
6781 if it is a hard register, because evaluating the condition
6782 might clobber it. */
6784 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
6785 || (GET_CODE (temp) == REG
6786 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
6787 temp = gen_reg_rtx (mode);
6788 store_expr (singleton, temp, 0);
6791 expand_expr (singleton,
6792 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6793 if (singleton == TREE_OPERAND (exp, 1))
6794 jumpif (TREE_OPERAND (exp, 0), op0);
6796 jumpifnot (TREE_OPERAND (exp, 0), op0);
6798 start_cleanup_deferal ();
6799 if (binary_op && temp == 0)
6800 /* Just touch the other operand. */
6801 expand_expr (TREE_OPERAND (binary_op, 1),
6802 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6804 store_expr (build (TREE_CODE (binary_op), type,
6805 make_tree (type, temp),
6806 TREE_OPERAND (binary_op, 1)),
6809 store_expr (build1 (TREE_CODE (unary_op), type,
6810 make_tree (type, temp)),
6814 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
6815 comparison operator. If we have one of these cases, set the
6816 output to A, branch on A (cse will merge these two references),
6817 then set the output to FOO. */
6819 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6820 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6821 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6822 TREE_OPERAND (exp, 1), 0)
6823 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6824 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
6826 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6827 temp = gen_reg_rtx (mode);
6828 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6829 jumpif (TREE_OPERAND (exp, 0), op0);
6831 start_cleanup_deferal ();
6832 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6836 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6837 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6838 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6839 TREE_OPERAND (exp, 2), 0)
6840 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6841 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
6843 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6844 temp = gen_reg_rtx (mode);
6845 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6846 jumpifnot (TREE_OPERAND (exp, 0), op0);
6848 start_cleanup_deferal ();
6849 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6854 op1 = gen_label_rtx ();
6855 jumpifnot (TREE_OPERAND (exp, 0), op0);
6857 start_cleanup_deferal ();
6859 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6861 expand_expr (TREE_OPERAND (exp, 1),
6862 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6863 end_cleanup_deferal ();
6865 emit_jump_insn (gen_jump (op1));
6868 start_cleanup_deferal ();
6870 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6872 expand_expr (TREE_OPERAND (exp, 2),
6873 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6876 end_cleanup_deferal ();
6887 /* Something needs to be initialized, but we didn't know
6888 where that thing was when building the tree. For example,
6889 it could be the return value of a function, or a parameter
6890 to a function that is laid down in the stack, or a temporary
6891 variable which must be passed by reference.
6893 We guarantee that the expression will either be constructed
6894 or copied into our original target. */
6896 tree slot = TREE_OPERAND (exp, 0);
6897 tree cleanups = NULL_TREE;
6901 if (TREE_CODE (slot) != VAR_DECL)
6905 target = original_target;
6909 if (DECL_RTL (slot) != 0)
6911 target = DECL_RTL (slot);
6912 /* If we have already expanded the slot, don't do
6913 it again.  */
6914 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6915 return target;
6919 target = assign_temp (type, 2, 1, 1);
6920 /* All temp slots at this level must not conflict. */
6921 preserve_temp_slots (target);
6922 DECL_RTL (slot) = target;
6924 /* Since SLOT is not known to the called function
6925 to belong to its stack frame, we must build an explicit
6926 cleanup. This case occurs when we must build up a reference
6927 to pass the reference as an argument. In this case,
6928 it is very likely that such a reference need not be
6929 built here.  */
6931 if (TREE_OPERAND (exp, 2) == 0)
6932 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
6933 cleanups = TREE_OPERAND (exp, 2);
6938 /* This case does occur when expanding a parameter that
6939 needs to be constructed on the stack.  The target
6940 is the actual stack address that we want to initialize.
6941 The function we call will perform the cleanup in this case. */
6943 /* If we have already assigned it space, use that space,
6944 not the target that we were passed in, as our target
6945 parameter is only a hint.  */
6946 if (DECL_RTL (slot) != 0)
6948 target = DECL_RTL (slot);
6949 /* If we have already expanded the slot, don't do
6950 it again.  */
6951 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6952 return target;
6956 DECL_RTL (slot) = target;
6957 /* If we must have an addressable slot, then make sure that
6958 the RTL that we just stored in slot is OK. */
6959 if (TREE_ADDRESSABLE (slot))
6961 TREE_ADDRESSABLE (slot) = 0;
6962 mark_addressable (slot);
6967 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
6968 /* Mark it as expanded. */
6969 TREE_OPERAND (exp, 1) = NULL_TREE;
6971 store_expr (exp1, target, 0);
6973 expand_decl_cleanup (NULL_TREE, cleanups);
6980 tree lhs = TREE_OPERAND (exp, 0);
6981 tree rhs = TREE_OPERAND (exp, 1);
6982 tree noncopied_parts = 0;
6983 tree lhs_type = TREE_TYPE (lhs);
6985 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6986 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
6987 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
6988 TYPE_NONCOPIED_PARTS (lhs_type));
6989 while (noncopied_parts != 0)
6991 expand_assignment (TREE_VALUE (noncopied_parts),
6992 TREE_PURPOSE (noncopied_parts), 0, 0);
6993 noncopied_parts = TREE_CHAIN (noncopied_parts);
7000 /* If lhs is complex, expand calls in rhs before computing it.
7001 That's so we don't compute a pointer and save it over a call.
7002 If lhs is simple, compute it first so we can give it as a
7003 target if the rhs is just a call.  This avoids an extra temp and copy
7004 and prevents a partial subsumption that makes bad code.
7005 Actually we could treat component_ref's of vars like vars. */
7007 tree lhs = TREE_OPERAND (exp, 0);
7008 tree rhs = TREE_OPERAND (exp, 1);
7009 tree noncopied_parts = 0;
7010 tree lhs_type = TREE_TYPE (lhs);
7014 if (TREE_CODE (lhs) != VAR_DECL
7015 && TREE_CODE (lhs) != RESULT_DECL
7016 && TREE_CODE (lhs) != PARM_DECL
7017 && ! (TREE_CODE (lhs) == INDIRECT_REF
7018 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
7019 preexpand_calls (exp);
7021 /* Check for |= or &= of a bitfield of size one into another bitfield
7022 of size 1. In this case, (unless we need the result of the
7023 assignment) we can do this more efficiently with a
7024 test followed by an assignment, if necessary.
7026 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7027 things change so we do, this code should be enhanced to
7028 support it.  */
7029 if (ignore
7030 && TREE_CODE (lhs) == COMPONENT_REF
7031 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7032 || TREE_CODE (rhs) == BIT_AND_EXPR)
7033 && TREE_OPERAND (rhs, 0) == lhs
7034 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7035 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7036 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7038 rtx label = gen_label_rtx ();
7040 do_jump (TREE_OPERAND (rhs, 1),
7041 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7042 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7043 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7044 (TREE_CODE (rhs) == BIT_IOR_EXPR
7046 : integer_zero_node)),
7048 do_pending_stack_adjust ();
7049 emit_label (label);
7050 return const0_rtx;
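/* Illustrative sketch (one-bit fields assumed): the sequence just
   emitted turns `s.a |= s.b;' into the equivalent of

       if (s.b) s.a = 1;

   a test and a conditional store instead of a read-modify-write of
   the destination bitfield.  */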
7053 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7054 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7055 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7056 TYPE_NONCOPIED_PARTS (lhs_type));
7058 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7059 while (noncopied_parts != 0)
7061 expand_assignment (TREE_PURPOSE (noncopied_parts),
7062 TREE_VALUE (noncopied_parts), 0, 0);
7063 noncopied_parts = TREE_CHAIN (noncopied_parts);
7068 case PREINCREMENT_EXPR:
7069 case PREDECREMENT_EXPR:
7070 return expand_increment (exp, 0, ignore);
7072 case POSTINCREMENT_EXPR:
7073 case POSTDECREMENT_EXPR:
7074 /* Faster to treat as pre-increment if result is not used. */
7075 return expand_increment (exp, ! ignore, ignore);
7078 /* If nonzero, TEMP will be set to the address of something that might
7079 be a MEM corresponding to a stack slot. */
7082 /* Are we taking the address of a nested function? */
7083 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
7084 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
7085 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0)))
7087 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7088 op0 = force_operand (op0, target);
7090 /* If we are taking the address of something erroneous, just
7091 use zero.  */
7092 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7096 /* We make sure to pass const0_rtx down if we came in with
7097 ignore set, to avoid doing the cleanups twice for something. */
7098 op0 = expand_expr (TREE_OPERAND (exp, 0),
7099 ignore ? const0_rtx : NULL_RTX, VOIDmode,
7100 (modifier == EXPAND_INITIALIZER
7101 ? modifier : EXPAND_CONST_ADDRESS));
7103 /* If we are going to ignore the result, OP0 will have been set
7104 to const0_rtx, so just return it. Don't get confused and
7105 think we are taking the address of the constant. */
7109 op0 = protect_from_queue (op0, 0);
7111 /* We would like the object in memory. If it is a constant,
7112 we can have it be statically allocated into memory. For
7113 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
7114 memory and store the value into it. */
7116 if (CONSTANT_P (op0))
7117 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7119 else if (GET_CODE (op0) == MEM)
7121 mark_temp_addr_taken (op0);
7122 temp = XEXP (op0, 0);
7125 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7126 || GET_CODE (op0) == CONCAT)
7128 /* If this object is in a register, it must not
7129 be BLKmode.  */
7130 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7131 rtx memloc = assign_temp (inner_type, 1, 1, 1);
7133 mark_temp_addr_taken (memloc);
7134 emit_move_insn (memloc, op0);
7138 if (GET_CODE (op0) != MEM)
7141 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7143 temp = XEXP (op0, 0);
7144 #ifdef POINTERS_EXTEND_UNSIGNED
7145 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7146 && mode == ptr_mode)
7147 temp = convert_memory_address (ptr_mode, temp);
7152 op0 = force_operand (XEXP (op0, 0), target);
7155 if (flag_force_addr && GET_CODE (op0) != REG)
7156 op0 = force_reg (Pmode, op0);
7158 if (GET_CODE (op0) == REG
7159 && ! REG_USERVAR_P (op0))
7160 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
7162 /* If we might have had a temp slot, add an equivalent address
7163 for it.  */
7164 if (temp != 0)
7165 update_temp_slot_address (temp, op0);
7167 #ifdef POINTERS_EXTEND_UNSIGNED
7168 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7169 && mode == ptr_mode)
7170 op0 = convert_memory_address (ptr_mode, op0);
7175 case ENTRY_VALUE_EXPR:
7178 /* COMPLEX type for Extended Pascal & Fortran */
7181 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7184 /* Get the rtx code of the operands. */
7185 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7186 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7189 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7193 /* Move the real (op0) and imaginary (op1) parts to their location. */
7194 emit_move_insn (gen_realpart (mode, target), op0);
7195 emit_move_insn (gen_imagpart (mode, target), op1);
7197 insns = get_insns ();
7200 /* Complex construction should appear as a single unit. */
7201 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7202 each with a separate pseudo as destination.
7203 It's not correct for flow to treat them as a unit. */
7204 if (GET_CODE (target) != CONCAT)
7205 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
7213 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7214 return gen_realpart (mode, op0);
7217 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7218 return gen_imagpart (mode, op0);
7222 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7226 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7229 target = gen_reg_rtx (mode);
7233 /* Store the realpart and the negated imagpart to target. */
7234 emit_move_insn (gen_realpart (partmode, target),
7235 gen_realpart (partmode, op0));
7237 imag_t = gen_imagpart (partmode, target);
7238 temp = expand_unop (partmode, neg_optab,
7239 gen_imagpart (partmode, op0), imag_t, 0);
7241 emit_move_insn (imag_t, temp);
7243 insns = get_insns ();
7246 /* Conjugate should appear as a single unit
7247 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
7248 each with a separate pseudo as destination.
7249 It's not correct for flow to treat them as a unit. */
7250 if (GET_CODE (target) != CONCAT)
7251 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
7258 case TRY_CATCH_EXPR:
7260 tree handler = TREE_OPERAND (exp, 1);
7262 expand_eh_region_start ();
7264 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7266 expand_eh_region_end (handler);
7273 rtx dcc = get_dynamic_cleanup_chain ();
7274 emit_move_insn (dcc, validize_mem (gen_rtx (MEM, Pmode, dcc)));
7280 rtx dhc = get_dynamic_handler_chain ();
7281 emit_move_insn (dhc, validize_mem (gen_rtx (MEM, Pmode, dhc)));
7286 op0 = CONST0_RTX (tmode);
7292 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7295 /* Here to do an ordinary binary operator, generating an instruction
7296 from the optab already placed in `this_optab'. */
7297 binop:
7298 preexpand_calls (exp);
7299 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
7300 subtarget = 0;
7301 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7302 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7304 temp = expand_binop (mode, this_optab, op0, op1, target,
7305 unsignedp, OPTAB_LIB_WIDEN);
7306 if (temp == 0)
7307 abort ();
7308 return temp;
7312 /* Emit bytecode to evaluate the given expression EXP to the stack. */
7315 bc_expand_expr (exp)
7318 enum tree_code code;
7321 struct binary_operator *binoptab;
7322 struct unary_operator *unoptab;
7323 struct increment_operator *incroptab;
7324 struct bc_label *lab, *lab1;
7325 enum bytecode_opcode opcode;
7328 code = TREE_CODE (exp);
7334 if (DECL_RTL (exp) == 0)
7336 error_with_decl (exp, "prior parameter's size depends on `%s'");
7340 bc_load_parmaddr (DECL_RTL (exp));
7341 bc_load_memory (TREE_TYPE (exp), exp);
7347 if (DECL_RTL (exp) == 0)
7351 if (BYTECODE_LABEL (DECL_RTL (exp)))
7352 bc_load_externaddr (DECL_RTL (exp));
7354 bc_load_localaddr (DECL_RTL (exp));
7356 if (TREE_PUBLIC (exp))
7357 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
7358 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
7360 bc_load_localaddr (DECL_RTL (exp));
7362 bc_load_memory (TREE_TYPE (exp), exp);
7367 #ifdef DEBUG_PRINT_CODE
7368 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
7370 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
7372 : TYPE_MODE (TREE_TYPE (exp)))],
7373 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
7379 #ifdef DEBUG_PRINT_CODE
7380 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
7382 /* FIX THIS: find a better way to pass real_cst's. -bson */
7383 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
7384 (double) TREE_REAL_CST (exp));
7393 /* We build a call description vector describing the type of
7394 the return value and of the arguments; this call vector,
7395 together with a pointer to a location for the return value
7396 and the base of the argument list, is passed to the low
7397 level machine dependent call subroutine, which is responsible
7398 for putting the arguments wherever real functions expect
7399 them, as well as getting the return value back. */
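/* Illustrative layout (hedged): for a call `int f (char c, double d)'
   the calldesc vector built below contains, in order, roughly

       { 2,                          number of arguments
         typecode (int),    sizeof (int),      return value
         typecode (char),   sizeof (char),     first argument
         typecode (double), sizeof (double) }  second argument

   i.e. a runtime type code and a size for the return value and for
   each argument, prefixed by the argument count.  */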
7401 tree calldesc = 0, arg;
7405 /* Push the evaluated args on the evaluation stack in reverse
7406 order. Also make an entry for each arg in the calldesc
7407 vector while we're at it. */
7409 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7411 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
7414 bc_expand_expr (TREE_VALUE (arg));
7416 calldesc = tree_cons ((tree) 0,
7417 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
7419 calldesc = tree_cons ((tree) 0,
7420 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
7424 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7426 /* Allocate a location for the return value and push its
7427 address on the evaluation stack. Also make an entry
7428 at the front of the calldesc for the return value type. */
7430 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7431 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
7432 bc_load_localaddr (retval);
7434 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
7435 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
7437 /* Prepend the argument count. */
7438 calldesc = tree_cons ((tree) 0,
7439 build_int_2 (nargs, 0),
7442 /* Push the address of the call description vector on the stack. */
7443 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
7444 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
7445 build_index_type (build_int_2 (nargs * 2, 0)));
7446 r = output_constant_def (calldesc);
7447 bc_load_externaddr (r);
7449 /* Push the address of the function to be called. */
7450 bc_expand_expr (TREE_OPERAND (exp, 0));
7452 /* Call the function, popping its address and the calldesc vector
7453 address off the evaluation stack in the process. */
7454 bc_emit_instruction (call);
7456 /* Pop the arguments off the stack. */
7457 bc_adjust_stack (nargs);
7459 /* Load the return value onto the stack. */
7460 bc_load_localaddr (retval);
7461 bc_load_memory (type, TREE_OPERAND (exp, 0));
7467 if (!SAVE_EXPR_RTL (exp))
7469 /* First time around: copy to local variable */
7470 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
7471 TYPE_ALIGN (TREE_TYPE(exp)));
7472 bc_expand_expr (TREE_OPERAND (exp, 0));
7473 bc_emit_instruction (duplicate);
7475 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7476 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7480 /* Consecutive reference: use saved copy */
7481 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7482 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7487 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
7488 how are they handled instead? */
7491 TREE_USED (exp) = 1;
7492 bc_expand_expr (STMT_BODY (exp));
7499 bc_expand_expr (TREE_OPERAND (exp, 0));
7500 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
7505 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
7510 bc_expand_address (TREE_OPERAND (exp, 0));
7515 bc_expand_expr (TREE_OPERAND (exp, 0));
7516 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7521 bc_expand_expr (bc_canonicalize_array_ref (exp));
7526 bc_expand_component_address (exp);
7528 /* If we have a bitfield, generate a proper load */
7529 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
7534 bc_expand_expr (TREE_OPERAND (exp, 0));
7535 bc_emit_instruction (drop);
7536 bc_expand_expr (TREE_OPERAND (exp, 1));
7541 bc_expand_expr (TREE_OPERAND (exp, 0));
7542 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7543 lab = bc_get_bytecode_label ();
7544 bc_emit_bytecode (xjumpifnot);
7545 bc_emit_bytecode_labelref (lab);
7547 #ifdef DEBUG_PRINT_CODE
7548 fputc ('\n', stderr);
7550 bc_expand_expr (TREE_OPERAND (exp, 1));
7551 lab1 = bc_get_bytecode_label ();
7552 bc_emit_bytecode (jump);
7553 bc_emit_bytecode_labelref (lab1);
7555 #ifdef DEBUG_PRINT_CODE
7556 fputc ('\n', stderr);
7559 bc_emit_bytecode_labeldef (lab);
7560 bc_expand_expr (TREE_OPERAND (exp, 2));
7561 bc_emit_bytecode_labeldef (lab1);
7564 case TRUTH_ANDIF_EXPR:
7566 opcode = xjumpifnot;
7569 case TRUTH_ORIF_EXPR:
7576 binoptab = optab_plus_expr;
7581 binoptab = optab_minus_expr;
7586 binoptab = optab_mult_expr;
7589 case TRUNC_DIV_EXPR:
7590 case FLOOR_DIV_EXPR:
7592 case ROUND_DIV_EXPR:
7593 case EXACT_DIV_EXPR:
7595 binoptab = optab_trunc_div_expr;
7598 case TRUNC_MOD_EXPR:
7599 case FLOOR_MOD_EXPR:
7601 case ROUND_MOD_EXPR:
7603 binoptab = optab_trunc_mod_expr;
7606 case FIX_ROUND_EXPR:
7607 case FIX_FLOOR_EXPR:
7608 case FIX_CEIL_EXPR:
7609 abort (); /* Not used for C. */
7611 case FIX_TRUNC_EXPR:
7618 abort (); /* FIXME */
7622 binoptab = optab_rdiv_expr;
7627 binoptab = optab_bit_and_expr;
7632 binoptab = optab_bit_ior_expr;
7637 binoptab = optab_bit_xor_expr;
7642 binoptab = optab_lshift_expr;
7647 binoptab = optab_rshift_expr;
7650 case TRUTH_AND_EXPR:
7652 binoptab = optab_truth_and_expr;
7657 binoptab = optab_truth_or_expr;
7662 binoptab = optab_lt_expr;
7667 binoptab = optab_le_expr;
7672 binoptab = optab_ge_expr;
7677 binoptab = optab_gt_expr;
7682 binoptab = optab_eq_expr;
7687 binoptab = optab_ne_expr;
7692 unoptab = optab_negate_expr;
7697 unoptab = optab_bit_not_expr;
7700 case TRUTH_NOT_EXPR:
7702 unoptab = optab_truth_not_expr;
7705 case PREDECREMENT_EXPR:
7707 incroptab = optab_predecrement_expr;
7710 case PREINCREMENT_EXPR:
7712 incroptab = optab_preincrement_expr;
7715 case POSTDECREMENT_EXPR:
7717 incroptab = optab_postdecrement_expr;
7720 case POSTINCREMENT_EXPR:
7722 incroptab = optab_postincrement_expr;
7727 bc_expand_constructor (exp);
7737 tree vars = TREE_OPERAND (exp, 0);
7738 int vars_need_expansion = 0;
7740 /* Need to open a binding contour here because
7741 if there are any cleanups they must be contained here.  */
7742 expand_start_bindings (0);
7744 /* Mark the corresponding BLOCK for output. */
7745 if (TREE_OPERAND (exp, 2) != 0)
7746 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
7748 /* If VARS have not yet been expanded, expand them now. */
7751 if (DECL_RTL (vars) == 0)
7753 vars_need_expansion = 1;
7756 expand_decl_init (vars);
7757 vars = TREE_CHAIN (vars);
7760 bc_expand_expr (TREE_OPERAND (exp, 1));
7762 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7772 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
7773 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
7779 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7785 bc_expand_expr (TREE_OPERAND (exp, 0));
7786 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7787 lab = bc_get_bytecode_label ();
7789 bc_emit_instruction (duplicate);
7790 bc_emit_bytecode (opcode);
7791 bc_emit_bytecode_labelref (lab);
7793 #ifdef DEBUG_PRINT_CODE
7794 fputc ('\n', stderr);
7797 bc_emit_instruction (drop);
7799 bc_expand_expr (TREE_OPERAND (exp, 1));
7800 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
7801 bc_emit_bytecode_labeldef (lab);
7807 type = TREE_TYPE (TREE_OPERAND (exp, 0));
7809 /* Push the quantum. */
7810 bc_expand_expr (TREE_OPERAND (exp, 1));
7812 /* Convert it to the lvalue's type. */
7813 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
7815 /* Push the address of the lvalue */
7816 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
7818 /* Perform actual increment */
7819 bc_expand_increment (incroptab, type);
7823 /* Return the alignment in bits of EXP, a pointer-valued expression.
7824 But don't return more than MAX_ALIGN no matter what.
7825 The alignment returned is, by default, the alignment of the thing that
7826 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7828 Otherwise, look at the expression to see if we can do better, i.e., if the
7829 expression is actually pointing at an object whose alignment is tighter. */
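/* Hedged example: for `&x' where x is a variable whose DECL_ALIGN is
   64 bits, get_pointer_alignment can report 64; for an arbitrary
   `char *p' it can only report the alignment of the pointed-to type,
   here 8 bits (one unit).  */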
7832 get_pointer_alignment (exp, max_align)
7836 unsigned align, inner;
7838 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7841 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7842 align = MIN (align, max_align);
7846 switch (TREE_CODE (exp))
7850 case NON_LVALUE_EXPR:
7851 exp = TREE_OPERAND (exp, 0);
7852 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7854 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7855 align = MIN (inner, max_align);
7859 /* If sum of pointer + int, restrict our maximum alignment to that
7860 imposed by the integer.  If not, we can't do any better than
7861 ALIGN.  */
7862 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
7865 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
7870 exp = TREE_OPERAND (exp, 0);
7874 /* See what we are pointing at and look at its alignment. */
7875 exp = TREE_OPERAND (exp, 0);
7876 if (TREE_CODE (exp) == FUNCTION_DECL)
7877 align = FUNCTION_BOUNDARY;
7878 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
7879 align = DECL_ALIGN (exp);
7880 #ifdef CONSTANT_ALIGNMENT
7881 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7882 align = CONSTANT_ALIGNMENT (exp, align);
7884 return MIN (align, max_align);
7892 /* Return the tree node and offset if a given argument corresponds to
7893 a string constant. */
7896 string_constant (arg, ptr_offset)
7902 if (TREE_CODE (arg) == ADDR_EXPR
7903 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7905 *ptr_offset = integer_zero_node;
7906 return TREE_OPERAND (arg, 0);
7908 else if (TREE_CODE (arg) == PLUS_EXPR)
7910 tree arg0 = TREE_OPERAND (arg, 0);
7911 tree arg1 = TREE_OPERAND (arg, 1);
7916 if (TREE_CODE (arg0) == ADDR_EXPR
7917 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
7920 return TREE_OPERAND (arg0, 0);
7922 else if (TREE_CODE (arg1) == ADDR_EXPR
7923 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
7926 return TREE_OPERAND (arg1, 0);
7933 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
7934 way, because it could contain a zero byte in the middle.
7935 TREE_STRING_LENGTH is the size of the character array, not the string.
7937 Unfortunately, string_constant can't access the values of const char
7938 arrays with initializers, so neither can we do so here. */
7948 src = string_constant (src, &offset_node);
7951 max = TREE_STRING_LENGTH (src);
7952 ptr = TREE_STRING_POINTER (src);
7953 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
7955 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7956 compute the offset to the following null if we don't know where to
7957 start searching for it. */
7959 for (i = 0; i < max; i++)
7962 /* We don't know the starting offset, but we do know that the string
7963 has no internal zero bytes. We can assume that the offset falls
7964 within the bounds of the string; otherwise, the programmer deserves
7965 what he gets.  Subtract the offset from the length of the string,
7966 and return that.  */
7967 /* This would perhaps not be valid if we were dealing with named
7968 arrays in addition to literal string constants. */
7969 return size_binop (MINUS_EXPR, size_int (max), offset_node);
7972 /* We have a known offset into the string. Start searching there for
7973 a null character. */
7974 if (offset_node == 0)
7978 /* Did we get a long long offset? If so, punt. */
7979 if (TREE_INT_CST_HIGH (offset_node) != 0)
7981 offset = TREE_INT_CST_LOW (offset_node);
7983 /* If the offset is known to be out of bounds, warn, and call strlen at
7984 runtime.  */
7985 if (offset < 0 || offset > max)
7987 warning ("offset outside bounds of constant string");
7990 /* Use strlen to search for the first zero byte. Since any strings
7991 constructed with build_string will have nulls appended, we win even
7992 if we get handed something like (char[4])"abcd".
7994 Since OFFSET is our starting index into the string, no further
7995 calculation is needed. */
7996 return size_int (strlen (ptr + offset));
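/* Illustrative example: given the string constant "foo\0bar" and a
   known constant offset of 4, the code above returns size_int (3),
   the length of "bar"; with an unknown offset it returns a length
   expression only when the string has no internal zero bytes.  */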
8000 expand_builtin_return_addr (fndecl_code, count, tem)
8001 enum built_in_function fndecl_code;
8007 /* Some machines need special handling before we can access
8008 arbitrary frames. For example, on the sparc, we must first flush
8009 all register windows to the stack. */
8010 #ifdef SETUP_FRAME_ADDRESSES
8012 SETUP_FRAME_ADDRESSES ();
8015 /* On the sparc, the return address is not in the frame, it is in a
8016 register. There is no way to access it off of the current frame
8017 pointer, but it can be accessed off the previous frame pointer by
8018 reading the value from the register window save area. */
8019 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
8020 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
8024 /* Scan back COUNT frames to the specified frame. */
8025 for (i = 0; i < count; i++)
8027 /* Assume the dynamic chain pointer is in the word that the
8028 frame address points to, unless otherwise specified. */
8029 #ifdef DYNAMIC_CHAIN_ADDRESS
8030 tem = DYNAMIC_CHAIN_ADDRESS (tem);
8032 tem = memory_address (Pmode, tem);
8033 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
8036 /* For __builtin_frame_address, return what we've got. */
8037 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
8040 /* For __builtin_return_address, get the return address from that
8041 frame.  */
8042 #ifdef RETURN_ADDR_RTX
8043 tem = RETURN_ADDR_RTX (count, tem);
8045 tem = memory_address (Pmode,
8046 plus_constant (tem, GET_MODE_SIZE (Pmode)));
8047 tem = gen_rtx (MEM, Pmode, tem);
8052 /* __builtin_setjmp is passed a pointer to an array of five words (not
8053 all will be used on all machines). It operates similarly to the C
8054 library function of the same name, but is more efficient. Much of
8055 the code below (and for longjmp) is copied from the handling of
8056 non-local gotos.
8058 NOTE: This is intended for use by GNAT and the exception handling
8059 scheme in the compiler and will only work in the method used by
8060 them.  */
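/* Illustrative buffer layout (hedged; the details are
   machine-dependent):

       buf[0]            frame pointer (virtual_stack_vars_rtx)
       buf[1]            resume address (the label stored below)
       buf[2] onward     stack save area, in sa_mode

   __builtin_setjmp stores these and yields 0; a later longjmp to the
   buffer restores them and jumps to the saved label, making this
   call appear to return 1.  */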
8063 expand_builtin_setjmp (buf_addr, target)
8067 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
8068 enum machine_mode sa_mode = Pmode, value_mode;
8070 int old_inhibit_defer_pop = inhibit_defer_pop;
8072 = RETURN_POPS_ARGS (get_identifier ("__dummy"),
8073 build_function_type (void_type_node, NULL_TREE),
8076 CUMULATIVE_ARGS args_so_far;
8080 value_mode = TYPE_MODE (integer_type_node);
8082 #ifdef POINTERS_EXTEND_UNSIGNED
8083 buf_addr = convert_memory_address (Pmode, buf_addr);
8086 buf_addr = force_reg (Pmode, buf_addr);
8088 if (target == 0 || GET_CODE (target) != REG
8089 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8090 target = gen_reg_rtx (value_mode);
8094 CONST_CALL_P (emit_note (NULL_PTR, NOTE_INSN_SETJMP)) = 1;
8095 current_function_calls_setjmp = 1;
8097 /* We store the frame pointer and the address of lab1 in the buffer
8098 and use the rest of it for the stack save area, which is
8099 machine-dependent. */
8100 emit_move_insn (gen_rtx (MEM, Pmode, buf_addr),
8101 virtual_stack_vars_rtx);
8103 (validize_mem (gen_rtx (MEM, Pmode,
8104 plus_constant (buf_addr,
8105 GET_MODE_SIZE (Pmode)))),
8106 gen_rtx (LABEL_REF, Pmode, lab1));
8108 #ifdef HAVE_save_stack_nonlocal
8109 if (HAVE_save_stack_nonlocal)
8110 sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
8113 stack_save = gen_rtx (MEM, sa_mode,
8114 plus_constant (buf_addr,
8115 2 * GET_MODE_SIZE (Pmode)));
8116 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
8120 emit_insn (gen_setjmp ());
8123 /* Set TARGET to zero and branch around the other case. */
8124 emit_move_insn (target, const0_rtx);
8125 emit_jump_insn (gen_jump (lab2));
8129 /* Note that setjmp clobbers FP when we get here, so we have to make
8130 sure it's marked as used by this function. */
8131 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
8133 /* Mark the static chain as clobbered here so life information
8134 doesn't get messed up for it. */
8135 emit_insn (gen_rtx (CLOBBER, VOIDmode, static_chain_rtx));
8137 /* Now put in the code to restore the frame pointer, and argument
8138 pointer, if needed. The code below is from expand_end_bindings
8139 in stmt.c; see detailed documentation there. */
8140 #ifdef HAVE_nonlocal_goto
8141 if (! HAVE_nonlocal_goto)
8143 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8145 /* Do we need to do something like:
8147 current_function_has_nonlocal_label = 1;
8149 here? It seems like we might have to, or some subset of that
8150 functionality, but I am unsure. (mrs) */
8152 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8153 if (fixed_regs[ARG_POINTER_REGNUM])
8155 #ifdef ELIMINABLE_REGS
8156 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
8158 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8159 if (elim_regs[i].from == ARG_POINTER_REGNUM
8160 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8163 if (i == sizeof elim_regs / sizeof elim_regs [0])
8166 /* Now restore our arg pointer from the address at which it
8167 was saved in our stack frame.
8168 If space hasn't been allocated for it yet, make
8169 some now.  */
8170 if (arg_pointer_save_area == 0)
8171 arg_pointer_save_area
8172 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8173 emit_move_insn (virtual_incoming_args_rtx,
8174 copy_to_reg (arg_pointer_save_area));
8179 #ifdef HAVE_nonlocal_goto_receiver
8180 if (HAVE_nonlocal_goto_receiver)
8181 emit_insn (gen_nonlocal_goto_receiver ());
8183 /* The static chain pointer contains the address of the dummy function.
8184 We need to call it here to handle some PIC cases of restoring a
8185 global pointer. Then return 1. */
8186 op0 = copy_to_mode_reg (Pmode, static_chain_rtx);
8188 /* We can't actually call emit_library_call here, so do everything
8189 it does, which isn't much for a libfunc with no args. */
8190 op0 = memory_address (FUNCTION_MODE, op0);
8192 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE,
8193 gen_rtx (SYMBOL_REF, Pmode, "__dummy"), 1);
8194 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1);
8196 #ifndef ACCUMULATE_OUTGOING_ARGS
8197 #ifdef HAVE_call_pop
8199 emit_call_insn (gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, op0),
8200 const0_rtx, next_arg_reg,
8201 GEN_INT (return_pops)));
8208 emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, op0),
8209 const0_rtx, next_arg_reg, const0_rtx));
8214 emit_move_insn (target, const1_rtx);
8220 /* Expand an expression EXP that calls a built-in function,
8221 with result going to TARGET if that's convenient
8222 (and in mode MODE if that's convenient).
8223 SUBTARGET may be used as the target for computing one of EXP's operands.
8224 IGNORE is nonzero if the value is to be ignored. */
8226 #define CALLED_AS_BUILT_IN(NODE) \
8227 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
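/* Hedged usage note: CALLED_AS_BUILT_IN (fndecl) is nonzero only when
   the function was declared with the `__builtin_' prefix spelled out,
   e.g. `__builtin_strlen', as opposed to a plain `strlen' that the
   compiler merely recognizes as built in; the expander can use it,
   for instance when not optimizing, to decide whether it is safe to
   fall back on a library call.  */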
8230 expand_builtin (exp, target, subtarget, mode, ignore)
8234 enum machine_mode mode;
8237 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8238 tree arglist = TREE_OPERAND (exp, 1);
8241 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
8242 optab builtin_optab;
8244 switch (DECL_FUNCTION_CODE (fndecl))
8249 /* build_function_call changes these into ABS_EXPR. */
8254 /* Treat these like sqrt, but only if the user asks for them. */
8255 if (! flag_fast_math)
8257 case BUILT_IN_FSQRT:
8258 /* If not optimizing, call the library function. */
8263 /* Arg could be wrong type if user redeclared this fcn wrong. */
8264 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
8267 /* Stabilize and compute the argument. */
8268 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
8269 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
8271 exp = copy_node (exp);
8272 arglist = copy_node (arglist);
8273 TREE_OPERAND (exp, 1) = arglist;
8274 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
8276 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8278 /* Make a suitable register to place result in. */
8279 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8284 switch (DECL_FUNCTION_CODE (fndecl))
8287 builtin_optab = sin_optab; break;
8289 builtin_optab = cos_optab; break;
8290 case BUILT_IN_FSQRT:
8291 builtin_optab = sqrt_optab; break;
8296 /* Compute into TARGET.
8297 Set TARGET to wherever the result comes back. */
8298 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8299 builtin_optab, op0, target, 0);
8301 /* If we were unable to expand via the builtin, stop the
8302 sequence (without outputting the insns) and break, causing
8303 a call to the library function.  */
8310 /* Check the results by default. But if flag_fast_math is turned on,
8311 then assume sqrt will always be called with valid arguments. */
8313 if (! flag_fast_math)
8315 /* Don't define the builtin FP instructions
8316 if your machine is not IEEE. */
8317 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8320 lab1 = gen_label_rtx ();
8322 /* Test the result; if it is NaN, set errno=EDOM because
8323 the argument was not in the domain. */
8324 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
8325 emit_jump_insn (gen_beq (lab1));
8329 #ifdef GEN_ERRNO_RTX
8330 rtx errno_rtx = GEN_ERRNO_RTX;
8333 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
8336 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8339 /* We can't set errno=EDOM directly; let the library call do it.
8340 Pop the arguments right away in case the call gets deleted. */
8342 expand_call (exp, target, 0);
8349 /* Output the entire sequence. */
8350 insns = get_insns ();
8356 /* __builtin_apply_args returns a block of memory allocated on
8357 the stack into which is stored the arg pointer, structure
8358 value address, static chain, and all the registers that might
8359 possibly be used in performing a function call. The code is
8360 moved to the start of the function so the incoming values are
8361 saved.  */
8362 case BUILT_IN_APPLY_ARGS:
8363 /* Don't do __builtin_apply_args more than once in a function.
8364 Save the result of the first call and reuse it. */
8365 if (apply_args_value != 0)
8366 return apply_args_value;
8368 /* When this function is called, it means that registers must be
8369 saved on entry to this function. So we migrate the
8370 call to the first insn of this function. */
8375 temp = expand_builtin_apply_args ();
8379 apply_args_value = temp;
8381 /* Put the sequence after the NOTE that starts the function.
8382 If this is inside a SEQUENCE, make the outer-level insn
8383 chain current, so the code is placed at the start of the
8384 function.  */
8385 push_topmost_sequence ();
8386 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8387 pop_topmost_sequence ();
8391 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8392 FUNCTION with a copy of the parameters described by
8393 ARGUMENTS, and ARGSIZE. It returns a block of memory
8394 allocated on the stack into which is stored all the registers
8395 that might possibly be used for returning the result of a
8396 function. ARGUMENTS is the value returned by
8397 __builtin_apply_args. ARGSIZE is the number of bytes of
8398 arguments that must be copied. ??? How should this value be
8399 computed? We'll also need a safe worst case value for varargs
8401 case BUILT_IN_APPLY:
8403 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8404 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8405 || TREE_CHAIN (arglist) == 0
8406 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8407 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8408 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8416 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8417 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
8419 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8422 /* __builtin_return (RESULT) causes the function to return the
8423 value described by RESULT. RESULT is address of the block of
8424 memory returned by __builtin_apply. */
8425 case BUILT_IN_RETURN:
8427 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8428 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8429 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8430 NULL_RTX, VOIDmode, 0));
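/* Taken together, these three builtins let one write an untyped
   forwarding function.  A sketch, where TARGET_FN is a hypothetical
   function to forward to and ARGSIZE a safe upper bound on the
   argument block size (see the ??? comment above):

     void *forward ()
     {
       void *args = __builtin_apply_args ();
       void *result = __builtin_apply ((void (*) ()) target_fn,
                                       args, ARGSIZE);
       __builtin_return (result);
     }  */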
8433 case BUILT_IN_SAVEREGS:
8434 /* Don't do __builtin_saveregs more than once in a function.
8435 Save the result of the first call and reuse it. */
8436 if (saveregs_value != 0)
8437 return saveregs_value;
8439 /* When this function is called, it means that registers must be
8440 saved on entry to this function. So we migrate the
8441 call to the first insn of this function. */
8445 /* Now really call the function. `expand_call' does not call
8446 expand_builtin, so there is no danger of infinite recursion here. */
8449 #ifdef EXPAND_BUILTIN_SAVEREGS
8450 /* Do whatever the machine needs done in this case. */
8451 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8453 /* The register where the function returns its value
8454 is likely to have something else in it, such as an argument.
8455 So preserve that register around the call. */
8457 if (value_mode != VOIDmode)
8459 rtx valreg = hard_libcall_value (value_mode);
8460 rtx saved_valreg = gen_reg_rtx (value_mode);
8462 emit_move_insn (saved_valreg, valreg);
8463 temp = expand_call (exp, target, ignore);
8464 emit_move_insn (valreg, saved_valreg);
8467 /* Generate the call, putting the value in a pseudo. */
8468 temp = expand_call (exp, target, ignore);
8474 saveregs_value = temp;
8476 /* Put the sequence after the NOTE that starts the function.
8477 If this is inside a SEQUENCE, make the outer-level insn
8478 chain current, so the code is placed at the start of the
8480 push_topmost_sequence ();
8481 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8482 pop_topmost_sequence ();
8486 /* __builtin_args_info (N) returns word N of the arg space info
8487 for the current function. The number and meanings of words
8488 is controlled by the definition of CUMULATIVE_ARGS. */
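/* A sketch of a use, assuming a target whose CUMULATIVE_ARGS is a
   single int counting argument words so far (purely illustrative;
   the layout is entirely target-defined):

     int words_used = __builtin_args_info (0);  */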
8489 case BUILT_IN_ARGS_INFO:
8491 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
8493 int *word_ptr = (int *) &current_function_args_info;
8494 tree type, elts, result;
8496 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8497 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8498 __FILE__, __LINE__);
8502 tree arg = TREE_VALUE (arglist);
8503 if (TREE_CODE (arg) != INTEGER_CST)
8504 error ("argument of `__builtin_args_info' must be constant");
8507 int wordnum = TREE_INT_CST_LOW (arg);
8509 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8510 error ("argument of `__builtin_args_info' out of range");
8512 return GEN_INT (word_ptr[wordnum]);
8516 error ("missing argument in `__builtin_args_info'");
8521 for (i = 0; i < nwords; i++)
8522 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0), elts);
8524 type = build_array_type (integer_type_node,
8525 build_index_type (build_int_2 (nwords, 0)));
8526 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8527 TREE_CONSTANT (result) = 1;
8528 TREE_STATIC (result) = 1;
8529 result = build (INDIRECT_REF, build_pointer_type (type), result);
8530 TREE_CONSTANT (result) = 1;
8531 return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
8535 /* Return the address of the first anonymous stack arg. */
8536 case BUILT_IN_NEXT_ARG:
8538 tree fntype = TREE_TYPE (current_function_decl);
8540 if ((TYPE_ARG_TYPES (fntype) == 0
8541 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8543 && ! current_function_varargs)
8545 error ("`va_start' used in function with fixed args");
8551 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8552 tree arg = TREE_VALUE (arglist);
8554 /* Strip off all nops for the sake of the comparison. This
8555 is not quite the same as STRIP_NOPS. It does more.
8556 We must also strip off INDIRECT_EXPR for C++ reference
8558 while (TREE_CODE (arg) == NOP_EXPR
8559 || TREE_CODE (arg) == CONVERT_EXPR
8560 || TREE_CODE (arg) == NON_LVALUE_EXPR
8561 || TREE_CODE (arg) == INDIRECT_REF)
8562 arg = TREE_OPERAND (arg, 0);
8563 if (arg != last_parm)
8564 warning ("second parameter of `va_start' not last named argument");
8566 else if (! current_function_varargs)
8567 /* Evidently an out of date version of <stdarg.h>; can't validate
8568 va_start's second argument, but can still work as intended. */
8569 warning ("`__builtin_next_arg' called without an argument");
8572 return expand_binop (Pmode, add_optab,
8573 current_function_internal_arg_pointer,
8574 current_function_arg_offset_rtx,
8575 NULL_RTX, 0, OPTAB_LIB_WIDEN);
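/* This is the expansion behind va_start.  A typical <stdarg.h>
   definition built on it (a sketch, not this compiler's header):

     #define va_start(AP, LASTARG) \
       ((AP) = (char *) __builtin_next_arg (LASTARG))  */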
8577 case BUILT_IN_CLASSIFY_TYPE:
8580 tree type = TREE_TYPE (TREE_VALUE (arglist));
8581 enum tree_code code = TREE_CODE (type);
8582 if (code == VOID_TYPE)
8583 return GEN_INT (void_type_class);
8584 if (code == INTEGER_TYPE)
8585 return GEN_INT (integer_type_class);
8586 if (code == CHAR_TYPE)
8587 return GEN_INT (char_type_class);
8588 if (code == ENUMERAL_TYPE)
8589 return GEN_INT (enumeral_type_class);
8590 if (code == BOOLEAN_TYPE)
8591 return GEN_INT (boolean_type_class);
8592 if (code == POINTER_TYPE)
8593 return GEN_INT (pointer_type_class);
8594 if (code == REFERENCE_TYPE)
8595 return GEN_INT (reference_type_class);
8596 if (code == OFFSET_TYPE)
8597 return GEN_INT (offset_type_class);
8598 if (code == REAL_TYPE)
8599 return GEN_INT (real_type_class);
8600 if (code == COMPLEX_TYPE)
8601 return GEN_INT (complex_type_class);
8602 if (code == FUNCTION_TYPE)
8603 return GEN_INT (function_type_class);
8604 if (code == METHOD_TYPE)
8605 return GEN_INT (method_type_class);
8606 if (code == RECORD_TYPE)
8607 return GEN_INT (record_type_class);
8608 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8609 return GEN_INT (union_type_class);
8610 if (code == ARRAY_TYPE)
8612 if (TYPE_STRING_FLAG (type))
8613 return GEN_INT (string_type_class);
8615 return GEN_INT (array_type_class);
8617 if (code == SET_TYPE)
8618 return GEN_INT (set_type_class);
8619 if (code == FILE_TYPE)
8620 return GEN_INT (file_type_class);
8621 if (code == LANG_TYPE)
8622 return GEN_INT (lang_type_class);
8624 return GEN_INT (no_type_class);
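/* For example (illustration only):

     __builtin_classify_type (3.14)   -- real_type_class
     __builtin_classify_type (&x)     -- pointer_type_class  */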
8626 case BUILT_IN_CONSTANT_P:
8631 tree arg = TREE_VALUE (arglist);
8634 return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
8635 || (TREE_CODE (arg) == ADDR_EXPR
8636 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8637 ? const1_rtx : const0_rtx);
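/* E.g. (illustration only):

     __builtin_constant_p (4)       -- 1: a 'c'-class constant node
     __builtin_constant_p ("abc")   -- 1: ADDR_EXPR of a STRING_CST
     __builtin_constant_p (x)       -- 0 for an ordinary variable  */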
8640 case BUILT_IN_FRAME_ADDRESS:
8641 /* The argument must be a nonnegative integer constant.
8642 It counts the number of frames to scan up the stack.
8643 The value is the address of that frame. */
8644 case BUILT_IN_RETURN_ADDRESS:
8645 /* The argument must be a nonnegative integer constant.
8646 It counts the number of frames to scan up the stack.
8647 The value is the return address saved in that frame. */
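/* Typical uses (a sketch):

     void *pc = __builtin_return_address (0);  -- caller's resume point
     void *fp = __builtin_frame_address (0);   -- this frame's address  */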
8649 /* Warning about missing arg was already issued. */
8651 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
8652 || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
8654 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8655 error ("invalid arg to `__builtin_frame_address'");
8657 error ("invalid arg to `__builtin_return_address'");
8662 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
8663 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
8664 hard_frame_pointer_rtx);
8666 /* For __builtin_frame_address, return what we've got. */
8667 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8670 if (GET_CODE (tem) != REG)
8671 tem = copy_to_reg (tem);
8675 /* Returns the address of the area where the structure is returned.
8677 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
8679 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
8680 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
8683 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
8685 case BUILT_IN_ALLOCA:
8687 /* Arg could be non-integer if user redeclared this fcn wrong. */
8688 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8691 /* Compute the argument. */
8692 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
8694 /* Allocate the desired space. */
8695 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
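/* E.g. `char *buf = __builtin_alloca (n);' extends the current stack
   frame by N bytes; the space vanishes on function exit (a sketch).  */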
8698 /* If not optimizing, call the library function. */
8699 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8703 /* Arg could be non-integer if user redeclared this fcn wrong. */
8704 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8707 /* Compute the argument. */
8708 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8709 /* Compute ffs, into TARGET if possible.
8710 Set TARGET to wherever the result comes back. */
8711 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8712 ffs_optab, op0, target, 1);
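/* E.g. (one-based index of the least significant set bit):

     __builtin_ffs (0)   -- 0
     __builtin_ffs (12)  -- 3, since 12 is binary 1100  */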
8717 case BUILT_IN_STRLEN:
8718 /* If not optimizing, call the library function. */
8719 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8723 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8724 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8728 tree src = TREE_VALUE (arglist);
8729 tree len = c_strlen (src);
8732 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8734 rtx result, src_rtx, char_rtx;
8735 enum machine_mode insn_mode = value_mode, char_mode;
8736 enum insn_code icode;
8738 /* If the length is known, just return it. */
8740 return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);
8742 /* If SRC is not a pointer type, don't do this operation inline. */
8746 /* Call a function if we can't compute strlen in the right mode. */
8748 while (insn_mode != VOIDmode)
8750 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
8751 if (icode != CODE_FOR_nothing)
8754 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
8756 if (insn_mode == VOIDmode)
8759 /* Make a place to write the result of the instruction. */
8762 && GET_CODE (result) == REG
8763 && GET_MODE (result) == insn_mode
8764 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8765 result = gen_reg_rtx (insn_mode);
8767 /* Make sure the operands are acceptable to the predicates. */
8769 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
8770 result = gen_reg_rtx (insn_mode);
8772 src_rtx = memory_address (BLKmode,
8773 expand_expr (src, NULL_RTX, ptr_mode,
8775 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
8776 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
8778 char_rtx = const0_rtx;
8779 char_mode = insn_operand_mode[(int)icode][2];
8780 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
8781 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
8783 emit_insn (GEN_FCN (icode) (result,
8784 gen_rtx (MEM, BLKmode, src_rtx),
8785 char_rtx, GEN_INT (align)));
8787 /* Return the value in the proper mode for this function. */
8788 if (GET_MODE (result) == value_mode)
8790 else if (target != 0)
8792 convert_move (target, result, 0);
8796 return convert_to_mode (value_mode, result, 0);
8799 case BUILT_IN_STRCPY:
8800 /* If not optimizing, call the library function. */
8801 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8805 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8806 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8807 || TREE_CHAIN (arglist) == 0
8808 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8812 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
8817 len = size_binop (PLUS_EXPR, len, integer_one_node);
8819 chainon (arglist, build_tree_list (NULL_TREE, len));
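/* In effect, when the source length is known the call has been
   rewritten (a sketch of the transformation just above):

     strcpy (d, "hi")   ==>   memcpy (d, "hi", 3)

   counting the terminating null; control then drops into the
   memcpy case below.  */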
8823 case BUILT_IN_MEMCPY:
8824 /* If not optimizing, call the library function. */
8825 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8829 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8830 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8831 || TREE_CHAIN (arglist) == 0
8832 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8833 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8834 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8838 tree dest = TREE_VALUE (arglist);
8839 tree src = TREE_VALUE (TREE_CHAIN (arglist));
8840 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8844 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8846 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8847 rtx dest_rtx, dest_mem, src_mem;
8849 /* If either SRC or DEST is not a pointer type, don't do
8850 this operation in-line. */
8851 if (src_align == 0 || dest_align == 0)
8853 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
8854 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8858 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8859 dest_mem = gen_rtx (MEM, BLKmode,
8860 memory_address (BLKmode, dest_rtx));
8861 /* There could be a void* cast on top of the object. */
8862 while (TREE_CODE (dest) == NOP_EXPR)
8863 dest = TREE_OPERAND (dest, 0);
8864 type = TREE_TYPE (TREE_TYPE (dest));
8865 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8866 src_mem = gen_rtx (MEM, BLKmode,
8867 memory_address (BLKmode,
8868 expand_expr (src, NULL_RTX,
8871 /* There could be a void* cast on top of the object. */
8872 while (TREE_CODE (src) == NOP_EXPR)
8873 src = TREE_OPERAND (src, 0);
8874 type = TREE_TYPE (TREE_TYPE (src));
8875 MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
8877 /* Copy word part most expediently. */
8878 emit_block_move (dest_mem, src_mem,
8879 expand_expr (len, NULL_RTX, VOIDmode, 0),
8880 MIN (src_align, dest_align));
8881 return force_operand (dest_rtx, NULL_RTX);
8884 case BUILT_IN_MEMSET:
8885 /* If not optimizing, call the library function. */
8886 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8890 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8891 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8892 || TREE_CHAIN (arglist) == 0
8893 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
8895 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8897 != (TREE_CODE (TREE_TYPE
8899 (TREE_CHAIN (TREE_CHAIN (arglist))))))))
8903 tree dest = TREE_VALUE (arglist);
8904 tree val = TREE_VALUE (TREE_CHAIN (arglist));
8905 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8909 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8910 rtx dest_rtx, dest_mem;
8912 /* If DEST is not a pointer type, don't do this
8913 operation in-line. */
8914 if (dest_align == 0)
8917 /* If VAL is not 0, don't do this operation in-line. */
8918 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
8921 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8922 dest_mem = gen_rtx (MEM, BLKmode,
8923 memory_address (BLKmode, dest_rtx));
8924 /* There could be a void* cast on top of the object. */
8925 while (TREE_CODE (dest) == NOP_EXPR)
8926 dest = TREE_OPERAND (dest, 0);
8927 type = TREE_TYPE (TREE_TYPE (dest));
8928 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8930 clear_storage (dest_mem, expand_expr (len, NULL_RTX, VOIDmode, 0),
8933 return force_operand (dest_rtx, NULL_RTX);
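/* Note only the zero-fill form is open-coded here (a sketch):

     memset (p, 0, n)   -- inlined via clear_storage above
     memset (p, c, n)   -- c != 0 falls back to the library call  */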
8936 /* These comparison functions need an instruction that returns an actual
8937 index. An ordinary compare that just sets the condition codes
8939 #ifdef HAVE_cmpstrsi
8940 case BUILT_IN_STRCMP:
8941 /* If not optimizing, call the library function. */
8942 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8946 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8947 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8948 || TREE_CHAIN (arglist) == 0
8949 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8951 else if (!HAVE_cmpstrsi)
8954 tree arg1 = TREE_VALUE (arglist);
8955 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8959 len = c_strlen (arg1);
8961 len = size_binop (PLUS_EXPR, integer_one_node, len);
8962 len2 = c_strlen (arg2);
8964 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
8966 /* If we don't have a constant length for the first, use the length
8967 of the second, if we know it. We don't require a constant for
8968 this case; some cost analysis could be done if both are available
8969 but neither is constant. For now, assume they're equally cheap.
8971 If both strings have constant lengths, use the smaller. This
8972 could arise if optimization results in strcpy being called with
8973 two fixed strings, or if the code was machine-generated. We should
8974 add some code to the `memcmp' handler below to deal with such
8975 situations, someday. */
8976 if (!len || TREE_CODE (len) != INTEGER_CST)
8983 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
8985 if (tree_int_cst_lt (len2, len))
8989 chainon (arglist, build_tree_list (NULL_TREE, len));
8993 case BUILT_IN_MEMCMP:
8994 /* If not optimizing, call the library function. */
8995 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8999 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9000 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9001 || TREE_CHAIN (arglist) == 0
9002 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
9003 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9004 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
9006 else if (!HAVE_cmpstrsi)
9009 tree arg1 = TREE_VALUE (arglist);
9010 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9011 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9015 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9017 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9018 enum machine_mode insn_mode
9019 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
9021 /* If we don't have POINTER_TYPE, call the function. */
9022 if (arg1_align == 0 || arg2_align == 0)
9024 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
9025 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9029 /* Make a place to write the result of the instruction. */
9032 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
9033 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9034 result = gen_reg_rtx (insn_mode);
9036 emit_insn (gen_cmpstrsi (result,
9037 gen_rtx (MEM, BLKmode,
9038 expand_expr (arg1, NULL_RTX,
9041 gen_rtx (MEM, BLKmode,
9042 expand_expr (arg2, NULL_RTX,
9045 expand_expr (len, NULL_RTX, VOIDmode, 0),
9046 GEN_INT (MIN (arg1_align, arg2_align))));
9048 /* Return the value in the proper mode for this function. */
9049 mode = TYPE_MODE (TREE_TYPE (exp));
9050 if (GET_MODE (result) == mode)
9052 else if (target != 0)
9054 convert_move (target, result, 0);
9058 return convert_to_mode (mode, result, 0);
9061 case BUILT_IN_STRCMP:
9062 case BUILT_IN_MEMCMP:
9066 case BUILT_IN_SETJMP:
9068 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9072 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9074 return expand_builtin_setjmp (buf_addr, target);
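/* Usage sketch (the buffer is an array the user code declares itself;
   five words, per the __builtin_longjmp comment below):

     void *buf[5];
     if (__builtin_setjmp (buf) == 0)
       ...                -- direct path
     else
       ...                -- reached via __builtin_longjmp  */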
9077 /* __builtin_longjmp is passed a pointer to an array of five words
9078 and a value, which is a dummy. It's similar to the C library longjmp
9079 function but works with __builtin_setjmp above. */
9080 case BUILT_IN_LONGJMP:
9081 if (arglist == 0 || TREE_CHAIN (arglist) == 0
9082 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9086 tree dummy_id = get_identifier ("__dummy");
9087 tree dummy_type = build_function_type (void_type_node, NULL_TREE);
9088 tree dummy_decl = build_decl (FUNCTION_DECL, dummy_id, dummy_type);
9089 #ifdef POINTERS_EXTEND_UNSIGNED
9092 convert_memory_address
9094 expand_expr (TREE_VALUE (arglist),
9095 NULL_RTX, VOIDmode, 0)));
9098 = force_reg (Pmode, expand_expr (TREE_VALUE (arglist),
9102 rtx fp = gen_rtx (MEM, Pmode, buf_addr);
9103 rtx lab = gen_rtx (MEM, Pmode,
9104 plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
9105 enum machine_mode sa_mode
9106 #ifdef HAVE_save_stack_nonlocal
9107 = (HAVE_save_stack_nonlocal
9108 ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
9113 rtx stack = gen_rtx (MEM, sa_mode,
9114 plus_constant (buf_addr,
9115 2 * GET_MODE_SIZE (Pmode)));
9117 DECL_EXTERNAL (dummy_decl) = 1;
9118 TREE_PUBLIC (dummy_decl) = 1;
9119 make_decl_rtl (dummy_decl, NULL_PTR, 1);
9121 /* Expand the second expression just for side-effects. */
9122 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
9123 const0_rtx, VOIDmode, 0);
9125 assemble_external (dummy_decl);
9127 /* Pick up FP, label, and SP from the block and jump. This code is
9128 from expand_goto in stmt.c; see there for detailed comments. */
9129 #ifdef HAVE_nonlocal_goto
9130 if (HAVE_nonlocal_goto)
9131 emit_insn (gen_nonlocal_goto (fp, lab, stack,
9132 XEXP (DECL_RTL (dummy_decl), 0)));
9136 lab = copy_to_reg (lab);
9137 emit_move_insn (hard_frame_pointer_rtx, fp);
9138 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
9140 /* Put in the static chain register the address of the dummy
9142 emit_move_insn (static_chain_rtx, XEXP (DECL_RTL (dummy_decl), 0));
9143 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
9144 emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
9145 emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
9146 emit_indirect_jump (lab);
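/* Layout read back by the code above (offsets in units of
   GET_MODE_SIZE (Pmode)):

     buf[0]  -- saved frame pointer
     buf[1]  -- label to resume at
     buf[2]  -- saved stack pointer (mode SA_MODE)  */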
9152 /* Various hooks for the DWARF 2 __throw routine. */
9153 case BUILT_IN_UNWIND_INIT:
9154 expand_builtin_unwind_init ();
9157 return frame_pointer_rtx;
9159 return stack_pointer_rtx;
9160 #ifdef DWARF2_UNWIND_INFO
9161 case BUILT_IN_DWARF_FP_REGNUM:
9162 return expand_builtin_dwarf_fp_regnum ();
9163 case BUILT_IN_DWARF_REG_SIZE:
9164 return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
9166 case BUILT_IN_FROB_RETURN_ADDR:
9167 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
9168 case BUILT_IN_EXTRACT_RETURN_ADDR:
9169 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
9170 case BUILT_IN_SET_RETURN_ADDR_REG:
9171 expand_builtin_set_return_addr_reg (TREE_VALUE (arglist));
9173 case BUILT_IN_EH_STUB:
9174 return expand_builtin_eh_stub ();
9175 case BUILT_IN_SET_EH_REGS:
9176 expand_builtin_set_eh_regs (TREE_VALUE (arglist),
9177 TREE_VALUE (TREE_CHAIN (arglist)));
9180 default: /* just do library call, if unknown builtin */
9181 error ("built-in function `%s' not currently supported",
9182 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9185 /* The switch statement above can drop through to cause the function
9186 to be called normally. */
9188 return expand_call (exp, target, ignore);
9191 /* Built-in functions to perform an untyped call and return. */
9193 /* For each register that may be used for calling a function, this
9194 gives a mode used to copy the register's value. VOIDmode indicates
9195 the register is not used for calling a function. If the machine
9196 has register windows, this gives only the outbound registers.
9197 INCOMING_REGNO gives the corresponding inbound register. */
9198 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
9200 /* For each register that may be used for returning values, this gives
9201 a mode used to copy the register's value. VOIDmode indicates the
9202 register is not used for returning values. If the machine has
9203 register windows, this gives only the outbound registers.
9204 INCOMING_REGNO gives the corresponding inbound register. */
9205 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
9207 /* For each register that may be used for calling a function, this
9208 gives the offset of that register into the block returned by
9209 __builtin_apply_args. 0 indicates that the register is not
9210 used for calling a function. */
9211 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
9213 /* Return the offset of register REGNO into the block returned by
9214 __builtin_apply_args. This is not declared static, since it is
9215 needed in objc-act.c. */
9218 apply_args_register_offset (regno)
9223 /* Arguments are always put in outgoing registers (in the argument
9224 block) when that makes sense. */
9225 #ifdef OUTGOING_REGNO
9226 regno = OUTGOING_REGNO(regno);
9228 return apply_args_reg_offset[regno];
9231 /* Return the size required for the block returned by __builtin_apply_args,
9232 and initialize apply_args_mode. */
9237 static int size = -1;
9239 enum machine_mode mode;
9241 /* The values computed by this function never change. */
9244 /* The first value is the incoming arg-pointer. */
9245 size = GET_MODE_SIZE (Pmode);
9247 /* The second value is the structure value address unless this is
9248 passed as an "invisible" first argument. */
9249 if (struct_value_rtx)
9250 size += GET_MODE_SIZE (Pmode);
9252 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9253 if (FUNCTION_ARG_REGNO_P (regno))
9255 /* Search for the proper mode for copying this register's
9256 value. I'm not sure this is right, but it works so far. */
9257 enum machine_mode best_mode = VOIDmode;
9259 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9261 mode = GET_MODE_WIDER_MODE (mode))
9262 if (HARD_REGNO_MODE_OK (regno, mode)
9263 && HARD_REGNO_NREGS (regno, mode) == 1)
9266 if (best_mode == VOIDmode)
9267 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9269 mode = GET_MODE_WIDER_MODE (mode))
9270 if (HARD_REGNO_MODE_OK (regno, mode)
9271 && (mov_optab->handlers[(int) mode].insn_code
9272 != CODE_FOR_nothing))
9276 if (mode == VOIDmode)
9279 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9280 if (size % align != 0)
9281 size = CEIL (size, align) * align;
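/* E.g. size == 6 with an 8-byte alignment rounds up to 8:
   CEIL (6, 8) * 8 == 8 (illustration of the idiom used throughout).  */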
9282 apply_args_reg_offset[regno] = size;
9283 size += GET_MODE_SIZE (mode);
9284 apply_args_mode[regno] = mode;
9288 apply_args_mode[regno] = VOIDmode;
9289 apply_args_reg_offset[regno] = 0;
9295 /* Return the size required for the block returned by __builtin_apply,
9296 and initialize apply_result_mode. */
9299 apply_result_size ()
9301 static int size = -1;
9303 enum machine_mode mode;
9305 /* The values computed by this function never change. */
9310 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9311 if (FUNCTION_VALUE_REGNO_P (regno))
9313 /* Search for the proper mode for copying this register's
9314 value. I'm not sure this is right, but it works so far. */
9315 enum machine_mode best_mode = VOIDmode;
9317 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9319 mode = GET_MODE_WIDER_MODE (mode))
9320 if (HARD_REGNO_MODE_OK (regno, mode))
9323 if (best_mode == VOIDmode)
9324 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9326 mode = GET_MODE_WIDER_MODE (mode))
9327 if (HARD_REGNO_MODE_OK (regno, mode)
9328 && (mov_optab->handlers[(int) mode].insn_code
9329 != CODE_FOR_nothing))
9333 if (mode == VOIDmode)
9336 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9337 if (size % align != 0)
9338 size = CEIL (size, align) * align;
9339 size += GET_MODE_SIZE (mode);
9340 apply_result_mode[regno] = mode;
9343 apply_result_mode[regno] = VOIDmode;
9345 /* Allow targets that use untyped_call and untyped_return to override
9346 the size so that machine-specific information can be stored here. */
9347 #ifdef APPLY_RESULT_SIZE
9348 size = APPLY_RESULT_SIZE;
9354 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9355 /* Create a vector describing the result block RESULT. If SAVEP is true,
9356 the result block is used to save the values; otherwise it is used to
9357 restore the values. */
9360 result_vector (savep, result)
9364 int regno, size, align, nelts;
9365 enum machine_mode mode;
9367 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9370 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9371 if ((mode = apply_result_mode[regno]) != VOIDmode)
9373 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9374 if (size % align != 0)
9375 size = CEIL (size, align) * align;
9376 reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
9377 mem = change_address (result, mode,
9378 plus_constant (XEXP (result, 0), size));
9379 savevec[nelts++] = (savep
9380 ? gen_rtx (SET, VOIDmode, mem, reg)
9381 : gen_rtx (SET, VOIDmode, reg, mem));
9382 size += GET_MODE_SIZE (mode);
9384 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
9386 #endif /* HAVE_untyped_call or HAVE_untyped_return */
9388 /* Save the state required to perform an untyped call with the same
9389 arguments as were passed to the current function. */
9392 expand_builtin_apply_args ()
9395 int size, align, regno;
9396 enum machine_mode mode;
9398 /* Create a block where the arg-pointer, structure value address,
9399 and argument registers can be saved. */
9400 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
9402 /* Walk past the arg-pointer and structure value address. */
9403 size = GET_MODE_SIZE (Pmode);
9404 if (struct_value_rtx)
9405 size += GET_MODE_SIZE (Pmode);
9407 /* Save each register used in calling a function to the block. */
9408 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9409 if ((mode = apply_args_mode[regno]) != VOIDmode)
9413 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9414 if (size % align != 0)
9415 size = CEIL (size, align) * align;
9417 tem = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9420 /* For reg-stack.c's stack register household.
9421 Compare with a similar piece of code in function.c. */
9423 emit_insn (gen_rtx (USE, mode, tem));
9426 emit_move_insn (change_address (registers, mode,
9427 plus_constant (XEXP (registers, 0),
9430 size += GET_MODE_SIZE (mode);
9433 /* Save the arg pointer to the block. */
9434 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9435 copy_to_reg (virtual_incoming_args_rtx));
9436 size = GET_MODE_SIZE (Pmode);
9438 /* Save the structure value address unless this is passed as an
9439 "invisible" first argument. */
9440 if (struct_value_incoming_rtx)
9442 emit_move_insn (change_address (registers, Pmode,
9443 plus_constant (XEXP (registers, 0),
9445 copy_to_reg (struct_value_incoming_rtx));
9446 size += GET_MODE_SIZE (Pmode);
9449 /* Return the address of the block. */
9450 return copy_addr_to_reg (XEXP (registers, 0));
9453 /* Perform an untyped call and save the state required to perform an
9454 untyped return of whatever value was returned by the given function. */
9457 expand_builtin_apply (function, arguments, argsize)
9458 rtx function, arguments, argsize;
9460 int size, align, regno;
9461 enum machine_mode mode;
9462 rtx incoming_args, result, reg, dest, call_insn;
9463 rtx old_stack_level = 0;
9464 rtx call_fusage = 0;
9466 /* Create a block where the return registers can be saved. */
9467 result = assign_stack_local (BLKmode, apply_result_size (), -1);
9469 /* ??? The argsize value should be adjusted here. */
9471 /* Fetch the arg pointer from the ARGUMENTS block. */
9472 incoming_args = gen_reg_rtx (Pmode);
9473 emit_move_insn (incoming_args,
9474 gen_rtx (MEM, Pmode, arguments));
9475 #ifndef STACK_GROWS_DOWNWARD
9476 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9477 incoming_args, 0, OPTAB_LIB_WIDEN);
9480 /* Perform postincrements before actually calling the function. */
9483 /* Push a new argument block and copy the arguments. */
9484 do_pending_stack_adjust ();
9485 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
9487 /* Push a block of memory onto the stack to store the memory arguments.
9488 Save the address in a register, and copy the memory arguments. ??? I
9489 haven't figured out how the calling convention macros affect this,
9490 but it's likely that the source and/or destination addresses in
9491 the block copy will need updating in machine specific ways. */
9492 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
9493 emit_block_move (gen_rtx (MEM, BLKmode, dest),
9494 gen_rtx (MEM, BLKmode, incoming_args),
9496 PARM_BOUNDARY / BITS_PER_UNIT);
9498 /* Refer to the argument block. */
9500 arguments = gen_rtx (MEM, BLKmode, arguments);
9502 /* Walk past the arg-pointer and structure value address. */
9503 size = GET_MODE_SIZE (Pmode);
9504 if (struct_value_rtx)
9505 size += GET_MODE_SIZE (Pmode);
9507 /* Restore each of the registers previously saved. Make USE insns
9508 for each of these registers for use in making the call. */
9509 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9510 if ((mode = apply_args_mode[regno]) != VOIDmode)
9512 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9513 if (size % align != 0)
9514 size = CEIL (size, align) * align;
9515 reg = gen_rtx (REG, mode, regno);
9516 emit_move_insn (reg,
9517 change_address (arguments, mode,
9518 plus_constant (XEXP (arguments, 0),
9521 use_reg (&call_fusage, reg);
9522 size += GET_MODE_SIZE (mode);
9525 /* Restore the structure value address unless this is passed as an
9526 "invisible" first argument. */
9527 size = GET_MODE_SIZE (Pmode);
9528 if (struct_value_rtx)
9530 rtx value = gen_reg_rtx (Pmode);
9531 emit_move_insn (value,
9532 change_address (arguments, Pmode,
9533 plus_constant (XEXP (arguments, 0),
9535 emit_move_insn (struct_value_rtx, value);
9536 if (GET_CODE (struct_value_rtx) == REG)
9537 use_reg (&call_fusage, struct_value_rtx);
9538 size += GET_MODE_SIZE (Pmode);
9541 /* All arguments and registers used for the call are set up by now! */
9542 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
9544 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
9545 and we don't want to load it into a register as an optimization,
9546 because prepare_call_address already did it if it should be done. */
9547 if (GET_CODE (function) != SYMBOL_REF)
9548 function = memory_address (FUNCTION_MODE, function);
9550 /* Generate the actual call instruction and save the return value. */
9551 #ifdef HAVE_untyped_call
9552 if (HAVE_untyped_call)
9553 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
9554 result, result_vector (1, result)));
9557 #ifdef HAVE_call_value
9558 if (HAVE_call_value)
9562 /* Locate the unique return register. It is not possible to
9563 express a call that sets more than one return register using
9564 call_value; use untyped_call for that. In fact, untyped_call
9565 only needs to save the return registers in the given block. */
9566 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9567 if ((mode = apply_result_mode[regno]) != VOIDmode)
9570 abort (); /* HAVE_untyped_call required. */
9571 valreg = gen_rtx (REG, mode, regno);
9574 emit_call_insn (gen_call_value (valreg,
9575 gen_rtx (MEM, FUNCTION_MODE, function),
9576 const0_rtx, NULL_RTX, const0_rtx));
9578 emit_move_insn (change_address (result, GET_MODE (valreg),
9586 /* Find the CALL insn we just emitted. */
9587 for (call_insn = get_last_insn ();
9588 call_insn && GET_CODE (call_insn) != CALL_INSN;
9589 call_insn = PREV_INSN (call_insn))
9595 /* Put the register usage information on the CALL. If there is already
9596 some usage information, put ours at the end. */
9597 if (CALL_INSN_FUNCTION_USAGE (call_insn))
9601 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
9602 link = XEXP (link, 1))
9605 XEXP (link, 1) = call_fusage;
9608 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
9610 /* Restore the stack. */
9611 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
9613 /* Return the address of the result block. */
9614 return copy_addr_to_reg (XEXP (result, 0));
9617 /* Perform an untyped return. */
9620 expand_builtin_return (result)
9623 int size, align, regno;
9624 enum machine_mode mode;
9626 rtx call_fusage = 0;
9628 apply_result_size ();
9629 result = gen_rtx (MEM, BLKmode, result);
9631 #ifdef HAVE_untyped_return
9632 if (HAVE_untyped_return)
9634 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
9640 /* Restore the return value and note that each value is used. */
9642 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9643 if ((mode = apply_result_mode[regno]) != VOIDmode)
9645 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9646 if (size % align != 0)
9647 size = CEIL (size, align) * align;
9648 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9649 emit_move_insn (reg,
9650 change_address (result, mode,
9651 plus_constant (XEXP (result, 0),
9654 push_to_sequence (call_fusage);
9655 emit_insn (gen_rtx (USE, VOIDmode, reg));
9656 call_fusage = get_insns ();
9658 size += GET_MODE_SIZE (mode);
9661 /* Put the USE insns before the return. */
9662 emit_insns (call_fusage);
9664 /* Return whatever value was restored by jumping directly to the end
9666 expand_null_return ();
9669 /* Expand code for a post- or pre- increment or decrement
9670 and return the RTX for the result.
9671 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
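/* E.g. (illustration):

     j = i++;   -- POST is 1; J gets the old value of I
     j = ++i;   -- POST is 0; J gets the incremented value  */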
9674 expand_increment (exp, post, ignore)
9678 register rtx op0, op1;
9679 register rtx temp, value;
9680 register tree incremented = TREE_OPERAND (exp, 0);
9681 optab this_optab = add_optab;
9683 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9684 int op0_is_copy = 0;
9685 int single_insn = 0;
9686 /* 1 means we can't store into OP0 directly,
9687 because it is a subreg narrower than a word,
9688 and we don't dare clobber the rest of the word. */
9691 if (output_bytecode)
9693 bc_expand_expr (exp);
9697 /* Stabilize any component ref that might need to be
9698 evaluated more than once below. */
9700 || TREE_CODE (incremented) == BIT_FIELD_REF
9701 || (TREE_CODE (incremented) == COMPONENT_REF
9702 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9703 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9704 incremented = stabilize_reference (incremented);
9705 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9706 ones into save exprs so that they don't accidentally get evaluated
9707 more than once by the code below. */
9708 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9709 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9710 incremented = save_expr (incremented);
9712 /* Compute the operands as RTX.
9713 Note whether OP0 is the actual lvalue or a copy of it:
9714 I believe it is a copy iff it is a register or subreg
9715 and insns were generated in computing it. */
9717 temp = get_last_insn ();
9718 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9720 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9721 in place but instead must do sign- or zero-extension during assignment,
9722 so we copy it into a new register and let the code below use it as
9725 Note that we can safely modify this SUBREG since it is known not to be
9726 shared (it was made by the expand_expr call above). */
9728 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9731 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9735 else if (GET_CODE (op0) == SUBREG
9736 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9738 /* We cannot increment this SUBREG in place. If we are
9739 post-incrementing, get a copy of the old value. Otherwise,
9740 just mark that we cannot increment in place. */
9742 op0 = copy_to_reg (op0);
9747 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9748 && temp != get_last_insn ());
9749 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9750 EXPAND_MEMORY_USE_BAD);
9752 /* Decide whether incrementing or decrementing. */
9753 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9754 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9755 this_optab = sub_optab;
9757 /* Convert decrement by a constant into a negative increment. */
9758 if (this_optab == sub_optab
9759 && GET_CODE (op1) == CONST_INT)
9761 op1 = GEN_INT (- INTVAL (op1));
9762 this_optab = add_optab;
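/* E.g. `x -= 5' is handled from here on as `x += -5', so only the
   add_optab path need be considered (illustration).  */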
9765 /* For a preincrement, see if we can do this with a single instruction. */
9768 icode = (int) this_optab->handlers[(int) mode].insn_code;
9769 if (icode != (int) CODE_FOR_nothing
9770 /* Make sure that OP0 is valid for operands 0 and 1
9771 of the insn we want to queue. */
9772 && (*insn_operand_predicate[icode][0]) (op0, mode)
9773 && (*insn_operand_predicate[icode][1]) (op0, mode)
9774 && (*insn_operand_predicate[icode][2]) (op1, mode))
9778 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9779 then we cannot just increment OP0. We must therefore contrive to
9780 increment the original value. Then, for postincrement, we can return
9781 OP0 since it is a copy of the old value. For preincrement, expand here
9782 unless we can do it with a single insn.
9784 Likewise if storing directly into OP0 would clobber high bits
9785 we need to preserve (bad_subreg). */
9786 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9788 /* This is the easiest way to increment the value wherever it is.
9789 Problems with multiple evaluation of INCREMENTED are prevented
9790 because either (1) it is a component_ref or preincrement,
9791 in which case it was stabilized above, or (2) it is an array_ref
9792 with constant index in an array in a register, which is
9793 safe to reevaluate. */
9794 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9795 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9796 ? MINUS_EXPR : PLUS_EXPR),
9799 TREE_OPERAND (exp, 1));
9801 while (TREE_CODE (incremented) == NOP_EXPR
9802 || TREE_CODE (incremented) == CONVERT_EXPR)
9804 newexp = convert (TREE_TYPE (incremented), newexp);
9805 incremented = TREE_OPERAND (incremented, 0);
9808 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9809 return post ? op0 : temp;
9814 /* We have a true reference to the value in OP0.
9815 If there is an insn to add or subtract in this mode, queue it.
9816 Queueing the increment insn avoids the register shuffling
9817 that often results if we must increment now and first save
9818 the old value for subsequent use. */
9820 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9821 op0 = stabilize (op0);
9824 icode = (int) this_optab->handlers[(int) mode].insn_code;
9825 if (icode != (int) CODE_FOR_nothing
9826 /* Make sure that OP0 is valid for operands 0 and 1
9827 of the insn we want to queue. */
9828 && (*insn_operand_predicate[icode][0]) (op0, mode)
9829 && (*insn_operand_predicate[icode][1]) (op0, mode))
9831 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9832 op1 = force_reg (mode, op1);
9834 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9836 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9838 rtx addr = force_reg (Pmode, XEXP (op0, 0));
9841 op0 = change_address (op0, VOIDmode, addr);
9842 temp = force_reg (GET_MODE (op0), op0);
9843 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9844 op1 = force_reg (mode, op1);
9846 /* The increment queue is LIFO, thus we have to `queue'
9847 the instructions in reverse order. */
9848 enqueue_insn (op0, gen_move_insn (op0, temp));
9849 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9854 /* Preincrement, or we can't increment with one simple insn. */
9856 /* Save a copy of the value before inc or dec, to return it later. */
9857 temp = value = copy_to_reg (op0);
9859 /* Arrange to return the incremented value. */
9860 /* Copy the rtx because expand_binop will protect from the queue,
9861 and the results of that would be invalid for us to return
9862 if our caller does emit_queue before using our result. */
9863 temp = copy_rtx (value = op0);
9865 /* Increment however we can. */
9866 op1 = expand_binop (mode, this_optab, value, op1,
9867 flag_check_memory_usage ? NULL_RTX : op0,
9868 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9869 /* Make sure the value is stored into OP0. */
9871 emit_move_insn (op0, op1);
9876 /* Expand all function calls contained within EXP, innermost ones first.
9877 But don't look within expressions that have sequence points.
9878 For each CALL_EXPR, record the rtx for its value
9879 in the CALL_EXPR_RTL field. */
9882 preexpand_calls (exp)
9885 register int nops, i;
9886 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9888 if (! do_preexpand_calls)
9891 /* Only expressions and references can contain calls. */
9893 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9896 switch (TREE_CODE (exp))
9899 /* Do nothing if already expanded. */
9900 if (CALL_EXPR_RTL (exp) != 0
9901 /* Do nothing if the call returns a variable-sized object. */
9902 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
9903 /* Do nothing to built-in functions. */
9904 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9905 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9907 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9910 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9915 case TRUTH_ANDIF_EXPR:
9916 case TRUTH_ORIF_EXPR:
9917 /* If we find one of these, then we can be sure
9918 the adjust will be done for it (since it makes jumps).
9919 Do it now, so that if this is inside an argument
9920 of a function, we don't get the stack adjustment
9921 after some other args have already been pushed. */
9922 do_pending_stack_adjust ();
9927 case WITH_CLEANUP_EXPR:
9928 case CLEANUP_POINT_EXPR:
9932 if (SAVE_EXPR_RTL (exp) != 0)
9936 nops = tree_code_length[(int) TREE_CODE (exp)];
9937 for (i = 0; i < nops; i++)
9938 if (TREE_OPERAND (exp, i) != 0)
9940 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9941 if (type == 'e' || type == '<' || type == '1' || type == '2'
9943 preexpand_calls (TREE_OPERAND (exp, i));
9947 /* At the start of a function, record that we have no previously-pushed
9948 arguments waiting to be popped. */
9951 init_pending_stack_adjust ()
9953 pending_stack_adjust = 0;
9956 /* When exiting from a function, if safe, clear out any pending stack adjust
9957 so the adjustment won't get done. */
9960 clear_pending_stack_adjust ()
9962 #ifdef EXIT_IGNORE_STACK
9964 && ! flag_omit_frame_pointer && EXIT_IGNORE_STACK
9965 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9966 && ! flag_inline_functions)
9967 pending_stack_adjust = 0;
9971 /* Pop any previously-pushed arguments that have not been popped yet. */
9974 do_pending_stack_adjust ()
9976 if (inhibit_defer_pop == 0)
9978 if (pending_stack_adjust != 0)
9979 adjust_stack (GEN_INT (pending_stack_adjust));
9980 pending_stack_adjust = 0;
9984 /* Expand conditional expressions. */
9986 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9987 LABEL is an rtx of code CODE_LABEL, in this function and all the
9991 jumpifnot (exp, label)
9995 do_jump (exp, label, NULL_RTX);
9998 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
10001 jumpif (exp, label)
10005 do_jump (exp, NULL_RTX, label);
10008 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
10009 the result is zero, or IF_TRUE_LABEL if the result is one.
10010 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
10011 meaning fall through in that case.
10013 do_jump always does any pending stack adjust except when it does not
10014 actually perform a jump. An example where there is no jump
10015 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
10017 This function is responsible for optimizing cases such as
10018 &&, || and comparison operators in EXP. */
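/* E.g. `if (a && b) ...' compiles to two conditional jumps and never
   materializes the boolean value (a sketch of the output):

     if (a == 0) goto false_label;
     if (b == 0) goto false_label;
     ... then-clause ...
   false_label:  */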
10021 do_jump (exp, if_false_label, if_true_label)
10023 rtx if_false_label, if_true_label;
10025 register enum tree_code code = TREE_CODE (exp);
10026 /* Some cases need to create a label to jump to
10027 in order to properly fall through.
10028 These cases set DROP_THROUGH_LABEL nonzero. */
10029 rtx drop_through_label = 0;
10031 rtx comparison = 0;
10034 enum machine_mode mode;
10044 temp = integer_zerop (exp) ? if_false_label : if_true_label;
10050 /* This is not true with #pragma weak */
10052 /* The address of something can never be zero. */
10054 emit_jump (if_true_label);
10059 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
10060 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
10061 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
10064 /* If we are narrowing the operand, we have to do the compare in the
10066 if ((TYPE_PRECISION (TREE_TYPE (exp))
10067 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10069 case NON_LVALUE_EXPR:
10070 case REFERENCE_EXPR:
10075 /* These cannot change zero->non-zero or vice versa. */
10076 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10080 /* This is never less insns than evaluating the PLUS_EXPR followed by
10081 a test and can be longer if the test is eliminated. */
10083 /* Reduce to minus. */
10084 exp = build (MINUS_EXPR, TREE_TYPE (exp),
10085 TREE_OPERAND (exp, 0),
10086 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
10087 TREE_OPERAND (exp, 1))));
10088 /* Process as MINUS. */
10092 /* Non-zero iff operands of minus differ. */
10093 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
10094 TREE_OPERAND (exp, 0),
10095 TREE_OPERAND (exp, 1)),
10100 /* If we are AND'ing with a small constant, do this comparison in the
10101 smallest type that fits. If the machine doesn't have comparisons
10102 that small, it will be converted back to the wider comparison.
10103 This helps if we are testing the sign bit of a narrower object.
10104 combine can't do this for us because it can't know whether a
10105 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
10107 if (! SLOW_BYTE_ACCESS
10108 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
10109 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
10110 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
10111 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
10112 && (type = type_for_mode (mode, 1)) != 0
10113 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10114 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10115 != CODE_FOR_nothing))
10117 do_jump (convert (type, exp), if_false_label, if_true_label);
10122 case TRUTH_NOT_EXPR:
10123 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10126 case TRUTH_ANDIF_EXPR:
10127 if (if_false_label == 0)
10128 if_false_label = drop_through_label = gen_label_rtx ();
10129 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
10130 start_cleanup_deferal ();
10131 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10132 end_cleanup_deferal ();
10135 case TRUTH_ORIF_EXPR:
10136 if (if_true_label == 0)
10137 if_true_label = drop_through_label = gen_label_rtx ();
10138 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
10139 start_cleanup_deferal ();
10140 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10141 end_cleanup_deferal ();
10144 case COMPOUND_EXPR:
10145 push_temp_slots ();
10146 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
10147 preserve_temp_slots (NULL_RTX);
10148 free_temp_slots ();
10151 do_pending_stack_adjust ();
10152 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10155 case COMPONENT_REF:
10156 case BIT_FIELD_REF:
10159 int bitsize, bitpos, unsignedp;
10160 enum machine_mode mode;
10166 /* Get description of this reference. We don't actually care
10167 about the underlying object here. */
10168 get_inner_reference (exp, &bitsize, &bitpos, &offset,
10169 &mode, &unsignedp, &volatilep,
10172 type = type_for_size (bitsize, unsignedp);
10173 if (! SLOW_BYTE_ACCESS
10174 && type != 0 && bitsize >= 0
10175 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10176 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10177 != CODE_FOR_nothing))
10179 do_jump (convert (type, exp), if_false_label, if_true_label);
10186 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10187 if (integer_onep (TREE_OPERAND (exp, 1))
10188 && integer_zerop (TREE_OPERAND (exp, 2)))
10189 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10191 else if (integer_zerop (TREE_OPERAND (exp, 1))
10192 && integer_onep (TREE_OPERAND (exp, 2)))
10193 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10197 register rtx label1 = gen_label_rtx ();
10198 drop_through_label = gen_label_rtx ();
10200 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10202 start_cleanup_deferal ();
10203 /* Now the THEN-expression. */
10204 do_jump (TREE_OPERAND (exp, 1),
10205 if_false_label ? if_false_label : drop_through_label,
10206 if_true_label ? if_true_label : drop_through_label);
10207 /* In case the do_jump just above never jumps. */
10208 do_pending_stack_adjust ();
10209 emit_label (label1);
10211 /* Now the ELSE-expression. */
10212 do_jump (TREE_OPERAND (exp, 2),
10213 if_false_label ? if_false_label : drop_through_label,
10214 if_true_label ? if_true_label : drop_through_label);
10215 end_cleanup_deferal ();
10221 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10223 if (integer_zerop (TREE_OPERAND (exp, 1)))
10224 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10225 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10226 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10229 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10230 fold (build (EQ_EXPR, TREE_TYPE (exp),
10231 fold (build1 (REALPART_EXPR,
10232 TREE_TYPE (inner_type),
10233 TREE_OPERAND (exp, 0))),
10234 fold (build1 (REALPART_EXPR,
10235 TREE_TYPE (inner_type),
10236 TREE_OPERAND (exp, 1))))),
10237 fold (build (EQ_EXPR, TREE_TYPE (exp),
10238 fold (build1 (IMAGPART_EXPR,
10239 TREE_TYPE (inner_type),
10240 TREE_OPERAND (exp, 0))),
10241 fold (build1 (IMAGPART_EXPR,
10242 TREE_TYPE (inner_type),
10243 TREE_OPERAND (exp, 1))))))),
10244 if_false_label, if_true_label);
10245 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10246 && !can_compare_p (TYPE_MODE (inner_type)))
10247 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10249 comparison = compare (exp, EQ, EQ);
10255 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10257 if (integer_zerop (TREE_OPERAND (exp, 1)))
10258 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10259 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10260 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10263 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10264 fold (build (NE_EXPR, TREE_TYPE (exp),
10265 fold (build1 (REALPART_EXPR,
10266 TREE_TYPE (inner_type),
10267 TREE_OPERAND (exp, 0))),
10268 fold (build1 (REALPART_EXPR,
10269 TREE_TYPE (inner_type),
10270 TREE_OPERAND (exp, 1))))),
10271 fold (build (NE_EXPR, TREE_TYPE (exp),
10272 fold (build1 (IMAGPART_EXPR,
10273 TREE_TYPE (inner_type),
10274 TREE_OPERAND (exp, 0))),
10275 fold (build1 (IMAGPART_EXPR,
10276 TREE_TYPE (inner_type),
10277 TREE_OPERAND (exp, 1))))))),
10278 if_false_label, if_true_label);
10279 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10280 && !can_compare_p (TYPE_MODE (inner_type)))
10281 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10283 comparison = compare (exp, NE, NE);
10288 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10290 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10291 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10293 comparison = compare (exp, LT, LTU);
10297 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10299 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10300 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10302 comparison = compare (exp, LE, LEU);
10306 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10308 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10309 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10311 comparison = compare (exp, GT, GTU);
10315 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10317 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10318 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10320 comparison = compare (exp, GE, GEU);
10325 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
	 comparisons and tests from non-SI objects to have different code
	 sequences.  */
      /* Copy to register to avoid generating bad insns by cse
	 from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
	temp = copy_to_reg (temp);
#endif
10335 do_pending_stack_adjust ();
10336 if (GET_CODE (temp) == CONST_INT)
10337 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10338 else if (GET_CODE (temp) == LABEL_REF)
10339 comparison = const_true_rtx;
10340 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10341 && !can_compare_p (GET_MODE (temp)))
10342 /* Note swapping the labels gives us not-equal. */
10343 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10344 else if (GET_MODE (temp) != VOIDmode)
10345 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
10346 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10347 GET_MODE (temp), NULL_RTX, 0);
  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();
10355 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10356 straight into a conditional jump instruction as the jump condition.
10357 Otherwise, all the work has been done already. */
  if (comparison == const_true_rtx)
    {
      if (if_true_label)
	emit_jump (if_true_label);
    }
  else if (comparison == const0_rtx)
    {
      if (if_false_label)
	emit_jump (if_false_label);
    }
10369 else if (comparison)
10370 do_jump_for_compare (comparison, if_false_label, if_true_label);
10372 if (drop_through_label)
10374 /* If do_jump produces code that might be jumped around,
10375 do any stack adjusts from that code, before the place
10376 where control merges in. */
10377 do_pending_stack_adjust ();
10378 emit_label (drop_through_label);
10382 /* Given a comparison expression EXP for values too wide to be compared
10383 with one insn, test the comparison and jump to the appropriate label.
10384 The code of EXP is ignored; we always test GT if SWAP is 0,
10385 and LT if SWAP is 1. */
static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
10393 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10394 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10395 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10396 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10397 rtx drop_through_label = 0;
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int i;
10401 if (! if_true_label || ! if_false_label)
10402 drop_through_label = gen_label_rtx ();
10403 if (! if_true_label)
10404 if_true_label = drop_through_label;
10405 if (! if_false_label)
10406 if_false_label = drop_through_label;
10408 /* Compare a word at a time, high order first. */
10409 for (i = 0; i < nwords; i++)
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}
10425 /* All but high-order word must be compared as unsigned. */
10426 comp = compare_from_rtx (op0_word, op1_word,
10427 (unsignedp || i > 0) ? GTU : GT,
10428 unsignedp, word_mode, NULL_RTX, 0);
10429 if (comp == const_true_rtx)
10430 emit_jump (if_true_label);
10431 else if (comp != const0_rtx)
10432 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10434 /* Consider lower words only if these are equal. */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, 0);
10437 if (comp == const_true_rtx)
10438 emit_jump (if_false_label);
10439 else if (comp != const0_rtx)
10440 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10443 if (if_false_label)
10444 emit_jump (if_false_label);
10445 if (drop_through_label)
10446 emit_label (drop_through_label);
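/* A minimal illustrative sketch (not compiled here; wide_gtu is a
   hypothetical name): the loop above is the RTL analogue of the following
   C function over an array of words with index 0 least significant.  The
   real code additionally compares the highest word with a signed GT when
   the operands are signed.  */
#if 0
static int
wide_gtu (unsigned long *op0, unsigned long *op1, int nwords)
{
  int i;

  for (i = nwords - 1; i >= 0; i--)
    {
      if (op0[i] > op1[i])
	return 1;		/* higher-order difference decides */
      if (op0[i] != op1[i])
	return 0;		/* op0[i] < op1[i], so op0 < op1 */
    }
  return 0;			/* all words equal: not greater */
}
#endif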
10449 /* Compare OP0 with OP1, word at a time, in mode MODE.
10450 UNSIGNEDP says to do unsigned comparison.
10451 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
static void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
10460 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;
10464 if (! if_true_label || ! if_false_label)
10465 drop_through_label = gen_label_rtx ();
10466 if (! if_true_label)
10467 if_true_label = drop_through_label;
10468 if (! if_false_label)
10469 if_false_label = drop_through_label;
10471 /* Compare a word at a time, high order first. */
10472 for (i = 0; i < nwords; i++)
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}
10488 /* All but high-order word must be compared as unsigned. */
10489 comp = compare_from_rtx (op0_word, op1_word,
10490 (unsignedp || i > 0) ? GTU : GT,
10491 unsignedp, word_mode, NULL_RTX, 0);
10492 if (comp == const_true_rtx)
10493 emit_jump (if_true_label);
10494 else if (comp != const0_rtx)
10495 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10497 /* Consider lower words only if these are equal. */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, 0);
10500 if (comp == const_true_rtx)
10501 emit_jump (if_false_label);
10502 else if (comp != const0_rtx)
10503 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10506 if (if_false_label)
10507 emit_jump (if_false_label);
10508 if (drop_through_label)
10509 emit_label (drop_through_label);
10512 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10513 with one insn, test the comparison and jump to the appropriate label. */
static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
10520 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10521 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10522 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10523 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;
10527 if (! if_false_label)
10528 drop_through_label = if_false_label = gen_label_rtx ();
10530 for (i = 0; i < nwords; i++)
10532 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
10533 operand_subword_force (op1, i, mode),
10534 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10535 word_mode, NULL_RTX, 0);
10536 if (comp == const_true_rtx)
10537 emit_jump (if_false_label);
10538 else if (comp != const0_rtx)
10539 do_jump_for_compare (comp, if_false_label, NULL_RTX);
  if (if_true_label)
    emit_jump (if_true_label);
10544 if (drop_through_label)
10545 emit_label (drop_through_label);
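/* A minimal illustrative sketch (not compiled here; wide_eq is a
   hypothetical name): multiword equality is a conjunction of word compares;
   the first mismatching word jumps to the false label, and only a clean
   fall-through reaches the true label.  */
#if 0
static int
wide_eq (unsigned long *op0, unsigned long *op1, int nwords)
{
  int i;

  for (i = 0; i < nwords; i++)
    if (op0[i] != op1[i])
      return 0;
  return 1;
}
#endif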
10548 /* Jump according to whether OP0 is 0.
10549 We assume that OP0 has an integer mode that is too wide
10550 for the available compare insns. */
static void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
10557 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  int i;
  rtx drop_through_label = 0;
10561 if (! if_false_label)
10562 drop_through_label = if_false_label = gen_label_rtx ();
10564 for (i = 0; i < nwords; i++)
      rtx comp = compare_from_rtx (operand_subword_force (op0, i,
							  GET_MODE (op0)),
				   const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
10569 if (comp == const_true_rtx)
10570 emit_jump (if_false_label);
10571 else if (comp != const0_rtx)
10572 do_jump_for_compare (comp, if_false_label, NULL_RTX);
  if (if_true_label)
    emit_jump (if_true_label);
10577 if (drop_through_label)
10578 emit_label (drop_through_label);
10581 /* Given a comparison expression in rtl form, output conditional branches to
10582 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
static void
do_jump_for_compare (comparison, if_false_label, if_true_label)
     rtx comparison, if_false_label, if_true_label;
{
  if (if_true_label)
    {
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
      else
	abort ();

      if (if_false_label)
	emit_jump (if_false_label);
    }
  else if (if_false_label)
    {
      rtx insn;
      rtx prev = get_last_insn ();
      rtx branch = 0;
      /* Output the branch with the opposite condition.  Then try to invert
	 what is generated.  If more than one insn is a branch, or if the
	 branch is not the last insn written, abort.  If we can't invert
	 the branch, make a true label, redirect this jump to that,
	 emit a jump to the false label, and define the true label.  */
10610 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
      else
	abort ();
10615 /* Here we get the first insn that was just emitted. It used to be the
10616 case that, on some machines, emitting the branch would discard
10617 the previous compare insn and emit a replacement. This isn't
10618 done anymore, but abort if we see that PREV is deleted. */
      if (prev == 0)
	insn = get_insns ();
      else if (INSN_DELETED_P (prev))
	abort ();
      else
	insn = NEXT_INSN (prev);
10627 for (; insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == JUMP_INSN)
	  {
	    if (branch)
	      abort ();
	    branch = insn;
	  }

      if (branch != get_last_insn ())
	abort ();
10638 JUMP_LABEL (branch) = if_false_label;
      if (! invert_jump (branch, if_false_label))
	{
10641 if_true_label = gen_label_rtx ();
10642 redirect_jump (branch, if_true_label);
10643 emit_jump (if_false_label);
10644 emit_label (if_true_label);
10649 /* Generate code for a comparison expression EXP
10650 (including code to compute the values to be compared)
10651 and set (CC0) according to the result.
10652 SIGNED_CODE should be the rtx operation for this comparison for
10653 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10655 We force a stack adjustment unless there are currently
10656 things pushed on the stack that aren't yet used. */
static rtx
compare (exp, signed_code, unsigned_code)
     register tree exp;
     enum rtx_code signed_code, unsigned_code;
{
  register rtx op0
    = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  register rtx op1
    = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10667 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
10668 register enum machine_mode mode = TYPE_MODE (type);
10669 int unsignedp = TREE_UNSIGNED (type);
10670 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
10672 #ifdef HAVE_canonicalize_funcptr_for_compare
10673 /* If function pointers need to be "canonicalized" before they can
10674 be reliably compared, then canonicalize them. */
10675 if (HAVE_canonicalize_funcptr_for_compare
10676 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }
10686 if (HAVE_canonicalize_funcptr_for_compare
10687 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
	  == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif
  return compare_from_rtx (op0, op1, code, unsignedp, mode,
			   ((mode == BLKmode)
			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
			   TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
}
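/* A minimal illustrative sketch (not compiled here): the same source-level
   "<" must expand to LT or LTU depending on the signedness of the tree
   type, because one bit pattern orders differently under the two
   interpretations.  */
#if 0
static int
signedness_example (void)
{
  int s = -1;			/* bit pattern 0xff...ff */
  unsigned int u = (unsigned int) s;

  return (s < 0)		/* signed LT against zero: true */
	 && !(u < 1u);		/* unsigned LTU against 1: false, u is huge */
}
#endif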
10704 /* Like compare but expects the values to compare as two rtx's.
10705 The decision as to signed or unsigned comparison must be made by the caller.
   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.
10710 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10711 size of MODE should be used. */
rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
{
  rtx tem;
10724 /* If one operand is constant, make it the second one. Only do this
10725 if the other operand is not constant as well. */
10727 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10728 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }
  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }
10742 do_pending_stack_adjust ();
10744 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;
#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */
10753 /* If this is a signed equality comparison, we can do it as an
10754 unsigned comparison since zero-extension is cheaper than sign
10755 extension and comparisons with zero are done as unsigned. This is
10756 the case even on machines that can do fast sign extension, since
10757 zero-extension is easier to combine with other operations than
10758 sign-extension is. If we are comparing against a constant, we must
10759 convert it to what it would look like unsigned. */
10760 if ((code == EQ || code == NE) && ! unsignedp
10761 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10763 if (GET_CODE (op1) == CONST_INT
10764 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif
10770 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
  return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
}
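/* A minimal illustrative sketch (not compiled here; the helper name is
   hypothetical): moving a constant to the second operand is valid only
   because the condition is swapped along with the operands, e.g. (5 < x)
   becomes (x > 5).  This is the mapping swap_condition is relied on to
   perform.  */
#if 0
static enum rtx_code
swapped_condition (enum rtx_code code)
{
  switch (code)
    {
    case LT:  return GT;
    case GT:  return LT;
    case LE:  return GE;
    case GE:  return LE;
    case LTU: return GTU;
    case GTU: return LTU;
    case LEU: return GEU;
    case GEU: return LEU;
    default:  return code;	/* EQ and NE are symmetric */
    }
}
#endif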
10775 /* Generate code to calculate EXP using a store-flag instruction
10776 and return an rtx for the result. EXP is either a comparison
10777 or a TRUTH_NOT_EXPR whose operand is a comparison.
10779 If TARGET is nonzero, store the result there if convenient.
   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
10785 available on this machine.
10787 Once expand_expr has been called on the arguments of the comparison,
10788 we are committed to doing the store flag, since it is not safe to
10789 re-evaluate the expression. We emit the store-flag insn by calling
10790 emit_store_flag, but only expand the arguments if we have a reason
10791 to believe that emit_store_flag will be successful. If we think that
10792 it will, but it isn't, we have to simulate the store-flag with a
10793 set/jump/set sequence. */
static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
10810 rtx subtarget = target;
10811 rtx result, label, pattern, jump_pat;
10813 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10814 result at the end. We can't simply invert the test since it would
10815 have already been inverted if it were valid. This case occurs for
10816 some floating-point comparisons. */
10818 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10819 invert = 1, exp = TREE_OPERAND (exp, 0);
10821 arg0 = TREE_OPERAND (exp, 0);
10822 arg1 = TREE_OPERAND (exp, 1);
10823 type = TREE_TYPE (arg0);
10824 operand_mode = TYPE_MODE (type);
10825 unsignedp = TREE_UNSIGNED (type);
10827 /* We won't bother with BLKmode store-flag operations because it would mean
10828 passing a lot of information to emit_store_flag. */
  if (operand_mode == BLKmode)
    return 0;
10832 /* We won't bother with store-flag operations involving function pointers
10833 when function pointers must be canonicalized before comparisons. */
10834 #ifdef HAVE_canonicalize_funcptr_for_compare
10835 if (HAVE_canonicalize_funcptr_for_compare
10836 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
10839 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10840 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif
10848 /* Get the rtx comparison code to use. We know that EXP is a comparison
10849 operation of some type. Some comparisons against 1 and -1 can be
10850 converted to comparisons with zero. Do so here so that the tests
10851 below will be aware that we have a comparison with zero. These
10852 tests will not catch constants in the first operand, but constants
10853 are rarely passed as the first operand. */
  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;

    case NE_EXPR:
      code = NE;
      break;

    case LT_EXPR:
      if (integer_onep (arg1))
10865 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;

    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
10871 arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;

    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
10877 arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;

    case GE_EXPR:
      if (integer_onep (arg1))
10883 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    default:
      abort ();
    }
10891 /* Put a constant second. */
10892 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10894 tem = arg0; arg0 = arg1; arg1 = tem;
10895 code = swap_condition (code);
10898 /* If this is an equality or inequality test of a single bit, we can
10899 do this by shifting the bit being tested to the low-order bit and
10900 masking the result with the constant 1. If the condition was EQ,
10901 we xor it with 1. This does not require an scc insn and is faster
10902 than an scc insn even if we have it. */
10904 if ((code == NE || code == EQ)
10905 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10906 && integer_pow2p (TREE_OPERAND (arg0, 1))
10907 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
    {
      tree inner = TREE_OPERAND (arg0, 0);
      HOST_WIDE_INT tem;
      int bitnum;
      int ops_unsignedp;
10914 tem = INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
10915 NULL_RTX, VOIDmode, 0));
10916 /* In this case, immed_double_const will sign extend the value to make
10917 it look the same on the host and target. We must remove the
10918 sign-extension before calling exact_log2, since exact_log2 will
10919 fail for negative values. */
10920 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT
10921 && BITS_PER_WORD == GET_MODE_BITSIZE (TYPE_MODE (type)))
10922 /* We don't use the obvious constant shift to generate the mask,
10923 because that generates compiler warnings when BITS_PER_WORD is
10924 greater than or equal to HOST_BITS_PER_WIDE_INT, even though this
10925 code is unreachable in that case. */
10926 tem = tem & GET_MODE_MASK (word_mode);
10927 bitnum = exact_log2 (tem);
10929 /* If INNER is a right shift of a constant and it plus BITNUM does
10930 not overflow, adjust BITNUM and INNER. */
10932 if (TREE_CODE (inner) == RSHIFT_EXPR
10933 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10934 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10935 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
10936 < TYPE_PRECISION (type)))
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10939 inner = TREE_OPERAND (inner, 0);
10942 /* If we are going to be able to omit the AND below, we must do our
10943 operations as unsigned. If we must use the AND, we have a choice.
10944 Normally unsigned is faster, but for some machines signed is. */
10945 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10946 #ifdef LOAD_EXTEND_OP
		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
		       : 1
#endif
		       );
10953 if (subtarget == 0 || GET_CODE (subtarget) != REG
10954 || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, inner))
	subtarget = 0;
10958 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
      if (bitnum != 0)
	op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10962 size_int (bitnum), subtarget, ops_unsignedp);
10964 if (GET_MODE (op0) != mode)
10965 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10967 if ((code == EQ && ! invert) || (code == NE && invert))
10968 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10969 ops_unsignedp, OPTAB_LIB_WIDEN);
10971 /* Put the AND last so it can combine with more things. */
10972 if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }
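  /* Illustrative note, not from the original source: in C terms this fast
     path computes

	(x & (1 << n)) != 0   as   (x >> n) & 1
	(x & (1 << n)) == 0   as   ((x >> n) & 1) ^ 1

     and the trailing AND is omitted when n is the sign bit, since the shift
     then leaves only one significant bit.  */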
10978 /* Now see if we are likely to be able to do this. Return if not. */
  if (! can_compare_p (operand_mode))
    return 0;
10981 icode = setcc_gen_code[(int) code];
10982 if (icode == CODE_FOR_nothing
10983 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
10985 /* We can only do this if it is one of the special cases that
10986 can be handled without an scc insn. */
10987 if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
10990 else if (BRANCH_COST >= 0
10991 && ! only_cheap && (code == NE || code == EQ)
10992 && TREE_CODE (type) != REAL_TYPE
10993 && ((abs_optab->handlers[(int) operand_mode].insn_code
10994 != CODE_FOR_nothing)
10995 || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }
11002 preexpand_calls (exp);
11003 if (subtarget == 0 || GET_CODE (subtarget) != REG
11004 || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1))
    subtarget = 0;
11008 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
11009 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  if (target == 0)
    target = gen_reg_rtx (mode);
11014 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
11015 because, if the emit_store_flag does anything it will succeed and
11016 OP0 and OP1 will not be used subsequently. */
11018 result = emit_store_flag (target, code,
11019 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
11020 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
11021 operand_mode, unsignedp, 1);
  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }
11031 /* If this failed, we have to do this with set/compare/jump/set code. */
11032 if (GET_CODE (target) != REG
11033 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
11034 target = gen_reg_rtx (GET_MODE (target));
11036 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
11037 result = compare_from_rtx (op0, op1, code, unsignedp,
11038 operand_mode, NULL_RTX, 0);
11039 if (GET_CODE (result) == CONST_INT)
11040 return (((result == const0_rtx && ! invert)
11041 || (result != const0_rtx && invert))
11042 ? const0_rtx : const1_rtx);
11044 label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();
11048 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
11049 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
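/* A minimal illustrative sketch (not compiled here; the function and the
   "<" condition are hypothetical): the fallback emitted above is the
   classic set/jump/set idiom, for the non-inverted case.  */
#if 0
static int
store_flag_fallback (int op0, int op1)
{
  int target = 1;		/* assume the condition holds */

  if (op0 < op1)
    goto done;			/* the conditional jump around the clear */
  target = 0;
 done:
  return target;
}
#endif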
11055 /* Generate a tablejump instruction (used for switch statements). */
11057 #ifdef HAVE_tablejump
11059 /* INDEX is the value being switched on, with the lowest value
11060 in the table already subtracted.
11061 MODE is its expected mode (needed if INDEX is constant).
11062 RANGE is the length of the jump table.
11063 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11065 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11066 index value is out of range. */
void
do_tablejump (index, mode, range, table_label, default_label)
11070 rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
11073 register rtx temp, vector;
11075 /* Do an unsigned comparison (in the proper mode) between the index
11076 expression and the value which represents the length of the range.
11077 Since we just finished subtracting the lower bound of the range
11078 from the index expression, this comparison allows us to simultaneously
11079 check that the original index expression value is both greater than
11080 or equal to the minimum value of the range and less than or equal to
11081 the maximum value of the range. */
11083 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
11084 emit_jump_insn (gen_bgtu (default_label));
11086 /* If index is in range, it must fit in Pmode.
11087 Convert to Pmode so we can index with it. */
11089 index = convert_to_mode (Pmode, index, 1);
  /* Don't let a MEM slip through, because then INDEX that comes
11092 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11093 and break_out_memory_refs will go to work on it and mess it up. */
11094 #ifdef PIC_CASE_VECTOR_ADDRESS
11095 if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif
11099 /* If flag_force_addr were to affect this address
11100 it could interfere with the tricky assumptions made
11101 about addresses that contain label-refs,
11102 which may be valid only very near the tablejump itself. */
11103 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11104 GET_MODE_SIZE, because this indicates how large insns are. The other
11105 uses should all be Pmode, because they are addresses. This code
11106 could fail if addresses and insns are not the same size. */
11107 index = gen_rtx (PLUS, Pmode,
11108 gen_rtx (MULT, Pmode, index,
11109 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
11110 gen_rtx (LABEL_REF, Pmode, table_label));
11111 #ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
11116 index = memory_address_noforce (CASE_VECTOR_MODE, index);
11117 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11118 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
11119 RTX_UNCHANGING_P (vector) = 1;
11120 convert_move (temp, vector, 0);
11122 emit_jump_insn (gen_tablejump (temp, table_label));
11124 #ifndef CASE_VECTOR_PC_RELATIVE
11125 /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! flag_pic)
    emit_barrier ();
#endif
}

#endif /* HAVE_tablejump */
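/* A minimal illustrative sketch (not compiled here; the function is
   hypothetical): the single GTU comparison emitted by do_tablejump
   implements the classic unsigned range-check trick.  */
#if 0
static int
in_case_range (long x, long low, long high)
{
  /* Equivalent to (x >= low && x <= high) when low <= high: values below
     LOW wrap to huge unsigned numbers and fail the same single test.  */
  return (unsigned long) (x - low) <= (unsigned long) (high - low);
}
#endif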
11135 /* Emit a suitable bytecode to load a value from memory, assuming a pointer
11136 to that value is on the top of the stack. The resulting type is TYPE, and
11137 the source declaration is DECL. */
void
bc_load_memory (type, decl)
     tree type, decl;
{
11143 enum bytecode_opcode opcode;
11146 /* Bit fields are special. We only know about signed and
11147 unsigned ints, and enums. The latter are treated as
11148 signed integers. */
11150 if (DECL_BIT_FIELD (decl))
11151 if (TREE_CODE (type) == ENUMERAL_TYPE
11152 || TREE_CODE (type) == INTEGER_TYPE)
      opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
    else
      abort ();
  else
11157 /* See corresponding comment in bc_store_memory. */
11158 if (TYPE_MODE (type) == BLKmode
	|| TYPE_MODE (type) == VOIDmode)
      abort ();
    else
11162 opcode = mode_to_load_map [(int) TYPE_MODE (type)];
  if (opcode == neverneverland)
    abort ();
11167 bc_emit_bytecode (opcode);
11169 #ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}
11175 /* Store the contents of the second stack slot to the address in the
11176 top stack slot. DECL is the declaration of the destination and is used
11177 to determine whether we're dealing with a bitfield. */
void
bc_store_memory (type, decl)
     tree type, decl;
{
11183 enum bytecode_opcode opcode;
11186 if (DECL_BIT_FIELD (decl))
11188 if (TREE_CODE (type) == ENUMERAL_TYPE
	  || TREE_CODE (type) == INTEGER_TYPE)
	opcode = sstoreBI;
      else
	abort ();
    }
  else
11195 if (TYPE_MODE (type) == BLKmode)
    {
      /* Copy structure.  This expands to a block copy instruction, storeBLK.
	 In addition to the arguments expected by the other store instructions,
	 it also expects a type size (SImode) on top of the stack, which is the
	 structure size in size units (usually bytes).  The first two arguments
	 are already on the stack, so we just put the size on level 1.  In some
	 other languages the size may be variable; this is why we don't encode
	 it as a storeBLK literal, but rather treat it as a full-fledged expression.  */
      bc_expand_expr (TYPE_SIZE (type));
      opcode = storeBLK;
    }
  else
11209 opcode = mode_to_store_map [(int) TYPE_MODE (type)];
  if (opcode == neverneverland)
    abort ();
11214 bc_emit_bytecode (opcode);
11216 #ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}
11222 /* Allocate local stack space sufficient to hold a value of the given
11223 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
11224 integral power of 2. A special case is locals of type VOID, which
11225 have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
11226 remapped into the corresponding attribute of SI. */
rtx
bc_allocate_local (size, alignment)
     int size, alignment;
{
  rtx retval;
11233 int byte_alignment;
  /* Normalize size and alignment.  */
  if (!size)
    size = UNITS_PER_WORD;
11242 if (alignment < BITS_PER_UNIT)
11243 byte_alignment = 1 << (INT_ALIGN - 1);
  else
    byte_alignment = alignment / BITS_PER_UNIT;
11248 if (local_vars_size & (byte_alignment - 1))
11249 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
11251 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
  local_vars_size += size;

  return retval;
}
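/* A minimal illustrative sketch (not compiled here; the function is
   hypothetical): the rounding above is the standard align-up computation;
   for a power-of-two alignment it can equivalently be written with masks.  */
#if 0
static int
align_up (int offset, int align)	/* ALIGN must be a power of 2 */
{
  return (offset + align - 1) & ~(align - 1);
}
#endif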
11258 /* Allocate variable-sized local array. Variable-sized arrays are
11259 actually pointers to the address in memory where they are stored. */
rtx
bc_allocate_variable_array (size)
     tree size;
{
  rtx retval;
  const int ptralign = (1 << (PTR_ALIGN - 1));
11268 /* Align pointer */
11269 if (local_vars_size & ptralign)
11270 local_vars_size += ptralign - (local_vars_size & ptralign);
  /* Note down local space needed: pointer to block; also return
     the address of that pointer.  */
11275 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
  local_vars_size += POINTER_SIZE / BITS_PER_UNIT;

  return retval;
}
11281 /* Push the machine address for the given external variable offset. */
void
bc_load_externaddr (externaddr)
     rtx externaddr;
{
  bc_emit_bytecode (constP);
11288 bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
11289 BYTECODE_BC_LABEL (externaddr)->offset);
11291 #ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}
11297 /* Like above, but expects an IDENTIFIER. */
void
bc_load_externaddr_id (id, offset)
     tree id;
     int offset;
{
  if (!IDENTIFIER_POINTER (id))
    abort ();

  bc_emit_bytecode (constP);
11308 bc_emit_code_labelref (xstrdup (IDENTIFIER_POINTER (id)), offset);
11310 #ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}
11316 /* Push the machine address for the given local variable offset. */
void
bc_load_localaddr (localaddr)
     rtx localaddr;
{
  bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
}
11326 /* Push the machine address for the given parameter offset.
11327 NOTE: offset is in bits. */
void
bc_load_parmaddr (parmaddr)
     rtx parmaddr;
{
  bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
			      / BITS_PER_UNIT));
}
11338 /* Convert a[i] into *(a + i). */
tree
bc_canonicalize_array_ref (exp)
     tree exp;
{
11344 tree type = TREE_TYPE (exp);
11345 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
11346 TREE_OPERAND (exp, 0));
11347 tree index = TREE_OPERAND (exp, 1);
11350 /* Convert the integer argument to a type the same size as a pointer
11351 so the multiply won't overflow spuriously. */
11353 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
11354 index = convert (type_for_size (POINTER_SIZE, 0), index);
11356 /* The array address isn't volatile even if the array is.
11357 (Of course this isn't terribly relevant since the bytecode
11358 translator treats nearly everything as volatile anyway.) */
11359 TREE_THIS_VOLATILE (array_adr) = 0;
11361 return build1 (INDIRECT_REF, type,
		 fold (build (PLUS_EXPR,
			      TYPE_POINTER_TO (type),
			      array_adr,
			      fold (build (MULT_EXPR,
					   TYPE_POINTER_TO (type),
					   index,
					   size_in_bytes (type))))));
}
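/* A minimal illustrative sketch (not compiled here; the function is
   hypothetical): the tree built above performs the source-level rewrite
   a[i] => *(a + i), with the index widened to the pointer's precision
   before the multiply so it cannot overflow in a narrower index type.  */
#if 0
static int
element (int *a, long i)
{
  return *(int *) ((char *) a + i * (long) sizeof (int));	/* a[i] */
}
#endif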
11372 /* Load the address of the component referenced by the given
11373 COMPONENT_REF expression.
11375 Returns innermost lvalue. */
tree
bc_expand_component_address (exp)
     tree exp;
{
  tree tem;
  enum machine_mode mode;
  int bitpos = 0;
  HOST_WIDE_INT SIval;
11387 tem = TREE_OPERAND (exp, 1);
11388 mode = DECL_MODE (tem);
11391 /* Compute cumulative bit offset for nested component refs
11392 and array refs, and find the ultimate containing object. */
11394 for (tem = exp;; tem = TREE_OPERAND (tem, 0))
    {
      if (TREE_CODE (tem) == COMPONENT_REF)
11397 bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
      else if (TREE_CODE (tem) == ARRAY_REF
11400 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11401 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
11403 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
11404 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
		   /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
      else
	break;
    }
11410 bc_expand_expr (tem);
11413 /* For bitfields also push their offset and size */
11414 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
11415 bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1)));
11417 if (SIval = bitpos / BITS_PER_UNIT)
11418 bc_emit_instruction (addconstPSI, SIval);
  return (TREE_OPERAND (exp, 1));
}
11424 /* Emit code to push two SI constants */
void
bc_push_offset_and_size (offset, size)
     HOST_WIDE_INT offset, size;
{
11430 bc_emit_instruction (constSI, offset);
  bc_emit_instruction (constSI, size);
}
11435 /* Emit byte code to push the address of the given lvalue expression to
11436 the stack. If it's a bit field, we also push offset and size info.
11438 Returns innermost component, which allows us to determine not only
11439 its type, but also whether it's a bitfield. */
tree
bc_expand_address (exp)
     tree exp;
{
  if (!exp || TREE_CODE (exp) == ERROR_MARK)
    return (exp);
11450 switch (TREE_CODE (exp))
    {
    case ARRAY_REF:
      return (bc_expand_address (bc_canonicalize_array_ref (exp)));
11456 case COMPONENT_REF:
11458 return (bc_expand_component_address (exp));
    case INDIRECT_REF:
      bc_expand_expr (TREE_OPERAND (exp, 0));
11464 /* For variable-sized types: retrieve pointer. Sometimes the
11465 TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
11466 also make sure we have an operand, just in case... */
11468 if (TREE_OPERAND (exp, 0)
11469 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
11470 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
11471 bc_emit_instruction (loadP);
11473 /* If packed, also return offset and size */
11474 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
11476 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
11477 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
11479 return (TREE_OPERAND (exp, 0));
11481 case FUNCTION_DECL:
11483 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
			     BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
      break;

    case PARM_DECL:
      bc_load_parmaddr (DECL_RTL (exp));
11491 /* For variable-sized types: retrieve pointer */
11492 if (TYPE_SIZE (TREE_TYPE (exp))
11493 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11494 bc_emit_instruction (loadP);
11496 /* If packed, also return offset and size */
11497 if (DECL_BIT_FIELD (exp))
11498 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
				 TREE_INT_CST_LOW (DECL_SIZE (exp)));
      break;

    case RESULT_DECL:
      bc_emit_instruction (returnP);
      break;

    case VAR_DECL:
      if (BYTECODE_LABEL (DECL_RTL (exp)))
	bc_load_externaddr (DECL_RTL (exp));
      else if (DECL_EXTERNAL (exp))
	bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
			       (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
      else
	bc_load_localaddr (DECL_RTL (exp));
11521 /* For variable-sized types: retrieve pointer */
11522 if (TYPE_SIZE (TREE_TYPE (exp))
11523 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11524 bc_emit_instruction (loadP);
11526 /* If packed, also return offset and size */
11527 if (DECL_BIT_FIELD (exp))
11528 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
				 TREE_INT_CST_LOW (DECL_SIZE (exp)));
      break;

    case STRING_CST:
      {
	rtx r;

	bc_emit_bytecode (constP);
	r = output_constant_def (exp);
11539 bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
11541 #ifdef DEBUG_PRINT_CODE
	fputc ('\n', stderr);
#endif
      }
      break;

    default:
      abort ();
    }

  /* Most lvalues don't have components.  */
  return (exp);
}
11558 /* Emit a type code to be used by the runtime support in handling
11559 parameter passing. The type code consists of the machine mode
11560 plus the minimal alignment shifted left 8 bits. */
tree
bc_runtime_type_code (type)
     tree type;
{
  int val;

  switch (TREE_CODE (type))
    {
    case VOID_TYPE:
    case INTEGER_TYPE:
    case REAL_TYPE:
    case COMPLEX_TYPE:
    case ENUMERAL_TYPE:
    case POINTER_TYPE:
    case RECORD_TYPE:
      val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
      break;
    case ERROR_MARK:
      val = 0;
      break;

    default:
      abort ();
    }

  return build_int_2 (val, 0);
}
11594 /* Generate constructor label */
static char *
bc_gen_constr_label ()
{
11599 static int label_counter;
11600 static char label[20];
11602 sprintf (label, "*LR%d", label_counter++);
  return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
}
11608 /* Evaluate constructor CONSTR and return pointer to it on level one. We
11609 expand the constructor data as static data, and push a pointer to it.
11610 The pointer is put in the pointer table and is retrieved by a constP
11611 bytecode instruction. We then loop and store each constructor member in
   the corresponding component.  Finally, we return the original pointer on
   the stack.  */

void
bc_expand_constructor (constr)
     tree constr;
{
  char *l;
  HOST_WIDE_INT ptroffs;
11624 /* Literal constructors are handled as constants, whereas
11625 non-literals are evaluated and stored element by element
11626 into the data segment. */
  /* Allocate space in proper segment and push pointer to space on stack.  */

  l = bc_gen_constr_label ();
  if (TREE_CONSTANT (constr))
    {
      text_section ();

      bc_emit_const_labeldef (l);
      bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
    }
  else
    {
      data_section ();

      bc_emit_data_labeldef (l);
      bc_output_data_constructor (constr);
    }
11649 /* Add reference to pointer table and recall pointer to stack;
11650 this code is common for both types of constructors: literals
11651 and non-literals. */
11653 ptroffs = bc_define_pointer (l);
11654 bc_emit_instruction (constP, ptroffs);
11656 /* This is all that has to be done if it's a literal. */
  if (TREE_CONSTANT (constr))
    return;
11661 /* At this point, we have the pointer to the structure on top of the stack.
11662 Generate sequences of store_memory calls for the constructor. */
11664 /* constructor type is structure */
  if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
    {
      register tree elt;
11669 /* If the constructor has fewer fields than the structure,
11670 clear the whole structure first. */
11672 if (list_length (CONSTRUCTOR_ELTS (constr))
11673 != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
	{
	  bc_emit_instruction (duplicate);
	  bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
	  bc_emit_instruction (clearBLK);
	}
11680 /* Store each element of the constructor into the corresponding
11681 field of TARGET. */
11683 for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
	  {
	    register tree field = TREE_PURPOSE (elt);
	    register enum machine_mode mode;
	    int bitsize;
	    int bitpos;
	    int unsignedp;
11691 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
11692 mode = DECL_MODE (field);
11693 unsignedp = TREE_UNSIGNED (field);
11695 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
11697 bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11698 /* The alignment of TARGET is
11699 at least what its type requires. */
			    VOIDmode, 0,
			    TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11702 int_size_in_bytes (TREE_TYPE (constr)));
	  }
    }
  else
    /* Constructor type is array */
    if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
      {
	register tree elt;
	register int i;
	tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
11713 int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
11714 int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
11715 tree elttype = TREE_TYPE (TREE_TYPE (constr));
11717 /* If the constructor has fewer fields than the structure,
11718 clear the whole structure first. */
11720 if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
	  {
	    bc_emit_instruction (duplicate);
	    bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
	    bc_emit_instruction (clearBLK);
	  }
11728 /* Store each element of the constructor into the corresponding
11729 element of TARGET, determined by counting the elements. */
	for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
	     elt;
	     elt = TREE_CHAIN (elt), i++)
	  {
	    register enum machine_mode mode;
	    int bitsize;
	    int bitpos;
	    int unsignedp;
11740 mode = TYPE_MODE (elttype);
11741 bitsize = GET_MODE_BITSIZE (mode);
11742 unsignedp = TREE_UNSIGNED (elttype);
11744 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
11745 /* * TYPE_SIZE_UNIT (elttype) */ );
11747 bc_store_field (elt, bitsize, bitpos, mode,
11748 TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11749 /* The alignment of TARGET is
11750 at least what its type requires. */
			    VOIDmode, 0,
			    TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
			    int_size_in_bytes (TREE_TYPE (constr)));
	  }
      }
}
11760 /* Store the value of EXP (an expression tree) into member FIELD of
11761 structure at address on stack, which has type TYPE, mode MODE and
   occupies BITSIZE bits, starting BITPOS bits from the beginning of the
   structure.

   ALIGN is the alignment that TARGET is known to have, measured in bytes.
11766 TOTAL_SIZE is its size in bytes, or -1 if variable. */
static void
bc_store_field (field, bitsize, bitpos, mode, exp, type,
11770 value_mode, unsignedp, align, total_size)
11771 int bitsize, bitpos;
11772 enum machine_mode mode;
11773 tree field, exp, type;
     enum machine_mode value_mode;
     int unsignedp;
     int align;
     int total_size;
{
11780 /* Expand expression and copy pointer */
11781 bc_expand_expr (exp);
11782 bc_emit_instruction (over);
11785 /* If the component is a bit field, we cannot use addressing to access
11786 it. Use bit-field techniques to store in it. */
11788 if (DECL_BIT_FIELD (field))
    {
      bc_store_bit_field (bitpos, bitsize, unsignedp);
      return;
    }

  /* Not bit field */
  {
11796 HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;
11798 /* Advance pointer to the desired member */
    if (offset)
      bc_emit_instruction (addconstPSI, offset);
    bc_store_memory (type, field);
  }
}
11808 /* Store SI/SU in bitfield */
void
bc_store_bit_field (offset, size, unsignedp)
     int offset, size, unsignedp;
{
11814 /* Push bitfield offset and size */
11815 bc_push_offset_and_size (offset, size);
  /* Store */
  bc_emit_instruction (sstoreBI);
}
11822 /* Load SI/SU from bitfield */
void
bc_load_bit_field (offset, size, unsignedp)
     int offset, size, unsignedp;
{
11828 /* Push bitfield offset and size */
11829 bc_push_offset_and_size (offset, size);
11831 /* Load: sign-extend if signed, else zero-extend */
  bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
}
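/* A minimal illustrative sketch (not compiled here; the functions are
   hypothetical): the interpreter's zero- and sign-extending bitfield loads
   correspond to the usual shift-and-mask forms, assuming bits are numbered
   from the least significant end and SIZE is smaller than the word width.  */
#if 0
static unsigned long
load_field_unsigned (unsigned long word, int offset, int size)
{
  return (word >> offset) & ((1UL << size) - 1);	/* zxloadBI */
}

static long
load_field_signed (unsigned long word, int offset, int size)
{
  int spare = sizeof (long) * 8 - size;

  /* Left-justify the field, then rely on an arithmetic right shift to
     sign-extend (implementation-defined in C, but the intent of sxloadBI).  */
  return ((long) (word << (spare - offset))) >> spare;
}
#endif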
11836 /* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
11837 (adjust stack pointer upwards), negative means add that number of
11838 levels (adjust the stack pointer downwards). Only positive values
11839 normally make sense. */
void
bc_adjust_stack (nlevels)
     int nlevels;
{
  switch (nlevels)
    {
    case 0:
      break;

    case 2:
      bc_emit_instruction (drop);

    case 1:
      bc_emit_instruction (drop);
      break;

    default:
      bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
      stack_depth -= nlevels;
    }
11863 #if defined (VALIDATE_STACK_FOR_BC)
  VALIDATE_STACK_FOR_BC ();
#endif
}