1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 92, 93, 94, 95, 1996 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
29 #include "hard-reg-set.h"
32 #include "insn-flags.h"
33 #include "insn-codes.h"
35 #include "insn-config.h"
38 #include "typeclass.h"
41 #include "bc-opcode.h"
42 #include "bc-typecd.h"
47 #define CEIL(x,y) (((x) + (y) - 1) / (y))
49 /* Decide whether a function's arguments should be processed
50 from first to last or from last to first.
52 They should if the stack and args grow in opposite directions, but
53 only if we have push insns. */
57 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
58 #define PUSH_ARGS_REVERSED /* If it's last to first */
63 #ifndef STACK_PUSH_CODE
64 #ifdef STACK_GROWS_DOWNWARD
65 #define STACK_PUSH_CODE PRE_DEC
67 #define STACK_PUSH_CODE PRE_INC
71 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
72 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
74 /* If this is nonzero, we do not bother generating VOLATILE
75 around volatile memory references, and we are willing to
76 output indirect addresses. If cse is to follow, we reject
77 indirect addresses so a useful potential cse is generated;
78 if it is used only once, instruction combination will produce
79 the same indirect address eventually. */
82 /* Nonzero to generate code for all the subroutines within an
83 expression before generating the upper levels of the expression.
84 Nowadays this is never zero. */
85 int do_preexpand_calls = 1;
87 /* Number of units that we should eventually pop off the stack.
88 These are the arguments to function calls that have already returned. */
89 int pending_stack_adjust;
91 /* Nonzero means stack pops must not be deferred, and deferred stack
92 pops must not be output. It is nonzero inside a function call,
93 inside a conditional expression, inside a statement expression,
94 and in other cases as well. */
95 int inhibit_defer_pop;
97 /* A list of all cleanups which belong to the arguments of
98 function calls being expanded by expand_call. */
99 tree cleanups_this_call;
101 /* When temporaries are created by TARGET_EXPRs, they are created at
102 this level of temp_slot_level, so that they can remain allocated
103 until no longer needed. CLEANUP_POINT_EXPRs define the lifetime
105 int target_temp_slot_level;
107 /* Nonzero means __builtin_saveregs has already been done in this function.
108 The value is the pseudoreg containing the value __builtin_saveregs
110 static rtx saveregs_value;
112 /* Similarly for __builtin_apply_args. */
113 static rtx apply_args_value;
115 /* This structure is used by move_by_pieces to describe the move to
118 struct move_by_pieces
128 int explicit_inc_from;
135 /* This structure is used by clear_by_pieces to describe the clear to
138 struct clear_by_pieces
150 /* Used to generate bytecodes: keep track of size of local variables,
151 as well as depth of arithmetic stack. (Notice that variables are
152 stored on the machine's stack, not the arithmetic stack.) */
154 extern int local_vars_size;
155 extern int stack_depth;
156 extern int max_stack_depth;
157 extern struct obstack permanent_obstack;
158 extern rtx arg_pointer_save_area;
160 static rtx enqueue_insn PROTO((rtx, rtx));
161 static int queued_subexp_p PROTO((rtx));
162 static void init_queue PROTO((void));
163 static void move_by_pieces PROTO((rtx, rtx, int, int));
164 static int move_by_pieces_ninsns PROTO((unsigned int, int));
165 static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
166 struct move_by_pieces *));
167 static void clear_by_pieces PROTO((rtx, int, int));
168 static void clear_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
169 struct clear_by_pieces *));
170 static int is_zeros_p PROTO((tree));
171 static int mostly_zeros_p PROTO((tree));
172 static void store_constructor PROTO((tree, rtx, int));
173 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
174 enum machine_mode, int, int, int));
175 static int get_inner_unaligned_p PROTO((tree));
176 static tree save_noncopied_parts PROTO((tree, tree));
177 static tree init_noncopied_parts PROTO((tree, tree));
178 static int safe_from_p PROTO((rtx, tree));
179 static int fixed_type_p PROTO((tree));
180 static int get_pointer_alignment PROTO((tree, unsigned));
181 static tree string_constant PROTO((tree, tree *));
182 static tree c_strlen PROTO((tree));
183 static rtx expand_builtin PROTO((tree, rtx, rtx,
184 enum machine_mode, int));
185 static int apply_args_size PROTO((void));
186 static int apply_result_size PROTO((void));
187 static rtx result_vector PROTO((int, rtx));
188 static rtx expand_builtin_apply_args PROTO((void));
189 static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
190 static void expand_builtin_return PROTO((rtx));
191 static rtx expand_increment PROTO((tree, int, int));
192 void bc_expand_increment PROTO((struct increment_operator *, tree));
193 rtx bc_allocate_local PROTO((int, int));
194 void bc_store_memory PROTO((tree, tree));
195 tree bc_expand_component_address PROTO((tree));
196 tree bc_expand_address PROTO((tree));
197 void bc_expand_constructor PROTO((tree));
198 void bc_adjust_stack PROTO((int));
199 tree bc_canonicalize_array_ref PROTO((tree));
200 void bc_load_memory PROTO((tree, tree));
201 void bc_load_externaddr PROTO((rtx));
202 void bc_load_externaddr_id PROTO((tree, int));
203 void bc_load_localaddr PROTO((rtx));
204 void bc_load_parmaddr PROTO((rtx));
205 static void preexpand_calls PROTO((tree));
206 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
207 void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
208 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
209 static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
210 static void do_jump_for_compare PROTO((rtx, rtx, rtx));
211 static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
212 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
213 static tree defer_cleanups_to PROTO((tree));
214 extern tree truthvalue_conversion PROTO((tree));
216 /* Record for each mode whether we can move a register directly to or
217 from an object of that mode in memory. If we can't, we won't try
218 to use that mode directly when accessing a field of that mode. */
220 static char direct_load[NUM_MACHINE_MODES];
221 static char direct_store[NUM_MACHINE_MODES];
223 /* MOVE_RATIO is the number of move instructions that is better than
227 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
230 /* A value of around 6 would minimize code size; infinity would minimize
232 #define MOVE_RATIO 15
236 /* This array records the insn_code of insns to perform block moves. */
237 enum insn_code movstr_optab[NUM_MACHINE_MODES];
239 /* This array records the insn_code of insns to perform block clears. */
240 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
242 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
244 #ifndef SLOW_UNALIGNED_ACCESS
245 #define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
248 /* Register mappings for target machines without register windows. */
249 #ifndef INCOMING_REGNO
250 #define INCOMING_REGNO(OUT) (OUT)
252 #ifndef OUTGOING_REGNO
253 #define OUTGOING_REGNO(IN) (IN)
256 /* Maps used to convert modes to const, load, and store bytecodes. */
257 enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
258 enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
259 enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
261 /* Initialize maps used to convert modes to const, load, and store
265 bc_init_mode_to_opcode_maps ()
269 for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
270 mode_to_const_map[mode] =
271 mode_to_load_map[mode] =
272 mode_to_store_map[mode] = neverneverland;
274 #define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
275 mode_to_const_map[(int) SYM] = CONST; \
276 mode_to_load_map[(int) SYM] = LOAD; \
277 mode_to_store_map[(int) SYM] = STORE;
279 #include "modemap.def"
283 /* This is run once per compilation to set up which modes can be used
284 directly in memory and to initialize the block move optab. */
290 enum machine_mode mode;
291 /* Try indexing by frame ptr and try by stack ptr.
292 It is known that on the Convex the stack ptr isn't a valid index.
293 With luck, one or the other is valid on any machine. */
294 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
295 rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
298 insn = emit_insn (gen_rtx (SET, 0, 0));
299 pat = PATTERN (insn);
301 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
302 mode = (enum machine_mode) ((int) mode + 1))
308 direct_load[(int) mode] = direct_store[(int) mode] = 0;
309 PUT_MODE (mem, mode);
310 PUT_MODE (mem1, mode);
312 /* See if there is some register that can be used in this mode and
313 directly loaded or stored from memory. */
315 if (mode != VOIDmode && mode != BLKmode)
316 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
317 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
320 if (! HARD_REGNO_MODE_OK (regno, mode))
323 reg = gen_rtx (REG, mode, regno);
326 SET_DEST (pat) = reg;
327 if (recog (pat, insn, &num_clobbers) >= 0)
328 direct_load[(int) mode] = 1;
330 SET_SRC (pat) = mem1;
331 SET_DEST (pat) = reg;
332 if (recog (pat, insn, &num_clobbers) >= 0)
333 direct_load[(int) mode] = 1;
336 SET_DEST (pat) = mem;
337 if (recog (pat, insn, &num_clobbers) >= 0)
338 direct_store[(int) mode] = 1;
341 SET_DEST (pat) = mem1;
342 if (recog (pat, insn, &num_clobbers) >= 0)
343 direct_store[(int) mode] = 1;
350 /* This is run at the start of compiling a function. */
357 pending_stack_adjust = 0;
358 inhibit_defer_pop = 0;
359 cleanups_this_call = 0;
361 apply_args_value = 0;
365 /* Save all variables describing the current status into the structure *P.
366 This is used before starting a nested function. */
372 /* Instead of saving the postincrement queue, empty it. */
375 p->pending_stack_adjust = pending_stack_adjust;
376 p->inhibit_defer_pop = inhibit_defer_pop;
377 p->cleanups_this_call = cleanups_this_call;
378 p->saveregs_value = saveregs_value;
379 p->apply_args_value = apply_args_value;
380 p->forced_labels = forced_labels;
382 pending_stack_adjust = 0;
383 inhibit_defer_pop = 0;
384 cleanups_this_call = 0;
386 apply_args_value = 0;
390 /* Restore all variables describing the current status from the structure *P.
391 This is used after a nested function. */
394 restore_expr_status (p)
397 pending_stack_adjust = p->pending_stack_adjust;
398 inhibit_defer_pop = p->inhibit_defer_pop;
399 cleanups_this_call = p->cleanups_this_call;
400 saveregs_value = p->saveregs_value;
401 apply_args_value = p->apply_args_value;
402 forced_labels = p->forced_labels;
405 /* Manage the queue of increment instructions to be output
406 for POSTINCREMENT_EXPR expressions, etc. */
408 static rtx pending_chain;
410 /* Queue up to increment (or change) VAR later. BODY says how:
411 BODY should be the same thing you would pass to emit_insn
412 to increment right away. It will go to emit_insn later on.
414 The value is a QUEUED expression to be used in place of VAR
415 where you want to guarantee the pre-incrementation value of VAR. */
418 enqueue_insn (var, body)
421 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
422 var, NULL_RTX, NULL_RTX, body, pending_chain);
423 return pending_chain;
426 /* Use protect_from_queue to convert a QUEUED expression
427 into something that you can put immediately into an instruction.
428 If the queued incrementation has not happened yet,
429 protect_from_queue returns the variable itself.
430 If the incrementation has happened, protect_from_queue returns a temp
431 that contains a copy of the old value of the variable.
433 Any time an rtx which might possibly be a QUEUED is to be put
434 into an instruction, it must be passed through protect_from_queue first.
435 QUEUED expressions are not meaningful in instructions.
437 Do not pass a value through protect_from_queue and then hold
438 on to it for a while before putting it in an instruction!
439 If the queue is flushed in between, incorrect code will result. */
442 protect_from_queue (x, modify)
446 register RTX_CODE code = GET_CODE (x);
448 #if 0 /* A QUEUED can hang around after the queue is forced out. */
449 /* Shortcut for most common case. */
450 if (pending_chain == 0)
456 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
457 use of autoincrement. Make a copy of the contents of the memory
458 location rather than a copy of the address, but not if the value is
459 of mode BLKmode. Don't modify X in place since it might be
461 if (code == MEM && GET_MODE (x) != BLKmode
462 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
464 register rtx y = XEXP (x, 0);
465 register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));
467 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
468 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
469 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
473 register rtx temp = gen_reg_rtx (GET_MODE (new));
474 emit_insn_before (gen_move_insn (temp, new),
480 /* Otherwise, recursively protect the subexpressions of all
481 the kinds of rtx's that can contain a QUEUED. */
484 rtx tem = protect_from_queue (XEXP (x, 0), 0);
485 if (tem != XEXP (x, 0))
491 else if (code == PLUS || code == MULT)
493 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
494 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
495 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
504 /* If the increment has not happened, use the variable itself. */
505 if (QUEUED_INSN (x) == 0)
506 return QUEUED_VAR (x);
507 /* If the increment has happened and a pre-increment copy exists,
509 if (QUEUED_COPY (x) != 0)
510 return QUEUED_COPY (x);
511 /* The increment has happened but we haven't set up a pre-increment copy.
512 Set one up now, and use it. */
513 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
514 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
516 return QUEUED_COPY (x);
519 /* Return nonzero if X contains a QUEUED expression:
520 if it contains anything that will be altered by a queued increment.
521 We handle only combinations of MEM, PLUS, MINUS and MULT operators
522 since memory addresses generally contain only those. */
528 register enum rtx_code code = GET_CODE (x);
534 return queued_subexp_p (XEXP (x, 0));
538 return queued_subexp_p (XEXP (x, 0))
539 || queued_subexp_p (XEXP (x, 1));
544 /* Perform all the pending incrementations. */
550 while (p = pending_chain)
552 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
553 pending_chain = QUEUED_NEXT (p);
564 /* Copy data from FROM to TO, where the machine modes are not the same.
565 Both modes may be integer, or both may be floating.
566 UNSIGNEDP should be nonzero if FROM is an unsigned type.
567 This causes zero-extension instead of sign-extension. */
570 convert_move (to, from, unsignedp)
571 register rtx to, from;
574 enum machine_mode to_mode = GET_MODE (to);
575 enum machine_mode from_mode = GET_MODE (from);
576 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
577 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
581 /* rtx code for making an equivalent value. */
582 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
584 to = protect_from_queue (to, 1);
585 from = protect_from_queue (from, 0);
587 if (to_real != from_real)
590 /* If FROM is a SUBREG that indicates that we have already done at least
591 the required extension, strip it. We don't handle such SUBREGs as
594 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
595 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
596 >= GET_MODE_SIZE (to_mode))
597 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
598 from = gen_lowpart (to_mode, from), from_mode = to_mode;
600 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
603 if (to_mode == from_mode
604 || (from_mode == VOIDmode && CONSTANT_P (from)))
606 emit_move_insn (to, from);
614 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
616 /* Try converting directly if the insn is supported. */
617 if ((code = can_extend_p (to_mode, from_mode, 0))
620 emit_unop_insn (code, to, from, UNKNOWN);
625 #ifdef HAVE_trunchfqf2
626 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
628 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
632 #ifdef HAVE_truncsfqf2
633 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
635 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
639 #ifdef HAVE_truncdfqf2
640 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
642 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
646 #ifdef HAVE_truncxfqf2
647 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
649 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
653 #ifdef HAVE_trunctfqf2
654 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
656 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
661 #ifdef HAVE_trunctqfhf2
662 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
664 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
668 #ifdef HAVE_truncsfhf2
669 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
671 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
675 #ifdef HAVE_truncdfhf2
676 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
678 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
682 #ifdef HAVE_truncxfhf2
683 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
685 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
689 #ifdef HAVE_trunctfhf2
690 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
692 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
697 #ifdef HAVE_truncsftqf2
698 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
700 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
704 #ifdef HAVE_truncdftqf2
705 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
707 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
711 #ifdef HAVE_truncxftqf2
712 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
714 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
718 #ifdef HAVE_trunctftqf2
719 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
721 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
726 #ifdef HAVE_truncdfsf2
727 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
729 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
733 #ifdef HAVE_truncxfsf2
734 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
736 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
740 #ifdef HAVE_trunctfsf2
741 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
743 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
747 #ifdef HAVE_truncxfdf2
748 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
750 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
754 #ifdef HAVE_trunctfdf2
755 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
757 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
769 libcall = extendsfdf2_libfunc;
773 libcall = extendsfxf2_libfunc;
777 libcall = extendsftf2_libfunc;
786 libcall = truncdfsf2_libfunc;
790 libcall = extenddfxf2_libfunc;
794 libcall = extenddftf2_libfunc;
803 libcall = truncxfsf2_libfunc;
807 libcall = truncxfdf2_libfunc;
816 libcall = trunctfsf2_libfunc;
820 libcall = trunctfdf2_libfunc;
826 if (libcall == (rtx) 0)
827 /* This conversion is not implemented yet. */
830 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
832 emit_move_insn (to, value);
836 /* Now both modes are integers. */
838 /* Handle expanding beyond a word. */
839 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
840 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
847 enum machine_mode lowpart_mode;
848 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
850 /* Try converting directly if the insn is supported. */
851 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
854 /* If FROM is a SUBREG, put it into a register. Do this
855 so that we always generate the same set of insns for
856 better cse'ing; if an intermediate assignment occurred,
857 we won't be doing the operation directly on the SUBREG. */
858 if (optimize > 0 && GET_CODE (from) == SUBREG)
859 from = force_reg (from_mode, from);
860 emit_unop_insn (code, to, from, equiv_code);
863 /* Next, try converting via full word. */
864 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
865 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
866 != CODE_FOR_nothing))
868 if (GET_CODE (to) == REG)
869 emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
870 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
871 emit_unop_insn (code, to,
872 gen_lowpart (word_mode, to), equiv_code);
876 /* No special multiword conversion insn; do it by hand. */
879 /* Since we will turn this into a no conflict block, we must ensure
880 that the source does not overlap the target. */
882 if (reg_overlap_mentioned_p (to, from))
883 from = force_reg (from_mode, from);
885 /* Get a copy of FROM widened to a word, if necessary. */
886 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
887 lowpart_mode = word_mode;
889 lowpart_mode = from_mode;
891 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
893 lowpart = gen_lowpart (lowpart_mode, to);
894 emit_move_insn (lowpart, lowfrom);
896 /* Compute the value to put in each remaining word. */
898 fill_value = const0_rtx;
903 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
904 && STORE_FLAG_VALUE == -1)
906 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
908 fill_value = gen_reg_rtx (word_mode);
909 emit_insn (gen_slt (fill_value));
915 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
916 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
918 fill_value = convert_to_mode (word_mode, fill_value, 1);
922 /* Fill the remaining words. */
923 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
925 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
926 rtx subword = operand_subword (to, index, 1, to_mode);
931 if (fill_value != subword)
932 emit_move_insn (subword, fill_value);
935 insns = get_insns ();
938 emit_no_conflict_block (insns, to, from, NULL_RTX,
939 gen_rtx (equiv_code, to_mode, copy_rtx (from)));
943 /* Truncating multi-word to a word or less. */
944 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
945 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
947 if (!((GET_CODE (from) == MEM
948 && ! MEM_VOLATILE_P (from)
949 && direct_load[(int) to_mode]
950 && ! mode_dependent_address_p (XEXP (from, 0)))
951 || GET_CODE (from) == REG
952 || GET_CODE (from) == SUBREG))
953 from = force_reg (from_mode, from);
954 convert_move (to, gen_lowpart (word_mode, from), 0);
958 /* Handle pointer conversion */ /* SPEE 900220 */
959 if (to_mode == PSImode)
961 if (from_mode != SImode)
962 from = convert_to_mode (SImode, from, unsignedp);
964 #ifdef HAVE_truncsipsi2
965 if (HAVE_truncsipsi2)
967 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
970 #endif /* HAVE_truncsipsi2 */
974 if (from_mode == PSImode)
976 if (to_mode != SImode)
978 from = convert_to_mode (SImode, from, unsignedp);
983 #ifdef HAVE_extendpsisi2
984 if (HAVE_extendpsisi2)
986 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
989 #endif /* HAVE_extendpsisi2 */
994 if (to_mode == PDImode)
996 if (from_mode != DImode)
997 from = convert_to_mode (DImode, from, unsignedp);
999 #ifdef HAVE_truncdipdi2
1000 if (HAVE_truncdipdi2)
1002 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1005 #endif /* HAVE_truncdipdi2 */
1009 if (from_mode == PDImode)
1011 if (to_mode != DImode)
1013 from = convert_to_mode (DImode, from, unsignedp);
1018 #ifdef HAVE_extendpdidi2
1019 if (HAVE_extendpdidi2)
1021 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1024 #endif /* HAVE_extendpdidi2 */
1029 /* Now follow all the conversions between integers
1030 no more than a word long. */
1032 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1033 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1034 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1035 GET_MODE_BITSIZE (from_mode)))
1037 if (!((GET_CODE (from) == MEM
1038 && ! MEM_VOLATILE_P (from)
1039 && direct_load[(int) to_mode]
1040 && ! mode_dependent_address_p (XEXP (from, 0)))
1041 || GET_CODE (from) == REG
1042 || GET_CODE (from) == SUBREG))
1043 from = force_reg (from_mode, from);
1044 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1045 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1046 from = copy_to_reg (from);
1047 emit_move_insn (to, gen_lowpart (to_mode, from));
1051 /* Handle extension. */
1052 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1054 /* Convert directly if that works. */
1055 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1056 != CODE_FOR_nothing)
1058 emit_unop_insn (code, to, from, equiv_code);
1063 enum machine_mode intermediate;
1065 /* Search for a mode to convert via. */
1066 for (intermediate = from_mode; intermediate != VOIDmode;
1067 intermediate = GET_MODE_WIDER_MODE (intermediate))
1068 if (((can_extend_p (to_mode, intermediate, unsignedp)
1069 != CODE_FOR_nothing)
1070 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1071 && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
1072 && (can_extend_p (intermediate, from_mode, unsignedp)
1073 != CODE_FOR_nothing))
1075 convert_move (to, convert_to_mode (intermediate, from,
1076 unsignedp), unsignedp);
1080 /* No suitable intermediate mode. */
1085 /* Support special truncate insns for certain modes. */
1087 if (from_mode == DImode && to_mode == SImode)
1089 #ifdef HAVE_truncdisi2
1090 if (HAVE_truncdisi2)
1092 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1096 convert_move (to, force_reg (from_mode, from), unsignedp);
1100 if (from_mode == DImode && to_mode == HImode)
1102 #ifdef HAVE_truncdihi2
1103 if (HAVE_truncdihi2)
1105 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1109 convert_move (to, force_reg (from_mode, from), unsignedp);
1113 if (from_mode == DImode && to_mode == QImode)
1115 #ifdef HAVE_truncdiqi2
1116 if (HAVE_truncdiqi2)
1118 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1122 convert_move (to, force_reg (from_mode, from), unsignedp);
1126 if (from_mode == SImode && to_mode == HImode)
1128 #ifdef HAVE_truncsihi2
1129 if (HAVE_truncsihi2)
1131 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1135 convert_move (to, force_reg (from_mode, from), unsignedp);
1139 if (from_mode == SImode && to_mode == QImode)
1141 #ifdef HAVE_truncsiqi2
1142 if (HAVE_truncsiqi2)
1144 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1148 convert_move (to, force_reg (from_mode, from), unsignedp);
1152 if (from_mode == HImode && to_mode == QImode)
1154 #ifdef HAVE_trunchiqi2
1155 if (HAVE_trunchiqi2)
1157 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1161 convert_move (to, force_reg (from_mode, from), unsignedp);
1165 if (from_mode == TImode && to_mode == DImode)
1167 #ifdef HAVE_trunctidi2
1168 if (HAVE_trunctidi2)
1170 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1174 convert_move (to, force_reg (from_mode, from), unsignedp);
1178 if (from_mode == TImode && to_mode == SImode)
1180 #ifdef HAVE_trunctisi2
1181 if (HAVE_trunctisi2)
1183 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1187 convert_move (to, force_reg (from_mode, from), unsignedp);
1191 if (from_mode == TImode && to_mode == HImode)
1193 #ifdef HAVE_trunctihi2
1194 if (HAVE_trunctihi2)
1196 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1200 convert_move (to, force_reg (from_mode, from), unsignedp);
1204 if (from_mode == TImode && to_mode == QImode)
1206 #ifdef HAVE_trunctiqi2
1207 if (HAVE_trunctiqi2)
1209 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1213 convert_move (to, force_reg (from_mode, from), unsignedp);
1217 /* Handle truncation of volatile memrefs, and so on;
1218 the things that couldn't be truncated directly,
1219 and for which there was no special instruction. */
1220 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1222 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1223 emit_move_insn (to, temp);
1227 /* Mode combination is not recognized. */
1231 /* Return an rtx for a value that would result
1232 from converting X to mode MODE.
1233 Both X and MODE may be floating, or both integer.
1234 UNSIGNEDP is nonzero if X is an unsigned value.
1235 This can be done by referring to a part of X in place
1236 or by copying to a new temporary with conversion.
1238 This function *must not* call protect_from_queue
1239 except when putting X into an insn (in which case convert_move does it). */
1242 convert_to_mode (mode, x, unsignedp)
1243 enum machine_mode mode;
1247 return convert_modes (mode, VOIDmode, x, unsignedp);
1250 /* Return an rtx for a value that would result
1251 from converting X from mode OLDMODE to mode MODE.
1252 Both modes may be floating, or both integer.
1253 UNSIGNEDP is nonzero if X is an unsigned value.
1255 This can be done by referring to a part of X in place
1256 or by copying to a new temporary with conversion.
1258 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1260 This function *must not* call protect_from_queue
1261 except when putting X into an insn (in which case convert_move does it). */
1264 convert_modes (mode, oldmode, x, unsignedp)
1265 enum machine_mode mode, oldmode;
1271 /* If FROM is a SUBREG that indicates that we have already done at least
1272 the required extension, strip it. */
1274 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1275 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1276 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1277 x = gen_lowpart (mode, x);
1279 if (GET_MODE (x) != VOIDmode)
1280 oldmode = GET_MODE (x);
1282 if (mode == oldmode)
1285 /* There is one case that we must handle specially: If we are converting
1286 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1287 we are to interpret the constant as unsigned, gen_lowpart will do
1288 the wrong if the constant appears negative. What we want to do is
1289 make the high-order word of the constant zero, not all ones. */
1291 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1292 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1293 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1294 return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
1296 /* We can do this with a gen_lowpart if both desired and current modes
1297 are integer, and this is either a constant integer, a register, or a
1298 non-volatile MEM. Except for the constant case where MODE is no
1299 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1301 if ((GET_CODE (x) == CONST_INT
1302 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1303 || (GET_MODE_CLASS (mode) == MODE_INT
1304 && GET_MODE_CLASS (oldmode) == MODE_INT
1305 && (GET_CODE (x) == CONST_DOUBLE
1306 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1307 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1308 && direct_load[(int) mode])
1309 || (GET_CODE (x) == REG
1310 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1311 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1313 /* ?? If we don't know OLDMODE, we have to assume here that
1314 X does not need sign- or zero-extension. This may not be
1315 the case, but it's the best we can do. */
1316 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1317 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1319 HOST_WIDE_INT val = INTVAL (x);
1320 int width = GET_MODE_BITSIZE (oldmode);
1322 /* We must sign or zero-extend in this case. Start by
1323 zero-extending, then sign extend if we need to. */
1324 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1326 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1327 val |= (HOST_WIDE_INT) (-1) << width;
1329 return GEN_INT (val);
1332 return gen_lowpart (mode, x);
1335 temp = gen_reg_rtx (mode);
1336 convert_move (temp, x, unsignedp);
1340 /* Generate several move instructions to copy LEN bytes
1341 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1342 The caller must pass FROM and TO
1343 through protect_from_queue before calling.
1344 ALIGN (in bytes) is maximum alignment we can assume. */
1347 move_by_pieces (to, from, len, align)
1351 struct move_by_pieces data;
1352 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1353 int max_size = MOVE_MAX + 1;
1356 data.to_addr = to_addr;
1357 data.from_addr = from_addr;
1361 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1362 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1364 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1365 || GET_CODE (from_addr) == POST_INC
1366 || GET_CODE (from_addr) == POST_DEC);
1368 data.explicit_inc_from = 0;
1369 data.explicit_inc_to = 0;
1371 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1372 if (data.reverse) data.offset = len;
1375 data.to_struct = MEM_IN_STRUCT_P (to);
1376 data.from_struct = MEM_IN_STRUCT_P (from);
1378 /* If copying requires more than two move insns,
1379 copy addresses to registers (to make displacements shorter)
1380 and use post-increment if available. */
1381 if (!(data.autinc_from && data.autinc_to)
1382 && move_by_pieces_ninsns (len, align) > 2)
1384 #ifdef HAVE_PRE_DECREMENT
1385 if (data.reverse && ! data.autinc_from)
1387 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1388 data.autinc_from = 1;
1389 data.explicit_inc_from = -1;
1392 #ifdef HAVE_POST_INCREMENT
1393 if (! data.autinc_from)
1395 data.from_addr = copy_addr_to_reg (from_addr);
1396 data.autinc_from = 1;
1397 data.explicit_inc_from = 1;
1400 if (!data.autinc_from && CONSTANT_P (from_addr))
1401 data.from_addr = copy_addr_to_reg (from_addr);
1402 #ifdef HAVE_PRE_DECREMENT
1403 if (data.reverse && ! data.autinc_to)
1405 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1407 data.explicit_inc_to = -1;
1410 #ifdef HAVE_POST_INCREMENT
1411 if (! data.reverse && ! data.autinc_to)
1413 data.to_addr = copy_addr_to_reg (to_addr);
1415 data.explicit_inc_to = 1;
1418 if (!data.autinc_to && CONSTANT_P (to_addr))
1419 data.to_addr = copy_addr_to_reg (to_addr);
1422 if (! SLOW_UNALIGNED_ACCESS
1423 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1426 /* First move what we can in the largest integer mode, then go to
1427 successively smaller modes. */
1429 while (max_size > 1)
1431 enum machine_mode mode = VOIDmode, tmode;
1432 enum insn_code icode;
1434 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1435 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1436 if (GET_MODE_SIZE (tmode) < max_size)
1439 if (mode == VOIDmode)
1442 icode = mov_optab->handlers[(int) mode].insn_code;
1443 if (icode != CODE_FOR_nothing
1444 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1445 GET_MODE_SIZE (mode)))
1446 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1448 max_size = GET_MODE_SIZE (mode);
1451 /* The code above should have handled everything. */
1456 /* Return number of insns required to move L bytes by pieces.
1457 ALIGN (in bytes) is maximum alignment we can assume. */
1460 move_by_pieces_ninsns (l, align)
1464 register int n_insns = 0;
1465 int max_size = MOVE_MAX + 1;
1467 if (! SLOW_UNALIGNED_ACCESS
1468 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1471 while (max_size > 1)
1473 enum machine_mode mode = VOIDmode, tmode;
1474 enum insn_code icode;
1476 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1477 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1478 if (GET_MODE_SIZE (tmode) < max_size)
1481 if (mode == VOIDmode)
1484 icode = mov_optab->handlers[(int) mode].insn_code;
1485 if (icode != CODE_FOR_nothing
1486 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1487 GET_MODE_SIZE (mode)))
1488 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1490 max_size = GET_MODE_SIZE (mode);
1496 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1497 with move instructions for mode MODE. GENFUN is the gen_... function
1498 to make a move insn for that mode. DATA has all the other info. */
1501 move_by_pieces_1 (genfun, mode, data)
1503 enum machine_mode mode;
1504 struct move_by_pieces *data;
1506 register int size = GET_MODE_SIZE (mode);
1507 register rtx to1, from1;
1509 while (data->len >= size)
1511 if (data->reverse) data->offset -= size;
1513 to1 = (data->autinc_to
1514 ? gen_rtx (MEM, mode, data->to_addr)
1515 : change_address (data->to, mode,
1516 plus_constant (data->to_addr, data->offset)));
1517 MEM_IN_STRUCT_P (to1) = data->to_struct;
1520 ? gen_rtx (MEM, mode, data->from_addr)
1521 : change_address (data->from, mode,
1522 plus_constant (data->from_addr, data->offset)));
1523 MEM_IN_STRUCT_P (from1) = data->from_struct;
1525 #ifdef HAVE_PRE_DECREMENT
1526 if (data->explicit_inc_to < 0)
1527 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1528 if (data->explicit_inc_from < 0)
1529 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1532 emit_insn ((*genfun) (to1, from1));
1533 #ifdef HAVE_POST_INCREMENT
1534 if (data->explicit_inc_to > 0)
1535 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1536 if (data->explicit_inc_from > 0)
1537 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1540 if (! data->reverse) data->offset += size;
1546 /* Emit code to move a block Y to a block X.
1547 This may be done with string-move instructions,
1548 with multiple scalar move instructions, or with a library call.
1550 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1552 SIZE is an rtx that says how long they are.
1553 ALIGN is the maximum alignment we can assume they have,
1554 measured in bytes. */
1557 emit_block_move (x, y, size, align)
1562 if (GET_MODE (x) != BLKmode)
1565 if (GET_MODE (y) != BLKmode)
1568 x = protect_from_queue (x, 1);
1569 y = protect_from_queue (y, 0);
1570 size = protect_from_queue (size, 0);
1572 if (GET_CODE (x) != MEM)
1574 if (GET_CODE (y) != MEM)
1579 if (GET_CODE (size) == CONST_INT
1580 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1581 move_by_pieces (x, y, INTVAL (size), align);
1584 /* Try the most limited insn first, because there's no point
1585 including more than one in the machine description unless
1586 the more limited one has some advantage. */
1588 rtx opalign = GEN_INT (align);
1589 enum machine_mode mode;
1591 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1592 mode = GET_MODE_WIDER_MODE (mode))
1594 enum insn_code code = movstr_optab[(int) mode];
1596 if (code != CODE_FOR_nothing
1597 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1598 here because if SIZE is less than the mode mask, as it is
1599 returned by the macro, it will definitely be less than the
1600 actual mode mask. */
1601 && ((GET_CODE (size) == CONST_INT
1602 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1603 <= GET_MODE_MASK (mode)))
1604 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1605 && (insn_operand_predicate[(int) code][0] == 0
1606 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1607 && (insn_operand_predicate[(int) code][1] == 0
1608 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1609 && (insn_operand_predicate[(int) code][3] == 0
1610 || (*insn_operand_predicate[(int) code][3]) (opalign,
1614 rtx last = get_last_insn ();
1617 op2 = convert_to_mode (mode, size, 1);
1618 if (insn_operand_predicate[(int) code][2] != 0
1619 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1620 op2 = copy_to_mode_reg (mode, op2);
1622 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1629 delete_insns_since (last);
1633 #ifdef TARGET_MEM_FUNCTIONS
1634 emit_library_call (memcpy_libfunc, 0,
1635 VOIDmode, 3, XEXP (x, 0), Pmode,
1637 convert_to_mode (TYPE_MODE (sizetype), size,
1638 TREE_UNSIGNED (sizetype)),
1639 TYPE_MODE (sizetype));
1641 emit_library_call (bcopy_libfunc, 0,
1642 VOIDmode, 3, XEXP (y, 0), Pmode,
1644 convert_to_mode (TYPE_MODE (integer_type_node), size,
1645 TREE_UNSIGNED (integer_type_node)),
1646 TYPE_MODE (integer_type_node));
1651 /* Copy all or part of a value X into registers starting at REGNO.
1652 The number of registers to be filled is NREGS. */
1655 move_block_to_reg (regno, x, nregs, mode)
1659 enum machine_mode mode;
1667 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1668 x = validize_mem (force_const_mem (mode, x));
1670 /* See if the machine can do this with a load multiple insn. */
1671 #ifdef HAVE_load_multiple
1672 if (HAVE_load_multiple)
1674 last = get_last_insn ();
1675 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1683 delete_insns_since (last);
1687 for (i = 0; i < nregs; i++)
1688 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1689 operand_subword_force (x, i, mode));
1692 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1693 The number of registers to be filled is NREGS. SIZE indicates the number
1694 of bytes in the object X. */
1698 move_block_from_reg (regno, x, nregs, size)
1707 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1708 to the left before storing to memory. */
1709 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1711 rtx tem = operand_subword (x, 0, 1, BLKmode);
1717 shift = expand_shift (LSHIFT_EXPR, word_mode,
1718 gen_rtx (REG, word_mode, regno),
1719 build_int_2 ((UNITS_PER_WORD - size)
1720 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1721 emit_move_insn (tem, shift);
1725 /* See if the machine can do this with a store multiple insn. */
1726 #ifdef HAVE_store_multiple
1727 if (HAVE_store_multiple)
1729 last = get_last_insn ();
1730 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
1738 delete_insns_since (last);
1742 for (i = 0; i < nregs; i++)
1744 rtx tem = operand_subword (x, i, 1, BLKmode);
1749 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1753 /* Emit code to move a block Y to a block X, where X is non-consecutive
1754 registers represented by a PARALLEL. */
1757 emit_group_load (x, y)
1760 rtx target_reg, source;
1763 if (GET_CODE (x) != PARALLEL)
1766 /* Check for a NULL entry, used to indicate that the parameter goes
1767 both on the stack and in registers. */
1768 if (XEXP (XVECEXP (x, 0, 0), 0))
1773 for (; i < XVECLEN (x, 0); i++)
1775 rtx element = XVECEXP (x, 0, i);
1777 target_reg = XEXP (element, 0);
1779 if (GET_CODE (y) == MEM)
1780 source = change_address (y, GET_MODE (target_reg),
1781 plus_constant (XEXP (y, 0),
1782 INTVAL (XEXP (element, 1))));
1783 else if (XEXP (element, 1) == const0_rtx)
1785 if (GET_MODE (target_reg) == GET_MODE (y))
1787 else if (GET_MODE_SIZE (GET_MODE (target_reg))
1788 == GET_MODE_SIZE (GET_MODE (y)))
1789 source = gen_rtx (SUBREG, GET_MODE (target_reg), y, 0);
1796 emit_move_insn (target_reg, source);
1800 /* Emit code to move a block Y to a block X, where Y is non-consecutive
1801 registers represented by a PARALLEL. */
1804 emit_group_store (x, y)
1807 rtx source_reg, target;
1810 if (GET_CODE (y) != PARALLEL)
1813 /* Check for a NULL entry, used to indicate that the parameter goes
1814 both on the stack and in registers. */
1815 if (XEXP (XVECEXP (y, 0, 0), 0))
1820 for (; i < XVECLEN (y, 0); i++)
1822 rtx element = XVECEXP (y, 0, i);
1824 source_reg = XEXP (element, 0);
1826 if (GET_CODE (x) == MEM)
1827 target = change_address (x, GET_MODE (source_reg),
1828 plus_constant (XEXP (x, 0),
1829 INTVAL (XEXP (element, 1))));
1830 else if (XEXP (element, 1) == const0_rtx)
1835 emit_move_insn (target, source_reg);
1839 /* Add a USE expression for REG to the (possibly empty) list pointed
1840 to by CALL_FUSAGE. REG must denote a hard register. */
1843 use_reg (call_fusage, reg)
1844 rtx *call_fusage, reg;
1846 if (GET_CODE (reg) != REG
1847 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
1851 = gen_rtx (EXPR_LIST, VOIDmode,
1852 gen_rtx (USE, VOIDmode, reg), *call_fusage);
1855 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1856 starting at REGNO. All of these registers must be hard registers. */
1859 use_regs (call_fusage, regno, nregs)
1866 if (regno + nregs > FIRST_PSEUDO_REGISTER)
1869 for (i = 0; i < nregs; i++)
1870 use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
1873 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
1874 PARALLEL REGS. This is for calls that pass values in multiple
1875 non-contiguous locations. The Irix 6 ABI has examples of this. */
1878 use_group_regs (call_fusage, regs)
1884 /* Check for a NULL entry, used to indicate that the parameter goes
1885 both on the stack and in registers. */
1886 if (XEXP (XVECEXP (regs, 0, 0), 0))
1891 for (; i < XVECLEN (regs, 0); i++)
1892 use_reg (call_fusage, XEXP (XVECEXP (regs, 0, i), 0));
1895 /* Generate several move instructions to clear LEN bytes of block TO.
1896 (A MEM rtx with BLKmode). The caller must pass TO through
1897 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
1901 clear_by_pieces (to, len, align)
1905 struct clear_by_pieces data;
1906 rtx to_addr = XEXP (to, 0);
1907 int max_size = MOVE_MAX + 1;
1910 data.to_addr = to_addr;
1913 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1914 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1916 data.explicit_inc_to = 0;
1918 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1919 if (data.reverse) data.offset = len;
1922 data.to_struct = MEM_IN_STRUCT_P (to);
1924 /* If copying requires more than two move insns,
1925 copy addresses to registers (to make displacements shorter)
1926 and use post-increment if available. */
1928 && move_by_pieces_ninsns (len, align) > 2)
1930 #ifdef HAVE_PRE_DECREMENT
1931 if (data.reverse && ! data.autinc_to)
1933 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1935 data.explicit_inc_to = -1;
1938 #ifdef HAVE_POST_INCREMENT
1939 if (! data.reverse && ! data.autinc_to)
1941 data.to_addr = copy_addr_to_reg (to_addr);
1943 data.explicit_inc_to = 1;
1946 if (!data.autinc_to && CONSTANT_P (to_addr))
1947 data.to_addr = copy_addr_to_reg (to_addr);
1950 if (! SLOW_UNALIGNED_ACCESS
1951 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1954 /* First move what we can in the largest integer mode, then go to
1955 successively smaller modes. */
1957 while (max_size > 1)
1959 enum machine_mode mode = VOIDmode, tmode;
1960 enum insn_code icode;
1962 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1963 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1964 if (GET_MODE_SIZE (tmode) < max_size)
1967 if (mode == VOIDmode)
1970 icode = mov_optab->handlers[(int) mode].insn_code;
1971 if (icode != CODE_FOR_nothing
1972 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1973 GET_MODE_SIZE (mode)))
1974 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
1976 max_size = GET_MODE_SIZE (mode);
1979 /* The code above should have handled everything. */
1984 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
1985 with move instructions for mode MODE. GENFUN is the gen_... function
1986 to make a move insn for that mode. DATA has all the other info. */
1989 clear_by_pieces_1 (genfun, mode, data)
1991 enum machine_mode mode;
1992 struct clear_by_pieces *data;
1994 register int size = GET_MODE_SIZE (mode);
1997 while (data->len >= size)
1999 if (data->reverse) data->offset -= size;
2001 to1 = (data->autinc_to
2002 ? gen_rtx (MEM, mode, data->to_addr)
2003 : change_address (data->to, mode,
2004 plus_constant (data->to_addr, data->offset)));
2005 MEM_IN_STRUCT_P (to1) = data->to_struct;
2007 #ifdef HAVE_PRE_DECREMENT
2008 if (data->explicit_inc_to < 0)
2009 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2012 emit_insn ((*genfun) (to1, const0_rtx));
2013 #ifdef HAVE_POST_INCREMENT
2014 if (data->explicit_inc_to > 0)
2015 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2018 if (! data->reverse) data->offset += size;
2024 /* Write zeros through the storage of OBJECT.
2025 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2026 the maximum alignment we can is has, measured in bytes. */
2029 clear_storage (object, size, align)
2034 if (GET_MODE (object) == BLKmode)
2036 object = protect_from_queue (object, 1);
2037 size = protect_from_queue (size, 0);
2039 if (GET_CODE (size) == CONST_INT
2040 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
2041 clear_by_pieces (object, INTVAL (size), align);
2045 /* Try the most limited insn first, because there's no point
2046 including more than one in the machine description unless
2047 the more limited one has some advantage. */
2049 rtx opalign = GEN_INT (align);
2050 enum machine_mode mode;
2052 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2053 mode = GET_MODE_WIDER_MODE (mode))
2055 enum insn_code code = clrstr_optab[(int) mode];
2057 if (code != CODE_FOR_nothing
2058 /* We don't need MODE to be narrower than
2059 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2060 the mode mask, as it is returned by the macro, it will
2061 definitely be less than the actual mode mask. */
2062 && ((GET_CODE (size) == CONST_INT
2063 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2064 <= GET_MODE_MASK (mode)))
2065 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2066 && (insn_operand_predicate[(int) code][0] == 0
2067 || (*insn_operand_predicate[(int) code][0]) (object,
2069 && (insn_operand_predicate[(int) code][2] == 0
2070 || (*insn_operand_predicate[(int) code][2]) (opalign,
2074 rtx last = get_last_insn ();
2077 op1 = convert_to_mode (mode, size, 1);
2078 if (insn_operand_predicate[(int) code][1] != 0
2079 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2081 op1 = copy_to_mode_reg (mode, op1);
2083 pat = GEN_FCN ((int) code) (object, op1, opalign);
2090 delete_insns_since (last);
2095 #ifdef TARGET_MEM_FUNCTIONS
2096 emit_library_call (memset_libfunc, 0,
2098 XEXP (object, 0), Pmode,
2099 const0_rtx, TYPE_MODE (integer_type_node),
2100 convert_to_mode (TYPE_MODE (sizetype),
2101 size, TREE_UNSIGNED (sizetype)),
2102 TYPE_MODE (sizetype));
2104 emit_library_call (bzero_libfunc, 0,
2106 XEXP (object, 0), Pmode,
2107 convert_to_mode (TYPE_MODE (integer_type_node),
2109 TREE_UNSIGNED (integer_type_node)),
2110 TYPE_MODE (integer_type_node));
2115 emit_move_insn (object, const0_rtx);
2118 /* Generate code to copy Y into X.
2119 Both Y and X must have the same mode, except that
2120 Y can be a constant with VOIDmode.
2121 This mode cannot be BLKmode; use emit_block_move for that.
2123 Return the last instruction emitted. */
2126 emit_move_insn (x, y)
2129 enum machine_mode mode = GET_MODE (x);
2131 x = protect_from_queue (x, 1);
2132 y = protect_from_queue (y, 0);
2134 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2137 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2138 y = force_const_mem (mode, y);
2140 /* If X or Y are memory references, verify that their addresses are valid
2142 if (GET_CODE (x) == MEM
2143 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2144 && ! push_operand (x, GET_MODE (x)))
2146 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2147 x = change_address (x, VOIDmode, XEXP (x, 0));
2149 if (GET_CODE (y) == MEM
2150 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2152 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2153 y = change_address (y, VOIDmode, XEXP (y, 0));
2155 if (mode == BLKmode)
2158 return emit_move_insn_1 (x, y);
2161 /* Low level part of emit_move_insn.
2162 Called just like emit_move_insn, but assumes X and Y
2163 are basically valid. */
2166 emit_move_insn_1 (x, y)
2169 enum machine_mode mode = GET_MODE (x);
2170 enum machine_mode submode;
2171 enum mode_class class = GET_MODE_CLASS (mode);
2174 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2176 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2178 /* Expand complex moves by moving real part and imag part, if possible. */
2179 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2180 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2182 (class == MODE_COMPLEX_INT
2183 ? MODE_INT : MODE_FLOAT),
2185 && (mov_optab->handlers[(int) submode].insn_code
2186 != CODE_FOR_nothing))
2188 /* Don't split destination if it is a stack push. */
2189 int stack = push_operand (x, GET_MODE (x));
2192 /* If this is a stack, push the highpart first, so it
2193 will be in the argument order.
2195 In that case, change_address is used only to convert
2196 the mode, not to change the address. */
2199 /* Note that the real part always precedes the imag part in memory
2200 regardless of machine's endianness. */
2201 #ifdef STACK_GROWS_DOWNWARD
2202 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2203 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2204 gen_imagpart (submode, y)));
2205 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2206 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2207 gen_realpart (submode, y)));
2209 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2210 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2211 gen_realpart (submode, y)));
2212 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2213 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2214 gen_imagpart (submode, y)));
2219 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2220 (gen_realpart (submode, x), gen_realpart (submode, y)));
2221 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2222 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2225 return get_last_insn ();
2228 /* This will handle any multi-word mode that lacks a move_insn pattern.
2229 However, you will get better code if you define such patterns,
2230 even if they must turn into multiple assembler instructions. */
2231 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2236 #ifdef PUSH_ROUNDING
2238 /* If X is a push on the stack, do the push now and replace
2239 X with a reference to the stack pointer. */
2240 if (push_operand (x, GET_MODE (x)))
2242 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2243 x = change_address (x, VOIDmode, stack_pointer_rtx);
2247 /* Show the output dies here. */
2249 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
2252 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2255 rtx xpart = operand_subword (x, i, 1, mode);
2256 rtx ypart = operand_subword (y, i, 1, mode);
2258 /* If we can't get a part of Y, put Y into memory if it is a
2259 constant. Otherwise, force it into a register. If we still
2260 can't get a part of Y, abort. */
2261 if (ypart == 0 && CONSTANT_P (y))
2263 y = force_const_mem (mode, y);
2264 ypart = operand_subword (y, i, 1, mode);
2266 else if (ypart == 0)
2267 ypart = operand_subword_force (y, i, mode);
2269 if (xpart == 0 || ypart == 0)
2272 last_insn = emit_move_insn (xpart, ypart);
2281 /* Pushing data onto the stack. */
2283 /* Push a block of length SIZE (perhaps variable)
2284 and return an rtx to address the beginning of the block.
2285 Note that it is not possible for the value returned to be a QUEUED.
2286 The value may be virtual_outgoing_args_rtx.
2288 EXTRA is the number of bytes of padding to push in addition to SIZE.
2289 BELOW nonzero means this padding comes at low addresses;
2290 otherwise, the padding comes at high addresses. */
2293 push_block (size, extra, below)
2299 size = convert_modes (Pmode, ptr_mode, size, 1);
2300 if (CONSTANT_P (size))
2301 anti_adjust_stack (plus_constant (size, extra));
2302 else if (GET_CODE (size) == REG && extra == 0)
2303 anti_adjust_stack (size);
2306 rtx temp = copy_to_mode_reg (Pmode, size);
2308 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2309 temp, 0, OPTAB_LIB_WIDEN);
2310 anti_adjust_stack (temp);
2313 #ifdef STACK_GROWS_DOWNWARD
2314 temp = virtual_outgoing_args_rtx;
2315 if (extra != 0 && below)
2316 temp = plus_constant (temp, extra);
2318 if (GET_CODE (size) == CONST_INT)
2319 temp = plus_constant (virtual_outgoing_args_rtx,
2320 - INTVAL (size) - (below ? 0 : extra));
2321 else if (extra != 0 && !below)
2322 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2323 negate_rtx (Pmode, plus_constant (size, extra)));
2325 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2326 negate_rtx (Pmode, size));
2329 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2335 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2338 /* Generate code to push X onto the stack, assuming it has mode MODE and
2340 MODE is redundant except when X is a CONST_INT (since they don't
2342 SIZE is an rtx for the size of data to be copied (in bytes),
2343 needed only if X is BLKmode.
2345 ALIGN (in bytes) is maximum alignment we can assume.
2347 If PARTIAL and REG are both nonzero, then copy that many of the first
2348 words of X into registers starting with REG, and push the rest of X.
2349 The amount of space pushed is decreased by PARTIAL words,
2350 rounded *down* to a multiple of PARM_BOUNDARY.
2351 REG must be a hard register in this case.
2352 If REG is zero but PARTIAL is not, take any all others actions for an
2353 argument partially in registers, but do not actually load any
2356 EXTRA is the amount in bytes of extra space to leave next to this arg.
2357 This is ignored if an argument block has already been allocated.
2359 On a machine that lacks real push insns, ARGS_ADDR is the address of
2360 the bottom of the argument block for this call. We use indexing off there
2361 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
2362 argument block has not been preallocated.
2364 ARGS_SO_FAR is the size of args previously pushed for this call. */
2367 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2368 args_addr, args_so_far)
2370 enum machine_mode mode;
2381 enum direction stack_direction
2382 #ifdef STACK_GROWS_DOWNWARD
2388 /* Decide where to pad the argument: `downward' for below,
2389 `upward' for above, or `none' for don't pad it.
2390 Default is below for small data on big-endian machines; else above. */
2391 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2393 /* If we're placing part of X into a register and part of X onto
2394 the stack, indicate that the entire register is clobbered to
2395 keep flow from thinking the unused part of the register is live. */
2396 if (partial > 0 && reg != 0)
2397 emit_insn (gen_rtx (CLOBBER, VOIDmode, reg));
2399 /* Invert direction if stack is post-update. */
2400 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2401 if (where_pad != none)
2402 where_pad = (where_pad == downward ? upward : downward);
2404 xinner = x = protect_from_queue (x, 0);
2406 if (mode == BLKmode)
2408 /* Copy a block into the stack, entirely or partially. */
2411 int used = partial * UNITS_PER_WORD;
2412 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2420 /* USED is now the # of bytes we need not copy to the stack
2421 because registers will take care of them. */
2424 xinner = change_address (xinner, BLKmode,
2425 plus_constant (XEXP (xinner, 0), used));
2427 /* If the partial register-part of the arg counts in its stack size,
2428 skip the part of stack space corresponding to the registers.
2429 Otherwise, start copying to the beginning of the stack space,
2430 by setting SKIP to 0. */
2431 #ifndef REG_PARM_STACK_SPACE
2437 #ifdef PUSH_ROUNDING
2438 /* Do it with several push insns if that doesn't take lots of insns
2439 and if there is no difficulty with push insns that skip bytes
2440 on the stack for alignment purposes. */
2442 && GET_CODE (size) == CONST_INT
2444 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2446 /* Here we avoid the case of a structure whose weak alignment
2447 forces many pushes of a small amount of data,
2448 and such small pushes do rounding that causes trouble. */
2449 && ((! SLOW_UNALIGNED_ACCESS)
2450 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2451 || PUSH_ROUNDING (align) == align)
2452 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2454 /* Push padding now if padding above and stack grows down,
2455 or if padding below and stack grows up.
2456 But if space already allocated, this has already been done. */
2457 if (extra && args_addr == 0
2458 && where_pad != none && where_pad != stack_direction)
2459 anti_adjust_stack (GEN_INT (extra));
2461 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2462 INTVAL (size) - used, align);
2465 #endif /* PUSH_ROUNDING */
2467 /* Otherwise make space on the stack and copy the data
2468 to the address of that space. */
2470 /* Deduct words put into registers from the size we must copy. */
2473 if (GET_CODE (size) == CONST_INT)
2474 size = GEN_INT (INTVAL (size) - used);
2476 size = expand_binop (GET_MODE (size), sub_optab, size,
2477 GEN_INT (used), NULL_RTX, 0,
2481 /* Get the address of the stack space.
2482 In this case, we do not deal with EXTRA separately.
2483 A single stack adjust will do. */
2486 temp = push_block (size, extra, where_pad == downward);
2489 else if (GET_CODE (args_so_far) == CONST_INT)
2490 temp = memory_address (BLKmode,
2491 plus_constant (args_addr,
2492 skip + INTVAL (args_so_far)));
2494 temp = memory_address (BLKmode,
2495 plus_constant (gen_rtx (PLUS, Pmode,
2496 args_addr, args_so_far),
2499 /* TEMP is the address of the block. Copy the data there. */
2500 if (GET_CODE (size) == CONST_INT
2501 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2504 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2505 INTVAL (size), align);
2508 /* Try the most limited insn first, because there's no point
2509 including more than one in the machine description unless
2510 the more limited one has some advantage. */
2511 #ifdef HAVE_movstrqi
2513 && GET_CODE (size) == CONST_INT
2514 && ((unsigned) INTVAL (size)
2515 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2517 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2518 xinner, size, GEN_INT (align));
2526 #ifdef HAVE_movstrhi
2528 && GET_CODE (size) == CONST_INT
2529 && ((unsigned) INTVAL (size)
2530 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2532 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2533 xinner, size, GEN_INT (align));
2541 #ifdef HAVE_movstrsi
2544 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2545 xinner, size, GEN_INT (align));
2553 #ifdef HAVE_movstrdi
2556 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2557 xinner, size, GEN_INT (align));
2566 #ifndef ACCUMULATE_OUTGOING_ARGS
2567 /* If the source is referenced relative to the stack pointer,
2568 copy it to another register to stabilize it. We do not need
2569 to do this if we know that we won't be changing sp. */
2571 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2572 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2573 temp = copy_to_reg (temp);
2576 /* Make inhibit_defer_pop nonzero around the library call
2577 to force it to pop the bcopy-arguments right away. */
2579 #ifdef TARGET_MEM_FUNCTIONS
2580 emit_library_call (memcpy_libfunc, 0,
2581 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2582 convert_to_mode (TYPE_MODE (sizetype),
2583 size, TREE_UNSIGNED (sizetype)),
2584 TYPE_MODE (sizetype));
2586 emit_library_call (bcopy_libfunc, 0,
2587 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2588 convert_to_mode (TYPE_MODE (integer_type_node),
2590 TREE_UNSIGNED (integer_type_node)),
2591 TYPE_MODE (integer_type_node));
2596 else if (partial > 0)
2598 /* Scalar partly in registers. */
2600 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2603 /* # words of start of argument
2604 that we must make space for but need not store. */
2605 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2606 int args_offset = INTVAL (args_so_far);
2609 /* Push padding now if padding above and stack grows down,
2610 or if padding below and stack grows up.
2611 But if space already allocated, this has already been done. */
2612 if (extra && args_addr == 0
2613 && where_pad != none && where_pad != stack_direction)
2614 anti_adjust_stack (GEN_INT (extra));
2616 /* If we make space by pushing it, we might as well push
2617 the real data. Otherwise, we can leave OFFSET nonzero
2618 and leave the space uninitialized. */
2622 /* Now NOT_STACK gets the number of words that we don't need to
2623 allocate on the stack. */
2624 not_stack = partial - offset;
2626 /* If the partial register-part of the arg counts in its stack size,
2627 skip the part of stack space corresponding to the registers.
2628 Otherwise, start copying to the beginning of the stack space,
2629 by setting SKIP to 0. */
2630 #ifndef REG_PARM_STACK_SPACE
2636 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2637 x = validize_mem (force_const_mem (mode, x));
2639 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2640 SUBREGs of such registers are not allowed. */
2641 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2642 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2643 x = copy_to_reg (x);
2645 /* Loop over all the words allocated on the stack for this arg. */
2646 /* We can do it by words, because any scalar bigger than a word
2647 has a size a multiple of a word. */
2648 #ifndef PUSH_ARGS_REVERSED
2649 for (i = not_stack; i < size; i++)
2651 for (i = size - 1; i >= not_stack; i--)
2653 if (i >= not_stack + offset)
2654 emit_push_insn (operand_subword_force (x, i, mode),
2655 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2657 GEN_INT (args_offset + ((i - not_stack + skip)
2658 * UNITS_PER_WORD)));
2664 /* Push padding now if padding above and stack grows down,
2665 or if padding below and stack grows up.
2666 But if space already allocated, this has already been done. */
2667 if (extra && args_addr == 0
2668 && where_pad != none && where_pad != stack_direction)
2669 anti_adjust_stack (GEN_INT (extra));
2671 #ifdef PUSH_ROUNDING
2673 addr = gen_push_operand ();
2676 if (GET_CODE (args_so_far) == CONST_INT)
2678 = memory_address (mode,
2679 plus_constant (args_addr, INTVAL (args_so_far)));
2681 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2684 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2688 /* If part should go in registers, copy that part
2689 into the appropriate registers. Do this now, at the end,
2690 since mem-to-mem copies above may do function calls. */
2691 if (partial > 0 && reg != 0)
2693 /* Handle calls that pass values in multiple non-contiguous locations.
2694 The Irix 6 ABI has examples of this. */
2695 if (GET_CODE (reg) == PARALLEL)
2696 emit_group_load (reg, x);
2698 move_block_to_reg (REGNO (reg), x, partial, mode);
2701 if (extra && args_addr == 0 && where_pad == stack_direction)
2702 anti_adjust_stack (GEN_INT (extra));
2705 /* Expand an assignment that stores the value of FROM into TO.
2706 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2707 (This may contain a QUEUED rtx;
2708 if the value is constant, this rtx is a constant.)
2709 Otherwise, the returned value is NULL_RTX.
2711 SUGGEST_REG is no longer actually used.
2712 It used to mean, copy the value through a register
2713 and return that register, if that is possible.
2714 We now use WANT_VALUE to decide whether to do this. */
2717 expand_assignment (to, from, want_value, suggest_reg)
2722 register rtx to_rtx = 0;
2725 /* Don't crash if the lhs of the assignment was erroneous. */
2727 if (TREE_CODE (to) == ERROR_MARK)
2729 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2730 return want_value ? result : NULL_RTX;
2733 if (output_bytecode)
2735 tree dest_innermost;
2737 bc_expand_expr (from);
2738 bc_emit_instruction (duplicate);
2740 dest_innermost = bc_expand_address (to);
2742 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2743 take care of it here. */
2745 bc_store_memory (TREE_TYPE (to), dest_innermost);
2749 /* Assignment of a structure component needs special treatment
2750 if the structure component's rtx is not simply a MEM.
2751 Assignment of an array element at a constant index, and assignment of
2752 an array element in an unaligned packed structure field, has the same
2755 if (TREE_CODE (to) == COMPONENT_REF
2756 || TREE_CODE (to) == BIT_FIELD_REF
2757 || (TREE_CODE (to) == ARRAY_REF
2758 && ((TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2759 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST)
2760 || (SLOW_UNALIGNED_ACCESS && get_inner_unaligned_p (to)))))
2762 enum machine_mode mode1;
2772 tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2773 &mode1, &unsignedp, &volatilep);
2775 /* If we are going to use store_bit_field and extract_bit_field,
2776 make sure to_rtx will be safe for multiple use. */
2778 if (mode1 == VOIDmode && want_value)
2779 tem = stabilize_reference (tem);
2781 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
2782 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2785 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2787 if (GET_CODE (to_rtx) != MEM)
2789 to_rtx = change_address (to_rtx, VOIDmode,
2790 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
2791 force_reg (ptr_mode, offset_rtx)));
2792 /* If we have a variable offset, the known alignment
2793 is only that of the innermost structure containing the field.
2794 (Actually, we could sometimes do better by using the
2795 align of an element of the innermost array, but no need.) */
2796 if (TREE_CODE (to) == COMPONENT_REF
2797 || TREE_CODE (to) == BIT_FIELD_REF)
2799 = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (to, 0))) / BITS_PER_UNIT;
2803 if (GET_CODE (to_rtx) == MEM)
2805 /* When the offset is zero, to_rtx is the address of the
2806 structure we are storing into, and hence may be shared.
2807 We must make a new MEM before setting the volatile bit. */
2809 to_rtx = change_address (to_rtx, VOIDmode, XEXP (to_rtx, 0));
2810 MEM_VOLATILE_P (to_rtx) = 1;
2812 #if 0 /* This was turned off because, when a field is volatile
2813 in an object which is not volatile, the object may be in a register,
2814 and then we would abort over here. */
2820 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2822 /* Spurious cast makes HPUX compiler happy. */
2823 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2826 /* Required alignment of containing datum. */
2828 int_size_in_bytes (TREE_TYPE (tem)));
2829 preserve_temp_slots (result);
2833 /* If the value is meaningful, convert RESULT to the proper mode.
2834 Otherwise, return nothing. */
2835 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2836 TYPE_MODE (TREE_TYPE (from)),
2838 TREE_UNSIGNED (TREE_TYPE (to)))
2842 /* If the rhs is a function call and its value is not an aggregate,
2843 call the function before we start to compute the lhs.
2844 This is needed for correct code for cases such as
2845 val = setjmp (buf) on machines where reference to val
2846 requires loading up part of an address in a separate insn.
2848 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2849 a promoted variable where the zero- or sign- extension needs to be done.
2850 Handling this in the normal way is safe because no computation is done
2852 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2853 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
2854 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
2859 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2861 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2863 /* Handle calls that return values in multiple non-contiguous locations.
2864 The Irix 6 ABI has examples of this. */
2865 if (GET_CODE (to_rtx) == PARALLEL)
2866 emit_group_load (to_rtx, value);
2867 else if (GET_MODE (to_rtx) == BLKmode)
2868 emit_block_move (to_rtx, value, expr_size (from),
2869 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
2871 emit_move_insn (to_rtx, value);
2872 preserve_temp_slots (to_rtx);
2875 return want_value ? to_rtx : NULL_RTX;
2878 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2879 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2882 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2884 /* Don't move directly into a return register. */
2885 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2890 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2891 emit_move_insn (to_rtx, temp);
2892 preserve_temp_slots (to_rtx);
2895 return want_value ? to_rtx : NULL_RTX;
2898 /* In case we are returning the contents of an object which overlaps
2899 the place the value is being stored, use a safe function when copying
2900 a value through a pointer into a structure value return block. */
2901 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2902 && current_function_returns_struct
2903 && !current_function_returns_pcc_struct)
2908 size = expr_size (from);
2909 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2911 #ifdef TARGET_MEM_FUNCTIONS
2912 emit_library_call (memcpy_libfunc, 0,
2913 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2914 XEXP (from_rtx, 0), Pmode,
2915 convert_to_mode (TYPE_MODE (sizetype),
2916 size, TREE_UNSIGNED (sizetype)),
2917 TYPE_MODE (sizetype));
2919 emit_library_call (bcopy_libfunc, 0,
2920 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2921 XEXP (to_rtx, 0), Pmode,
2922 convert_to_mode (TYPE_MODE (integer_type_node),
2923 size, TREE_UNSIGNED (integer_type_node)),
2924 TYPE_MODE (integer_type_node));
2927 preserve_temp_slots (to_rtx);
2930 return want_value ? to_rtx : NULL_RTX;
2933 /* Compute FROM and store the value in the rtx we got. */
2936 result = store_expr (from, to_rtx, want_value);
2937 preserve_temp_slots (result);
2940 return want_value ? result : NULL_RTX;
2943 /* Generate code for computing expression EXP,
2944 and storing the value into TARGET.
2945 TARGET may contain a QUEUED rtx.
2947 If WANT_VALUE is nonzero, return a copy of the value
2948 not in TARGET, so that we can be sure to use the proper
2949 value in a containing expression even if TARGET has something
2950 else stored in it. If possible, we copy the value through a pseudo
2951 and return that pseudo. Or, if the value is constant, we try to
2952 return the constant. In some cases, we return a pseudo
2953 copied *from* TARGET.
2955 If the mode is BLKmode then we may return TARGET itself.
2956 It turns out that in BLKmode it doesn't cause a problem.
2957 because C has no operators that could combine two different
2958 assignments into the same BLKmode object with different values
2959 with no sequence point. Will other languages need this to
2962 If WANT_VALUE is 0, we return NULL, to make sure
2963 to catch quickly any cases where the caller uses the value
2964 and fails to set WANT_VALUE. */
2967 store_expr (exp, target, want_value)
2969 register rtx target;
2973 int dont_return_target = 0;
2975 if (TREE_CODE (exp) == COMPOUND_EXPR)
2977 /* Perform first part of compound expression, then assign from second
2979 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2981 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
2983 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2985 /* For conditional expression, get safe form of the target. Then
2986 test the condition, doing the appropriate assignment on either
2987 side. This avoids the creation of unnecessary temporaries.
2988 For non-BLKmode, it is more efficient not to do this. */
2990 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2991 rtx flag = NULL_RTX;
2992 tree left_cleanups = NULL_TREE;
2993 tree right_cleanups = NULL_TREE;
2994 tree old_cleanups = cleanups_this_call;
2996 /* Used to save a pointer to the place to put the setting of
2997 the flag that indicates if this side of the conditional was
2998 taken. We backpatch the code, if we find out later that we
2999 have any conditional cleanups that need to be performed. */
3000 rtx dest_right_flag = NULL_RTX;
3001 rtx dest_left_flag = NULL_RTX;
3004 target = protect_from_queue (target, 1);
3006 do_pending_stack_adjust ();
3008 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3009 store_expr (TREE_OPERAND (exp, 1), target, 0);
3010 dest_left_flag = get_last_insn ();
3011 /* Handle conditional cleanups, if any. */
3012 left_cleanups = defer_cleanups_to (old_cleanups);
3014 emit_jump_insn (gen_jump (lab2));
3017 store_expr (TREE_OPERAND (exp, 2), target, 0);
3018 dest_right_flag = get_last_insn ();
3019 /* Handle conditional cleanups, if any. */
3020 right_cleanups = defer_cleanups_to (old_cleanups);
3025 /* Add back in any conditional cleanups. */
3026 if (left_cleanups || right_cleanups)
3032 /* Now that we know that a flag is needed, go back and add in the
3033 setting of the flag. */
3035 flag = gen_reg_rtx (word_mode);
3037 /* Do the left side flag. */
3038 last = get_last_insn ();
3039 /* Flag left cleanups as needed. */
3040 emit_move_insn (flag, const1_rtx);
3041 /* ??? deprecated, use sequences instead. */
3042 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
3044 /* Do the right side flag. */
3045 last = get_last_insn ();
3046 /* Flag left cleanups as needed. */
3047 emit_move_insn (flag, const0_rtx);
3048 /* ??? deprecated, use sequences instead. */
3049 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
3051 /* All cleanups must be on the function_obstack. */
3052 push_obstacks_nochange ();
3053 resume_temporary_allocation ();
3055 /* convert flag, which is an rtx, into a tree. */
3056 cond = make_node (RTL_EXPR);
3057 TREE_TYPE (cond) = integer_type_node;
3058 RTL_EXPR_RTL (cond) = flag;
3059 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
3060 cond = save_expr (cond);
3062 if (! left_cleanups)
3063 left_cleanups = integer_zero_node;
3064 if (! right_cleanups)
3065 right_cleanups = integer_zero_node;
3066 new_cleanups = build (COND_EXPR, void_type_node,
3067 truthvalue_conversion (cond),
3068 left_cleanups, right_cleanups);
3069 new_cleanups = fold (new_cleanups);
3073 /* Now add in the conditionalized cleanups. */
3075 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
3076 expand_eh_region_start ();
3078 return want_value ? target : NULL_RTX;
3080 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3081 && GET_MODE (target) != BLKmode)
3082 /* If target is in memory and caller wants value in a register instead,
3083 arrange that. Pass TARGET as target for expand_expr so that,
3084 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3085 We know expand_expr will not use the target in that case.
3086 Don't do this if TARGET is volatile because we are supposed
3087 to write it and then read it. */
3089 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3090 GET_MODE (target), 0);
3091 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3092 temp = copy_to_reg (temp);
3093 dont_return_target = 1;
3095 else if (queued_subexp_p (target))
3096 /* If target contains a postincrement, let's not risk
3097 using it as the place to generate the rhs. */
3099 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3101 /* Expand EXP into a new pseudo. */
3102 temp = gen_reg_rtx (GET_MODE (target));
3103 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3106 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3108 /* If target is volatile, ANSI requires accessing the value
3109 *from* the target, if it is accessed. So make that happen.
3110 In no case return the target itself. */
3111 if (! MEM_VOLATILE_P (target) && want_value)
3112 dont_return_target = 1;
3114 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3115 /* If this is an scalar in a register that is stored in a wider mode
3116 than the declared mode, compute the result into its declared mode
3117 and then convert to the wider mode. Our value is the computed
3120 /* If we don't want a value, we can do the conversion inside EXP,
3121 which will often result in some optimizations. Do the conversion
3122 in two steps: first change the signedness, if needed, then
3126 if (TREE_UNSIGNED (TREE_TYPE (exp))
3127 != SUBREG_PROMOTED_UNSIGNED_P (target))
3130 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3134 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3135 SUBREG_PROMOTED_UNSIGNED_P (target)),
3139 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3141 /* If TEMP is a volatile MEM and we want a result value, make
3142 the access now so it gets done only once. Likewise if
3143 it contains TARGET. */
3144 if (GET_CODE (temp) == MEM && want_value
3145 && (MEM_VOLATILE_P (temp)
3146 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3147 temp = copy_to_reg (temp);
3149 /* If TEMP is a VOIDmode constant, use convert_modes to make
3150 sure that we properly convert it. */
3151 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3152 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3153 TYPE_MODE (TREE_TYPE (exp)), temp,
3154 SUBREG_PROMOTED_UNSIGNED_P (target));
3156 convert_move (SUBREG_REG (target), temp,
3157 SUBREG_PROMOTED_UNSIGNED_P (target));
3158 return want_value ? temp : NULL_RTX;
3162 temp = expand_expr (exp, target, GET_MODE (target), 0);
3163 /* Return TARGET if it's a specified hardware register.
3164 If TARGET is a volatile mem ref, either return TARGET
3165 or return a reg copied *from* TARGET; ANSI requires this.
3167 Otherwise, if TEMP is not TARGET, return TEMP
3168 if it is constant (for efficiency),
3169 or if we really want the correct value. */
3170 if (!(target && GET_CODE (target) == REG
3171 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3172 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3174 && (CONSTANT_P (temp) || want_value))
3175 dont_return_target = 1;
3178 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3179 the same as that of TARGET, adjust the constant. This is needed, for
3180 example, in case it is a CONST_DOUBLE and we want only a word-sized
3182 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3183 && TREE_CODE (exp) != ERROR_MARK
3184 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3185 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3186 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3188 /* If value was not generated in the target, store it there.
3189 Convert the value to TARGET's type first if nec. */
3191 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
3193 target = protect_from_queue (target, 1);
3194 if (GET_MODE (temp) != GET_MODE (target)
3195 && GET_MODE (temp) != VOIDmode)
3197 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3198 if (dont_return_target)
3200 /* In this case, we will return TEMP,
3201 so make sure it has the proper mode.
3202 But don't forget to store the value into TARGET. */
3203 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3204 emit_move_insn (target, temp);
3207 convert_move (target, temp, unsignedp);
3210 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3212 /* Handle copying a string constant into an array.
3213 The string constant may be shorter than the array.
3214 So copy just the string's actual length, and clear the rest. */
3218 /* Get the size of the data type of the string,
3219 which is actually the size of the target. */
3220 size = expr_size (exp);
3221 if (GET_CODE (size) == CONST_INT
3222 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3223 emit_block_move (target, temp, size,
3224 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3227 /* Compute the size of the data to copy from the string. */
3229 = size_binop (MIN_EXPR,
3230 make_tree (sizetype, size),
3232 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3233 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3237 /* Copy that much. */
3238 emit_block_move (target, temp, copy_size_rtx,
3239 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3241 /* Figure out how much is left in TARGET that we have to clear.
3242 Do all calculations in ptr_mode. */
3244 addr = XEXP (target, 0);
3245 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3247 if (GET_CODE (copy_size_rtx) == CONST_INT)
3249 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3250 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3254 addr = force_reg (ptr_mode, addr);
3255 addr = expand_binop (ptr_mode, add_optab, addr,
3256 copy_size_rtx, NULL_RTX, 0,
3259 size = expand_binop (ptr_mode, sub_optab, size,
3260 copy_size_rtx, NULL_RTX, 0,
3263 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
3264 GET_MODE (size), 0, 0);
3265 label = gen_label_rtx ();
3266 emit_jump_insn (gen_blt (label));
3269 if (size != const0_rtx)
3271 #ifdef TARGET_MEM_FUNCTIONS
3272 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3274 const0_rtx, TYPE_MODE (integer_type_node),
3275 convert_to_mode (TYPE_MODE (sizetype),
3277 TREE_UNSIGNED (sizetype)),
3278 TYPE_MODE (sizetype));
3280 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3282 convert_to_mode (TYPE_MODE (integer_type_node),
3284 TREE_UNSIGNED (integer_type_node)),
3285 TYPE_MODE (integer_type_node));
3293 /* Handle calls that return values in multiple non-contiguous locations.
3294 The Irix 6 ABI has examples of this. */
3295 else if (GET_CODE (target) == PARALLEL)
3296 emit_group_load (target, temp);
3297 else if (GET_MODE (temp) == BLKmode)
3298 emit_block_move (target, temp, expr_size (exp),
3299 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3301 emit_move_insn (target, temp);
3304 /* If we don't want a value, return NULL_RTX. */
3308 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3309 ??? The latter test doesn't seem to make sense. */
3310 else if (dont_return_target && GET_CODE (temp) != MEM)
3313 /* Return TARGET itself if it is a hard register. */
3314 else if (want_value && GET_MODE (target) != BLKmode
3315 && ! (GET_CODE (target) == REG
3316 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3317 return copy_to_reg (target);
3323 /* Return 1 if EXP just contains zeros. */
3331 switch (TREE_CODE (exp))
3335 case NON_LVALUE_EXPR:
3336 return is_zeros_p (TREE_OPERAND (exp, 0));
3339 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3343 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3346 return REAL_VALUES_EQUAL (TREE_REAL_CST (exp), dconst0);
3349 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3350 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3351 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3352 if (! is_zeros_p (TREE_VALUE (elt)))
3361 /* Return 1 if EXP contains mostly (3/4) zeros. */
3364 mostly_zeros_p (exp)
3367 if (TREE_CODE (exp) == CONSTRUCTOR)
3369 int elts = 0, zeros = 0;
3370 tree elt = CONSTRUCTOR_ELTS (exp);
3371 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3373 /* If there are no ranges of true bits, it is all zero. */
3374 return elt == NULL_TREE;
3376 for (; elt; elt = TREE_CHAIN (elt))
3378 /* We do not handle the case where the index is a RANGE_EXPR,
3379 so the statistic will be somewhat inaccurate.
3380 We do make a more accurate count in store_constructor itself,
3381 so since this function is only used for nested array elements,
3382 this should be close enough. */
3383 if (mostly_zeros_p (TREE_VALUE (elt)))
3388 return 4 * zeros >= 3 * elts;
3391 return is_zeros_p (exp);
3394 /* Helper function for store_constructor.
3395 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3396 TYPE is the type of the CONSTRUCTOR, not the element type.
3397 CLEARED is as for store_constructor.
3399 This provides a recursive shortcut back to store_constructor when it isn't
3400 necessary to go through store_field. This is so that we can pass through
3401 the cleared field to let store_constructor know that we may not have to
3402 clear a substructure if the outer structure has already been cleared. */
3405 store_constructor_field (target, bitsize, bitpos,
3406 mode, exp, type, cleared)
3408 int bitsize, bitpos;
3409 enum machine_mode mode;
3413 if (TREE_CODE (exp) == CONSTRUCTOR
3414 && bitpos % BITS_PER_UNIT == 0
3415 /* If we have a non-zero bitpos for a register target, then we just
3416 let store_field do the bitfield handling. This is unlikely to
3417 generate unnecessary clear instructions anyways. */
3418 && (bitpos == 0 || GET_CODE (target) == MEM))
3421 target = change_address (target, VOIDmode,
3422 plus_constant (XEXP (target, 0),
3423 bitpos / BITS_PER_UNIT));
3424 store_constructor (exp, target, cleared);
3427 store_field (target, bitsize, bitpos, mode, exp,
3428 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3429 int_size_in_bytes (type));
3432 /* Store the value of constructor EXP into the rtx TARGET.
3433 TARGET is either a REG or a MEM.
3434 CLEARED is true if TARGET is known to have been zero'd. */
3437 store_constructor (exp, target, cleared)
3442 tree type = TREE_TYPE (exp);
3444 /* We know our target cannot conflict, since safe_from_p has been called. */
3446 /* Don't try copying piece by piece into a hard register
3447 since that is vulnerable to being clobbered by EXP.
3448 Instead, construct in a pseudo register and then copy it all. */
3449 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3451 rtx temp = gen_reg_rtx (GET_MODE (target));
3452 store_constructor (exp, temp, 0);
3453 emit_move_insn (target, temp);
3458 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3459 || TREE_CODE (type) == QUAL_UNION_TYPE)
3463 /* Inform later passes that the whole union value is dead. */
3464 if (TREE_CODE (type) == UNION_TYPE
3465 || TREE_CODE (type) == QUAL_UNION_TYPE)
3466 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3468 /* If we are building a static constructor into a register,
3469 set the initial value as zero so we can fold the value into
3470 a constant. But if more than one register is involved,
3471 this probably loses. */
3472 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3473 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
3476 emit_move_insn (target, const0_rtx);
3481 /* If the constructor has fewer fields than the structure
3482 or if we are initializing the structure to mostly zeros,
3483 clear the whole structure first. */
3484 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3485 != list_length (TYPE_FIELDS (type)))
3486 || mostly_zeros_p (exp))
3489 clear_storage (target, expr_size (exp),
3490 TYPE_ALIGN (type) / BITS_PER_UNIT);
3495 /* Inform later passes that the old value is dead. */
3496 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3498 /* Store each element of the constructor into
3499 the corresponding field of TARGET. */
3501 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3503 register tree field = TREE_PURPOSE (elt);
3504 register enum machine_mode mode;
3508 tree pos, constant = 0, offset = 0;
3509 rtx to_rtx = target;
3511 /* Just ignore missing fields.
3512 We cleared the whole structure, above,
3513 if any fields are missing. */
3517 if (cleared && is_zeros_p (TREE_VALUE (elt)))
3520 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3521 unsignedp = TREE_UNSIGNED (field);
3522 mode = DECL_MODE (field);
3523 if (DECL_BIT_FIELD (field))
3526 pos = DECL_FIELD_BITPOS (field);
3527 if (TREE_CODE (pos) == INTEGER_CST)
3529 else if (TREE_CODE (pos) == PLUS_EXPR
3530 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3531 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3536 bitpos = TREE_INT_CST_LOW (constant);
3542 if (contains_placeholder_p (offset))
3543 offset = build (WITH_RECORD_EXPR, sizetype,
3546 offset = size_binop (FLOOR_DIV_EXPR, offset,
3547 size_int (BITS_PER_UNIT));
3549 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3550 if (GET_CODE (to_rtx) != MEM)
3554 = change_address (to_rtx, VOIDmode,
3555 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
3556 force_reg (ptr_mode, offset_rtx)));
3558 if (TREE_READONLY (field))
3560 if (GET_CODE (to_rtx) == MEM)
3561 to_rtx = change_address (to_rtx, GET_MODE (to_rtx),
3563 RTX_UNCHANGING_P (to_rtx) = 1;
3566 store_constructor_field (to_rtx, bitsize, bitpos,
3567 mode, TREE_VALUE (elt), type, cleared);
3570 else if (TREE_CODE (type) == ARRAY_TYPE)
3575 tree domain = TYPE_DOMAIN (type);
3576 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3577 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3578 tree elttype = TREE_TYPE (type);
3580 /* If the constructor has fewer elements than the array,
3581 clear the whole array first. Similarly if this this is
3582 static constructor of a non-BLKmode object. */
3583 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3587 HOST_WIDE_INT count = 0, zero_count = 0;
3589 /* This loop is a more accurate version of the loop in
3590 mostly_zeros_p (it handles RANGE_EXPR in an index).
3591 It is also needed to check for missing elements. */
3592 for (elt = CONSTRUCTOR_ELTS (exp);
3594 elt = TREE_CHAIN (elt), i++)
3596 tree index = TREE_PURPOSE (elt);
3597 HOST_WIDE_INT this_node_count;
3598 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3600 tree lo_index = TREE_OPERAND (index, 0);
3601 tree hi_index = TREE_OPERAND (index, 1);
3602 if (TREE_CODE (lo_index) != INTEGER_CST
3603 || TREE_CODE (hi_index) != INTEGER_CST)
3608 this_node_count = TREE_INT_CST_LOW (hi_index)
3609 - TREE_INT_CST_LOW (lo_index) + 1;
3612 this_node_count = 1;
3613 count += this_node_count;
3614 if (mostly_zeros_p (TREE_VALUE (elt)))
3615 zero_count += this_node_count;
3617 /* Clear the entire array first if there are any missing elements,
3618 or if the incidence of zero elements is >= 75%. */
3619 if (count < maxelt - minelt + 1
3620 || 4 * zero_count >= 3 * count)
3626 clear_storage (target, expr_size (exp),
3627 TYPE_ALIGN (type) / BITS_PER_UNIT);
3631 /* Inform later passes that the old value is dead. */
3632 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3634 /* Store each element of the constructor into
3635 the corresponding element of TARGET, determined
3636 by counting the elements. */
3637 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3639 elt = TREE_CHAIN (elt), i++)
3641 register enum machine_mode mode;
3645 tree value = TREE_VALUE (elt);
3646 tree index = TREE_PURPOSE (elt);
3647 rtx xtarget = target;
3649 if (cleared && is_zeros_p (value))
3652 mode = TYPE_MODE (elttype);
3653 bitsize = GET_MODE_BITSIZE (mode);
3654 unsignedp = TREE_UNSIGNED (elttype);
3656 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3658 tree lo_index = TREE_OPERAND (index, 0);
3659 tree hi_index = TREE_OPERAND (index, 1);
3660 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
3661 struct nesting *loop;
3662 HOST_WIDE_INT lo, hi, count;
3665 /* If the range is constant and "small", unroll the loop. */
3666 if (TREE_CODE (lo_index) == INTEGER_CST
3667 && TREE_CODE (hi_index) == INTEGER_CST
3668 && (lo = TREE_INT_CST_LOW (lo_index),
3669 hi = TREE_INT_CST_LOW (hi_index),
3670 count = hi - lo + 1,
3671 (GET_CODE (target) != MEM
3673 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
3674 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
3677 lo -= minelt; hi -= minelt;
3678 for (; lo <= hi; lo++)
3680 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
3681 store_constructor_field (target, bitsize, bitpos,
3682 mode, value, type, cleared);
3687 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
3688 loop_top = gen_label_rtx ();
3689 loop_end = gen_label_rtx ();
3691 unsignedp = TREE_UNSIGNED (domain);
3693 index = build_decl (VAR_DECL, NULL_TREE, domain);
3695 DECL_RTL (index) = index_r
3696 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
3699 if (TREE_CODE (value) == SAVE_EXPR
3700 && SAVE_EXPR_RTL (value) == 0)
3702 /* Make sure value gets expanded once before the
3704 expand_expr (value, const0_rtx, VOIDmode, 0);
3707 store_expr (lo_index, index_r, 0);
3708 loop = expand_start_loop (0);
3710 /* Assign value to element index. */
3711 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3712 size_int (BITS_PER_UNIT));
3713 position = size_binop (MULT_EXPR,
3714 size_binop (MINUS_EXPR, index,
3715 TYPE_MIN_VALUE (domain)),
3717 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3718 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3719 xtarget = change_address (target, mode, addr);
3720 if (TREE_CODE (value) == CONSTRUCTOR)
3721 store_constructor (value, xtarget, cleared);
3723 store_expr (value, xtarget, 0);
3725 expand_exit_loop_if_false (loop,
3726 build (LT_EXPR, integer_type_node,
3729 expand_increment (build (PREINCREMENT_EXPR,
3731 index, integer_one_node), 0, 0);
3733 emit_label (loop_end);
3735 /* Needed by stupid register allocation. to extend the
3736 lifetime of pseudo-regs used by target past the end
3738 emit_insn (gen_rtx (USE, GET_MODE (target), target));
3741 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
3742 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
3748 index = size_int (i);
3751 index = size_binop (MINUS_EXPR, index,
3752 TYPE_MIN_VALUE (domain));
3753 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3754 size_int (BITS_PER_UNIT));
3755 position = size_binop (MULT_EXPR, index, position);
3756 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3757 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3758 xtarget = change_address (target, mode, addr);
3759 store_expr (value, xtarget, 0);
3764 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
3765 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3767 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3768 store_constructor_field (target, bitsize, bitpos,
3769 mode, value, type, cleared);
3773 /* set constructor assignments */
3774 else if (TREE_CODE (type) == SET_TYPE)
3776 tree elt = CONSTRUCTOR_ELTS (exp);
3777 rtx xtarget = XEXP (target, 0);
3778 int set_word_size = TYPE_ALIGN (type);
3779 int nbytes = int_size_in_bytes (type), nbits;
3780 tree domain = TYPE_DOMAIN (type);
3781 tree domain_min, domain_max, bitlength;
3783 /* The default implementation strategy is to extract the constant
3784 parts of the constructor, use that to initialize the target,
3785 and then "or" in whatever non-constant ranges we need in addition.
3787 If a large set is all zero or all ones, it is
3788 probably better to set it using memset (if available) or bzero.
3789 Also, if a large set has just a single range, it may also be
3790 better to first clear all the first clear the set (using
3791 bzero/memset), and set the bits we want. */
3793 /* Check for all zeros. */
3794 if (elt == NULL_TREE)
3797 clear_storage (target, expr_size (exp),
3798 TYPE_ALIGN (type) / BITS_PER_UNIT);
3802 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3803 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3804 bitlength = size_binop (PLUS_EXPR,
3805 size_binop (MINUS_EXPR, domain_max, domain_min),
3808 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
3810 nbits = TREE_INT_CST_LOW (bitlength);
3812 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
3813 are "complicated" (more than one range), initialize (the
3814 constant parts) by copying from a constant. */
3815 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
3816 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
3818 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3819 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
3820 char *bit_buffer = (char *) alloca (nbits);
3821 HOST_WIDE_INT word = 0;
3824 int offset = 0; /* In bytes from beginning of set. */
3825 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
3828 if (bit_buffer[ibit])
3830 if (BYTES_BIG_ENDIAN)
3831 word |= (1 << (set_word_size - 1 - bit_pos));
3833 word |= 1 << bit_pos;
3836 if (bit_pos >= set_word_size || ibit == nbits)
3838 if (word != 0 || ! cleared)
3840 rtx datum = GEN_INT (word);
3842 /* The assumption here is that it is safe to use
3843 XEXP if the set is multi-word, but not if
3844 it's single-word. */
3845 if (GET_CODE (target) == MEM)
3847 to_rtx = plus_constant (XEXP (target, 0), offset);
3848 to_rtx = change_address (target, mode, to_rtx);
3850 else if (offset == 0)
3854 emit_move_insn (to_rtx, datum);
3860 offset += set_word_size / BITS_PER_UNIT;
3866 /* Don't bother clearing storage if the set is all ones. */
3867 if (TREE_CHAIN (elt) != NULL_TREE
3868 || (TREE_PURPOSE (elt) == NULL_TREE
3870 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
3871 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
3872 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
3873 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
3875 clear_storage (target, expr_size (exp),
3876 TYPE_ALIGN (type) / BITS_PER_UNIT);
3879 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
3881 /* start of range of element or NULL */
3882 tree startbit = TREE_PURPOSE (elt);
3883 /* end of range of element, or element value */
3884 tree endbit = TREE_VALUE (elt);
3885 HOST_WIDE_INT startb, endb;
3886 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
3888 bitlength_rtx = expand_expr (bitlength,
3889 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
3891 /* handle non-range tuple element like [ expr ] */
3892 if (startbit == NULL_TREE)
3894 startbit = save_expr (endbit);
3897 startbit = convert (sizetype, startbit);
3898 endbit = convert (sizetype, endbit);
3899 if (! integer_zerop (domain_min))
3901 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
3902 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
3904 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
3905 EXPAND_CONST_ADDRESS);
3906 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
3907 EXPAND_CONST_ADDRESS);
3911 targetx = assign_stack_temp (GET_MODE (target),
3912 GET_MODE_SIZE (GET_MODE (target)),
3914 emit_move_insn (targetx, target);
3916 else if (GET_CODE (target) == MEM)
3921 #ifdef TARGET_MEM_FUNCTIONS
3922 /* Optimization: If startbit and endbit are
3923 constants divisible by BITS_PER_UNIT,
3924 call memset instead. */
3925 if (TREE_CODE (startbit) == INTEGER_CST
3926 && TREE_CODE (endbit) == INTEGER_CST
3927 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
3928 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
3930 emit_library_call (memset_libfunc, 0,
3932 plus_constant (XEXP (targetx, 0),
3933 startb / BITS_PER_UNIT),
3935 constm1_rtx, TYPE_MODE (integer_type_node),
3936 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3937 TYPE_MODE (sizetype));
3942 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
3943 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
3944 bitlength_rtx, TYPE_MODE (sizetype),
3945 startbit_rtx, TYPE_MODE (sizetype),
3946 endbit_rtx, TYPE_MODE (sizetype));
3949 emit_move_insn (target, targetx);
3957 /* Store the value of EXP (an expression tree)
3958 into a subfield of TARGET which has mode MODE and occupies
3959 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3960 If MODE is VOIDmode, it means that we are storing into a bit-field.
3962 If VALUE_MODE is VOIDmode, return nothing in particular.
3963 UNSIGNEDP is not used in this case.
3965 Otherwise, return an rtx for the value stored. This rtx
3966 has mode VALUE_MODE if that is convenient to do.
3967 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3969 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3970 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
3973 store_field (target, bitsize, bitpos, mode, exp, value_mode,
3974 unsignedp, align, total_size)
3976 int bitsize, bitpos;
3977 enum machine_mode mode;
3979 enum machine_mode value_mode;
3984 HOST_WIDE_INT width_mask = 0;
3986 if (bitsize < HOST_BITS_PER_WIDE_INT)
3987 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
3989 /* If we are storing into an unaligned field of an aligned union that is
3990 in a register, we may have the mode of TARGET being an integer mode but
3991 MODE == BLKmode. In that case, get an aligned object whose size and
3992 alignment are the same as TARGET and store TARGET into it (we can avoid
3993 the store if the field being stored is the entire width of TARGET). Then
3994 call ourselves recursively to store the field into a BLKmode version of
3995 that object. Finally, load from the object into TARGET. This is not
3996 very efficient in general, but should only be slightly more expensive
3997 than the otherwise-required unaligned accesses. Perhaps this can be
3998 cleaned up later. */
4001 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4003 rtx object = assign_stack_temp (GET_MODE (target),
4004 GET_MODE_SIZE (GET_MODE (target)), 0);
4005 rtx blk_object = copy_rtx (object);
4007 MEM_IN_STRUCT_P (object) = 1;
4008 MEM_IN_STRUCT_P (blk_object) = 1;
4009 PUT_MODE (blk_object, BLKmode);
4011 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4012 emit_move_insn (object, target);
4014 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4017 /* Even though we aren't returning target, we need to
4018 give it the updated value. */
4019 emit_move_insn (target, object);
4024 /* If the structure is in a register or if the component
4025 is a bit field, we cannot use addressing to access it.
4026 Use bit-field techniques or SUBREG to store in it. */
4028 if (mode == VOIDmode
4029 || (mode != BLKmode && ! direct_store[(int) mode])
4030 || GET_CODE (target) == REG
4031 || GET_CODE (target) == SUBREG
4032 /* If the field isn't aligned enough to store as an ordinary memref,
4033 store it as a bit field. */
4034 || (SLOW_UNALIGNED_ACCESS
4035 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4036 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4038 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4040 /* If BITSIZE is narrower than the size of the type of EXP
4041 we will be narrowing TEMP. Normally, what's wanted are the
4042 low-order bits. However, if EXP's type is a record and this is
4043 big-endian machine, we want the upper BITSIZE bits. */
4044 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4045 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4046 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4047 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4048 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4052 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4054 if (mode != VOIDmode && mode != BLKmode
4055 && mode != TYPE_MODE (TREE_TYPE (exp)))
4056 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4058 /* If the modes of TARGET and TEMP are both BLKmode, both
4059 must be in memory and BITPOS must be aligned on a byte
4060 boundary. If so, we simply do a block copy. */
4061 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4063 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4064 || bitpos % BITS_PER_UNIT != 0)
4067 target = change_address (target, VOIDmode,
4068 plus_constant (XEXP (target, 0),
4069 bitpos / BITS_PER_UNIT));
4071 emit_block_move (target, temp,
4072 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4076 return value_mode == VOIDmode ? const0_rtx : target;
4079 /* Store the value in the bitfield. */
4080 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4081 if (value_mode != VOIDmode)
4083 /* The caller wants an rtx for the value. */
4084 /* If possible, avoid refetching from the bitfield itself. */
4086 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4089 enum machine_mode tmode;
4092 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4093 tmode = GET_MODE (temp);
4094 if (tmode == VOIDmode)
4096 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4097 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4098 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4100 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4101 NULL_RTX, value_mode, 0, align,
4108 rtx addr = XEXP (target, 0);
4111 /* If a value is wanted, it must be the lhs;
4112 so make the address stable for multiple use. */
4114 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4115 && ! CONSTANT_ADDRESS_P (addr)
4116 /* A frame-pointer reference is already stable. */
4117 && ! (GET_CODE (addr) == PLUS
4118 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4119 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4120 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4121 addr = copy_to_reg (addr);
4123 /* Now build a reference to just the desired component. */
4125 to_rtx = change_address (target, mode,
4126 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
4127 MEM_IN_STRUCT_P (to_rtx) = 1;
4129 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4133 /* Return true if any object containing the innermost array is an unaligned
4134 packed structure field. */
4137 get_inner_unaligned_p (exp)
4140 int needed_alignment = TYPE_ALIGN (TREE_TYPE (exp));
4144 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4146 if (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
4150 else if (TREE_CODE (exp) != ARRAY_REF
4151 && TREE_CODE (exp) != NON_LVALUE_EXPR
4152 && ! ((TREE_CODE (exp) == NOP_EXPR
4153 || TREE_CODE (exp) == CONVERT_EXPR)
4154 && (TYPE_MODE (TREE_TYPE (exp))
4155 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4158 exp = TREE_OPERAND (exp, 0);
4164 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4165 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4166 ARRAY_REFs and find the ultimate containing object, which we return.
4168 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4169 bit position, and *PUNSIGNEDP to the signedness of the field.
4170 If the position of the field is variable, we store a tree
4171 giving the variable offset (in units) in *POFFSET.
4172 This offset is in addition to the bit position.
4173 If the position is not variable, we store 0 in *POFFSET.
4175 If any of the extraction expressions is volatile,
4176 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4178 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4179 is a mode that can be used to access the field. In that case, *PBITSIZE
4182 If the field describes a variable-sized object, *PMODE is set to
4183 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4184 this case, but the address of the object can be found. */
4187 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4188 punsignedp, pvolatilep)
4193 enum machine_mode *pmode;
4197 tree orig_exp = exp;
4199 enum machine_mode mode = VOIDmode;
4200 tree offset = integer_zero_node;
4202 if (TREE_CODE (exp) == COMPONENT_REF)
4204 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4205 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4206 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4207 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4209 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4211 size_tree = TREE_OPERAND (exp, 1);
4212 *punsignedp = TREE_UNSIGNED (exp);
4216 mode = TYPE_MODE (TREE_TYPE (exp));
4217 *pbitsize = GET_MODE_BITSIZE (mode);
4218 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4223 if (TREE_CODE (size_tree) != INTEGER_CST)
4224 mode = BLKmode, *pbitsize = -1;
4226 *pbitsize = TREE_INT_CST_LOW (size_tree);
4229 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4230 and find the ultimate containing object. */
4236 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4238 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4239 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4240 : TREE_OPERAND (exp, 2));
4241 tree constant = integer_zero_node, var = pos;
4243 /* If this field hasn't been filled in yet, don't go
4244 past it. This should only happen when folding expressions
4245 made during type construction. */
4249 /* Assume here that the offset is a multiple of a unit.
4250 If not, there should be an explicitly added constant. */
4251 if (TREE_CODE (pos) == PLUS_EXPR
4252 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4253 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4254 else if (TREE_CODE (pos) == INTEGER_CST)
4255 constant = pos, var = integer_zero_node;
4257 *pbitpos += TREE_INT_CST_LOW (constant);
4258 offset = size_binop (PLUS_EXPR, offset,
4259 size_binop (EXACT_DIV_EXPR, var,
4260 size_int (BITS_PER_UNIT)));
4263 else if (TREE_CODE (exp) == ARRAY_REF)
4265 /* This code is based on the code in case ARRAY_REF in expand_expr
4266 below. We assume here that the size of an array element is
4267 always an integral multiple of BITS_PER_UNIT. */
4269 tree index = TREE_OPERAND (exp, 1);
4270 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4272 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4273 tree index_type = TREE_TYPE (index);
4275 if (! integer_zerop (low_bound))
4276 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
4278 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4280 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4282 index_type = TREE_TYPE (index);
4285 index = fold (build (MULT_EXPR, index_type, index,
4286 TYPE_SIZE (TREE_TYPE (exp))));
4288 if (TREE_CODE (index) == INTEGER_CST
4289 && TREE_INT_CST_HIGH (index) == 0)
4290 *pbitpos += TREE_INT_CST_LOW (index);
4292 offset = size_binop (PLUS_EXPR, offset,
4293 size_binop (FLOOR_DIV_EXPR, index,
4294 size_int (BITS_PER_UNIT)));
4296 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4297 && ! ((TREE_CODE (exp) == NOP_EXPR
4298 || TREE_CODE (exp) == CONVERT_EXPR)
4299 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4300 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4302 && (TYPE_MODE (TREE_TYPE (exp))
4303 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4306 /* If any reference in the chain is volatile, the effect is volatile. */
4307 if (TREE_THIS_VOLATILE (exp))
4309 exp = TREE_OPERAND (exp, 0);
4312 if (integer_zerop (offset))
4315 if (offset != 0 && contains_placeholder_p (offset))
4316 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4323 /* Given an rtx VALUE that may contain additions and multiplications,
4324 return an equivalent value that just refers to a register or memory.
4325 This is done by generating instructions to perform the arithmetic
4326 and returning a pseudo-register containing the value.
4328 The returned value may be a REG, SUBREG, MEM or constant. */
4331 force_operand (value, target)
4334 register optab binoptab = 0;
4335 /* Use a temporary to force order of execution of calls to
4339 /* Use subtarget as the target for operand 0 of a binary operation. */
4340 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4342 if (GET_CODE (value) == PLUS)
4343 binoptab = add_optab;
4344 else if (GET_CODE (value) == MINUS)
4345 binoptab = sub_optab;
4346 else if (GET_CODE (value) == MULT)
4348 op2 = XEXP (value, 1);
4349 if (!CONSTANT_P (op2)
4350 && !(GET_CODE (op2) == REG && op2 != subtarget))
4352 tmp = force_operand (XEXP (value, 0), subtarget);
4353 return expand_mult (GET_MODE (value), tmp,
4354 force_operand (op2, NULL_RTX),
4360 op2 = XEXP (value, 1);
4361 if (!CONSTANT_P (op2)
4362 && !(GET_CODE (op2) == REG && op2 != subtarget))
4364 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4366 binoptab = add_optab;
4367 op2 = negate_rtx (GET_MODE (value), op2);
4370 /* Check for an addition with OP2 a constant integer and our first
4371 operand a PLUS of a virtual register and something else. In that
4372 case, we want to emit the sum of the virtual register and the
4373 constant first and then add the other value. This allows virtual
4374 register instantiation to simply modify the constant rather than
4375 creating another one around this addition. */
4376 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4377 && GET_CODE (XEXP (value, 0)) == PLUS
4378 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4379 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4380 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4382 rtx temp = expand_binop (GET_MODE (value), binoptab,
4383 XEXP (XEXP (value, 0), 0), op2,
4384 subtarget, 0, OPTAB_LIB_WIDEN);
4385 return expand_binop (GET_MODE (value), binoptab, temp,
4386 force_operand (XEXP (XEXP (value, 0), 1), 0),
4387 target, 0, OPTAB_LIB_WIDEN);
4390 tmp = force_operand (XEXP (value, 0), subtarget);
4391 return expand_binop (GET_MODE (value), binoptab, tmp,
4392 force_operand (op2, NULL_RTX),
4393 target, 0, OPTAB_LIB_WIDEN);
4394 /* We give UNSIGNEDP = 0 to expand_binop
4395 because the only operations we are expanding here are signed ones. */
4400 /* Subroutine of expand_expr:
4401 save the non-copied parts (LIST) of an expr (LHS), and return a list
4402 which can restore these values to their previous values,
4403 should something modify their storage. */
4406 save_noncopied_parts (lhs, list)
4413 for (tail = list; tail; tail = TREE_CHAIN (tail))
4414 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4415 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
4418 tree part = TREE_VALUE (tail);
4419 tree part_type = TREE_TYPE (part);
4420 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
4421 rtx target = assign_temp (part_type, 0, 1, 1);
4422 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
4423 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
4424 parts = tree_cons (to_be_saved,
4425 build (RTL_EXPR, part_type, NULL_TREE,
4428 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
4433 /* Subroutine of expand_expr:
4434 record the non-copied parts (LIST) of an expr (LHS), and return a list
4435 which specifies the initial values of these parts. */
4438 init_noncopied_parts (lhs, list)
4445 for (tail = list; tail; tail = TREE_CHAIN (tail))
4446 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4447 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
4450 tree part = TREE_VALUE (tail);
4451 tree part_type = TREE_TYPE (part);
4452 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
4453 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
4458 /* Subroutine of expand_expr: return nonzero iff there is no way that
4459 EXP can reference X, which is being modified. */
4462 safe_from_p (x, exp)
4470 /* If EXP has varying size, we MUST use a target since we currently
4471 have no way of allocating temporaries of variable size
4472 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
4473 So we assume here that something at a higher level has prevented a
4474 clash. This is somewhat bogus, but the best we can do. Only
4475 do this when X is BLKmode. */
4476 || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4477 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
4478 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
4479 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
4480 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
4482 && GET_MODE (x) == BLKmode))
4485 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
4486 find the underlying pseudo. */
4487 if (GET_CODE (x) == SUBREG)
4490 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4494 /* If X is a location in the outgoing argument area, it is always safe. */
4495 if (GET_CODE (x) == MEM
4496 && (XEXP (x, 0) == virtual_outgoing_args_rtx
4497 || (GET_CODE (XEXP (x, 0)) == PLUS
4498 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
4501 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4504 exp_rtl = DECL_RTL (exp);
4511 if (TREE_CODE (exp) == TREE_LIST)
4512 return ((TREE_VALUE (exp) == 0
4513 || safe_from_p (x, TREE_VALUE (exp)))
4514 && (TREE_CHAIN (exp) == 0
4515 || safe_from_p (x, TREE_CHAIN (exp))));
4520 return safe_from_p (x, TREE_OPERAND (exp, 0));
4524 return (safe_from_p (x, TREE_OPERAND (exp, 0))
4525 && safe_from_p (x, TREE_OPERAND (exp, 1)));
4529 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
4530 the expression. If it is set, we conflict iff we are that rtx or
4531 both are in memory. Otherwise, we check all operands of the
4532 expression recursively. */
4534 switch (TREE_CODE (exp))
4537 return (staticp (TREE_OPERAND (exp, 0))
4538 || safe_from_p (x, TREE_OPERAND (exp, 0)));
4541 if (GET_CODE (x) == MEM)
4546 exp_rtl = CALL_EXPR_RTL (exp);
4549 /* Assume that the call will clobber all hard registers and
4551 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4552 || GET_CODE (x) == MEM)
4559 /* If a sequence exists, we would have to scan every instruction
4560 in the sequence to see if it was safe. This is probably not
4562 if (RTL_EXPR_SEQUENCE (exp))
4565 exp_rtl = RTL_EXPR_RTL (exp);
4568 case WITH_CLEANUP_EXPR:
4569 exp_rtl = RTL_EXPR_RTL (exp);
4572 case CLEANUP_POINT_EXPR:
4573 return safe_from_p (x, TREE_OPERAND (exp, 0));
4576 exp_rtl = SAVE_EXPR_RTL (exp);
4580 /* The only operand we look at is operand 1. The rest aren't
4581 part of the expression. */
4582 return safe_from_p (x, TREE_OPERAND (exp, 1));
4584 case METHOD_CALL_EXPR:
4585 /* This takes a rtx argument, but shouldn't appear here. */
4589 /* If we have an rtx, we do not need to scan our operands. */
4593 nops = tree_code_length[(int) TREE_CODE (exp)];
4594 for (i = 0; i < nops; i++)
4595 if (TREE_OPERAND (exp, i) != 0
4596 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
4600 /* If we have an rtl, find any enclosed object. Then see if we conflict
4604 if (GET_CODE (exp_rtl) == SUBREG)
4606 exp_rtl = SUBREG_REG (exp_rtl);
4607 if (GET_CODE (exp_rtl) == REG
4608 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
4612 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
4613 are memory and EXP is not readonly. */
4614 return ! (rtx_equal_p (x, exp_rtl)
4615 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
4616 && ! TREE_READONLY (exp)));
4619 /* If we reach here, it is safe. */
4623 /* Subroutine of expand_expr: return nonzero iff EXP is an
4624 expression whose type is statically determinable. */
4630 if (TREE_CODE (exp) == PARM_DECL
4631 || TREE_CODE (exp) == VAR_DECL
4632 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
4633 || TREE_CODE (exp) == COMPONENT_REF
4634 || TREE_CODE (exp) == ARRAY_REF)
4639 /* expand_expr: generate code for computing expression EXP.
4640 An rtx for the computed value is returned. The value is never null.
4641 In the case of a void EXP, const0_rtx is returned.
4643 The value may be stored in TARGET if TARGET is nonzero.
4644 TARGET is just a suggestion; callers must assume that
4645 the rtx returned may not be the same as TARGET.
4647 If TARGET is CONST0_RTX, it means that the value will be ignored.
4649 If TMODE is not VOIDmode, it suggests generating the
4650 result in mode TMODE. But this is done only when convenient.
4651 Otherwise, TMODE is ignored and the value generated in its natural mode.
4652 TMODE is just a suggestion; callers must assume that
4653 the rtx returned may not have mode TMODE.
4655 Note that TARGET may have neither TMODE nor MODE. In that case, it
4656 probably will not be used.
4658 If MODIFIER is EXPAND_SUM then when EXP is an addition
4659 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4660 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4661 products as above, or REG or MEM, or constant.
4662 Ordinarily in such cases we would output mul or add instructions
4663 and then return a pseudo reg containing the sum.
4665 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4666 it also marks a label as absolutely required (it can't be dead).
4667 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
4668 This is used for outputting expressions used in initializers.
4670 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4671 with a constant address even if that address is not normally legitimate.
4672 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
4675 expand_expr (exp, target, tmode, modifier)
4678 enum machine_mode tmode;
4679 enum expand_modifier modifier;
4681 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4682 This is static so it will be accessible to our recursive callees. */
4683 static tree placeholder_list = 0;
4684 register rtx op0, op1, temp;
4685 tree type = TREE_TYPE (exp);
4686 int unsignedp = TREE_UNSIGNED (type);
4687 register enum machine_mode mode = TYPE_MODE (type);
4688 register enum tree_code code = TREE_CODE (exp);
4690 /* Use subtarget as the target for operand 0 of a binary operation. */
4691 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4692 rtx original_target = target;
4693 /* Maybe defer this until sure not doing bytecode? */
4694 int ignore = (target == const0_rtx
4695 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4696 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4697 || code == COND_EXPR)
4698 && TREE_CODE (type) == VOID_TYPE));
4702 if (output_bytecode && modifier != EXPAND_INITIALIZER)
4704 bc_expand_expr (exp);
4708 /* Don't use hard regs as subtargets, because the combiner
4709 can only handle pseudo regs. */
4710 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4712 /* Avoid subtargets inside loops,
4713 since they hide some invariant expressions. */
4714 if (preserve_subexpressions_p ())
4717 /* If we are going to ignore this result, we need only do something
4718 if there is a side-effect somewhere in the expression. If there
4719 is, short-circuit the most common cases here. Note that we must
4720 not call expand_expr with anything but const0_rtx in case this
4721 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
4725 if (! TREE_SIDE_EFFECTS (exp))
4728 /* Ensure we reference a volatile object even if value is ignored. */
4729 if (TREE_THIS_VOLATILE (exp)
4730 && TREE_CODE (exp) != FUNCTION_DECL
4731 && mode != VOIDmode && mode != BLKmode)
4733 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
4734 if (GET_CODE (temp) == MEM)
4735 temp = copy_to_reg (temp);
4739 if (TREE_CODE_CLASS (code) == '1')
4740 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4741 VOIDmode, modifier);
4742 else if (TREE_CODE_CLASS (code) == '2'
4743 || TREE_CODE_CLASS (code) == '<')
4745 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4746 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
4749 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4750 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4751 /* If the second operand has no side effects, just evaluate
4753 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4754 VOIDmode, modifier);
4759 /* If will do cse, generate all results into pseudo registers
4760 since 1) that allows cse to find more things
4761 and 2) otherwise cse could produce an insn the machine
4764 if (! cse_not_expected && mode != BLKmode && target
4765 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4772 tree function = decl_function_context (exp);
4773 /* Handle using a label in a containing function. */
4774 if (function != current_function_decl && function != 0)
4776 struct function *p = find_function_data (function);
4777 /* Allocate in the memory associated with the function
4778 that the label is in. */
4779 push_obstacks (p->function_obstack,
4780 p->function_maybepermanent_obstack);
4782 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4783 label_rtx (exp), p->forced_labels);
4786 else if (modifier == EXPAND_INITIALIZER)
4787 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4788 label_rtx (exp), forced_labels);
4789 temp = gen_rtx (MEM, FUNCTION_MODE,
4790 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
4791 if (function != current_function_decl && function != 0)
4792 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
4797 if (DECL_RTL (exp) == 0)
4799 error_with_decl (exp, "prior parameter's size depends on `%s'");
4800 return CONST0_RTX (mode);
4803 /* ... fall through ... */
4806 /* If a static var's type was incomplete when the decl was written,
4807 but the type is complete now, lay out the decl now. */
4808 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4809 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
4811 push_obstacks_nochange ();
4812 end_temporary_allocation ();
4813 layout_decl (exp, 0);
4814 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
4818 /* ... fall through ... */
4822 if (DECL_RTL (exp) == 0)
4825 /* Ensure variable marked as used even if it doesn't go through
4826 a parser. If it hasn't be used yet, write out an external
4828 if (! TREE_USED (exp))
4830 assemble_external (exp);
4831 TREE_USED (exp) = 1;
4834 /* Show we haven't gotten RTL for this yet. */
4837 /* Handle variables inherited from containing functions. */
4838 context = decl_function_context (exp);
4840 /* We treat inline_function_decl as an alias for the current function
4841 because that is the inline function whose vars, types, etc.
4842 are being merged into the current function.
4843 See expand_inline_function. */
4845 if (context != 0 && context != current_function_decl
4846 && context != inline_function_decl
4847 /* If var is static, we don't need a static chain to access it. */
4848 && ! (GET_CODE (DECL_RTL (exp)) == MEM
4849 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
4853 /* Mark as non-local and addressable. */
4854 DECL_NONLOCAL (exp) = 1;
4855 if (DECL_NO_STATIC_CHAIN (current_function_decl))
4857 mark_addressable (exp);
4858 if (GET_CODE (DECL_RTL (exp)) != MEM)
4860 addr = XEXP (DECL_RTL (exp), 0);
4861 if (GET_CODE (addr) == MEM)
4862 addr = gen_rtx (MEM, Pmode,
4863 fix_lexical_addr (XEXP (addr, 0), exp));
4865 addr = fix_lexical_addr (addr, exp);
4866 temp = change_address (DECL_RTL (exp), mode, addr);
4869 /* This is the case of an array whose size is to be determined
4870 from its initializer, while the initializer is still being parsed.
4873 else if (GET_CODE (DECL_RTL (exp)) == MEM
4874 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
4875 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
4876 XEXP (DECL_RTL (exp), 0));
4878 /* If DECL_RTL is memory, we are in the normal case and either
4879 the address is not valid or it is not a register and -fforce-addr
4880 is specified, get the address into a register. */
4882 else if (GET_CODE (DECL_RTL (exp)) == MEM
4883 && modifier != EXPAND_CONST_ADDRESS
4884 && modifier != EXPAND_SUM
4885 && modifier != EXPAND_INITIALIZER
4886 && (! memory_address_p (DECL_MODE (exp),
4887 XEXP (DECL_RTL (exp), 0))
4889 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
4890 temp = change_address (DECL_RTL (exp), VOIDmode,
4891 copy_rtx (XEXP (DECL_RTL (exp), 0)));
4893 /* If we got something, return it. But first, set the alignment
4894 the address is a register. */
4897 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
4898 mark_reg_pointer (XEXP (temp, 0),
4899 DECL_ALIGN (exp) / BITS_PER_UNIT);
4904 /* If the mode of DECL_RTL does not match that of the decl, it
4905 must be a promoted value. We return a SUBREG of the wanted mode,
4906 but mark it so that we know that it was already extended. */
4908 if (GET_CODE (DECL_RTL (exp)) == REG
4909 && GET_MODE (DECL_RTL (exp)) != mode)
4911 /* Get the signedness used for this variable. Ensure we get the
4912 same mode we got when the variable was declared. */
4913 if (GET_MODE (DECL_RTL (exp))
4914 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
4917 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
4918 SUBREG_PROMOTED_VAR_P (temp) = 1;
4919 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4923 return DECL_RTL (exp);
4926 return immed_double_const (TREE_INT_CST_LOW (exp),
4927 TREE_INT_CST_HIGH (exp),
4931 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
4934 /* If optimized, generate immediate CONST_DOUBLE
4935 which will be turned into memory by reload if necessary.
4937 We used to force a register so that loop.c could see it. But
4938 this does not allow gen_* patterns to perform optimizations with
4939 the constants. It also produces two insns in cases like "x = 1.0;".
4940 On most machines, floating-point constants are not permitted in
4941 many insns, so we'd end up copying it to a register in any case.
4943 Now, we do the copying in expand_binop, if appropriate. */
4944 return immed_real_const (exp);
4948 if (! TREE_CST_RTL (exp))
4949 output_constant_def (exp);
4951 /* TREE_CST_RTL probably contains a constant address.
4952 On RISC machines where a constant address isn't valid,
4953 make some insns to get that address into a register. */
4954 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
4955 && modifier != EXPAND_CONST_ADDRESS
4956 && modifier != EXPAND_INITIALIZER
4957 && modifier != EXPAND_SUM
4958 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
4960 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
4961 return change_address (TREE_CST_RTL (exp), VOIDmode,
4962 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
4963 return TREE_CST_RTL (exp);
4966 context = decl_function_context (exp);
4968 /* We treat inline_function_decl as an alias for the current function
4969 because that is the inline function whose vars, types, etc.
4970 are being merged into the current function.
4971 See expand_inline_function. */
4972 if (context == current_function_decl || context == inline_function_decl)
4975 /* If this is non-local, handle it. */
4978 temp = SAVE_EXPR_RTL (exp);
4979 if (temp && GET_CODE (temp) == REG)
4981 put_var_into_stack (exp);
4982 temp = SAVE_EXPR_RTL (exp);
4984 if (temp == 0 || GET_CODE (temp) != MEM)
4986 return change_address (temp, mode,
4987 fix_lexical_addr (XEXP (temp, 0), exp));
4989 if (SAVE_EXPR_RTL (exp) == 0)
4991 if (mode == VOIDmode)
4994 temp = assign_temp (type, 0, 0, 0);
4996 SAVE_EXPR_RTL (exp) = temp;
4997 if (!optimize && GET_CODE (temp) == REG)
4998 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
5001 /* If the mode of TEMP does not match that of the expression, it
5002 must be a promoted value. We pass store_expr a SUBREG of the
5003 wanted mode but mark it so that we know that it was already
5004 extended. Note that `unsignedp' was modified above in
5007 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5009 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
5010 SUBREG_PROMOTED_VAR_P (temp) = 1;
5011 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5014 if (temp == const0_rtx)
5015 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5017 store_expr (TREE_OPERAND (exp, 0), temp, 0);
5020 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5021 must be a promoted value. We return a SUBREG of the wanted mode,
5022 but mark it so that we know that it was already extended. */
5024 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5025 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5027 /* Compute the signedness and make the proper SUBREG. */
5028 promote_mode (type, mode, &unsignedp, 0);
5029 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
5030 SUBREG_PROMOTED_VAR_P (temp) = 1;
5031 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5035 return SAVE_EXPR_RTL (exp);
5040 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5041 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5045 case PLACEHOLDER_EXPR:
5046 /* If there is an object on the head of the placeholder list,
5047 see if some object in it's references is of type TYPE. For
5048 further information, see tree.def. */
5049 if (placeholder_list)
5052 tree old_list = placeholder_list;
5054 for (object = TREE_PURPOSE (placeholder_list);
5055 (TYPE_MAIN_VARIANT (TREE_TYPE (object))
5056 != TYPE_MAIN_VARIANT (type))
5057 && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
5058 || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
5059 || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
5060 || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
5061 object = TREE_OPERAND (object, 0))
5065 && (TYPE_MAIN_VARIANT (TREE_TYPE (object))
5066 == TYPE_MAIN_VARIANT (type)))
5068 /* Expand this object skipping the list entries before
5069 it was found in case it is also a PLACEHOLDER_EXPR.
5070 In that case, we want to translate it using subsequent
5072 placeholder_list = TREE_CHAIN (placeholder_list);
5073 temp = expand_expr (object, original_target, tmode, modifier);
5074 placeholder_list = old_list;
5079 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5082 case WITH_RECORD_EXPR:
5083 /* Put the object on the placeholder list, expand our first operand,
5084 and pop the list. */
5085 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5087 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
5089 placeholder_list = TREE_CHAIN (placeholder_list);
5093 expand_exit_loop_if_false (NULL_PTR,
5094 invert_truthvalue (TREE_OPERAND (exp, 0)));
5099 expand_start_loop (1);
5100 expand_expr_stmt (TREE_OPERAND (exp, 0));
5108 tree vars = TREE_OPERAND (exp, 0);
5109 int vars_need_expansion = 0;
5111 /* Need to open a binding contour here because
5112 if there are any cleanups they most be contained here. */
5113 expand_start_bindings (0);
5115 /* Mark the corresponding BLOCK for output in its proper place. */
5116 if (TREE_OPERAND (exp, 2) != 0
5117 && ! TREE_USED (TREE_OPERAND (exp, 2)))
5118 insert_block (TREE_OPERAND (exp, 2));
5120 /* If VARS have not yet been expanded, expand them now. */
5123 if (DECL_RTL (vars) == 0)
5125 vars_need_expansion = 1;
5128 expand_decl_init (vars);
5129 vars = TREE_CHAIN (vars);
5132 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
5134 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
5140 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
5142 emit_insns (RTL_EXPR_SEQUENCE (exp));
5143 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
5144 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
5145 free_temps_for_rtl_expr (exp);
5146 return RTL_EXPR_RTL (exp);
5149 /* If we don't need the result, just ensure we evaluate any
5154 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
5155 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
5159 /* All elts simple constants => refer to a constant in memory. But
5160 if this is a non-BLKmode mode, let it store a field at a time
5161 since that should make a CONST_INT or CONST_DOUBLE when we
5162 fold. Likewise, if we have a target we can use, it is best to
5163 store directly into the target unless the type is large enough
5164 that memcpy will be used. If we are making an initializer and
5165 all operands are constant, put it in memory as well. */
5166 else if ((TREE_STATIC (exp)
5167 && ((mode == BLKmode
5168 && ! (target != 0 && safe_from_p (target, exp)))
5169 || TREE_ADDRESSABLE (exp)
5170 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5171 && (move_by_pieces_ninsns
5172 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
5173 TYPE_ALIGN (type) / BITS_PER_UNIT)
5175 && ! mostly_zeros_p (exp))))
5176 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
5178 rtx constructor = output_constant_def (exp);
5179 if (modifier != EXPAND_CONST_ADDRESS
5180 && modifier != EXPAND_INITIALIZER
5181 && modifier != EXPAND_SUM
5182 && (! memory_address_p (GET_MODE (constructor),
5183 XEXP (constructor, 0))
5185 && GET_CODE (XEXP (constructor, 0)) != REG)))
5186 constructor = change_address (constructor, VOIDmode,
5187 XEXP (constructor, 0));
5193 if (target == 0 || ! safe_from_p (target, exp))
5195 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
5196 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5198 target = assign_temp (type, 0, 1, 1);
5201 if (TREE_READONLY (exp))
5203 if (GET_CODE (target) == MEM)
5204 target = change_address (target, GET_MODE (target),
5206 RTX_UNCHANGING_P (target) = 1;
5209 store_constructor (exp, target, 0);
5215 tree exp1 = TREE_OPERAND (exp, 0);
5218 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
5219 op0 = memory_address (mode, op0);
5221 temp = gen_rtx (MEM, mode, op0);
5222 /* If address was computed by addition,
5223 mark this as an element of an aggregate. */
5224 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5225 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
5226 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
5227 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
5228 || (TREE_CODE (exp1) == ADDR_EXPR
5229 && (exp2 = TREE_OPERAND (exp1, 0))
5230 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
5231 MEM_IN_STRUCT_P (temp) = 1;
5232 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
5234 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5235 here, because, in C and C++, the fact that a location is accessed
5236 through a pointer to const does not mean that the value there can
5237 never change. Languages where it can never change should
5238 also set TREE_STATIC. */
5239 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
5244 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
5248 tree array = TREE_OPERAND (exp, 0);
5249 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5250 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5251 tree index = TREE_OPERAND (exp, 1);
5252 tree index_type = TREE_TYPE (index);
5255 if (TREE_CODE (low_bound) != INTEGER_CST
5256 && contains_placeholder_p (low_bound))
5257 low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
5259 /* Optimize the special-case of a zero lower bound.
5261 We convert the low_bound to sizetype to avoid some problems
5262 with constant folding. (E.g. suppose the lower bound is 1,
5263 and its mode is QI. Without the conversion, (ARRAY
5264 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5265 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5267 But sizetype isn't quite right either (especially if
5268 the lowbound is negative). FIXME */
5270 if (! integer_zerop (low_bound))
5271 index = fold (build (MINUS_EXPR, index_type, index,
5272 convert (sizetype, low_bound)));
5274 if ((TREE_CODE (index) != INTEGER_CST
5275 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5276 && (! SLOW_UNALIGNED_ACCESS || ! get_inner_unaligned_p (exp)))
5278 /* Nonconstant array index or nonconstant element size, and
5279 not an array in an unaligned (packed) structure field.
5280 Generate the tree for *(&array+index) and expand that,
5281 except do it in a language-independent way
5282 and don't complain about non-lvalue arrays.
5283 `mark_addressable' should already have been called
5284 for any array for which this case will be reached. */
5286 /* Don't forget the const or volatile flag from the array
5288 tree variant_type = build_type_variant (type,
5289 TREE_READONLY (exp),
5290 TREE_THIS_VOLATILE (exp));
5291 tree array_adr = build1 (ADDR_EXPR,
5292 build_pointer_type (variant_type), array);
5294 tree size = size_in_bytes (type);
5296 /* Convert the integer argument to a type the same size as sizetype
5297 so the multiply won't overflow spuriously. */
5298 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
5299 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
5302 if (TREE_CODE (size) != INTEGER_CST
5303 && contains_placeholder_p (size))
5304 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
5306 /* Don't think the address has side effects
5307 just because the array does.
5308 (In some cases the address might have side effects,
5309 and we fail to record that fact here. However, it should not
5310 matter, since expand_expr should not care.) */
5311 TREE_SIDE_EFFECTS (array_adr) = 0;
5315 (INDIRECT_REF, type,
5316 fold (build (PLUS_EXPR,
5317 TYPE_POINTER_TO (variant_type),
5322 TYPE_POINTER_TO (variant_type),
5323 fold (build (MULT_EXPR, TREE_TYPE (index),
5325 convert (TREE_TYPE (index),
5328 /* Volatility, etc., of new expression is same as old
5330 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
5331 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
5332 TREE_READONLY (elt) = TREE_READONLY (exp);
5334 return expand_expr (elt, target, tmode, modifier);
5337 /* Fold an expression like: "foo"[2].
5338 This is not done in fold so it won't happen inside &.
5339 Don't fold if this is for wide characters since it's too
5340 difficult to do correctly and this is a very rare case. */
5342 if (TREE_CODE (array) == STRING_CST
5343 && TREE_CODE (index) == INTEGER_CST
5344 && !TREE_INT_CST_HIGH (index)
5345 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
5346 && GET_MODE_CLASS (mode) == MODE_INT
5347 && GET_MODE_SIZE (mode) == 1)
5348 return GEN_INT (TREE_STRING_POINTER (array)[i]);
5350 /* If this is a constant index into a constant array,
5351 just get the value from the array. Handle both the cases when
5352 we have an explicit constructor and when our operand is a variable
5353 that was declared const. */
5355 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
5357 if (TREE_CODE (index) == INTEGER_CST
5358 && TREE_INT_CST_HIGH (index) == 0)
5360 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
5362 i = TREE_INT_CST_LOW (index);
5364 elem = TREE_CHAIN (elem);
5366 return expand_expr (fold (TREE_VALUE (elem)), target,
5371 else if (optimize >= 1
5372 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
5373 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
5374 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
5376 if (TREE_CODE (index) == INTEGER_CST
5377 && TREE_INT_CST_HIGH (index) == 0)
5379 tree init = DECL_INITIAL (array);
5381 i = TREE_INT_CST_LOW (index);
5382 if (TREE_CODE (init) == CONSTRUCTOR)
5384 tree elem = CONSTRUCTOR_ELTS (init);
5387 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
5388 elem = TREE_CHAIN (elem);
5390 return expand_expr (fold (TREE_VALUE (elem)), target,
5393 else if (TREE_CODE (init) == STRING_CST
5394 && i < TREE_STRING_LENGTH (init))
5395 return GEN_INT (TREE_STRING_POINTER (init)[i]);
5400 /* Treat array-ref with constant index as a component-ref. */
5404 /* If the operand is a CONSTRUCTOR, we can just extract the
5405 appropriate field if it is present. Don't do this if we have
5406 already written the data since we want to refer to that copy
5407 and varasm.c assumes that's what we'll do. */
5408 if (code != ARRAY_REF
5409 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
5410 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
5414 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
5415 elt = TREE_CHAIN (elt))
5416 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
5417 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
5421 enum machine_mode mode1;
5426 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
5427 &mode1, &unsignedp, &volatilep);
5430 /* If we got back the original object, something is wrong. Perhaps
5431 we are evaluating an expression too early. In any event, don't
5432 infinitely recurse. */
5436 /* If TEM's type is a union of variable size, pass TARGET to the inner
5437 computation, since it will need a temporary and TARGET is known
5438 to have to do. This occurs in unchecked conversion in Ada. */
5440 op0 = expand_expr (tem,
5441 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
5442 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
5444 ? target : NULL_RTX),
5446 modifier == EXPAND_INITIALIZER ? modifier : 0);
5448 /* If this is a constant, put it into a register if it is a
5449 legitimate constant and memory if it isn't. */
5450 if (CONSTANT_P (op0))
5452 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
5453 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
5454 op0 = force_reg (mode, op0);
5456 op0 = validize_mem (force_const_mem (mode, op0));
5459 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
5462 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5464 if (GET_CODE (op0) != MEM)
5466 op0 = change_address (op0, VOIDmode,
5467 gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
5468 force_reg (ptr_mode, offset_rtx)));
5469 /* If we have a variable offset, the known alignment
5470 is only that of the innermost structure containing the field.
5471 (Actually, we could sometimes do better by using the
5472 size of an element of the innermost array, but no need.) */
5473 if (TREE_CODE (exp) == COMPONENT_REF
5474 || TREE_CODE (exp) == BIT_FIELD_REF)
5475 alignment = (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5479 /* Don't forget about volatility even if this is a bitfield. */
5480 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
5482 op0 = copy_rtx (op0);
5483 MEM_VOLATILE_P (op0) = 1;
5486 /* In cases where an aligned union has an unaligned object
5487 as a field, we might be extracting a BLKmode value from
5488 an integer-mode (e.g., SImode) object. Handle this case
5489 by doing the extract into an object as wide as the field
5490 (which we know to be the width of a basic mode), then
5491 storing into memory, and changing the mode to BLKmode.
5492 If we ultimately want the address (EXPAND_CONST_ADDRESS or
5493 EXPAND_INITIALIZER), then we must not copy to a temporary. */
5494 if (mode1 == VOIDmode
5495 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5496 || (modifier != EXPAND_CONST_ADDRESS
5497 && modifier != EXPAND_INITIALIZER
5498 && ((mode1 != BLKmode && ! direct_load[(int) mode1])
5499 /* If the field isn't aligned enough to fetch as a memref,
5500 fetch it as a bit field. */
5501 || (SLOW_UNALIGNED_ACCESS
5502 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
5503 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
5505 enum machine_mode ext_mode = mode;
5507 if (ext_mode == BLKmode)
5508 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
5510 if (ext_mode == BLKmode)
5512 /* In this case, BITPOS must start at a byte boundary and
5513 TARGET, if specified, must be a MEM. */
5514 if (GET_CODE (op0) != MEM
5515 || (target != 0 && GET_CODE (target) != MEM)
5516 || bitpos % BITS_PER_UNIT != 0)
5519 op0 = change_address (op0, VOIDmode,
5520 plus_constant (XEXP (op0, 0),
5521 bitpos / BITS_PER_UNIT));
5523 target = assign_temp (type, 0, 1, 1);
5525 emit_block_move (target, op0,
5526 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5533 op0 = validize_mem (op0);
5535 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
5536 mark_reg_pointer (XEXP (op0, 0), alignment);
5538 op0 = extract_bit_field (op0, bitsize, bitpos,
5539 unsignedp, target, ext_mode, ext_mode,
5541 int_size_in_bytes (TREE_TYPE (tem)));
5543 /* If the result is a record type and BITSIZE is narrower than
5544 the mode of OP0, an integral mode, and this is a big endian
5545 machine, we must put the field into the high-order bits. */
5546 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
5547 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
5548 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
5549 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
5550 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
5554 if (mode == BLKmode)
5556 rtx new = assign_stack_temp (ext_mode,
5557 bitsize / BITS_PER_UNIT, 0);
5559 emit_move_insn (new, op0);
5560 op0 = copy_rtx (new);
5561 PUT_MODE (op0, BLKmode);
5562 MEM_IN_STRUCT_P (op0) = 1;
5568 /* If the result is BLKmode, use that to access the object
5570 if (mode == BLKmode)
5573 /* Get a reference to just this component. */
5574 if (modifier == EXPAND_CONST_ADDRESS
5575 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5576 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
5577 (bitpos / BITS_PER_UNIT)));
5579 op0 = change_address (op0, mode1,
5580 plus_constant (XEXP (op0, 0),
5581 (bitpos / BITS_PER_UNIT)));
5582 if (GET_CODE (XEXP (op0, 0)) == REG)
5583 mark_reg_pointer (XEXP (op0, 0), alignment);
5585 MEM_IN_STRUCT_P (op0) = 1;
5586 MEM_VOLATILE_P (op0) |= volatilep;
5587 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
5590 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5591 convert_move (target, op0, unsignedp);
5595 /* Intended for a reference to a buffer of a file-object in Pascal.
5596 But it's not certain that a special tree code will really be
5597 necessary for these. INDIRECT_REF might work for them. */
5603 /* Pascal set IN expression.
5606 rlo = set_low - (set_low%bits_per_word);
5607 the_word = set [ (index - rlo)/bits_per_word ];
5608 bit_index = index % bits_per_word;
5609 bitmask = 1 << bit_index;
5610 return !!(the_word & bitmask); */
5612 tree set = TREE_OPERAND (exp, 0);
5613 tree index = TREE_OPERAND (exp, 1);
5614 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
5615 tree set_type = TREE_TYPE (set);
5616 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
5617 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
5618 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
5619 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
5620 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
5621 rtx setval = expand_expr (set, 0, VOIDmode, 0);
5622 rtx setaddr = XEXP (setval, 0);
5623 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
5625 rtx diff, quo, rem, addr, bit, result;
5627 preexpand_calls (exp);
5629 /* If domain is empty, answer is no. Likewise if index is constant
5630 and out of bounds. */
5631 if ((TREE_CODE (set_high_bound) == INTEGER_CST
5632 && TREE_CODE (set_low_bound) == INTEGER_CST
5633 && tree_int_cst_lt (set_high_bound, set_low_bound)
5634 || (TREE_CODE (index) == INTEGER_CST
5635 && TREE_CODE (set_low_bound) == INTEGER_CST
5636 && tree_int_cst_lt (index, set_low_bound))
5637 || (TREE_CODE (set_high_bound) == INTEGER_CST
5638 && TREE_CODE (index) == INTEGER_CST
5639 && tree_int_cst_lt (set_high_bound, index))))
5643 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5645 /* If we get here, we have to generate the code for both cases
5646 (in range and out of range). */
5648 op0 = gen_label_rtx ();
5649 op1 = gen_label_rtx ();
5651 if (! (GET_CODE (index_val) == CONST_INT
5652 && GET_CODE (lo_r) == CONST_INT))
5654 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
5655 GET_MODE (index_val), iunsignedp, 0);
5656 emit_jump_insn (gen_blt (op1));
5659 if (! (GET_CODE (index_val) == CONST_INT
5660 && GET_CODE (hi_r) == CONST_INT))
5662 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
5663 GET_MODE (index_val), iunsignedp, 0);
5664 emit_jump_insn (gen_bgt (op1));
5667 /* Calculate the element number of bit zero in the first word
5669 if (GET_CODE (lo_r) == CONST_INT)
5670 rlow = GEN_INT (INTVAL (lo_r)
5671 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
5673 rlow = expand_binop (index_mode, and_optab, lo_r,
5674 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
5675 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5677 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
5678 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5680 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
5681 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5682 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
5683 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5685 addr = memory_address (byte_mode,
5686 expand_binop (index_mode, add_optab, diff,
5687 setaddr, NULL_RTX, iunsignedp,
5690 /* Extract the bit we want to examine */
5691 bit = expand_shift (RSHIFT_EXPR, byte_mode,
5692 gen_rtx (MEM, byte_mode, addr),
5693 make_tree (TREE_TYPE (index), rem),
5695 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
5696 GET_MODE (target) == byte_mode ? target : 0,
5697 1, OPTAB_LIB_WIDEN);
5699 if (result != target)
5700 convert_move (target, result, 1);
5702 /* Output the code to handle the out-of-range case. */
5705 emit_move_insn (target, const0_rtx);
5710 case WITH_CLEANUP_EXPR:
5711 if (RTL_EXPR_RTL (exp) == 0)
5714 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5716 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
5717 /* That's it for this cleanup. */
5718 TREE_OPERAND (exp, 2) = 0;
5719 expand_eh_region_start ();
5721 return RTL_EXPR_RTL (exp);
5723 case CLEANUP_POINT_EXPR:
5725 extern int temp_slot_level;
5726 tree old_cleanups = cleanups_this_call;
5727 int old_temp_level = target_temp_slot_level;
5729 target_temp_slot_level = temp_slot_level;
5730 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5731 /* If we're going to use this value, load it up now. */
5733 op0 = force_not_mem (op0);
5734 expand_cleanups_to (old_cleanups);
5735 preserve_temp_slots (op0);
5738 target_temp_slot_level = old_temp_level;
5743 /* Check for a built-in function. */
5744 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5745 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5747 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5748 return expand_builtin (exp, target, subtarget, tmode, ignore);
5750 /* If this call was expanded already by preexpand_calls,
5751 just return the result we got. */
5752 if (CALL_EXPR_RTL (exp) != 0)
5753 return CALL_EXPR_RTL (exp);
5755 return expand_call (exp, target, ignore);
5757 case NON_LVALUE_EXPR:
5760 case REFERENCE_EXPR:
5761 if (TREE_CODE (type) == UNION_TYPE)
5763 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
5766 if (mode != BLKmode)
5767 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5769 target = assign_temp (type, 0, 1, 1);
5772 if (GET_CODE (target) == MEM)
5773 /* Store data into beginning of memory target. */
5774 store_expr (TREE_OPERAND (exp, 0),
5775 change_address (target, TYPE_MODE (valtype), 0), 0);
5777 else if (GET_CODE (target) == REG)
5778 /* Store this field into a union of the proper type. */
5779 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
5780 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
5782 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
5786 /* Return the entire union. */
5790 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5792 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
5795 /* If the signedness of the conversion differs and OP0 is
5796 a promoted SUBREG, clear that indication since we now
5797 have to do the proper extension. */
5798 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
5799 && GET_CODE (op0) == SUBREG)
5800 SUBREG_PROMOTED_VAR_P (op0) = 0;
5805 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
5806 if (GET_MODE (op0) == mode)
5809 /* If OP0 is a constant, just convert it into the proper mode. */
5810 if (CONSTANT_P (op0))
5812 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5813 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5815 if (modifier == EXPAND_INITIALIZER)
5816 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
5820 convert_to_mode (mode, op0,
5821 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5823 convert_move (target, op0,
5824 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5828 /* We come here from MINUS_EXPR when the second operand is a
5831 this_optab = add_optab;
5833 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
5834 something else, make sure we add the register to the constant and
5835 then to the other thing. This case can occur during strength
5836 reduction and doing it this way will produce better code if the
5837 frame pointer or argument pointer is eliminated.
5839 fold-const.c will ensure that the constant is always in the inner
5840 PLUS_EXPR, so the only case we need to do anything about is if
5841 sp, ap, or fp is our second argument, in which case we must swap
5842 the innermost first argument and our second argument. */
5844 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5845 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
5846 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
5847 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
5848 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
5849 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
5851 tree t = TREE_OPERAND (exp, 1);
5853 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5854 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
5857 /* If the result is to be ptr_mode and we are adding an integer to
5858 something, we might be forming a constant. So try to use
5859 plus_constant. If it produces a sum and we can't accept it,
5860 use force_operand. This allows P = &ARR[const] to generate
5861 efficient code on machines where a SYMBOL_REF is not a valid
5864 If this is an EXPAND_SUM call, always return the sum. */
5865 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
5866 || mode == ptr_mode)
5868 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
5869 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5870 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
5872 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
5874 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
5875 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5876 op1 = force_operand (op1, target);
5880 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5881 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
5882 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
5884 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
5886 if (! CONSTANT_P (op0))
5888 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5889 VOIDmode, modifier);
5890 /* Don't go to both_summands if modifier
5891 says it's not right to return a PLUS. */
5892 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5896 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
5897 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5898 op0 = force_operand (op0, target);
5903 /* No sense saving up arithmetic to be done
5904 if it's all in the wrong mode to form part of an address.
5905 And force_operand won't know whether to sign-extend or
5907 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5908 || mode != ptr_mode)
5911 preexpand_calls (exp);
5912 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5915 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
5916 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
5919 /* Make sure any term that's a sum with a constant comes last. */
5920 if (GET_CODE (op0) == PLUS
5921 && CONSTANT_P (XEXP (op0, 1)))
5927 /* If adding to a sum including a constant,
5928 associate it to put the constant outside. */
5929 if (GET_CODE (op1) == PLUS
5930 && CONSTANT_P (XEXP (op1, 1)))
5932 rtx constant_term = const0_rtx;
5934 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
5937 /* Ensure that MULT comes first if there is one. */
5938 else if (GET_CODE (op0) == MULT)
5939 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
5941 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
5943 /* Let's also eliminate constants from op0 if possible. */
5944 op0 = eliminate_constant_term (op0, &constant_term);
5946 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
5947 their sum should be a constant. Form it into OP1, since the
5948 result we want will then be OP0 + OP1. */
5950 temp = simplify_binary_operation (PLUS, mode, constant_term,
5955 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
5958 /* Put a constant term last and put a multiplication first. */
5959 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
5960 temp = op1, op1 = op0, op0 = temp;
5962 temp = simplify_binary_operation (PLUS, mode, op0, op1);
5963 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
5966 /* For initializers, we are allowed to return a MINUS of two
5967 symbolic constants. Here we handle all cases when both operands
5969 /* Handle difference of two symbolic constants,
5970 for the sake of an initializer. */
5971 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5972 && really_constant_p (TREE_OPERAND (exp, 0))
5973 && really_constant_p (TREE_OPERAND (exp, 1)))
5975 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
5976 VOIDmode, modifier);
5977 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5978 VOIDmode, modifier);
5980 /* If the last operand is a CONST_INT, use plus_constant of
5981 the negated constant. Else make the MINUS. */
5982 if (GET_CODE (op1) == CONST_INT)
5983 return plus_constant (op0, - INTVAL (op1));
5985 return gen_rtx (MINUS, mode, op0, op1);
5987 /* Convert A - const to A + (-const). */
5988 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
5990 tree negated = fold (build1 (NEGATE_EXPR, type,
5991 TREE_OPERAND (exp, 1)));
5993 /* Deal with the case where we can't negate the constant
5995 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
5997 tree newtype = signed_type (type);
5998 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
5999 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
6000 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
6002 if (! TREE_OVERFLOW (newneg))
6003 return expand_expr (convert (type,
6004 build (PLUS_EXPR, newtype,
6006 target, tmode, modifier);
6010 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
6014 this_optab = sub_optab;
6018 preexpand_calls (exp);
6019 /* If first operand is constant, swap them.
6020 Thus the following special case checks need only
6021 check the second operand. */
6022 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6024 register tree t1 = TREE_OPERAND (exp, 0);
6025 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
6026 TREE_OPERAND (exp, 1) = t1;
6029 /* Attempt to return something suitable for generating an
6030 indexed address, for machines that support that. */
6032 if (modifier == EXPAND_SUM && mode == ptr_mode
6033 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6034 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
6036 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
6038 /* Apply distributive law if OP0 is x+c. */
6039 if (GET_CODE (op0) == PLUS
6040 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
6041 return gen_rtx (PLUS, mode,
6042 gen_rtx (MULT, mode, XEXP (op0, 0),
6043 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
6044 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
6045 * INTVAL (XEXP (op0, 1))));
6047 if (GET_CODE (op0) != REG)
6048 op0 = force_operand (op0, NULL_RTX);
6049 if (GET_CODE (op0) != REG)
6050 op0 = copy_to_mode_reg (mode, op0);
6052 return gen_rtx (MULT, mode, op0,
6053 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
6056 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6059 /* Check for multiplying things that have been extended
6060 from a narrower type. If this machine supports multiplying
6061 in that narrower type with a result in the desired type,
6062 do it that way, and avoid the explicit type-conversion. */
6063 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
6064 && TREE_CODE (type) == INTEGER_TYPE
6065 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6066 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
6067 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6068 && int_fits_type_p (TREE_OPERAND (exp, 1),
6069 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6070 /* Don't use a widening multiply if a shift will do. */
6071 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
6072 > HOST_BITS_PER_WIDE_INT)
6073 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
6075 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6076 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6078 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
6079 /* If both operands are extended, they must either both
6080 be zero-extended or both be sign-extended. */
6081 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6083 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
6085 enum machine_mode innermode
6086 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
6087 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6088 ? smul_widen_optab : umul_widen_optab);
6089 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6090 ? umul_widen_optab : smul_widen_optab);
6091 if (mode == GET_MODE_WIDER_MODE (innermode))
6093 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
6095 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6096 NULL_RTX, VOIDmode, 0);
6097 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6098 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6101 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6102 NULL_RTX, VOIDmode, 0);
6105 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6106 && innermode == word_mode)
6109 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6110 NULL_RTX, VOIDmode, 0);
6111 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6112 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6115 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6116 NULL_RTX, VOIDmode, 0);
6117 temp = expand_binop (mode, other_optab, op0, op1, target,
6118 unsignedp, OPTAB_LIB_WIDEN);
6119 htem = expand_mult_highpart_adjust (innermode,
6120 gen_highpart (innermode, temp),
6122 gen_highpart (innermode, temp),
6124 emit_move_insn (gen_highpart (innermode, temp), htem);
6129 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6130 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6131 return expand_mult (mode, op0, op1, target, unsignedp);
6133 case TRUNC_DIV_EXPR:
6134 case FLOOR_DIV_EXPR:
6136 case ROUND_DIV_EXPR:
6137 case EXACT_DIV_EXPR:
6138 preexpand_calls (exp);
6139 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6141 /* Possible optimization: compute the dividend with EXPAND_SUM
6142 then if the divisor is constant can optimize the case
6143 where some terms of the dividend have coeffs divisible by it. */
6144 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6145 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6146 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
6149 this_optab = flodiv_optab;
6152 case TRUNC_MOD_EXPR:
6153 case FLOOR_MOD_EXPR:
6155 case ROUND_MOD_EXPR:
6156 preexpand_calls (exp);
6157 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6159 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6160 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6161 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
6163 case FIX_ROUND_EXPR:
6164 case FIX_FLOOR_EXPR:
6166 abort (); /* Not used for C. */
6168 case FIX_TRUNC_EXPR:
6169 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6171 target = gen_reg_rtx (mode);
6172 expand_fix (target, op0, unsignedp);
6176 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6178 target = gen_reg_rtx (mode);
6179 /* expand_float can't figure out what to do if FROM has VOIDmode.
6180 So give it the correct mode. With -O, cse will optimize this. */
6181 if (GET_MODE (op0) == VOIDmode)
6182 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6184 expand_float (target, op0,
6185 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6189 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6190 temp = expand_unop (mode, neg_optab, op0, target, 0);
6196 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6198 /* Handle complex values specially. */
6199 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
6200 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6201 return expand_complex_abs (mode, op0, target, unsignedp);
6203 /* Unsigned abs is simply the operand. Testing here means we don't
6204 risk generating incorrect code below. */
6205 if (TREE_UNSIGNED (type))
6208 return expand_abs (mode, op0, target, unsignedp,
6209 safe_from_p (target, TREE_OPERAND (exp, 0)));
6213 target = original_target;
6214 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
6215 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
6216 || GET_MODE (target) != mode
6217 || (GET_CODE (target) == REG
6218 && REGNO (target) < FIRST_PSEUDO_REGISTER))
6219 target = gen_reg_rtx (mode);
6220 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6221 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6223 /* First try to do it with a special MIN or MAX instruction.
6224 If that does not win, use a conditional jump to select the proper
6226 this_optab = (TREE_UNSIGNED (type)
6227 ? (code == MIN_EXPR ? umin_optab : umax_optab)
6228 : (code == MIN_EXPR ? smin_optab : smax_optab));
6230 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
6235 /* At this point, a MEM target is no longer useful; we will get better
6238 if (GET_CODE (target) == MEM)
6239 target = gen_reg_rtx (mode);
6242 emit_move_insn (target, op0);
6244 op0 = gen_label_rtx ();
6246 /* If this mode is an integer too wide to compare properly,
6247 compare word by word. Rely on cse to optimize constant cases. */
6248 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
6250 if (code == MAX_EXPR)
6251 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6252 target, op1, NULL_RTX, op0);
6254 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6255 op1, target, NULL_RTX, op0);
6256 emit_move_insn (target, op1);
6260 if (code == MAX_EXPR)
6261 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6262 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
6263 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
6265 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6266 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
6267 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
6268 if (temp == const0_rtx)
6269 emit_move_insn (target, op1);
6270 else if (temp != const_true_rtx)
6272 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
6273 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
6276 emit_move_insn (target, op1);
6283 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6284 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
6290 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6291 temp = expand_unop (mode, ffs_optab, op0, target, 1);
6296 /* ??? Can optimize bitwise operations with one arg constant.
6297 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6298 and (a bitwise1 b) bitwise2 b (etc)
6299 but that is probably not worth while. */
6301 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6302 boolean values when we want in all cases to compute both of them. In
6303 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6304 as actual zero-or-1 values and then bitwise anding. In cases where
6305 there cannot be any side effects, better code would be made by
6306 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6307 how to recognize those cases. */
6309 case TRUTH_AND_EXPR:
6311 this_optab = and_optab;
6316 this_optab = ior_optab;
6319 case TRUTH_XOR_EXPR:
6321 this_optab = xor_optab;
6328 preexpand_calls (exp);
6329 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6331 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6332 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
6335 /* Could determine the answer when only additive constants differ. Also,
6336 the addition of one can be handled by changing the condition. */
6343 preexpand_calls (exp);
6344 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
6348 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
6349 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
6351 && GET_CODE (original_target) == REG
6352 && (GET_MODE (original_target)
6353 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6355 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
6358 if (temp != original_target)
6359 temp = copy_to_reg (temp);
6361 op1 = gen_label_rtx ();
6362 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
6363 GET_MODE (temp), unsignedp, 0);
6364 emit_jump_insn (gen_beq (op1));
6365 emit_move_insn (temp, const1_rtx);
6370 /* If no set-flag instruction, must generate a conditional
6371 store into a temporary variable. Drop through
6372 and handle this like && and ||. */
6374 case TRUTH_ANDIF_EXPR:
6375 case TRUTH_ORIF_EXPR:
6377 && (target == 0 || ! safe_from_p (target, exp)
6378 /* Make sure we don't have a hard reg (such as function's return
6379 value) live across basic blocks, if not optimizing. */
6380 || (!optimize && GET_CODE (target) == REG
6381 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
6382 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6385 emit_clr_insn (target);
6387 op1 = gen_label_rtx ();
6388 jumpifnot (exp, op1);
6391 emit_0_to_1_insn (target);
6394 return ignore ? const0_rtx : target;
6396 case TRUTH_NOT_EXPR:
6397 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6398 /* The parser is careful to generate TRUTH_NOT_EXPR
6399 only with operands that are always zero or one. */
6400 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
6401 target, 1, OPTAB_LIB_WIDEN);
6407 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6409 return expand_expr (TREE_OPERAND (exp, 1),
6410 (ignore ? const0_rtx : target),
6415 rtx flag = NULL_RTX;
6416 tree left_cleanups = NULL_TREE;
6417 tree right_cleanups = NULL_TREE;
6419 /* Used to save a pointer to the place to put the setting of
6420 the flag that indicates if this side of the conditional was
6421 taken. We backpatch the code, if we find out later that we
6422 have any conditional cleanups that need to be performed. */
6423 rtx dest_right_flag = NULL_RTX;
6424 rtx dest_left_flag = NULL_RTX;
6426 /* Note that COND_EXPRs whose type is a structure or union
6427 are required to be constructed to contain assignments of
6428 a temporary variable, so that we can evaluate them here
6429 for side effect only. If type is void, we must do likewise. */
6431 /* If an arm of the branch requires a cleanup,
6432 only that cleanup is performed. */
6435 tree binary_op = 0, unary_op = 0;
6436 tree old_cleanups = cleanups_this_call;
6438 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
6439 convert it to our mode, if necessary. */
6440 if (integer_onep (TREE_OPERAND (exp, 1))
6441 && integer_zerop (TREE_OPERAND (exp, 2))
6442 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6446 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6451 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
6452 if (GET_MODE (op0) == mode)
6456 target = gen_reg_rtx (mode);
6457 convert_move (target, op0, unsignedp);
6461 /* If we are not to produce a result, we have no target. Otherwise,
6462 if a target was specified use it; it will not be used as an
6463 intermediate target unless it is safe. If no target, use a
6468 else if (original_target
6469 && safe_from_p (original_target, TREE_OPERAND (exp, 0))
6470 && GET_MODE (original_target) == mode
6471 && ! (GET_CODE (original_target) == MEM
6472 && MEM_VOLATILE_P (original_target)))
6473 temp = original_target;
6474 else if (TREE_ADDRESSABLE (type))
6477 temp = assign_temp (type, 0, 0, 1);
6479 /* Check for X ? A + B : A. If we have this, we can copy
6480 A to the output and conditionally add B. Similarly for unary
6481 operations. Don't do this if X has side-effects because
6482 those side effects might affect A or B and the "?" operation is
6483 a sequence point in ANSI. (We test for side effects later.) */
6485 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
6486 && operand_equal_p (TREE_OPERAND (exp, 2),
6487 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6488 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
6489 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
6490 && operand_equal_p (TREE_OPERAND (exp, 1),
6491 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6492 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
6493 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
6494 && operand_equal_p (TREE_OPERAND (exp, 2),
6495 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6496 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
6497 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
6498 && operand_equal_p (TREE_OPERAND (exp, 1),
6499 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6500 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
6502 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
6503 operation, do this as A + (X != 0). Similarly for other simple
6504 binary operators. */
6505 if (temp && singleton && binary_op
6506 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6507 && (TREE_CODE (binary_op) == PLUS_EXPR
6508 || TREE_CODE (binary_op) == MINUS_EXPR
6509 || TREE_CODE (binary_op) == BIT_IOR_EXPR
6510 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
6511 && integer_onep (TREE_OPERAND (binary_op, 1))
6512 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6515 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
6516 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
6517 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
6520 /* If we had X ? A : A + 1, do this as A + (X == 0).
6522 We have to invert the truth value here and then put it
6523 back later if do_store_flag fails. We cannot simply copy
6524 TREE_OPERAND (exp, 0) to another variable and modify that
6525 because invert_truthvalue can modify the tree pointed to
6527 if (singleton == TREE_OPERAND (exp, 1))
6528 TREE_OPERAND (exp, 0)
6529 = invert_truthvalue (TREE_OPERAND (exp, 0));
6531 result = do_store_flag (TREE_OPERAND (exp, 0),
6532 (safe_from_p (temp, singleton)
6534 mode, BRANCH_COST <= 1);
6538 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
6539 return expand_binop (mode, boptab, op1, result, temp,
6540 unsignedp, OPTAB_LIB_WIDEN);
6542 else if (singleton == TREE_OPERAND (exp, 1))
6543 TREE_OPERAND (exp, 0)
6544 = invert_truthvalue (TREE_OPERAND (exp, 0));
6547 do_pending_stack_adjust ();
6549 op0 = gen_label_rtx ();
6551 flag = gen_reg_rtx (word_mode);
6552 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
6556 /* If the target conflicts with the other operand of the
6557 binary op, we can't use it. Also, we can't use the target
6558 if it is a hard register, because evaluating the condition
6559 might clobber it. */
6561 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
6562 || (GET_CODE (temp) == REG
6563 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
6564 temp = gen_reg_rtx (mode);
6565 store_expr (singleton, temp, 0);
6568 expand_expr (singleton,
6569 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6570 dest_left_flag = get_last_insn ();
6571 if (singleton == TREE_OPERAND (exp, 1))
6572 jumpif (TREE_OPERAND (exp, 0), op0);
6574 jumpifnot (TREE_OPERAND (exp, 0), op0);
6576 /* Allows cleanups up to here. */
6577 old_cleanups = cleanups_this_call;
6578 if (binary_op && temp == 0)
6579 /* Just touch the other operand. */
6580 expand_expr (TREE_OPERAND (binary_op, 1),
6581 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6583 store_expr (build (TREE_CODE (binary_op), type,
6584 make_tree (type, temp),
6585 TREE_OPERAND (binary_op, 1)),
6588 store_expr (build1 (TREE_CODE (unary_op), type,
6589 make_tree (type, temp)),
6592 dest_right_flag = get_last_insn ();
6595 /* This is now done in jump.c and is better done there because it
6596 produces shorter register lifetimes. */
6598 /* Check for both possibilities either constants or variables
6599 in registers (but not the same as the target!). If so, can
6600 save branches by assigning one, branching, and assigning the
6602 else if (temp && GET_MODE (temp) != BLKmode
6603 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
6604 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
6605 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
6606 && DECL_RTL (TREE_OPERAND (exp, 1))
6607 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
6608 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
6609 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
6610 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
6611 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
6612 && DECL_RTL (TREE_OPERAND (exp, 2))
6613 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
6614 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
6616 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6617 temp = gen_reg_rtx (mode);
6618 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6619 dest_left_flag = get_last_insn ();
6620 jumpifnot (TREE_OPERAND (exp, 0), op0);
6622 /* Allows cleanups up to here. */
6623 old_cleanups = cleanups_this_call;
6624 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6626 dest_right_flag = get_last_insn ();
6629 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
6630 comparison operator. If we have one of these cases, set the
6631 output to A, branch on A (cse will merge these two references),
6632 then set the output to FOO. */
6634 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6635 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6636 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6637 TREE_OPERAND (exp, 1), 0)
6638 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6639 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
6641 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6642 temp = gen_reg_rtx (mode);
6643 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6644 dest_left_flag = get_last_insn ();
6645 jumpif (TREE_OPERAND (exp, 0), op0);
6647 /* Allows cleanups up to here. */
6648 old_cleanups = cleanups_this_call;
6649 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6651 dest_right_flag = get_last_insn ();
6654 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6655 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6656 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6657 TREE_OPERAND (exp, 2), 0)
6658 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6659 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
6661 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6662 temp = gen_reg_rtx (mode);
6663 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6664 dest_left_flag = get_last_insn ();
6665 jumpifnot (TREE_OPERAND (exp, 0), op0);
6667 /* Allows cleanups up to here. */
6668 old_cleanups = cleanups_this_call;
6669 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6671 dest_right_flag = get_last_insn ();
6675 op1 = gen_label_rtx ();
6676 jumpifnot (TREE_OPERAND (exp, 0), op0);
6678 /* Allows cleanups up to here. */
6679 old_cleanups = cleanups_this_call;
6681 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6683 expand_expr (TREE_OPERAND (exp, 1),
6684 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6685 dest_left_flag = get_last_insn ();
6687 /* Handle conditional cleanups, if any. */
6688 left_cleanups = defer_cleanups_to (old_cleanups);
6691 emit_jump_insn (gen_jump (op1));
6695 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6697 expand_expr (TREE_OPERAND (exp, 2),
6698 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6699 dest_right_flag = get_last_insn ();
6702 /* Handle conditional cleanups, if any. */
6703 right_cleanups = defer_cleanups_to (old_cleanups);
6709 /* Add back in, any conditional cleanups. */
6710 if (left_cleanups || right_cleanups)
6716 /* Now that we know that a flag is needed, go back and add in the
6717 setting of the flag. */
6719 /* Do the left side flag. */
6720 last = get_last_insn ();
6721 /* Flag left cleanups as needed. */
6722 emit_move_insn (flag, const1_rtx);
6723 /* ??? deprecated, use sequences instead. */
6724 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
6726 /* Do the right side flag. */
6727 last = get_last_insn ();
6728 /* Flag left cleanups as needed. */
6729 emit_move_insn (flag, const0_rtx);
6730 /* ??? deprecated, use sequences instead. */
6731 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
6733 /* All cleanups must be on the function_obstack. */
6734 push_obstacks_nochange ();
6735 resume_temporary_allocation ();
6737 /* convert flag, which is an rtx, into a tree. */
6738 cond = make_node (RTL_EXPR);
6739 TREE_TYPE (cond) = integer_type_node;
6740 RTL_EXPR_RTL (cond) = flag;
6741 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
6742 cond = save_expr (cond);
6744 if (! left_cleanups)
6745 left_cleanups = integer_zero_node;
6746 if (! right_cleanups)
6747 right_cleanups = integer_zero_node;
6748 new_cleanups = build (COND_EXPR, void_type_node,
6749 truthvalue_conversion (cond),
6750 left_cleanups, right_cleanups);
6751 new_cleanups = fold (new_cleanups);
6755 /* Now add in the conditionalized cleanups. */
6757 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
6758 expand_eh_region_start ();
6765 /* Something needs to be initialized, but we didn't know
6766 where that thing was when building the tree. For example,
6767 it could be the return value of a function, or a parameter
6768 to a function which lays down in the stack, or a temporary
6769 variable which must be passed by reference.
6771 We guarantee that the expression will either be constructed
6772 or copied into our original target. */
6774 tree slot = TREE_OPERAND (exp, 0);
6775 tree cleanups = NULL_TREE;
6779 if (TREE_CODE (slot) != VAR_DECL)
6783 target = original_target;
6787 if (DECL_RTL (slot) != 0)
6789 target = DECL_RTL (slot);
6790 /* If we have already expanded the slot, so don't do
6792 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6797 target = assign_temp (type, 2, 1, 1);
6798 /* All temp slots at this level must not conflict. */
6799 preserve_temp_slots (target);
6800 DECL_RTL (slot) = target;
6802 /* Since SLOT is not known to the called function
6803 to belong to its stack frame, we must build an explicit
6804 cleanup. This case occurs when we must build up a reference
6805 to pass the reference as an argument. In this case,
6806 it is very likely that such a reference need not be
6809 if (TREE_OPERAND (exp, 2) == 0)
6810 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
6811 cleanups = TREE_OPERAND (exp, 2);
6816 /* This case does occur, when expanding a parameter which
6817 needs to be constructed on the stack. The target
6818 is the actual stack address that we want to initialize.
6819 The function we call will perform the cleanup in this case. */
6821 /* If we have already assigned it space, use that space,
6822 not target that we were passed in, as our target
6823 parameter is only a hint. */
6824 if (DECL_RTL (slot) != 0)
6826 target = DECL_RTL (slot);
6827 /* If we have already expanded the slot, so don't do
6829 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6833 DECL_RTL (slot) = target;
6836 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
6837 /* Mark it as expanded. */
6838 TREE_OPERAND (exp, 1) = NULL_TREE;
6840 store_expr (exp1, target, 0);
6844 cleanups_this_call = tree_cons (NULL_TREE,
6846 cleanups_this_call);
6847 expand_eh_region_start ();
6855 tree lhs = TREE_OPERAND (exp, 0);
6856 tree rhs = TREE_OPERAND (exp, 1);
6857 tree noncopied_parts = 0;
6858 tree lhs_type = TREE_TYPE (lhs);
6860 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6861 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
6862 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
6863 TYPE_NONCOPIED_PARTS (lhs_type));
6864 while (noncopied_parts != 0)
6866 expand_assignment (TREE_VALUE (noncopied_parts),
6867 TREE_PURPOSE (noncopied_parts), 0, 0);
6868 noncopied_parts = TREE_CHAIN (noncopied_parts);
6875 /* If lhs is complex, expand calls in rhs before computing it.
6876 That's so we don't compute a pointer and save it over a call.
6877 If lhs is simple, compute it first so we can give it as a
6878 target if the rhs is just a call. This avoids an extra temp and copy
6879 and that prevents a partial-subsumption which makes bad code.
6880 Actually we could treat component_ref's of vars like vars. */
6882 tree lhs = TREE_OPERAND (exp, 0);
6883 tree rhs = TREE_OPERAND (exp, 1);
6884 tree noncopied_parts = 0;
6885 tree lhs_type = TREE_TYPE (lhs);
6889 if (TREE_CODE (lhs) != VAR_DECL
6890 && TREE_CODE (lhs) != RESULT_DECL
6891 && TREE_CODE (lhs) != PARM_DECL)
6892 preexpand_calls (exp);
6894 /* Check for |= or &= of a bitfield of size one into another bitfield
6895 of size 1. In this case, (unless we need the result of the
6896 assignment) we can do this more efficiently with a
6897 test followed by an assignment, if necessary.
6899 ??? At this point, we can't get a BIT_FIELD_REF here. But if
6900 things change so we do, this code should be enhanced to
6903 && TREE_CODE (lhs) == COMPONENT_REF
6904 && (TREE_CODE (rhs) == BIT_IOR_EXPR
6905 || TREE_CODE (rhs) == BIT_AND_EXPR)
6906 && TREE_OPERAND (rhs, 0) == lhs
6907 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
6908 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
6909 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
6911 rtx label = gen_label_rtx ();
6913 do_jump (TREE_OPERAND (rhs, 1),
6914 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
6915 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
6916 expand_assignment (lhs, convert (TREE_TYPE (rhs),
6917 (TREE_CODE (rhs) == BIT_IOR_EXPR
6919 : integer_zero_node)),
6921 do_pending_stack_adjust ();
6926 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
6927 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
6928 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
6929 TYPE_NONCOPIED_PARTS (lhs_type));
6931 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6932 while (noncopied_parts != 0)
6934 expand_assignment (TREE_PURPOSE (noncopied_parts),
6935 TREE_VALUE (noncopied_parts), 0, 0);
6936 noncopied_parts = TREE_CHAIN (noncopied_parts);
6941 case PREINCREMENT_EXPR:
6942 case PREDECREMENT_EXPR:
6943 return expand_increment (exp, 0, ignore);
6945 case POSTINCREMENT_EXPR:
6946 case POSTDECREMENT_EXPR:
6947 /* Faster to treat as pre-increment if result is not used. */
6948 return expand_increment (exp, ! ignore, ignore);
6951 /* If nonzero, TEMP will be set to the address of something that might
6952 be a MEM corresponding to a stack slot. */
6955 /* Are we taking the address of a nested function? */
6956 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
6957 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
6958 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0)))
6960 op0 = trampoline_address (TREE_OPERAND (exp, 0));
6961 op0 = force_operand (op0, target);
6963 /* If we are taking the address of something erroneous, just
6965 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
6969 /* We make sure to pass const0_rtx down if we came in with
6970 ignore set, to avoid doing the cleanups twice for something. */
6971 op0 = expand_expr (TREE_OPERAND (exp, 0),
6972 ignore ? const0_rtx : NULL_RTX, VOIDmode,
6973 (modifier == EXPAND_INITIALIZER
6974 ? modifier : EXPAND_CONST_ADDRESS));
6976 /* If we are going to ignore the result, OP0 will have been set
6977 to const0_rtx, so just return it. Don't get confused and
6978 think we are taking the address of the constant. */
6982 op0 = protect_from_queue (op0, 0);
6984 /* We would like the object in memory. If it is a constant,
6985 we can have it be statically allocated into memory. For
6986 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
6987 memory and store the value into it. */
6989 if (CONSTANT_P (op0))
6990 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6992 else if (GET_CODE (op0) == MEM)
6994 mark_temp_addr_taken (op0);
6995 temp = XEXP (op0, 0);
6998 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6999 || GET_CODE (op0) == CONCAT)
7001 /* If this object is in a register, it must be not
7003 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7004 rtx memloc = assign_temp (inner_type, 1, 1, 1);
7006 mark_temp_addr_taken (memloc);
7007 emit_move_insn (memloc, op0);
7011 if (GET_CODE (op0) != MEM)
7014 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7016 temp = XEXP (op0, 0);
7017 #ifdef POINTERS_EXTEND_UNSIGNED
7018 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7019 && mode == ptr_mode)
7020 temp = convert_memory_address (ptr_mode, temp);
7025 op0 = force_operand (XEXP (op0, 0), target);
7028 if (flag_force_addr && GET_CODE (op0) != REG)
7029 op0 = force_reg (Pmode, op0);
7031 if (GET_CODE (op0) == REG
7032 && ! REG_USERVAR_P (op0))
7033 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
7035 /* If we might have had a temp slot, add an equivalent address
7038 update_temp_slot_address (temp, op0);
7040 #ifdef POINTERS_EXTEND_UNSIGNED
7041 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7042 && mode == ptr_mode)
7043 op0 = convert_memory_address (ptr_mode, op0);
7048 case ENTRY_VALUE_EXPR:
7051 /* COMPLEX type for Extended Pascal & Fortran */
7054 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7057 /* Get the rtx code of the operands. */
7058 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7059 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7062 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7066 /* Move the real (op0) and imaginary (op1) parts to their location. */
7067 emit_move_insn (gen_realpart (mode, target), op0);
7068 emit_move_insn (gen_imagpart (mode, target), op1);
7070 insns = get_insns ();
7073 /* Complex construction should appear as a single unit. */
7074 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7075 each with a separate pseudo as destination.
7076 It's not correct for flow to treat them as a unit. */
7077 if (GET_CODE (target) != CONCAT)
7078 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
7086 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7087 return gen_realpart (mode, op0);
7090 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7091 return gen_imagpart (mode, op0);
7095 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7099 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7102 target = gen_reg_rtx (mode);
7106 /* Store the realpart and the negated imagpart to target. */
7107 emit_move_insn (gen_realpart (partmode, target),
7108 gen_realpart (partmode, op0));
7110 imag_t = gen_imagpart (partmode, target);
7111 temp = expand_unop (partmode, neg_optab,
7112 gen_imagpart (partmode, op0), imag_t, 0);
7114 emit_move_insn (imag_t, temp);
7116 insns = get_insns ();
7119 /* Conjugate should appear as a single unit
7120 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
7121 each with a separate pseudo as destination.
7122 It's not correct for flow to treat them as a unit. */
7123 if (GET_CODE (target) != CONCAT)
7124 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
7132 op0 = CONST0_RTX (tmode);
7138 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7141 /* Here to do an ordinary binary operator, generating an instruction
7142 from the optab already placed in `this_optab'. */
7144 preexpand_calls (exp);
7145 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
7147 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7148 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7150 temp = expand_binop (mode, this_optab, op0, op1, target,
7151 unsignedp, OPTAB_LIB_WIDEN);
7158 /* Emit bytecode to evaluate the given expression EXP to the stack. */
7161 bc_expand_expr (exp)
7164 enum tree_code code;
7167 struct binary_operator *binoptab;
7168 struct unary_operator *unoptab;
7169 struct increment_operator *incroptab;
7170 struct bc_label *lab, *lab1;
7171 enum bytecode_opcode opcode;
7174 code = TREE_CODE (exp);
7180 if (DECL_RTL (exp) == 0)
7182 error_with_decl (exp, "prior parameter's size depends on `%s'");
7186 bc_load_parmaddr (DECL_RTL (exp));
7187 bc_load_memory (TREE_TYPE (exp), exp);
7193 if (DECL_RTL (exp) == 0)
7197 if (BYTECODE_LABEL (DECL_RTL (exp)))
7198 bc_load_externaddr (DECL_RTL (exp));
7200 bc_load_localaddr (DECL_RTL (exp));
7202 if (TREE_PUBLIC (exp))
7203 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
7204 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
7206 bc_load_localaddr (DECL_RTL (exp));
7208 bc_load_memory (TREE_TYPE (exp), exp);
7213 #ifdef DEBUG_PRINT_CODE
7214 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
7216 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
7218 : TYPE_MODE (TREE_TYPE (exp)))],
7219 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
7225 #ifdef DEBUG_PRINT_CODE
7226 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
7228 /* FIX THIS: find a better way to pass real_cst's. -bson */
7229 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
7230 (double) TREE_REAL_CST (exp));
7239 /* We build a call description vector describing the type of
7240 the return value and of the arguments; this call vector,
7241 together with a pointer to a location for the return value
7242 and the base of the argument list, is passed to the low
7243 level machine dependent call subroutine, which is responsible
7244 for putting the arguments wherever real functions expect
7245 them, as well as getting the return value back. */
7247 tree calldesc = 0, arg;
7251 /* Push the evaluated args on the evaluation stack in reverse
7252 order. Also make an entry for each arg in the calldesc
7253 vector while we're at it. */
7255 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7257 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
7260 bc_expand_expr (TREE_VALUE (arg));
7262 calldesc = tree_cons ((tree) 0,
7263 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
7265 calldesc = tree_cons ((tree) 0,
7266 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
7270 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7272 /* Allocate a location for the return value and push its
7273 address on the evaluation stack. Also make an entry
7274 at the front of the calldesc for the return value type. */
7276 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7277 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
7278 bc_load_localaddr (retval);
7280 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
7281 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
7283 /* Prepend the argument count. */
7284 calldesc = tree_cons ((tree) 0,
7285 build_int_2 (nargs, 0),
7288 /* Push the address of the call description vector on the stack. */
7289 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
7290 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
7291 build_index_type (build_int_2 (nargs * 2, 0)));
7292 r = output_constant_def (calldesc);
7293 bc_load_externaddr (r);
7295 /* Push the address of the function to be called. */
7296 bc_expand_expr (TREE_OPERAND (exp, 0));
7298 /* Call the function, popping its address and the calldesc vector
7299 address off the evaluation stack in the process. */
7300 bc_emit_instruction (call);
7302 /* Pop the arguments off the stack. */
7303 bc_adjust_stack (nargs);
7305 /* Load the return value onto the stack. */
7306 bc_load_localaddr (retval);
7307 bc_load_memory (type, TREE_OPERAND (exp, 0));
7313 if (!SAVE_EXPR_RTL (exp))
7315 /* First time around: copy to local variable */
7316 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
7317 TYPE_ALIGN (TREE_TYPE(exp)));
7318 bc_expand_expr (TREE_OPERAND (exp, 0));
7319 bc_emit_instruction (duplicate);
7321 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7322 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7326 /* Consecutive reference: use saved copy */
7327 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7328 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7333 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
7334 how are they handled instead? */
7337 TREE_USED (exp) = 1;
7338 bc_expand_expr (STMT_BODY (exp));
7345 bc_expand_expr (TREE_OPERAND (exp, 0));
7346 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
7351 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
7356 bc_expand_address (TREE_OPERAND (exp, 0));
7361 bc_expand_expr (TREE_OPERAND (exp, 0));
7362 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7367 bc_expand_expr (bc_canonicalize_array_ref (exp));
7372 bc_expand_component_address (exp);
7374 /* If we have a bitfield, generate a proper load */
7375 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
7380 bc_expand_expr (TREE_OPERAND (exp, 0));
7381 bc_emit_instruction (drop);
7382 bc_expand_expr (TREE_OPERAND (exp, 1));
7387 bc_expand_expr (TREE_OPERAND (exp, 0));
7388 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7389 lab = bc_get_bytecode_label ();
7390 bc_emit_bytecode (xjumpifnot);
7391 bc_emit_bytecode_labelref (lab);
7393 #ifdef DEBUG_PRINT_CODE
7394 fputc ('\n', stderr);
7396 bc_expand_expr (TREE_OPERAND (exp, 1));
7397 lab1 = bc_get_bytecode_label ();
7398 bc_emit_bytecode (jump);
7399 bc_emit_bytecode_labelref (lab1);
7401 #ifdef DEBUG_PRINT_CODE
7402 fputc ('\n', stderr);
7405 bc_emit_bytecode_labeldef (lab);
7406 bc_expand_expr (TREE_OPERAND (exp, 2));
7407 bc_emit_bytecode_labeldef (lab1);
7410 case TRUTH_ANDIF_EXPR:
7412 opcode = xjumpifnot;
7415 case TRUTH_ORIF_EXPR:
7422 binoptab = optab_plus_expr;
7427 binoptab = optab_minus_expr;
7432 binoptab = optab_mult_expr;
7435 case TRUNC_DIV_EXPR:
7436 case FLOOR_DIV_EXPR:
7438 case ROUND_DIV_EXPR:
7439 case EXACT_DIV_EXPR:
7441 binoptab = optab_trunc_div_expr;
7444 case TRUNC_MOD_EXPR:
7445 case FLOOR_MOD_EXPR:
7447 case ROUND_MOD_EXPR:
7449 binoptab = optab_trunc_mod_expr;
7452 case FIX_ROUND_EXPR:
7453 case FIX_FLOOR_EXPR:
7455 abort (); /* Not used for C. */
7457 case FIX_TRUNC_EXPR:
7464 abort (); /* FIXME */
7468 binoptab = optab_rdiv_expr;
7473 binoptab = optab_bit_and_expr;
7478 binoptab = optab_bit_ior_expr;
7483 binoptab = optab_bit_xor_expr;
7488 binoptab = optab_lshift_expr;
7493 binoptab = optab_rshift_expr;
7496 case TRUTH_AND_EXPR:
7498 binoptab = optab_truth_and_expr;
7503 binoptab = optab_truth_or_expr;
7508 binoptab = optab_lt_expr;
7513 binoptab = optab_le_expr;
7518 binoptab = optab_ge_expr;
7523 binoptab = optab_gt_expr;
7528 binoptab = optab_eq_expr;
7533 binoptab = optab_ne_expr;
7538 unoptab = optab_negate_expr;
7543 unoptab = optab_bit_not_expr;
7546 case TRUTH_NOT_EXPR:
7548 unoptab = optab_truth_not_expr;
7551 case PREDECREMENT_EXPR:
7553 incroptab = optab_predecrement_expr;
7556 case PREINCREMENT_EXPR:
7558 incroptab = optab_preincrement_expr;
7561 case POSTDECREMENT_EXPR:
7563 incroptab = optab_postdecrement_expr;
7566 case POSTINCREMENT_EXPR:
7568 incroptab = optab_postincrement_expr;
7573 bc_expand_constructor (exp);
7583 tree vars = TREE_OPERAND (exp, 0);
7584 int vars_need_expansion = 0;
7586 /* Need to open a binding contour here because
7587 if there are any cleanups they most be contained here. */
7588 expand_start_bindings (0);
7590 /* Mark the corresponding BLOCK for output. */
7591 if (TREE_OPERAND (exp, 2) != 0)
7592 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
7594 /* If VARS have not yet been expanded, expand them now. */
7597 if (DECL_RTL (vars) == 0)
7599 vars_need_expansion = 1;
7602 expand_decl_init (vars);
7603 vars = TREE_CHAIN (vars);
7606 bc_expand_expr (TREE_OPERAND (exp, 1));
7608 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7618 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
7619 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
7625 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7631 bc_expand_expr (TREE_OPERAND (exp, 0));
7632 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7633 lab = bc_get_bytecode_label ();
7635 bc_emit_instruction (duplicate);
7636 bc_emit_bytecode (opcode);
7637 bc_emit_bytecode_labelref (lab);
7639 #ifdef DEBUG_PRINT_CODE
7640 fputc ('\n', stderr);
7643 bc_emit_instruction (drop);
7645 bc_expand_expr (TREE_OPERAND (exp, 1));
7646 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
7647 bc_emit_bytecode_labeldef (lab);
7653 type = TREE_TYPE (TREE_OPERAND (exp, 0));
7655 /* Push the quantum. */
7656 bc_expand_expr (TREE_OPERAND (exp, 1));
7658 /* Convert it to the lvalue's type. */
7659 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
7661 /* Push the address of the lvalue */
7662 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
7664 /* Perform actual increment */
7665 bc_expand_increment (incroptab, type);
7669 /* Return the alignment in bits of EXP, a pointer valued expression.
7670 But don't return more than MAX_ALIGN no matter what.
7671 The alignment returned is, by default, the alignment of the thing that
7672 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7674 Otherwise, look at the expression to see if we can do better, i.e., if the
7675 expression is actually pointing at an object whose alignment is tighter. */
7678 get_pointer_alignment (exp, max_align)
7682 unsigned align, inner;
7684 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7687 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7688 align = MIN (align, max_align);
7692 switch (TREE_CODE (exp))
7696 case NON_LVALUE_EXPR:
7697 exp = TREE_OPERAND (exp, 0);
7698 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7700 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7701 align = MIN (inner, max_align);
7705 /* If sum of pointer + int, restrict our maximum alignment to that
7706 imposed by the integer. If not, we can't do any better than
7708 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
7711 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
7716 exp = TREE_OPERAND (exp, 0);
7720 /* See what we are pointing at and look at its alignment. */
7721 exp = TREE_OPERAND (exp, 0);
7722 if (TREE_CODE (exp) == FUNCTION_DECL)
7723 align = FUNCTION_BOUNDARY;
7724 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
7725 align = DECL_ALIGN (exp);
7726 #ifdef CONSTANT_ALIGNMENT
7727 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7728 align = CONSTANT_ALIGNMENT (exp, align);
7730 return MIN (align, max_align);
7738 /* Return the tree node and offset if a given argument corresponds to
7739 a string constant. */
7742 string_constant (arg, ptr_offset)
7748 if (TREE_CODE (arg) == ADDR_EXPR
7749 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7751 *ptr_offset = integer_zero_node;
7752 return TREE_OPERAND (arg, 0);
7754 else if (TREE_CODE (arg) == PLUS_EXPR)
7756 tree arg0 = TREE_OPERAND (arg, 0);
7757 tree arg1 = TREE_OPERAND (arg, 1);
7762 if (TREE_CODE (arg0) == ADDR_EXPR
7763 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
7766 return TREE_OPERAND (arg0, 0);
7768 else if (TREE_CODE (arg1) == ADDR_EXPR
7769 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
7772 return TREE_OPERAND (arg1, 0);
7779 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
7780 way, because it could contain a zero byte in the middle.
7781 TREE_STRING_LENGTH is the size of the character array, not the string.
7783 Unfortunately, string_constant can't access the values of const char
7784 arrays with initializers, so neither can we do so here. */
7794 src = string_constant (src, &offset_node);
7797 max = TREE_STRING_LENGTH (src);
7798 ptr = TREE_STRING_POINTER (src);
7799 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
7801 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7802 compute the offset to the following null if we don't know where to
7803 start searching for it. */
7805 for (i = 0; i < max; i++)
7808 /* We don't know the starting offset, but we do know that the string
7809 has no internal zero bytes. We can assume that the offset falls
7810 within the bounds of the string; otherwise, the programmer deserves
7811 what he gets. Subtract the offset from the length of the string,
7813 /* This would perhaps not be valid if we were dealing with named
7814 arrays in addition to literal string constants. */
7815 return size_binop (MINUS_EXPR, size_int (max), offset_node);
7818 /* We have a known offset into the string. Start searching there for
7819 a null character. */
7820 if (offset_node == 0)
7824 /* Did we get a long long offset? If so, punt. */
7825 if (TREE_INT_CST_HIGH (offset_node) != 0)
7827 offset = TREE_INT_CST_LOW (offset_node);
7829 /* If the offset is known to be out of bounds, warn, and call strlen at
7831 if (offset < 0 || offset > max)
7833 warning ("offset outside bounds of constant string");
7836 /* Use strlen to search for the first zero byte. Since any strings
7837 constructed with build_string will have nulls appended, we win even
7838 if we get handed something like (char[4])"abcd".
7840 Since OFFSET is our starting index into the string, no further
7841 calculation is needed. */
7842 return size_int (strlen (ptr + offset));
7846 expand_builtin_return_addr (fndecl_code, count, tem)
7847 enum built_in_function fndecl_code;
7853 /* Some machines need special handling before we can access
7854 arbitrary frames. For example, on the sparc, we must first flush
7855 all register windows to the stack. */
7856 #ifdef SETUP_FRAME_ADDRESSES
7857 SETUP_FRAME_ADDRESSES ();
7860 /* On the sparc, the return address is not in the frame, it is in a
7861 register. There is no way to access it off of the current frame
7862 pointer, but it can be accessed off the previous frame pointer by
7863 reading the value from the register window save area. */
7864 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7865 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
7869 /* Scan back COUNT frames to the specified frame. */
7870 for (i = 0; i < count; i++)
7872 /* Assume the dynamic chain pointer is in the word that the
7873 frame address points to, unless otherwise specified. */
7874 #ifdef DYNAMIC_CHAIN_ADDRESS
7875 tem = DYNAMIC_CHAIN_ADDRESS (tem);
7877 tem = memory_address (Pmode, tem);
7878 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
7881 /* For __builtin_frame_address, return what we've got. */
7882 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
7885 /* For __builtin_return_address, Get the return address from that
7887 #ifdef RETURN_ADDR_RTX
7888 tem = RETURN_ADDR_RTX (count, tem);
7890 tem = memory_address (Pmode,
7891 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7892 tem = gen_rtx (MEM, Pmode, tem);
7897 /* Expand an expression EXP that calls a built-in function,
7898 with result going to TARGET if that's convenient
7899 (and in mode MODE if that's convenient).
7900 SUBTARGET may be used as the target for computing one of EXP's operands.
7901 IGNORE is nonzero if the value is to be ignored. */
7903 #define CALLED_AS_BUILT_IN(NODE) \
7904 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
7907 expand_builtin (exp, target, subtarget, mode, ignore)
7911 enum machine_mode mode;
7914 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7915 tree arglist = TREE_OPERAND (exp, 1);
7918 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
7919 optab builtin_optab;
7921 switch (DECL_FUNCTION_CODE (fndecl))
7926 /* build_function_call changes these into ABS_EXPR. */
7931 /* Treat these like sqrt, but only if the user asks for them. */
7932 if (! flag_fast_math)
7934 case BUILT_IN_FSQRT:
7935 /* If not optimizing, call the library function. */
7940 /* Arg could be wrong type if user redeclared this fcn wrong. */
7941 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
7944 /* Stabilize and compute the argument. */
7945 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
7946 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
7948 exp = copy_node (exp);
7949 arglist = copy_node (arglist);
7950 TREE_OPERAND (exp, 1) = arglist;
7951 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
7953 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7955 /* Make a suitable register to place result in. */
7956 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7961 switch (DECL_FUNCTION_CODE (fndecl))
7964 builtin_optab = sin_optab; break;
7966 builtin_optab = cos_optab; break;
7967 case BUILT_IN_FSQRT:
7968 builtin_optab = sqrt_optab; break;
7973 /* Compute into TARGET.
7974 Set TARGET to wherever the result comes back. */
7975 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7976 builtin_optab, op0, target, 0);
7978 /* If we were unable to expand via the builtin, stop the
7979 sequence (without outputting the insns) and break, causing
7980 a call the the library function. */
7987 /* Check the results by default. But if flag_fast_math is turned on,
7988 then assume sqrt will always be called with valid arguments. */
7990 if (! flag_fast_math)
7992 /* Don't define the builtin FP instructions
7993 if your machine is not IEEE. */
7994 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
7997 lab1 = gen_label_rtx ();
7999 /* Test the result; if it is NaN, set errno=EDOM because
8000 the argument was not in the domain. */
8001 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
8002 emit_jump_insn (gen_beq (lab1));
8006 #ifdef GEN_ERRNO_RTX
8007 rtx errno_rtx = GEN_ERRNO_RTX;
8010 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
8013 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8016 /* We can't set errno=EDOM directly; let the library call do it.
8017 Pop the arguments right away in case the call gets deleted. */
8019 expand_call (exp, target, 0);
8026 /* Output the entire sequence. */
8027 insns = get_insns ();
8033 /* __builtin_apply_args returns block of memory allocated on
8034 the stack into which is stored the arg pointer, structure
8035 value address, static chain, and all the registers that might
8036 possibly be used in performing a function call. The code is
8037 moved to the start of the function so the incoming values are
8039 case BUILT_IN_APPLY_ARGS:
8040 /* Don't do __builtin_apply_args more than once in a function.
8041 Save the result of the first call and reuse it. */
8042 if (apply_args_value != 0)
8043 return apply_args_value;
8045 /* When this function is called, it means that registers must be
8046 saved on entry to this function. So we migrate the
8047 call to the first insn of this function. */
8052 temp = expand_builtin_apply_args ();
8056 apply_args_value = temp;
8058 /* Put the sequence after the NOTE that starts the function.
8059 If this is inside a SEQUENCE, make the outer-level insn
8060 chain current, so the code is placed at the start of the
8062 push_topmost_sequence ();
8063 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8064 pop_topmost_sequence ();
8068 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8069 FUNCTION with a copy of the parameters described by
8070 ARGUMENTS, and ARGSIZE. It returns a block of memory
8071 allocated on the stack into which is stored all the registers
8072 that might possibly be used for returning the result of a
8073 function. ARGUMENTS is the value returned by
8074 __builtin_apply_args. ARGSIZE is the number of bytes of
8075 arguments that must be copied. ??? How should this value be
8076 computed? We'll also need a safe worst case value for varargs
8078 case BUILT_IN_APPLY:
8080 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8081 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8082 || TREE_CHAIN (arglist) == 0
8083 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8084 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8085 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8093 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8094 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
8096 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8099 /* __builtin_return (RESULT) causes the function to return the
8100 value described by RESULT. RESULT is address of the block of
8101 memory returned by __builtin_apply. */
8102 case BUILT_IN_RETURN:
8104 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8105 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8106 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8107 NULL_RTX, VOIDmode, 0));
8110 case BUILT_IN_SAVEREGS:
8111 /* Don't do __builtin_saveregs more than once in a function.
8112 Save the result of the first call and reuse it. */
8113 if (saveregs_value != 0)
8114 return saveregs_value;
8116 /* When this function is called, it means that registers must be
8117 saved on entry to this function. So we migrate the
8118 call to the first insn of this function. */
8122 /* Now really call the function. `expand_call' does not call
8123 expand_builtin, so there is no danger of infinite recursion here. */
8126 #ifdef EXPAND_BUILTIN_SAVEREGS
8127 /* Do whatever the machine needs done in this case. */
8128 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8130 /* The register where the function returns its value
8131 is likely to have something else in it, such as an argument.
8132 So preserve that register around the call. */
8134 if (value_mode != VOIDmode)
8136 rtx valreg = hard_libcall_value (value_mode);
8137 rtx saved_valreg = gen_reg_rtx (value_mode);
8139 emit_move_insn (saved_valreg, valreg);
8140 temp = expand_call (exp, target, ignore);
8141 emit_move_insn (valreg, saved_valreg);
8144 /* Generate the call, putting the value in a pseudo. */
8145 temp = expand_call (exp, target, ignore);
8151 saveregs_value = temp;
8153 /* Put the sequence after the NOTE that starts the function.
8154 If this is inside a SEQUENCE, make the outer-level insn
8155 chain current, so the code is placed at the start of the
8157 push_topmost_sequence ();
8158 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8159 pop_topmost_sequence ();
8163 /* __builtin_args_info (N) returns word N of the arg space info
8164 for the current function. The number and meanings of words
8165 is controlled by the definition of CUMULATIVE_ARGS. */
8166 case BUILT_IN_ARGS_INFO:
8168 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
8170 int *word_ptr = (int *) ¤t_function_args_info;
8171 tree type, elts, result;
8173 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8174 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8175 __FILE__, __LINE__);
8179 tree arg = TREE_VALUE (arglist);
8180 if (TREE_CODE (arg) != INTEGER_CST)
8181 error ("argument of `__builtin_args_info' must be constant");
8184 int wordnum = TREE_INT_CST_LOW (arg);
8186 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8187 error ("argument of `__builtin_args_info' out of range");
8189 return GEN_INT (word_ptr[wordnum]);
8193 error ("missing argument in `__builtin_args_info'");
8198 for (i = 0; i < nwords; i++)
8199 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
8201 type = build_array_type (integer_type_node,
8202 build_index_type (build_int_2 (nwords, 0)));
8203 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8204 TREE_CONSTANT (result) = 1;
8205 TREE_STATIC (result) = 1;
8206 result = build (INDIRECT_REF, build_pointer_type (type), result);
8207 TREE_CONSTANT (result) = 1;
8208 return expand_expr (result, NULL_RTX, VOIDmode, 0);
8212 /* Return the address of the first anonymous stack arg. */
8213 case BUILT_IN_NEXT_ARG:
8215 tree fntype = TREE_TYPE (current_function_decl);
8217 if ((TYPE_ARG_TYPES (fntype) == 0
8218 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8220 && ! current_function_varargs)
8222 error ("`va_start' used in function with fixed args");
8228 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8229 tree arg = TREE_VALUE (arglist);
8231 /* Strip off all nops for the sake of the comparison. This
8232 is not quite the same as STRIP_NOPS. It does more.
8233 We must also strip off INDIRECT_EXPR for C++ reference
8235 while (TREE_CODE (arg) == NOP_EXPR
8236 || TREE_CODE (arg) == CONVERT_EXPR
8237 || TREE_CODE (arg) == NON_LVALUE_EXPR
8238 || TREE_CODE (arg) == INDIRECT_REF)
8239 arg = TREE_OPERAND (arg, 0);
8240 if (arg != last_parm)
8241 warning ("second parameter of `va_start' not last named argument");
8243 else if (! current_function_varargs)
8244 /* Evidently an out of date version of <stdarg.h>; can't validate
8245 va_start's second argument, but can still work as intended. */
8246 warning ("`__builtin_next_arg' called without an argument");
8249 return expand_binop (Pmode, add_optab,
8250 current_function_internal_arg_pointer,
8251 current_function_arg_offset_rtx,
8252 NULL_RTX, 0, OPTAB_LIB_WIDEN);
8254 case BUILT_IN_CLASSIFY_TYPE:
8257 tree type = TREE_TYPE (TREE_VALUE (arglist));
8258 enum tree_code code = TREE_CODE (type);
8259 if (code == VOID_TYPE)
8260 return GEN_INT (void_type_class);
8261 if (code == INTEGER_TYPE)
8262 return GEN_INT (integer_type_class);
8263 if (code == CHAR_TYPE)
8264 return GEN_INT (char_type_class);
8265 if (code == ENUMERAL_TYPE)
8266 return GEN_INT (enumeral_type_class);
8267 if (code == BOOLEAN_TYPE)
8268 return GEN_INT (boolean_type_class);
8269 if (code == POINTER_TYPE)
8270 return GEN_INT (pointer_type_class);
8271 if (code == REFERENCE_TYPE)
8272 return GEN_INT (reference_type_class);
8273 if (code == OFFSET_TYPE)
8274 return GEN_INT (offset_type_class);
8275 if (code == REAL_TYPE)
8276 return GEN_INT (real_type_class);
8277 if (code == COMPLEX_TYPE)
8278 return GEN_INT (complex_type_class);
8279 if (code == FUNCTION_TYPE)
8280 return GEN_INT (function_type_class);
8281 if (code == METHOD_TYPE)
8282 return GEN_INT (method_type_class);
8283 if (code == RECORD_TYPE)
8284 return GEN_INT (record_type_class);
8285 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8286 return GEN_INT (union_type_class);
8287 if (code == ARRAY_TYPE)
8289 if (TYPE_STRING_FLAG (type))
8290 return GEN_INT (string_type_class);
8292 return GEN_INT (array_type_class);
8294 if (code == SET_TYPE)
8295 return GEN_INT (set_type_class);
8296 if (code == FILE_TYPE)
8297 return GEN_INT (file_type_class);
8298 if (code == LANG_TYPE)
8299 return GEN_INT (lang_type_class);
8301 return GEN_INT (no_type_class);
8303 case BUILT_IN_CONSTANT_P:
8308 tree arg = TREE_VALUE (arglist);
8311 return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
8312 || (TREE_CODE (arg) == ADDR_EXPR
8313 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8314 ? const1_rtx : const0_rtx);
8317 case BUILT_IN_FRAME_ADDRESS:
8318 /* The argument must be a nonnegative integer constant.
8319 It counts the number of frames to scan up the stack.
8320 The value is the address of that frame. */
8321 case BUILT_IN_RETURN_ADDRESS:
8322 /* The argument must be a nonnegative integer constant.
8323 It counts the number of frames to scan up the stack.
8324 The value is the return address saved in that frame. */
8326 /* Warning about missing arg was already issued. */
8328 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
8330 error ("invalid arg to `__builtin_return_address'");
8333 else if (tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
8335 error ("invalid arg to `__builtin_return_address'");
8340 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
8341 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
8342 hard_frame_pointer_rtx);
8344 /* For __builtin_frame_address, return what we've got. */
8345 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8348 if (GET_CODE (tem) != REG)
8349 tem = copy_to_reg (tem);
8353 case BUILT_IN_ALLOCA:
8355 /* Arg could be non-integer if user redeclared this fcn wrong. */
8356 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8359 /* Compute the argument. */
8360 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
8362 /* Allocate the desired space. */
8363 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
8366 /* If not optimizing, call the library function. */
8367 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8371 /* Arg could be non-integer if user redeclared this fcn wrong. */
8372 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8375 /* Compute the argument. */
8376 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8377 /* Compute ffs, into TARGET if possible.
8378 Set TARGET to wherever the result comes back. */
8379 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8380 ffs_optab, op0, target, 1);
8385 case BUILT_IN_STRLEN:
8386 /* If not optimizing, call the library function. */
8387 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8391 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8392 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8396 tree src = TREE_VALUE (arglist);
8397 tree len = c_strlen (src);
8400 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8402 rtx result, src_rtx, char_rtx;
8403 enum machine_mode insn_mode = value_mode, char_mode;
8404 enum insn_code icode;
8406 /* If the length is known, just return it. */
8408 return expand_expr (len, target, mode, 0);
8410 /* If SRC is not a pointer type, don't do this operation inline. */
8414 /* Call a function if we can't compute strlen in the right mode. */
8416 while (insn_mode != VOIDmode)
8418 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
8419 if (icode != CODE_FOR_nothing)
8422 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
8424 if (insn_mode == VOIDmode)
8427 /* Make a place to write the result of the instruction. */
8430 && GET_CODE (result) == REG
8431 && GET_MODE (result) == insn_mode
8432 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8433 result = gen_reg_rtx (insn_mode);
8435 /* Make sure the operands are acceptable to the predicates. */
8437 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
8438 result = gen_reg_rtx (insn_mode);
8440 src_rtx = memory_address (BLKmode,
8441 expand_expr (src, NULL_RTX, ptr_mode,
8443 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
8444 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
8446 char_rtx = const0_rtx;
8447 char_mode = insn_operand_mode[(int)icode][2];
8448 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
8449 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
8451 emit_insn (GEN_FCN (icode) (result,
8452 gen_rtx (MEM, BLKmode, src_rtx),
8453 char_rtx, GEN_INT (align)));
8455 /* Return the value in the proper mode for this function. */
8456 if (GET_MODE (result) == value_mode)
8458 else if (target != 0)
8460 convert_move (target, result, 0);
8464 return convert_to_mode (value_mode, result, 0);
8467 case BUILT_IN_STRCPY:
8468 /* If not optimizing, call the library function. */
8469 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8473 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8474 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8475 || TREE_CHAIN (arglist) == 0
8476 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8480 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
8485 len = size_binop (PLUS_EXPR, len, integer_one_node);
8487 chainon (arglist, build_tree_list (NULL_TREE, len));
8491 case BUILT_IN_MEMCPY:
8492 /* If not optimizing, call the library function. */
8493 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8497 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8498 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8499 || TREE_CHAIN (arglist) == 0
8500 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8501 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8502 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8506 tree dest = TREE_VALUE (arglist);
8507 tree src = TREE_VALUE (TREE_CHAIN (arglist));
8508 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8512 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8514 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8515 rtx dest_rtx, dest_mem, src_mem;
8517 /* If either SRC or DEST is not a pointer type, don't do
8518 this operation in-line. */
8519 if (src_align == 0 || dest_align == 0)
8521 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
8522 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8526 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8527 dest_mem = gen_rtx (MEM, BLKmode,
8528 memory_address (BLKmode, dest_rtx));
8529 /* There could be a void* cast on top of the object. */
8530 while (TREE_CODE (dest) == NOP_EXPR)
8531 dest = TREE_OPERAND (dest, 0);
8532 type = TREE_TYPE (TREE_TYPE (dest));
8533 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8534 src_mem = gen_rtx (MEM, BLKmode,
8535 memory_address (BLKmode,
8536 expand_expr (src, NULL_RTX,
8539 /* There could be a void* cast on top of the object. */
8540 while (TREE_CODE (src) == NOP_EXPR)
8541 src = TREE_OPERAND (src, 0);
8542 type = TREE_TYPE (TREE_TYPE (src));
8543 MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
8545 /* Copy word part most expediently. */
8546 emit_block_move (dest_mem, src_mem,
8547 expand_expr (len, NULL_RTX, VOIDmode, 0),
8548 MIN (src_align, dest_align));
8549 return force_operand (dest_rtx, NULL_RTX);
8552 case BUILT_IN_MEMSET:
8553 /* If not optimizing, call the library function. */
8554 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8558 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8559 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8560 || TREE_CHAIN (arglist) == 0
8561 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
8563 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8565 != (TREE_CODE (TREE_TYPE
8567 (TREE_CHAIN (TREE_CHAIN (arglist))))))))
8571 tree dest = TREE_VALUE (arglist);
8572 tree val = TREE_VALUE (TREE_CHAIN (arglist));
8573 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8577 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8578 rtx dest_rtx, dest_mem;
8580 /* If DEST is not a pointer type, don't do this
8581 operation in-line. */
8582 if (dest_align == 0)
8585 /* If VAL is not 0, don't do this operation in-line. */
8586 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
8589 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8590 dest_mem = gen_rtx (MEM, BLKmode,
8591 memory_address (BLKmode, dest_rtx));
8592 /* There could be a void* cast on top of the object. */
8593 while (TREE_CODE (dest) == NOP_EXPR)
8594 dest = TREE_OPERAND (dest, 0);
8595 type = TREE_TYPE (TREE_TYPE (dest));
8596 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8598 clear_storage (dest_mem, expand_expr (len, NULL_RTX, VOIDmode, 0),
8601 return force_operand (dest_rtx, NULL_RTX);
8604 /* These comparison functions need an instruction that returns an actual
8605 index. An ordinary compare that just sets the condition codes
8607 #ifdef HAVE_cmpstrsi
8608 case BUILT_IN_STRCMP:
8609 /* If not optimizing, call the library function. */
8610 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8614 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8615 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8616 || TREE_CHAIN (arglist) == 0
8617 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8619 else if (!HAVE_cmpstrsi)
8622 tree arg1 = TREE_VALUE (arglist);
8623 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8627 len = c_strlen (arg1);
8629 len = size_binop (PLUS_EXPR, integer_one_node, len);
8630 len2 = c_strlen (arg2);
8632 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
8634 /* If we don't have a constant length for the first, use the length
8635 of the second, if we know it. We don't require a constant for
8636 this case; some cost analysis could be done if both are available
8637 but neither is constant. For now, assume they're equally cheap.
8639 If both strings have constant lengths, use the smaller. This
8640 could arise if optimization results in strcpy being called with
8641 two fixed strings, or if the code was machine-generated. We should
8642 add some code to the `memcmp' handler below to deal with such
8643 situations, someday. */
8644 if (!len || TREE_CODE (len) != INTEGER_CST)
8651 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
8653 if (tree_int_cst_lt (len2, len))
8657 chainon (arglist, build_tree_list (NULL_TREE, len));
8661 case BUILT_IN_MEMCMP:
8662 /* If not optimizing, call the library function. */
8663 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8667 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8668 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8669 || TREE_CHAIN (arglist) == 0
8670 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8671 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8672 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8674 else if (!HAVE_cmpstrsi)
8677 tree arg1 = TREE_VALUE (arglist);
8678 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8679 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8683 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8685 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8686 enum machine_mode insn_mode
8687 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
8689 /* If we don't have POINTER_TYPE, call the function. */
8690 if (arg1_align == 0 || arg2_align == 0)
8692 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
8693 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8697 /* Make a place to write the result of the instruction. */
8700 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
8701 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8702 result = gen_reg_rtx (insn_mode);
8704 emit_insn (gen_cmpstrsi (result,
8705 gen_rtx (MEM, BLKmode,
8706 expand_expr (arg1, NULL_RTX,
8709 gen_rtx (MEM, BLKmode,
8710 expand_expr (arg2, NULL_RTX,
8713 expand_expr (len, NULL_RTX, VOIDmode, 0),
8714 GEN_INT (MIN (arg1_align, arg2_align))));
8716 /* Return the value in the proper mode for this function. */
8717 mode = TYPE_MODE (TREE_TYPE (exp));
8718 if (GET_MODE (result) == mode)
8720 else if (target != 0)
8722 convert_move (target, result, 0);
8726 return convert_to_mode (mode, result, 0);
8729 case BUILT_IN_STRCMP:
8730 case BUILT_IN_MEMCMP:
8734 /* __builtin_setjmp is passed a pointer to an array of five words
8735 (not all will be used on all machines). It operates similarly to
8736 the C library function of the same name, but is more efficient.
8737 Much of the code below (and for longjmp) is copied from the handling
8740 NOTE: This is intended for use by GNAT and will only work in
8741 the method used by it. This code will likely NOT survive to
8742 the GCC 2.8.0 release. */
8743 case BUILT_IN_SETJMP:
8745 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8749 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
8751 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
8752 enum machine_mode sa_mode = Pmode;
8754 int old_inhibit_defer_pop = inhibit_defer_pop;
8755 int return_pops = RETURN_POPS_ARGS (get_identifier ("__dummy"),
8756 get_identifier ("__dummy"), 0);
8758 CUMULATIVE_ARGS args_so_far;
8761 #ifdef POINTERS_EXTEND_UNSIGNED
8762 buf_addr = convert_memory_address (Pmode, buf_addr);
8765 buf_addr = force_reg (Pmode, buf_addr);
8767 if (target == 0 || GET_CODE (target) != REG
8768 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8769 target = gen_reg_rtx (value_mode);
8773 CONST_CALL_P (emit_note (NULL_PTR, NOTE_INSN_SETJMP)) = 1;
8774 current_function_calls_setjmp = 1;
8776 /* We store the frame pointer and the address of lab1 in the buffer
8777 and use the rest of it for the stack save area, which is
8778 machine-dependent. */
8779 emit_move_insn (gen_rtx (MEM, Pmode, buf_addr),
8780 virtual_stack_vars_rtx);
8782 (validize_mem (gen_rtx (MEM, Pmode,
8783 plus_constant (buf_addr,
8784 GET_MODE_SIZE (Pmode)))),
8785 gen_rtx (LABEL_REF, Pmode, lab1));
8787 #ifdef HAVE_save_stack_nonlocal
8788 if (HAVE_save_stack_nonlocal)
8789 sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
8792 stack_save = gen_rtx (MEM, sa_mode,
8793 plus_constant (buf_addr,
8794 2 * GET_MODE_SIZE (Pmode)));
8795 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
8799 emit_insn (gen_setjmp ());
8802 /* Set TARGET to zero and branch around the other case. */
8803 emit_move_insn (target, const0_rtx);
8804 emit_jump_insn (gen_jump (lab2));
8808 /* Note that setjmp clobbers FP when we get here, so we have to
8809 make sure it's marked as used by this function. */
8810 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
8812 /* Mark the static chain as clobbered here so life information
8813 doesn't get messed up for it. */
8814 emit_insn (gen_rtx (CLOBBER, VOIDmode, static_chain_rtx));
8816 /* Now put in the code to restore the frame pointer, and argument
8817 pointer, if needed. The code below is from expand_end_bindings
8818 in stmt.c; see detailed documentation there. */
8819 #ifdef HAVE_nonlocal_goto
8820 if (! HAVE_nonlocal_goto)
8822 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8824 current_function_has_nonlocal_goto = 1;
8826 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8827 if (fixed_regs[ARG_POINTER_REGNUM])
8829 #ifdef ELIMINABLE_REGS
8830 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
8832 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8833 if (elim_regs[i].from == ARG_POINTER_REGNUM
8834 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8837 if (i == sizeof elim_regs / sizeof elim_regs [0])
8840 /* Now restore our arg pointer from the address at which it
8841 was saved in our stack frame.
8842 If there hasn't be space allocated for it yet, make
8844 if (arg_pointer_save_area == 0)
8845 arg_pointer_save_area
8846 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8847 emit_move_insn (virtual_incoming_args_rtx,
8848 copy_to_reg (arg_pointer_save_area));
8853 #ifdef HAVE_nonlocal_goto_receiver
8854 if (HAVE_nonlocal_goto_receiver)
8855 emit_insn (gen_nonlocal_goto_receiver ());
8857 /* The static chain pointer contains the address of dummy function.
8858 We need to call it here to handle some PIC cases of restoring
8859 a global pointer. Then return 1. */
8860 op0 = copy_to_mode_reg (Pmode, static_chain_rtx);
8862 /* We can't actually call emit_library_call here, so do everything
8863 it does, which isn't much for a libfunc with no args. */
8864 op0 = memory_address (FUNCTION_MODE, op0);
8866 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE,
8867 gen_rtx (SYMBOL_REF, Pmode, "__dummy"), 1);
8868 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1);
8870 #ifndef ACCUMULATE_OUTGOING_ARGS
8871 #ifdef HAVE_call_pop
8873 emit_call_insn (gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, op0),
8874 const0_rtx, next_arg_reg,
8875 GEN_INT (return_pops)));
8882 emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, op0),
8883 const0_rtx, next_arg_reg, const0_rtx));
8888 emit_move_insn (target, const1_rtx);
8893 /* __builtin_longjmp is passed a pointer to an array of five words
8894 and a value, which is a dummy. It's similar to the C library longjmp
8895 function but works with __builtin_setjmp above. */
8896 case BUILT_IN_LONGJMP:
8897 if (arglist == 0 || TREE_CHAIN (arglist) == 0
8898 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8902 tree dummy_id = get_identifier ("__dummy");
8903 tree dummy_type = build_function_type (void_type_node, NULL_TREE);
8904 tree dummy_decl = build_decl (FUNCTION_DECL, dummy_id, dummy_type);
8905 #ifdef POINTERS_EXTEND_UNSIGNED
8908 convert_memory_address
8910 expand_expr (TREE_VALUE (arglist),
8911 NULL_RTX, VOIDmode, 0)));
8914 = force_reg (Pmode, expand_expr (TREE_VALUE (arglist),
8918 rtx fp = gen_rtx (MEM, Pmode, buf_addr);
8919 rtx lab = gen_rtx (MEM, Pmode,
8920 plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
8921 enum machine_mode sa_mode
8922 #ifdef HAVE_save_stack_nonlocal
8923 = (HAVE_save_stack_nonlocal
8924 ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
8929 rtx stack = gen_rtx (MEM, sa_mode,
8930 plus_constant (buf_addr,
8931 2 * GET_MODE_SIZE (Pmode)));
8933 DECL_EXTERNAL (dummy_decl) = 1;
8934 TREE_PUBLIC (dummy_decl) = 1;
8935 make_decl_rtl (dummy_decl, NULL_PTR, 1);
8937 /* Expand the second expression just for side-effects. */
8938 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
8939 const0_rtx, VOIDmode, 0);
8941 assemble_external (dummy_decl);
8943 /* Pick up FP, label, and SP from the block and jump. This code is
8944 from expand_goto in stmt.c; see there for detailed comments. */
8945 #if HAVE_nonlocal_goto
8946 if (HAVE_nonlocal_goto)
8947 emit_insn (gen_nonlocal_goto (fp, lab, stack,
8948 XEXP (DECL_RTL (dummy_decl), 0)));
8952 lab = copy_to_reg (lab);
8953 emit_move_insn (hard_frame_pointer_rtx, fp);
8954 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
8956 /* Put in the static chain register the address of the dummy
8958 emit_move_insn (static_chain_rtx, XEXP (DECL_RTL (dummy_decl), 0));
8959 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
8960 emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
8961 emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
8962 emit_indirect_jump (lab);
8968 default: /* just do library call, if unknown builtin */
8969 error ("built-in function `%s' not currently supported",
8970 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
8973 /* The switch statement above can drop through to cause the function
8974 to be called normally. */
8976 return expand_call (exp, target, ignore);
8979 /* Built-in functions to perform an untyped call and return. */
8981 /* For each register that may be used for calling a function, this
8982 gives a mode used to copy the register's value. VOIDmode indicates
8983 the register is not used for calling a function. If the machine
8984 has register windows, this gives only the outbound registers.
8985 INCOMING_REGNO gives the corresponding inbound register. */
8986 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
8988 /* For each register that may be used for returning values, this gives
8989 a mode used to copy the register's value. VOIDmode indicates the
8990 register is not used for returning values. If the machine has
8991 register windows, this gives only the outbound registers.
8992 INCOMING_REGNO gives the corresponding inbound register. */
8993 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
8995 /* For each register that may be used for calling a function, this
8996 gives the offset of that register into the block returned by
8997 __builtin_apply_args. 0 indicates that the register is not
8998 used for calling a function. */
8999 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
9001 /* Return the offset of register REGNO into the block returned by
9002 __builtin_apply_args. This is not declared static, since it is
9003 needed in objc-act.c. */
9006 apply_args_register_offset (regno)
9011 /* Arguments are always put in outgoing registers (in the argument
9012 block) if such make sense. */
9013 #ifdef OUTGOING_REGNO
9014 regno = OUTGOING_REGNO(regno);
9016 return apply_args_reg_offset[regno];
9019 /* Return the size required for the block returned by __builtin_apply_args,
9020 and initialize apply_args_mode. */
9025 static int size = -1;
9027 enum machine_mode mode;
9029 /* The values computed by this function never change. */
9032 /* The first value is the incoming arg-pointer. */
9033 size = GET_MODE_SIZE (Pmode);
9035 /* The second value is the structure value address unless this is
9036 passed as an "invisible" first argument. */
9037 if (struct_value_rtx)
9038 size += GET_MODE_SIZE (Pmode);
9040 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9041 if (FUNCTION_ARG_REGNO_P (regno))
9043 /* Search for the proper mode for copying this register's
9044 value. I'm not sure this is right, but it works so far. */
9045 enum machine_mode best_mode = VOIDmode;
9047 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9049 mode = GET_MODE_WIDER_MODE (mode))
9050 if (HARD_REGNO_MODE_OK (regno, mode)
9051 && HARD_REGNO_NREGS (regno, mode) == 1)
9054 if (best_mode == VOIDmode)
9055 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9057 mode = GET_MODE_WIDER_MODE (mode))
9058 if (HARD_REGNO_MODE_OK (regno, mode)
9059 && (mov_optab->handlers[(int) mode].insn_code
9060 != CODE_FOR_nothing))
9064 if (mode == VOIDmode)
9067 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9068 if (size % align != 0)
9069 size = CEIL (size, align) * align;
9070 apply_args_reg_offset[regno] = size;
9071 size += GET_MODE_SIZE (mode);
9072 apply_args_mode[regno] = mode;
9076 apply_args_mode[regno] = VOIDmode;
9077 apply_args_reg_offset[regno] = 0;
9083 /* Return the size required for the block returned by __builtin_apply,
9084 and initialize apply_result_mode. */
9087 apply_result_size ()
9089 static int size = -1;
9091 enum machine_mode mode;
9093 /* The values computed by this function never change. */
9098 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9099 if (FUNCTION_VALUE_REGNO_P (regno))
9101 /* Search for the proper mode for copying this register's
9102 value. I'm not sure this is right, but it works so far. */
9103 enum machine_mode best_mode = VOIDmode;
9105 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9107 mode = GET_MODE_WIDER_MODE (mode))
9108 if (HARD_REGNO_MODE_OK (regno, mode))
9111 if (best_mode == VOIDmode)
9112 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9114 mode = GET_MODE_WIDER_MODE (mode))
9115 if (HARD_REGNO_MODE_OK (regno, mode)
9116 && (mov_optab->handlers[(int) mode].insn_code
9117 != CODE_FOR_nothing))
9121 if (mode == VOIDmode)
9124 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9125 if (size % align != 0)
9126 size = CEIL (size, align) * align;
9127 size += GET_MODE_SIZE (mode);
9128 apply_result_mode[regno] = mode;
9131 apply_result_mode[regno] = VOIDmode;
9133 /* Allow targets that use untyped_call and untyped_return to override
9134 the size so that machine-specific information can be stored here. */
9135 #ifdef APPLY_RESULT_SIZE
9136 size = APPLY_RESULT_SIZE;
9142 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9143 /* Create a vector describing the result block RESULT. If SAVEP is true,
9144 the result block is used to save the values; otherwise it is used to
9145 restore the values. */
9148 result_vector (savep, result)
9152 int regno, size, align, nelts;
9153 enum machine_mode mode;
9155 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9158 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9159 if ((mode = apply_result_mode[regno]) != VOIDmode)
9161 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9162 if (size % align != 0)
9163 size = CEIL (size, align) * align;
9164 reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
9165 mem = change_address (result, mode,
9166 plus_constant (XEXP (result, 0), size));
9167 savevec[nelts++] = (savep
9168 ? gen_rtx (SET, VOIDmode, mem, reg)
9169 : gen_rtx (SET, VOIDmode, reg, mem));
9170 size += GET_MODE_SIZE (mode);
9172 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
9174 #endif /* HAVE_untyped_call or HAVE_untyped_return */
9176 /* Save the state required to perform an untyped call with the same
9177 arguments as were passed to the current function. */
9180 expand_builtin_apply_args ()
9183 int size, align, regno;
9184 enum machine_mode mode;
9186 /* Create a block where the arg-pointer, structure value address,
9187 and argument registers can be saved. */
9188 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
9190 /* Walk past the arg-pointer and structure value address. */
9191 size = GET_MODE_SIZE (Pmode);
9192 if (struct_value_rtx)
9193 size += GET_MODE_SIZE (Pmode);
9195 /* Save each register used in calling a function to the block. */
9196 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9197 if ((mode = apply_args_mode[regno]) != VOIDmode)
9201 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9202 if (size % align != 0)
9203 size = CEIL (size, align) * align;
9205 tem = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9208 /* For reg-stack.c's stack register household.
9209 Compare with a similar piece of code in function.c. */
9211 emit_insn (gen_rtx (USE, mode, tem));
9214 emit_move_insn (change_address (registers, mode,
9215 plus_constant (XEXP (registers, 0),
9218 size += GET_MODE_SIZE (mode);
9221 /* Save the arg pointer to the block. */
9222 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9223 copy_to_reg (virtual_incoming_args_rtx));
9224 size = GET_MODE_SIZE (Pmode);
9226 /* Save the structure value address unless this is passed as an
9227 "invisible" first argument. */
9228 if (struct_value_incoming_rtx)
9230 emit_move_insn (change_address (registers, Pmode,
9231 plus_constant (XEXP (registers, 0),
9233 copy_to_reg (struct_value_incoming_rtx));
9234 size += GET_MODE_SIZE (Pmode);
9237 /* Return the address of the block. */
9238 return copy_addr_to_reg (XEXP (registers, 0));
9241 /* Perform an untyped call and save the state required to perform an
9242 untyped return of whatever value was returned by the given function. */
9245 expand_builtin_apply (function, arguments, argsize)
9246 rtx function, arguments, argsize;
9248 int size, align, regno;
9249 enum machine_mode mode;
9250 rtx incoming_args, result, reg, dest, call_insn;
9251 rtx old_stack_level = 0;
9252 rtx call_fusage = 0;
9254 /* Create a block where the return registers can be saved. */
9255 result = assign_stack_local (BLKmode, apply_result_size (), -1);
9257 /* ??? The argsize value should be adjusted here. */
9259 /* Fetch the arg pointer from the ARGUMENTS block. */
9260 incoming_args = gen_reg_rtx (Pmode);
9261 emit_move_insn (incoming_args,
9262 gen_rtx (MEM, Pmode, arguments));
9263 #ifndef STACK_GROWS_DOWNWARD
9264 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9265 incoming_args, 0, OPTAB_LIB_WIDEN);
9268 /* Perform postincrements before actually calling the function. */
9271 /* Push a new argument block and copy the arguments. */
9272 do_pending_stack_adjust ();
9273 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
9275 /* Push a block of memory onto the stack to store the memory arguments.
9276 Save the address in a register, and copy the memory arguments. ??? I
9277 haven't figured out how the calling convention macros effect this,
9278 but it's likely that the source and/or destination addresses in
9279 the block copy will need updating in machine specific ways. */
9280 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
9281 emit_block_move (gen_rtx (MEM, BLKmode, dest),
9282 gen_rtx (MEM, BLKmode, incoming_args),
9284 PARM_BOUNDARY / BITS_PER_UNIT);
9286 /* Refer to the argument block. */
9288 arguments = gen_rtx (MEM, BLKmode, arguments);
9290 /* Walk past the arg-pointer and structure value address. */
9291 size = GET_MODE_SIZE (Pmode);
9292 if (struct_value_rtx)
9293 size += GET_MODE_SIZE (Pmode);
9295 /* Restore each of the registers previously saved. Make USE insns
9296 for each of these registers for use in making the call. */
9297 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9298 if ((mode = apply_args_mode[regno]) != VOIDmode)
9300 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9301 if (size % align != 0)
9302 size = CEIL (size, align) * align;
9303 reg = gen_rtx (REG, mode, regno);
9304 emit_move_insn (reg,
9305 change_address (arguments, mode,
9306 plus_constant (XEXP (arguments, 0),
9309 use_reg (&call_fusage, reg);
9310 size += GET_MODE_SIZE (mode);
9313 /* Restore the structure value address unless this is passed as an
9314 "invisible" first argument. */
9315 size = GET_MODE_SIZE (Pmode);
9316 if (struct_value_rtx)
9318 rtx value = gen_reg_rtx (Pmode);
9319 emit_move_insn (value,
9320 change_address (arguments, Pmode,
9321 plus_constant (XEXP (arguments, 0),
9323 emit_move_insn (struct_value_rtx, value);
9324 if (GET_CODE (struct_value_rtx) == REG)
9325 use_reg (&call_fusage, struct_value_rtx);
9326 size += GET_MODE_SIZE (Pmode);
9329 /* All arguments and registers used for the call are set up by now! */
9330 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
9332 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
9333 and we don't want to load it into a register as an optimization,
9334 because prepare_call_address already did it if it should be done. */
9335 if (GET_CODE (function) != SYMBOL_REF)
9336 function = memory_address (FUNCTION_MODE, function);
9338 /* Generate the actual call instruction and save the return value. */
9339 #ifdef HAVE_untyped_call
9340 if (HAVE_untyped_call)
9341 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
9342 result, result_vector (1, result)));
9345 #ifdef HAVE_call_value
9346 if (HAVE_call_value)
9350 /* Locate the unique return register. It is not possible to
9351 express a call that sets more than one return register using
9352 call_value; use untyped_call for that. In fact, untyped_call
9353 only needs to save the return registers in the given block. */
9354 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9355 if ((mode = apply_result_mode[regno]) != VOIDmode)
9358 abort (); /* HAVE_untyped_call required. */
9359 valreg = gen_rtx (REG, mode, regno);
9362 emit_call_insn (gen_call_value (valreg,
9363 gen_rtx (MEM, FUNCTION_MODE, function),
9364 const0_rtx, NULL_RTX, const0_rtx));
9366 emit_move_insn (change_address (result, GET_MODE (valreg),
9374 /* Find the CALL insn we just emitted. */
9375 for (call_insn = get_last_insn ();
9376 call_insn && GET_CODE (call_insn) != CALL_INSN;
9377 call_insn = PREV_INSN (call_insn))
9383 /* Put the register usage information on the CALL. If there is already
9384 some usage information, put ours at the end. */
9385 if (CALL_INSN_FUNCTION_USAGE (call_insn))
9389 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
9390 link = XEXP (link, 1))
9393 XEXP (link, 1) = call_fusage;
9396 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
9398 /* Restore the stack. */
9399 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
9401 /* Return the address of the result block. */
9402 return copy_addr_to_reg (XEXP (result, 0));
9405 /* Perform an untyped return. */
9408 expand_builtin_return (result)
9411 int size, align, regno;
9412 enum machine_mode mode;
9414 rtx call_fusage = 0;
9416 apply_result_size ();
9417 result = gen_rtx (MEM, BLKmode, result);
9419 #ifdef HAVE_untyped_return
9420 if (HAVE_untyped_return)
9422 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
9428 /* Restore the return value and note that each value is used. */
9430 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9431 if ((mode = apply_result_mode[regno]) != VOIDmode)
9433 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9434 if (size % align != 0)
9435 size = CEIL (size, align) * align;
9436 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9437 emit_move_insn (reg,
9438 change_address (result, mode,
9439 plus_constant (XEXP (result, 0),
9442 push_to_sequence (call_fusage);
9443 emit_insn (gen_rtx (USE, VOIDmode, reg));
9444 call_fusage = get_insns ();
9446 size += GET_MODE_SIZE (mode);
9449 /* Put the USE insns before the return. */
9450 emit_insns (call_fusage);
9452 /* Return whatever values was restored by jumping directly to the end
9454 expand_null_return ();
9457 /* Expand code for a post- or pre- increment or decrement
9458 and return the RTX for the result.
9459 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9462 expand_increment (exp, post, ignore)
9466 register rtx op0, op1;
9467 register rtx temp, value;
9468 register tree incremented = TREE_OPERAND (exp, 0);
9469 optab this_optab = add_optab;
9471 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9472 int op0_is_copy = 0;
9473 int single_insn = 0;
9474 /* 1 means we can't store into OP0 directly,
9475 because it is a subreg narrower than a word,
9476 and we don't dare clobber the rest of the word. */
9479 if (output_bytecode)
9481 bc_expand_expr (exp);
9485 /* Stabilize any component ref that might need to be
9486 evaluated more than once below. */
9488 || TREE_CODE (incremented) == BIT_FIELD_REF
9489 || (TREE_CODE (incremented) == COMPONENT_REF
9490 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9491 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9492 incremented = stabilize_reference (incremented);
9493 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9494 ones into save exprs so that they don't accidentally get evaluated
9495 more than once by the code below. */
9496 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9497 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9498 incremented = save_expr (incremented);
9500 /* Compute the operands as RTX.
9501 Note whether OP0 is the actual lvalue or a copy of it:
9502 I believe it is a copy iff it is a register or subreg
9503 and insns were generated in computing it. */
9505 temp = get_last_insn ();
9506 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9508 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9509 in place but instead must do sign- or zero-extension during assignment,
9510 so we copy it into a new register and let the code below use it as
9513 Note that we can safely modify this SUBREG since it is know not to be
9514 shared (it was made by the expand_expr call above). */
9516 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9519 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9523 else if (GET_CODE (op0) == SUBREG
9524 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9526 /* We cannot increment this SUBREG in place. If we are
9527 post-incrementing, get a copy of the old value. Otherwise,
9528 just mark that we cannot increment in place. */
9530 op0 = copy_to_reg (op0);
9535 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9536 && temp != get_last_insn ());
9537 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9539 /* Decide whether incrementing or decrementing. */
9540 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9541 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9542 this_optab = sub_optab;
9544 /* Convert decrement by a constant into a negative increment. */
9545 if (this_optab == sub_optab
9546 && GET_CODE (op1) == CONST_INT)
9548 op1 = GEN_INT (- INTVAL (op1));
9549 this_optab = add_optab;
9552 /* For a preincrement, see if we can do this with a single instruction. */
9555 icode = (int) this_optab->handlers[(int) mode].insn_code;
9556 if (icode != (int) CODE_FOR_nothing
9557 /* Make sure that OP0 is valid for operands 0 and 1
9558 of the insn we want to queue. */
9559 && (*insn_operand_predicate[icode][0]) (op0, mode)
9560 && (*insn_operand_predicate[icode][1]) (op0, mode)
9561 && (*insn_operand_predicate[icode][2]) (op1, mode))
9565 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9566 then we cannot just increment OP0. We must therefore contrive to
9567 increment the original value. Then, for postincrement, we can return
9568 OP0 since it is a copy of the old value. For preincrement, expand here
9569 unless we can do it with a single insn.
9571 Likewise if storing directly into OP0 would clobber high bits
9572 we need to preserve (bad_subreg). */
9573 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9575 /* This is the easiest way to increment the value wherever it is.
9576 Problems with multiple evaluation of INCREMENTED are prevented
9577 because either (1) it is a component_ref or preincrement,
9578 in which case it was stabilized above, or (2) it is an array_ref
9579 with constant index in an array in a register, which is
9580 safe to reevaluate. */
9581 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9582 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9583 ? MINUS_EXPR : PLUS_EXPR),
9586 TREE_OPERAND (exp, 1));
9588 while (TREE_CODE (incremented) == NOP_EXPR
9589 || TREE_CODE (incremented) == CONVERT_EXPR)
9591 newexp = convert (TREE_TYPE (incremented), newexp);
9592 incremented = TREE_OPERAND (incremented, 0);
9595 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9596 return post ? op0 : temp;
9601 /* We have a true reference to the value in OP0.
9602 If there is an insn to add or subtract in this mode, queue it.
9603 Queueing the increment insn avoids the register shuffling
9604 that often results if we must increment now and first save
9605 the old value for subsequent use. */
9607 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9608 op0 = stabilize (op0);
9611 icode = (int) this_optab->handlers[(int) mode].insn_code;
9612 if (icode != (int) CODE_FOR_nothing
9613 /* Make sure that OP0 is valid for operands 0 and 1
9614 of the insn we want to queue. */
9615 && (*insn_operand_predicate[icode][0]) (op0, mode)
9616 && (*insn_operand_predicate[icode][1]) (op0, mode))
9618 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9619 op1 = force_reg (mode, op1);
9621 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9625 /* Preincrement, or we can't increment with one simple insn. */
9627 /* Save a copy of the value before inc or dec, to return it later. */
9628 temp = value = copy_to_reg (op0);
9630 /* Arrange to return the incremented value. */
9631 /* Copy the rtx because expand_binop will protect from the queue,
9632 and the results of that would be invalid for us to return
9633 if our caller does emit_queue before using our result. */
9634 temp = copy_rtx (value = op0);
9636 /* Increment however we can. */
9637 op1 = expand_binop (mode, this_optab, value, op1, op0,
9638 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9639 /* Make sure the value is stored into OP0. */
9641 emit_move_insn (op0, op1);
9646 /* Expand all function calls contained within EXP, innermost ones first.
9647 But don't look within expressions that have sequence points.
9648 For each CALL_EXPR, record the rtx for its value
9649 in the CALL_EXPR_RTL field. */
9652 preexpand_calls (exp)
9655 register int nops, i;
9656 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9658 if (! do_preexpand_calls)
9661 /* Only expressions and references can contain calls. */
9663 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9666 switch (TREE_CODE (exp))
9669 /* Do nothing if already expanded. */
9670 if (CALL_EXPR_RTL (exp) != 0
9671 /* Do nothing if the call returns a variable-sized object. */
9672 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
9673 /* Do nothing to built-in functions. */
9674 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9675 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9677 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9680 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9685 case TRUTH_ANDIF_EXPR:
9686 case TRUTH_ORIF_EXPR:
9687 /* If we find one of these, then we can be sure
9688 the adjust will be done for it (since it makes jumps).
9689 Do it now, so that if this is inside an argument
9690 of a function, we don't get the stack adjustment
9691 after some other args have already been pushed. */
9692 do_pending_stack_adjust ();
9697 case WITH_CLEANUP_EXPR:
9698 case CLEANUP_POINT_EXPR:
9702 if (SAVE_EXPR_RTL (exp) != 0)
9706 nops = tree_code_length[(int) TREE_CODE (exp)];
9707 for (i = 0; i < nops; i++)
9708 if (TREE_OPERAND (exp, i) != 0)
9710 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9711 if (type == 'e' || type == '<' || type == '1' || type == '2'
9713 preexpand_calls (TREE_OPERAND (exp, i));
9717 /* At the start of a function, record that we have no previously-pushed
9718 arguments waiting to be popped. */
9721 init_pending_stack_adjust ()
9723 pending_stack_adjust = 0;
9726 /* When exiting from function, if safe, clear out any pending stack adjust
9727 so the adjustment won't get done. */
9730 clear_pending_stack_adjust ()
9732 #ifdef EXIT_IGNORE_STACK
9734 && ! flag_omit_frame_pointer && EXIT_IGNORE_STACK
9735 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9736 && ! flag_inline_functions)
9737 pending_stack_adjust = 0;
9741 /* Pop any previously-pushed arguments that have not been popped yet. */
9744 do_pending_stack_adjust ()
9746 if (inhibit_defer_pop == 0)
9748 if (pending_stack_adjust != 0)
9749 adjust_stack (GEN_INT (pending_stack_adjust));
9750 pending_stack_adjust = 0;
9754 /* Defer the expansion all cleanups up to OLD_CLEANUPS.
9755 Returns the cleanups to be performed. */
9758 defer_cleanups_to (old_cleanups)
9761 tree new_cleanups = NULL_TREE;
9762 tree cleanups = cleanups_this_call;
9763 tree last = NULL_TREE;
9765 while (cleanups_this_call != old_cleanups)
9767 expand_eh_region_end (TREE_VALUE (cleanups_this_call));
9768 last = cleanups_this_call;
9769 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
9774 /* Remove the list from the chain of cleanups. */
9775 TREE_CHAIN (last) = NULL_TREE;
9777 /* reverse them so that we can build them in the right order. */
9778 cleanups = nreverse (cleanups);
9780 /* All cleanups must be on the function_obstack. */
9781 push_obstacks_nochange ();
9782 resume_temporary_allocation ();
9787 new_cleanups = build (COMPOUND_EXPR, TREE_TYPE (new_cleanups),
9788 TREE_VALUE (cleanups), new_cleanups);
9790 new_cleanups = TREE_VALUE (cleanups);
9792 cleanups = TREE_CHAIN (cleanups);
9798 return new_cleanups;
9801 /* Expand all cleanups up to OLD_CLEANUPS.
9802 Needed here, and also for language-dependent calls. */
9805 expand_cleanups_to (old_cleanups)
9808 while (cleanups_this_call != old_cleanups)
9810 expand_eh_region_end (TREE_VALUE (cleanups_this_call));
9811 expand_expr (TREE_VALUE (cleanups_this_call), const0_rtx, VOIDmode, 0);
9812 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
9816 /* Expand conditional expressions. */
9818 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9819 LABEL is an rtx of code CODE_LABEL, in this function and all the
9823 jumpifnot (exp, label)
9827 do_jump (exp, label, NULL_RTX);
9830 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9837 do_jump (exp, NULL_RTX, label);
9840 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9841 the result is zero, or IF_TRUE_LABEL if the result is one.
9842 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9843 meaning fall through in that case.
9845 do_jump always does any pending stack adjust except when it does not
9846 actually perform a jump. An example where there is no jump
9847 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9849 This function is responsible for optimizing cases such as
9850 &&, || and comparison operators in EXP. */
9853 do_jump (exp, if_false_label, if_true_label)
9855 rtx if_false_label, if_true_label;
9857 register enum tree_code code = TREE_CODE (exp);
9858 /* Some cases need to create a label to jump to
9859 in order to properly fall through.
9860 These cases set DROP_THROUGH_LABEL nonzero. */
9861 rtx drop_through_label = 0;
9866 enum machine_mode mode;
9876 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9882 /* This is not true with #pragma weak */
9884 /* The address of something can never be zero. */
9886 emit_jump (if_true_label);
9891 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9892 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9893 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9896 /* If we are narrowing the operand, we have to do the compare in the
9898 if ((TYPE_PRECISION (TREE_TYPE (exp))
9899 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9901 case NON_LVALUE_EXPR:
9902 case REFERENCE_EXPR:
9907 /* These cannot change zero->non-zero or vice versa. */
9908 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9912 /* This is never less insns than evaluating the PLUS_EXPR followed by
9913 a test and can be longer if the test is eliminated. */
9915 /* Reduce to minus. */
9916 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9917 TREE_OPERAND (exp, 0),
9918 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9919 TREE_OPERAND (exp, 1))));
9920 /* Process as MINUS. */
9924 /* Non-zero iff operands of minus differ. */
9925 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
9926 TREE_OPERAND (exp, 0),
9927 TREE_OPERAND (exp, 1)),
9932 /* If we are AND'ing with a small constant, do this comparison in the
9933 smallest type that fits. If the machine doesn't have comparisons
9934 that small, it will be converted back to the wider comparison.
9935 This helps if we are testing the sign bit of a narrower object.
9936 combine can't do this for us because it can't know whether a
9937 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9939 if (! SLOW_BYTE_ACCESS
9940 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9941 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9942 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
9943 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9944 && (type = type_for_mode (mode, 1)) != 0
9945 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9946 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9947 != CODE_FOR_nothing))
9949 do_jump (convert (type, exp), if_false_label, if_true_label);
9954 case TRUTH_NOT_EXPR:
9955 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9958 case TRUTH_ANDIF_EXPR:
9961 tree cleanups, old_cleanups;
9963 if (if_false_label == 0)
9964 if_false_label = drop_through_label = gen_label_rtx ();
9966 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9967 seq1 = get_insns ();
9970 old_cleanups = cleanups_this_call;
9972 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9973 seq2 = get_insns ();
9974 cleanups = defer_cleanups_to (old_cleanups);
9979 rtx flag = gen_reg_rtx (word_mode);
9983 /* Flag cleanups as not needed. */
9984 emit_move_insn (flag, const0_rtx);
9987 /* Flag cleanups as needed. */
9988 emit_move_insn (flag, const1_rtx);
9991 /* All cleanups must be on the function_obstack. */
9992 push_obstacks_nochange ();
9993 resume_temporary_allocation ();
9995 /* convert flag, which is an rtx, into a tree. */
9996 cond = make_node (RTL_EXPR);
9997 TREE_TYPE (cond) = integer_type_node;
9998 RTL_EXPR_RTL (cond) = flag;
9999 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
10000 cond = save_expr (cond);
10002 new_cleanups = build (COND_EXPR, void_type_node,
10003 truthvalue_conversion (cond),
10004 cleanups, integer_zero_node);
10005 new_cleanups = fold (new_cleanups);
10009 /* Now add in the conditionalized cleanups. */
10011 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
10012 expand_eh_region_start ();
10022 case TRUTH_ORIF_EXPR:
10025 tree cleanups, old_cleanups;
10027 if (if_true_label == 0)
10028 if_true_label = drop_through_label = gen_label_rtx ();
10030 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
10031 seq1 = get_insns ();
10034 old_cleanups = cleanups_this_call;
10036 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10037 seq2 = get_insns ();
10038 cleanups = defer_cleanups_to (old_cleanups);
10043 rtx flag = gen_reg_rtx (word_mode);
10047 /* Flag cleanups as not needed. */
10048 emit_move_insn (flag, const0_rtx);
10051 /* Flag cleanups as needed. */
10052 emit_move_insn (flag, const1_rtx);
10055 /* All cleanups must be on the function_obstack. */
10056 push_obstacks_nochange ();
10057 resume_temporary_allocation ();
10059 /* convert flag, which is an rtx, into a tree. */
10060 cond = make_node (RTL_EXPR);
10061 TREE_TYPE (cond) = integer_type_node;
10062 RTL_EXPR_RTL (cond) = flag;
10063 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
10064 cond = save_expr (cond);
10066 new_cleanups = build (COND_EXPR, void_type_node,
10067 truthvalue_conversion (cond),
10068 cleanups, integer_zero_node);
10069 new_cleanups = fold (new_cleanups);
10073 /* Now add in the conditionalized cleanups. */
10075 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
10076 expand_eh_region_start ();
10086 case COMPOUND_EXPR:
10087 push_temp_slots ();
10088 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
10089 preserve_temp_slots (NULL_RTX);
10090 free_temp_slots ();
10093 do_pending_stack_adjust ();
10094 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10097 case COMPONENT_REF:
10098 case BIT_FIELD_REF:
10101 int bitsize, bitpos, unsignedp;
10102 enum machine_mode mode;
10107 /* Get description of this reference. We don't actually care
10108 about the underlying object here. */
10109 get_inner_reference (exp, &bitsize, &bitpos, &offset,
10110 &mode, &unsignedp, &volatilep);
10112 type = type_for_size (bitsize, unsignedp);
10113 if (! SLOW_BYTE_ACCESS
10114 && type != 0 && bitsize >= 0
10115 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10116 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10117 != CODE_FOR_nothing))
10119 do_jump (convert (type, exp), if_false_label, if_true_label);
10126 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10127 if (integer_onep (TREE_OPERAND (exp, 1))
10128 && integer_zerop (TREE_OPERAND (exp, 2)))
10129 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10131 else if (integer_zerop (TREE_OPERAND (exp, 1))
10132 && integer_onep (TREE_OPERAND (exp, 2)))
10133 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10137 register rtx label1 = gen_label_rtx ();
10138 drop_through_label = gen_label_rtx ();
10139 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10140 /* Now the THEN-expression. */
10141 do_jump (TREE_OPERAND (exp, 1),
10142 if_false_label ? if_false_label : drop_through_label,
10143 if_true_label ? if_true_label : drop_through_label);
10144 /* In case the do_jump just above never jumps. */
10145 do_pending_stack_adjust ();
10146 emit_label (label1);
10147 /* Now the ELSE-expression. */
10148 do_jump (TREE_OPERAND (exp, 2),
10149 if_false_label ? if_false_label : drop_through_label,
10150 if_true_label ? if_true_label : drop_through_label);
10156 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10158 if (integer_zerop (TREE_OPERAND (exp, 1)))
10159 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10160 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10161 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10164 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10165 fold (build (EQ_EXPR, TREE_TYPE (exp),
10166 fold (build1 (REALPART_EXPR,
10167 TREE_TYPE (inner_type),
10168 TREE_OPERAND (exp, 0))),
10169 fold (build1 (REALPART_EXPR,
10170 TREE_TYPE (inner_type),
10171 TREE_OPERAND (exp, 1))))),
10172 fold (build (EQ_EXPR, TREE_TYPE (exp),
10173 fold (build1 (IMAGPART_EXPR,
10174 TREE_TYPE (inner_type),
10175 TREE_OPERAND (exp, 0))),
10176 fold (build1 (IMAGPART_EXPR,
10177 TREE_TYPE (inner_type),
10178 TREE_OPERAND (exp, 1))))))),
10179 if_false_label, if_true_label);
10180 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10181 && !can_compare_p (TYPE_MODE (inner_type)))
10182 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10184 comparison = compare (exp, EQ, EQ);
10190 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10192 if (integer_zerop (TREE_OPERAND (exp, 1)))
10193 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10194 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10195 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10198 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10199 fold (build (NE_EXPR, TREE_TYPE (exp),
10200 fold (build1 (REALPART_EXPR,
10201 TREE_TYPE (inner_type),
10202 TREE_OPERAND (exp, 0))),
10203 fold (build1 (REALPART_EXPR,
10204 TREE_TYPE (inner_type),
10205 TREE_OPERAND (exp, 1))))),
10206 fold (build (NE_EXPR, TREE_TYPE (exp),
10207 fold (build1 (IMAGPART_EXPR,
10208 TREE_TYPE (inner_type),
10209 TREE_OPERAND (exp, 0))),
10210 fold (build1 (IMAGPART_EXPR,
10211 TREE_TYPE (inner_type),
10212 TREE_OPERAND (exp, 1))))))),
10213 if_false_label, if_true_label);
10214 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10215 && !can_compare_p (TYPE_MODE (inner_type)))
10216 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10218 comparison = compare (exp, NE, NE);
10223 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10225 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10226 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10228 comparison = compare (exp, LT, LTU);
10232 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10234 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10235 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10237 comparison = compare (exp, LE, LEU);
10241 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10243 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10244 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10246 comparison = compare (exp, GT, GTU);
10250 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10252 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10253 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10255 comparison = compare (exp, GE, GEU);
10260 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10262 /* This is not needed any more and causes poor code since it causes
10263 comparisons and tests from non-SI objects to have different code
10265 /* Copy to register to avoid generating bad insns by cse
10266 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10267 if (!cse_not_expected && GET_CODE (temp) == MEM)
10268 temp = copy_to_reg (temp);
10270 do_pending_stack_adjust ();
10271 if (GET_CODE (temp) == CONST_INT)
10272 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10273 else if (GET_CODE (temp) == LABEL_REF)
10274 comparison = const_true_rtx;
10275 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10276 && !can_compare_p (GET_MODE (temp)))
10277 /* Note swapping the labels gives us not-equal. */
10278 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10279 else if (GET_MODE (temp) != VOIDmode)
10280 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
10281 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10282 GET_MODE (temp), NULL_RTX, 0);
10287 /* Do any postincrements in the expression that was tested. */
10290 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10291 straight into a conditional jump instruction as the jump condition.
10292 Otherwise, all the work has been done already. */
10294 if (comparison == const_true_rtx)
10297 emit_jump (if_true_label);
10299 else if (comparison == const0_rtx)
10301 if (if_false_label)
10302 emit_jump (if_false_label);
10304 else if (comparison)
10305 do_jump_for_compare (comparison, if_false_label, if_true_label);
10307 if (drop_through_label)
10309 /* If do_jump produces code that might be jumped around,
10310 do any stack adjusts from that code, before the place
10311 where control merges in. */
10312 do_pending_stack_adjust ();
10313 emit_label (drop_through_label);
10317 /* Given a comparison expression EXP for values too wide to be compared
10318 with one insn, test the comparison and jump to the appropriate label.
10319 The code of EXP is ignored; we always test GT if SWAP is 0,
10320 and LT if SWAP is 1. */
10323 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10326 rtx if_false_label, if_true_label;
10328 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10329 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10330 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10331 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10332 rtx drop_through_label = 0;
10333 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10336 if (! if_true_label || ! if_false_label)
10337 drop_through_label = gen_label_rtx ();
10338 if (! if_true_label)
10339 if_true_label = drop_through_label;
10340 if (! if_false_label)
10341 if_false_label = drop_through_label;
10343 /* Compare a word at a time, high order first. */
10344 for (i = 0; i < nwords; i++)
10347 rtx op0_word, op1_word;
10349 if (WORDS_BIG_ENDIAN)
10351 op0_word = operand_subword_force (op0, i, mode);
10352 op1_word = operand_subword_force (op1, i, mode);
10356 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10357 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10360 /* All but high-order word must be compared as unsigned. */
10361 comp = compare_from_rtx (op0_word, op1_word,
10362 (unsignedp || i > 0) ? GTU : GT,
10363 unsignedp, word_mode, NULL_RTX, 0);
10364 if (comp == const_true_rtx)
10365 emit_jump (if_true_label);
10366 else if (comp != const0_rtx)
10367 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10369 /* Consider lower words only if these are equal. */
10370 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10372 if (comp == const_true_rtx)
10373 emit_jump (if_false_label);
10374 else if (comp != const0_rtx)
10375 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10378 if (if_false_label)
10379 emit_jump (if_false_label);
10380 if (drop_through_label)
10381 emit_label (drop_through_label);
10384 /* Compare OP0 with OP1, word at a time, in mode MODE.
10385 UNSIGNEDP says to do unsigned comparison.
10386 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10389 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10390 enum machine_mode mode;
10393 rtx if_false_label, if_true_label;
10395 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10396 rtx drop_through_label = 0;
10399 if (! if_true_label || ! if_false_label)
10400 drop_through_label = gen_label_rtx ();
10401 if (! if_true_label)
10402 if_true_label = drop_through_label;
10403 if (! if_false_label)
10404 if_false_label = drop_through_label;
10406 /* Compare a word at a time, high order first. */
10407 for (i = 0; i < nwords; i++)
10410 rtx op0_word, op1_word;
10412 if (WORDS_BIG_ENDIAN)
10414 op0_word = operand_subword_force (op0, i, mode);
10415 op1_word = operand_subword_force (op1, i, mode);
10419 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10420 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10423 /* All but high-order word must be compared as unsigned. */
10424 comp = compare_from_rtx (op0_word, op1_word,
10425 (unsignedp || i > 0) ? GTU : GT,
10426 unsignedp, word_mode, NULL_RTX, 0);
10427 if (comp == const_true_rtx)
10428 emit_jump (if_true_label);
10429 else if (comp != const0_rtx)
10430 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10432 /* Consider lower words only if these are equal. */
10433 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10435 if (comp == const_true_rtx)
10436 emit_jump (if_false_label);
10437 else if (comp != const0_rtx)
10438 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10441 if (if_false_label)
10442 emit_jump (if_false_label);
10443 if (drop_through_label)
10444 emit_label (drop_through_label);
10447 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10448 with one insn, test the comparison and jump to the appropriate label. */
10451 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10453 rtx if_false_label, if_true_label;
10455 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10456 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10457 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10458 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10460 rtx drop_through_label = 0;
10462 if (! if_false_label)
10463 drop_through_label = if_false_label = gen_label_rtx ();
10465 for (i = 0; i < nwords; i++)
10467 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
10468 operand_subword_force (op1, i, mode),
10469 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10470 word_mode, NULL_RTX, 0);
10471 if (comp == const_true_rtx)
10472 emit_jump (if_false_label);
10473 else if (comp != const0_rtx)
10474 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10478 emit_jump (if_true_label);
10479 if (drop_through_label)
10480 emit_label (drop_through_label);
10483 /* Jump according to whether OP0 is 0.
10484 We assume that OP0 has an integer mode that is too wide
10485 for the available compare insns. */
10488 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10490 rtx if_false_label, if_true_label;
10492 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10494 rtx drop_through_label = 0;
10496 if (! if_false_label)
10497 drop_through_label = if_false_label = gen_label_rtx ();
10499 for (i = 0; i < nwords; i++)
10501 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
10503 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
10504 if (comp == const_true_rtx)
10505 emit_jump (if_false_label);
10506 else if (comp != const0_rtx)
10507 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10511 emit_jump (if_true_label);
10512 if (drop_through_label)
10513 emit_label (drop_through_label);
10516 /* Given a comparison expression in rtl form, output conditional branches to
10517 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
10520 do_jump_for_compare (comparison, if_false_label, if_true_label)
10521 rtx comparison, if_false_label, if_true_label;
10525 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10526 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
10530 if (if_false_label)
10531 emit_jump (if_false_label);
10533 else if (if_false_label)
10536 rtx prev = get_last_insn ();
10539 /* Output the branch with the opposite condition. Then try to invert
10540 what is generated. If more than one insn is a branch, or if the
10541 branch is not the last insn written, abort. If we can't invert
10542 the branch, emit make a true label, redirect this jump to that,
10543 emit a jump to the false label and define the true label. */
10545 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10546 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
10550 /* Here we get the first insn that was just emitted. It used to be the
10551 case that, on some machines, emitting the branch would discard
10552 the previous compare insn and emit a replacement. This isn't
10553 done anymore, but abort if we see that PREV is deleted. */
10556 insn = get_insns ();
10557 else if (INSN_DELETED_P (prev))
10560 insn = NEXT_INSN (prev);
10562 for (; insn; insn = NEXT_INSN (insn))
10563 if (GET_CODE (insn) == JUMP_INSN)
10570 if (branch != get_last_insn ())
10573 JUMP_LABEL (branch) = if_false_label;
10574 if (! invert_jump (branch, if_false_label))
10576 if_true_label = gen_label_rtx ();
10577 redirect_jump (branch, if_true_label);
10578 emit_jump (if_false_label);
10579 emit_label (if_true_label);
10584 /* Generate code for a comparison expression EXP
10585 (including code to compute the values to be compared)
10586 and set (CC0) according to the result.
10587 SIGNED_CODE should be the rtx operation for this comparison for
10588 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10590 We force a stack adjustment unless there are currently
10591 things pushed on the stack that aren't yet used. */
10594 compare (exp, signed_code, unsigned_code)
10596 enum rtx_code signed_code, unsigned_code;
10599 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10601 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10602 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
10603 register enum machine_mode mode = TYPE_MODE (type);
10604 int unsignedp = TREE_UNSIGNED (type);
10605 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
10607 #ifdef HAVE_canonicalize_funcptr_for_compare
10608 /* If function pointers need to be "canonicalized" before they can
10609 be reliably compared, then canonicalize them. */
10610 if (HAVE_canonicalize_funcptr_for_compare
10611 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10612 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10615 rtx new_op0 = gen_reg_rtx (mode);
10617 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10621 if (HAVE_canonicalize_funcptr_for_compare
10622 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10623 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10626 rtx new_op1 = gen_reg_rtx (mode);
10628 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10633 return compare_from_rtx (op0, op1, code, unsignedp, mode,
10635 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10636 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
10639 /* Like compare but expects the values to compare as two rtx's.
10640 The decision as to signed or unsigned comparison must be made by the caller.
10642 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10645 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10646 size of MODE should be used. */
10649 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10650 register rtx op0, op1;
10651 enum rtx_code code;
10653 enum machine_mode mode;
10659 /* If one operand is constant, make it the second one. Only do this
10660 if the other operand is not constant as well. */
10662 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10663 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10668 code = swap_condition (code);
10671 if (flag_force_mem)
10673 op0 = force_not_mem (op0);
10674 op1 = force_not_mem (op1);
10677 do_pending_stack_adjust ();
10679 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10680 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10684 /* There's no need to do this now that combine.c can eliminate lots of
10685 sign extensions. This can be less efficient in certain cases on other
10688 /* If this is a signed equality comparison, we can do it as an
10689 unsigned comparison since zero-extension is cheaper than sign
10690 extension and comparisons with zero are done as unsigned. This is
10691 the case even on machines that can do fast sign extension, since
10692 zero-extension is easier to combine with other operations than
10693 sign-extension is. If we are comparing against a constant, we must
10694 convert it to what it would look like unsigned. */
10695 if ((code == EQ || code == NE) && ! unsignedp
10696 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10698 if (GET_CODE (op1) == CONST_INT
10699 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10700 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10705 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10707 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
10710 /* Generate code to calculate EXP using a store-flag instruction
10711 and return an rtx for the result. EXP is either a comparison
10712 or a TRUTH_NOT_EXPR whose operand is a comparison.
10714 If TARGET is nonzero, store the result there if convenient.
10716 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10719 Return zero if there is no suitable set-flag instruction
10720 available on this machine.
10722 Once expand_expr has been called on the arguments of the comparison,
10723 we are committed to doing the store flag, since it is not safe to
10724 re-evaluate the expression. We emit the store-flag insn by calling
10725 emit_store_flag, but only expand the arguments if we have a reason
10726 to believe that emit_store_flag will be successful. If we think that
10727 it will, but it isn't, we have to simulate the store-flag with a
10728 set/jump/set sequence. */
10731 do_store_flag (exp, target, mode, only_cheap)
10734 enum machine_mode mode;
10737 enum rtx_code code;
10738 tree arg0, arg1, type;
10740 enum machine_mode operand_mode;
10744 enum insn_code icode;
10745 rtx subtarget = target;
10746 rtx result, label, pattern, jump_pat;
10748 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10749 result at the end. We can't simply invert the test since it would
10750 have already been inverted if it were valid. This case occurs for
10751 some floating-point comparisons. */
10753 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10754 invert = 1, exp = TREE_OPERAND (exp, 0);
10756 arg0 = TREE_OPERAND (exp, 0);
10757 arg1 = TREE_OPERAND (exp, 1);
10758 type = TREE_TYPE (arg0);
10759 operand_mode = TYPE_MODE (type);
10760 unsignedp = TREE_UNSIGNED (type);
10762 /* We won't bother with BLKmode store-flag operations because it would mean
10763 passing a lot of information to emit_store_flag. */
10764 if (operand_mode == BLKmode)
10767 /* We won't bother with store-flag operations involving function pointers
10768 when function pointers must be canonicalized before comparisons. */
10769 #ifdef HAVE_canonicalize_funcptr_for_compare
10770 if (HAVE_canonicalize_funcptr_for_compare
10771 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10772 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10774 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10775 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10776 == FUNCTION_TYPE))))
10783 /* Get the rtx comparison code to use. We know that EXP is a comparison
10784 operation of some type. Some comparisons against 1 and -1 can be
10785 converted to comparisons with zero. Do so here so that the tests
10786 below will be aware that we have a comparison with zero. These
10787 tests will not catch constants in the first operand, but constants
10788 are rarely passed as the first operand. */
10790 switch (TREE_CODE (exp))
10799 if (integer_onep (arg1))
10800 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10802 code = unsignedp ? LTU : LT;
10805 if (! unsignedp && integer_all_onesp (arg1))
10806 arg1 = integer_zero_node, code = LT;
10808 code = unsignedp ? LEU : LE;
10811 if (! unsignedp && integer_all_onesp (arg1))
10812 arg1 = integer_zero_node, code = GE;
10814 code = unsignedp ? GTU : GT;
10817 if (integer_onep (arg1))
10818 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10820 code = unsignedp ? GEU : GE;
10826 /* Put a constant second. */
10827 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10829 tem = arg0; arg0 = arg1; arg1 = tem;
10830 code = swap_condition (code);
10833 /* If this is an equality or inequality test of a single bit, we can
10834 do this by shifting the bit being tested to the low-order bit and
10835 masking the result with the constant 1. If the condition was EQ,
10836 we xor it with 1. This does not require an scc insn and is faster
10837 than an scc insn even if we have it. */
10839 if ((code == NE || code == EQ)
10840 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10841 && integer_pow2p (TREE_OPERAND (arg0, 1))
10842 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
10844 tree inner = TREE_OPERAND (arg0, 0);
10849 tem = INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
10850 NULL_RTX, VOIDmode, 0));
10851 /* In this case, immed_double_const will sign extend the value to make
10852 it look the same on the host and target. We must remove the
10853 sign-extension before calling exact_log2, since exact_log2 will
10854 fail for negative values. */
10855 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT
10856 && BITS_PER_WORD == GET_MODE_BITSIZE (TYPE_MODE (type)))
10857 /* We don't use the obvious constant shift to generate the mask,
10858 because that generates compiler warnings when BITS_PER_WORD is
10859 greater than or equal to HOST_BITS_PER_WIDE_INT, even though this
10860 code is unreachable in that case. */
10861 tem = tem & GET_MODE_MASK (word_mode);
10862 bitnum = exact_log2 (tem);
10864 /* If INNER is a right shift of a constant and it plus BITNUM does
10865 not overflow, adjust BITNUM and INNER. */
10867 if (TREE_CODE (inner) == RSHIFT_EXPR
10868 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10869 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10870 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
10871 < TYPE_PRECISION (type)))
10873 bitnum +=TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10874 inner = TREE_OPERAND (inner, 0);
10877 /* If we are going to be able to omit the AND below, we must do our
10878 operations as unsigned. If we must use the AND, we have a choice.
10879 Normally unsigned is faster, but for some machines signed is. */
10880 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10881 #ifdef LOAD_EXTEND_OP
10882 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10888 if (subtarget == 0 || GET_CODE (subtarget) != REG
10889 || GET_MODE (subtarget) != operand_mode
10890 || ! safe_from_p (subtarget, inner))
10893 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10896 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10897 size_int (bitnum), subtarget, ops_unsignedp);
10899 if (GET_MODE (op0) != mode)
10900 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10902 if ((code == EQ && ! invert) || (code == NE && invert))
10903 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10904 ops_unsignedp, OPTAB_LIB_WIDEN);
10906 /* Put the AND last so it can combine with more things. */
10907 if (bitnum != TYPE_PRECISION (type) - 1)
10908 op0 = expand_and (op0, const1_rtx, subtarget);
10913 /* Now see if we are likely to be able to do this. Return if not. */
10914 if (! can_compare_p (operand_mode))
10916 icode = setcc_gen_code[(int) code];
10917 if (icode == CODE_FOR_nothing
10918 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
10920 /* We can only do this if it is one of the special cases that
10921 can be handled without an scc insn. */
10922 if ((code == LT && integer_zerop (arg1))
10923 || (! only_cheap && code == GE && integer_zerop (arg1)))
10925 else if (BRANCH_COST >= 0
10926 && ! only_cheap && (code == NE || code == EQ)
10927 && TREE_CODE (type) != REAL_TYPE
10928 && ((abs_optab->handlers[(int) operand_mode].insn_code
10929 != CODE_FOR_nothing)
10930 || (ffs_optab->handlers[(int) operand_mode].insn_code
10931 != CODE_FOR_nothing)))
10937 preexpand_calls (exp);
10938 if (subtarget == 0 || GET_CODE (subtarget) != REG
10939 || GET_MODE (subtarget) != operand_mode
10940 || ! safe_from_p (subtarget, arg1))
10943 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10944 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10947 target = gen_reg_rtx (mode);
10949 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10950 because, if the emit_store_flag does anything it will succeed and
10951 OP0 and OP1 will not be used subsequently. */
10953 result = emit_store_flag (target, code,
10954 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10955 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10956 operand_mode, unsignedp, 1);
10961 result = expand_binop (mode, xor_optab, result, const1_rtx,
10962 result, 0, OPTAB_LIB_WIDEN);
10966 /* If this failed, we have to do this with set/compare/jump/set code. */
10967 if (target == 0 || GET_CODE (target) != REG
10968 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10969 target = gen_reg_rtx (GET_MODE (target));
10971 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10972 result = compare_from_rtx (op0, op1, code, unsignedp,
10973 operand_mode, NULL_RTX, 0);
10974 if (GET_CODE (result) == CONST_INT)
10975 return (((result == const0_rtx && ! invert)
10976 || (result != const0_rtx && invert))
10977 ? const0_rtx : const1_rtx);
10979 label = gen_label_rtx ();
10980 if (bcc_gen_fctn[(int) code] == 0)
10983 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10984 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10985 emit_label (label);
10990 /* Generate a tablejump instruction (used for switch statements). */
10992 #ifdef HAVE_tablejump
10994 /* INDEX is the value being switched on, with the lowest value
10995 in the table already subtracted.
10996 MODE is its expected mode (needed if INDEX is constant).
10997 RANGE is the length of the jump table.
10998 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11000 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11001 index value is out of range. */
11004 do_tablejump (index, mode, range, table_label, default_label)
11005 rtx index, range, table_label, default_label;
11006 enum machine_mode mode;
11008 register rtx temp, vector;
11010 /* Do an unsigned comparison (in the proper mode) between the index
11011 expression and the value which represents the length of the range.
11012 Since we just finished subtracting the lower bound of the range
11013 from the index expression, this comparison allows us to simultaneously
11014 check that the original index expression value is both greater than
11015 or equal to the minimum value of the range and less than or equal to
11016 the maximum value of the range. */
11018 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
11019 emit_jump_insn (gen_bgtu (default_label));
11021 /* If index is in range, it must fit in Pmode.
11022 Convert to Pmode so we can index with it. */
11024 index = convert_to_mode (Pmode, index, 1);
11026 /* Don't let a MEM slip thru, because then INDEX that comes
11027 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11028 and break_out_memory_refs will go to work on it and mess it up. */
11029 #ifdef PIC_CASE_VECTOR_ADDRESS
11030 if (flag_pic && GET_CODE (index) != REG)
11031 index = copy_to_mode_reg (Pmode, index);
11034 /* If flag_force_addr were to affect this address
11035 it could interfere with the tricky assumptions made
11036 about addresses that contain label-refs,
11037 which may be valid only very near the tablejump itself. */
11038 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11039 GET_MODE_SIZE, because this indicates how large insns are. The other
11040 uses should all be Pmode, because they are addresses. This code
11041 could fail if addresses and insns are not the same size. */
11042 index = gen_rtx (PLUS, Pmode,
11043 gen_rtx (MULT, Pmode, index,
11044 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
11045 gen_rtx (LABEL_REF, Pmode, table_label));
11046 #ifdef PIC_CASE_VECTOR_ADDRESS
11048 index = PIC_CASE_VECTOR_ADDRESS (index);
11051 index = memory_address_noforce (CASE_VECTOR_MODE, index);
11052 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11053 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
11054 RTX_UNCHANGING_P (vector) = 1;
11055 convert_move (temp, vector, 0);
11057 emit_jump_insn (gen_tablejump (temp, table_label));
11059 #ifndef CASE_VECTOR_PC_RELATIVE
11060 /* If we are generating PIC code or if the table is PC-relative, the
11061 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11067 #endif /* HAVE_tablejump */
11070 /* Emit a suitable bytecode to load a value from memory, assuming a pointer
11071 to that value is on the top of the stack. The resulting type is TYPE, and
11072 the source declaration is DECL. */
11075 bc_load_memory (type, decl)
11078 enum bytecode_opcode opcode;
11081 /* Bit fields are special. We only know about signed and
11082 unsigned ints, and enums. The latter are treated as
11083 signed integers. */
11085 if (DECL_BIT_FIELD (decl))
11086 if (TREE_CODE (type) == ENUMERAL_TYPE
11087 || TREE_CODE (type) == INTEGER_TYPE)
11088 opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
11092 /* See corresponding comment in bc_store_memory(). */
11093 if (TYPE_MODE (type) == BLKmode
11094 || TYPE_MODE (type) == VOIDmode)
11097 opcode = mode_to_load_map [(int) TYPE_MODE (type)];
11099 if (opcode == neverneverland)
11102 bc_emit_bytecode (opcode);
11104 #ifdef DEBUG_PRINT_CODE
11105 fputc ('\n', stderr);
11110 /* Store the contents of the second stack slot to the address in the
11111 top stack slot. DECL is the declaration of the destination and is used
11112 to determine whether we're dealing with a bitfield. */
11115 bc_store_memory (type, decl)
11118 enum bytecode_opcode opcode;
11121 if (DECL_BIT_FIELD (decl))
11123 if (TREE_CODE (type) == ENUMERAL_TYPE
11124 || TREE_CODE (type) == INTEGER_TYPE)
11130 if (TYPE_MODE (type) == BLKmode)
11132 /* Copy structure. This expands to a block copy instruction, storeBLK.
11133 In addition to the arguments expected by the other store instructions,
11134 it also expects a type size (SImode) on top of the stack, which is the
11135 structure size in size units (usually bytes). The two first arguments
11136 are already on the stack; so we just put the size on level 1. For some
11137 other languages, the size may be variable, this is why we don't encode
11138 it as a storeBLK literal, but rather treat it as a full-fledged expression. */
11140 bc_expand_expr (TYPE_SIZE (type));
11144 opcode = mode_to_store_map [(int) TYPE_MODE (type)];
11146 if (opcode == neverneverland)
11149 bc_emit_bytecode (opcode);
11151 #ifdef DEBUG_PRINT_CODE
11152 fputc ('\n', stderr);
11157 /* Allocate local stack space sufficient to hold a value of the given
11158 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
11159 integral power of 2. A special case is locals of type VOID, which
11160 have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
11161 remapped into the corresponding attribute of SI. */
11164 bc_allocate_local (size, alignment)
11165 int size, alignment;
11168 int byte_alignment;
11173 /* Normalize size and alignment */
11175 size = UNITS_PER_WORD;
11177 if (alignment < BITS_PER_UNIT)
11178 byte_alignment = 1 << (INT_ALIGN - 1);
11181 byte_alignment = alignment / BITS_PER_UNIT;
11183 if (local_vars_size & (byte_alignment - 1))
11184 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
11186 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
11187 local_vars_size += size;
11193 /* Allocate variable-sized local array. Variable-sized arrays are
11194 actually pointers to the address in memory where they are stored. */
11197 bc_allocate_variable_array (size)
11201 const int ptralign = (1 << (PTR_ALIGN - 1));
11203 /* Align pointer */
11204 if (local_vars_size & ptralign)
11205 local_vars_size += ptralign - (local_vars_size & ptralign);
11207 /* Note down local space needed: pointer to block; also return
11210 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
11211 local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
11216 /* Push the machine address for the given external variable offset. */
11219 bc_load_externaddr (externaddr)
11222 bc_emit_bytecode (constP);
11223 bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
11224 BYTECODE_BC_LABEL (externaddr)->offset);
11226 #ifdef DEBUG_PRINT_CODE
11227 fputc ('\n', stderr);
11232 /* Like above, but expects an IDENTIFIER. */
11235 bc_load_externaddr_id (id, offset)
11239 if (!IDENTIFIER_POINTER (id))
11242 bc_emit_bytecode (constP);
11243 bc_emit_code_labelref (xstrdup (IDENTIFIER_POINTER (id)), offset);
11245 #ifdef DEBUG_PRINT_CODE
11246 fputc ('\n', stderr);
11251 /* Push the machine address for the given local variable offset. */
11254 bc_load_localaddr (localaddr)
11257 bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
11261 /* Push the machine address for the given parameter offset.
11262 NOTE: offset is in bits. */
11265 bc_load_parmaddr (parmaddr)
11268 bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
11273 /* Convert a[i] into *(a + i). */
11276 bc_canonicalize_array_ref (exp)
11279 tree type = TREE_TYPE (exp);
11280 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
11281 TREE_OPERAND (exp, 0));
11282 tree index = TREE_OPERAND (exp, 1);
11285 /* Convert the integer argument to a type the same size as a pointer
11286 so the multiply won't overflow spuriously. */
11288 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
11289 index = convert (type_for_size (POINTER_SIZE, 0), index);
11291 /* The array address isn't volatile even if the array is.
11292 (Of course this isn't terribly relevant since the bytecode
11293 translator treats nearly everything as volatile anyway.) */
11294 TREE_THIS_VOLATILE (array_adr) = 0;
11296 return build1 (INDIRECT_REF, type,
11297 fold (build (PLUS_EXPR,
11298 TYPE_POINTER_TO (type),
11300 fold (build (MULT_EXPR,
11301 TYPE_POINTER_TO (type),
11303 size_in_bytes (type))))));
11307 /* Load the address of the component referenced by the given
11308 COMPONENT_REF expression.
11310 Returns innermost lvalue. */
11313 bc_expand_component_address (exp)
11317 enum machine_mode mode;
11319 HOST_WIDE_INT SIval;
11322 tem = TREE_OPERAND (exp, 1);
11323 mode = DECL_MODE (tem);
11326 /* Compute cumulative bit offset for nested component refs
11327 and array refs, and find the ultimate containing object. */
11329 for (tem = exp;; tem = TREE_OPERAND (tem, 0))
11331 if (TREE_CODE (tem) == COMPONENT_REF)
11332 bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
11334 if (TREE_CODE (tem) == ARRAY_REF
11335 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11336 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
11338 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
11339 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
11340 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
11345 bc_expand_expr (tem);
11348 /* For bitfields also push their offset and size */
11349 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
11350 bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1)));
11352 if (SIval = bitpos / BITS_PER_UNIT)
11353 bc_emit_instruction (addconstPSI, SIval);
11355 return (TREE_OPERAND (exp, 1));
11359 /* Emit code to push two SI constants */
11362 bc_push_offset_and_size (offset, size)
11363 HOST_WIDE_INT offset, size;
11365 bc_emit_instruction (constSI, offset);
11366 bc_emit_instruction (constSI, size);
11370 /* Emit byte code to push the address of the given lvalue expression to
11371 the stack. If it's a bit field, we also push offset and size info.
11373 Returns innermost component, which allows us to determine not only
11374 its type, but also whether it's a bitfield. */
11377 bc_expand_address (exp)
11381 if (!exp || TREE_CODE (exp) == ERROR_MARK)
11385 switch (TREE_CODE (exp))
11389 return (bc_expand_address (bc_canonicalize_array_ref (exp)));
11391 case COMPONENT_REF:
11393 return (bc_expand_component_address (exp));
11397 bc_expand_expr (TREE_OPERAND (exp, 0));
11399 /* For variable-sized types: retrieve pointer. Sometimes the
11400 TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
11401 also make sure we have an operand, just in case... */
11403 if (TREE_OPERAND (exp, 0)
11404 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
11405 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
11406 bc_emit_instruction (loadP);
11408 /* If packed, also return offset and size */
11409 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
11411 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
11412 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
11414 return (TREE_OPERAND (exp, 0));
11416 case FUNCTION_DECL:
11418 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
11419 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
11424 bc_load_parmaddr (DECL_RTL (exp));
11426 /* For variable-sized types: retrieve pointer */
11427 if (TYPE_SIZE (TREE_TYPE (exp))
11428 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11429 bc_emit_instruction (loadP);
11431 /* If packed, also return offset and size */
11432 if (DECL_BIT_FIELD (exp))
11433 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11434 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11440 bc_emit_instruction (returnP);
11446 if (BYTECODE_LABEL (DECL_RTL (exp)))
11447 bc_load_externaddr (DECL_RTL (exp));
11450 if (DECL_EXTERNAL (exp))
11451 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
11452 (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
11454 bc_load_localaddr (DECL_RTL (exp));
11456 /* For variable-sized types: retrieve pointer */
11457 if (TYPE_SIZE (TREE_TYPE (exp))
11458 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11459 bc_emit_instruction (loadP);
11461 /* If packed, also return offset and size */
11462 if (DECL_BIT_FIELD (exp))
11463 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11464 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11472 bc_emit_bytecode (constP);
11473 r = output_constant_def (exp);
11474 bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
11476 #ifdef DEBUG_PRINT_CODE
11477 fputc ('\n', stderr);
11488 /* Most lvalues don't have components. */
11493 /* Emit a type code to be used by the runtime support in handling
11494 parameter passing. The type code consists of the machine mode
11495 plus the minimal alignment shifted left 8 bits. */
11498 bc_runtime_type_code (type)
11503 switch (TREE_CODE (type))
11509 case ENUMERAL_TYPE:
11513 val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
11525 return build_int_2 (val, 0);
11529 /* Generate constructor label */
11532 bc_gen_constr_label ()
11534 static int label_counter;
11535 static char label[20];
11537 sprintf (label, "*LR%d", label_counter++);
11539 return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
11543 /* Evaluate constructor CONSTR and return pointer to it on level one. We
11544 expand the constructor data as static data, and push a pointer to it.
11545 The pointer is put in the pointer table and is retrieved by a constP
11546 bytecode instruction. We then loop and store each constructor member in
11547 the corresponding component. Finally, we return the original pointer on
11551 bc_expand_constructor (constr)
11555 HOST_WIDE_INT ptroffs;
11559 /* Literal constructors are handled as constants, whereas
11560 non-literals are evaluated and stored element by element
11561 into the data segment. */
11563 /* Allocate space in proper segment and push pointer to space on stack.
11566 l = bc_gen_constr_label ();
11568 if (TREE_CONSTANT (constr))
11572 bc_emit_const_labeldef (l);
11573 bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
11579 bc_emit_data_labeldef (l);
11580 bc_output_data_constructor (constr);
11584 /* Add reference to pointer table and recall pointer to stack;
11585 this code is common for both types of constructors: literals
11586 and non-literals. */
11588 ptroffs = bc_define_pointer (l);
11589 bc_emit_instruction (constP, ptroffs);
11591 /* This is all that has to be done if it's a literal. */
11592 if (TREE_CONSTANT (constr))
11596 /* At this point, we have the pointer to the structure on top of the stack.
11597 Generate sequences of store_memory calls for the constructor. */
11599 /* constructor type is structure */
11600 if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
11604 /* If the constructor has fewer fields than the structure,
11605 clear the whole structure first. */
11607 if (list_length (CONSTRUCTOR_ELTS (constr))
11608 != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
11610 bc_emit_instruction (duplicate);
11611 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11612 bc_emit_instruction (clearBLK);
11615 /* Store each element of the constructor into the corresponding
11616 field of TARGET. */
11618 for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
11620 register tree field = TREE_PURPOSE (elt);
11621 register enum machine_mode mode;
11626 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
11627 mode = DECL_MODE (field);
11628 unsignedp = TREE_UNSIGNED (field);
11630 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
11632 bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11633 /* The alignment of TARGET is
11634 at least what its type requires. */
11636 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11637 int_size_in_bytes (TREE_TYPE (constr)));
11642 /* Constructor type is array */
11643 if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
11647 tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
11648 int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
11649 int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
11650 tree elttype = TREE_TYPE (TREE_TYPE (constr));
11652 /* If the constructor has fewer fields than the structure,
11653 clear the whole structure first. */
11655 if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
11657 bc_emit_instruction (duplicate);
11658 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11659 bc_emit_instruction (clearBLK);
11663 /* Store each element of the constructor into the corresponding
11664 element of TARGET, determined by counting the elements. */
11666 for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
11668 elt = TREE_CHAIN (elt), i++)
11670 register enum machine_mode mode;
11675 mode = TYPE_MODE (elttype);
11676 bitsize = GET_MODE_BITSIZE (mode);
11677 unsignedp = TREE_UNSIGNED (elttype);
11679 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
11680 /* * TYPE_SIZE_UNIT (elttype) */ );
11682 bc_store_field (elt, bitsize, bitpos, mode,
11683 TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11684 /* The alignment of TARGET is
11685 at least what its type requires. */
11687 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11688 int_size_in_bytes (TREE_TYPE (constr)));
11695 /* Store the value of EXP (an expression tree) into member FIELD of
11696 structure at address on stack, which has type TYPE, mode MODE and
11697 occupies BITSIZE bits, starting BITPOS bits from the beginning of the
11700 ALIGN is the alignment that TARGET is known to have, measured in bytes.
11701 TOTAL_SIZE is its size in bytes, or -1 if variable. */
11704 bc_store_field (field, bitsize, bitpos, mode, exp, type,
11705 value_mode, unsignedp, align, total_size)
11706 int bitsize, bitpos;
11707 enum machine_mode mode;
11708 tree field, exp, type;
11709 enum machine_mode value_mode;
11715 /* Expand expression and copy pointer */
11716 bc_expand_expr (exp);
11717 bc_emit_instruction (over);
11720 /* If the component is a bit field, we cannot use addressing to access
11721 it. Use bit-field techniques to store in it. */
11723 if (DECL_BIT_FIELD (field))
11725 bc_store_bit_field (bitpos, bitsize, unsignedp);
11729 /* Not bit field */
11731 HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;
11733 /* Advance pointer to the desired member */
11735 bc_emit_instruction (addconstPSI, offset);
11738 bc_store_memory (type, field);
11743 /* Store SI/SU in bitfield */
11746 bc_store_bit_field (offset, size, unsignedp)
11747 int offset, size, unsignedp;
11749 /* Push bitfield offset and size */
11750 bc_push_offset_and_size (offset, size);
11753 bc_emit_instruction (sstoreBI);
11757 /* Load SI/SU from bitfield */
11760 bc_load_bit_field (offset, size, unsignedp)
11761 int offset, size, unsignedp;
11763 /* Push bitfield offset and size */
11764 bc_push_offset_and_size (offset, size);
11766 /* Load: sign-extend if signed, else zero-extend */
11767 bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
11771 /* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
11772 (adjust stack pointer upwards), negative means add that number of
11773 levels (adjust the stack pointer downwards). Only positive values
11774 normally make sense. */
11777 bc_adjust_stack (nlevels)
11786 bc_emit_instruction (drop);
11789 bc_emit_instruction (drop);
11794 bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
11795 stack_depth -= nlevels;
11798 #if defined (VALIDATE_STACK_FOR_BC)
11799 VALIDATE_STACK_FOR_BC ();