1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 92-97, 1998 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
30 #include "hard-reg-set.h"
33 #include "insn-flags.h"
34 #include "insn-codes.h"
35 #include "insn-config.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
40 #include "typeclass.h"
44 #define CEIL(x,y) (((x) + (y) - 1) / (y))
46 /* Decide whether a function's arguments should be processed
47 from first to last or from last to first.
49 They should if the stack and args grow in opposite directions, but
50 only if we have push insns. */
54 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
55 #define PUSH_ARGS_REVERSED /* If it's last to first */
60 #ifndef STACK_PUSH_CODE
61 #ifdef STACK_GROWS_DOWNWARD
62 #define STACK_PUSH_CODE PRE_DEC
64 #define STACK_PUSH_CODE PRE_INC
68 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
69 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
71 /* Assume that case vectors are not pc-relative. */
72 #ifndef CASE_VECTOR_PC_RELATIVE
73 #define CASE_VECTOR_PC_RELATIVE 0
76 /* If this is nonzero, we do not bother generating VOLATILE
77 around volatile memory references, and we are willing to
78 output indirect addresses. If cse is to follow, we reject
79 indirect addresses so a useful potential cse is generated;
80 if it is used only once, instruction combination will produce
81 the same indirect address eventually. */
84 /* Nonzero to generate code for all the subroutines within an
85 expression before generating the upper levels of the expression.
86 Nowadays this is never zero. */
87 int do_preexpand_calls = 1;
89 /* Number of units that we should eventually pop off the stack.
90 These are the arguments to function calls that have already returned. */
91 int pending_stack_adjust;
93 /* Nonzero means stack pops must not be deferred, and deferred stack
94 pops must not be output. It is nonzero inside a function call,
95 inside a conditional expression, inside a statement expression,
96 and in other cases as well. */
97 int inhibit_defer_pop;
99 /* Nonzero means __builtin_saveregs has already been done in this function.
100 The value is the pseudoreg containing the value __builtin_saveregs
102 static rtx saveregs_value;
104 /* Similarly for __builtin_apply_args. */
105 static rtx apply_args_value;
107 /* Don't check memory usage, since code is being emitted to check a memory
108 usage. Used when flag_check_memory_usage is true, to avoid infinite
110 static int in_check_memory_usage;
112 /* This structure is used by move_by_pieces to describe the move to
114 struct move_by_pieces
124 int explicit_inc_from;
131 /* This structure is used by clear_by_pieces to describe the clear to
134 struct clear_by_pieces
146 extern struct obstack permanent_obstack;
147 extern rtx arg_pointer_save_area;
149 static rtx get_push_address PROTO ((int));
151 static rtx enqueue_insn PROTO((rtx, rtx));
152 static int queued_subexp_p PROTO((rtx));
153 static void init_queue PROTO((void));
154 static void move_by_pieces PROTO((rtx, rtx, int, int));
155 static int move_by_pieces_ninsns PROTO((unsigned int, int));
156 static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
157 struct move_by_pieces *));
158 static void clear_by_pieces PROTO((rtx, int, int));
159 static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
160 struct clear_by_pieces *));
161 static int is_zeros_p PROTO((tree));
162 static int mostly_zeros_p PROTO((tree));
163 static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
165 static void store_constructor PROTO((tree, rtx, int));
166 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
167 enum machine_mode, int, int, int));
168 static enum memory_use_mode
169 get_memory_usage_from_modifier PROTO((enum expand_modifier));
170 static tree save_noncopied_parts PROTO((tree, tree));
171 static tree init_noncopied_parts PROTO((tree, tree));
172 static int safe_from_p PROTO((rtx, tree, int));
173 static int fixed_type_p PROTO((tree));
174 static rtx var_rtx PROTO((tree));
175 static int get_pointer_alignment PROTO((tree, unsigned));
176 static tree string_constant PROTO((tree, tree *));
177 static tree c_strlen PROTO((tree));
178 static rtx expand_builtin PROTO((tree, rtx, rtx,
179 enum machine_mode, int));
180 static int apply_args_size PROTO((void));
181 static int apply_result_size PROTO((void));
182 static rtx result_vector PROTO((int, rtx));
183 static rtx expand_builtin_apply_args PROTO((void));
184 static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
185 static void expand_builtin_return PROTO((rtx));
186 static rtx expand_increment PROTO((tree, int, int));
187 static void preexpand_calls PROTO((tree));
188 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
189 void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
190 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
191 static void do_jump_for_compare PROTO((rtx, rtx, rtx));
192 static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
193 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
194 extern tree truthvalue_conversion PROTO((tree));
196 /* Record for each mode whether we can move a register directly to or
197 from an object of that mode in memory. If we can't, we won't try
198 to use that mode directly when accessing a field of that mode. */
200 static char direct_load[NUM_MACHINE_MODES];
201 static char direct_store[NUM_MACHINE_MODES];
203 /* MOVE_RATIO is the number of move instructions that is better than
207 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
210 /* If we are optimizing for space (-Os), cut down the default move ratio */
211 #define MOVE_RATIO (optimize_size ? 3 : 15)
215 /* This array records the insn_code of insns to perform block moves. */
216 enum insn_code movstr_optab[NUM_MACHINE_MODES];
218 /* This array records the insn_code of insns to perform block clears. */
219 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
221 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
223 #ifndef SLOW_UNALIGNED_ACCESS
224 #define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
227 /* Register mappings for target machines without register windows. */
228 #ifndef INCOMING_REGNO
229 #define INCOMING_REGNO(OUT) (OUT)
231 #ifndef OUTGOING_REGNO
232 #define OUTGOING_REGNO(IN) (IN)
235 /* This is run once per compilation to set up which modes can be used
236 directly in memory and to initialize the block move optab. */
242 enum machine_mode mode;
243 /* Try indexing by frame ptr and try by stack ptr.
244 It is known that on the Convex the stack ptr isn't a valid index.
245 With luck, one or the other is valid on any machine. */
246 rtx mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
247 rtx mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
250 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
251 pat = PATTERN (insn);
253 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
254 mode = (enum machine_mode) ((int) mode + 1))
260 direct_load[(int) mode] = direct_store[(int) mode] = 0;
261 PUT_MODE (mem, mode);
262 PUT_MODE (mem1, mode);
264 /* See if there is some register that can be used in this mode and
265 directly loaded or stored from memory. */
267 if (mode != VOIDmode && mode != BLKmode)
268 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
269 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
272 if (! HARD_REGNO_MODE_OK (regno, mode))
275 reg = gen_rtx_REG (mode, regno);
278 SET_DEST (pat) = reg;
279 if (recog (pat, insn, &num_clobbers) >= 0)
280 direct_load[(int) mode] = 1;
282 SET_SRC (pat) = mem1;
283 SET_DEST (pat) = reg;
284 if (recog (pat, insn, &num_clobbers) >= 0)
285 direct_load[(int) mode] = 1;
288 SET_DEST (pat) = mem;
289 if (recog (pat, insn, &num_clobbers) >= 0)
290 direct_store[(int) mode] = 1;
293 SET_DEST (pat) = mem1;
294 if (recog (pat, insn, &num_clobbers) >= 0)
295 direct_store[(int) mode] = 1;
302 /* This is run at the start of compiling a function. */
309 pending_stack_adjust = 0;
310 inhibit_defer_pop = 0;
312 apply_args_value = 0;
316 /* Save all variables describing the current status into the structure *P.
317 This is used before starting a nested function. */
323 /* Instead of saving the postincrement queue, empty it. */
326 p->pending_stack_adjust = pending_stack_adjust;
327 p->inhibit_defer_pop = inhibit_defer_pop;
328 p->saveregs_value = saveregs_value;
329 p->apply_args_value = apply_args_value;
330 p->forced_labels = forced_labels;
332 pending_stack_adjust = 0;
333 inhibit_defer_pop = 0;
335 apply_args_value = 0;
339 /* Restore all variables describing the current status from the structure *P.
340 This is used after a nested function. */
343 restore_expr_status (p)
346 pending_stack_adjust = p->pending_stack_adjust;
347 inhibit_defer_pop = p->inhibit_defer_pop;
348 saveregs_value = p->saveregs_value;
349 apply_args_value = p->apply_args_value;
350 forced_labels = p->forced_labels;
353 /* Manage the queue of increment instructions to be output
354 for POSTINCREMENT_EXPR expressions, etc. */
356 static rtx pending_chain;
358 /* Queue up to increment (or change) VAR later. BODY says how:
359 BODY should be the same thing you would pass to emit_insn
360 to increment right away. It will go to emit_insn later on.
362 The value is a QUEUED expression to be used in place of VAR
363 where you want to guarantee the pre-incrementation value of VAR. */
366 enqueue_insn (var, body)
369 pending_chain = gen_rtx_QUEUED (GET_MODE (var),
370 var, NULL_RTX, NULL_RTX, body,
372 return pending_chain;
375 /* Use protect_from_queue to convert a QUEUED expression
376 into something that you can put immediately into an instruction.
377 If the queued incrementation has not happened yet,
378 protect_from_queue returns the variable itself.
379 If the incrementation has happened, protect_from_queue returns a temp
380 that contains a copy of the old value of the variable.
382 Any time an rtx which might possibly be a QUEUED is to be put
383 into an instruction, it must be passed through protect_from_queue first.
384 QUEUED expressions are not meaningful in instructions.
386 Do not pass a value through protect_from_queue and then hold
387 on to it for a while before putting it in an instruction!
388 If the queue is flushed in between, incorrect code will result. */
391 protect_from_queue (x, modify)
395 register RTX_CODE code = GET_CODE (x);
397 #if 0 /* A QUEUED can hang around after the queue is forced out. */
398 /* Shortcut for most common case. */
399 if (pending_chain == 0)
405 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
406 use of autoincrement. Make a copy of the contents of the memory
407 location rather than a copy of the address, but not if the value is
408 of mode BLKmode. Don't modify X in place since it might be
410 if (code == MEM && GET_MODE (x) != BLKmode
411 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
413 register rtx y = XEXP (x, 0);
414 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
416 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
417 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
418 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
422 register rtx temp = gen_reg_rtx (GET_MODE (new));
423 emit_insn_before (gen_move_insn (temp, new),
429 /* Otherwise, recursively protect the subexpressions of all
430 the kinds of rtx's that can contain a QUEUED. */
433 rtx tem = protect_from_queue (XEXP (x, 0), 0);
434 if (tem != XEXP (x, 0))
440 else if (code == PLUS || code == MULT)
442 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
443 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
444 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
453 /* If the increment has not happened, use the variable itself. */
454 if (QUEUED_INSN (x) == 0)
455 return QUEUED_VAR (x);
456 /* If the increment has happened and a pre-increment copy exists,
458 if (QUEUED_COPY (x) != 0)
459 return QUEUED_COPY (x);
460 /* The increment has happened but we haven't set up a pre-increment copy.
461 Set one up now, and use it. */
462 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
463 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
465 return QUEUED_COPY (x);
468 /* Return nonzero if X contains a QUEUED expression:
469 if it contains anything that will be altered by a queued increment.
470 We handle only combinations of MEM, PLUS, MINUS and MULT operators
471 since memory addresses generally contain only those. */
477 register enum rtx_code code = GET_CODE (x);
483 return queued_subexp_p (XEXP (x, 0));
487 return (queued_subexp_p (XEXP (x, 0))
488 || queued_subexp_p (XEXP (x, 1)));
494 /* Perform all the pending incrementations. */
500 while ((p = pending_chain))
502 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
503 pending_chain = QUEUED_NEXT (p);
514 /* Copy data from FROM to TO, where the machine modes are not the same.
515 Both modes may be integer, or both may be floating.
516 UNSIGNEDP should be nonzero if FROM is an unsigned type.
517 This causes zero-extension instead of sign-extension. */
520 convert_move (to, from, unsignedp)
521 register rtx to, from;
524 enum machine_mode to_mode = GET_MODE (to);
525 enum machine_mode from_mode = GET_MODE (from);
526 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
527 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
531 /* rtx code for making an equivalent value. */
532 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
534 to = protect_from_queue (to, 1);
535 from = protect_from_queue (from, 0);
537 if (to_real != from_real)
540 /* If FROM is a SUBREG that indicates that we have already done at least
541 the required extension, strip it. We don't handle such SUBREGs as
544 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
545 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
546 >= GET_MODE_SIZE (to_mode))
547 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
548 from = gen_lowpart (to_mode, from), from_mode = to_mode;
550 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
553 if (to_mode == from_mode
554 || (from_mode == VOIDmode && CONSTANT_P (from)))
556 emit_move_insn (to, from);
564 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
566 /* Try converting directly if the insn is supported. */
567 if ((code = can_extend_p (to_mode, from_mode, 0))
570 emit_unop_insn (code, to, from, UNKNOWN);
575 #ifdef HAVE_trunchfqf2
576 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
578 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
582 #ifdef HAVE_trunctqfqf2
583 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
585 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
589 #ifdef HAVE_truncsfqf2
590 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
592 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
596 #ifdef HAVE_truncdfqf2
597 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
599 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
603 #ifdef HAVE_truncxfqf2
604 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
606 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
610 #ifdef HAVE_trunctfqf2
611 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
613 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
618 #ifdef HAVE_trunctqfhf2
619 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
621 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
625 #ifdef HAVE_truncsfhf2
626 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
628 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
632 #ifdef HAVE_truncdfhf2
633 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
635 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
639 #ifdef HAVE_truncxfhf2
640 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
642 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
646 #ifdef HAVE_trunctfhf2
647 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
649 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
654 #ifdef HAVE_truncsftqf2
655 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
657 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
661 #ifdef HAVE_truncdftqf2
662 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
664 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
668 #ifdef HAVE_truncxftqf2
669 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
671 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
675 #ifdef HAVE_trunctftqf2
676 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
678 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
683 #ifdef HAVE_truncdfsf2
684 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
686 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
690 #ifdef HAVE_truncxfsf2
691 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
693 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
697 #ifdef HAVE_trunctfsf2
698 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
700 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
704 #ifdef HAVE_truncxfdf2
705 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
707 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
711 #ifdef HAVE_trunctfdf2
712 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
714 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
726 libcall = extendsfdf2_libfunc;
730 libcall = extendsfxf2_libfunc;
734 libcall = extendsftf2_libfunc;
746 libcall = truncdfsf2_libfunc;
750 libcall = extenddfxf2_libfunc;
754 libcall = extenddftf2_libfunc;
766 libcall = truncxfsf2_libfunc;
770 libcall = truncxfdf2_libfunc;
782 libcall = trunctfsf2_libfunc;
786 libcall = trunctfdf2_libfunc;
798 if (libcall == (rtx) 0)
799 /* This conversion is not implemented yet. */
802 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
804 emit_move_insn (to, value);
808 /* Now both modes are integers. */
810 /* Handle expanding beyond a word. */
811 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
812 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
819 enum machine_mode lowpart_mode;
820 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
822 /* Try converting directly if the insn is supported. */
823 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
826 /* If FROM is a SUBREG, put it into a register. Do this
827 so that we always generate the same set of insns for
828 better cse'ing; if an intermediate assignment occurred,
829 we won't be doing the operation directly on the SUBREG. */
830 if (optimize > 0 && GET_CODE (from) == SUBREG)
831 from = force_reg (from_mode, from);
832 emit_unop_insn (code, to, from, equiv_code);
835 /* Next, try converting via full word. */
836 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
837 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
838 != CODE_FOR_nothing))
840 if (GET_CODE (to) == REG)
841 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
842 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
843 emit_unop_insn (code, to,
844 gen_lowpart (word_mode, to), equiv_code);
848 /* No special multiword conversion insn; do it by hand. */
851 /* Since we will turn this into a no conflict block, we must ensure
852 that the source does not overlap the target. */
854 if (reg_overlap_mentioned_p (to, from))
855 from = force_reg (from_mode, from);
857 /* Get a copy of FROM widened to a word, if necessary. */
858 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
859 lowpart_mode = word_mode;
861 lowpart_mode = from_mode;
863 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
865 lowpart = gen_lowpart (lowpart_mode, to);
866 emit_move_insn (lowpart, lowfrom);
868 /* Compute the value to put in each remaining word. */
870 fill_value = const0_rtx;
875 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
876 && STORE_FLAG_VALUE == -1)
878 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
880 fill_value = gen_reg_rtx (word_mode);
881 emit_insn (gen_slt (fill_value));
887 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
888 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
890 fill_value = convert_to_mode (word_mode, fill_value, 1);
894 /* Fill the remaining words. */
895 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
897 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
898 rtx subword = operand_subword (to, index, 1, to_mode);
903 if (fill_value != subword)
904 emit_move_insn (subword, fill_value);
907 insns = get_insns ();
910 emit_no_conflict_block (insns, to, from, NULL_RTX,
911 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
915 /* Truncating multi-word to a word or less. */
916 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
917 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
919 if (!((GET_CODE (from) == MEM
920 && ! MEM_VOLATILE_P (from)
921 && direct_load[(int) to_mode]
922 && ! mode_dependent_address_p (XEXP (from, 0)))
923 || GET_CODE (from) == REG
924 || GET_CODE (from) == SUBREG))
925 from = force_reg (from_mode, from);
926 convert_move (to, gen_lowpart (word_mode, from), 0);
930 /* Handle pointer conversion */ /* SPEE 900220 */
931 if (to_mode == PQImode)
933 if (from_mode != QImode)
934 from = convert_to_mode (QImode, from, unsignedp);
936 #ifdef HAVE_truncqipqi2
937 if (HAVE_truncqipqi2)
939 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
942 #endif /* HAVE_truncqipqi2 */
946 if (from_mode == PQImode)
948 if (to_mode != QImode)
950 from = convert_to_mode (QImode, from, unsignedp);
955 #ifdef HAVE_extendpqiqi2
956 if (HAVE_extendpqiqi2)
958 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
961 #endif /* HAVE_extendpqiqi2 */
966 if (to_mode == PSImode)
968 if (from_mode != SImode)
969 from = convert_to_mode (SImode, from, unsignedp);
971 #ifdef HAVE_truncsipsi2
972 if (HAVE_truncsipsi2)
974 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
977 #endif /* HAVE_truncsipsi2 */
981 if (from_mode == PSImode)
983 if (to_mode != SImode)
985 from = convert_to_mode (SImode, from, unsignedp);
990 #ifdef HAVE_extendpsisi2
991 if (HAVE_extendpsisi2)
993 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
996 #endif /* HAVE_extendpsisi2 */
1001 if (to_mode == PDImode)
1003 if (from_mode != DImode)
1004 from = convert_to_mode (DImode, from, unsignedp);
1006 #ifdef HAVE_truncdipdi2
1007 if (HAVE_truncdipdi2)
1009 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1012 #endif /* HAVE_truncdipdi2 */
1016 if (from_mode == PDImode)
1018 if (to_mode != DImode)
1020 from = convert_to_mode (DImode, from, unsignedp);
1025 #ifdef HAVE_extendpdidi2
1026 if (HAVE_extendpdidi2)
1028 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1031 #endif /* HAVE_extendpdidi2 */
1036 /* Now follow all the conversions between integers
1037 no more than a word long. */
1039 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1040 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1041 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1042 GET_MODE_BITSIZE (from_mode)))
1044 if (!((GET_CODE (from) == MEM
1045 && ! MEM_VOLATILE_P (from)
1046 && direct_load[(int) to_mode]
1047 && ! mode_dependent_address_p (XEXP (from, 0)))
1048 || GET_CODE (from) == REG
1049 || GET_CODE (from) == SUBREG))
1050 from = force_reg (from_mode, from);
1051 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1052 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1053 from = copy_to_reg (from);
1054 emit_move_insn (to, gen_lowpart (to_mode, from));
1058 /* Handle extension. */
1059 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1061 /* Convert directly if that works. */
1062 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1063 != CODE_FOR_nothing)
1065 emit_unop_insn (code, to, from, equiv_code);
1070 enum machine_mode intermediate;
1072 /* Search for a mode to convert via. */
1073 for (intermediate = from_mode; intermediate != VOIDmode;
1074 intermediate = GET_MODE_WIDER_MODE (intermediate))
1075 if (((can_extend_p (to_mode, intermediate, unsignedp)
1076 != CODE_FOR_nothing)
1077 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1078 && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
1079 && (can_extend_p (intermediate, from_mode, unsignedp)
1080 != CODE_FOR_nothing))
1082 convert_move (to, convert_to_mode (intermediate, from,
1083 unsignedp), unsignedp);
1087 /* No suitable intermediate mode. */
1092 /* Support special truncate insns for certain modes. */
1094 if (from_mode == DImode && to_mode == SImode)
1096 #ifdef HAVE_truncdisi2
1097 if (HAVE_truncdisi2)
1099 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1103 convert_move (to, force_reg (from_mode, from), unsignedp);
1107 if (from_mode == DImode && to_mode == HImode)
1109 #ifdef HAVE_truncdihi2
1110 if (HAVE_truncdihi2)
1112 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1116 convert_move (to, force_reg (from_mode, from), unsignedp);
1120 if (from_mode == DImode && to_mode == QImode)
1122 #ifdef HAVE_truncdiqi2
1123 if (HAVE_truncdiqi2)
1125 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1129 convert_move (to, force_reg (from_mode, from), unsignedp);
1133 if (from_mode == SImode && to_mode == HImode)
1135 #ifdef HAVE_truncsihi2
1136 if (HAVE_truncsihi2)
1138 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1142 convert_move (to, force_reg (from_mode, from), unsignedp);
1146 if (from_mode == SImode && to_mode == QImode)
1148 #ifdef HAVE_truncsiqi2
1149 if (HAVE_truncsiqi2)
1151 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1155 convert_move (to, force_reg (from_mode, from), unsignedp);
1159 if (from_mode == HImode && to_mode == QImode)
1161 #ifdef HAVE_trunchiqi2
1162 if (HAVE_trunchiqi2)
1164 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1168 convert_move (to, force_reg (from_mode, from), unsignedp);
1172 if (from_mode == TImode && to_mode == DImode)
1174 #ifdef HAVE_trunctidi2
1175 if (HAVE_trunctidi2)
1177 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1181 convert_move (to, force_reg (from_mode, from), unsignedp);
1185 if (from_mode == TImode && to_mode == SImode)
1187 #ifdef HAVE_trunctisi2
1188 if (HAVE_trunctisi2)
1190 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1194 convert_move (to, force_reg (from_mode, from), unsignedp);
1198 if (from_mode == TImode && to_mode == HImode)
1200 #ifdef HAVE_trunctihi2
1201 if (HAVE_trunctihi2)
1203 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1207 convert_move (to, force_reg (from_mode, from), unsignedp);
1211 if (from_mode == TImode && to_mode == QImode)
1213 #ifdef HAVE_trunctiqi2
1214 if (HAVE_trunctiqi2)
1216 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1220 convert_move (to, force_reg (from_mode, from), unsignedp);
1224 /* Handle truncation of volatile memrefs, and so on;
1225 the things that couldn't be truncated directly,
1226 and for which there was no special instruction. */
1227 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1229 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1230 emit_move_insn (to, temp);
1234 /* Mode combination is not recognized. */
1238 /* Return an rtx for a value that would result
1239 from converting X to mode MODE.
1240 Both X and MODE may be floating, or both integer.
1241 UNSIGNEDP is nonzero if X is an unsigned value.
1242 This can be done by referring to a part of X in place
1243 or by copying to a new temporary with conversion.
1245 This function *must not* call protect_from_queue
1246 except when putting X into an insn (in which case convert_move does it). */
1249 convert_to_mode (mode, x, unsignedp)
1250 enum machine_mode mode;
1254 return convert_modes (mode, VOIDmode, x, unsignedp);
1257 /* Return an rtx for a value that would result
1258 from converting X from mode OLDMODE to mode MODE.
1259 Both modes may be floating, or both integer.
1260 UNSIGNEDP is nonzero if X is an unsigned value.
1262 This can be done by referring to a part of X in place
1263 or by copying to a new temporary with conversion.
1265 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1267 This function *must not* call protect_from_queue
1268 except when putting X into an insn (in which case convert_move does it). */
1271 convert_modes (mode, oldmode, x, unsignedp)
1272 enum machine_mode mode, oldmode;
1278 /* If FROM is a SUBREG that indicates that we have already done at least
1279 the required extension, strip it. */
1281 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1282 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1283 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1284 x = gen_lowpart (mode, x);
1286 if (GET_MODE (x) != VOIDmode)
1287 oldmode = GET_MODE (x);
1289 if (mode == oldmode)
1292 /* There is one case that we must handle specially: If we are converting
1293 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1294 we are to interpret the constant as unsigned, gen_lowpart will do
1295 the wrong if the constant appears negative. What we want to do is
1296 make the high-order word of the constant zero, not all ones. */
1298 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1299 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1300 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1302 HOST_WIDE_INT val = INTVAL (x);
1304 if (oldmode != VOIDmode
1305 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1307 int width = GET_MODE_BITSIZE (oldmode);
1309 /* We need to zero extend VAL. */
1310 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1313 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1316 /* We can do this with a gen_lowpart if both desired and current modes
1317 are integer, and this is either a constant integer, a register, or a
1318 non-volatile MEM. Except for the constant case where MODE is no
1319 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1321 if ((GET_CODE (x) == CONST_INT
1322 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1323 || (GET_MODE_CLASS (mode) == MODE_INT
1324 && GET_MODE_CLASS (oldmode) == MODE_INT
1325 && (GET_CODE (x) == CONST_DOUBLE
1326 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1327 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1328 && direct_load[(int) mode])
1329 || (GET_CODE (x) == REG
1330 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1331 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1333 /* ?? If we don't know OLDMODE, we have to assume here that
1334 X does not need sign- or zero-extension. This may not be
1335 the case, but it's the best we can do. */
1336 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1337 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1339 HOST_WIDE_INT val = INTVAL (x);
1340 int width = GET_MODE_BITSIZE (oldmode);
1342 /* We must sign or zero-extend in this case. Start by
1343 zero-extending, then sign extend if we need to. */
1344 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1346 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1347 val |= (HOST_WIDE_INT) (-1) << width;
1349 return GEN_INT (val);
1352 return gen_lowpart (mode, x);
1355 temp = gen_reg_rtx (mode);
1356 convert_move (temp, x, unsignedp);
1360 /* Generate several move instructions to copy LEN bytes
1361 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1362 The caller must pass FROM and TO
1363 through protect_from_queue before calling.
1364 ALIGN (in bytes) is maximum alignment we can assume. */
1367 move_by_pieces (to, from, len, align)
1371 struct move_by_pieces data;
1372 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1373 int max_size = MOVE_MAX + 1;
1376 data.to_addr = to_addr;
1377 data.from_addr = from_addr;
1381 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1382 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1384 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1385 || GET_CODE (from_addr) == POST_INC
1386 || GET_CODE (from_addr) == POST_DEC);
1388 data.explicit_inc_from = 0;
1389 data.explicit_inc_to = 0;
1391 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1392 if (data.reverse) data.offset = len;
1395 data.to_struct = MEM_IN_STRUCT_P (to);
1396 data.from_struct = MEM_IN_STRUCT_P (from);
1398 /* If copying requires more than two move insns,
1399 copy addresses to registers (to make displacements shorter)
1400 and use post-increment if available. */
1401 if (!(data.autinc_from && data.autinc_to)
1402 && move_by_pieces_ninsns (len, align) > 2)
1404 #ifdef HAVE_PRE_DECREMENT
1405 if (data.reverse && ! data.autinc_from)
1407 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1408 data.autinc_from = 1;
1409 data.explicit_inc_from = -1;
1412 #ifdef HAVE_POST_INCREMENT
1413 if (! data.autinc_from)
1415 data.from_addr = copy_addr_to_reg (from_addr);
1416 data.autinc_from = 1;
1417 data.explicit_inc_from = 1;
1420 if (!data.autinc_from && CONSTANT_P (from_addr))
1421 data.from_addr = copy_addr_to_reg (from_addr);
1422 #ifdef HAVE_PRE_DECREMENT
1423 if (data.reverse && ! data.autinc_to)
1425 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1427 data.explicit_inc_to = -1;
1430 #ifdef HAVE_POST_INCREMENT
1431 if (! data.reverse && ! data.autinc_to)
1433 data.to_addr = copy_addr_to_reg (to_addr);
1435 data.explicit_inc_to = 1;
1438 if (!data.autinc_to && CONSTANT_P (to_addr))
1439 data.to_addr = copy_addr_to_reg (to_addr);
1442 if (! SLOW_UNALIGNED_ACCESS
1443 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1446 /* First move what we can in the largest integer mode, then go to
1447 successively smaller modes. */
1449 while (max_size > 1)
1451 enum machine_mode mode = VOIDmode, tmode;
1452 enum insn_code icode;
1454 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1455 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1456 if (GET_MODE_SIZE (tmode) < max_size)
1459 if (mode == VOIDmode)
1462 icode = mov_optab->handlers[(int) mode].insn_code;
1463 if (icode != CODE_FOR_nothing
1464 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1465 GET_MODE_SIZE (mode)))
1466 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1468 max_size = GET_MODE_SIZE (mode);
1471 /* The code above should have handled everything. */
1476 /* Return number of insns required to move L bytes by pieces.
1477 ALIGN (in bytes) is maximum alignment we can assume. */
1480 move_by_pieces_ninsns (l, align)
1484 register int n_insns = 0;
1485 int max_size = MOVE_MAX + 1;
1487 if (! SLOW_UNALIGNED_ACCESS
1488 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1491 while (max_size > 1)
1493 enum machine_mode mode = VOIDmode, tmode;
1494 enum insn_code icode;
1496 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1497 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1498 if (GET_MODE_SIZE (tmode) < max_size)
1501 if (mode == VOIDmode)
1504 icode = mov_optab->handlers[(int) mode].insn_code;
1505 if (icode != CODE_FOR_nothing
1506 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1507 GET_MODE_SIZE (mode)))
1508 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1510 max_size = GET_MODE_SIZE (mode);
1516 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1517 with move instructions for mode MODE. GENFUN is the gen_... function
1518 to make a move insn for that mode. DATA has all the other info. */
1521 move_by_pieces_1 (genfun, mode, data)
1522 rtx (*genfun) PROTO ((rtx, ...));
1523 enum machine_mode mode;
1524 struct move_by_pieces *data;
1526 register int size = GET_MODE_SIZE (mode);
1527 register rtx to1, from1;
1529 while (data->len >= size)
1531 if (data->reverse) data->offset -= size;
1533 to1 = (data->autinc_to
1534 ? gen_rtx_MEM (mode, data->to_addr)
1535 : copy_rtx (change_address (data->to, mode,
1536 plus_constant (data->to_addr,
1538 MEM_IN_STRUCT_P (to1) = data->to_struct;
1541 = (data->autinc_from
1542 ? gen_rtx_MEM (mode, data->from_addr)
1543 : copy_rtx (change_address (data->from, mode,
1544 plus_constant (data->from_addr,
1546 MEM_IN_STRUCT_P (from1) = data->from_struct;
1548 #ifdef HAVE_PRE_DECREMENT
1549 if (data->explicit_inc_to < 0)
1550 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1551 if (data->explicit_inc_from < 0)
1552 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1555 emit_insn ((*genfun) (to1, from1));
1556 #ifdef HAVE_POST_INCREMENT
1557 if (data->explicit_inc_to > 0)
1558 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1559 if (data->explicit_inc_from > 0)
1560 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1563 if (! data->reverse) data->offset += size;
1569 /* Emit code to move a block Y to a block X.
1570 This may be done with string-move instructions,
1571 with multiple scalar move instructions, or with a library call.
1573 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1575 SIZE is an rtx that says how long they are.
1576 ALIGN is the maximum alignment we can assume they have,
1579 Return the address of the new block, if memcpy is called and returns it,
1583 emit_block_move (x, y, size, align)
1590 if (GET_MODE (x) != BLKmode)
1593 if (GET_MODE (y) != BLKmode)
1596 x = protect_from_queue (x, 1);
1597 y = protect_from_queue (y, 0);
1598 size = protect_from_queue (size, 0);
1600 if (GET_CODE (x) != MEM)
1602 if (GET_CODE (y) != MEM)
1607 if (GET_CODE (size) == CONST_INT
1608 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1609 move_by_pieces (x, y, INTVAL (size), align);
1612 /* Try the most limited insn first, because there's no point
1613 including more than one in the machine description unless
1614 the more limited one has some advantage. */
1616 rtx opalign = GEN_INT (align);
1617 enum machine_mode mode;
1619 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1620 mode = GET_MODE_WIDER_MODE (mode))
1622 enum insn_code code = movstr_optab[(int) mode];
1624 if (code != CODE_FOR_nothing
1625 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1626 here because if SIZE is less than the mode mask, as it is
1627 returned by the macro, it will definitely be less than the
1628 actual mode mask. */
1629 && ((GET_CODE (size) == CONST_INT
1630 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1631 <= (GET_MODE_MASK (mode) >> 1)))
1632 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1633 && (insn_operand_predicate[(int) code][0] == 0
1634 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1635 && (insn_operand_predicate[(int) code][1] == 0
1636 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1637 && (insn_operand_predicate[(int) code][3] == 0
1638 || (*insn_operand_predicate[(int) code][3]) (opalign,
1642 rtx last = get_last_insn ();
1645 op2 = convert_to_mode (mode, size, 1);
1646 if (insn_operand_predicate[(int) code][2] != 0
1647 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1648 op2 = copy_to_mode_reg (mode, op2);
1650 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1657 delete_insns_since (last);
1661 #ifdef TARGET_MEM_FUNCTIONS
1663 = emit_library_call_value (memcpy_libfunc, NULL_RTX, 0,
1664 ptr_mode, 3, XEXP (x, 0), Pmode,
1666 convert_to_mode (TYPE_MODE (sizetype), size,
1667 TREE_UNSIGNED (sizetype)),
1668 TYPE_MODE (sizetype));
1670 emit_library_call (bcopy_libfunc, 0,
1671 VOIDmode, 3, XEXP (y, 0), Pmode,
1673 convert_to_mode (TYPE_MODE (integer_type_node), size,
1674 TREE_UNSIGNED (integer_type_node)),
1675 TYPE_MODE (integer_type_node));
1682 /* Copy all or part of a value X into registers starting at REGNO.
1683 The number of registers to be filled is NREGS. */
1686 move_block_to_reg (regno, x, nregs, mode)
1690 enum machine_mode mode;
1693 #ifdef HAVE_load_multiple
1701 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1702 x = validize_mem (force_const_mem (mode, x));
1704 /* See if the machine can do this with a load multiple insn. */
1705 #ifdef HAVE_load_multiple
1706 if (HAVE_load_multiple)
1708 last = get_last_insn ();
1709 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1717 delete_insns_since (last);
1721 for (i = 0; i < nregs; i++)
1722 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1723 operand_subword_force (x, i, mode));
1726 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1727 The number of registers to be filled is NREGS. SIZE indicates the number
1728 of bytes in the object X. */
1732 move_block_from_reg (regno, x, nregs, size)
1739 #ifdef HAVE_store_multiple
1743 enum machine_mode mode;
1745 /* If SIZE is that of a mode no bigger than a word, just use that
1746 mode's store operation. */
1747 if (size <= UNITS_PER_WORD
1748 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1750 emit_move_insn (change_address (x, mode, NULL),
1751 gen_rtx_REG (mode, regno));
1755 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1756 to the left before storing to memory. Note that the previous test
1757 doesn't handle all cases (e.g. SIZE == 3). */
1758 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1760 rtx tem = operand_subword (x, 0, 1, BLKmode);
1766 shift = expand_shift (LSHIFT_EXPR, word_mode,
1767 gen_rtx_REG (word_mode, regno),
1768 build_int_2 ((UNITS_PER_WORD - size)
1769 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1770 emit_move_insn (tem, shift);
1774 /* See if the machine can do this with a store multiple insn. */
1775 #ifdef HAVE_store_multiple
1776 if (HAVE_store_multiple)
1778 last = get_last_insn ();
1779 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1787 delete_insns_since (last);
1791 for (i = 0; i < nregs; i++)
1793 rtx tem = operand_subword (x, i, 1, BLKmode);
1798 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1802 /* Emit code to move a block Y to a block X, where X is non-consecutive
1803 registers represented by a PARALLEL. */
1806 emit_group_load (x, y)
1809 rtx target_reg, source;
1812 if (GET_CODE (x) != PARALLEL)
1815 /* Check for a NULL entry, used to indicate that the parameter goes
1816 both on the stack and in registers. */
1817 if (XEXP (XVECEXP (x, 0, 0), 0))
1822 for (; i < XVECLEN (x, 0); i++)
1824 rtx element = XVECEXP (x, 0, i);
1826 target_reg = XEXP (element, 0);
1828 if (GET_CODE (y) == MEM)
1829 source = change_address (y, GET_MODE (target_reg),
1830 plus_constant (XEXP (y, 0),
1831 INTVAL (XEXP (element, 1))));
1832 else if (XEXP (element, 1) == const0_rtx)
1834 if (GET_MODE (target_reg) == GET_MODE (y))
1836 /* Allow for the target_reg to be smaller than the input register
1837 to allow for AIX with 4 DF arguments after a single SI arg. The
1838 last DF argument will only load 1 word into the integer registers,
1839 but load a DF value into the float registers. */
1840 else if ((GET_MODE_SIZE (GET_MODE (target_reg))
1841 <= GET_MODE_SIZE (GET_MODE (y)))
1842 && GET_MODE (target_reg) == word_mode)
1843 /* This might be a const_double, so we can't just use SUBREG. */
1844 source = operand_subword (y, 0, 0, VOIDmode);
1845 else if (GET_MODE_SIZE (GET_MODE (target_reg))
1846 == GET_MODE_SIZE (GET_MODE (y)))
1847 source = gen_lowpart (GET_MODE (target_reg), y);
1854 emit_move_insn (target_reg, source);
1858 /* Emit code to move a block Y to a block X, where Y is non-consecutive
1859 registers represented by a PARALLEL. */
1862 emit_group_store (x, y)
1865 rtx source_reg, target;
1868 if (GET_CODE (y) != PARALLEL)
1871 /* Check for a NULL entry, used to indicate that the parameter goes
1872 both on the stack and in registers. */
1873 if (XEXP (XVECEXP (y, 0, 0), 0))
1878 for (; i < XVECLEN (y, 0); i++)
1880 rtx element = XVECEXP (y, 0, i);
1882 source_reg = XEXP (element, 0);
1884 if (GET_CODE (x) == MEM)
1885 target = change_address (x, GET_MODE (source_reg),
1886 plus_constant (XEXP (x, 0),
1887 INTVAL (XEXP (element, 1))));
1888 else if (XEXP (element, 1) == const0_rtx)
1891 if (GET_MODE (target) != GET_MODE (source_reg))
1892 target = gen_lowpart (GET_MODE (source_reg), target);
1897 emit_move_insn (target, source_reg);
1901 /* Add a USE expression for REG to the (possibly empty) list pointed
1902 to by CALL_FUSAGE. REG must denote a hard register. */
1905 use_reg (call_fusage, reg)
1906 rtx *call_fusage, reg;
1908 if (GET_CODE (reg) != REG
1909 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
1913 = gen_rtx_EXPR_LIST (VOIDmode,
1914 gen_rtx_USE (VOIDmode, reg), *call_fusage);
1917 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1918 starting at REGNO. All of these registers must be hard registers. */
1921 use_regs (call_fusage, regno, nregs)
1928 if (regno + nregs > FIRST_PSEUDO_REGISTER)
1931 for (i = 0; i < nregs; i++)
1932 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
1935 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
1936 PARALLEL REGS. This is for calls that pass values in multiple
1937 non-contiguous locations. The Irix 6 ABI has examples of this. */
1940 use_group_regs (call_fusage, regs)
1946 for (i = 0; i < XVECLEN (regs, 0); i++)
1948 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
1950 /* A NULL entry means the parameter goes both on the stack and in
1951 registers. This can also be a MEM for targets that pass values
1952 partially on the stack and partially in registers. */
1953 if (reg != 0 && GET_CODE (reg) == REG)
1954 use_reg (call_fusage, reg);
1958 /* Generate several move instructions to clear LEN bytes of block TO.
1959 (A MEM rtx with BLKmode). The caller must pass TO through
1960 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
1964 clear_by_pieces (to, len, align)
1968 struct clear_by_pieces data;
1969 rtx to_addr = XEXP (to, 0);
1970 int max_size = MOVE_MAX + 1;
1973 data.to_addr = to_addr;
1976 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1977 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1979 data.explicit_inc_to = 0;
1981 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1982 if (data.reverse) data.offset = len;
1985 data.to_struct = MEM_IN_STRUCT_P (to);
1987 /* If copying requires more than two move insns,
1988 copy addresses to registers (to make displacements shorter)
1989 and use post-increment if available. */
1991 && move_by_pieces_ninsns (len, align) > 2)
1993 #ifdef HAVE_PRE_DECREMENT
1994 if (data.reverse && ! data.autinc_to)
1996 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1998 data.explicit_inc_to = -1;
2001 #ifdef HAVE_POST_INCREMENT
2002 if (! data.reverse && ! data.autinc_to)
2004 data.to_addr = copy_addr_to_reg (to_addr);
2006 data.explicit_inc_to = 1;
2009 if (!data.autinc_to && CONSTANT_P (to_addr))
2010 data.to_addr = copy_addr_to_reg (to_addr);
2013 if (! SLOW_UNALIGNED_ACCESS
2014 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2017 /* First move what we can in the largest integer mode, then go to
2018 successively smaller modes. */
2020 while (max_size > 1)
2022 enum machine_mode mode = VOIDmode, tmode;
2023 enum insn_code icode;
2025 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2026 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2027 if (GET_MODE_SIZE (tmode) < max_size)
2030 if (mode == VOIDmode)
2033 icode = mov_optab->handlers[(int) mode].insn_code;
2034 if (icode != CODE_FOR_nothing
2035 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2036 GET_MODE_SIZE (mode)))
2037 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2039 max_size = GET_MODE_SIZE (mode);
2042 /* The code above should have handled everything. */
2047 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2048 with move instructions for mode MODE. GENFUN is the gen_... function
2049 to make a move insn for that mode. DATA has all the other info. */
2052 clear_by_pieces_1 (genfun, mode, data)
2053 rtx (*genfun) PROTO ((rtx, ...));
2054 enum machine_mode mode;
2055 struct clear_by_pieces *data;
2057 register int size = GET_MODE_SIZE (mode);
2060 while (data->len >= size)
2062 if (data->reverse) data->offset -= size;
2064 to1 = (data->autinc_to
2065 ? gen_rtx_MEM (mode, data->to_addr)
2066 : copy_rtx (change_address (data->to, mode,
2067 plus_constant (data->to_addr,
2069 MEM_IN_STRUCT_P (to1) = data->to_struct;
2071 #ifdef HAVE_PRE_DECREMENT
2072 if (data->explicit_inc_to < 0)
2073 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2076 emit_insn ((*genfun) (to1, const0_rtx));
2077 #ifdef HAVE_POST_INCREMENT
2078 if (data->explicit_inc_to > 0)
2079 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2082 if (! data->reverse) data->offset += size;
2088 /* Write zeros through the storage of OBJECT.
2089 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2090 the maximum alignment we can is has, measured in bytes.
2092 If we call a function that returns the length of the block, return it. */
2095 clear_storage (object, size, align)
2102 if (GET_MODE (object) == BLKmode)
2104 object = protect_from_queue (object, 1);
2105 size = protect_from_queue (size, 0);
2107 if (GET_CODE (size) == CONST_INT
2108 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
2109 clear_by_pieces (object, INTVAL (size), align);
2113 /* Try the most limited insn first, because there's no point
2114 including more than one in the machine description unless
2115 the more limited one has some advantage. */
2117 rtx opalign = GEN_INT (align);
2118 enum machine_mode mode;
2120 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2121 mode = GET_MODE_WIDER_MODE (mode))
2123 enum insn_code code = clrstr_optab[(int) mode];
2125 if (code != CODE_FOR_nothing
2126 /* We don't need MODE to be narrower than
2127 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2128 the mode mask, as it is returned by the macro, it will
2129 definitely be less than the actual mode mask. */
2130 && ((GET_CODE (size) == CONST_INT
2131 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2132 <= (GET_MODE_MASK (mode) >> 1)))
2133 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2134 && (insn_operand_predicate[(int) code][0] == 0
2135 || (*insn_operand_predicate[(int) code][0]) (object,
2137 && (insn_operand_predicate[(int) code][2] == 0
2138 || (*insn_operand_predicate[(int) code][2]) (opalign,
2142 rtx last = get_last_insn ();
2145 op1 = convert_to_mode (mode, size, 1);
2146 if (insn_operand_predicate[(int) code][1] != 0
2147 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2149 op1 = copy_to_mode_reg (mode, op1);
2151 pat = GEN_FCN ((int) code) (object, op1, opalign);
2158 delete_insns_since (last);
2163 #ifdef TARGET_MEM_FUNCTIONS
2165 = emit_library_call_value (memset_libfunc, NULL_RTX, 0,
2167 XEXP (object, 0), Pmode,
2169 TYPE_MODE (integer_type_node),
2171 (TYPE_MODE (sizetype), size,
2172 TREE_UNSIGNED (sizetype)),
2173 TYPE_MODE (sizetype));
2175 emit_library_call (bzero_libfunc, 0,
2177 XEXP (object, 0), Pmode,
2179 (TYPE_MODE (integer_type_node), size,
2180 TREE_UNSIGNED (integer_type_node)),
2181 TYPE_MODE (integer_type_node));
2186 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2191 /* Generate code to copy Y into X.
2192 Both Y and X must have the same mode, except that
2193 Y can be a constant with VOIDmode.
2194 This mode cannot be BLKmode; use emit_block_move for that.
2196 Return the last instruction emitted. */
2199 emit_move_insn (x, y)
2202 enum machine_mode mode = GET_MODE (x);
2204 x = protect_from_queue (x, 1);
2205 y = protect_from_queue (y, 0);
2207 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2210 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2211 y = force_const_mem (mode, y);
2213 /* If X or Y are memory references, verify that their addresses are valid
2215 if (GET_CODE (x) == MEM
2216 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2217 && ! push_operand (x, GET_MODE (x)))
2219 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2220 x = change_address (x, VOIDmode, XEXP (x, 0));
2222 if (GET_CODE (y) == MEM
2223 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2225 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2226 y = change_address (y, VOIDmode, XEXP (y, 0));
2228 if (mode == BLKmode)
2231 return emit_move_insn_1 (x, y);
2234 /* Low level part of emit_move_insn.
2235 Called just like emit_move_insn, but assumes X and Y
2236 are basically valid. */
2239 emit_move_insn_1 (x, y)
2242 enum machine_mode mode = GET_MODE (x);
2243 enum machine_mode submode;
2244 enum mode_class class = GET_MODE_CLASS (mode);
2247 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2249 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2251 /* Expand complex moves by moving real part and imag part, if possible. */
2252 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2253 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2255 (class == MODE_COMPLEX_INT
2256 ? MODE_INT : MODE_FLOAT),
2258 && (mov_optab->handlers[(int) submode].insn_code
2259 != CODE_FOR_nothing))
2261 /* Don't split destination if it is a stack push. */
2262 int stack = push_operand (x, GET_MODE (x));
2264 /* If this is a stack, push the highpart first, so it
2265 will be in the argument order.
2267 In that case, change_address is used only to convert
2268 the mode, not to change the address. */
2271 /* Note that the real part always precedes the imag part in memory
2272 regardless of machine's endianness. */
2273 #ifdef STACK_GROWS_DOWNWARD
2274 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2275 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2276 gen_imagpart (submode, y)));
2277 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2278 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2279 gen_realpart (submode, y)));
2281 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2282 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2283 gen_realpart (submode, y)));
2284 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2285 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2286 gen_imagpart (submode, y)));
2291 /* Show the output dies here. */
2293 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
2295 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2296 (gen_realpart (submode, x), gen_realpart (submode, y)));
2297 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2298 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2301 return get_last_insn ();
2304 /* This will handle any multi-word mode that lacks a move_insn pattern.
2305 However, you will get better code if you define such patterns,
2306 even if they must turn into multiple assembler instructions. */
2307 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2311 #ifdef PUSH_ROUNDING
2313 /* If X is a push on the stack, do the push now and replace
2314 X with a reference to the stack pointer. */
2315 if (push_operand (x, GET_MODE (x)))
2317 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2318 x = change_address (x, VOIDmode, stack_pointer_rtx);
2322 /* Show the output dies here. */
2324 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2327 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2330 rtx xpart = operand_subword (x, i, 1, mode);
2331 rtx ypart = operand_subword (y, i, 1, mode);
2333 /* If we can't get a part of Y, put Y into memory if it is a
2334 constant. Otherwise, force it into a register. If we still
2335 can't get a part of Y, abort. */
2336 if (ypart == 0 && CONSTANT_P (y))
2338 y = force_const_mem (mode, y);
2339 ypart = operand_subword (y, i, 1, mode);
2341 else if (ypart == 0)
2342 ypart = operand_subword_force (y, i, mode);
2344 if (xpart == 0 || ypart == 0)
2347 last_insn = emit_move_insn (xpart, ypart);
2356 /* Pushing data onto the stack. */
2358 /* Push a block of length SIZE (perhaps variable)
2359 and return an rtx to address the beginning of the block.
2360 Note that it is not possible for the value returned to be a QUEUED.
2361 The value may be virtual_outgoing_args_rtx.
2363 EXTRA is the number of bytes of padding to push in addition to SIZE.
2364 BELOW nonzero means this padding comes at low addresses;
2365 otherwise, the padding comes at high addresses. */
2368 push_block (size, extra, below)
2374 size = convert_modes (Pmode, ptr_mode, size, 1);
2375 if (CONSTANT_P (size))
2376 anti_adjust_stack (plus_constant (size, extra));
2377 else if (GET_CODE (size) == REG && extra == 0)
2378 anti_adjust_stack (size);
2381 rtx temp = copy_to_mode_reg (Pmode, size);
2383 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2384 temp, 0, OPTAB_LIB_WIDEN);
2385 anti_adjust_stack (temp);
2388 #ifdef STACK_GROWS_DOWNWARD
2389 temp = virtual_outgoing_args_rtx;
2390 if (extra != 0 && below)
2391 temp = plus_constant (temp, extra);
2393 if (GET_CODE (size) == CONST_INT)
2394 temp = plus_constant (virtual_outgoing_args_rtx,
2395 - INTVAL (size) - (below ? 0 : extra));
2396 else if (extra != 0 && !below)
2397 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2398 negate_rtx (Pmode, plus_constant (size, extra)));
2400 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2401 negate_rtx (Pmode, size));
2404 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2410 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2413 /* Return an rtx for the address of the beginning of a as-if-it-was-pushed
2414 block of SIZE bytes. */
2417 get_push_address (size)
2422 if (STACK_PUSH_CODE == POST_DEC)
2423 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2424 else if (STACK_PUSH_CODE == POST_INC)
2425 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2427 temp = stack_pointer_rtx;
2429 return copy_to_reg (temp);
2432 /* Generate code to push X onto the stack, assuming it has mode MODE and
2434 MODE is redundant except when X is a CONST_INT (since they don't
2436 SIZE is an rtx for the size of data to be copied (in bytes),
2437 needed only if X is BLKmode.
2439 ALIGN (in bytes) is maximum alignment we can assume.
2441 If PARTIAL and REG are both nonzero, then copy that many of the first
2442 words of X into registers starting with REG, and push the rest of X.
2443 The amount of space pushed is decreased by PARTIAL words,
2444 rounded *down* to a multiple of PARM_BOUNDARY.
2445 REG must be a hard register in this case.
2446 If REG is zero but PARTIAL is not, take any all others actions for an
2447 argument partially in registers, but do not actually load any
2450 EXTRA is the amount in bytes of extra space to leave next to this arg.
2451 This is ignored if an argument block has already been allocated.
2453 On a machine that lacks real push insns, ARGS_ADDR is the address of
2454 the bottom of the argument block for this call. We use indexing off there
2455 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
2456 argument block has not been preallocated.
2458 ARGS_SO_FAR is the size of args previously pushed for this call.
2460 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2461 for arguments passed in registers. If nonzero, it will be the number
2462 of bytes required. */
2465 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2466 args_addr, args_so_far, reg_parm_stack_space)
2468 enum machine_mode mode;
2477 int reg_parm_stack_space;
2480 enum direction stack_direction
2481 #ifdef STACK_GROWS_DOWNWARD
2487 /* Decide where to pad the argument: `downward' for below,
2488 `upward' for above, or `none' for don't pad it.
2489 Default is below for small data on big-endian machines; else above. */
2490 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2492 /* Invert direction if stack is post-update. */
2493 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2494 if (where_pad != none)
2495 where_pad = (where_pad == downward ? upward : downward);
2497 xinner = x = protect_from_queue (x, 0);
2499 if (mode == BLKmode)
2501 /* Copy a block into the stack, entirely or partially. */
2504 int used = partial * UNITS_PER_WORD;
2505 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2513 /* USED is now the # of bytes we need not copy to the stack
2514 because registers will take care of them. */
2517 xinner = change_address (xinner, BLKmode,
2518 plus_constant (XEXP (xinner, 0), used));
2520 /* If the partial register-part of the arg counts in its stack size,
2521 skip the part of stack space corresponding to the registers.
2522 Otherwise, start copying to the beginning of the stack space,
2523 by setting SKIP to 0. */
2524 skip = (reg_parm_stack_space == 0) ? 0 : used;
2526 #ifdef PUSH_ROUNDING
2527 /* Do it with several push insns if that doesn't take lots of insns
2528 and if there is no difficulty with push insns that skip bytes
2529 on the stack for alignment purposes. */
2531 && GET_CODE (size) == CONST_INT
2533 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2535 /* Here we avoid the case of a structure whose weak alignment
2536 forces many pushes of a small amount of data,
2537 and such small pushes do rounding that causes trouble. */
2538 && ((! SLOW_UNALIGNED_ACCESS)
2539 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2540 || PUSH_ROUNDING (align) == align)
2541 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2543 /* Push padding now if padding above and stack grows down,
2544 or if padding below and stack grows up.
2545 But if space already allocated, this has already been done. */
2546 if (extra && args_addr == 0
2547 && where_pad != none && where_pad != stack_direction)
2548 anti_adjust_stack (GEN_INT (extra));
2550 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
2551 INTVAL (size) - used, align);
2553 if (flag_check_memory_usage && ! in_check_memory_usage)
2557 in_check_memory_usage = 1;
2558 temp = get_push_address (INTVAL(size) - used);
2559 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2560 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2562 XEXP (xinner, 0), ptr_mode,
2563 GEN_INT (INTVAL(size) - used),
2564 TYPE_MODE (sizetype));
2566 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2568 GEN_INT (INTVAL(size) - used),
2569 TYPE_MODE (sizetype),
2570 GEN_INT (MEMORY_USE_RW),
2571 TYPE_MODE (integer_type_node));
2572 in_check_memory_usage = 0;
2576 #endif /* PUSH_ROUNDING */
2578 /* Otherwise make space on the stack and copy the data
2579 to the address of that space. */
2581 /* Deduct words put into registers from the size we must copy. */
2584 if (GET_CODE (size) == CONST_INT)
2585 size = GEN_INT (INTVAL (size) - used);
2587 size = expand_binop (GET_MODE (size), sub_optab, size,
2588 GEN_INT (used), NULL_RTX, 0,
2592 /* Get the address of the stack space.
2593 In this case, we do not deal with EXTRA separately.
2594 A single stack adjust will do. */
2597 temp = push_block (size, extra, where_pad == downward);
2600 else if (GET_CODE (args_so_far) == CONST_INT)
2601 temp = memory_address (BLKmode,
2602 plus_constant (args_addr,
2603 skip + INTVAL (args_so_far)));
2605 temp = memory_address (BLKmode,
2606 plus_constant (gen_rtx_PLUS (Pmode,
2610 if (flag_check_memory_usage && ! in_check_memory_usage)
2614 in_check_memory_usage = 1;
2615 target = copy_to_reg (temp);
2616 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2617 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2619 XEXP (xinner, 0), ptr_mode,
2620 size, TYPE_MODE (sizetype));
2622 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2624 size, TYPE_MODE (sizetype),
2625 GEN_INT (MEMORY_USE_RW),
2626 TYPE_MODE (integer_type_node));
2627 in_check_memory_usage = 0;
2630 /* TEMP is the address of the block. Copy the data there. */
2631 if (GET_CODE (size) == CONST_INT
2632 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2635 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
2636 INTVAL (size), align);
2641 rtx opalign = GEN_INT (align);
2642 enum machine_mode mode;
2643 rtx target = gen_rtx (MEM, BLKmode, temp);
2645 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2647 mode = GET_MODE_WIDER_MODE (mode))
2649 enum insn_code code = movstr_optab[(int) mode];
2651 if (code != CODE_FOR_nothing
2652 && ((GET_CODE (size) == CONST_INT
2653 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2654 <= (GET_MODE_MASK (mode) >> 1)))
2655 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2656 && (insn_operand_predicate[(int) code][0] == 0
2657 || ((*insn_operand_predicate[(int) code][0])
2659 && (insn_operand_predicate[(int) code][1] == 0
2660 || ((*insn_operand_predicate[(int) code][1])
2662 && (insn_operand_predicate[(int) code][3] == 0
2663 || ((*insn_operand_predicate[(int) code][3])
2664 (opalign, VOIDmode))))
2666 rtx op2 = convert_to_mode (mode, size, 1);
2667 rtx last = get_last_insn ();
2670 if (insn_operand_predicate[(int) code][2] != 0
2671 && ! ((*insn_operand_predicate[(int) code][2])
2673 op2 = copy_to_mode_reg (mode, op2);
2675 pat = GEN_FCN ((int) code) (target, xinner,
2683 delete_insns_since (last);
2688 #ifndef ACCUMULATE_OUTGOING_ARGS
2689 /* If the source is referenced relative to the stack pointer,
2690 copy it to another register to stabilize it. We do not need
2691 to do this if we know that we won't be changing sp. */
2693 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2694 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2695 temp = copy_to_reg (temp);
2698 /* Make inhibit_defer_pop nonzero around the library call
2699 to force it to pop the bcopy-arguments right away. */
2701 #ifdef TARGET_MEM_FUNCTIONS
2702 emit_library_call (memcpy_libfunc, 0,
2703 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2704 convert_to_mode (TYPE_MODE (sizetype),
2705 size, TREE_UNSIGNED (sizetype)),
2706 TYPE_MODE (sizetype));
2708 emit_library_call (bcopy_libfunc, 0,
2709 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2710 convert_to_mode (TYPE_MODE (integer_type_node),
2712 TREE_UNSIGNED (integer_type_node)),
2713 TYPE_MODE (integer_type_node));
2718 else if (partial > 0)
2720 /* Scalar partly in registers. */
2722 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2725 /* # words of start of argument
2726 that we must make space for but need not store. */
2727 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2728 int args_offset = INTVAL (args_so_far);
2731 /* Push padding now if padding above and stack grows down,
2732 or if padding below and stack grows up.
2733 But if space already allocated, this has already been done. */
2734 if (extra && args_addr == 0
2735 && where_pad != none && where_pad != stack_direction)
2736 anti_adjust_stack (GEN_INT (extra));
2738 /* If we make space by pushing it, we might as well push
2739 the real data. Otherwise, we can leave OFFSET nonzero
2740 and leave the space uninitialized. */
2744 /* Now NOT_STACK gets the number of words that we don't need to
2745 allocate on the stack. */
2746 not_stack = partial - offset;
2748 /* If the partial register-part of the arg counts in its stack size,
2749 skip the part of stack space corresponding to the registers.
2750 Otherwise, start copying to the beginning of the stack space,
2751 by setting SKIP to 0. */
2752 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
2754 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2755 x = validize_mem (force_const_mem (mode, x));
2757 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2758 SUBREGs of such registers are not allowed. */
2759 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2760 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2761 x = copy_to_reg (x);
2763 /* Loop over all the words allocated on the stack for this arg. */
2764 /* We can do it by words, because any scalar bigger than a word
2765 has a size a multiple of a word. */
2766 #ifndef PUSH_ARGS_REVERSED
2767 for (i = not_stack; i < size; i++)
2769 for (i = size - 1; i >= not_stack; i--)
2771 if (i >= not_stack + offset)
2772 emit_push_insn (operand_subword_force (x, i, mode),
2773 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2775 GEN_INT (args_offset + ((i - not_stack + skip)
2777 reg_parm_stack_space);
2782 rtx target = NULL_RTX;
2784 /* Push padding now if padding above and stack grows down,
2785 or if padding below and stack grows up.
2786 But if space already allocated, this has already been done. */
2787 if (extra && args_addr == 0
2788 && where_pad != none && where_pad != stack_direction)
2789 anti_adjust_stack (GEN_INT (extra));
2791 #ifdef PUSH_ROUNDING
2793 addr = gen_push_operand ();
2797 if (GET_CODE (args_so_far) == CONST_INT)
2799 = memory_address (mode,
2800 plus_constant (args_addr,
2801 INTVAL (args_so_far)));
2803 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
2808 emit_move_insn (gen_rtx_MEM (mode, addr), x);
2810 if (flag_check_memory_usage && ! in_check_memory_usage)
2812 in_check_memory_usage = 1;
2814 target = get_push_address (GET_MODE_SIZE (mode));
2816 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2817 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2819 XEXP (x, 0), ptr_mode,
2820 GEN_INT (GET_MODE_SIZE (mode)),
2821 TYPE_MODE (sizetype));
2823 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2825 GEN_INT (GET_MODE_SIZE (mode)),
2826 TYPE_MODE (sizetype),
2827 GEN_INT (MEMORY_USE_RW),
2828 TYPE_MODE (integer_type_node));
2829 in_check_memory_usage = 0;
2834 /* If part should go in registers, copy that part
2835 into the appropriate registers. Do this now, at the end,
2836 since mem-to-mem copies above may do function calls. */
2837 if (partial > 0 && reg != 0)
2839 /* Handle calls that pass values in multiple non-contiguous locations.
2840 The Irix 6 ABI has examples of this. */
2841 if (GET_CODE (reg) == PARALLEL)
2842 emit_group_load (reg, x);
2844 move_block_to_reg (REGNO (reg), x, partial, mode);
2847 if (extra && args_addr == 0 && where_pad == stack_direction)
2848 anti_adjust_stack (GEN_INT (extra));
2851 /* Expand an assignment that stores the value of FROM into TO.
2852 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2853 (This may contain a QUEUED rtx;
2854 if the value is constant, this rtx is a constant.)
2855 Otherwise, the returned value is NULL_RTX.
2857 SUGGEST_REG is no longer actually used.
2858 It used to mean, copy the value through a register
2859 and return that register, if that is possible.
2860 We now use WANT_VALUE to decide whether to do this. */
2863 expand_assignment (to, from, want_value, suggest_reg)
2868 register rtx to_rtx = 0;
2871 /* Don't crash if the lhs of the assignment was erroneous. */
2873 if (TREE_CODE (to) == ERROR_MARK)
2875 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2876 return want_value ? result : NULL_RTX;
2879 /* Assignment of a structure component needs special treatment
2880 if the structure component's rtx is not simply a MEM.
2881 Assignment of an array element at a constant index, and assignment of
2882 an array element in an unaligned packed structure field, has the same
2885 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
2886 || TREE_CODE (to) == ARRAY_REF)
2888 enum machine_mode mode1;
2898 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
2899 &unsignedp, &volatilep, &alignment);
2901 /* If we are going to use store_bit_field and extract_bit_field,
2902 make sure to_rtx will be safe for multiple use. */
2904 if (mode1 == VOIDmode && want_value)
2905 tem = stabilize_reference (tem);
2907 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
2910 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2912 if (GET_CODE (to_rtx) != MEM)
2915 if (GET_MODE (offset_rtx) != ptr_mode)
2917 #ifdef POINTERS_EXTEND_UNSIGNED
2918 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 1);
2920 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
2924 to_rtx = change_address (to_rtx, VOIDmode,
2925 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
2926 force_reg (ptr_mode, offset_rtx)));
2930 if (GET_CODE (to_rtx) == MEM)
2932 /* When the offset is zero, to_rtx is the address of the
2933 structure we are storing into, and hence may be shared.
2934 We must make a new MEM before setting the volatile bit. */
2936 to_rtx = copy_rtx (to_rtx);
2938 MEM_VOLATILE_P (to_rtx) = 1;
2940 #if 0 /* This was turned off because, when a field is volatile
2941 in an object which is not volatile, the object may be in a register,
2942 and then we would abort over here. */
2948 if (TREE_CODE (to) == COMPONENT_REF
2949 && TREE_READONLY (TREE_OPERAND (to, 1)))
2952 to_rtx = copy_rtx (to_rtx);
2954 RTX_UNCHANGING_P (to_rtx) = 1;
2957 /* Check the access. */
2958 if (flag_check_memory_usage && GET_CODE (to_rtx) == MEM)
2963 enum machine_mode best_mode;
2965 best_mode = get_best_mode (bitsize, bitpos,
2966 TYPE_ALIGN (TREE_TYPE (tem)),
2968 if (best_mode == VOIDmode)
2971 best_mode_size = GET_MODE_BITSIZE (best_mode);
2972 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
2973 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
2974 size *= GET_MODE_SIZE (best_mode);
2976 /* Check the access right of the pointer. */
2978 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
2980 GEN_INT (size), TYPE_MODE (sizetype),
2981 GEN_INT (MEMORY_USE_WO),
2982 TYPE_MODE (integer_type_node));
2985 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2987 /* Spurious cast makes HPUX compiler happy. */
2988 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2991 /* Required alignment of containing datum. */
2993 int_size_in_bytes (TREE_TYPE (tem)));
2994 preserve_temp_slots (result);
2998 /* If the value is meaningful, convert RESULT to the proper mode.
2999 Otherwise, return nothing. */
3000 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3001 TYPE_MODE (TREE_TYPE (from)),
3003 TREE_UNSIGNED (TREE_TYPE (to)))
3007 /* If the rhs is a function call and its value is not an aggregate,
3008 call the function before we start to compute the lhs.
3009 This is needed for correct code for cases such as
3010 val = setjmp (buf) on machines where reference to val
3011 requires loading up part of an address in a separate insn.
3013 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3014 a promoted variable where the zero- or sign- extension needs to be done.
3015 Handling this in the normal way is safe because no computation is done
3017 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3018 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3019 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
3024 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3026 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3028 /* Handle calls that return values in multiple non-contiguous locations.
3029 The Irix 6 ABI has examples of this. */
3030 if (GET_CODE (to_rtx) == PARALLEL)
3031 emit_group_load (to_rtx, value);
3032 else if (GET_MODE (to_rtx) == BLKmode)
3033 emit_block_move (to_rtx, value, expr_size (from),
3034 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3036 emit_move_insn (to_rtx, value);
3037 preserve_temp_slots (to_rtx);
3040 return want_value ? to_rtx : NULL_RTX;
3043 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3044 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3047 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3049 /* Don't move directly into a return register. */
3050 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3055 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3056 emit_move_insn (to_rtx, temp);
3057 preserve_temp_slots (to_rtx);
3060 return want_value ? to_rtx : NULL_RTX;
3063 /* In case we are returning the contents of an object which overlaps
3064 the place the value is being stored, use a safe function when copying
3065 a value through a pointer into a structure value return block. */
3066 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3067 && current_function_returns_struct
3068 && !current_function_returns_pcc_struct)
3073 size = expr_size (from);
3074 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3075 EXPAND_MEMORY_USE_DONT);
3077 /* Copy the rights of the bitmap. */
3078 if (flag_check_memory_usage)
3079 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3080 XEXP (to_rtx, 0), ptr_mode,
3081 XEXP (from_rtx, 0), ptr_mode,
3082 convert_to_mode (TYPE_MODE (sizetype),
3083 size, TREE_UNSIGNED (sizetype)),
3084 TYPE_MODE (sizetype));
3086 #ifdef TARGET_MEM_FUNCTIONS
3087 emit_library_call (memcpy_libfunc, 0,
3088 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3089 XEXP (from_rtx, 0), Pmode,
3090 convert_to_mode (TYPE_MODE (sizetype),
3091 size, TREE_UNSIGNED (sizetype)),
3092 TYPE_MODE (sizetype));
3094 emit_library_call (bcopy_libfunc, 0,
3095 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3096 XEXP (to_rtx, 0), Pmode,
3097 convert_to_mode (TYPE_MODE (integer_type_node),
3098 size, TREE_UNSIGNED (integer_type_node)),
3099 TYPE_MODE (integer_type_node));
3102 preserve_temp_slots (to_rtx);
3105 return want_value ? to_rtx : NULL_RTX;
3108 /* Compute FROM and store the value in the rtx we got. */
3111 result = store_expr (from, to_rtx, want_value);
3112 preserve_temp_slots (result);
3115 return want_value ? result : NULL_RTX;
3118 /* Generate code for computing expression EXP,
3119 and storing the value into TARGET.
3120 TARGET may contain a QUEUED rtx.
3122 If WANT_VALUE is nonzero, return a copy of the value
3123 not in TARGET, so that we can be sure to use the proper
3124 value in a containing expression even if TARGET has something
3125 else stored in it. If possible, we copy the value through a pseudo
3126 and return that pseudo. Or, if the value is constant, we try to
3127 return the constant. In some cases, we return a pseudo
3128 copied *from* TARGET.
3130 If the mode is BLKmode then we may return TARGET itself.
3131 It turns out that in BLKmode it doesn't cause a problem.
3132 because C has no operators that could combine two different
3133 assignments into the same BLKmode object with different values
3134 with no sequence point. Will other languages need this to
3137 If WANT_VALUE is 0, we return NULL, to make sure
3138 to catch quickly any cases where the caller uses the value
3139 and fails to set WANT_VALUE. */
3142 store_expr (exp, target, want_value)
3144 register rtx target;
3148 int dont_return_target = 0;
3150 if (TREE_CODE (exp) == COMPOUND_EXPR)
3152 /* Perform first part of compound expression, then assign from second
3154 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3156 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3158 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3160 /* For conditional expression, get safe form of the target. Then
3161 test the condition, doing the appropriate assignment on either
3162 side. This avoids the creation of unnecessary temporaries.
3163 For non-BLKmode, it is more efficient not to do this. */
3165 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3168 target = protect_from_queue (target, 1);
3170 do_pending_stack_adjust ();
3172 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3173 start_cleanup_deferral ();
3174 store_expr (TREE_OPERAND (exp, 1), target, 0);
3175 end_cleanup_deferral ();
3177 emit_jump_insn (gen_jump (lab2));
3180 start_cleanup_deferral ();
3181 store_expr (TREE_OPERAND (exp, 2), target, 0);
3182 end_cleanup_deferral ();
3187 return want_value ? target : NULL_RTX;
3189 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3190 && GET_MODE (target) != BLKmode)
3191 /* If target is in memory and caller wants value in a register instead,
3192 arrange that. Pass TARGET as target for expand_expr so that,
3193 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3194 We know expand_expr will not use the target in that case.
3195 Don't do this if TARGET is volatile because we are supposed
3196 to write it and then read it. */
3198 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3199 GET_MODE (target), 0);
3200 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3201 temp = copy_to_reg (temp);
3202 dont_return_target = 1;
3204 else if (queued_subexp_p (target))
3205 /* If target contains a postincrement, let's not risk
3206 using it as the place to generate the rhs. */
3208 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3210 /* Expand EXP into a new pseudo. */
3211 temp = gen_reg_rtx (GET_MODE (target));
3212 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3215 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3217 /* If target is volatile, ANSI requires accessing the value
3218 *from* the target, if it is accessed. So make that happen.
3219 In no case return the target itself. */
3220 if (! MEM_VOLATILE_P (target) && want_value)
3221 dont_return_target = 1;
3223 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3224 /* If this is an scalar in a register that is stored in a wider mode
3225 than the declared mode, compute the result into its declared mode
3226 and then convert to the wider mode. Our value is the computed
3229 /* If we don't want a value, we can do the conversion inside EXP,
3230 which will often result in some optimizations. Do the conversion
3231 in two steps: first change the signedness, if needed, then
3232 the extend. But don't do this if the type of EXP is a subtype
3233 of something else since then the conversion might involve
3234 more than just converting modes. */
3235 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3236 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3238 if (TREE_UNSIGNED (TREE_TYPE (exp))
3239 != SUBREG_PROMOTED_UNSIGNED_P (target))
3242 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3246 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3247 SUBREG_PROMOTED_UNSIGNED_P (target)),
3251 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3253 /* If TEMP is a volatile MEM and we want a result value, make
3254 the access now so it gets done only once. Likewise if
3255 it contains TARGET. */
3256 if (GET_CODE (temp) == MEM && want_value
3257 && (MEM_VOLATILE_P (temp)
3258 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3259 temp = copy_to_reg (temp);
3261 /* If TEMP is a VOIDmode constant, use convert_modes to make
3262 sure that we properly convert it. */
3263 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3264 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3265 TYPE_MODE (TREE_TYPE (exp)), temp,
3266 SUBREG_PROMOTED_UNSIGNED_P (target));
3268 convert_move (SUBREG_REG (target), temp,
3269 SUBREG_PROMOTED_UNSIGNED_P (target));
3270 return want_value ? temp : NULL_RTX;
3274 temp = expand_expr (exp, target, GET_MODE (target), 0);
3275 /* Return TARGET if it's a specified hardware register.
3276 If TARGET is a volatile mem ref, either return TARGET
3277 or return a reg copied *from* TARGET; ANSI requires this.
3279 Otherwise, if TEMP is not TARGET, return TEMP
3280 if it is constant (for efficiency),
3281 or if we really want the correct value. */
3282 if (!(target && GET_CODE (target) == REG
3283 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3284 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3285 && ! rtx_equal_p (temp, target)
3286 && (CONSTANT_P (temp) || want_value))
3287 dont_return_target = 1;
3290 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3291 the same as that of TARGET, adjust the constant. This is needed, for
3292 example, in case it is a CONST_DOUBLE and we want only a word-sized
3294 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3295 && TREE_CODE (exp) != ERROR_MARK
3296 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3297 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3298 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3300 if (flag_check_memory_usage
3301 && GET_CODE (target) == MEM
3302 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3304 if (GET_CODE (temp) == MEM)
3305 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3306 XEXP (target, 0), ptr_mode,
3307 XEXP (temp, 0), ptr_mode,
3308 expr_size (exp), TYPE_MODE (sizetype));
3310 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3311 XEXP (target, 0), ptr_mode,
3312 expr_size (exp), TYPE_MODE (sizetype),
3313 GEN_INT (MEMORY_USE_WO),
3314 TYPE_MODE (integer_type_node));
3317 /* If value was not generated in the target, store it there.
3318 Convert the value to TARGET's type first if nec. */
3320 if (! rtx_equal_p (temp, target) && TREE_CODE (exp) != ERROR_MARK)
3322 target = protect_from_queue (target, 1);
3323 if (GET_MODE (temp) != GET_MODE (target)
3324 && GET_MODE (temp) != VOIDmode)
3326 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3327 if (dont_return_target)
3329 /* In this case, we will return TEMP,
3330 so make sure it has the proper mode.
3331 But don't forget to store the value into TARGET. */
3332 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3333 emit_move_insn (target, temp);
3336 convert_move (target, temp, unsignedp);
3339 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3341 /* Handle copying a string constant into an array.
3342 The string constant may be shorter than the array.
3343 So copy just the string's actual length, and clear the rest. */
3347 /* Get the size of the data type of the string,
3348 which is actually the size of the target. */
3349 size = expr_size (exp);
3350 if (GET_CODE (size) == CONST_INT
3351 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3352 emit_block_move (target, temp, size,
3353 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3356 /* Compute the size of the data to copy from the string. */
3358 = size_binop (MIN_EXPR,
3359 make_tree (sizetype, size),
3361 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3362 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3366 /* Copy that much. */
3367 emit_block_move (target, temp, copy_size_rtx,
3368 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3370 /* Figure out how much is left in TARGET that we have to clear.
3371 Do all calculations in ptr_mode. */
3373 addr = XEXP (target, 0);
3374 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3376 if (GET_CODE (copy_size_rtx) == CONST_INT)
3378 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3379 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3383 addr = force_reg (ptr_mode, addr);
3384 addr = expand_binop (ptr_mode, add_optab, addr,
3385 copy_size_rtx, NULL_RTX, 0,
3388 size = expand_binop (ptr_mode, sub_optab, size,
3389 copy_size_rtx, NULL_RTX, 0,
3392 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
3393 GET_MODE (size), 0, 0);
3394 label = gen_label_rtx ();
3395 emit_jump_insn (gen_blt (label));
3398 if (size != const0_rtx)
3400 /* Be sure we can write on ADDR. */
3401 if (flag_check_memory_usage)
3402 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3404 size, TYPE_MODE (sizetype),
3405 GEN_INT (MEMORY_USE_WO),
3406 TYPE_MODE (integer_type_node));
3407 #ifdef TARGET_MEM_FUNCTIONS
3408 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3410 const0_rtx, TYPE_MODE (integer_type_node),
3411 convert_to_mode (TYPE_MODE (sizetype),
3413 TREE_UNSIGNED (sizetype)),
3414 TYPE_MODE (sizetype));
3416 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3418 convert_to_mode (TYPE_MODE (integer_type_node),
3420 TREE_UNSIGNED (integer_type_node)),
3421 TYPE_MODE (integer_type_node));
3429 /* Handle calls that return values in multiple non-contiguous locations.
3430 The Irix 6 ABI has examples of this. */
3431 else if (GET_CODE (target) == PARALLEL)
3432 emit_group_load (target, temp);
3433 else if (GET_MODE (temp) == BLKmode)
3434 emit_block_move (target, temp, expr_size (exp),
3435 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3437 emit_move_insn (target, temp);
3440 /* If we don't want a value, return NULL_RTX. */
3444 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3445 ??? The latter test doesn't seem to make sense. */
3446 else if (dont_return_target && GET_CODE (temp) != MEM)
3449 /* Return TARGET itself if it is a hard register. */
3450 else if (want_value && GET_MODE (target) != BLKmode
3451 && ! (GET_CODE (target) == REG
3452 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3453 return copy_to_reg (target);
3459 /* Return 1 if EXP just contains zeros. */
3467 switch (TREE_CODE (exp))
3471 case NON_LVALUE_EXPR:
3472 return is_zeros_p (TREE_OPERAND (exp, 0));
3475 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3479 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3482 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
3485 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3486 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3487 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3488 if (! is_zeros_p (TREE_VALUE (elt)))
3498 /* Return 1 if EXP contains mostly (3/4) zeros. */
3501 mostly_zeros_p (exp)
3504 if (TREE_CODE (exp) == CONSTRUCTOR)
3506 int elts = 0, zeros = 0;
3507 tree elt = CONSTRUCTOR_ELTS (exp);
3508 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3510 /* If there are no ranges of true bits, it is all zero. */
3511 return elt == NULL_TREE;
3513 for (; elt; elt = TREE_CHAIN (elt))
3515 /* We do not handle the case where the index is a RANGE_EXPR,
3516 so the statistic will be somewhat inaccurate.
3517 We do make a more accurate count in store_constructor itself,
3518 so since this function is only used for nested array elements,
3519 this should be close enough. */
3520 if (mostly_zeros_p (TREE_VALUE (elt)))
3525 return 4 * zeros >= 3 * elts;
3528 return is_zeros_p (exp);
3531 /* Helper function for store_constructor.
3532 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3533 TYPE is the type of the CONSTRUCTOR, not the element type.
3534 CLEARED is as for store_constructor.
3536 This provides a recursive shortcut back to store_constructor when it isn't
3537 necessary to go through store_field. This is so that we can pass through
3538 the cleared field to let store_constructor know that we may not have to
3539 clear a substructure if the outer structure has already been cleared. */
3542 store_constructor_field (target, bitsize, bitpos,
3543 mode, exp, type, cleared)
3545 int bitsize, bitpos;
3546 enum machine_mode mode;
3550 if (TREE_CODE (exp) == CONSTRUCTOR
3551 && bitpos % BITS_PER_UNIT == 0
3552 /* If we have a non-zero bitpos for a register target, then we just
3553 let store_field do the bitfield handling. This is unlikely to
3554 generate unnecessary clear instructions anyways. */
3555 && (bitpos == 0 || GET_CODE (target) == MEM))
3558 target = change_address (target, VOIDmode,
3559 plus_constant (XEXP (target, 0),
3560 bitpos / BITS_PER_UNIT));
3561 store_constructor (exp, target, cleared);
3564 store_field (target, bitsize, bitpos, mode, exp,
3565 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3566 int_size_in_bytes (type));
3569 /* Store the value of constructor EXP into the rtx TARGET.
3570 TARGET is either a REG or a MEM.
3571 CLEARED is true if TARGET is known to have been zero'd. */
3574 store_constructor (exp, target, cleared)
3579 tree type = TREE_TYPE (exp);
3581 /* We know our target cannot conflict, since safe_from_p has been called. */
3583 /* Don't try copying piece by piece into a hard register
3584 since that is vulnerable to being clobbered by EXP.
3585 Instead, construct in a pseudo register and then copy it all. */
3586 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3588 rtx temp = gen_reg_rtx (GET_MODE (target));
3589 store_constructor (exp, temp, 0);
3590 emit_move_insn (target, temp);
3595 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3596 || TREE_CODE (type) == QUAL_UNION_TYPE)
3600 /* Inform later passes that the whole union value is dead. */
3601 if (TREE_CODE (type) == UNION_TYPE
3602 || TREE_CODE (type) == QUAL_UNION_TYPE)
3603 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
3605 /* If we are building a static constructor into a register,
3606 set the initial value as zero so we can fold the value into
3607 a constant. But if more than one register is involved,
3608 this probably loses. */
3609 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3610 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
3613 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
3618 /* If the constructor has fewer fields than the structure
3619 or if we are initializing the structure to mostly zeros,
3620 clear the whole structure first. */
3621 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3622 != list_length (TYPE_FIELDS (type)))
3623 || mostly_zeros_p (exp))
3626 clear_storage (target, expr_size (exp),
3627 TYPE_ALIGN (type) / BITS_PER_UNIT);
3632 /* Inform later passes that the old value is dead. */
3633 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
3635 /* Store each element of the constructor into
3636 the corresponding field of TARGET. */
3638 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3640 register tree field = TREE_PURPOSE (elt);
3641 register enum machine_mode mode;
3645 tree pos, constant = 0, offset = 0;
3646 rtx to_rtx = target;
3648 /* Just ignore missing fields.
3649 We cleared the whole structure, above,
3650 if any fields are missing. */
3654 if (cleared && is_zeros_p (TREE_VALUE (elt)))
3657 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3658 unsignedp = TREE_UNSIGNED (field);
3659 mode = DECL_MODE (field);
3660 if (DECL_BIT_FIELD (field))
3663 pos = DECL_FIELD_BITPOS (field);
3664 if (TREE_CODE (pos) == INTEGER_CST)
3666 else if (TREE_CODE (pos) == PLUS_EXPR
3667 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3668 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3673 bitpos = TREE_INT_CST_LOW (constant);
3679 if (contains_placeholder_p (offset))
3680 offset = build (WITH_RECORD_EXPR, sizetype,
3681 offset, make_tree (TREE_TYPE (exp), target));
3683 offset = size_binop (FLOOR_DIV_EXPR, offset,
3684 size_int (BITS_PER_UNIT));
3686 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3687 if (GET_CODE (to_rtx) != MEM)
3690 if (GET_MODE (offset_rtx) != ptr_mode)
3692 #ifdef POINTERS_EXTEND_UNSIGNED
3693 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 1);
3695 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3700 = change_address (to_rtx, VOIDmode,
3701 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3702 force_reg (ptr_mode, offset_rtx)));
3704 if (TREE_READONLY (field))
3706 if (GET_CODE (to_rtx) == MEM)
3707 to_rtx = copy_rtx (to_rtx);
3709 RTX_UNCHANGING_P (to_rtx) = 1;
3712 store_constructor_field (to_rtx, bitsize, bitpos,
3713 mode, TREE_VALUE (elt), type, cleared);
3716 else if (TREE_CODE (type) == ARRAY_TYPE)
3721 tree domain = TYPE_DOMAIN (type);
3722 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3723 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3724 tree elttype = TREE_TYPE (type);
3726 /* If the constructor has fewer elements than the array,
3727 clear the whole array first. Similarly if this is
3728 static constructor of a non-BLKmode object. */
3729 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3733 HOST_WIDE_INT count = 0, zero_count = 0;
3735 /* This loop is a more accurate version of the loop in
3736 mostly_zeros_p (it handles RANGE_EXPR in an index).
3737 It is also needed to check for missing elements. */
3738 for (elt = CONSTRUCTOR_ELTS (exp);
3740 elt = TREE_CHAIN (elt))
3742 tree index = TREE_PURPOSE (elt);
3743 HOST_WIDE_INT this_node_count;
3744 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3746 tree lo_index = TREE_OPERAND (index, 0);
3747 tree hi_index = TREE_OPERAND (index, 1);
3748 if (TREE_CODE (lo_index) != INTEGER_CST
3749 || TREE_CODE (hi_index) != INTEGER_CST)
3754 this_node_count = TREE_INT_CST_LOW (hi_index)
3755 - TREE_INT_CST_LOW (lo_index) + 1;
3758 this_node_count = 1;
3759 count += this_node_count;
3760 if (mostly_zeros_p (TREE_VALUE (elt)))
3761 zero_count += this_node_count;
3763 /* Clear the entire array first if there are any missing elements,
3764 or if the incidence of zero elements is >= 75%. */
3765 if (count < maxelt - minelt + 1
3766 || 4 * zero_count >= 3 * count)
3772 clear_storage (target, expr_size (exp),
3773 TYPE_ALIGN (type) / BITS_PER_UNIT);
3777 /* Inform later passes that the old value is dead. */
3778 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
3780 /* Store each element of the constructor into
3781 the corresponding element of TARGET, determined
3782 by counting the elements. */
3783 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3785 elt = TREE_CHAIN (elt), i++)
3787 register enum machine_mode mode;
3791 tree value = TREE_VALUE (elt);
3792 tree index = TREE_PURPOSE (elt);
3793 rtx xtarget = target;
3795 if (cleared && is_zeros_p (value))
3798 mode = TYPE_MODE (elttype);
3799 bitsize = GET_MODE_BITSIZE (mode);
3800 unsignedp = TREE_UNSIGNED (elttype);
3802 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3804 tree lo_index = TREE_OPERAND (index, 0);
3805 tree hi_index = TREE_OPERAND (index, 1);
3806 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
3807 struct nesting *loop;
3808 HOST_WIDE_INT lo, hi, count;
3811 /* If the range is constant and "small", unroll the loop. */
3812 if (TREE_CODE (lo_index) == INTEGER_CST
3813 && TREE_CODE (hi_index) == INTEGER_CST
3814 && (lo = TREE_INT_CST_LOW (lo_index),
3815 hi = TREE_INT_CST_LOW (hi_index),
3816 count = hi - lo + 1,
3817 (GET_CODE (target) != MEM
3819 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
3820 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
3823 lo -= minelt; hi -= minelt;
3824 for (; lo <= hi; lo++)
3826 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
3827 store_constructor_field (target, bitsize, bitpos,
3828 mode, value, type, cleared);
3833 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
3834 loop_top = gen_label_rtx ();
3835 loop_end = gen_label_rtx ();
3837 unsignedp = TREE_UNSIGNED (domain);
3839 index = build_decl (VAR_DECL, NULL_TREE, domain);
3841 DECL_RTL (index) = index_r
3842 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
3845 if (TREE_CODE (value) == SAVE_EXPR
3846 && SAVE_EXPR_RTL (value) == 0)
3848 /* Make sure value gets expanded once before the
3850 expand_expr (value, const0_rtx, VOIDmode, 0);
3853 store_expr (lo_index, index_r, 0);
3854 loop = expand_start_loop (0);
3856 /* Assign value to element index. */
3857 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3858 size_int (BITS_PER_UNIT));
3859 position = size_binop (MULT_EXPR,
3860 size_binop (MINUS_EXPR, index,
3861 TYPE_MIN_VALUE (domain)),
3863 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3864 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
3865 xtarget = change_address (target, mode, addr);
3866 if (TREE_CODE (value) == CONSTRUCTOR)
3867 store_constructor (value, xtarget, cleared);
3869 store_expr (value, xtarget, 0);
3871 expand_exit_loop_if_false (loop,
3872 build (LT_EXPR, integer_type_node,
3875 expand_increment (build (PREINCREMENT_EXPR,
3877 index, integer_one_node), 0, 0);
3879 emit_label (loop_end);
3881 /* Needed by stupid register allocation. to extend the
3882 lifetime of pseudo-regs used by target past the end
3884 emit_insn (gen_rtx_USE (GET_MODE (target), target));
3887 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
3888 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
3894 index = size_int (i);
3897 index = size_binop (MINUS_EXPR, index,
3898 TYPE_MIN_VALUE (domain));
3899 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3900 size_int (BITS_PER_UNIT));
3901 position = size_binop (MULT_EXPR, index, position);
3902 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3903 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
3904 xtarget = change_address (target, mode, addr);
3905 store_expr (value, xtarget, 0);
3910 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
3911 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3913 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3914 store_constructor_field (target, bitsize, bitpos,
3915 mode, value, type, cleared);
3919 /* set constructor assignments */
3920 else if (TREE_CODE (type) == SET_TYPE)
3922 tree elt = CONSTRUCTOR_ELTS (exp);
3923 int nbytes = int_size_in_bytes (type), nbits;
3924 tree domain = TYPE_DOMAIN (type);
3925 tree domain_min, domain_max, bitlength;
3927 /* The default implementation strategy is to extract the constant
3928 parts of the constructor, use that to initialize the target,
3929 and then "or" in whatever non-constant ranges we need in addition.
3931 If a large set is all zero or all ones, it is
3932 probably better to set it using memset (if available) or bzero.
3933 Also, if a large set has just a single range, it may also be
3934 better to first clear all the first clear the set (using
3935 bzero/memset), and set the bits we want. */
3937 /* Check for all zeros. */
3938 if (elt == NULL_TREE)
3941 clear_storage (target, expr_size (exp),
3942 TYPE_ALIGN (type) / BITS_PER_UNIT);
3946 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3947 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3948 bitlength = size_binop (PLUS_EXPR,
3949 size_binop (MINUS_EXPR, domain_max, domain_min),
3952 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
3954 nbits = TREE_INT_CST_LOW (bitlength);
3956 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
3957 are "complicated" (more than one range), initialize (the
3958 constant parts) by copying from a constant. */
3959 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
3960 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
3962 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3963 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
3964 char *bit_buffer = (char *) alloca (nbits);
3965 HOST_WIDE_INT word = 0;
3968 int offset = 0; /* In bytes from beginning of set. */
3969 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
3972 if (bit_buffer[ibit])
3974 if (BYTES_BIG_ENDIAN)
3975 word |= (1 << (set_word_size - 1 - bit_pos));
3977 word |= 1 << bit_pos;
3980 if (bit_pos >= set_word_size || ibit == nbits)
3982 if (word != 0 || ! cleared)
3984 rtx datum = GEN_INT (word);
3986 /* The assumption here is that it is safe to use
3987 XEXP if the set is multi-word, but not if
3988 it's single-word. */
3989 if (GET_CODE (target) == MEM)
3991 to_rtx = plus_constant (XEXP (target, 0), offset);
3992 to_rtx = change_address (target, mode, to_rtx);
3994 else if (offset == 0)
3998 emit_move_insn (to_rtx, datum);
4004 offset += set_word_size / BITS_PER_UNIT;
4010 /* Don't bother clearing storage if the set is all ones. */
4011 if (TREE_CHAIN (elt) != NULL_TREE
4012 || (TREE_PURPOSE (elt) == NULL_TREE
4014 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4015 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4016 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4017 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4019 clear_storage (target, expr_size (exp),
4020 TYPE_ALIGN (type) / BITS_PER_UNIT);
4023 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4025 /* start of range of element or NULL */
4026 tree startbit = TREE_PURPOSE (elt);
4027 /* end of range of element, or element value */
4028 tree endbit = TREE_VALUE (elt);
4029 #ifdef TARGET_MEM_FUNCTIONS
4030 HOST_WIDE_INT startb, endb;
4032 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4034 bitlength_rtx = expand_expr (bitlength,
4035 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4037 /* handle non-range tuple element like [ expr ] */
4038 if (startbit == NULL_TREE)
4040 startbit = save_expr (endbit);
4043 startbit = convert (sizetype, startbit);
4044 endbit = convert (sizetype, endbit);
4045 if (! integer_zerop (domain_min))
4047 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4048 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4050 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4051 EXPAND_CONST_ADDRESS);
4052 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4053 EXPAND_CONST_ADDRESS);
4057 targetx = assign_stack_temp (GET_MODE (target),
4058 GET_MODE_SIZE (GET_MODE (target)),
4060 emit_move_insn (targetx, target);
4062 else if (GET_CODE (target) == MEM)
4067 #ifdef TARGET_MEM_FUNCTIONS
4068 /* Optimization: If startbit and endbit are
4069 constants divisible by BITS_PER_UNIT,
4070 call memset instead. */
4071 if (TREE_CODE (startbit) == INTEGER_CST
4072 && TREE_CODE (endbit) == INTEGER_CST
4073 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4074 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4076 emit_library_call (memset_libfunc, 0,
4078 plus_constant (XEXP (targetx, 0),
4079 startb / BITS_PER_UNIT),
4081 constm1_rtx, TYPE_MODE (integer_type_node),
4082 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4083 TYPE_MODE (sizetype));
4088 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4089 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4090 bitlength_rtx, TYPE_MODE (sizetype),
4091 startbit_rtx, TYPE_MODE (sizetype),
4092 endbit_rtx, TYPE_MODE (sizetype));
4095 emit_move_insn (target, targetx);
4103 /* Store the value of EXP (an expression tree)
4104 into a subfield of TARGET which has mode MODE and occupies
4105 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4106 If MODE is VOIDmode, it means that we are storing into a bit-field.
4108 If VALUE_MODE is VOIDmode, return nothing in particular.
4109 UNSIGNEDP is not used in this case.
4111 Otherwise, return an rtx for the value stored. This rtx
4112 has mode VALUE_MODE if that is convenient to do.
4113 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4115 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4116 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
4119 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4120 unsignedp, align, total_size)
4122 int bitsize, bitpos;
4123 enum machine_mode mode;
4125 enum machine_mode value_mode;
4130 HOST_WIDE_INT width_mask = 0;
4132 if (TREE_CODE (exp) == ERROR_MARK)
4135 if (bitsize < HOST_BITS_PER_WIDE_INT)
4136 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4138 /* If we are storing into an unaligned field of an aligned union that is
4139 in a register, we may have the mode of TARGET being an integer mode but
4140 MODE == BLKmode. In that case, get an aligned object whose size and
4141 alignment are the same as TARGET and store TARGET into it (we can avoid
4142 the store if the field being stored is the entire width of TARGET). Then
4143 call ourselves recursively to store the field into a BLKmode version of
4144 that object. Finally, load from the object into TARGET. This is not
4145 very efficient in general, but should only be slightly more expensive
4146 than the otherwise-required unaligned accesses. Perhaps this can be
4147 cleaned up later. */
4150 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4152 rtx object = assign_stack_temp (GET_MODE (target),
4153 GET_MODE_SIZE (GET_MODE (target)), 0);
4154 rtx blk_object = copy_rtx (object);
4156 MEM_IN_STRUCT_P (object) = 1;
4157 MEM_IN_STRUCT_P (blk_object) = 1;
4158 PUT_MODE (blk_object, BLKmode);
4160 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4161 emit_move_insn (object, target);
4163 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4166 /* Even though we aren't returning target, we need to
4167 give it the updated value. */
4168 emit_move_insn (target, object);
4173 /* If the structure is in a register or if the component
4174 is a bit field, we cannot use addressing to access it.
4175 Use bit-field techniques or SUBREG to store in it. */
4177 if (mode == VOIDmode
4178 || (mode != BLKmode && ! direct_store[(int) mode])
4179 || GET_CODE (target) == REG
4180 || GET_CODE (target) == SUBREG
4181 /* If the field isn't aligned enough to store as an ordinary memref,
4182 store it as a bit field. */
4183 || (SLOW_UNALIGNED_ACCESS
4184 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4185 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4187 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4189 /* If BITSIZE is narrower than the size of the type of EXP
4190 we will be narrowing TEMP. Normally, what's wanted are the
4191 low-order bits. However, if EXP's type is a record and this is
4192 big-endian machine, we want the upper BITSIZE bits. */
4193 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4194 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4195 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4196 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4197 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4201 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4203 if (mode != VOIDmode && mode != BLKmode
4204 && mode != TYPE_MODE (TREE_TYPE (exp)))
4205 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4207 /* If the modes of TARGET and TEMP are both BLKmode, both
4208 must be in memory and BITPOS must be aligned on a byte
4209 boundary. If so, we simply do a block copy. */
4210 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4212 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4213 || bitpos % BITS_PER_UNIT != 0)
4216 target = change_address (target, VOIDmode,
4217 plus_constant (XEXP (target, 0),
4218 bitpos / BITS_PER_UNIT));
4220 emit_block_move (target, temp,
4221 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4225 return value_mode == VOIDmode ? const0_rtx : target;
4228 /* Store the value in the bitfield. */
4229 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4230 if (value_mode != VOIDmode)
4232 /* The caller wants an rtx for the value. */
4233 /* If possible, avoid refetching from the bitfield itself. */
4235 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4238 enum machine_mode tmode;
4241 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4242 tmode = GET_MODE (temp);
4243 if (tmode == VOIDmode)
4245 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4246 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4247 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4249 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4250 NULL_RTX, value_mode, 0, align,
4257 rtx addr = XEXP (target, 0);
4260 /* If a value is wanted, it must be the lhs;
4261 so make the address stable for multiple use. */
4263 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4264 && ! CONSTANT_ADDRESS_P (addr)
4265 /* A frame-pointer reference is already stable. */
4266 && ! (GET_CODE (addr) == PLUS
4267 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4268 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4269 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4270 addr = copy_to_reg (addr);
4272 /* Now build a reference to just the desired component. */
4274 to_rtx = copy_rtx (change_address (target, mode,
4275 plus_constant (addr,
4277 / BITS_PER_UNIT))));
4278 MEM_IN_STRUCT_P (to_rtx) = 1;
4280 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4284 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4285 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4286 ARRAY_REFs and find the ultimate containing object, which we return.
4288 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4289 bit position, and *PUNSIGNEDP to the signedness of the field.
4290 If the position of the field is variable, we store a tree
4291 giving the variable offset (in units) in *POFFSET.
4292 This offset is in addition to the bit position.
4293 If the position is not variable, we store 0 in *POFFSET.
4294 We set *PALIGNMENT to the alignment in bytes of the address that will be
4295 computed. This is the alignment of the thing we return if *POFFSET
4296 is zero, but can be more less strictly aligned if *POFFSET is nonzero.
4298 If any of the extraction expressions is volatile,
4299 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4301 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4302 is a mode that can be used to access the field. In that case, *PBITSIZE
4305 If the field describes a variable-sized object, *PMODE is set to
4306 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4307 this case, but the address of the object can be found. */
4310 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4311 punsignedp, pvolatilep, palignment)
4316 enum machine_mode *pmode;
4321 tree orig_exp = exp;
4323 enum machine_mode mode = VOIDmode;
4324 tree offset = integer_zero_node;
4325 int alignment = BIGGEST_ALIGNMENT;
4327 if (TREE_CODE (exp) == COMPONENT_REF)
4329 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4330 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4331 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4332 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4334 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4336 size_tree = TREE_OPERAND (exp, 1);
4337 *punsignedp = TREE_UNSIGNED (exp);
4341 mode = TYPE_MODE (TREE_TYPE (exp));
4342 *pbitsize = GET_MODE_BITSIZE (mode);
4343 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4348 if (TREE_CODE (size_tree) != INTEGER_CST)
4349 mode = BLKmode, *pbitsize = -1;
4351 *pbitsize = TREE_INT_CST_LOW (size_tree);
4354 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4355 and find the ultimate containing object. */
4361 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4363 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4364 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4365 : TREE_OPERAND (exp, 2));
4366 tree constant = integer_zero_node, var = pos;
4368 /* If this field hasn't been filled in yet, don't go
4369 past it. This should only happen when folding expressions
4370 made during type construction. */
4374 /* Assume here that the offset is a multiple of a unit.
4375 If not, there should be an explicitly added constant. */
4376 if (TREE_CODE (pos) == PLUS_EXPR
4377 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4378 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4379 else if (TREE_CODE (pos) == INTEGER_CST)
4380 constant = pos, var = integer_zero_node;
4382 *pbitpos += TREE_INT_CST_LOW (constant);
4383 offset = size_binop (PLUS_EXPR, offset,
4384 size_binop (EXACT_DIV_EXPR, var,
4385 size_int (BITS_PER_UNIT)));
4388 else if (TREE_CODE (exp) == ARRAY_REF)
4390 /* This code is based on the code in case ARRAY_REF in expand_expr
4391 below. We assume here that the size of an array element is
4392 always an integral multiple of BITS_PER_UNIT. */
4394 tree index = TREE_OPERAND (exp, 1);
4395 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4397 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4398 tree index_type = TREE_TYPE (index);
4401 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4403 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4405 index_type = TREE_TYPE (index);
4408 if (! integer_zerop (low_bound))
4409 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
4411 if (TREE_CODE (index) == INTEGER_CST)
4413 index = convert (sbitsizetype, index);
4414 index_type = TREE_TYPE (index);
4417 xindex = fold (build (MULT_EXPR, sbitsizetype, index,
4418 convert (sbitsizetype,
4419 TYPE_SIZE (TREE_TYPE (exp)))));
4421 if (TREE_CODE (xindex) == INTEGER_CST
4422 && TREE_INT_CST_HIGH (xindex) == 0)
4423 *pbitpos += TREE_INT_CST_LOW (xindex);
4426 /* Either the bit offset calculated above is not constant, or
4427 it overflowed. In either case, redo the multiplication
4428 against the size in units. This is especially important
4429 in the non-constant case to avoid a division at runtime. */
4430 xindex = fold (build (MULT_EXPR, ssizetype, index,
4432 TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
4434 if (contains_placeholder_p (xindex))
4435 xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);
4437 offset = size_binop (PLUS_EXPR, offset, xindex);
4440 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4441 && ! ((TREE_CODE (exp) == NOP_EXPR
4442 || TREE_CODE (exp) == CONVERT_EXPR)
4443 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4444 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4446 && (TYPE_MODE (TREE_TYPE (exp))
4447 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4450 /* If any reference in the chain is volatile, the effect is volatile. */
4451 if (TREE_THIS_VOLATILE (exp))
4454 /* If the offset is non-constant already, then we can't assume any
4455 alignment more than the alignment here. */
4456 if (! integer_zerop (offset))
4457 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4459 exp = TREE_OPERAND (exp, 0);
4462 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4463 alignment = MIN (alignment, DECL_ALIGN (exp));
4464 else if (TREE_TYPE (exp) != 0)
4465 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4467 if (integer_zerop (offset))
4470 if (offset != 0 && contains_placeholder_p (offset))
4471 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4475 *palignment = alignment / BITS_PER_UNIT;
4479 /* Subroutine of expand_exp: compute memory_usage from modifier. */
4480 static enum memory_use_mode
4481 get_memory_usage_from_modifier (modifier)
4482 enum expand_modifier modifier;
4488 return MEMORY_USE_RO;
4490 case EXPAND_MEMORY_USE_WO:
4491 return MEMORY_USE_WO;
4493 case EXPAND_MEMORY_USE_RW:
4494 return MEMORY_USE_RW;
4496 case EXPAND_MEMORY_USE_DONT:
4497 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
4498 MEMORY_USE_DONT, because they are modifiers to a call of
4499 expand_expr in the ADDR_EXPR case of expand_expr. */
4500 case EXPAND_CONST_ADDRESS:
4501 case EXPAND_INITIALIZER:
4502 return MEMORY_USE_DONT;
4503 case EXPAND_MEMORY_USE_BAD:
4509 /* Given an rtx VALUE that may contain additions and multiplications,
4510 return an equivalent value that just refers to a register or memory.
4511 This is done by generating instructions to perform the arithmetic
4512 and returning a pseudo-register containing the value.
4514 The returned value may be a REG, SUBREG, MEM or constant. */
4517 force_operand (value, target)
4520 register optab binoptab = 0;
4521 /* Use a temporary to force order of execution of calls to
4525 /* Use subtarget as the target for operand 0 of a binary operation. */
4526 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4528 if (GET_CODE (value) == PLUS)
4529 binoptab = add_optab;
4530 else if (GET_CODE (value) == MINUS)
4531 binoptab = sub_optab;
4532 else if (GET_CODE (value) == MULT)
4534 op2 = XEXP (value, 1);
4535 if (!CONSTANT_P (op2)
4536 && !(GET_CODE (op2) == REG && op2 != subtarget))
4538 tmp = force_operand (XEXP (value, 0), subtarget);
4539 return expand_mult (GET_MODE (value), tmp,
4540 force_operand (op2, NULL_RTX),
4546 op2 = XEXP (value, 1);
4547 if (!CONSTANT_P (op2)
4548 && !(GET_CODE (op2) == REG && op2 != subtarget))
4550 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4552 binoptab = add_optab;
4553 op2 = negate_rtx (GET_MODE (value), op2);
4556 /* Check for an addition with OP2 a constant integer and our first
4557 operand a PLUS of a virtual register and something else. In that
4558 case, we want to emit the sum of the virtual register and the
4559 constant first and then add the other value. This allows virtual
4560 register instantiation to simply modify the constant rather than
4561 creating another one around this addition. */
4562 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4563 && GET_CODE (XEXP (value, 0)) == PLUS
4564 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4565 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4566 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4568 rtx temp = expand_binop (GET_MODE (value), binoptab,
4569 XEXP (XEXP (value, 0), 0), op2,
4570 subtarget, 0, OPTAB_LIB_WIDEN);
4571 return expand_binop (GET_MODE (value), binoptab, temp,
4572 force_operand (XEXP (XEXP (value, 0), 1), 0),
4573 target, 0, OPTAB_LIB_WIDEN);
4576 tmp = force_operand (XEXP (value, 0), subtarget);
4577 return expand_binop (GET_MODE (value), binoptab, tmp,
4578 force_operand (op2, NULL_RTX),
4579 target, 0, OPTAB_LIB_WIDEN);
4580 /* We give UNSIGNEDP = 0 to expand_binop
4581 because the only operations we are expanding here are signed ones. */
4586 /* Subroutine of expand_expr:
4587 save the non-copied parts (LIST) of an expr (LHS), and return a list
4588 which can restore these values to their previous values,
4589 should something modify their storage. */
4592 save_noncopied_parts (lhs, list)
4599 for (tail = list; tail; tail = TREE_CHAIN (tail))
4600 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4601 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
4604 tree part = TREE_VALUE (tail);
4605 tree part_type = TREE_TYPE (part);
4606 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
4607 rtx target = assign_temp (part_type, 0, 1, 1);
4608 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
4609 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
4610 parts = tree_cons (to_be_saved,
4611 build (RTL_EXPR, part_type, NULL_TREE,
4614 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
4619 /* Subroutine of expand_expr:
4620 record the non-copied parts (LIST) of an expr (LHS), and return a list
4621 which specifies the initial values of these parts. */
4624 init_noncopied_parts (lhs, list)
4631 for (tail = list; tail; tail = TREE_CHAIN (tail))
4632 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4633 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
4636 tree part = TREE_VALUE (tail);
4637 tree part_type = TREE_TYPE (part);
4638 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
4639 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
4644 /* Subroutine of expand_expr: return nonzero iff there is no way that
4645 EXP can reference X, which is being modified. TOP_P is nonzero if this
4646 call is going to be used to determine whether we need a temporary
4647 for EXP, as opposed to a recursive call to this function.
4649 It is always safe for this routine to return zero since it merely
4650 searches for optimization opportunities. */
4653 safe_from_p (x, exp, top_p)
4660 static int save_expr_count;
4661 static int save_expr_size = 0;
4662 static tree *save_expr_rewritten;
4663 static tree save_expr_trees[256];
4666 /* If EXP has varying size, we MUST use a target since we currently
4667 have no way of allocating temporaries of variable size
4668 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
4669 So we assume here that something at a higher level has prevented a
4670 clash. This is somewhat bogus, but the best we can do. Only
4671 do this when X is BLKmode and when we are at the top level. */
4672 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4673 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
4674 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
4675 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
4676 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
4678 && GET_MODE (x) == BLKmode))
4681 if (top_p && save_expr_size == 0)
4685 save_expr_count = 0;
4686 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
4687 save_expr_rewritten = &save_expr_trees[0];
4689 rtn = safe_from_p (x, exp, 1);
4691 for (i = 0; i < save_expr_count; ++i)
4693 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
4695 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
4703 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
4704 find the underlying pseudo. */
4705 if (GET_CODE (x) == SUBREG)
4708 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4712 /* If X is a location in the outgoing argument area, it is always safe. */
4713 if (GET_CODE (x) == MEM
4714 && (XEXP (x, 0) == virtual_outgoing_args_rtx
4715 || (GET_CODE (XEXP (x, 0)) == PLUS
4716 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
4719 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4722 exp_rtl = DECL_RTL (exp);
4729 if (TREE_CODE (exp) == TREE_LIST)
4730 return ((TREE_VALUE (exp) == 0
4731 || safe_from_p (x, TREE_VALUE (exp), 0))
4732 && (TREE_CHAIN (exp) == 0
4733 || safe_from_p (x, TREE_CHAIN (exp), 0)));
4734 else if (TREE_CODE (exp) == ERROR_MARK)
4735 return 1; /* An already-visited SAVE_EXPR? */
4740 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
4744 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
4745 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
4749 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
4750 the expression. If it is set, we conflict iff we are that rtx or
4751 both are in memory. Otherwise, we check all operands of the
4752 expression recursively. */
4754 switch (TREE_CODE (exp))
4757 return (staticp (TREE_OPERAND (exp, 0))
4758 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
4759 || TREE_STATIC (exp));
4762 if (GET_CODE (x) == MEM)
4767 exp_rtl = CALL_EXPR_RTL (exp);
4770 /* Assume that the call will clobber all hard registers and
4772 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4773 || GET_CODE (x) == MEM)
4780 /* If a sequence exists, we would have to scan every instruction
4781 in the sequence to see if it was safe. This is probably not
4783 if (RTL_EXPR_SEQUENCE (exp))
4786 exp_rtl = RTL_EXPR_RTL (exp);
4789 case WITH_CLEANUP_EXPR:
4790 exp_rtl = RTL_EXPR_RTL (exp);
4793 case CLEANUP_POINT_EXPR:
4794 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
4797 exp_rtl = SAVE_EXPR_RTL (exp);
4801 /* This SAVE_EXPR might appear many times in the top-level
4802 safe_from_p() expression, and if it has a complex
4803 subexpression, examining it multiple times could result
4804 in a combinatorial explosion. E.g. on an Alpha
4805 running at least 200MHz, a Fortran test case compiled with
4806 optimization took about 28 minutes to compile -- even though
4807 it was only a few lines long, and the complicated line causing
4808 so much time to be spent in the earlier version of safe_from_p()
4809 had only 293 or so unique nodes.
4811 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
4812 where it is so we can turn it back in the top-level safe_from_p()
4815 /* For now, don't bother re-sizing the array. */
4816 if (save_expr_count >= save_expr_size)
4818 save_expr_rewritten[save_expr_count++] = exp;
4819 TREE_SET_CODE (exp, ERROR_MARK);
4821 nops = tree_code_length[(int) SAVE_EXPR];
4822 for (i = 0; i < nops; i++)
4823 if (TREE_OPERAND (exp, i) != 0
4824 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
4829 /* The only operand we look at is operand 1. The rest aren't
4830 part of the expression. */
4831 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
4833 case METHOD_CALL_EXPR:
4834 /* This takes a rtx argument, but shouldn't appear here. */
4841 /* If we have an rtx, we do not need to scan our operands. */
4845 nops = tree_code_length[(int) TREE_CODE (exp)];
4846 for (i = 0; i < nops; i++)
4847 if (TREE_OPERAND (exp, i) != 0
4848 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
4852 /* If we have an rtl, find any enclosed object. Then see if we conflict
4856 if (GET_CODE (exp_rtl) == SUBREG)
4858 exp_rtl = SUBREG_REG (exp_rtl);
4859 if (GET_CODE (exp_rtl) == REG
4860 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
4864 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
4865 are memory and EXP is not readonly. */
4866 return ! (rtx_equal_p (x, exp_rtl)
4867 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
4868 && ! TREE_READONLY (exp)));
4871 /* If we reach here, it is safe. */
4875 /* Subroutine of expand_expr: return nonzero iff EXP is an
4876 expression whose type is statically determinable. */
4882 if (TREE_CODE (exp) == PARM_DECL
4883 || TREE_CODE (exp) == VAR_DECL
4884 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
4885 || TREE_CODE (exp) == COMPONENT_REF
4886 || TREE_CODE (exp) == ARRAY_REF)
4891 /* Subroutine of expand_expr: return rtx if EXP is a
4892 variable or parameter; else return 0. */
4899 switch (TREE_CODE (exp))
4903 return DECL_RTL (exp);
4909 /* expand_expr: generate code for computing expression EXP.
4910 An rtx for the computed value is returned. The value is never null.
4911 In the case of a void EXP, const0_rtx is returned.
4913 The value may be stored in TARGET if TARGET is nonzero.
4914 TARGET is just a suggestion; callers must assume that
4915 the rtx returned may not be the same as TARGET.
4917 If TARGET is CONST0_RTX, it means that the value will be ignored.
4919 If TMODE is not VOIDmode, it suggests generating the
4920 result in mode TMODE. But this is done only when convenient.
4921 Otherwise, TMODE is ignored and the value generated in its natural mode.
4922 TMODE is just a suggestion; callers must assume that
4923 the rtx returned may not have mode TMODE.
4925 Note that TARGET may have neither TMODE nor MODE. In that case, it
4926 probably will not be used.
4928 If MODIFIER is EXPAND_SUM then when EXP is an addition
4929 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4930 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4931 products as above, or REG or MEM, or constant.
4932 Ordinarily in such cases we would output mul or add instructions
4933 and then return a pseudo reg containing the sum.
4935 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4936 it also marks a label as absolutely required (it can't be dead).
4937 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
4938 This is used for outputting expressions used in initializers.
4940 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4941 with a constant address even if that address is not normally legitimate.
4942 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
4945 expand_expr (exp, target, tmode, modifier)
4948 enum machine_mode tmode;
4949 enum expand_modifier modifier;
4951 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4952 This is static so it will be accessible to our recursive callees. */
4953 static tree placeholder_list = 0;
4954 register rtx op0, op1, temp;
4955 tree type = TREE_TYPE (exp);
4956 int unsignedp = TREE_UNSIGNED (type);
4957 register enum machine_mode mode = TYPE_MODE (type);
4958 register enum tree_code code = TREE_CODE (exp);
4960 /* Use subtarget as the target for operand 0 of a binary operation. */
4961 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4962 rtx original_target = target;
4963 int ignore = (target == const0_rtx
4964 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4965 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4966 || code == COND_EXPR)
4967 && TREE_CODE (type) == VOID_TYPE));
4969 /* Used by check-memory-usage to make modifier read only. */
4970 enum expand_modifier ro_modifier;
4972 /* Make a read-only version of the modifier. */
4973 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
4974 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
4975 ro_modifier = modifier;
4977 ro_modifier = EXPAND_NORMAL;
4979 /* Don't use hard regs as subtargets, because the combiner
4980 can only handle pseudo regs. */
4981 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4983 /* Avoid subtargets inside loops,
4984 since they hide some invariant expressions. */
4985 if (preserve_subexpressions_p ())
4988 /* If we are going to ignore this result, we need only do something
4989 if there is a side-effect somewhere in the expression. If there
4990 is, short-circuit the most common cases here. Note that we must
4991 not call expand_expr with anything but const0_rtx in case this
4992 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
4996 if (! TREE_SIDE_EFFECTS (exp))
4999 /* Ensure we reference a volatile object even if value is ignored. */
5000 if (TREE_THIS_VOLATILE (exp)
5001 && TREE_CODE (exp) != FUNCTION_DECL
5002 && mode != VOIDmode && mode != BLKmode)
5004 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5005 if (GET_CODE (temp) == MEM)
5006 temp = copy_to_reg (temp);
5010 if (TREE_CODE_CLASS (code) == '1')
5011 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5012 VOIDmode, ro_modifier);
5013 else if (TREE_CODE_CLASS (code) == '2'
5014 || TREE_CODE_CLASS (code) == '<')
5016 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5017 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5020 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5021 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5022 /* If the second operand has no side effects, just evaluate
5024 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5025 VOIDmode, ro_modifier);
5030 /* If will do cse, generate all results into pseudo registers
5031 since 1) that allows cse to find more things
5032 and 2) otherwise cse could produce an insn the machine
5035 if (! cse_not_expected && mode != BLKmode && target
5036 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5043 tree function = decl_function_context (exp);
5044 /* Handle using a label in a containing function. */
5045 if (function != current_function_decl
5046 && function != inline_function_decl && function != 0)
5048 struct function *p = find_function_data (function);
5049 /* Allocate in the memory associated with the function
5050 that the label is in. */
5051 push_obstacks (p->function_obstack,
5052 p->function_maybepermanent_obstack);
5054 p->forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5059 else if (modifier == EXPAND_INITIALIZER)
5060 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5061 label_rtx (exp), forced_labels);
5062 temp = gen_rtx_MEM (FUNCTION_MODE,
5063 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5064 if (function != current_function_decl
5065 && function != inline_function_decl && function != 0)
5066 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5071 if (DECL_RTL (exp) == 0)
5073 error_with_decl (exp, "prior parameter's size depends on `%s'");
5074 return CONST0_RTX (mode);
5077 /* ... fall through ... */
5080 /* If a static var's type was incomplete when the decl was written,
5081 but the type is complete now, lay out the decl now. */
5082 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5083 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5085 push_obstacks_nochange ();
5086 end_temporary_allocation ();
5087 layout_decl (exp, 0);
5088 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5092 /* Only check automatic variables. Currently, function arguments are
5093 not checked (this can be done at compile-time with prototypes).
5094 Aggregates are not checked. */
5095 if (flag_check_memory_usage && code == VAR_DECL
5096 && GET_CODE (DECL_RTL (exp)) == MEM
5097 && DECL_CONTEXT (exp) != NULL_TREE
5098 && ! TREE_STATIC (exp)
5099 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5101 enum memory_use_mode memory_usage;
5102 memory_usage = get_memory_usage_from_modifier (modifier);
5104 if (memory_usage != MEMORY_USE_DONT)
5105 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5106 XEXP (DECL_RTL (exp), 0), ptr_mode,
5107 GEN_INT (int_size_in_bytes (type)),
5108 TYPE_MODE (sizetype),
5109 GEN_INT (memory_usage),
5110 TYPE_MODE (integer_type_node));
5113 /* ... fall through ... */
5117 if (DECL_RTL (exp) == 0)
5120 /* Ensure variable marked as used even if it doesn't go through
5121 a parser. If it hasn't be used yet, write out an external
5123 if (! TREE_USED (exp))
5125 assemble_external (exp);
5126 TREE_USED (exp) = 1;
5129 /* Show we haven't gotten RTL for this yet. */
5132 /* Handle variables inherited from containing functions. */
5133 context = decl_function_context (exp);
5135 /* We treat inline_function_decl as an alias for the current function
5136 because that is the inline function whose vars, types, etc.
5137 are being merged into the current function.
5138 See expand_inline_function. */
5140 if (context != 0 && context != current_function_decl
5141 && context != inline_function_decl
5142 /* If var is static, we don't need a static chain to access it. */
5143 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5144 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5148 /* Mark as non-local and addressable. */
5149 DECL_NONLOCAL (exp) = 1;
5150 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5152 mark_addressable (exp);
5153 if (GET_CODE (DECL_RTL (exp)) != MEM)
5155 addr = XEXP (DECL_RTL (exp), 0);
5156 if (GET_CODE (addr) == MEM)
5157 addr = gen_rtx_MEM (Pmode,
5158 fix_lexical_addr (XEXP (addr, 0), exp));
5160 addr = fix_lexical_addr (addr, exp);
5161 temp = change_address (DECL_RTL (exp), mode, addr);
5164 /* This is the case of an array whose size is to be determined
5165 from its initializer, while the initializer is still being parsed.
5168 else if (GET_CODE (DECL_RTL (exp)) == MEM
5169 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5170 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5171 XEXP (DECL_RTL (exp), 0));
5173 /* If DECL_RTL is memory, we are in the normal case and either
5174 the address is not valid or it is not a register and -fforce-addr
5175 is specified, get the address into a register. */
5177 else if (GET_CODE (DECL_RTL (exp)) == MEM
5178 && modifier != EXPAND_CONST_ADDRESS
5179 && modifier != EXPAND_SUM
5180 && modifier != EXPAND_INITIALIZER
5181 && (! memory_address_p (DECL_MODE (exp),
5182 XEXP (DECL_RTL (exp), 0))
5184 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5185 temp = change_address (DECL_RTL (exp), VOIDmode,
5186 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5188 /* If we got something, return it. But first, set the alignment
5189 the address is a register. */
5192 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5193 mark_reg_pointer (XEXP (temp, 0),
5194 DECL_ALIGN (exp) / BITS_PER_UNIT);
5199 /* If the mode of DECL_RTL does not match that of the decl, it
5200 must be a promoted value. We return a SUBREG of the wanted mode,
5201 but mark it so that we know that it was already extended. */
5203 if (GET_CODE (DECL_RTL (exp)) == REG
5204 && GET_MODE (DECL_RTL (exp)) != mode)
5206 /* Get the signedness used for this variable. Ensure we get the
5207 same mode we got when the variable was declared. */
5208 if (GET_MODE (DECL_RTL (exp))
5209 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5212 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
5213 SUBREG_PROMOTED_VAR_P (temp) = 1;
5214 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5218 return DECL_RTL (exp);
5221 return immed_double_const (TREE_INT_CST_LOW (exp),
5222 TREE_INT_CST_HIGH (exp),
5226 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5227 EXPAND_MEMORY_USE_BAD);
5230 /* If optimized, generate immediate CONST_DOUBLE
5231 which will be turned into memory by reload if necessary.
5233 We used to force a register so that loop.c could see it. But
5234 this does not allow gen_* patterns to perform optimizations with
5235 the constants. It also produces two insns in cases like "x = 1.0;".
5236 On most machines, floating-point constants are not permitted in
5237 many insns, so we'd end up copying it to a register in any case.
5239 Now, we do the copying in expand_binop, if appropriate. */
5240 return immed_real_const (exp);
5244 if (! TREE_CST_RTL (exp))
5245 output_constant_def (exp);
5247 /* TREE_CST_RTL probably contains a constant address.
5248 On RISC machines where a constant address isn't valid,
5249 make some insns to get that address into a register. */
5250 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5251 && modifier != EXPAND_CONST_ADDRESS
5252 && modifier != EXPAND_INITIALIZER
5253 && modifier != EXPAND_SUM
5254 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5256 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5257 return change_address (TREE_CST_RTL (exp), VOIDmode,
5258 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5259 return TREE_CST_RTL (exp);
5261 case EXPR_WITH_FILE_LOCATION:
5264 char *saved_input_filename = input_filename;
5265 int saved_lineno = lineno;
5266 input_filename = EXPR_WFL_FILENAME (exp);
5267 lineno = EXPR_WFL_LINENO (exp);
5268 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
5269 emit_line_note (input_filename, lineno);
5270 /* Possibly avoid switching back and force here */
5271 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
5272 input_filename = saved_input_filename;
5273 lineno = saved_lineno;
5278 context = decl_function_context (exp);
5280 /* If this SAVE_EXPR was at global context, assume we are an
5281 initialization function and move it into our context. */
5283 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
5285 /* We treat inline_function_decl as an alias for the current function
5286 because that is the inline function whose vars, types, etc.
5287 are being merged into the current function.
5288 See expand_inline_function. */
5289 if (context == current_function_decl || context == inline_function_decl)
5292 /* If this is non-local, handle it. */
5295 /* The following call just exists to abort if the context is
5296 not of a containing function. */
5297 find_function_data (context);
5299 temp = SAVE_EXPR_RTL (exp);
5300 if (temp && GET_CODE (temp) == REG)
5302 put_var_into_stack (exp);
5303 temp = SAVE_EXPR_RTL (exp);
5305 if (temp == 0 || GET_CODE (temp) != MEM)
5307 return change_address (temp, mode,
5308 fix_lexical_addr (XEXP (temp, 0), exp));
5310 if (SAVE_EXPR_RTL (exp) == 0)
5312 if (mode == VOIDmode)
5315 temp = assign_temp (type, 3, 0, 0);
5317 SAVE_EXPR_RTL (exp) = temp;
5318 if (!optimize && GET_CODE (temp) == REG)
5319 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
5322 /* If the mode of TEMP does not match that of the expression, it
5323 must be a promoted value. We pass store_expr a SUBREG of the
5324 wanted mode but mark it so that we know that it was already
5325 extended. Note that `unsignedp' was modified above in
5328 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5330 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5331 SUBREG_PROMOTED_VAR_P (temp) = 1;
5332 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5335 if (temp == const0_rtx)
5336 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5337 EXPAND_MEMORY_USE_BAD);
5339 store_expr (TREE_OPERAND (exp, 0), temp, 0);
5341 TREE_USED (exp) = 1;
5344 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5345 must be a promoted value. We return a SUBREG of the wanted mode,
5346 but mark it so that we know that it was already extended. */
5348 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5349 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5351 /* Compute the signedness and make the proper SUBREG. */
5352 promote_mode (type, mode, &unsignedp, 0);
5353 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5354 SUBREG_PROMOTED_VAR_P (temp) = 1;
5355 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5359 return SAVE_EXPR_RTL (exp);
5364 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5365 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5369 case PLACEHOLDER_EXPR:
5371 tree placeholder_expr;
5373 /* If there is an object on the head of the placeholder list,
5374 see if some object in it of type TYPE or a pointer to it. For
5375 further information, see tree.def. */
5376 for (placeholder_expr = placeholder_list;
5377 placeholder_expr != 0;
5378 placeholder_expr = TREE_CHAIN (placeholder_expr))
5380 tree need_type = TYPE_MAIN_VARIANT (type);
5382 tree old_list = placeholder_list;
5385 /* Find the outermost reference that is of the type we want.
5386 If none, see if any object has a type that is a pointer to
5387 the type we want. */
5388 for (elt = TREE_PURPOSE (placeholder_expr);
5389 elt != 0 && object == 0;
5391 = ((TREE_CODE (elt) == COMPOUND_EXPR
5392 || TREE_CODE (elt) == COND_EXPR)
5393 ? TREE_OPERAND (elt, 1)
5394 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5395 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5396 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5397 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5398 ? TREE_OPERAND (elt, 0) : 0))
5399 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5402 for (elt = TREE_PURPOSE (placeholder_expr);
5403 elt != 0 && object == 0;
5405 = ((TREE_CODE (elt) == COMPOUND_EXPR
5406 || TREE_CODE (elt) == COND_EXPR)
5407 ? TREE_OPERAND (elt, 1)
5408 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5409 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5410 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5411 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5412 ? TREE_OPERAND (elt, 0) : 0))
5413 if (POINTER_TYPE_P (TREE_TYPE (elt))
5414 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
5416 object = build1 (INDIRECT_REF, need_type, elt);
5420 /* Expand this object skipping the list entries before
5421 it was found in case it is also a PLACEHOLDER_EXPR.
5422 In that case, we want to translate it using subsequent
5424 placeholder_list = TREE_CHAIN (placeholder_expr);
5425 temp = expand_expr (object, original_target, tmode,
5427 placeholder_list = old_list;
5433 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5436 case WITH_RECORD_EXPR:
5437 /* Put the object on the placeholder list, expand our first operand,
5438 and pop the list. */
5439 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5441 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
5442 tmode, ro_modifier);
5443 placeholder_list = TREE_CHAIN (placeholder_list);
5447 expand_exit_loop_if_false (NULL_PTR,
5448 invert_truthvalue (TREE_OPERAND (exp, 0)));
5453 expand_start_loop (1);
5454 expand_expr_stmt (TREE_OPERAND (exp, 0));
5462 tree vars = TREE_OPERAND (exp, 0);
5463 int vars_need_expansion = 0;
5465 /* Need to open a binding contour here because
5466 if there are any cleanups they must be contained here. */
5467 expand_start_bindings (0);
5469 /* Mark the corresponding BLOCK for output in its proper place. */
5470 if (TREE_OPERAND (exp, 2) != 0
5471 && ! TREE_USED (TREE_OPERAND (exp, 2)))
5472 insert_block (TREE_OPERAND (exp, 2));
5474 /* If VARS have not yet been expanded, expand them now. */
5477 if (DECL_RTL (vars) == 0)
5479 vars_need_expansion = 1;
5482 expand_decl_init (vars);
5483 vars = TREE_CHAIN (vars);
5486 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
5488 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
5494 if (RTL_EXPR_SEQUENCE (exp))
5496 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
5498 emit_insns (RTL_EXPR_SEQUENCE (exp));
5499 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
5501 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
5502 free_temps_for_rtl_expr (exp);
5503 return RTL_EXPR_RTL (exp);
5506 /* If we don't need the result, just ensure we evaluate any
5511 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
5512 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
5513 EXPAND_MEMORY_USE_BAD);
5517 /* All elts simple constants => refer to a constant in memory. But
5518 if this is a non-BLKmode mode, let it store a field at a time
5519 since that should make a CONST_INT or CONST_DOUBLE when we
5520 fold. Likewise, if we have a target we can use, it is best to
5521 store directly into the target unless the type is large enough
5522 that memcpy will be used. If we are making an initializer and
5523 all operands are constant, put it in memory as well. */
5524 else if ((TREE_STATIC (exp)
5525 && ((mode == BLKmode
5526 && ! (target != 0 && safe_from_p (target, exp, 1)))
5527 || TREE_ADDRESSABLE (exp)
5528 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5529 && (move_by_pieces_ninsns
5530 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
5531 TYPE_ALIGN (type) / BITS_PER_UNIT)
5533 && ! mostly_zeros_p (exp))))
5534 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
5536 rtx constructor = output_constant_def (exp);
5537 if (modifier != EXPAND_CONST_ADDRESS
5538 && modifier != EXPAND_INITIALIZER
5539 && modifier != EXPAND_SUM
5540 && (! memory_address_p (GET_MODE (constructor),
5541 XEXP (constructor, 0))
5543 && GET_CODE (XEXP (constructor, 0)) != REG)))
5544 constructor = change_address (constructor, VOIDmode,
5545 XEXP (constructor, 0));
5551 /* Handle calls that pass values in multiple non-contiguous
5552 locations. The Irix 6 ABI has examples of this. */
5553 if (target == 0 || ! safe_from_p (target, exp, 1)
5554 || GET_CODE (target) == PARALLEL)
5556 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
5557 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5559 target = assign_temp (type, 0, 1, 1);
5562 if (TREE_READONLY (exp))
5564 if (GET_CODE (target) == MEM)
5565 target = copy_rtx (target);
5567 RTX_UNCHANGING_P (target) = 1;
5570 store_constructor (exp, target, 0);
5576 tree exp1 = TREE_OPERAND (exp, 0);
5579 tree string = string_constant (exp1, &index);
5582 /* Try to optimize reads from const strings. */
5584 && TREE_CODE (string) == STRING_CST
5585 && TREE_CODE (index) == INTEGER_CST
5586 && !TREE_INT_CST_HIGH (index)
5587 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
5588 && GET_MODE_CLASS (mode) == MODE_INT
5589 && GET_MODE_SIZE (mode) == 1
5590 && modifier != EXPAND_MEMORY_USE_WO)
5591 return GEN_INT (TREE_STRING_POINTER (string)[i]);
5593 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
5594 op0 = memory_address (mode, op0);
5596 if (flag_check_memory_usage && !AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5598 enum memory_use_mode memory_usage;
5599 memory_usage = get_memory_usage_from_modifier (modifier);
5601 if (memory_usage != MEMORY_USE_DONT)
5603 in_check_memory_usage = 1;
5604 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5606 GEN_INT (int_size_in_bytes (type)),
5607 TYPE_MODE (sizetype),
5608 GEN_INT (memory_usage),
5609 TYPE_MODE (integer_type_node));
5610 in_check_memory_usage = 0;
5614 temp = gen_rtx_MEM (mode, op0);
5615 /* If address was computed by addition,
5616 mark this as an element of an aggregate. */
5617 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5618 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
5619 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
5620 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
5621 || (TREE_CODE (exp1) == ADDR_EXPR
5622 && (exp2 = TREE_OPERAND (exp1, 0))
5623 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
5624 MEM_IN_STRUCT_P (temp) = 1;
5625 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
5627 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5628 here, because, in C and C++, the fact that a location is accessed
5629 through a pointer to const does not mean that the value there can
5630 never change. Languages where it can never change should
5631 also set TREE_STATIC. */
5632 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
5637 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
5641 tree array = TREE_OPERAND (exp, 0);
5642 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5643 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5644 tree index = TREE_OPERAND (exp, 1);
5645 tree index_type = TREE_TYPE (index);
5648 /* Optimize the special-case of a zero lower bound.
5650 We convert the low_bound to sizetype to avoid some problems
5651 with constant folding. (E.g. suppose the lower bound is 1,
5652 and its mode is QI. Without the conversion, (ARRAY
5653 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5654 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5656 But sizetype isn't quite right either (especially if
5657 the lowbound is negative). FIXME */
5659 if (! integer_zerop (low_bound))
5660 index = fold (build (MINUS_EXPR, index_type, index,
5661 convert (sizetype, low_bound)));
5663 /* Fold an expression like: "foo"[2].
5664 This is not done in fold so it won't happen inside &.
5665 Don't fold if this is for wide characters since it's too
5666 difficult to do correctly and this is a very rare case. */
5668 if (TREE_CODE (array) == STRING_CST
5669 && TREE_CODE (index) == INTEGER_CST
5670 && !TREE_INT_CST_HIGH (index)
5671 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
5672 && GET_MODE_CLASS (mode) == MODE_INT
5673 && GET_MODE_SIZE (mode) == 1)
5674 return GEN_INT (TREE_STRING_POINTER (array)[i]);
5676 /* If this is a constant index into a constant array,
5677 just get the value from the array. Handle both the cases when
5678 we have an explicit constructor and when our operand is a variable
5679 that was declared const. */
5681 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
5683 if (TREE_CODE (index) == INTEGER_CST
5684 && TREE_INT_CST_HIGH (index) == 0)
5686 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
5688 i = TREE_INT_CST_LOW (index);
5690 elem = TREE_CHAIN (elem);
5692 return expand_expr (fold (TREE_VALUE (elem)), target,
5693 tmode, ro_modifier);
5697 else if (optimize >= 1
5698 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
5699 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
5700 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
5702 if (TREE_CODE (index) == INTEGER_CST)
5704 tree init = DECL_INITIAL (array);
5706 i = TREE_INT_CST_LOW (index);
5707 if (TREE_CODE (init) == CONSTRUCTOR)
5709 tree elem = CONSTRUCTOR_ELTS (init);
5712 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
5713 elem = TREE_CHAIN (elem);
5715 return expand_expr (fold (TREE_VALUE (elem)), target,
5716 tmode, ro_modifier);
5718 else if (TREE_CODE (init) == STRING_CST
5719 && TREE_INT_CST_HIGH (index) == 0
5720 && (TREE_INT_CST_LOW (index)
5721 < TREE_STRING_LENGTH (init)))
5723 (TREE_STRING_POINTER
5724 (init)[TREE_INT_CST_LOW (index)]));
5729 /* ... fall through ... */
5733 /* If the operand is a CONSTRUCTOR, we can just extract the
5734 appropriate field if it is present. Don't do this if we have
5735 already written the data since we want to refer to that copy
5736 and varasm.c assumes that's what we'll do. */
5737 if (code != ARRAY_REF
5738 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
5739 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
5743 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
5744 elt = TREE_CHAIN (elt))
5745 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
5746 /* We can normally use the value of the field in the
5747 CONSTRUCTOR. However, if this is a bitfield in
5748 an integral mode that we can fit in a HOST_WIDE_INT,
5749 we must mask only the number of bits in the bitfield,
5750 since this is done implicitly by the constructor. If
5751 the bitfield does not meet either of those conditions,
5752 we can't do this optimization. */
5753 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
5754 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
5756 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
5757 <= HOST_BITS_PER_WIDE_INT))))
5759 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
5760 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
5762 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
5764 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
5766 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
5767 op0 = expand_and (op0, op1, target);
5771 enum machine_mode imode
5772 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
5774 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
5777 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
5779 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
5789 enum machine_mode mode1;
5795 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
5796 &mode1, &unsignedp, &volatilep,
5799 /* If we got back the original object, something is wrong. Perhaps
5800 we are evaluating an expression too early. In any event, don't
5801 infinitely recurse. */
5805 /* If TEM's type is a union of variable size, pass TARGET to the inner
5806 computation, since it will need a temporary and TARGET is known
5807 to have to do. This occurs in unchecked conversion in Ada. */
5809 op0 = expand_expr (tem,
5810 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
5811 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
5813 ? target : NULL_RTX),
5815 modifier == EXPAND_INITIALIZER
5816 ? modifier : EXPAND_NORMAL);
5818 /* If this is a constant, put it into a register if it is a
5819 legitimate constant and memory if it isn't. */
5820 if (CONSTANT_P (op0))
5822 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
5823 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
5824 op0 = force_reg (mode, op0);
5826 op0 = validize_mem (force_const_mem (mode, op0));
5831 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5833 if (GET_CODE (op0) != MEM)
5836 if (GET_MODE (offset_rtx) != ptr_mode)
5838 #ifdef POINTERS_EXTEND_UNSIGNED
5839 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 1);
5841 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5845 op0 = change_address (op0, VOIDmode,
5846 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
5847 force_reg (ptr_mode, offset_rtx)));
5850 /* Don't forget about volatility even if this is a bitfield. */
5851 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
5853 op0 = copy_rtx (op0);
5854 MEM_VOLATILE_P (op0) = 1;
5857 /* Check the access. */
5858 if (flag_check_memory_usage && GET_CODE (op0) == MEM)
5860 enum memory_use_mode memory_usage;
5861 memory_usage = get_memory_usage_from_modifier (modifier);
5863 if (memory_usage != MEMORY_USE_DONT)
5868 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
5869 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
5871 /* Check the access right of the pointer. */
5872 if (size > BITS_PER_UNIT)
5873 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5875 GEN_INT (size / BITS_PER_UNIT),
5876 TYPE_MODE (sizetype),
5877 GEN_INT (memory_usage),
5878 TYPE_MODE (integer_type_node));
5882 /* In cases where an aligned union has an unaligned object
5883 as a field, we might be extracting a BLKmode value from
5884 an integer-mode (e.g., SImode) object. Handle this case
5885 by doing the extract into an object as wide as the field
5886 (which we know to be the width of a basic mode), then
5887 storing into memory, and changing the mode to BLKmode.
5888 If we ultimately want the address (EXPAND_CONST_ADDRESS or
5889 EXPAND_INITIALIZER), then we must not copy to a temporary. */
5890 if (mode1 == VOIDmode
5891 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5892 || (modifier != EXPAND_CONST_ADDRESS
5893 && modifier != EXPAND_INITIALIZER
5894 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
5895 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5896 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5897 /* If the field isn't aligned enough to fetch as a memref,
5898 fetch it as a bit field. */
5899 || (SLOW_UNALIGNED_ACCESS
5900 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
5901 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
5903 enum machine_mode ext_mode = mode;
5905 if (ext_mode == BLKmode)
5906 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
5908 if (ext_mode == BLKmode)
5910 /* In this case, BITPOS must start at a byte boundary and
5911 TARGET, if specified, must be a MEM. */
5912 if (GET_CODE (op0) != MEM
5913 || (target != 0 && GET_CODE (target) != MEM)
5914 || bitpos % BITS_PER_UNIT != 0)
5917 op0 = change_address (op0, VOIDmode,
5918 plus_constant (XEXP (op0, 0),
5919 bitpos / BITS_PER_UNIT));
5921 target = assign_temp (type, 0, 1, 1);
5923 emit_block_move (target, op0,
5924 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5931 op0 = validize_mem (op0);
5933 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
5934 mark_reg_pointer (XEXP (op0, 0), alignment);
5936 op0 = extract_bit_field (op0, bitsize, bitpos,
5937 unsignedp, target, ext_mode, ext_mode,
5939 int_size_in_bytes (TREE_TYPE (tem)));
5941 /* If the result is a record type and BITSIZE is narrower than
5942 the mode of OP0, an integral mode, and this is a big endian
5943 machine, we must put the field into the high-order bits. */
5944 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
5945 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
5946 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
5947 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
5948 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
5952 if (mode == BLKmode)
5954 rtx new = assign_stack_temp (ext_mode,
5955 bitsize / BITS_PER_UNIT, 0);
5957 emit_move_insn (new, op0);
5958 op0 = copy_rtx (new);
5959 PUT_MODE (op0, BLKmode);
5960 MEM_IN_STRUCT_P (op0) = 1;
5966 /* If the result is BLKmode, use that to access the object
5968 if (mode == BLKmode)
5971 /* Get a reference to just this component. */
5972 if (modifier == EXPAND_CONST_ADDRESS
5973 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5974 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
5975 (bitpos / BITS_PER_UNIT)));
5977 op0 = change_address (op0, mode1,
5978 plus_constant (XEXP (op0, 0),
5979 (bitpos / BITS_PER_UNIT)));
5980 if (GET_CODE (XEXP (op0, 0)) == REG)
5981 mark_reg_pointer (XEXP (op0, 0), alignment);
5983 MEM_IN_STRUCT_P (op0) = 1;
5984 MEM_VOLATILE_P (op0) |= volatilep;
5985 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
5986 || modifier == EXPAND_CONST_ADDRESS
5987 || modifier == EXPAND_INITIALIZER)
5989 else if (target == 0)
5990 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5992 convert_move (target, op0, unsignedp);
5996 /* Intended for a reference to a buffer of a file-object in Pascal.
5997 But it's not certain that a special tree code will really be
5998 necessary for these. INDIRECT_REF might work for them. */
6004 /* Pascal set IN expression.
6007 rlo = set_low - (set_low%bits_per_word);
6008 the_word = set [ (index - rlo)/bits_per_word ];
6009 bit_index = index % bits_per_word;
6010 bitmask = 1 << bit_index;
6011 return !!(the_word & bitmask); */
6013 tree set = TREE_OPERAND (exp, 0);
6014 tree index = TREE_OPERAND (exp, 1);
6015 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6016 tree set_type = TREE_TYPE (set);
6017 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6018 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6019 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6020 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6021 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6022 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6023 rtx setaddr = XEXP (setval, 0);
6024 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6026 rtx diff, quo, rem, addr, bit, result;
6028 preexpand_calls (exp);
6030 /* If domain is empty, answer is no. Likewise if index is constant
6031 and out of bounds. */
6032 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6033 && TREE_CODE (set_low_bound) == INTEGER_CST
6034 && tree_int_cst_lt (set_high_bound, set_low_bound))
6035 || (TREE_CODE (index) == INTEGER_CST
6036 && TREE_CODE (set_low_bound) == INTEGER_CST
6037 && tree_int_cst_lt (index, set_low_bound))
6038 || (TREE_CODE (set_high_bound) == INTEGER_CST
6039 && TREE_CODE (index) == INTEGER_CST
6040 && tree_int_cst_lt (set_high_bound, index))))
6044 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6046 /* If we get here, we have to generate the code for both cases
6047 (in range and out of range). */
6049 op0 = gen_label_rtx ();
6050 op1 = gen_label_rtx ();
6052 if (! (GET_CODE (index_val) == CONST_INT
6053 && GET_CODE (lo_r) == CONST_INT))
6055 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
6056 GET_MODE (index_val), iunsignedp, 0);
6057 emit_jump_insn (gen_blt (op1));
6060 if (! (GET_CODE (index_val) == CONST_INT
6061 && GET_CODE (hi_r) == CONST_INT))
6063 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
6064 GET_MODE (index_val), iunsignedp, 0);
6065 emit_jump_insn (gen_bgt (op1));
6068 /* Calculate the element number of bit zero in the first word
6070 if (GET_CODE (lo_r) == CONST_INT)
6071 rlow = GEN_INT (INTVAL (lo_r)
6072 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
6074 rlow = expand_binop (index_mode, and_optab, lo_r,
6075 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
6076 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6078 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6079 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6081 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6082 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6083 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6084 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6086 addr = memory_address (byte_mode,
6087 expand_binop (index_mode, add_optab, diff,
6088 setaddr, NULL_RTX, iunsignedp,
6091 /* Extract the bit we want to examine */
6092 bit = expand_shift (RSHIFT_EXPR, byte_mode,
6093 gen_rtx_MEM (byte_mode, addr),
6094 make_tree (TREE_TYPE (index), rem),
6096 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6097 GET_MODE (target) == byte_mode ? target : 0,
6098 1, OPTAB_LIB_WIDEN);
6100 if (result != target)
6101 convert_move (target, result, 1);
6103 /* Output the code to handle the out-of-range case. */
6106 emit_move_insn (target, const0_rtx);
6111 case WITH_CLEANUP_EXPR:
6112 if (RTL_EXPR_RTL (exp) == 0)
6115 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6116 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6118 /* That's it for this cleanup. */
6119 TREE_OPERAND (exp, 2) = 0;
6121 return RTL_EXPR_RTL (exp);
6123 case CLEANUP_POINT_EXPR:
6125 extern int temp_slot_level;
6126 /* Start a new binding layer that will keep track of all cleanup
6127 actions to be performed. */
6128 expand_start_bindings (0);
6130 target_temp_slot_level = temp_slot_level;
6132 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6133 /* If we're going to use this value, load it up now. */
6135 op0 = force_not_mem (op0);
6136 preserve_temp_slots (op0);
6137 expand_end_bindings (NULL_TREE, 0, 0);
6142 /* Check for a built-in function. */
6143 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6144 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6146 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6147 return expand_builtin (exp, target, subtarget, tmode, ignore);
6149 /* If this call was expanded already by preexpand_calls,
6150 just return the result we got. */
6151 if (CALL_EXPR_RTL (exp) != 0)
6152 return CALL_EXPR_RTL (exp);
6154 return expand_call (exp, target, ignore);
6156 case NON_LVALUE_EXPR:
6159 case REFERENCE_EXPR:
6160 if (TREE_CODE (type) == UNION_TYPE)
6162 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6165 if (mode != BLKmode)
6166 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6168 target = assign_temp (type, 0, 1, 1);
6171 if (GET_CODE (target) == MEM)
6172 /* Store data into beginning of memory target. */
6173 store_expr (TREE_OPERAND (exp, 0),
6174 change_address (target, TYPE_MODE (valtype), 0), 0);
6176 else if (GET_CODE (target) == REG)
6177 /* Store this field into a union of the proper type. */
6178 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6179 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6181 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
6185 /* Return the entire union. */
6189 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6191 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
6194 /* If the signedness of the conversion differs and OP0 is
6195 a promoted SUBREG, clear that indication since we now
6196 have to do the proper extension. */
6197 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6198 && GET_CODE (op0) == SUBREG)
6199 SUBREG_PROMOTED_VAR_P (op0) = 0;
6204 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
6205 if (GET_MODE (op0) == mode)
6208 /* If OP0 is a constant, just convert it into the proper mode. */
6209 if (CONSTANT_P (op0))
6211 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6212 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6214 if (modifier == EXPAND_INITIALIZER)
6215 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
6219 convert_to_mode (mode, op0,
6220 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6222 convert_move (target, op0,
6223 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6227 /* We come here from MINUS_EXPR when the second operand is a
6230 this_optab = add_optab;
6232 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6233 something else, make sure we add the register to the constant and
6234 then to the other thing. This case can occur during strength
6235 reduction and doing it this way will produce better code if the
6236 frame pointer or argument pointer is eliminated.
6238 fold-const.c will ensure that the constant is always in the inner
6239 PLUS_EXPR, so the only case we need to do anything about is if
6240 sp, ap, or fp is our second argument, in which case we must swap
6241 the innermost first argument and our second argument. */
6243 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6244 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6245 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6246 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6247 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6248 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6250 tree t = TREE_OPERAND (exp, 1);
6252 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6253 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
6256 /* If the result is to be ptr_mode and we are adding an integer to
6257 something, we might be forming a constant. So try to use
6258 plus_constant. If it produces a sum and we can't accept it,
6259 use force_operand. This allows P = &ARR[const] to generate
6260 efficient code on machines where a SYMBOL_REF is not a valid
6263 If this is an EXPAND_SUM call, always return the sum. */
6264 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
6265 || mode == ptr_mode)
6267 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
6268 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6269 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
6271 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
6273 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
6274 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6275 op1 = force_operand (op1, target);
6279 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6280 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
6281 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
6283 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6285 if (! CONSTANT_P (op0))
6287 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6288 VOIDmode, modifier);
6289 /* Don't go to both_summands if modifier
6290 says it's not right to return a PLUS. */
6291 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6295 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
6296 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6297 op0 = force_operand (op0, target);
6302 /* No sense saving up arithmetic to be done
6303 if it's all in the wrong mode to form part of an address.
6304 And force_operand won't know whether to sign-extend or
6306 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6307 || mode != ptr_mode)
6310 preexpand_calls (exp);
6311 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6314 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
6315 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
6318 /* Make sure any term that's a sum with a constant comes last. */
6319 if (GET_CODE (op0) == PLUS
6320 && CONSTANT_P (XEXP (op0, 1)))
6326 /* If adding to a sum including a constant,
6327 associate it to put the constant outside. */
6328 if (GET_CODE (op1) == PLUS
6329 && CONSTANT_P (XEXP (op1, 1)))
6331 rtx constant_term = const0_rtx;
6333 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
6336 /* Ensure that MULT comes first if there is one. */
6337 else if (GET_CODE (op0) == MULT)
6338 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
6340 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
6342 /* Let's also eliminate constants from op0 if possible. */
6343 op0 = eliminate_constant_term (op0, &constant_term);
6345 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
6346 their sum should be a constant. Form it into OP1, since the
6347 result we want will then be OP0 + OP1. */
6349 temp = simplify_binary_operation (PLUS, mode, constant_term,
6354 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
6357 /* Put a constant term last and put a multiplication first. */
6358 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
6359 temp = op1, op1 = op0, op0 = temp;
6361 temp = simplify_binary_operation (PLUS, mode, op0, op1);
6362 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
6365 /* For initializers, we are allowed to return a MINUS of two
6366 symbolic constants. Here we handle all cases when both operands
6368 /* Handle difference of two symbolic constants,
6369 for the sake of an initializer. */
6370 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6371 && really_constant_p (TREE_OPERAND (exp, 0))
6372 && really_constant_p (TREE_OPERAND (exp, 1)))
6374 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
6375 VOIDmode, ro_modifier);
6376 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6377 VOIDmode, ro_modifier);
6379 /* If the last operand is a CONST_INT, use plus_constant of
6380 the negated constant. Else make the MINUS. */
6381 if (GET_CODE (op1) == CONST_INT)
6382 return plus_constant (op0, - INTVAL (op1));
6384 return gen_rtx_MINUS (mode, op0, op1);
6386 /* Convert A - const to A + (-const). */
6387 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6389 tree negated = fold (build1 (NEGATE_EXPR, type,
6390 TREE_OPERAND (exp, 1)));
6392 /* Deal with the case where we can't negate the constant
6394 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6396 tree newtype = signed_type (type);
6397 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
6398 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
6399 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
6401 if (! TREE_OVERFLOW (newneg))
6402 return expand_expr (convert (type,
6403 build (PLUS_EXPR, newtype,
6405 target, tmode, ro_modifier);
6409 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
6413 this_optab = sub_optab;
6417 preexpand_calls (exp);
6418 /* If first operand is constant, swap them.
6419 Thus the following special case checks need only
6420 check the second operand. */
6421 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6423 register tree t1 = TREE_OPERAND (exp, 0);
6424 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
6425 TREE_OPERAND (exp, 1) = t1;
6428 /* Attempt to return something suitable for generating an
6429 indexed address, for machines that support that. */
6431 if (modifier == EXPAND_SUM && mode == ptr_mode
6432 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6433 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
6435 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6438 /* Apply distributive law if OP0 is x+c. */
6439 if (GET_CODE (op0) == PLUS
6440 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
6441 return gen_rtx_PLUS (mode,
6442 gen_rtx_MULT (mode, XEXP (op0, 0),
6443 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
6444 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
6445 * INTVAL (XEXP (op0, 1))));
6447 if (GET_CODE (op0) != REG)
6448 op0 = force_operand (op0, NULL_RTX);
6449 if (GET_CODE (op0) != REG)
6450 op0 = copy_to_mode_reg (mode, op0);
6452 return gen_rtx_MULT (mode, op0,
6453 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
6456 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6459 /* Check for multiplying things that have been extended
6460 from a narrower type. If this machine supports multiplying
6461 in that narrower type with a result in the desired type,
6462 do it that way, and avoid the explicit type-conversion. */
6463 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
6464 && TREE_CODE (type) == INTEGER_TYPE
6465 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6466 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
6467 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6468 && int_fits_type_p (TREE_OPERAND (exp, 1),
6469 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6470 /* Don't use a widening multiply if a shift will do. */
6471 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
6472 > HOST_BITS_PER_WIDE_INT)
6473 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
6475 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6476 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6478 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
6479 /* If both operands are extended, they must either both
6480 be zero-extended or both be sign-extended. */
6481 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6483 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
6485 enum machine_mode innermode
6486 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
6487 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6488 ? smul_widen_optab : umul_widen_optab);
6489 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6490 ? umul_widen_optab : smul_widen_optab);
6491 if (mode == GET_MODE_WIDER_MODE (innermode))
6493 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
6495 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6496 NULL_RTX, VOIDmode, 0);
6497 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6498 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6501 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6502 NULL_RTX, VOIDmode, 0);
6505 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6506 && innermode == word_mode)
6509 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6510 NULL_RTX, VOIDmode, 0);
6511 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6512 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6515 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6516 NULL_RTX, VOIDmode, 0);
6517 temp = expand_binop (mode, other_optab, op0, op1, target,
6518 unsignedp, OPTAB_LIB_WIDEN);
6519 htem = expand_mult_highpart_adjust (innermode,
6520 gen_highpart (innermode, temp),
6522 gen_highpart (innermode, temp),
6524 emit_move_insn (gen_highpart (innermode, temp), htem);
6529 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6530 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6531 return expand_mult (mode, op0, op1, target, unsignedp);
6533 case TRUNC_DIV_EXPR:
6534 case FLOOR_DIV_EXPR:
6536 case ROUND_DIV_EXPR:
6537 case EXACT_DIV_EXPR:
6538 preexpand_calls (exp);
6539 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6541 /* Possible optimization: compute the dividend with EXPAND_SUM
6542 then if the divisor is constant can optimize the case
6543 where some terms of the dividend have coeffs divisible by it. */
6544 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6545 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6546 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
6549 this_optab = flodiv_optab;
6552 case TRUNC_MOD_EXPR:
6553 case FLOOR_MOD_EXPR:
6555 case ROUND_MOD_EXPR:
6556 preexpand_calls (exp);
6557 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6559 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6560 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6561 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
6563 case FIX_ROUND_EXPR:
6564 case FIX_FLOOR_EXPR:
6566 abort (); /* Not used for C. */
6568 case FIX_TRUNC_EXPR:
6569 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6571 target = gen_reg_rtx (mode);
6572 expand_fix (target, op0, unsignedp);
6576 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6578 target = gen_reg_rtx (mode);
6579 /* expand_float can't figure out what to do if FROM has VOIDmode.
6580 So give it the correct mode. With -O, cse will optimize this. */
6581 if (GET_MODE (op0) == VOIDmode)
6582 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6584 expand_float (target, op0,
6585 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6589 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6590 temp = expand_unop (mode, neg_optab, op0, target, 0);
6596 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6598 /* Handle complex values specially. */
6599 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
6600 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6601 return expand_complex_abs (mode, op0, target, unsignedp);
6603 /* Unsigned abs is simply the operand. Testing here means we don't
6604 risk generating incorrect code below. */
6605 if (TREE_UNSIGNED (type))
6608 return expand_abs (mode, op0, target, unsignedp,
6609 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
6613 target = original_target;
6614 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
6615 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
6616 || GET_MODE (target) != mode
6617 || (GET_CODE (target) == REG
6618 && REGNO (target) < FIRST_PSEUDO_REGISTER))
6619 target = gen_reg_rtx (mode);
6620 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6621 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6623 /* First try to do it with a special MIN or MAX instruction.
6624 If that does not win, use a conditional jump to select the proper
6626 this_optab = (TREE_UNSIGNED (type)
6627 ? (code == MIN_EXPR ? umin_optab : umax_optab)
6628 : (code == MIN_EXPR ? smin_optab : smax_optab));
6630 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
6635 /* At this point, a MEM target is no longer useful; we will get better
6638 if (GET_CODE (target) == MEM)
6639 target = gen_reg_rtx (mode);
6642 emit_move_insn (target, op0);
6644 op0 = gen_label_rtx ();
6646 /* If this mode is an integer too wide to compare properly,
6647 compare word by word. Rely on cse to optimize constant cases. */
6648 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
6650 if (code == MAX_EXPR)
6651 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6652 target, op1, NULL_RTX, op0);
6654 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6655 op1, target, NULL_RTX, op0);
6656 emit_move_insn (target, op1);
6660 if (code == MAX_EXPR)
6661 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6662 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
6663 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
6665 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6666 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
6667 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
6668 if (temp == const0_rtx)
6669 emit_move_insn (target, op1);
6670 else if (temp != const_true_rtx)
6672 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
6673 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
6676 emit_move_insn (target, op1);
6683 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6684 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
6690 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6691 temp = expand_unop (mode, ffs_optab, op0, target, 1);
6696 /* ??? Can optimize bitwise operations with one arg constant.
6697 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6698 and (a bitwise1 b) bitwise2 b (etc)
6699 but that is probably not worth while. */
6701 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6702 boolean values when we want in all cases to compute both of them. In
6703 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6704 as actual zero-or-1 values and then bitwise anding. In cases where
6705 there cannot be any side effects, better code would be made by
6706 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6707 how to recognize those cases. */
6709 case TRUTH_AND_EXPR:
6711 this_optab = and_optab;
6716 this_optab = ior_optab;
6719 case TRUTH_XOR_EXPR:
6721 this_optab = xor_optab;
6728 preexpand_calls (exp);
6729 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6731 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6732 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
6735 /* Could determine the answer when only additive constants differ. Also,
6736 the addition of one can be handled by changing the condition. */
6743 preexpand_calls (exp);
6744 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
6748 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
6749 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
6751 && GET_CODE (original_target) == REG
6752 && (GET_MODE (original_target)
6753 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6755 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
6758 if (temp != original_target)
6759 temp = copy_to_reg (temp);
6761 op1 = gen_label_rtx ();
6762 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
6763 GET_MODE (temp), unsignedp, 0);
6764 emit_jump_insn (gen_beq (op1));
6765 emit_move_insn (temp, const1_rtx);
6770 /* If no set-flag instruction, must generate a conditional
6771 store into a temporary variable. Drop through
6772 and handle this like && and ||. */
6774 case TRUTH_ANDIF_EXPR:
6775 case TRUTH_ORIF_EXPR:
6777 && (target == 0 || ! safe_from_p (target, exp, 1)
6778 /* Make sure we don't have a hard reg (such as function's return
6779 value) live across basic blocks, if not optimizing. */
6780 || (!optimize && GET_CODE (target) == REG
6781 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
6782 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6785 emit_clr_insn (target);
6787 op1 = gen_label_rtx ();
6788 jumpifnot (exp, op1);
6791 emit_0_to_1_insn (target);
6794 return ignore ? const0_rtx : target;
6796 case TRUTH_NOT_EXPR:
6797 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6798 /* The parser is careful to generate TRUTH_NOT_EXPR
6799 only with operands that are always zero or one. */
6800 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
6801 target, 1, OPTAB_LIB_WIDEN);
6807 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6809 return expand_expr (TREE_OPERAND (exp, 1),
6810 (ignore ? const0_rtx : target),
6814 /* If we would have a "singleton" (see below) were it not for a
6815 conversion in each arm, bring that conversion back out. */
6816 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6817 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
6818 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
6819 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
6821 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
6822 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
6824 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
6825 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6826 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
6827 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
6828 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
6829 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6830 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
6831 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
6832 return expand_expr (build1 (NOP_EXPR, type,
6833 build (COND_EXPR, TREE_TYPE (true),
6834 TREE_OPERAND (exp, 0),
6836 target, tmode, modifier);
6840 /* Note that COND_EXPRs whose type is a structure or union
6841 are required to be constructed to contain assignments of
6842 a temporary variable, so that we can evaluate them here
6843 for side effect only. If type is void, we must do likewise. */
6845 /* If an arm of the branch requires a cleanup,
6846 only that cleanup is performed. */
6849 tree binary_op = 0, unary_op = 0;
6851 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
6852 convert it to our mode, if necessary. */
6853 if (integer_onep (TREE_OPERAND (exp, 1))
6854 && integer_zerop (TREE_OPERAND (exp, 2))
6855 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6859 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6864 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
6865 if (GET_MODE (op0) == mode)
6869 target = gen_reg_rtx (mode);
6870 convert_move (target, op0, unsignedp);
6874 /* Check for X ? A + B : A. If we have this, we can copy A to the
6875 output and conditionally add B. Similarly for unary operations.
6876 Don't do this if X has side-effects because those side effects
6877 might affect A or B and the "?" operation is a sequence point in
6878 ANSI. (operand_equal_p tests for side effects.) */
6880 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
6881 && operand_equal_p (TREE_OPERAND (exp, 2),
6882 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6883 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
6884 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
6885 && operand_equal_p (TREE_OPERAND (exp, 1),
6886 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6887 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
6888 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
6889 && operand_equal_p (TREE_OPERAND (exp, 2),
6890 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6891 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
6892 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
6893 && operand_equal_p (TREE_OPERAND (exp, 1),
6894 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6895 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
6897 /* If we are not to produce a result, we have no target. Otherwise,
6898 if a target was specified use it; it will not be used as an
6899 intermediate target unless it is safe. If no target, use a
6904 else if (original_target
6905 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
6906 || (singleton && GET_CODE (original_target) == REG
6907 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
6908 && original_target == var_rtx (singleton)))
6909 && GET_MODE (original_target) == mode
6910 #ifdef HAVE_conditional_move
6911 && (! can_conditionally_move_p (mode)
6912 || GET_CODE (original_target) == REG
6913 || TREE_ADDRESSABLE (type))
6915 && ! (GET_CODE (original_target) == MEM
6916 && MEM_VOLATILE_P (original_target)))
6917 temp = original_target;
6918 else if (TREE_ADDRESSABLE (type))
6921 temp = assign_temp (type, 0, 0, 1);
6923 /* If we had X ? A + C : A, with C a constant power of 2, and we can
6924 do the test of X as a store-flag operation, do this as
6925 A + ((X != 0) << log C). Similarly for other simple binary
6926 operators. Only do for C == 1 if BRANCH_COST is low. */
6927 if (temp && singleton && binary_op
6928 && (TREE_CODE (binary_op) == PLUS_EXPR
6929 || TREE_CODE (binary_op) == MINUS_EXPR
6930 || TREE_CODE (binary_op) == BIT_IOR_EXPR
6931 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
6932 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
6933 : integer_onep (TREE_OPERAND (binary_op, 1)))
6934 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6937 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
6938 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
6939 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
6942 /* If we had X ? A : A + 1, do this as A + (X == 0).
6944 We have to invert the truth value here and then put it
6945 back later if do_store_flag fails. We cannot simply copy
6946 TREE_OPERAND (exp, 0) to another variable and modify that
6947 because invert_truthvalue can modify the tree pointed to
6949 if (singleton == TREE_OPERAND (exp, 1))
6950 TREE_OPERAND (exp, 0)
6951 = invert_truthvalue (TREE_OPERAND (exp, 0));
6953 result = do_store_flag (TREE_OPERAND (exp, 0),
6954 (safe_from_p (temp, singleton, 1)
6956 mode, BRANCH_COST <= 1);
6958 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
6959 result = expand_shift (LSHIFT_EXPR, mode, result,
6960 build_int_2 (tree_log2
6964 (safe_from_p (temp, singleton, 1)
6965 ? temp : NULL_RTX), 0);
6969 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
6970 return expand_binop (mode, boptab, op1, result, temp,
6971 unsignedp, OPTAB_LIB_WIDEN);
6973 else if (singleton == TREE_OPERAND (exp, 1))
6974 TREE_OPERAND (exp, 0)
6975 = invert_truthvalue (TREE_OPERAND (exp, 0));
6978 do_pending_stack_adjust ();
6980 op0 = gen_label_rtx ();
6982 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
6986 /* If the target conflicts with the other operand of the
6987 binary op, we can't use it. Also, we can't use the target
6988 if it is a hard register, because evaluating the condition
6989 might clobber it. */
6991 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
6992 || (GET_CODE (temp) == REG
6993 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
6994 temp = gen_reg_rtx (mode);
6995 store_expr (singleton, temp, 0);
6998 expand_expr (singleton,
6999 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7000 if (singleton == TREE_OPERAND (exp, 1))
7001 jumpif (TREE_OPERAND (exp, 0), op0);
7003 jumpifnot (TREE_OPERAND (exp, 0), op0);
7005 start_cleanup_deferral ();
7006 if (binary_op && temp == 0)
7007 /* Just touch the other operand. */
7008 expand_expr (TREE_OPERAND (binary_op, 1),
7009 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7011 store_expr (build (TREE_CODE (binary_op), type,
7012 make_tree (type, temp),
7013 TREE_OPERAND (binary_op, 1)),
7016 store_expr (build1 (TREE_CODE (unary_op), type,
7017 make_tree (type, temp)),
7021 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7022 comparison operator. If we have one of these cases, set the
7023 output to A, branch on A (cse will merge these two references),
7024 then set the output to FOO. */
7026 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7027 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7028 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7029 TREE_OPERAND (exp, 1), 0)
7030 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7031 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7032 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7034 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7035 temp = gen_reg_rtx (mode);
7036 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7037 jumpif (TREE_OPERAND (exp, 0), op0);
7039 start_cleanup_deferral ();
7040 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7044 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7045 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7046 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7047 TREE_OPERAND (exp, 2), 0)
7048 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7049 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7050 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7052 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7053 temp = gen_reg_rtx (mode);
7054 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7055 jumpifnot (TREE_OPERAND (exp, 0), op0);
7057 start_cleanup_deferral ();
7058 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7063 op1 = gen_label_rtx ();
7064 jumpifnot (TREE_OPERAND (exp, 0), op0);
7066 start_cleanup_deferral ();
7068 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7070 expand_expr (TREE_OPERAND (exp, 1),
7071 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7072 end_cleanup_deferral ();
7074 emit_jump_insn (gen_jump (op1));
7077 start_cleanup_deferral ();
7079 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7081 expand_expr (TREE_OPERAND (exp, 2),
7082 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7085 end_cleanup_deferral ();
7096 /* Something needs to be initialized, but we didn't know
7097 where that thing was when building the tree. For example,
7098 it could be the return value of a function, or a parameter
7099 to a function which lays down in the stack, or a temporary
7100 variable which must be passed by reference.
7102 We guarantee that the expression will either be constructed
7103 or copied into our original target. */
7105 tree slot = TREE_OPERAND (exp, 0);
7106 tree cleanups = NULL_TREE;
7109 if (TREE_CODE (slot) != VAR_DECL)
7113 target = original_target;
7117 if (DECL_RTL (slot) != 0)
7119 target = DECL_RTL (slot);
7120 /* If we have already expanded the slot, so don't do
7122 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7127 target = assign_temp (type, 2, 0, 1);
7128 /* All temp slots at this level must not conflict. */
7129 preserve_temp_slots (target);
7130 DECL_RTL (slot) = target;
7131 if (TREE_ADDRESSABLE (slot))
7133 TREE_ADDRESSABLE (slot) = 0;
7134 mark_addressable (slot);
7137 /* Since SLOT is not known to the called function
7138 to belong to its stack frame, we must build an explicit
7139 cleanup. This case occurs when we must build up a reference
7140 to pass the reference as an argument. In this case,
7141 it is very likely that such a reference need not be
7144 if (TREE_OPERAND (exp, 2) == 0)
7145 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
7146 cleanups = TREE_OPERAND (exp, 2);
7151 /* This case does occur, when expanding a parameter which
7152 needs to be constructed on the stack. The target
7153 is the actual stack address that we want to initialize.
7154 The function we call will perform the cleanup in this case. */
7156 /* If we have already assigned it space, use that space,
7157 not target that we were passed in, as our target
7158 parameter is only a hint. */
7159 if (DECL_RTL (slot) != 0)
7161 target = DECL_RTL (slot);
7162 /* If we have already expanded the slot, so don't do
7164 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7169 DECL_RTL (slot) = target;
7170 /* If we must have an addressable slot, then make sure that
7171 the RTL that we just stored in slot is OK. */
7172 if (TREE_ADDRESSABLE (slot))
7174 TREE_ADDRESSABLE (slot) = 0;
7175 mark_addressable (slot);
7180 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
7181 /* Mark it as expanded. */
7182 TREE_OPERAND (exp, 1) = NULL_TREE;
7184 TREE_USED (slot) = 1;
7185 store_expr (exp1, target, 0);
7187 expand_decl_cleanup (NULL_TREE, cleanups);
7194 tree lhs = TREE_OPERAND (exp, 0);
7195 tree rhs = TREE_OPERAND (exp, 1);
7196 tree noncopied_parts = 0;
7197 tree lhs_type = TREE_TYPE (lhs);
7199 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7200 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7201 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7202 TYPE_NONCOPIED_PARTS (lhs_type));
7203 while (noncopied_parts != 0)
7205 expand_assignment (TREE_VALUE (noncopied_parts),
7206 TREE_PURPOSE (noncopied_parts), 0, 0);
7207 noncopied_parts = TREE_CHAIN (noncopied_parts);
7214 /* If lhs is complex, expand calls in rhs before computing it.
7215 That's so we don't compute a pointer and save it over a call.
7216 If lhs is simple, compute it first so we can give it as a
7217 target if the rhs is just a call. This avoids an extra temp and copy
7218 and that prevents a partial-subsumption which makes bad code.
7219 Actually we could treat component_ref's of vars like vars. */
7221 tree lhs = TREE_OPERAND (exp, 0);
7222 tree rhs = TREE_OPERAND (exp, 1);
7223 tree noncopied_parts = 0;
7224 tree lhs_type = TREE_TYPE (lhs);
7228 if (TREE_CODE (lhs) != VAR_DECL
7229 && TREE_CODE (lhs) != RESULT_DECL
7230 && TREE_CODE (lhs) != PARM_DECL
7231 && ! (TREE_CODE (lhs) == INDIRECT_REF
7232 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
7233 preexpand_calls (exp);
7235 /* Check for |= or &= of a bitfield of size one into another bitfield
7236 of size 1. In this case, (unless we need the result of the
7237 assignment) we can do this more efficiently with a
7238 test followed by an assignment, if necessary.
7240 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7241 things change so we do, this code should be enhanced to
7244 && TREE_CODE (lhs) == COMPONENT_REF
7245 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7246 || TREE_CODE (rhs) == BIT_AND_EXPR)
7247 && TREE_OPERAND (rhs, 0) == lhs
7248 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7249 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7250 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7252 rtx label = gen_label_rtx ();
7254 do_jump (TREE_OPERAND (rhs, 1),
7255 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7256 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7257 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7258 (TREE_CODE (rhs) == BIT_IOR_EXPR
7260 : integer_zero_node)),
7262 do_pending_stack_adjust ();
7267 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7268 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7269 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7270 TYPE_NONCOPIED_PARTS (lhs_type));
7272 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7273 while (noncopied_parts != 0)
7275 expand_assignment (TREE_PURPOSE (noncopied_parts),
7276 TREE_VALUE (noncopied_parts), 0, 0);
7277 noncopied_parts = TREE_CHAIN (noncopied_parts);
7282 case PREINCREMENT_EXPR:
7283 case PREDECREMENT_EXPR:
7284 return expand_increment (exp, 0, ignore);
7286 case POSTINCREMENT_EXPR:
7287 case POSTDECREMENT_EXPR:
7288 /* Faster to treat as pre-increment if result is not used. */
7289 return expand_increment (exp, ! ignore, ignore);
7292 /* If nonzero, TEMP will be set to the address of something that might
7293 be a MEM corresponding to a stack slot. */
7296 /* Are we taking the address of a nested function? */
7297 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
7298 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
7299 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
7300 && ! TREE_STATIC (exp))
7302 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7303 op0 = force_operand (op0, target);
7305 /* If we are taking the address of something erroneous, just
7307 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7311 /* We make sure to pass const0_rtx down if we came in with
7312 ignore set, to avoid doing the cleanups twice for something. */
7313 op0 = expand_expr (TREE_OPERAND (exp, 0),
7314 ignore ? const0_rtx : NULL_RTX, VOIDmode,
7315 (modifier == EXPAND_INITIALIZER
7316 ? modifier : EXPAND_CONST_ADDRESS));
7318 /* If we are going to ignore the result, OP0 will have been set
7319 to const0_rtx, so just return it. Don't get confused and
7320 think we are taking the address of the constant. */
7324 op0 = protect_from_queue (op0, 0);
7326 /* We would like the object in memory. If it is a constant,
7327 we can have it be statically allocated into memory. For
7328 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
7329 memory and store the value into it. */
7331 if (CONSTANT_P (op0))
7332 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7334 else if (GET_CODE (op0) == MEM)
7336 mark_temp_addr_taken (op0);
7337 temp = XEXP (op0, 0);
7340 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7341 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7343 /* If this object is in a register, it must be not
7345 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7346 rtx memloc = assign_temp (inner_type, 1, 1, 1);
7348 mark_temp_addr_taken (memloc);
7349 emit_move_insn (memloc, op0);
7353 if (GET_CODE (op0) != MEM)
7356 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7358 temp = XEXP (op0, 0);
7359 #ifdef POINTERS_EXTEND_UNSIGNED
7360 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7361 && mode == ptr_mode)
7362 temp = convert_memory_address (ptr_mode, temp);
7367 op0 = force_operand (XEXP (op0, 0), target);
7370 if (flag_force_addr && GET_CODE (op0) != REG)
7371 op0 = force_reg (Pmode, op0);
7373 if (GET_CODE (op0) == REG
7374 && ! REG_USERVAR_P (op0))
7375 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
7377 /* If we might have had a temp slot, add an equivalent address
7380 update_temp_slot_address (temp, op0);
7382 #ifdef POINTERS_EXTEND_UNSIGNED
7383 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7384 && mode == ptr_mode)
7385 op0 = convert_memory_address (ptr_mode, op0);
7390 case ENTRY_VALUE_EXPR:
7393 /* COMPLEX type for Extended Pascal & Fortran */
7396 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7399 /* Get the rtx code of the operands. */
7400 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7401 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7404 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7408 /* Move the real (op0) and imaginary (op1) parts to their location. */
7409 emit_move_insn (gen_realpart (mode, target), op0);
7410 emit_move_insn (gen_imagpart (mode, target), op1);
7412 insns = get_insns ();
7415 /* Complex construction should appear as a single unit. */
7416 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7417 each with a separate pseudo as destination.
7418 It's not correct for flow to treat them as a unit. */
7419 if (GET_CODE (target) != CONCAT)
7420 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
7428 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7429 return gen_realpart (mode, op0);
7432 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7433 return gen_imagpart (mode, op0);
7437 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7441 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7444 target = gen_reg_rtx (mode);
7448 /* Store the realpart and the negated imagpart to target. */
7449 emit_move_insn (gen_realpart (partmode, target),
7450 gen_realpart (partmode, op0));
7452 imag_t = gen_imagpart (partmode, target);
7453 temp = expand_unop (partmode, neg_optab,
7454 gen_imagpart (partmode, op0), imag_t, 0);
7456 emit_move_insn (imag_t, temp);
7458 insns = get_insns ();
7461 /* Conjugate should appear as a single unit
7462 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
7463 each with a separate pseudo as destination.
7464 It's not correct for flow to treat them as a unit. */
7465 if (GET_CODE (target) != CONCAT)
7466 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
7473 case TRY_CATCH_EXPR:
7475 tree handler = TREE_OPERAND (exp, 1);
7477 expand_eh_region_start ();
7479 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7481 expand_eh_region_end (handler);
7488 rtx dcc = get_dynamic_cleanup_chain ();
7489 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
7495 rtx dhc = get_dynamic_handler_chain ();
7496 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
7501 op0 = CONST0_RTX (tmode);
7507 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7510 /* Here to do an ordinary binary operator, generating an instruction
7511 from the optab already placed in `this_optab'. */
7513 preexpand_calls (exp);
7514 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7516 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7517 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7519 temp = expand_binop (mode, this_optab, op0, op1, target,
7520 unsignedp, OPTAB_LIB_WIDEN);
7528 /* Return the alignment in bits of EXP, a pointer valued expression.
7529 But don't return more than MAX_ALIGN no matter what.
7530 The alignment returned is, by default, the alignment of the thing that
7531 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7533 Otherwise, look at the expression to see if we can do better, i.e., if the
7534 expression is actually pointing at an object whose alignment is tighter. */
7537 get_pointer_alignment (exp, max_align)
7541 unsigned align, inner;
7543 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7546 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7547 align = MIN (align, max_align);
7551 switch (TREE_CODE (exp))
7555 case NON_LVALUE_EXPR:
7556 exp = TREE_OPERAND (exp, 0);
7557 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7559 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7560 align = MIN (inner, max_align);
7564 /* If sum of pointer + int, restrict our maximum alignment to that
7565 imposed by the integer. If not, we can't do any better than
7567 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
7570 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
7575 exp = TREE_OPERAND (exp, 0);
7579 /* See what we are pointing at and look at its alignment. */
7580 exp = TREE_OPERAND (exp, 0);
7581 if (TREE_CODE (exp) == FUNCTION_DECL)
7582 align = FUNCTION_BOUNDARY;
7583 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
7584 align = DECL_ALIGN (exp);
7585 #ifdef CONSTANT_ALIGNMENT
7586 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7587 align = CONSTANT_ALIGNMENT (exp, align);
7589 return MIN (align, max_align);
7597 /* Return the tree node and offset if a given argument corresponds to
7598 a string constant. */
7601 string_constant (arg, ptr_offset)
7607 if (TREE_CODE (arg) == ADDR_EXPR
7608 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7610 *ptr_offset = integer_zero_node;
7611 return TREE_OPERAND (arg, 0);
7613 else if (TREE_CODE (arg) == PLUS_EXPR)
7615 tree arg0 = TREE_OPERAND (arg, 0);
7616 tree arg1 = TREE_OPERAND (arg, 1);
7621 if (TREE_CODE (arg0) == ADDR_EXPR
7622 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
7625 return TREE_OPERAND (arg0, 0);
7627 else if (TREE_CODE (arg1) == ADDR_EXPR
7628 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
7631 return TREE_OPERAND (arg1, 0);
7638 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
7639 way, because it could contain a zero byte in the middle.
7640 TREE_STRING_LENGTH is the size of the character array, not the string.
7642 Unfortunately, string_constant can't access the values of const char
7643 arrays with initializers, so neither can we do so here. */
7653 src = string_constant (src, &offset_node);
7656 max = TREE_STRING_LENGTH (src);
7657 ptr = TREE_STRING_POINTER (src);
7658 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
7660 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7661 compute the offset to the following null if we don't know where to
7662 start searching for it. */
7664 for (i = 0; i < max; i++)
7667 /* We don't know the starting offset, but we do know that the string
7668 has no internal zero bytes. We can assume that the offset falls
7669 within the bounds of the string; otherwise, the programmer deserves
7670 what he gets. Subtract the offset from the length of the string,
7672 /* This would perhaps not be valid if we were dealing with named
7673 arrays in addition to literal string constants. */
7674 return size_binop (MINUS_EXPR, size_int (max), offset_node);
7677 /* We have a known offset into the string. Start searching there for
7678 a null character. */
7679 if (offset_node == 0)
7683 /* Did we get a long long offset? If so, punt. */
7684 if (TREE_INT_CST_HIGH (offset_node) != 0)
7686 offset = TREE_INT_CST_LOW (offset_node);
7688 /* If the offset is known to be out of bounds, warn, and call strlen at
7690 if (offset < 0 || offset > max)
7692 warning ("offset outside bounds of constant string");
7695 /* Use strlen to search for the first zero byte. Since any strings
7696 constructed with build_string will have nulls appended, we win even
7697 if we get handed something like (char[4])"abcd".
7699 Since OFFSET is our starting index into the string, no further
7700 calculation is needed. */
7701 return size_int (strlen (ptr + offset));
7705 expand_builtin_return_addr (fndecl_code, count, tem)
7706 enum built_in_function fndecl_code;
7712 /* Some machines need special handling before we can access
7713 arbitrary frames. For example, on the sparc, we must first flush
7714 all register windows to the stack. */
7715 #ifdef SETUP_FRAME_ADDRESSES
7717 SETUP_FRAME_ADDRESSES ();
7720 /* On the sparc, the return address is not in the frame, it is in a
7721 register. There is no way to access it off of the current frame
7722 pointer, but it can be accessed off the previous frame pointer by
7723 reading the value from the register window save area. */
7724 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7725 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
7729 /* Scan back COUNT frames to the specified frame. */
7730 for (i = 0; i < count; i++)
7732 /* Assume the dynamic chain pointer is in the word that the
7733 frame address points to, unless otherwise specified. */
7734 #ifdef DYNAMIC_CHAIN_ADDRESS
7735 tem = DYNAMIC_CHAIN_ADDRESS (tem);
7737 tem = memory_address (Pmode, tem);
7738 tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
7741 /* For __builtin_frame_address, return what we've got. */
7742 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
7745 /* For __builtin_return_address, Get the return address from that
7747 #ifdef RETURN_ADDR_RTX
7748 tem = RETURN_ADDR_RTX (count, tem);
7750 tem = memory_address (Pmode,
7751 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7752 tem = gen_rtx_MEM (Pmode, tem);
7757 /* __builtin_setjmp is passed a pointer to an array of five words (not
7758 all will be used on all machines). It operates similarly to the C
7759 library function of the same name, but is more efficient. Much of
7760 the code below (and for longjmp) is copied from the handling of
7763 NOTE: This is intended for use by GNAT and the exception handling
7764 scheme in the compiler and will only work in the method used by
7768 expand_builtin_setjmp (buf_addr, target, first_label, next_label)
7771 rtx first_label, next_label;
7773 rtx lab1 = gen_label_rtx ();
7774 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
7775 enum machine_mode value_mode;
7778 value_mode = TYPE_MODE (integer_type_node);
7780 #ifdef POINTERS_EXTEND_UNSIGNED
7781 buf_addr = convert_memory_address (Pmode, buf_addr);
7784 buf_addr = force_reg (Pmode, buf_addr);
7786 if (target == 0 || GET_CODE (target) != REG
7787 || REGNO (target) < FIRST_PSEUDO_REGISTER)
7788 target = gen_reg_rtx (value_mode);
7792 #ifndef BUILTIN_SETJMP_FRAME_VALUE
7793 #define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
7796 /* We store the frame pointer and the address of lab1 in the buffer
7797 and use the rest of it for the stack save area, which is
7798 machine-dependent. */
7799 emit_move_insn (gen_rtx_MEM (Pmode, buf_addr),
7800 BUILTIN_SETJMP_FRAME_VALUE);
7801 emit_move_insn (validize_mem
7802 (gen_rtx_MEM (Pmode,
7803 plus_constant (buf_addr,
7804 GET_MODE_SIZE (Pmode)))),
7805 gen_rtx_LABEL_REF (Pmode, lab1));
7807 stack_save = gen_rtx_MEM (sa_mode,
7808 plus_constant (buf_addr,
7809 2 * GET_MODE_SIZE (Pmode)));
7810 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
7812 /* If there is further processing to do, do it. */
7813 #ifdef HAVE_builtin_setjmp_setup
7814 if (HAVE_builtin_setjmp_setup)
7815 emit_insn (gen_builtin_setjmp_setup (buf_addr));
7818 /* Set TARGET to zero and branch to the first-time-through label. */
7819 emit_move_insn (target, const0_rtx);
7820 emit_jump_insn (gen_jump (first_label));
7824 /* Tell flow about the strange goings on. */
7825 current_function_has_nonlocal_label = 1;
7827 /* Clobber the FP when we get here, so we have to make sure it's
7828 marked as used by this function. */
7829 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
7831 /* Mark the static chain as clobbered here so life information
7832 doesn't get messed up for it. */
7833 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
7835 /* Now put in the code to restore the frame pointer, and argument
7836 pointer, if needed. The code below is from expand_end_bindings
7837 in stmt.c; see detailed documentation there. */
7838 #ifdef HAVE_nonlocal_goto
7839 if (! HAVE_nonlocal_goto)
7841 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
7843 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
7844 if (fixed_regs[ARG_POINTER_REGNUM])
7846 #ifdef ELIMINABLE_REGS
7848 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
7850 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
7851 if (elim_regs[i].from == ARG_POINTER_REGNUM
7852 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
7855 if (i == sizeof elim_regs / sizeof elim_regs [0])
7858 /* Now restore our arg pointer from the address at which it
7859 was saved in our stack frame.
7860 If there hasn't be space allocated for it yet, make
7862 if (arg_pointer_save_area == 0)
7863 arg_pointer_save_area
7864 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
7865 emit_move_insn (virtual_incoming_args_rtx,
7866 copy_to_reg (arg_pointer_save_area));
7871 #ifdef HAVE_builtin_setjmp_receiver
7872 if (HAVE_builtin_setjmp_receiver)
7873 emit_insn (gen_builtin_setjmp_receiver (lab1));
7876 #ifdef HAVE_nonlocal_goto_receiver
7877 if (HAVE_nonlocal_goto_receiver)
7878 emit_insn (gen_nonlocal_goto_receiver ());
7885 /* Set TARGET, and branch to the next-time-through label. */
7886 emit_move_insn (target, gen_lowpart (GET_MODE (target), static_chain_rtx));
7887 emit_jump_insn (gen_jump (next_label));
7894 expand_builtin_longjmp (buf_addr, value)
7895 rtx buf_addr, value;
7898 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
7900 #ifdef POINTERS_EXTEND_UNSIGNED
7901 buf_addr = convert_memory_address (Pmode, buf_addr);
7903 buf_addr = force_reg (Pmode, buf_addr);
7905 /* The value sent by longjmp is not allowed to be zero. Force it
7907 if (GET_CODE (value) == CONST_INT)
7909 if (INTVAL (value) == 0)
7914 lab = gen_label_rtx ();
7916 emit_cmp_insn (value, const0_rtx, NE, NULL_RTX, GET_MODE (value), 0, 0);
7917 emit_jump_insn (gen_bne (lab));
7918 emit_move_insn (value, const1_rtx);
7922 /* Make sure the value is in the right mode to be copied to the chain. */
7923 if (GET_MODE (value) != VOIDmode)
7924 value = gen_lowpart (GET_MODE (static_chain_rtx), value);
7926 #ifdef HAVE_builtin_longjmp
7927 if (HAVE_builtin_longjmp)
7929 /* Copy the "return value" to the static chain reg. */
7930 emit_move_insn (static_chain_rtx, value);
7931 emit_insn (gen_rtx_USE (VOIDmode, static_chain_rtx));
7932 emit_insn (gen_builtin_longjmp (buf_addr));
7937 fp = gen_rtx_MEM (Pmode, buf_addr);
7938 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
7939 GET_MODE_SIZE (Pmode)));
7941 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
7942 2 * GET_MODE_SIZE (Pmode)));
7944 /* Pick up FP, label, and SP from the block and jump. This code is
7945 from expand_goto in stmt.c; see there for detailed comments. */
7946 #if HAVE_nonlocal_goto
7947 if (HAVE_nonlocal_goto)
7948 emit_insn (gen_nonlocal_goto (value, fp, stack, lab));
7952 lab = copy_to_reg (lab);
7954 /* Copy the "return value" to the static chain reg. */
7955 emit_move_insn (static_chain_rtx, value);
7957 emit_move_insn (hard_frame_pointer_rtx, fp);
7958 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
7960 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
7961 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
7962 emit_insn (gen_rtx_USE (VOIDmode, static_chain_rtx));
7963 emit_indirect_jump (lab);
7969 /* Expand an expression EXP that calls a built-in function,
7970 with result going to TARGET if that's convenient
7971 (and in mode MODE if that's convenient).
7972 SUBTARGET may be used as the target for computing one of EXP's operands.
7973 IGNORE is nonzero if the value is to be ignored. */
7975 #define CALLED_AS_BUILT_IN(NODE) \
7976 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
7979 expand_builtin (exp, target, subtarget, mode, ignore)
7983 enum machine_mode mode;
7986 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7987 tree arglist = TREE_OPERAND (exp, 1);
7990 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
7991 optab builtin_optab;
7993 switch (DECL_FUNCTION_CODE (fndecl))
7998 /* build_function_call changes these into ABS_EXPR. */
8003 /* Treat these like sqrt, but only if the user asks for them. */
8004 if (! flag_fast_math)
8006 case BUILT_IN_FSQRT:
8007 /* If not optimizing, call the library function. */
8012 /* Arg could be wrong type if user redeclared this fcn wrong. */
8013 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
8016 /* Stabilize and compute the argument. */
8017 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
8018 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
8020 exp = copy_node (exp);
8021 arglist = copy_node (arglist);
8022 TREE_OPERAND (exp, 1) = arglist;
8023 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
8025 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8027 /* Make a suitable register to place result in. */
8028 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8033 switch (DECL_FUNCTION_CODE (fndecl))
8036 builtin_optab = sin_optab; break;
8038 builtin_optab = cos_optab; break;
8039 case BUILT_IN_FSQRT:
8040 builtin_optab = sqrt_optab; break;
8045 /* Compute into TARGET.
8046 Set TARGET to wherever the result comes back. */
8047 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8048 builtin_optab, op0, target, 0);
8050 /* If we were unable to expand via the builtin, stop the
8051 sequence (without outputting the insns) and break, causing
8052 a call to the library function. */
8059 /* Check the results by default. But if flag_fast_math is turned on,
8060 then assume sqrt will always be called with valid arguments. */
8062 if (! flag_fast_math)
8064 /* Don't define the builtin FP instructions
8065 if your machine is not IEEE. */
8066 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8069 lab1 = gen_label_rtx ();
8071 /* Test the result; if it is NaN, set errno=EDOM because
8072 the argument was not in the domain. */
8073 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
8074 emit_jump_insn (gen_beq (lab1));
8078 #ifdef GEN_ERRNO_RTX
8079 rtx errno_rtx = GEN_ERRNO_RTX;
8082 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
8085 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8088 /* We can't set errno=EDOM directly; let the library call do it.
8089 Pop the arguments right away in case the call gets deleted. */
8091 expand_call (exp, target, 0);
8098 /* Output the entire sequence. */
8099 insns = get_insns ();
8108 /* __builtin_apply_args returns block of memory allocated on
8109 the stack into which is stored the arg pointer, structure
8110 value address, static chain, and all the registers that might
8111 possibly be used in performing a function call. The code is
8112 moved to the start of the function so the incoming values are
8114 case BUILT_IN_APPLY_ARGS:
8115 /* Don't do __builtin_apply_args more than once in a function.
8116 Save the result of the first call and reuse it. */
8117 if (apply_args_value != 0)
8118 return apply_args_value;
8120 /* When this function is called, it means that registers must be
8121 saved on entry to this function. So we migrate the
8122 call to the first insn of this function. */
8127 temp = expand_builtin_apply_args ();
8131 apply_args_value = temp;
8133 /* Put the sequence after the NOTE that starts the function.
8134 If this is inside a SEQUENCE, make the outer-level insn
8135 chain current, so the code is placed at the start of the
8137 push_topmost_sequence ();
8138 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8139 pop_topmost_sequence ();
8143 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8144 FUNCTION with a copy of the parameters described by
8145 ARGUMENTS, and ARGSIZE. It returns a block of memory
8146 allocated on the stack into which is stored all the registers
8147 that might possibly be used for returning the result of a
8148 function. ARGUMENTS is the value returned by
8149 __builtin_apply_args. ARGSIZE is the number of bytes of
8150 arguments that must be copied. ??? How should this value be
8151 computed? We'll also need a safe worst case value for varargs
8153 case BUILT_IN_APPLY:
8155 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8156 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
8157 || TREE_CHAIN (arglist) == 0
8158 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8159 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8160 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8168 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8169 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
8171 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8174 /* __builtin_return (RESULT) causes the function to return the
8175 value described by RESULT. RESULT is address of the block of
8176 memory returned by __builtin_apply. */
8177 case BUILT_IN_RETURN:
8179 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8180 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8181 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8182 NULL_RTX, VOIDmode, 0));
8185 case BUILT_IN_SAVEREGS:
8186 /* Don't do __builtin_saveregs more than once in a function.
8187 Save the result of the first call and reuse it. */
8188 if (saveregs_value != 0)
8189 return saveregs_value;
8191 /* When this function is called, it means that registers must be
8192 saved on entry to this function. So we migrate the
8193 call to the first insn of this function. */
8197 /* Now really call the function. `expand_call' does not call
8198 expand_builtin, so there is no danger of infinite recursion here. */
8201 #ifdef EXPAND_BUILTIN_SAVEREGS
8202 /* Do whatever the machine needs done in this case. */
8203 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8205 /* The register where the function returns its value
8206 is likely to have something else in it, such as an argument.
8207 So preserve that register around the call. */
8209 if (value_mode != VOIDmode)
8211 rtx valreg = hard_libcall_value (value_mode);
8212 rtx saved_valreg = gen_reg_rtx (value_mode);
8214 emit_move_insn (saved_valreg, valreg);
8215 temp = expand_call (exp, target, ignore);
8216 emit_move_insn (valreg, saved_valreg);
8219 /* Generate the call, putting the value in a pseudo. */
8220 temp = expand_call (exp, target, ignore);
8226 saveregs_value = temp;
8228 /* Put the sequence after the NOTE that starts the function.
8229 If this is inside a SEQUENCE, make the outer-level insn
8230 chain current, so the code is placed at the start of the
8232 push_topmost_sequence ();
8233 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8234 pop_topmost_sequence ();
8238 /* __builtin_args_info (N) returns word N of the arg space info
8239 for the current function. The number and meanings of words
8240 is controlled by the definition of CUMULATIVE_ARGS. */
8241 case BUILT_IN_ARGS_INFO:
8243 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
8244 int *word_ptr = (int *) ¤t_function_args_info;
8246 /* These are used by the code below that is if 0'ed away */
8248 tree type, elts, result;
8251 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8252 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8253 __FILE__, __LINE__);
8257 tree arg = TREE_VALUE (arglist);
8258 if (TREE_CODE (arg) != INTEGER_CST)
8259 error ("argument of `__builtin_args_info' must be constant");
8262 int wordnum = TREE_INT_CST_LOW (arg);
8264 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8265 error ("argument of `__builtin_args_info' out of range");
8267 return GEN_INT (word_ptr[wordnum]);
8271 error ("missing argument in `__builtin_args_info'");
8276 for (i = 0; i < nwords; i++)
8277 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
8279 type = build_array_type (integer_type_node,
8280 build_index_type (build_int_2 (nwords, 0)));
8281 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8282 TREE_CONSTANT (result) = 1;
8283 TREE_STATIC (result) = 1;
8284 result = build (INDIRECT_REF, build_pointer_type (type), result);
8285 TREE_CONSTANT (result) = 1;
8286 return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
8290 /* Return the address of the first anonymous stack arg. */
8291 case BUILT_IN_NEXT_ARG:
8293 tree fntype = TREE_TYPE (current_function_decl);
8295 if ((TYPE_ARG_TYPES (fntype) == 0
8296 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8298 && ! current_function_varargs)
8300 error ("`va_start' used in function with fixed args");
8306 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8307 tree arg = TREE_VALUE (arglist);
8309 /* Strip off all nops for the sake of the comparison. This
8310 is not quite the same as STRIP_NOPS. It does more.
8311 We must also strip off INDIRECT_EXPR for C++ reference
8313 while (TREE_CODE (arg) == NOP_EXPR
8314 || TREE_CODE (arg) == CONVERT_EXPR
8315 || TREE_CODE (arg) == NON_LVALUE_EXPR
8316 || TREE_CODE (arg) == INDIRECT_REF)
8317 arg = TREE_OPERAND (arg, 0);
8318 if (arg != last_parm)
8319 warning ("second parameter of `va_start' not last named argument");
8321 else if (! current_function_varargs)
8322 /* Evidently an out of date version of <stdarg.h>; can't validate
8323 va_start's second argument, but can still work as intended. */
8324 warning ("`__builtin_next_arg' called without an argument");
8327 return expand_binop (Pmode, add_optab,
8328 current_function_internal_arg_pointer,
8329 current_function_arg_offset_rtx,
8330 NULL_RTX, 0, OPTAB_LIB_WIDEN);
8332 case BUILT_IN_CLASSIFY_TYPE:
8335 tree type = TREE_TYPE (TREE_VALUE (arglist));
8336 enum tree_code code = TREE_CODE (type);
8337 if (code == VOID_TYPE)
8338 return GEN_INT (void_type_class);
8339 if (code == INTEGER_TYPE)
8340 return GEN_INT (integer_type_class);
8341 if (code == CHAR_TYPE)
8342 return GEN_INT (char_type_class);
8343 if (code == ENUMERAL_TYPE)
8344 return GEN_INT (enumeral_type_class);
8345 if (code == BOOLEAN_TYPE)
8346 return GEN_INT (boolean_type_class);
8347 if (code == POINTER_TYPE)
8348 return GEN_INT (pointer_type_class);
8349 if (code == REFERENCE_TYPE)
8350 return GEN_INT (reference_type_class);
8351 if (code == OFFSET_TYPE)
8352 return GEN_INT (offset_type_class);
8353 if (code == REAL_TYPE)
8354 return GEN_INT (real_type_class);
8355 if (code == COMPLEX_TYPE)
8356 return GEN_INT (complex_type_class);
8357 if (code == FUNCTION_TYPE)
8358 return GEN_INT (function_type_class);
8359 if (code == METHOD_TYPE)
8360 return GEN_INT (method_type_class);
8361 if (code == RECORD_TYPE)
8362 return GEN_INT (record_type_class);
8363 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8364 return GEN_INT (union_type_class);
8365 if (code == ARRAY_TYPE)
8367 if (TYPE_STRING_FLAG (type))
8368 return GEN_INT (string_type_class);
8370 return GEN_INT (array_type_class);
8372 if (code == SET_TYPE)
8373 return GEN_INT (set_type_class);
8374 if (code == FILE_TYPE)
8375 return GEN_INT (file_type_class);
8376 if (code == LANG_TYPE)
8377 return GEN_INT (lang_type_class);
8379 return GEN_INT (no_type_class);
8381 case BUILT_IN_CONSTANT_P:
8386 tree arg = TREE_VALUE (arglist);
8389 return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
8390 || (TREE_CODE (arg) == ADDR_EXPR
8391 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8392 ? const1_rtx : const0_rtx);
8395 case BUILT_IN_FRAME_ADDRESS:
8396 /* The argument must be a nonnegative integer constant.
8397 It counts the number of frames to scan up the stack.
8398 The value is the address of that frame. */
8399 case BUILT_IN_RETURN_ADDRESS:
8400 /* The argument must be a nonnegative integer constant.
8401 It counts the number of frames to scan up the stack.
8402 The value is the return address saved in that frame. */
8404 /* Warning about missing arg was already issued. */
8406 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
8407 || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
8409 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8410 error ("invalid arg to `__builtin_frame_address'");
8412 error ("invalid arg to `__builtin_return_address'");
8417 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
8418 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
8419 hard_frame_pointer_rtx);
8421 /* Some ports cannot access arbitrary stack frames. */
8424 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8425 warning ("unsupported arg to `__builtin_frame_address'");
8427 warning ("unsupported arg to `__builtin_return_address'");
8431 /* For __builtin_frame_address, return what we've got. */
8432 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8435 if (GET_CODE (tem) != REG)
8436 tem = copy_to_reg (tem);
8440 /* Returns the address of the area where the structure is returned.
8442 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
8444 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
8445 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
8448 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
8450 case BUILT_IN_ALLOCA:
8452 /* Arg could be non-integer if user redeclared this fcn wrong. */
8453 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8456 /* Compute the argument. */
8457 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
8459 /* Allocate the desired space. */
8460 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
8463 /* If not optimizing, call the library function. */
8464 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8468 /* Arg could be non-integer if user redeclared this fcn wrong. */
8469 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8472 /* Compute the argument. */
8473 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8474 /* Compute ffs, into TARGET if possible.
8475 Set TARGET to wherever the result comes back. */
8476 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8477 ffs_optab, op0, target, 1);
8482 case BUILT_IN_STRLEN:
8483 /* If not optimizing, call the library function. */
8484 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8488 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8489 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8493 tree src = TREE_VALUE (arglist);
8494 tree len = c_strlen (src);
8497 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8499 rtx result, src_rtx, char_rtx;
8500 enum machine_mode insn_mode = value_mode, char_mode;
8501 enum insn_code icode;
8503 /* If the length is known, just return it. */
8505 return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);
8507 /* If SRC is not a pointer type, don't do this operation inline. */
8511 /* Call a function if we can't compute strlen in the right mode. */
8513 while (insn_mode != VOIDmode)
8515 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
8516 if (icode != CODE_FOR_nothing)
8519 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
8521 if (insn_mode == VOIDmode)
8524 /* Make a place to write the result of the instruction. */
8527 && GET_CODE (result) == REG
8528 && GET_MODE (result) == insn_mode
8529 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8530 result = gen_reg_rtx (insn_mode);
8532 /* Make sure the operands are acceptable to the predicates. */
8534 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
8535 result = gen_reg_rtx (insn_mode);
8536 src_rtx = memory_address (BLKmode,
8537 expand_expr (src, NULL_RTX, ptr_mode,
8540 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
8541 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
8543 /* Check the string is readable and has an end. */
8544 if (flag_check_memory_usage)
8545 emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
8547 GEN_INT (MEMORY_USE_RO),
8548 TYPE_MODE (integer_type_node));
8550 char_rtx = const0_rtx;
8551 char_mode = insn_operand_mode[(int)icode][2];
8552 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
8553 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
8555 emit_insn (GEN_FCN (icode) (result,
8556 gen_rtx_MEM (BLKmode, src_rtx),
8557 char_rtx, GEN_INT (align)));
8559 /* Return the value in the proper mode for this function. */
8560 if (GET_MODE (result) == value_mode)
8562 else if (target != 0)
8564 convert_move (target, result, 0);
8568 return convert_to_mode (value_mode, result, 0);
8571 case BUILT_IN_STRCPY:
8572 /* If not optimizing, call the library function. */
8573 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8577 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8578 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8579 || TREE_CHAIN (arglist) == 0
8580 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8584 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
8589 len = size_binop (PLUS_EXPR, len, integer_one_node);
8591 chainon (arglist, build_tree_list (NULL_TREE, len));
8595 case BUILT_IN_MEMCPY:
8596 /* If not optimizing, call the library function. */
8597 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8601 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8602 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8603 || TREE_CHAIN (arglist) == 0
8604 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
8606 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8607 || (TREE_CODE (TREE_TYPE (TREE_VALUE
8608 (TREE_CHAIN (TREE_CHAIN (arglist)))))
8613 tree dest = TREE_VALUE (arglist);
8614 tree src = TREE_VALUE (TREE_CHAIN (arglist));
8615 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8619 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8621 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8622 rtx dest_rtx, dest_mem, src_mem, src_rtx, dest_addr, len_rtx;
8624 /* If either SRC or DEST is not a pointer type, don't do
8625 this operation in-line. */
8626 if (src_align == 0 || dest_align == 0)
8628 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
8629 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8633 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8634 dest_mem = gen_rtx_MEM (BLKmode,
8635 memory_address (BLKmode, dest_rtx));
8636 /* There could be a void* cast on top of the object. */
8637 while (TREE_CODE (dest) == NOP_EXPR)
8638 dest = TREE_OPERAND (dest, 0);
8639 type = TREE_TYPE (TREE_TYPE (dest));
8640 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8641 src_rtx = expand_expr (src, NULL_RTX, ptr_mode, EXPAND_SUM);
8642 src_mem = gen_rtx_MEM (BLKmode,
8643 memory_address (BLKmode, src_rtx));
8644 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
8646 /* Just copy the rights of SRC to the rights of DEST. */
8647 if (flag_check_memory_usage)
8648 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
8651 len_rtx, TYPE_MODE (sizetype));
8653 /* There could be a void* cast on top of the object. */
8654 while (TREE_CODE (src) == NOP_EXPR)
8655 src = TREE_OPERAND (src, 0);
8656 type = TREE_TYPE (TREE_TYPE (src));
8657 MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
8659 /* Copy word part most expediently. */
8661 = emit_block_move (dest_mem, src_mem, len_rtx,
8662 MIN (src_align, dest_align));
8665 dest_addr = force_operand (dest_rtx, NULL_RTX);
8670 case BUILT_IN_MEMSET:
8671 /* If not optimizing, call the library function. */
8672 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8676 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8677 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8678 || TREE_CHAIN (arglist) == 0
8679 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
8681 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8683 != (TREE_CODE (TREE_TYPE
8685 (TREE_CHAIN (TREE_CHAIN (arglist))))))))
8689 tree dest = TREE_VALUE (arglist);
8690 tree val = TREE_VALUE (TREE_CHAIN (arglist));
8691 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8695 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8696 rtx dest_rtx, dest_mem, dest_addr, len_rtx;
8698 /* If DEST is not a pointer type, don't do this
8699 operation in-line. */
8700 if (dest_align == 0)
8703 /* If the arguments have side-effects, then we can only evaluate
8704 them at most once. The following code evaluates them twice if
8705 they are not constants because we break out to expand_call
8706 in that case. They can't be constants if they have side-effects
8707 so we can check for that first. Alternatively, we could call
8708 save_expr to make multiple evaluation safe. */
8709 if (TREE_SIDE_EFFECTS (val) || TREE_SIDE_EFFECTS (len))
8712 /* If VAL is not 0, don't do this operation in-line. */
8713 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
8716 /* If LEN does not expand to a constant, don't do this
8717 operation in-line. */
8718 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
8719 if (GET_CODE (len_rtx) != CONST_INT)
8722 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8723 dest_mem = gen_rtx_MEM (BLKmode,
8724 memory_address (BLKmode, dest_rtx));
8726 /* Just check DST is writable and mark it as readable. */
8727 if (flag_check_memory_usage)
8728 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
8730 len_rtx, TYPE_MODE (sizetype),
8731 GEN_INT (MEMORY_USE_WO),
8732 TYPE_MODE (integer_type_node));
8734 /* There could be a void* cast on top of the object. */
8735 while (TREE_CODE (dest) == NOP_EXPR)
8736 dest = TREE_OPERAND (dest, 0);
8738 if (TREE_CODE (dest) == ADDR_EXPR)
8739 /* If this is the address of an object, check whether the
8740 object is an array. */
8741 type = TREE_TYPE (TREE_OPERAND (dest, 0));
8743 type = TREE_TYPE (TREE_TYPE (dest));
8744 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8746 dest_addr = clear_storage (dest_mem, len_rtx, dest_align);
8749 dest_addr = force_operand (dest_rtx, NULL_RTX);
8754 /* These comparison functions need an instruction that returns an actual
8755 index. An ordinary compare that just sets the condition codes
8757 #ifdef HAVE_cmpstrsi
8758 case BUILT_IN_STRCMP:
8759 /* If not optimizing, call the library function. */
8760 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8763 /* If we need to check memory accesses, call the library function. */
8764 if (flag_check_memory_usage)
8768 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8769 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8770 || TREE_CHAIN (arglist) == 0
8771 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8773 else if (!HAVE_cmpstrsi)
8776 tree arg1 = TREE_VALUE (arglist);
8777 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8780 len = c_strlen (arg1);
8782 len = size_binop (PLUS_EXPR, integer_one_node, len);
8783 len2 = c_strlen (arg2);
8785 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
8787 /* If we don't have a constant length for the first, use the length
8788 of the second, if we know it. We don't require a constant for
8789 this case; some cost analysis could be done if both are available
8790 but neither is constant. For now, assume they're equally cheap.
8792 If both strings have constant lengths, use the smaller. This
8793 could arise if optimization results in strcpy being called with
8794 two fixed strings, or if the code was machine-generated. We should
8795 add some code to the `memcmp' handler below to deal with such
8796 situations, someday. */
8797 if (!len || TREE_CODE (len) != INTEGER_CST)
8804 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
8806 if (tree_int_cst_lt (len2, len))
8810 chainon (arglist, build_tree_list (NULL_TREE, len));
8814 case BUILT_IN_MEMCMP:
8815 /* If not optimizing, call the library function. */
8816 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8819 /* If we need to check memory accesses, call the library function. */
8820 if (flag_check_memory_usage)
8824 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8825 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8826 || TREE_CHAIN (arglist) == 0
8827 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8828 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8829 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8831 else if (!HAVE_cmpstrsi)
8834 tree arg1 = TREE_VALUE (arglist);
8835 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8836 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8840 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8842 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8843 enum machine_mode insn_mode
8844 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
8846 /* If we don't have POINTER_TYPE, call the function. */
8847 if (arg1_align == 0 || arg2_align == 0)
8849 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
8850 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8854 /* Make a place to write the result of the instruction. */
8857 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
8858 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8859 result = gen_reg_rtx (insn_mode);
8861 emit_insn (gen_cmpstrsi (result,
8862 gen_rtx_MEM (BLKmode,
8863 expand_expr (arg1, NULL_RTX,
8866 gen_rtx_MEM (BLKmode,
8867 expand_expr (arg2, NULL_RTX,
8870 expand_expr (len, NULL_RTX, VOIDmode, 0),
8871 GEN_INT (MIN (arg1_align, arg2_align))));
8873 /* Return the value in the proper mode for this function. */
8874 mode = TYPE_MODE (TREE_TYPE (exp));
8875 if (GET_MODE (result) == mode)
8877 else if (target != 0)
8879 convert_move (target, result, 0);
8883 return convert_to_mode (mode, result, 0);
8886 case BUILT_IN_STRCMP:
8887 case BUILT_IN_MEMCMP:
8891 case BUILT_IN_SETJMP:
8893 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8897 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
8899 rtx lab = gen_label_rtx ();
8900 rtx ret = expand_builtin_setjmp (buf_addr, target, lab, lab);
8905 /* __builtin_longjmp is passed a pointer to an array of five words.
8906 It's similar to the C library longjmp function but works with
8907 __builtin_setjmp above. */
8908 case BUILT_IN_LONGJMP:
8909 if (arglist == 0 || TREE_CHAIN (arglist) == 0
8910 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8914 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
8916 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
8917 const0_rtx, VOIDmode, 0);
8918 expand_builtin_longjmp (buf_addr, value);
8922 /* Various hooks for the DWARF 2 __throw routine. */
8923 case BUILT_IN_UNWIND_INIT:
8924 expand_builtin_unwind_init ();
8927 return frame_pointer_rtx;
8929 return stack_pointer_rtx;
8930 #ifdef DWARF2_UNWIND_INFO
8931 case BUILT_IN_DWARF_FP_REGNUM:
8932 return expand_builtin_dwarf_fp_regnum ();
8933 case BUILT_IN_DWARF_REG_SIZE:
8934 return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
8936 case BUILT_IN_FROB_RETURN_ADDR:
8937 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
8938 case BUILT_IN_EXTRACT_RETURN_ADDR:
8939 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
8940 case BUILT_IN_SET_RETURN_ADDR_REG:
8941 expand_builtin_set_return_addr_reg (TREE_VALUE (arglist));
8943 case BUILT_IN_EH_STUB:
8944 return expand_builtin_eh_stub ();
8945 case BUILT_IN_SET_EH_REGS:
8946 expand_builtin_set_eh_regs (TREE_VALUE (arglist),
8947 TREE_VALUE (TREE_CHAIN (arglist)));
8950 default: /* just do library call, if unknown builtin */
8951 error ("built-in function `%s' not currently supported",
8952 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
8955 /* The switch statement above can drop through to cause the function
8956 to be called normally. */
8958 return expand_call (exp, target, ignore);
8961 /* Built-in functions to perform an untyped call and return. */
8963 /* For each register that may be used for calling a function, this
8964 gives a mode used to copy the register's value. VOIDmode indicates
8965 the register is not used for calling a function. If the machine
8966 has register windows, this gives only the outbound registers.
8967 INCOMING_REGNO gives the corresponding inbound register. */
8968 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
8970 /* For each register that may be used for returning values, this gives
8971 a mode used to copy the register's value. VOIDmode indicates the
8972 register is not used for returning values. If the machine has
8973 register windows, this gives only the outbound registers.
8974 INCOMING_REGNO gives the corresponding inbound register. */
8975 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
8977 /* For each register that may be used for calling a function, this
8978 gives the offset of that register into the block returned by
8979 __builtin_apply_args. 0 indicates that the register is not
8980 used for calling a function. */
8981 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
8983 /* Return the offset of register REGNO into the block returned by
8984 __builtin_apply_args. This is not declared static, since it is
8985 needed in objc-act.c. */
8988 apply_args_register_offset (regno)
8993 /* Arguments are always put in outgoing registers (in the argument
8994 block) if such make sense. */
8995 #ifdef OUTGOING_REGNO
8996 regno = OUTGOING_REGNO(regno);
8998 return apply_args_reg_offset[regno];
9001 /* Return the size required for the block returned by __builtin_apply_args,
9002 and initialize apply_args_mode. */
9007 static int size = -1;
9009 enum machine_mode mode;
9011 /* The values computed by this function never change. */
9014 /* The first value is the incoming arg-pointer. */
9015 size = GET_MODE_SIZE (Pmode);
9017 /* The second value is the structure value address unless this is
9018 passed as an "invisible" first argument. */
9019 if (struct_value_rtx)
9020 size += GET_MODE_SIZE (Pmode);
9022 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9023 if (FUNCTION_ARG_REGNO_P (regno))
9025 /* Search for the proper mode for copying this register's
9026 value. I'm not sure this is right, but it works so far. */
9027 enum machine_mode best_mode = VOIDmode;
9029 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9031 mode = GET_MODE_WIDER_MODE (mode))
9032 if (HARD_REGNO_MODE_OK (regno, mode)
9033 && HARD_REGNO_NREGS (regno, mode) == 1)
9036 if (best_mode == VOIDmode)
9037 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9039 mode = GET_MODE_WIDER_MODE (mode))
9040 if (HARD_REGNO_MODE_OK (regno, mode)
9041 && (mov_optab->handlers[(int) mode].insn_code
9042 != CODE_FOR_nothing))
9046 if (mode == VOIDmode)
9049 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9050 if (size % align != 0)
9051 size = CEIL (size, align) * align;
9052 apply_args_reg_offset[regno] = size;
9053 size += GET_MODE_SIZE (mode);
9054 apply_args_mode[regno] = mode;
9058 apply_args_mode[regno] = VOIDmode;
9059 apply_args_reg_offset[regno] = 0;
9065 /* Return the size required for the block returned by __builtin_apply,
9066 and initialize apply_result_mode. */
9069 apply_result_size ()
9071 static int size = -1;
9073 enum machine_mode mode;
9075 /* The values computed by this function never change. */
9080 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9081 if (FUNCTION_VALUE_REGNO_P (regno))
9083 /* Search for the proper mode for copying this register's
9084 value. I'm not sure this is right, but it works so far. */
9085 enum machine_mode best_mode = VOIDmode;
9087 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9089 mode = GET_MODE_WIDER_MODE (mode))
9090 if (HARD_REGNO_MODE_OK (regno, mode))
9093 if (best_mode == VOIDmode)
9094 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9096 mode = GET_MODE_WIDER_MODE (mode))
9097 if (HARD_REGNO_MODE_OK (regno, mode)
9098 && (mov_optab->handlers[(int) mode].insn_code
9099 != CODE_FOR_nothing))
9103 if (mode == VOIDmode)
9106 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9107 if (size % align != 0)
9108 size = CEIL (size, align) * align;
9109 size += GET_MODE_SIZE (mode);
9110 apply_result_mode[regno] = mode;
9113 apply_result_mode[regno] = VOIDmode;
9115 /* Allow targets that use untyped_call and untyped_return to override
9116 the size so that machine-specific information can be stored here. */
9117 #ifdef APPLY_RESULT_SIZE
9118 size = APPLY_RESULT_SIZE;
9124 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9125 /* Create a vector describing the result block RESULT. If SAVEP is true,
9126 the result block is used to save the values; otherwise it is used to
9127 restore the values. */
9130 result_vector (savep, result)
9134 int regno, size, align, nelts;
9135 enum machine_mode mode;
9137 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9140 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9141 if ((mode = apply_result_mode[regno]) != VOIDmode)
9143 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9144 if (size % align != 0)
9145 size = CEIL (size, align) * align;
9146 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
9147 mem = change_address (result, mode,
9148 plus_constant (XEXP (result, 0), size));
9149 savevec[nelts++] = (savep
9150 ? gen_rtx_SET (VOIDmode, mem, reg)
9151 : gen_rtx_SET (VOIDmode, reg, mem));
9152 size += GET_MODE_SIZE (mode);
9154 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
9156 #endif /* HAVE_untyped_call or HAVE_untyped_return */
9158 /* Save the state required to perform an untyped call with the same
9159 arguments as were passed to the current function. */
9162 expand_builtin_apply_args ()
9165 int size, align, regno;
9166 enum machine_mode mode;
9168 /* Create a block where the arg-pointer, structure value address,
9169 and argument registers can be saved. */
9170 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
9172 /* Walk past the arg-pointer and structure value address. */
9173 size = GET_MODE_SIZE (Pmode);
9174 if (struct_value_rtx)
9175 size += GET_MODE_SIZE (Pmode);
9177 /* Save each register used in calling a function to the block. */
9178 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9179 if ((mode = apply_args_mode[regno]) != VOIDmode)
9183 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9184 if (size % align != 0)
9185 size = CEIL (size, align) * align;
9187 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
9190 /* For reg-stack.c's stack register household.
9191 Compare with a similar piece of code in function.c. */
9193 emit_insn (gen_rtx_USE (mode, tem));
9196 emit_move_insn (change_address (registers, mode,
9197 plus_constant (XEXP (registers, 0),
9200 size += GET_MODE_SIZE (mode);
9203 /* Save the arg pointer to the block. */
9204 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9205 copy_to_reg (virtual_incoming_args_rtx));
9206 size = GET_MODE_SIZE (Pmode);
9208 /* Save the structure value address unless this is passed as an
9209 "invisible" first argument. */
9210 if (struct_value_incoming_rtx)
9212 emit_move_insn (change_address (registers, Pmode,
9213 plus_constant (XEXP (registers, 0),
9215 copy_to_reg (struct_value_incoming_rtx));
9216 size += GET_MODE_SIZE (Pmode);
9219 /* Return the address of the block. */
9220 return copy_addr_to_reg (XEXP (registers, 0));
9223 /* Perform an untyped call and save the state required to perform an
9224 untyped return of whatever value was returned by the given function. */
9227 expand_builtin_apply (function, arguments, argsize)
9228 rtx function, arguments, argsize;
9230 int size, align, regno;
9231 enum machine_mode mode;
9232 rtx incoming_args, result, reg, dest, call_insn;
9233 rtx old_stack_level = 0;
9234 rtx call_fusage = 0;
9236 /* Create a block where the return registers can be saved. */
9237 result = assign_stack_local (BLKmode, apply_result_size (), -1);
9239 /* ??? The argsize value should be adjusted here. */
9241 /* Fetch the arg pointer from the ARGUMENTS block. */
9242 incoming_args = gen_reg_rtx (Pmode);
9243 emit_move_insn (incoming_args,
9244 gen_rtx_MEM (Pmode, arguments));
9245 #ifndef STACK_GROWS_DOWNWARD
9246 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9247 incoming_args, 0, OPTAB_LIB_WIDEN);
9250 /* Perform postincrements before actually calling the function. */
9253 /* Push a new argument block and copy the arguments. */
9254 do_pending_stack_adjust ();
9256 /* Save the stack with nonlocal if available */
9257 #ifdef HAVE_save_stack_nonlocal
9258 if (HAVE_save_stack_nonlocal)
9259 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
9262 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
9264 /* Push a block of memory onto the stack to store the memory arguments.
9265 Save the address in a register, and copy the memory arguments. ??? I
9266 haven't figured out how the calling convention macros effect this,
9267 but it's likely that the source and/or destination addresses in
9268 the block copy will need updating in machine specific ways. */
9269 dest = allocate_dynamic_stack_space (argsize, 0, 0);
9270 emit_block_move (gen_rtx_MEM (BLKmode, dest),
9271 gen_rtx_MEM (BLKmode, incoming_args),
9273 PARM_BOUNDARY / BITS_PER_UNIT);
9275 /* Refer to the argument block. */
9277 arguments = gen_rtx_MEM (BLKmode, arguments);
9279 /* Walk past the arg-pointer and structure value address. */
9280 size = GET_MODE_SIZE (Pmode);
9281 if (struct_value_rtx)
9282 size += GET_MODE_SIZE (Pmode);
9284 /* Restore each of the registers previously saved. Make USE insns
9285 for each of these registers for use in making the call. */
9286 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9287 if ((mode = apply_args_mode[regno]) != VOIDmode)
9289 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9290 if (size % align != 0)
9291 size = CEIL (size, align) * align;
9292 reg = gen_rtx_REG (mode, regno);
9293 emit_move_insn (reg,
9294 change_address (arguments, mode,
9295 plus_constant (XEXP (arguments, 0),
9298 use_reg (&call_fusage, reg);
9299 size += GET_MODE_SIZE (mode);
9302 /* Restore the structure value address unless this is passed as an
9303 "invisible" first argument. */
9304 size = GET_MODE_SIZE (Pmode);
9305 if (struct_value_rtx)
9307 rtx value = gen_reg_rtx (Pmode);
9308 emit_move_insn (value,
9309 change_address (arguments, Pmode,
9310 plus_constant (XEXP (arguments, 0),
9312 emit_move_insn (struct_value_rtx, value);
9313 if (GET_CODE (struct_value_rtx) == REG)
9314 use_reg (&call_fusage, struct_value_rtx);
9315 size += GET_MODE_SIZE (Pmode);
9318 /* All arguments and registers used for the call are set up by now! */
9319 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
9321 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
9322 and we don't want to load it into a register as an optimization,
9323 because prepare_call_address already did it if it should be done. */
9324 if (GET_CODE (function) != SYMBOL_REF)
9325 function = memory_address (FUNCTION_MODE, function);
9327 /* Generate the actual call instruction and save the return value. */
9328 #ifdef HAVE_untyped_call
9329 if (HAVE_untyped_call)
9330 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
9331 result, result_vector (1, result)));
9334 #ifdef HAVE_call_value
9335 if (HAVE_call_value)
9339 /* Locate the unique return register. It is not possible to
9340 express a call that sets more than one return register using
9341 call_value; use untyped_call for that. In fact, untyped_call
9342 only needs to save the return registers in the given block. */
9343 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9344 if ((mode = apply_result_mode[regno]) != VOIDmode)
9347 abort (); /* HAVE_untyped_call required. */
9348 valreg = gen_rtx_REG (mode, regno);
9351 emit_call_insn (gen_call_value (valreg,
9352 gen_rtx_MEM (FUNCTION_MODE, function),
9353 const0_rtx, NULL_RTX, const0_rtx));
9355 emit_move_insn (change_address (result, GET_MODE (valreg),
9363 /* Find the CALL insn we just emitted. */
9364 for (call_insn = get_last_insn ();
9365 call_insn && GET_CODE (call_insn) != CALL_INSN;
9366 call_insn = PREV_INSN (call_insn))
9372 /* Put the register usage information on the CALL. If there is already
9373 some usage information, put ours at the end. */
9374 if (CALL_INSN_FUNCTION_USAGE (call_insn))
9378 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
9379 link = XEXP (link, 1))
9382 XEXP (link, 1) = call_fusage;
9385 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
9387 /* Restore the stack. */
9388 #ifdef HAVE_save_stack_nonlocal
9389 if (HAVE_save_stack_nonlocal)
9390 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
9393 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
9395 /* Return the address of the result block. */
9396 return copy_addr_to_reg (XEXP (result, 0));
9399 /* Perform an untyped return. */
9402 expand_builtin_return (result)
9405 int size, align, regno;
9406 enum machine_mode mode;
9408 rtx call_fusage = 0;
9410 apply_result_size ();
9411 result = gen_rtx_MEM (BLKmode, result);
9413 #ifdef HAVE_untyped_return
9414 if (HAVE_untyped_return)
9416 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
9422 /* Restore the return value and note that each value is used. */
9424 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9425 if ((mode = apply_result_mode[regno]) != VOIDmode)
9427 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9428 if (size % align != 0)
9429 size = CEIL (size, align) * align;
9430 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
9431 emit_move_insn (reg,
9432 change_address (result, mode,
9433 plus_constant (XEXP (result, 0),
9436 push_to_sequence (call_fusage);
9437 emit_insn (gen_rtx_USE (VOIDmode, reg));
9438 call_fusage = get_insns ();
9440 size += GET_MODE_SIZE (mode);
9443 /* Put the USE insns before the return. */
9444 emit_insns (call_fusage);
9446 /* Return whatever values was restored by jumping directly to the end
9448 expand_null_return ();
9451 /* Expand code for a post- or pre- increment or decrement
9452 and return the RTX for the result.
9453 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9456 expand_increment (exp, post, ignore)
9460 register rtx op0, op1;
9461 register rtx temp, value;
9462 register tree incremented = TREE_OPERAND (exp, 0);
9463 optab this_optab = add_optab;
9465 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9466 int op0_is_copy = 0;
9467 int single_insn = 0;
9468 /* 1 means we can't store into OP0 directly,
9469 because it is a subreg narrower than a word,
9470 and we don't dare clobber the rest of the word. */
9473 /* Stabilize any component ref that might need to be
9474 evaluated more than once below. */
9476 || TREE_CODE (incremented) == BIT_FIELD_REF
9477 || (TREE_CODE (incremented) == COMPONENT_REF
9478 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9479 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9480 incremented = stabilize_reference (incremented);
9481 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9482 ones into save exprs so that they don't accidentally get evaluated
9483 more than once by the code below. */
9484 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9485 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9486 incremented = save_expr (incremented);
9488 /* Compute the operands as RTX.
9489 Note whether OP0 is the actual lvalue or a copy of it:
9490 I believe it is a copy iff it is a register or subreg
9491 and insns were generated in computing it. */
9493 temp = get_last_insn ();
9494 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9496 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9497 in place but instead must do sign- or zero-extension during assignment,
9498 so we copy it into a new register and let the code below use it as
9501 Note that we can safely modify this SUBREG since it is know not to be
9502 shared (it was made by the expand_expr call above). */
9504 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9507 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9511 else if (GET_CODE (op0) == SUBREG
9512 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9514 /* We cannot increment this SUBREG in place. If we are
9515 post-incrementing, get a copy of the old value. Otherwise,
9516 just mark that we cannot increment in place. */
9518 op0 = copy_to_reg (op0);
9523 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9524 && temp != get_last_insn ());
9525 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9526 EXPAND_MEMORY_USE_BAD);
9528 /* Decide whether incrementing or decrementing. */
9529 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9530 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9531 this_optab = sub_optab;
9533 /* Convert decrement by a constant into a negative increment. */
9534 if (this_optab == sub_optab
9535 && GET_CODE (op1) == CONST_INT)
9537 op1 = GEN_INT (- INTVAL (op1));
9538 this_optab = add_optab;
9541 /* For a preincrement, see if we can do this with a single instruction. */
9544 icode = (int) this_optab->handlers[(int) mode].insn_code;
9545 if (icode != (int) CODE_FOR_nothing
9546 /* Make sure that OP0 is valid for operands 0 and 1
9547 of the insn we want to queue. */
9548 && (*insn_operand_predicate[icode][0]) (op0, mode)
9549 && (*insn_operand_predicate[icode][1]) (op0, mode)
9550 && (*insn_operand_predicate[icode][2]) (op1, mode))
9554 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9555 then we cannot just increment OP0. We must therefore contrive to
9556 increment the original value. Then, for postincrement, we can return
9557 OP0 since it is a copy of the old value. For preincrement, expand here
9558 unless we can do it with a single insn.
9560 Likewise if storing directly into OP0 would clobber high bits
9561 we need to preserve (bad_subreg). */
9562 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9564 /* This is the easiest way to increment the value wherever it is.
9565 Problems with multiple evaluation of INCREMENTED are prevented
9566 because either (1) it is a component_ref or preincrement,
9567 in which case it was stabilized above, or (2) it is an array_ref
9568 with constant index in an array in a register, which is
9569 safe to reevaluate. */
9570 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9571 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9572 ? MINUS_EXPR : PLUS_EXPR),
9575 TREE_OPERAND (exp, 1));
9577 while (TREE_CODE (incremented) == NOP_EXPR
9578 || TREE_CODE (incremented) == CONVERT_EXPR)
9580 newexp = convert (TREE_TYPE (incremented), newexp);
9581 incremented = TREE_OPERAND (incremented, 0);
9584 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9585 return post ? op0 : temp;
9590 /* We have a true reference to the value in OP0.
9591 If there is an insn to add or subtract in this mode, queue it.
9592 Queueing the increment insn avoids the register shuffling
9593 that often results if we must increment now and first save
9594 the old value for subsequent use. */
9596 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9597 op0 = stabilize (op0);
9600 icode = (int) this_optab->handlers[(int) mode].insn_code;
9601 if (icode != (int) CODE_FOR_nothing
9602 /* Make sure that OP0 is valid for operands 0 and 1
9603 of the insn we want to queue. */
9604 && (*insn_operand_predicate[icode][0]) (op0, mode)
9605 && (*insn_operand_predicate[icode][1]) (op0, mode))
9607 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9608 op1 = force_reg (mode, op1);
9610 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9612 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9614 rtx addr = (general_operand (XEXP (op0, 0), mode)
9615 ? force_reg (Pmode, XEXP (op0, 0))
9616 : copy_to_reg (XEXP (op0, 0)));
9619 op0 = change_address (op0, VOIDmode, addr);
9620 temp = force_reg (GET_MODE (op0), op0);
9621 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9622 op1 = force_reg (mode, op1);
9624 /* The increment queue is LIFO, thus we have to `queue'
9625 the instructions in reverse order. */
9626 enqueue_insn (op0, gen_move_insn (op0, temp));
9627 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9632 /* Preincrement, or we can't increment with one simple insn. */
9634 /* Save a copy of the value before inc or dec, to return it later. */
9635 temp = value = copy_to_reg (op0);
9637 /* Arrange to return the incremented value. */
9638 /* Copy the rtx because expand_binop will protect from the queue,
9639 and the results of that would be invalid for us to return
9640 if our caller does emit_queue before using our result. */
9641 temp = copy_rtx (value = op0);
9643 /* Increment however we can. */
9644 op1 = expand_binop (mode, this_optab, value, op1,
9645 flag_check_memory_usage ? NULL_RTX : op0,
9646 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9647 /* Make sure the value is stored into OP0. */
9649 emit_move_insn (op0, op1);
9654 /* Expand all function calls contained within EXP, innermost ones first.
9655 But don't look within expressions that have sequence points.
9656 For each CALL_EXPR, record the rtx for its value
9657 in the CALL_EXPR_RTL field. */
9660 preexpand_calls (exp)
9663 register int nops, i;
9664 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9666 if (! do_preexpand_calls)
9669 /* Only expressions and references can contain calls. */
9671 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9674 switch (TREE_CODE (exp))
9677 /* Do nothing if already expanded. */
9678 if (CALL_EXPR_RTL (exp) != 0
9679 /* Do nothing if the call returns a variable-sized object. */
9680 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
9681 /* Do nothing to built-in functions. */
9682 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9683 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9685 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9688 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9693 case TRUTH_ANDIF_EXPR:
9694 case TRUTH_ORIF_EXPR:
9695 /* If we find one of these, then we can be sure
9696 the adjust will be done for it (since it makes jumps).
9697 Do it now, so that if this is inside an argument
9698 of a function, we don't get the stack adjustment
9699 after some other args have already been pushed. */
9700 do_pending_stack_adjust ();
9705 case WITH_CLEANUP_EXPR:
9706 case CLEANUP_POINT_EXPR:
9707 case TRY_CATCH_EXPR:
9711 if (SAVE_EXPR_RTL (exp) != 0)
9718 nops = tree_code_length[(int) TREE_CODE (exp)];
9719 for (i = 0; i < nops; i++)
9720 if (TREE_OPERAND (exp, i) != 0)
9722 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9723 if (type == 'e' || type == '<' || type == '1' || type == '2'
9725 preexpand_calls (TREE_OPERAND (exp, i));
9729 /* At the start of a function, record that we have no previously-pushed
9730 arguments waiting to be popped. */
9733 init_pending_stack_adjust ()
9735 pending_stack_adjust = 0;
9738 /* When exiting from function, if safe, clear out any pending stack adjust
9739 so the adjustment won't get done.
9741 Note, if the current function calls alloca, then it must have a
9742 frame pointer regardless of the value of flag_omit_frame_pointer. */
9745 clear_pending_stack_adjust ()
9747 #ifdef EXIT_IGNORE_STACK
9749 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9750 && EXIT_IGNORE_STACK
9751 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9752 && ! flag_inline_functions)
9753 pending_stack_adjust = 0;
9757 /* Pop any previously-pushed arguments that have not been popped yet. */
9760 do_pending_stack_adjust ()
9762 if (inhibit_defer_pop == 0)
9764 if (pending_stack_adjust != 0)
9765 adjust_stack (GEN_INT (pending_stack_adjust));
9766 pending_stack_adjust = 0;
9770 /* Expand conditional expressions. */
9772 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9773 LABEL is an rtx of code CODE_LABEL, in this function and all the
9777 jumpifnot (exp, label)
9781 do_jump (exp, label, NULL_RTX);
9784 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9791 do_jump (exp, NULL_RTX, label);
9794 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9795 the result is zero, or IF_TRUE_LABEL if the result is one.
9796 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9797 meaning fall through in that case.
9799 do_jump always does any pending stack adjust except when it does not
9800 actually perform a jump. An example where there is no jump
9801 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9803 This function is responsible for optimizing cases such as
9804 &&, || and comparison operators in EXP. */
9807 do_jump (exp, if_false_label, if_true_label)
9809 rtx if_false_label, if_true_label;
9811 register enum tree_code code = TREE_CODE (exp);
9812 /* Some cases need to create a label to jump to
9813 in order to properly fall through.
9814 These cases set DROP_THROUGH_LABEL nonzero. */
9815 rtx drop_through_label = 0;
9820 enum machine_mode mode;
9830 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9836 /* This is not true with #pragma weak */
9838 /* The address of something can never be zero. */
9840 emit_jump (if_true_label);
9845 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9846 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9847 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9850 /* If we are narrowing the operand, we have to do the compare in the
9852 if ((TYPE_PRECISION (TREE_TYPE (exp))
9853 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9855 case NON_LVALUE_EXPR:
9856 case REFERENCE_EXPR:
9861 /* These cannot change zero->non-zero or vice versa. */
9862 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9866 /* This is never less insns than evaluating the PLUS_EXPR followed by
9867 a test and can be longer if the test is eliminated. */
9869 /* Reduce to minus. */
9870 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9871 TREE_OPERAND (exp, 0),
9872 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9873 TREE_OPERAND (exp, 1))));
9874 /* Process as MINUS. */
9878 /* Non-zero iff operands of minus differ. */
9879 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
9880 TREE_OPERAND (exp, 0),
9881 TREE_OPERAND (exp, 1)),
9886 /* If we are AND'ing with a small constant, do this comparison in the
9887 smallest type that fits. If the machine doesn't have comparisons
9888 that small, it will be converted back to the wider comparison.
9889 This helps if we are testing the sign bit of a narrower object.
9890 combine can't do this for us because it can't know whether a
9891 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9893 if (! SLOW_BYTE_ACCESS
9894 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9895 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9896 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
9897 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9898 && (type = type_for_mode (mode, 1)) != 0
9899 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9900 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9901 != CODE_FOR_nothing))
9903 do_jump (convert (type, exp), if_false_label, if_true_label);
9908 case TRUTH_NOT_EXPR:
9909 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9912 case TRUTH_ANDIF_EXPR:
9913 if (if_false_label == 0)
9914 if_false_label = drop_through_label = gen_label_rtx ();
9915 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9916 start_cleanup_deferral ();
9917 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9918 end_cleanup_deferral ();
9921 case TRUTH_ORIF_EXPR:
9922 if (if_true_label == 0)
9923 if_true_label = drop_through_label = gen_label_rtx ();
9924 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9925 start_cleanup_deferral ();
9926 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9927 end_cleanup_deferral ();
9932 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9933 preserve_temp_slots (NULL_RTX);
9937 do_pending_stack_adjust ();
9938 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9945 int bitsize, bitpos, unsignedp;
9946 enum machine_mode mode;
9952 /* Get description of this reference. We don't actually care
9953 about the underlying object here. */
9954 get_inner_reference (exp, &bitsize, &bitpos, &offset,
9955 &mode, &unsignedp, &volatilep,
9958 type = type_for_size (bitsize, unsignedp);
9959 if (! SLOW_BYTE_ACCESS
9960 && type != 0 && bitsize >= 0
9961 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9962 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9963 != CODE_FOR_nothing))
9965 do_jump (convert (type, exp), if_false_label, if_true_label);
9972 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9973 if (integer_onep (TREE_OPERAND (exp, 1))
9974 && integer_zerop (TREE_OPERAND (exp, 2)))
9975 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9977 else if (integer_zerop (TREE_OPERAND (exp, 1))
9978 && integer_onep (TREE_OPERAND (exp, 2)))
9979 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9983 register rtx label1 = gen_label_rtx ();
9984 drop_through_label = gen_label_rtx ();
9986 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9988 start_cleanup_deferral ();
9989 /* Now the THEN-expression. */
9990 do_jump (TREE_OPERAND (exp, 1),
9991 if_false_label ? if_false_label : drop_through_label,
9992 if_true_label ? if_true_label : drop_through_label);
9993 /* In case the do_jump just above never jumps. */
9994 do_pending_stack_adjust ();
9995 emit_label (label1);
9997 /* Now the ELSE-expression. */
9998 do_jump (TREE_OPERAND (exp, 2),
9999 if_false_label ? if_false_label : drop_through_label,
10000 if_true_label ? if_true_label : drop_through_label);
10001 end_cleanup_deferral ();
10007 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10009 if (integer_zerop (TREE_OPERAND (exp, 1)))
10010 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10011 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10012 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10015 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10016 fold (build (EQ_EXPR, TREE_TYPE (exp),
10017 fold (build1 (REALPART_EXPR,
10018 TREE_TYPE (inner_type),
10019 TREE_OPERAND (exp, 0))),
10020 fold (build1 (REALPART_EXPR,
10021 TREE_TYPE (inner_type),
10022 TREE_OPERAND (exp, 1))))),
10023 fold (build (EQ_EXPR, TREE_TYPE (exp),
10024 fold (build1 (IMAGPART_EXPR,
10025 TREE_TYPE (inner_type),
10026 TREE_OPERAND (exp, 0))),
10027 fold (build1 (IMAGPART_EXPR,
10028 TREE_TYPE (inner_type),
10029 TREE_OPERAND (exp, 1))))))),
10030 if_false_label, if_true_label);
10031 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10032 && !can_compare_p (TYPE_MODE (inner_type)))
10033 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10035 comparison = compare (exp, EQ, EQ);
10041 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10043 if (integer_zerop (TREE_OPERAND (exp, 1)))
10044 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10045 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10046 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10049 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10050 fold (build (NE_EXPR, TREE_TYPE (exp),
10051 fold (build1 (REALPART_EXPR,
10052 TREE_TYPE (inner_type),
10053 TREE_OPERAND (exp, 0))),
10054 fold (build1 (REALPART_EXPR,
10055 TREE_TYPE (inner_type),
10056 TREE_OPERAND (exp, 1))))),
10057 fold (build (NE_EXPR, TREE_TYPE (exp),
10058 fold (build1 (IMAGPART_EXPR,
10059 TREE_TYPE (inner_type),
10060 TREE_OPERAND (exp, 0))),
10061 fold (build1 (IMAGPART_EXPR,
10062 TREE_TYPE (inner_type),
10063 TREE_OPERAND (exp, 1))))))),
10064 if_false_label, if_true_label);
10065 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10066 && !can_compare_p (TYPE_MODE (inner_type)))
10067 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10069 comparison = compare (exp, NE, NE);
10074 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10076 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10077 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10079 comparison = compare (exp, LT, LTU);
10083 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10085 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10086 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10088 comparison = compare (exp, LE, LEU);
10092 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10094 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10095 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10097 comparison = compare (exp, GT, GTU);
10101 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10103 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10104 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10106 comparison = compare (exp, GE, GEU);
10111 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10113 /* This is not needed any more and causes poor code since it causes
10114 comparisons and tests from non-SI objects to have different code
10116 /* Copy to register to avoid generating bad insns by cse
10117 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10118 if (!cse_not_expected && GET_CODE (temp) == MEM)
10119 temp = copy_to_reg (temp);
10121 do_pending_stack_adjust ();
10122 if (GET_CODE (temp) == CONST_INT)
10123 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10124 else if (GET_CODE (temp) == LABEL_REF)
10125 comparison = const_true_rtx;
10126 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10127 && !can_compare_p (GET_MODE (temp)))
10128 /* Note swapping the labels gives us not-equal. */
10129 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10130 else if (GET_MODE (temp) != VOIDmode)
10131 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
10132 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10133 GET_MODE (temp), NULL_RTX, 0);
10138 /* Do any postincrements in the expression that was tested. */
10141 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10142 straight into a conditional jump instruction as the jump condition.
10143 Otherwise, all the work has been done already. */
10145 if (comparison == const_true_rtx)
10148 emit_jump (if_true_label);
10150 else if (comparison == const0_rtx)
10152 if (if_false_label)
10153 emit_jump (if_false_label);
10155 else if (comparison)
10156 do_jump_for_compare (comparison, if_false_label, if_true_label);
10158 if (drop_through_label)
10160 /* If do_jump produces code that might be jumped around,
10161 do any stack adjusts from that code, before the place
10162 where control merges in. */
10163 do_pending_stack_adjust ();
10164 emit_label (drop_through_label);
10168 /* Given a comparison expression EXP for values too wide to be compared
10169 with one insn, test the comparison and jump to the appropriate label.
10170 The code of EXP is ignored; we always test GT if SWAP is 0,
10171 and LT if SWAP is 1. */
10174 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10177 rtx if_false_label, if_true_label;
10179 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10180 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10181 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10182 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10183 rtx drop_through_label = 0;
10184 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10187 if (! if_true_label || ! if_false_label)
10188 drop_through_label = gen_label_rtx ();
10189 if (! if_true_label)
10190 if_true_label = drop_through_label;
10191 if (! if_false_label)
10192 if_false_label = drop_through_label;
10194 /* Compare a word at a time, high order first. */
10195 for (i = 0; i < nwords; i++)
10198 rtx op0_word, op1_word;
10200 if (WORDS_BIG_ENDIAN)
10202 op0_word = operand_subword_force (op0, i, mode);
10203 op1_word = operand_subword_force (op1, i, mode);
10207 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10208 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10211 /* All but high-order word must be compared as unsigned. */
10212 comp = compare_from_rtx (op0_word, op1_word,
10213 (unsignedp || i > 0) ? GTU : GT,
10214 unsignedp, word_mode, NULL_RTX, 0);
10215 if (comp == const_true_rtx)
10216 emit_jump (if_true_label);
10217 else if (comp != const0_rtx)
10218 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10220 /* Consider lower words only if these are equal. */
10221 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10223 if (comp == const_true_rtx)
10224 emit_jump (if_false_label);
10225 else if (comp != const0_rtx)
10226 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10229 if (if_false_label)
10230 emit_jump (if_false_label);
10231 if (drop_through_label)
10232 emit_label (drop_through_label);
10235 /* Compare OP0 with OP1, word at a time, in mode MODE.
10236 UNSIGNEDP says to do unsigned comparison.
10237 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10240 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10241 enum machine_mode mode;
10244 rtx if_false_label, if_true_label;
10246 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10247 rtx drop_through_label = 0;
10250 if (! if_true_label || ! if_false_label)
10251 drop_through_label = gen_label_rtx ();
10252 if (! if_true_label)
10253 if_true_label = drop_through_label;
10254 if (! if_false_label)
10255 if_false_label = drop_through_label;
10257 /* Compare a word at a time, high order first. */
10258 for (i = 0; i < nwords; i++)
10261 rtx op0_word, op1_word;
10263 if (WORDS_BIG_ENDIAN)
10265 op0_word = operand_subword_force (op0, i, mode);
10266 op1_word = operand_subword_force (op1, i, mode);
10270 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10271 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10274 /* All but high-order word must be compared as unsigned. */
10275 comp = compare_from_rtx (op0_word, op1_word,
10276 (unsignedp || i > 0) ? GTU : GT,
10277 unsignedp, word_mode, NULL_RTX, 0);
10278 if (comp == const_true_rtx)
10279 emit_jump (if_true_label);
10280 else if (comp != const0_rtx)
10281 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10283 /* Consider lower words only if these are equal. */
10284 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10286 if (comp == const_true_rtx)
10287 emit_jump (if_false_label);
10288 else if (comp != const0_rtx)
10289 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10292 if (if_false_label)
10293 emit_jump (if_false_label);
10294 if (drop_through_label)
10295 emit_label (drop_through_label);
10298 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10299 with one insn, test the comparison and jump to the appropriate label. */
10302 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10304 rtx if_false_label, if_true_label;
10306 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10307 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10308 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10309 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10311 rtx drop_through_label = 0;
10313 if (! if_false_label)
10314 drop_through_label = if_false_label = gen_label_rtx ();
10316 for (i = 0; i < nwords; i++)
10318 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
10319 operand_subword_force (op1, i, mode),
10320 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10321 word_mode, NULL_RTX, 0);
10322 if (comp == const_true_rtx)
10323 emit_jump (if_false_label);
10324 else if (comp != const0_rtx)
10325 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10329 emit_jump (if_true_label);
10330 if (drop_through_label)
10331 emit_label (drop_through_label);
10334 /* Jump according to whether OP0 is 0.
10335 We assume that OP0 has an integer mode that is too wide
10336 for the available compare insns. */
10339 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10341 rtx if_false_label, if_true_label;
10343 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10346 rtx drop_through_label = 0;
10348 /* The fastest way of doing this comparison on almost any machine is to
10349 "or" all the words and compare the result. If all have to be loaded
10350 from memory and this is a very wide item, it's possible this may
10351 be slower, but that's highly unlikely. */
10353 part = gen_reg_rtx (word_mode);
10354 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10355 for (i = 1; i < nwords && part != 0; i++)
10356 part = expand_binop (word_mode, ior_optab, part,
10357 operand_subword_force (op0, i, GET_MODE (op0)),
10358 part, 1, OPTAB_WIDEN);
10362 rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
10365 if (comp == const_true_rtx)
10366 emit_jump (if_false_label);
10367 else if (comp == const0_rtx)
10368 emit_jump (if_true_label);
10370 do_jump_for_compare (comp, if_false_label, if_true_label);
10375 /* If we couldn't do the "or" simply, do this with a series of compares. */
10376 if (! if_false_label)
10377 drop_through_label = if_false_label = gen_label_rtx ();
10379 for (i = 0; i < nwords; i++)
10381 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
10383 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
10384 if (comp == const_true_rtx)
10385 emit_jump (if_false_label);
10386 else if (comp != const0_rtx)
10387 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10391 emit_jump (if_true_label);
10393 if (drop_through_label)
10394 emit_label (drop_through_label);
10397 /* Given a comparison expression in rtl form, output conditional branches to
10398 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
10401 do_jump_for_compare (comparison, if_false_label, if_true_label)
10402 rtx comparison, if_false_label, if_true_label;
10406 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10407 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
10411 if (if_false_label)
10412 emit_jump (if_false_label);
10414 else if (if_false_label)
10417 rtx prev = get_last_insn ();
10420 /* Output the branch with the opposite condition. Then try to invert
10421 what is generated. If more than one insn is a branch, or if the
10422 branch is not the last insn written, abort. If we can't invert
10423 the branch, emit make a true label, redirect this jump to that,
10424 emit a jump to the false label and define the true label. */
10426 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10427 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
10431 /* Here we get the first insn that was just emitted. It used to be the
10432 case that, on some machines, emitting the branch would discard
10433 the previous compare insn and emit a replacement. This isn't
10434 done anymore, but abort if we see that PREV is deleted. */
10437 insn = get_insns ();
10438 else if (INSN_DELETED_P (prev))
10441 insn = NEXT_INSN (prev);
10443 for (; insn; insn = NEXT_INSN (insn))
10444 if (GET_CODE (insn) == JUMP_INSN)
10451 if (branch != get_last_insn ())
10454 JUMP_LABEL (branch) = if_false_label;
10455 if (! invert_jump (branch, if_false_label))
10457 if_true_label = gen_label_rtx ();
10458 redirect_jump (branch, if_true_label);
10459 emit_jump (if_false_label);
10460 emit_label (if_true_label);
10465 /* Generate code for a comparison expression EXP
10466 (including code to compute the values to be compared)
10467 and set (CC0) according to the result.
10468 SIGNED_CODE should be the rtx operation for this comparison for
10469 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10471 We force a stack adjustment unless there are currently
10472 things pushed on the stack that aren't yet used. */
10475 compare (exp, signed_code, unsigned_code)
10477 enum rtx_code signed_code, unsigned_code;
10480 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10482 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10483 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
10484 register enum machine_mode mode = TYPE_MODE (type);
10485 int unsignedp = TREE_UNSIGNED (type);
10486 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
10488 #ifdef HAVE_canonicalize_funcptr_for_compare
10489 /* If function pointers need to be "canonicalized" before they can
10490 be reliably compared, then canonicalize them. */
10491 if (HAVE_canonicalize_funcptr_for_compare
10492 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10493 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10496 rtx new_op0 = gen_reg_rtx (mode);
10498 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10502 if (HAVE_canonicalize_funcptr_for_compare
10503 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10504 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10507 rtx new_op1 = gen_reg_rtx (mode);
10509 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10514 return compare_from_rtx (op0, op1, code, unsignedp, mode,
10516 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10517 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
10520 /* Like compare but expects the values to compare as two rtx's.
10521 The decision as to signed or unsigned comparison must be made by the caller.
10523 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10526 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10527 size of MODE should be used. */
10530 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10531 register rtx op0, op1;
10532 enum rtx_code code;
10534 enum machine_mode mode;
10540 /* If one operand is constant, make it the second one. Only do this
10541 if the other operand is not constant as well. */
10543 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10544 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10549 code = swap_condition (code);
10552 if (flag_force_mem)
10554 op0 = force_not_mem (op0);
10555 op1 = force_not_mem (op1);
10558 do_pending_stack_adjust ();
10560 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10561 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10565 /* There's no need to do this now that combine.c can eliminate lots of
10566 sign extensions. This can be less efficient in certain cases on other
10569 /* If this is a signed equality comparison, we can do it as an
10570 unsigned comparison since zero-extension is cheaper than sign
10571 extension and comparisons with zero are done as unsigned. This is
10572 the case even on machines that can do fast sign extension, since
10573 zero-extension is easier to combine with other operations than
10574 sign-extension is. If we are comparing against a constant, we must
10575 convert it to what it would look like unsigned. */
10576 if ((code == EQ || code == NE) && ! unsignedp
10577 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10579 if (GET_CODE (op1) == CONST_INT
10580 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10581 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10586 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10588 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10591 /* Generate code to calculate EXP using a store-flag instruction
10592 and return an rtx for the result. EXP is either a comparison
10593 or a TRUTH_NOT_EXPR whose operand is a comparison.
10595 If TARGET is nonzero, store the result there if convenient.
10597 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10600 Return zero if there is no suitable set-flag instruction
10601 available on this machine.
10603 Once expand_expr has been called on the arguments of the comparison,
10604 we are committed to doing the store flag, since it is not safe to
10605 re-evaluate the expression. We emit the store-flag insn by calling
10606 emit_store_flag, but only expand the arguments if we have a reason
10607 to believe that emit_store_flag will be successful. If we think that
10608 it will, but it isn't, we have to simulate the store-flag with a
10609 set/jump/set sequence. */
10612 do_store_flag (exp, target, mode, only_cheap)
10615 enum machine_mode mode;
10618 enum rtx_code code;
10619 tree arg0, arg1, type;
10621 enum machine_mode operand_mode;
10625 enum insn_code icode;
10626 rtx subtarget = target;
10629 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10630 result at the end. We can't simply invert the test since it would
10631 have already been inverted if it were valid. This case occurs for
10632 some floating-point comparisons. */
10634 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10635 invert = 1, exp = TREE_OPERAND (exp, 0);
10637 arg0 = TREE_OPERAND (exp, 0);
10638 arg1 = TREE_OPERAND (exp, 1);
10639 type = TREE_TYPE (arg0);
10640 operand_mode = TYPE_MODE (type);
10641 unsignedp = TREE_UNSIGNED (type);
10643 /* We won't bother with BLKmode store-flag operations because it would mean
10644 passing a lot of information to emit_store_flag. */
10645 if (operand_mode == BLKmode)
10648 /* We won't bother with store-flag operations involving function pointers
10649 when function pointers must be canonicalized before comparisons. */
10650 #ifdef HAVE_canonicalize_funcptr_for_compare
10651 if (HAVE_canonicalize_funcptr_for_compare
10652 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10653 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10655 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10656 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10657 == FUNCTION_TYPE))))
10664 /* Get the rtx comparison code to use. We know that EXP is a comparison
10665 operation of some type. Some comparisons against 1 and -1 can be
10666 converted to comparisons with zero. Do so here so that the tests
10667 below will be aware that we have a comparison with zero. These
10668 tests will not catch constants in the first operand, but constants
10669 are rarely passed as the first operand. */
10671 switch (TREE_CODE (exp))
10680 if (integer_onep (arg1))
10681 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10683 code = unsignedp ? LTU : LT;
10686 if (! unsignedp && integer_all_onesp (arg1))
10687 arg1 = integer_zero_node, code = LT;
10689 code = unsignedp ? LEU : LE;
10692 if (! unsignedp && integer_all_onesp (arg1))
10693 arg1 = integer_zero_node, code = GE;
10695 code = unsignedp ? GTU : GT;
10698 if (integer_onep (arg1))
10699 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10701 code = unsignedp ? GEU : GE;
10707 /* Put a constant second. */
10708 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10710 tem = arg0; arg0 = arg1; arg1 = tem;
10711 code = swap_condition (code);
10714 /* If this is an equality or inequality test of a single bit, we can
10715 do this by shifting the bit being tested to the low-order bit and
10716 masking the result with the constant 1. If the condition was EQ,
10717 we xor it with 1. This does not require an scc insn and is faster
10718 than an scc insn even if we have it. */
10720 if ((code == NE || code == EQ)
10721 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10722 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10724 tree inner = TREE_OPERAND (arg0, 0);
10725 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10728 /* If INNER is a right shift of a constant and it plus BITNUM does
10729 not overflow, adjust BITNUM and INNER. */
10731 if (TREE_CODE (inner) == RSHIFT_EXPR
10732 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10733 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10734 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
10735 < TYPE_PRECISION (type)))
10737 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10738 inner = TREE_OPERAND (inner, 0);
10741 /* If we are going to be able to omit the AND below, we must do our
10742 operations as unsigned. If we must use the AND, we have a choice.
10743 Normally unsigned is faster, but for some machines signed is. */
10744 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10745 #ifdef LOAD_EXTEND_OP
10746 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10752 if (subtarget == 0 || GET_CODE (subtarget) != REG
10753 || GET_MODE (subtarget) != operand_mode
10754 || ! safe_from_p (subtarget, inner, 1))
10757 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10760 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10761 size_int (bitnum), subtarget, ops_unsignedp);
10763 if (GET_MODE (op0) != mode)
10764 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10766 if ((code == EQ && ! invert) || (code == NE && invert))
10767 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10768 ops_unsignedp, OPTAB_LIB_WIDEN);
10770 /* Put the AND last so it can combine with more things. */
10771 if (bitnum != TYPE_PRECISION (type) - 1)
10772 op0 = expand_and (op0, const1_rtx, subtarget);
10777 /* Now see if we are likely to be able to do this. Return if not. */
10778 if (! can_compare_p (operand_mode))
10780 icode = setcc_gen_code[(int) code];
10781 if (icode == CODE_FOR_nothing
10782 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
10784 /* We can only do this if it is one of the special cases that
10785 can be handled without an scc insn. */
10786 if ((code == LT && integer_zerop (arg1))
10787 || (! only_cheap && code == GE && integer_zerop (arg1)))
10789 else if (BRANCH_COST >= 0
10790 && ! only_cheap && (code == NE || code == EQ)
10791 && TREE_CODE (type) != REAL_TYPE
10792 && ((abs_optab->handlers[(int) operand_mode].insn_code
10793 != CODE_FOR_nothing)
10794 || (ffs_optab->handlers[(int) operand_mode].insn_code
10795 != CODE_FOR_nothing)))
10801 preexpand_calls (exp);
10802 if (subtarget == 0 || GET_CODE (subtarget) != REG
10803 || GET_MODE (subtarget) != operand_mode
10804 || ! safe_from_p (subtarget, arg1, 1))
10807 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10808 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10811 target = gen_reg_rtx (mode);
10813 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10814 because, if the emit_store_flag does anything it will succeed and
10815 OP0 and OP1 will not be used subsequently. */
10817 result = emit_store_flag (target, code,
10818 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10819 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10820 operand_mode, unsignedp, 1);
10825 result = expand_binop (mode, xor_optab, result, const1_rtx,
10826 result, 0, OPTAB_LIB_WIDEN);
10830 /* If this failed, we have to do this with set/compare/jump/set code. */
10831 if (GET_CODE (target) != REG
10832 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10833 target = gen_reg_rtx (GET_MODE (target));
10835 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10836 result = compare_from_rtx (op0, op1, code, unsignedp,
10837 operand_mode, NULL_RTX, 0);
10838 if (GET_CODE (result) == CONST_INT)
10839 return (((result == const0_rtx && ! invert)
10840 || (result != const0_rtx && invert))
10841 ? const0_rtx : const1_rtx);
10843 label = gen_label_rtx ();
10844 if (bcc_gen_fctn[(int) code] == 0)
10847 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10848 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10849 emit_label (label);
10854 /* Generate a tablejump instruction (used for switch statements). */
10856 #ifdef HAVE_tablejump
10858 /* INDEX is the value being switched on, with the lowest value
10859 in the table already subtracted.
10860 MODE is its expected mode (needed if INDEX is constant).
10861 RANGE is the length of the jump table.
10862 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10864 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10865 index value is out of range. */
10868 do_tablejump (index, mode, range, table_label, default_label)
10869 rtx index, range, table_label, default_label;
10870 enum machine_mode mode;
10872 register rtx temp, vector;
10874 /* Do an unsigned comparison (in the proper mode) between the index
10875 expression and the value which represents the length of the range.
10876 Since we just finished subtracting the lower bound of the range
10877 from the index expression, this comparison allows us to simultaneously
10878 check that the original index expression value is both greater than
10879 or equal to the minimum value of the range and less than or equal to
10880 the maximum value of the range. */
10882 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
10883 emit_jump_insn (gen_bgtu (default_label));
10885 /* If index is in range, it must fit in Pmode.
10886 Convert to Pmode so we can index with it. */
10888 index = convert_to_mode (Pmode, index, 1);
10890 /* Don't let a MEM slip thru, because then INDEX that comes
10891 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10892 and break_out_memory_refs will go to work on it and mess it up. */
10893 #ifdef PIC_CASE_VECTOR_ADDRESS
10894 if (flag_pic && GET_CODE (index) != REG)
10895 index = copy_to_mode_reg (Pmode, index);
10898 /* If flag_force_addr were to affect this address
10899 it could interfere with the tricky assumptions made
10900 about addresses that contain label-refs,
10901 which may be valid only very near the tablejump itself. */
10902 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10903 GET_MODE_SIZE, because this indicates how large insns are. The other
10904 uses should all be Pmode, because they are addresses. This code
10905 could fail if addresses and insns are not the same size. */
10906 index = gen_rtx_PLUS (Pmode,
10907 gen_rtx_MULT (Pmode, index,
10908 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10909 gen_rtx_LABEL_REF (Pmode, table_label));
10910 #ifdef PIC_CASE_VECTOR_ADDRESS
10912 index = PIC_CASE_VECTOR_ADDRESS (index);
10915 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10916 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10917 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10918 RTX_UNCHANGING_P (vector) = 1;
10919 convert_move (temp, vector, 0);
10921 emit_jump_insn (gen_tablejump (temp, table_label));
10923 /* If we are generating PIC code or if the table is PC-relative, the
10924 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10925 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10929 #endif /* HAVE_tablejump */