1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 92-98, 1999 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
30 #include "hard-reg-set.h"
33 #include "insn-flags.h"
34 #include "insn-codes.h"
35 #include "insn-config.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
40 #include "typeclass.h"
/* Ceiling division for nonnegative integers: round X up to the next
   multiple of Y before dividing.  Arguments are evaluated more than
   once, so avoid side effects.  */
44 #define CEIL(x,y) (((x) + (y) - 1) / (y))
46 /* Decide whether a function's arguments should be processed
47 from first to last or from last to first.
49 They should if the stack and args grow in opposite directions, but
50 only if we have push insns. */
/* PUSH_ARGS_REVERSED is defined exactly when stack growth and argument
   growth go in opposite directions (see the comment above).  */
54 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
55 #define PUSH_ARGS_REVERSED /* If it's last to first */
/* Default address-side-effect code used when pushing onto the stack.  */
60 #ifndef STACK_PUSH_CODE
61 #ifdef STACK_GROWS_DOWNWARD
62 #define STACK_PUSH_CODE PRE_DEC
/* NOTE(review): the matching #else/#endif lines of these conditionals
   appear to have been elided from this excerpt -- confirm against the
   full file before editing.  */
64 #define STACK_PUSH_CODE PRE_INC
68 /* Assume that case vectors are not pc-relative. */
69 #ifndef CASE_VECTOR_PC_RELATIVE
70 #define CASE_VECTOR_PC_RELATIVE 0
73 /* If this is nonzero, we do not bother generating VOLATILE
74 around volatile memory references, and we are willing to
75 output indirect addresses. If cse is to follow, we reject
76 indirect addresses so a useful potential cse is generated;
77 if it is used only once, instruction combination will produce
78 the same indirect address eventually. */
/* NOTE(review): the variable the preceding comment documents is not
   present in this excerpt (presumably elided) -- confirm against the
   full file.  */
81 /* Nonzero to generate code for all the subroutines within an
82 expression before generating the upper levels of the expression.
83 Nowadays this is never zero. */
84 int do_preexpand_calls = 1;
86 /* Number of units that we should eventually pop off the stack.
87 These are the arguments to function calls that have already returned. */
88 int pending_stack_adjust;
90 /* Nonzero means stack pops must not be deferred, and deferred stack
91 pops must not be output. It is nonzero inside a function call,
92 inside a conditional expression, inside a statement expression,
93 and in other cases as well. */
94 int inhibit_defer_pop;
96 /* Nonzero means __builtin_saveregs has already been done in this function.
97 The value is the pseudoreg containing the value __builtin_saveregs
99 static rtx saveregs_value;
101 /* Similarly for __builtin_apply_args. */
102 static rtx apply_args_value;
104 /* Don't check memory usage, since code is being emitted to check a memory
105 usage. Used when current_function_check_memory_usage is true, to avoid
106 infinite recursion. */
107 static int in_check_memory_usage;
109 /* Postincrements that still need to be expanded. */
110 static rtx pending_chain;
112 /* This structure is used by move_by_pieces to describe the move to
114 struct move_by_pieces
/* NOTE(review): most members of this structure are elided from this
   excerpt; only one field is visible.  Confirm the full layout against
   the complete file before relying on it.  */
124 int explicit_inc_from;
131 /* This structure is used by clear_by_pieces to describe the clear to
134 struct clear_by_pieces
/* Forward declarations for this file's static helpers, plus externs
   defined elsewhere in the compiler.  PROTO is the K&R/ANSI prototype
   compatibility macro used throughout this era of GCC.  */
146 extern struct obstack permanent_obstack;
147 extern rtx arg_pointer_save_area;
149 static rtx get_push_address PROTO ((int));
151 static rtx enqueue_insn PROTO((rtx, rtx));
152 static void init_queue PROTO((void));
153 static int move_by_pieces_ninsns PROTO((unsigned int, int));
154 static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
155 struct move_by_pieces *));
156 static void clear_by_pieces PROTO((rtx, int, int));
157 static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
158 struct clear_by_pieces *));
159 static int is_zeros_p PROTO((tree));
160 static int mostly_zeros_p PROTO((tree));
161 static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
163 static void store_constructor PROTO((tree, rtx, int));
164 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
165 enum machine_mode, int, int,
167 static enum memory_use_mode
168 get_memory_usage_from_modifier PROTO((enum expand_modifier));
169 static tree save_noncopied_parts PROTO((tree, tree));
170 static tree init_noncopied_parts PROTO((tree, tree));
171 static int safe_from_p PROTO((rtx, tree, int));
172 static int fixed_type_p PROTO((tree));
173 static rtx var_rtx PROTO((tree));
174 static int get_pointer_alignment PROTO((tree, unsigned));
175 static tree string_constant PROTO((tree, tree *));
176 static tree c_strlen PROTO((tree));
177 static rtx get_memory_rtx PROTO((tree));
178 static rtx expand_builtin PROTO((tree, rtx, rtx,
179 enum machine_mode, int));
180 static int apply_args_size PROTO((void));
181 static int apply_result_size PROTO((void));
182 static rtx result_vector PROTO((int, rtx));
183 static rtx expand_builtin_apply_args PROTO((void));
184 static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
185 static void expand_builtin_return PROTO((rtx));
186 static rtx expand_increment PROTO((tree, int, int));
187 static void preexpand_calls PROTO((tree));
188 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
189 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
190 static void do_jump_for_compare PROTO((rtx, rtx, rtx));
191 static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
192 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
194 /* Record for each mode whether we can move a register directly to or
195 from an object of that mode in memory. If we can't, we won't try
196 to use that mode directly when accessing a field of that mode. */
/* Filled in by the once-per-compilation init code below.  */
198 static char direct_load[NUM_MACHINE_MODES];
199 static char direct_store[NUM_MACHINE_MODES];
201 /* If a memory-to-memory move would take MOVE_RATIO or more simple
202 move-instruction sequences, we will do a movstr or libcall instead. */
205 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
208 /* If we are optimizing for space (-Os), cut down the default move ratio */
209 #define MOVE_RATIO (optimize_size ? 3 : 15)
213 /* This macro is used to determine whether move_by_pieces should be called
214 to perform a structure copy. */
215 #ifndef MOVE_BY_PIECES_P
216 #define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns \
217 (SIZE, ALIGN) < MOVE_RATIO)
220 /* This array records the insn_code of insns to perform block moves. */
221 enum insn_code movstr_optab[NUM_MACHINE_MODES];
223 /* This array records the insn_code of insns to perform block clears. */
224 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
226 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
228 #ifndef SLOW_UNALIGNED_ACCESS
229 #define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
232 /* Register mappings for target machines without register windows. */
233 #ifndef INCOMING_REGNO
234 #define INCOMING_REGNO(OUT) (OUT)
236 #ifndef OUTGOING_REGNO
237 #define OUTGOING_REGNO(IN) (IN)
/* NOTE(review): several #else/#endif terminators for the conditionals
   above are not visible in this excerpt -- presumably elided.  */
240 /* This is run once per compilation to set up which modes can be used
241 directly in memory and to initialize the block move optab. */
/* NOTE(review): the function header and many body lines are elided from
   this excerpt; in stock GCC this is init_expr_once.  The visible body
   probes, for every machine mode, whether a plain (set reg mem) /
   (set mem reg) insn is recognized, and records the result in
   direct_load[] / direct_store[].  */
247 enum machine_mode mode;
254 /* Since we are on the permanent obstack, we must be sure we save this
255 spot AFTER we call start_sequence, since it will reuse the rtl it
257 free_point = (char *) oballoc (0);
259 /* Try indexing by frame ptr and try by stack ptr.
260 It is known that on the Convex the stack ptr isn't a valid index.
261 With luck, one or the other is valid on any machine. */
262 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
263 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
/* Dummy SET whose operands are patched in the loop below and fed to
   recog to see whether the target accepts the move.  */
265 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
266 pat = PATTERN (insn);
268 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
269 mode = (enum machine_mode) ((int) mode + 1))
274 direct_load[(int) mode] = direct_store[(int) mode] = 0;
275 PUT_MODE (mem, mode);
276 PUT_MODE (mem1, mode);
278 /* See if there is some register that can be used in this mode and
279 directly loaded or stored from memory. */
281 if (mode != VOIDmode && mode != BLKmode)
282 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
283 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
286 if (! HARD_REGNO_MODE_OK (regno, mode))
289 reg = gen_rtx_REG (mode, regno);
/* Try mem -> reg (load) with both base registers.  */
292 SET_DEST (pat) = reg;
293 if (recog (pat, insn, &num_clobbers) >= 0)
294 direct_load[(int) mode] = 1;
296 SET_SRC (pat) = mem1;
297 SET_DEST (pat) = reg;
298 if (recog (pat, insn, &num_clobbers) >= 0)
299 direct_load[(int) mode] = 1;
/* Try reg -> mem (store) with both base registers.  */
302 SET_DEST (pat) = mem;
303 if (recog (pat, insn, &num_clobbers) >= 0)
304 direct_store[(int) mode] = 1;
307 SET_DEST (pat) = mem1;
308 if (recog (pat, insn, &num_clobbers) >= 0)
309 direct_store[(int) mode] = 1;
317 /* This is run at the start of compiling a function. */
/* NOTE(review): function header elided; this fragment resets the
   per-function expansion state declared above.  */
324 pending_stack_adjust = 0;
325 inhibit_defer_pop = 0;
327 apply_args_value = 0;
331 /* Save all variables describing the current status into the structure *P.
332 This is used before starting a nested function. */
/* NOTE(review): function header elided.  Copies the file-scope expansion
   state into *P, then resets it for the nested function.  */
338 p->pending_chain = pending_chain;
339 p->pending_stack_adjust = pending_stack_adjust;
340 p->inhibit_defer_pop = inhibit_defer_pop;
341 p->saveregs_value = saveregs_value;
342 p->apply_args_value = apply_args_value;
343 p->forced_labels = forced_labels;
/* Start the nested function with a clean slate.  */
345 pending_chain = NULL_RTX;
346 pending_stack_adjust = 0;
347 inhibit_defer_pop = 0;
349 apply_args_value = 0;
353 /* Restore all variables describing the current status from the structure *P.
354 This is used after a nested function. */
357 restore_expr_status (p)
/* NOTE(review): parameter declaration and opening brace elided.
   Mirror image of save_expr_status: restores the file-scope state.  */
360 pending_chain = p->pending_chain;
361 pending_stack_adjust = p->pending_stack_adjust;
362 inhibit_defer_pop = p->inhibit_defer_pop;
363 saveregs_value = p->saveregs_value;
364 apply_args_value = p->apply_args_value;
365 forced_labels = p->forced_labels;
368 /* Manage the queue of increment instructions to be output
369 for POSTINCREMENT_EXPR expressions, etc. */
371 /* Queue up to increment (or change) VAR later. BODY says how:
372 BODY should be the same thing you would pass to emit_insn
373 to increment right away. It will go to emit_insn later on.
375 The value is a QUEUED expression to be used in place of VAR
376 where you want to guarantee the pre-incrementation value of VAR. */
379 enqueue_insn (var, body)
/* NOTE(review): parameter declarations and braces elided.  Pushes a new
   QUEUED node onto the head of pending_chain and returns it.  */
382 pending_chain = gen_rtx_QUEUED (GET_MODE (var),
383 var, NULL_RTX, NULL_RTX, body,
385 return pending_chain;
388 /* Use protect_from_queue to convert a QUEUED expression
389 into something that you can put immediately into an instruction.
390 If the queued incrementation has not happened yet,
391 protect_from_queue returns the variable itself.
392 If the incrementation has happened, protect_from_queue returns a temp
393 that contains a copy of the old value of the variable.
395 Any time an rtx which might possibly be a QUEUED is to be put
396 into an instruction, it must be passed through protect_from_queue first.
397 QUEUED expressions are not meaningful in instructions.
399 Do not pass a value through protect_from_queue and then hold
400 on to it for a while before putting it in an instruction!
401 If the queue is flushed in between, incorrect code will result. */
404 protect_from_queue (x, modify)
/* NOTE(review): parameter declarations and several body lines are elided
   from this excerpt.  */
408 register RTX_CODE code = GET_CODE (x);
410 #if 0 /* A QUEUED can hang around after the queue is forced out. */
411 /* Shortcut for most common case. */
412 if (pending_chain == 0)
418 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
419 use of autoincrement. Make a copy of the contents of the memory
420 location rather than a copy of the address, but not if the value is
421 of mode BLKmode. Don't modify X in place since it might be
423 if (code == MEM && GET_MODE (x) != BLKmode
424 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
426 register rtx y = XEXP (x, 0);
427 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
/* Carry the memory attributes of X over to the new MEM.  */
429 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
430 MEM_COPY_ATTRIBUTES (new, x);
431 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
435 register rtx temp = gen_reg_rtx (GET_MODE (new));
436 emit_insn_before (gen_move_insn (temp, new),
442 /* Otherwise, recursively protect the subexpressions of all
443 the kinds of rtx's that can contain a QUEUED. */
446 rtx tem = protect_from_queue (XEXP (x, 0), 0);
447 if (tem != XEXP (x, 0))
453 else if (code == PLUS || code == MULT)
455 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
456 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
457 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
466 /* If the increment has not happened, use the variable itself. */
467 if (QUEUED_INSN (x) == 0)
468 return QUEUED_VAR (x);
469 /* If the increment has happened and a pre-increment copy exists,
471 if (QUEUED_COPY (x) != 0)
472 return QUEUED_COPY (x);
473 /* The increment has happened but we haven't set up a pre-increment copy.
474 Set one up now, and use it. */
475 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
476 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
478 return QUEUED_COPY (x);
481 /* Return nonzero if X contains a QUEUED expression:
482 if it contains anything that will be altered by a queued increment.
483 We handle only combinations of MEM, PLUS, MINUS and MULT operators
484 since memory addresses generally contain only those. */
/* NOTE(review): function header, switch/case labels, and default return
   are elided from this excerpt.  */
490 register enum rtx_code code = GET_CODE (x);
496 return queued_subexp_p (XEXP (x, 0));
500 return (queued_subexp_p (XEXP (x, 0))
501 || queued_subexp_p (XEXP (x, 1)));
507 /* Perform all the pending incrementations. */
/* NOTE(review): function header elided.  Drains pending_chain, emitting
   each queued body and recording the emitted insn in QUEUED_INSN.  */
513 while ((p = pending_chain))
515 rtx body = QUEUED_BODY (p);
/* A SEQUENCE body is emitted whole; its first element stands in as
   the recorded insn.  */
517 if (GET_CODE (body) == SEQUENCE)
519 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
520 emit_insn (QUEUED_BODY (p));
523 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
524 pending_chain = QUEUED_NEXT (p);
535 /* Copy data from FROM to TO, where the machine modes are not the same.
536 Both modes may be integer, or both may be floating.
537 UNSIGNEDP should be nonzero if FROM is an unsigned type.
538 This causes zero-extension instead of sign-extension. */
/* NOTE(review): this function is heavily elided in this excerpt --
   parameter declarations, braces, return statements, #else/#endif
   pairs, and whole clauses are missing.  The comments added below
   describe only what the visible lines establish.  */
541 convert_move (to, from, unsignedp)
542 register rtx to, from;
545 enum machine_mode to_mode = GET_MODE (to);
546 enum machine_mode from_mode = GET_MODE (from);
547 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
548 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
552 /* rtx code for making an equivalent value. */
553 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
555 to = protect_from_queue (to, 1);
556 from = protect_from_queue (from, 0);
/* Mixing float and integer modes here is an error (handler elided).  */
558 if (to_real != from_real)
561 /* If FROM is a SUBREG that indicates that we have already done at least
562 the required extension, strip it. We don't handle such SUBREGs as
565 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
566 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
567 >= GET_MODE_SIZE (to_mode))
568 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
569 from = gen_lowpart (to_mode, from), from_mode = to_mode;
571 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
/* Same mode (or mode-less constant): a plain move suffices.  */
574 if (to_mode == from_mode
575 || (from_mode == VOIDmode && CONSTANT_P (from)))
577 emit_move_insn (to, from);
/* Floating-point widening.  */
585 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
587 /* Try converting directly if the insn is supported. */
588 if ((code = can_extend_p (to_mode, from_mode, 0))
591 emit_unop_insn (code, to, from, UNKNOWN);
/* Per-mode-pair float truncation insns, each gated on the target
   providing the named pattern.  */
596 #ifdef HAVE_trunchfqf2
597 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
599 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
603 #ifdef HAVE_trunctqfqf2
604 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
606 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
610 #ifdef HAVE_truncsfqf2
611 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
613 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
617 #ifdef HAVE_truncdfqf2
618 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
620 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
624 #ifdef HAVE_truncxfqf2
625 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
627 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
631 #ifdef HAVE_trunctfqf2
632 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
634 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
639 #ifdef HAVE_trunctqfhf2
640 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
642 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
646 #ifdef HAVE_truncsfhf2
647 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
649 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
653 #ifdef HAVE_truncdfhf2
654 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
656 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
660 #ifdef HAVE_truncxfhf2
661 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
663 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
667 #ifdef HAVE_trunctfhf2
668 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
670 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
675 #ifdef HAVE_truncsftqf2
676 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
678 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
682 #ifdef HAVE_truncdftqf2
683 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
685 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
689 #ifdef HAVE_truncxftqf2
690 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
692 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
696 #ifdef HAVE_trunctftqf2
697 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
699 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
704 #ifdef HAVE_truncdfsf2
705 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
707 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
711 #ifdef HAVE_truncxfsf2
712 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
714 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
718 #ifdef HAVE_trunctfsf2
719 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
721 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
725 #ifdef HAVE_truncxfdf2
726 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
728 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
732 #ifdef HAVE_trunctfdf2
733 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
735 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
/* No insn pattern available: select a soft-float library routine for
   the mode pair (the surrounding switch/case lines are elided).  */
747 libcall = extendsfdf2_libfunc;
751 libcall = extendsfxf2_libfunc;
755 libcall = extendsftf2_libfunc;
767 libcall = truncdfsf2_libfunc;
771 libcall = extenddfxf2_libfunc;
775 libcall = extenddftf2_libfunc;
787 libcall = truncxfsf2_libfunc;
791 libcall = truncxfdf2_libfunc;
803 libcall = trunctfsf2_libfunc;
807 libcall = trunctfdf2_libfunc;
819 if (libcall == (rtx) 0)
820 /* This conversion is not implemented yet. */
823 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
825 emit_move_insn (to, value);
829 /* Now both modes are integers. */
831 /* Handle expanding beyond a word. */
832 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
833 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
840 enum machine_mode lowpart_mode;
841 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
843 /* Try converting directly if the insn is supported. */
844 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
847 /* If FROM is a SUBREG, put it into a register. Do this
848 so that we always generate the same set of insns for
849 better cse'ing; if an intermediate assignment occurred,
850 we won't be doing the operation directly on the SUBREG. */
851 if (optimize > 0 && GET_CODE (from) == SUBREG)
852 from = force_reg (from_mode, from);
853 emit_unop_insn (code, to, from, equiv_code);
856 /* Next, try converting via full word. */
857 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
858 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
859 != CODE_FOR_nothing))
861 if (GET_CODE (to) == REG)
862 emit_insn (gen_rtx_CLOBBER (VOIDmode, to))
863 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
864 emit_unop_insn (code, to,
865 gen_lowpart (word_mode, to), equiv_code);
869 /* No special multiword conversion insn; do it by hand. */
872 /* Since we will turn this into a no conflict block, we must ensure
873 that the source does not overlap the target. */
875 if (reg_overlap_mentioned_p (to, from))
876 from = force_reg (from_mode, from);
878 /* Get a copy of FROM widened to a word, if necessary. */
879 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
880 lowpart_mode = word_mode;
882 lowpart_mode = from_mode;
884 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
886 lowpart = gen_lowpart (lowpart_mode, to);
887 emit_move_insn (lowpart, lowfrom);
889 /* Compute the value to put in each remaining word. */
891 fill_value = const0_rtx;
/* Signed case: derive the fill word from the sign of LOWFROM, either
   via a store-flag (slt) insn or by an arithmetic right shift.  */
896 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
897 && STORE_FLAG_VALUE == -1)
899 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
901 fill_value = gen_reg_rtx (word_mode);
902 emit_insn (gen_slt (fill_value));
908 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
909 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
911 fill_value = convert_to_mode (word_mode, fill_value, 1);
915 /* Fill the remaining words. */
916 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
918 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
919 rtx subword = operand_subword (to, index, 1, to_mode);
924 if (fill_value != subword)
925 emit_move_insn (subword, fill_value);
928 insns = get_insns ();
931 emit_no_conflict_block (insns, to, from, NULL_RTX,
932 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
936 /* Truncating multi-word to a word or less. */
937 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
938 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
940 if (!((GET_CODE (from) == MEM
941 && ! MEM_VOLATILE_P (from)
942 && direct_load[(int) to_mode]
943 && ! mode_dependent_address_p (XEXP (from, 0)))
944 || GET_CODE (from) == REG
945 || GET_CODE (from) == SUBREG))
946 from = force_reg (from_mode, from);
947 convert_move (to, gen_lowpart (word_mode, from), 0);
951 /* Handle pointer conversion */ /* SPEE 900220 */
/* Partial-integer (pointer) modes: go via the corresponding full
   integer mode, then use the target's trunc/extend pattern if any.  */
952 if (to_mode == PQImode)
954 if (from_mode != QImode)
955 from = convert_to_mode (QImode, from, unsignedp);
957 #ifdef HAVE_truncqipqi2
958 if (HAVE_truncqipqi2)
960 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
963 #endif /* HAVE_truncqipqi2 */
967 if (from_mode == PQImode)
969 if (to_mode != QImode)
971 from = convert_to_mode (QImode, from, unsignedp);
976 #ifdef HAVE_extendpqiqi2
977 if (HAVE_extendpqiqi2)
979 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
982 #endif /* HAVE_extendpqiqi2 */
987 if (to_mode == PSImode)
989 if (from_mode != SImode)
990 from = convert_to_mode (SImode, from, unsignedp);
992 #ifdef HAVE_truncsipsi2
993 if (HAVE_truncsipsi2)
995 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
998 #endif /* HAVE_truncsipsi2 */
1002 if (from_mode == PSImode)
1004 if (to_mode != SImode)
1006 from = convert_to_mode (SImode, from, unsignedp);
1011 #ifdef HAVE_extendpsisi2
1012 if (HAVE_extendpsisi2)
1014 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1017 #endif /* HAVE_extendpsisi2 */
1022 if (to_mode == PDImode)
1024 if (from_mode != DImode)
1025 from = convert_to_mode (DImode, from, unsignedp);
1027 #ifdef HAVE_truncdipdi2
1028 if (HAVE_truncdipdi2)
1030 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1033 #endif /* HAVE_truncdipdi2 */
1037 if (from_mode == PDImode)
1039 if (to_mode != DImode)
1041 from = convert_to_mode (DImode, from, unsignedp);
1046 #ifdef HAVE_extendpdidi2
1047 if (HAVE_extendpdidi2)
1049 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1052 #endif /* HAVE_extendpdidi2 */
1057 /* Now follow all the conversions between integers
1058 no more than a word long. */
1060 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1061 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1062 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1063 GET_MODE_BITSIZE (from_mode)))
1065 if (!((GET_CODE (from) == MEM
1066 && ! MEM_VOLATILE_P (from)
1067 && direct_load[(int) to_mode]
1068 && ! mode_dependent_address_p (XEXP (from, 0)))
1069 || GET_CODE (from) == REG
1070 || GET_CODE (from) == SUBREG))
1071 from = force_reg (from_mode, from);
/* A hard register may not be valid in the narrower mode; copy to a
   pseudo first if so.  */
1072 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1073 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1074 from = copy_to_reg (from);
1075 emit_move_insn (to, gen_lowpart (to_mode, from));
1079 /* Handle extension. */
1080 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1082 /* Convert directly if that works. */
1083 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1084 != CODE_FOR_nothing)
1086 emit_unop_insn (code, to, from, equiv_code);
1091 enum machine_mode intermediate;
1095 /* Search for a mode to convert via. */
1096 for (intermediate = from_mode; intermediate != VOIDmode;
1097 intermediate = GET_MODE_WIDER_MODE (intermediate))
1098 if (((can_extend_p (to_mode, intermediate, unsignedp)
1099 != CODE_FOR_nothing)
1100 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1101 && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
1102 && (can_extend_p (intermediate, from_mode, unsignedp)
1103 != CODE_FOR_nothing))
1105 convert_move (to, convert_to_mode (intermediate, from,
1106 unsignedp), unsignedp);
1110 /* No suitable intermediate mode.
1111 Generate what we need with shifts. */
1112 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1113 - GET_MODE_BITSIZE (from_mode), 0);
1114 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1115 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1117 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1120 emit_move_insn (to, tmp);
1125 /* Support special truncate insns for certain modes. */
/* Each of the following mode-pair cases uses the target's dedicated
   truncate pattern when available; otherwise it recurses on a copy of
   FROM forced into a register.  */
1127 if (from_mode == DImode && to_mode == SImode)
1129 #ifdef HAVE_truncdisi2
1130 if (HAVE_truncdisi2)
1132 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1136 convert_move (to, force_reg (from_mode, from), unsignedp);
1140 if (from_mode == DImode && to_mode == HImode)
1142 #ifdef HAVE_truncdihi2
1143 if (HAVE_truncdihi2)
1145 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1149 convert_move (to, force_reg (from_mode, from), unsignedp);
1153 if (from_mode == DImode && to_mode == QImode)
1155 #ifdef HAVE_truncdiqi2
1156 if (HAVE_truncdiqi2)
1158 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1162 convert_move (to, force_reg (from_mode, from), unsignedp);
1166 if (from_mode == SImode && to_mode == HImode)
1168 #ifdef HAVE_truncsihi2
1169 if (HAVE_truncsihi2)
1171 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1175 convert_move (to, force_reg (from_mode, from), unsignedp);
1179 if (from_mode == SImode && to_mode == QImode)
1181 #ifdef HAVE_truncsiqi2
1182 if (HAVE_truncsiqi2)
1184 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1188 convert_move (to, force_reg (from_mode, from), unsignedp);
1192 if (from_mode == HImode && to_mode == QImode)
1194 #ifdef HAVE_trunchiqi2
1195 if (HAVE_trunchiqi2)
1197 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1201 convert_move (to, force_reg (from_mode, from), unsignedp);
1205 if (from_mode == TImode && to_mode == DImode)
1207 #ifdef HAVE_trunctidi2
1208 if (HAVE_trunctidi2)
1210 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1214 convert_move (to, force_reg (from_mode, from), unsignedp);
1218 if (from_mode == TImode && to_mode == SImode)
1220 #ifdef HAVE_trunctisi2
1221 if (HAVE_trunctisi2)
1223 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1227 convert_move (to, force_reg (from_mode, from), unsignedp);
1231 if (from_mode == TImode && to_mode == HImode)
1233 #ifdef HAVE_trunctihi2
1234 if (HAVE_trunctihi2)
1236 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1240 convert_move (to, force_reg (from_mode, from), unsignedp);
1244 if (from_mode == TImode && to_mode == QImode)
1246 #ifdef HAVE_trunctiqi2
1247 if (HAVE_trunctiqi2)
1249 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1253 convert_move (to, force_reg (from_mode, from), unsignedp);
1257 /* Handle truncation of volatile memrefs, and so on;
1258 the things that couldn't be truncated directly,
1259 and for which there was no special instruction. */
1260 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1262 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1263 emit_move_insn (to, temp);
1267 /* Mode combination is not recognized. */
1271 /* Return an rtx for a value that would result
1272 from converting X to mode MODE.
1273 Both X and MODE may be floating, or both integer.
1274 UNSIGNEDP is nonzero if X is an unsigned value.
1275 This can be done by referring to a part of X in place
1276 or by copying to a new temporary with conversion.
1278 This function *must not* call protect_from_queue
1279 except when putting X into an insn (in which case convert_move does it). */
/* NOTE(review): return type and remaining parameter declarations are
   elided.  Thin wrapper: delegates to convert_modes with an unknown
   (VOIDmode) source mode.  */
1282 convert_to_mode (mode, x, unsignedp)
1283 enum machine_mode mode;
1287 return convert_modes (mode, VOIDmode, x, unsignedp);
1290 /* Return an rtx for a value that would result
1291 from converting X from mode OLDMODE to mode MODE.
1292 Both modes may be floating, or both integer.
1293 UNSIGNEDP is nonzero if X is an unsigned value.
1295 This can be done by referring to a part of X in place
1296 or by copying to a new temporary with conversion.
1298 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1300 This function *must not* call protect_from_queue
1301 except when putting X into an insn (in which case convert_move does it). */
/* NOTE(review): function header, parameter declarations, and several
   body lines (including some returns) are elided from this excerpt.  */
1304 convert_modes (mode, oldmode, x, unsignedp)
1305 enum machine_mode mode, oldmode;
1311 /* If FROM is a SUBREG that indicates that we have already done at least
1312 the required extension, strip it. */
1314 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1315 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1316 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1317 x = gen_lowpart (mode, x);
/* Prefer X's actual mode over the caller's OLDMODE hint.  */
1319 if (GET_MODE (x) != VOIDmode)
1320 oldmode = GET_MODE (x);
1322 if (mode == oldmode)
1325 /* There is one case that we must handle specially: If we are converting
1326 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1327 we are to interpret the constant as unsigned, gen_lowpart will do
1328 the wrong if the constant appears negative. What we want to do is
1329 make the high-order word of the constant zero, not all ones. */
1331 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1332 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1333 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1335 HOST_WIDE_INT val = INTVAL (x);
1337 if (oldmode != VOIDmode
1338 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1340 int width = GET_MODE_BITSIZE (oldmode);
1342 /* We need to zero extend VAL. */
1343 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1346 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1349 /* We can do this with a gen_lowpart if both desired and current modes
1350 are integer, and this is either a constant integer, a register, or a
1351 non-volatile MEM. Except for the constant case where MODE is no
1352 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1354 if ((GET_CODE (x) == CONST_INT
1355 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1356 || (GET_MODE_CLASS (mode) == MODE_INT
1357 && GET_MODE_CLASS (oldmode) == MODE_INT
1358 && (GET_CODE (x) == CONST_DOUBLE
1359 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1360 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1361 && direct_load[(int) mode])
1362 || (GET_CODE (x) == REG
1363 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1364 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1366 /* ?? If we don't know OLDMODE, we have to assume here that
1367 X does not need sign- or zero-extension. This may not be
1368 the case, but it's the best we can do. */
1369 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1370 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1372 HOST_WIDE_INT val = INTVAL (x);
1373 int width = GET_MODE_BITSIZE (oldmode);
1375 /* We must sign or zero-extend in this case. Start by
1376 zero-extending, then sign extend if we need to. */
1377 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1379 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1380 val |= (HOST_WIDE_INT) (-1) << width;
1382 return GEN_INT (val);
1385 return gen_lowpart (mode, x);
/* General case: allocate a fresh pseudo and let convert_move do the
   real work.  */
1388 temp = gen_reg_rtx (mode);
1389 convert_move (temp, x, unsignedp);
1394 /* This macro is used to determine what the largest unit size that
1395 move_by_pieces can use is. */
1397 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1398 move efficiently, as opposed to MOVE_MAX which is the maximum
1399 number of bytes we can move with a single instruction. */
1401 #ifndef MOVE_MAX_PIECES
1402 #define MOVE_MAX_PIECES MOVE_MAX
/* NOTE(review): the matching #endif is elided in this extract.  */
1405 /* Generate several move instructions to copy LEN bytes
1406 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1407 The caller must pass FROM and TO
1408 through protect_from_queue before calling.
1409 ALIGN (in bytes) is maximum alignment we can assume. */
/* NOTE(review): several original lines (declarations, braces, some field
   assignments) are elided in this extract.  */
1412 move_by_pieces (to, from, len, align)
1416 struct move_by_pieces data;
1417 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1418 int max_size = MOVE_MAX_PIECES + 1;
1419 enum machine_mode mode = VOIDmode, tmode;
1420 enum insn_code icode;
/* Record the raw addresses and whether each side already auto-increments.  */
1423 data.to_addr = to_addr;
1424 data.from_addr = from_addr;
1428 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1429 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1431 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1432 || GET_CODE (from_addr) == POST_INC
1433 || GET_CODE (from_addr) == POST_DEC);
1435 data.explicit_inc_from = 0;
1436 data.explicit_inc_to = 0;
/* A decrementing destination means we copy from the end backwards.  */
1438 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1439 if (data.reverse) data.offset = len;
1442 data.to_struct = MEM_IN_STRUCT_P (to);
1443 data.from_struct = MEM_IN_STRUCT_P (from);
1445 /* If copying requires more than two move insns,
1446 copy addresses to registers (to make displacements shorter)
1447 and use post-increment if available. */
1448 if (!(data.autinc_from && data.autinc_to)
1449 && move_by_pieces_ninsns (len, align) > 2)
1451 /* Find the mode of the largest move... */
1452 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1453 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1454 if (GET_MODE_SIZE (tmode) < max_size)
1457 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1459 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1460 data.autinc_from = 1;
1461 data.explicit_inc_from = -1;
1463 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1465 data.from_addr = copy_addr_to_reg (from_addr);
1466 data.autinc_from = 1;
1467 data.explicit_inc_from = 1;
1469 if (!data.autinc_from && CONSTANT_P (from_addr))
1470 data.from_addr = copy_addr_to_reg (from_addr);
1471 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1473 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1475 data.explicit_inc_to = -1;
1477 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1479 data.to_addr = copy_addr_to_reg (to_addr);
1481 data.explicit_inc_to = 1;
1483 if (!data.autinc_to && CONSTANT_P (to_addr))
1484 data.to_addr = copy_addr_to_reg (to_addr);
/* When unaligned access is cheap (or alignment is maximal), pretend the
   data is fully aligned so the widest modes are usable.  */
1487 if (! SLOW_UNALIGNED_ACCESS
1488 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1491 /* First move what we can in the largest integer mode, then go to
1492 successively smaller modes. */
1494 while (max_size > 1)
1496 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1497 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1498 if (GET_MODE_SIZE (tmode) < max_size)
1501 if (mode == VOIDmode)
1504 icode = mov_optab->handlers[(int) mode].insn_code;
1505 if (icode != CODE_FOR_nothing
1506 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1507 GET_MODE_SIZE (mode)))
1508 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1510 max_size = GET_MODE_SIZE (mode);
1513 /* The code above should have handled everything. */
1518 /* Return number of insns required to move L bytes by pieces.
1519 ALIGN (in bytes) is maximum alignment we can assume. */
/* NOTE(review): declarations and braces are partly elided in this extract.
   The loop mirrors move_by_pieces: count how many moves of each successively
   narrower integer mode are needed to cover L bytes.  */
1522 move_by_pieces_ninsns (l, align)
1526 register int n_insns = 0;
1527 int max_size = MOVE_MAX + 1;
/* Cheap unaligned access (or maximal alignment) lets us use wide modes.  */
1529 if (! SLOW_UNALIGNED_ACCESS
1530 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1533 while (max_size > 1)
1535 enum machine_mode mode = VOIDmode, tmode;
1536 enum insn_code icode;
/* Pick the widest integer mode still narrower than MAX_SIZE.  */
1538 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1539 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1540 if (GET_MODE_SIZE (tmode) < max_size)
1543 if (mode == VOIDmode)
1546 icode = mov_optab->handlers[(int) mode].insn_code;
1547 if (icode != CODE_FOR_nothing
1548 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1549 GET_MODE_SIZE (mode)))
/* Account for the moves this mode can do; carry the remainder down.  */
1550 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1552 max_size = GET_MODE_SIZE (mode);
1558 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1559 with move instructions for mode MODE. GENFUN is the gen_... function
1560 to make a move insn for that mode. DATA has all the other info. */
/* NOTE(review): some lines (offset arguments, braces) are elided here.  */
1563 move_by_pieces_1 (genfun, mode, data)
1564 rtx (*genfun) PROTO ((rtx, ...));
1565 enum machine_mode mode;
1566 struct move_by_pieces *data;
1568 register int size = GET_MODE_SIZE (mode);
1569 register rtx to1, from1;
/* Emit one MODE-sized move per iteration until fewer than SIZE bytes
   remain; a narrower mode will finish the tail.  */
1571 while (data->len >= size)
1573 if (data->reverse) data->offset -= size;
/* Auto-increment addressing reuses the address register directly;
   otherwise address the piece at an explicit constant offset.  */
1575 to1 = (data->autinc_to
1576 ? gen_rtx_MEM (mode, data->to_addr)
1577 : copy_rtx (change_address (data->to, mode,
1578 plus_constant (data->to_addr,
1580 MEM_IN_STRUCT_P (to1) = data->to_struct;
1583 = (data->autinc_from
1584 ? gen_rtx_MEM (mode, data->from_addr)
1585 : copy_rtx (change_address (data->from, mode,
1586 plus_constant (data->from_addr,
1588 MEM_IN_STRUCT_P (from1) = data->from_struct;
/* Explicit pre-decrement of the address registers when the target has
   no matching addressing mode baked into the MEM.  */
1590 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1591 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1592 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1593 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1595 emit_insn ((*genfun) (to1, from1));
/* Explicit post-increment, symmetric with the pre-decrement above.  */
1596 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1597 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1598 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1599 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1601 if (! data->reverse) data->offset += size;
1607 /* Emit code to move a block Y to a block X.
1608 This may be done with string-move instructions,
1609 with multiple scalar move instructions, or with a library call.
1611 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1613 SIZE is an rtx that says how long they are.
1614 ALIGN is the maximum alignment we can assume they have,
1617 Return the address of the new block, if memcpy is called and returns it,
/* Strategy, in order of preference as visible below:
   1. move_by_pieces for small constant sizes;
   2. a target movstr pattern (narrowest mode first);
   3. an expanded call to memcpy (TARGET_MEM_FUNCTIONS) or bcopy.
   NOTE(review): many original lines are elided in this extract.  */
1621 emit_block_move (x, y, size, align)
1627 #ifdef TARGET_MEM_FUNCTIONS
1629 tree call_expr, arg_list;
1632 if (GET_MODE (x) != BLKmode)
1635 if (GET_MODE (y) != BLKmode)
1638 x = protect_from_queue (x, 1);
1639 y = protect_from_queue (y, 0);
1640 size = protect_from_queue (size, 0);
1642 if (GET_CODE (x) != MEM)
1644 if (GET_CODE (y) != MEM)
1649 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1650 move_by_pieces (x, y, INTVAL (size), align);
1653 /* Try the most limited insn first, because there's no point
1654 including more than one in the machine description unless
1655 the more limited one has some advantage. */
1657 rtx opalign = GEN_INT (align);
1658 enum machine_mode mode;
1660 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1661 mode = GET_MODE_WIDER_MODE (mode))
1663 enum insn_code code = movstr_optab[(int) mode];
1665 if (code != CODE_FOR_nothing
1666 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1667 here because if SIZE is less than the mode mask, as it is
1668 returned by the macro, it will definitely be less than the
1669 actual mode mask. */
1670 && ((GET_CODE (size) == CONST_INT
1671 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1672 <= (GET_MODE_MASK (mode) >> 1)))
1673 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1674 && (insn_operand_predicate[(int) code][0] == 0
1675 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1676 && (insn_operand_predicate[(int) code][1] == 0
1677 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1678 && (insn_operand_predicate[(int) code][3] == 0
1679 || (*insn_operand_predicate[(int) code][3]) (opalign,
1683 rtx last = get_last_insn ();
/* SIZE is always treated as unsigned here.  */
1686 op2 = convert_to_mode (mode, size, 1);
1687 if (insn_operand_predicate[(int) code][2] != 0
1688 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1689 op2 = copy_to_mode_reg (mode, op2);
1691 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
/* Pattern expansion failed; discard anything it emitted.  */
1698 delete_insns_since (last);
1702 #ifdef TARGET_MEM_FUNCTIONS
1703 /* It is incorrect to use the libcall calling conventions to call
1704 memcpy in this context.
1706 This could be a user call to memcpy and the user may wish to
1707 examine the return value from memcpy.
1709 For targets where libcalls and normal calls have different conventions
1710 for returning pointers, we could end up generating incorrect code.
1712 So instead of using a libcall sequence we build up a suitable
1713 CALL_EXPR and expand the call in the normal fashion. */
1714 if (fn == NULL_TREE)
1718 /* This was copied from except.c, I don't know if all this is
1719 necessary in this context or not. */
1720 fn = get_identifier ("memcpy");
1721 push_obstacks_nochange ();
1722 end_temporary_allocation ();
1723 fntype = build_pointer_type (void_type_node);
1724 fntype = build_function_type (fntype, NULL_TREE);
1725 fn = build_decl (FUNCTION_DECL, fn, fntype);
1726 DECL_EXTERNAL (fn) = 1;
1727 TREE_PUBLIC (fn) = 1;
1728 DECL_ARTIFICIAL (fn) = 1;
1729 make_decl_rtl (fn, NULL_PTR, 1);
1730 assemble_external (fn);
1734 /* We need to make an argument list for the function call.
1736 memcpy has three arguments, the first two are void * addresses and
1737 the last is a size_t byte count for the copy. */
1739 = build_tree_list (NULL_TREE,
1740 make_tree (build_pointer_type (void_type_node),
1742 TREE_CHAIN (arg_list)
1743 = build_tree_list (NULL_TREE,
1744 make_tree (build_pointer_type (void_type_node),
1746 TREE_CHAIN (TREE_CHAIN (arg_list))
1747 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1748 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1750 /* Now we have to build up the CALL_EXPR itself. */
1751 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1752 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1753 call_expr, arg_list, NULL_TREE);
1754 TREE_SIDE_EFFECTS (call_expr) = 1;
1756 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
/* Without TARGET_MEM_FUNCTIONS fall back to bcopy, which returns nothing
   (hence VOIDmode) and takes (src, dst, len).  */
1758 emit_library_call (bcopy_libfunc, 0,
1759 VOIDmode, 3, XEXP (y, 0), Pmode,
1761 convert_to_mode (TYPE_MODE (integer_type_node), size,
1762 TREE_UNSIGNED (integer_type_node)),
1763 TYPE_MODE (integer_type_node));
1770 /* Copy all or part of a value X into registers starting at REGNO.
1771 The number of registers to be filled is NREGS. */
/* NOTE(review): parameter declarations and some control flow are elided
   in this extract.  Tries a load_multiple pattern first, then falls back
   to one word-mode move per register.  */
1774 move_block_to_reg (regno, x, nregs, mode)
1778 enum machine_mode mode;
1781 #ifdef HAVE_load_multiple
/* Constants the target cannot load directly are spilled to memory first.  */
1789 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1790 x = validize_mem (force_const_mem (mode, x));
1792 /* See if the machine can do this with a load multiple insn. */
1793 #ifdef HAVE_load_multiple
1794 if (HAVE_load_multiple)
1796 last = get_last_insn ();
1797 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
/* Pattern failed; remove any partial expansion.  */
1805 delete_insns_since (last);
/* Fallback: move each word of X into its register individually.  */
1809 for (i = 0; i < nregs; i++)
1810 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1811 operand_subword_force (x, i, mode));
1814 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1815 The number of registers to be filled is NREGS. SIZE indicates the number
1816 of bytes in the object X. */
/* NOTE(review): parameter declarations and some braces are elided in this
   extract.  */
1820 move_block_from_reg (regno, x, nregs, size)
1827 #ifdef HAVE_store_multiple
1831 enum machine_mode mode;
1833 /* If SIZE is that of a mode no bigger than a word, just use that
1834 mode's store operation. */
1835 if (size <= UNITS_PER_WORD
1836 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1838 emit_move_insn (change_address (x, mode, NULL),
1839 gen_rtx_REG (mode, regno));
1843 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1844 to the left before storing to memory. Note that the previous test
1845 doesn't handle all cases (e.g. SIZE == 3). */
1846 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1848 rtx tem = operand_subword (x, 0, 1, BLKmode);
/* Shift the value into the high-order end of the word before storing.  */
1854 shift = expand_shift (LSHIFT_EXPR, word_mode,
1855 gen_rtx_REG (word_mode, regno),
1856 build_int_2 ((UNITS_PER_WORD - size)
1857 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1858 emit_move_insn (tem, shift);
1862 /* See if the machine can do this with a store multiple insn. */
1863 #ifdef HAVE_store_multiple
1864 if (HAVE_store_multiple)
1866 last = get_last_insn ();
1867 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
/* Pattern failed; remove any partial expansion.  */
1875 delete_insns_since (last);
/* Fallback: store each register into its word of X individually.  */
1879 for (i = 0; i < nregs; i++)
1881 rtx tem = operand_subword (x, i, 1, BLKmode);
1886 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1890 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1891 registers represented by a PARALLEL. SSIZE represents the total size of
1892 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1894 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1895 the balance will be in what would be the low-order memory addresses, i.e.
1896 left justified for big endian, right justified for little endian. This
1897 happens to be true for the targets currently using this support. If this
1898 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
/* NOTE(review): declarations (src, tmps, i, start, shift) and some braces
   are elided in this extract.  */
1902 emit_group_load (dst, orig_src, ssize, align)
1909 if (GET_CODE (dst) != PARALLEL)
1912 /* Check for a NULL entry, used to indicate that the parameter goes
1913 both on the stack and in registers. */
1914 if (XEXP (XVECEXP (dst, 0, 0), 0))
/* One temporary per destination element; filled in the loop below.  */
1919 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
1921 /* If we won't be loading directly from memory, protect the real source
1922 from strange tricks we might play. */
1924 if (GET_CODE (src) != MEM)
1926 src = gen_reg_rtx (GET_MODE (orig_src));
1927 emit_move_insn (src, orig_src);
1930 /* Process the pieces. */
1931 for (i = start; i < XVECLEN (dst, 0); i++)
1933 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1934 int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1935 int bytelen = GET_MODE_SIZE (mode);
1938 /* Handle trailing fragments that run over the size of the struct. */
1939 if (ssize >= 0 && bytepos + bytelen > ssize)
1941 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1942 bytelen = ssize - bytepos;
1947 /* Optimize the access just a bit. */
/* An aligned, full-width piece can be loaded with a plain move instead
   of the general extract_bit_field path.  */
1948 if (GET_CODE (src) == MEM
1949 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
1950 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1951 && bytelen == GET_MODE_SIZE (mode))
1953 tmps[i] = gen_reg_rtx (mode);
1954 emit_move_insn (tmps[i],
1955 change_address (src, mode,
1956 plus_constant (XEXP (src, 0),
1961 tmps[i] = extract_bit_field (src, bytelen*BITS_PER_UNIT,
1962 bytepos*BITS_PER_UNIT, 1, NULL_RTX,
1963 mode, mode, align, ssize);
/* Left-justify a short trailing fragment on big-endian targets.  */
1966 if (BYTES_BIG_ENDIAN && shift)
1968 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1969 tmps[i], 0, OPTAB_WIDEN);
1974 /* Copy the extracted pieces into the proper (probable) hard regs. */
1975 for (i = start; i < XVECLEN (dst, 0); i++)
1976 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1979 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1980 registers represented by a PARALLEL. SSIZE represents the total size of
1981 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
/* NOTE(review): declarations (dst, tmps, i, start, temp) and some braces
   are elided in this extract.  */
1984 emit_group_store (orig_dst, src, ssize, align)
1991 if (GET_CODE (src) != PARALLEL)
1994 /* Check for a NULL entry, used to indicate that the parameter goes
1995 both on the stack and in registers. */
1996 if (XEXP (XVECEXP (src, 0, 0), 0))
2001 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));
2003 /* Copy the (probable) hard regs into pseudos. */
2004 for (i = start; i < XVECLEN (src, 0); i++)
2006 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2007 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2008 emit_move_insn (tmps[i], reg);
2012 /* If we won't be storing directly into memory, protect the real destination
2013 from strange tricks we might play. */
2015 if (GET_CODE (dst) == PARALLEL)
2019 /* We can get a PARALLEL dst if there is a conditional expression in
2020 a return statement. In that case, the dst and src are the same,
2021 so no action is necessary. */
2022 if (rtx_equal_p (dst, src))
2025 /* It is unclear if we can ever reach here, but we may as well handle
2026 it. Allocate a temporary, and split this into a store/load to/from
/* Round-trip through a stack temporary: store SRC there, then reload
   into the PARALLEL destination.  */
2029 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2030 emit_group_store (temp, src, ssize, align);
2031 emit_group_load (dst, temp, ssize, align);
2034 else if (GET_CODE (dst) != MEM)
2036 dst = gen_reg_rtx (GET_MODE (orig_dst));
2037 /* Make life a bit easier for combine. */
2038 emit_move_insn (dst, const0_rtx);
2040 else if (! MEM_IN_STRUCT_P (dst))
2042 /* store_bit_field requires that memory operations have
2043 mem_in_struct_p set; we might not. */
2045 dst = copy_rtx (orig_dst);
2046 MEM_SET_IN_STRUCT_P (dst, 1);
2049 /* Process the pieces. */
2050 for (i = start; i < XVECLEN (src, 0); i++)
2052 int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2053 enum machine_mode mode = GET_MODE (tmps[i]);
2054 int bytelen = GET_MODE_SIZE (mode);
2056 /* Handle trailing fragments that run over the size of the struct. */
2057 if (ssize >= 0 && bytepos + bytelen > ssize)
/* Big-endian: the fragment sits in the high-order end, so shift it
   down before storing the truncated length.  */
2059 if (BYTES_BIG_ENDIAN)
2061 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2062 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2063 tmps[i], 0, OPTAB_WIDEN);
2065 bytelen = ssize - bytepos;
2068 /* Optimize the access just a bit. */
/* An aligned, full-width piece can be stored with a plain move instead
   of the general store_bit_field path.  */
2069 if (GET_CODE (dst) == MEM
2070 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2071 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2072 && bytelen == GET_MODE_SIZE (mode))
2074 emit_move_insn (change_address (dst, mode,
2075 plus_constant (XEXP (dst, 0),
2081 store_bit_field (dst, bytelen*BITS_PER_UNIT, bytepos*BITS_PER_UNIT,
2082 mode, tmps[i], align, ssize);
2087 /* Copy from the pseudo into the (probable) hard reg. */
2088 if (GET_CODE (dst) == REG)
2089 emit_move_insn (orig_dst, dst);
2092 /* Generate code to copy a BLKmode object of TYPE out of a
2093 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2094 is null, a stack temporary is created. TGTBLK is returned.
2096 The primary purpose of this routine is to handle functions
2097 that return BLKmode structures in registers. Some machines
2098 (the PA for example) want to return all small structures
2099 in registers regardless of the structure's alignment.
/* NOTE(review): some lines (the tgtblk == 0 test, parameter declarations,
   the final return) are elided in this extract.  */
2103 copy_blkmode_from_reg(tgtblk,srcreg,type)
2108 int bytes = int_size_in_bytes (type);
2109 rtx src = NULL, dst = NULL;
2110 int bitsize = MIN (TYPE_ALIGN (type), (unsigned int) BITS_PER_WORD);
2111 int bitpos, xbitpos, big_endian_correction = 0;
2115 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2116 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2117 preserve_temp_slots (tgtblk);
2120 /* This code assumes srcreg is at least a full word. If it isn't,
2121 copy it into a new pseudo which is a full word. */
2122 if (GET_MODE (srcreg) != BLKmode
2123 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2124 srcreg = convert_to_mode (word_mode, srcreg,
2125 TREE_UNSIGNED (type));
2127 /* Structures whose size is not a multiple of a word are aligned
2128 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2129 machine, this means we must skip the empty high order bytes when
2130 calculating the bit offset. */
2131 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2132 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2135 /* Copy the structure BITSIZE bits at a time.
2137 We could probably emit more efficient code for machines
2138 which do not use strict alignment, but it doesn't seem
2139 worth the effort at the current time. */
/* BITPOS tracks the destination offset, XBITPOS the (possibly skewed)
   source offset; they differ only by big_endian_correction.  */
2140 for (bitpos = 0, xbitpos = big_endian_correction;
2141 bitpos < bytes * BITS_PER_UNIT;
2142 bitpos += bitsize, xbitpos += bitsize)
2145 /* We need a new source operand each time xbitpos is on a
2146 word boundary and when xbitpos == big_endian_correction
2147 (the first time through). */
2148 if (xbitpos % BITS_PER_WORD == 0
2149 || xbitpos == big_endian_correction)
2150 src = operand_subword_force (srcreg,
2151 xbitpos / BITS_PER_WORD,
2154 /* We need a new destination operand each time bitpos is on
2156 if (bitpos % BITS_PER_WORD == 0)
2157 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode)
2159 /* Use xbitpos for the source extraction (right justified) and
2160 bitpos for the destination store (left justified). */
2161 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2162 extract_bit_field (src, bitsize,
2163 xbitpos % BITS_PER_WORD, 1,
2164 NULL_RTX, word_mode,
2166 bitsize / BITS_PER_UNIT,
2168 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2174 /* Add a USE expression for REG to the (possibly empty) list pointed
2175 to by CALL_FUSAGE. REG must denote a hard register. */
/* NOTE(review): the abort on the hard-register check and the assignment
   target of the EXPR_LIST are elided in this extract.  */
2178 use_reg (call_fusage, reg)
2179 rtx *call_fusage, reg;
/* Reject anything that is not a hard register.  */
2181 if (GET_CODE (reg) != REG
2182 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
/* Prepend (use reg) onto the existing fusage chain.  */
2186 = gen_rtx_EXPR_LIST (VOIDmode,
2187 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2190 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2191 starting at REGNO. All of these registers must be hard registers. */
/* NOTE(review): declarations and the abort body are elided here.  */
2194 use_regs (call_fusage, regno, nregs)
/* The whole range must stay inside the hard-register file.  */
2201 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2204 for (i = 0; i < nregs; i++)
2205 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2208 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2209 PARALLEL REGS. This is for calls that pass values in multiple
2210 non-contiguous locations. The Irix 6 ABI has examples of this. */
/* NOTE(review): parameter declarations and braces are elided here.  */
2213 use_group_regs (call_fusage, regs)
2219 for (i = 0; i < XVECLEN (regs, 0); i++)
2221 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2223 /* A NULL entry means the parameter goes both on the stack and in
2224 registers. This can also be a MEM for targets that pass values
2225 partially on the stack and partially in registers. */
2226 if (reg != 0 && GET_CODE (reg) == REG)
2227 use_reg (call_fusage, reg);
2231 /* Generate several move instructions to clear LEN bytes of block TO.
2232 (A MEM rtx with BLKmode). The caller must pass TO through
2233 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
/* Store-only counterpart of move_by_pieces: same mode-selection and
   auto-increment logic, but the source is always zero.
   NOTE(review): several original lines are elided in this extract.  */
2237 clear_by_pieces (to, len, align)
2241 struct clear_by_pieces data;
2242 rtx to_addr = XEXP (to, 0);
2243 int max_size = MOVE_MAX_PIECES + 1;
2244 enum machine_mode mode = VOIDmode, tmode;
2245 enum insn_code icode;
2248 data.to_addr = to_addr;
/* Does the destination address already auto-increment?  */
2251 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2252 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2254 data.explicit_inc_to = 0;
/* A decrementing destination means we clear from the end backwards.  */
2256 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2257 if (data.reverse) data.offset = len;
2260 data.to_struct = MEM_IN_STRUCT_P (to);
2262 /* If copying requires more than two move insns,
2263 copy addresses to registers (to make displacements shorter)
2264 and use post-increment if available. */
2266 && move_by_pieces_ninsns (len, align) > 2)
2268 /* Determine the main mode we'll be using */
2269 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2270 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2271 if (GET_MODE_SIZE (tmode) < max_size)
2274 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2276 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2278 data.explicit_inc_to = -1;
2280 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2282 data.to_addr = copy_addr_to_reg (to_addr);
2284 data.explicit_inc_to = 1;
2286 if (!data.autinc_to && CONSTANT_P (to_addr))
2287 data.to_addr = copy_addr_to_reg (to_addr);
/* Cheap unaligned access (or maximal alignment) lets us use wide modes.  */
2290 if (! SLOW_UNALIGNED_ACCESS
2291 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2294 /* First move what we can in the largest integer mode, then go to
2295 successively smaller modes. */
2297 while (max_size > 1)
2299 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2300 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2301 if (GET_MODE_SIZE (tmode) < max_size)
2304 if (mode == VOIDmode)
2307 icode = mov_optab->handlers[(int) mode].insn_code;
2308 if (icode != CODE_FOR_nothing
2309 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2310 GET_MODE_SIZE (mode)))
2311 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2313 max_size = GET_MODE_SIZE (mode);
2316 /* The code above should have handled everything. */
2321 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2322 with move instructions for mode MODE. GENFUN is the gen_... function
2323 to make a move insn for that mode. DATA has all the other info. */
/* NOTE(review): the to1 declaration, offset arguments and braces are
   partly elided in this extract.  */
2326 clear_by_pieces_1 (genfun, mode, data)
2327 rtx (*genfun) PROTO ((rtx, ...));
2328 enum machine_mode mode;
2329 struct clear_by_pieces *data;
2331 register int size = GET_MODE_SIZE (mode);
/* One MODE-sized zero-store per iteration until fewer than SIZE bytes
   remain; a narrower mode finishes the tail.  */
2334 while (data->len >= size)
2336 if (data->reverse) data->offset -= size;
2338 to1 = (data->autinc_to
2339 ? gen_rtx_MEM (mode, data->to_addr)
2340 : copy_rtx (change_address (data->to, mode,
2341 plus_constant (data->to_addr,
2343 MEM_IN_STRUCT_P (to1) = data->to_struct;
/* Explicit pre-decrement of the address register when requested.  */
2345 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2346 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
/* The stored value is always the zero constant.  */
2348 emit_insn ((*genfun) (to1, const0_rtx));
2349 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2350 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2352 if (! data->reverse) data->offset += size;
2358 /* Write zeros through the storage of OBJECT.
2359 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2360 the maximum alignment we can assume it has, measured in bytes.
2362 If we call a function that returns the length of the block, return it. */
/* Strategy mirrors emit_block_move: clear_by_pieces for small constant
   sizes, then a target clrstr pattern, then memset/bzero.  A non-BLKmode
   OBJECT is simply assigned its mode's zero constant at the end.
   NOTE(review): many original lines are elided in this extract.  */
2365 clear_storage (object, size, align)
2370 #ifdef TARGET_MEM_FUNCTIONS
2372 tree call_expr, arg_list;
2376 if (GET_MODE (object) == BLKmode)
2378 object = protect_from_queue (object, 1);
2379 size = protect_from_queue (size, 0);
2381 if (GET_CODE (size) == CONST_INT
2382 && MOVE_BY_PIECES_P (INTVAL (size), align))
2383 clear_by_pieces (object, INTVAL (size), align);
2387 /* Try the most limited insn first, because there's no point
2388 including more than one in the machine description unless
2389 the more limited one has some advantage. */
2391 rtx opalign = GEN_INT (align);
2392 enum machine_mode mode;
2394 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2395 mode = GET_MODE_WIDER_MODE (mode))
2397 enum insn_code code = clrstr_optab[(int) mode];
2399 if (code != CODE_FOR_nothing
2400 /* We don't need MODE to be narrower than
2401 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2402 the mode mask, as it is returned by the macro, it will
2403 definitely be less than the actual mode mask. */
2404 && ((GET_CODE (size) == CONST_INT
2405 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2406 <= (GET_MODE_MASK (mode) >> 1)))
2407 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2408 && (insn_operand_predicate[(int) code][0] == 0
2409 || (*insn_operand_predicate[(int) code][0]) (object,
2411 && (insn_operand_predicate[(int) code][2] == 0
2412 || (*insn_operand_predicate[(int) code][2]) (opalign,
2416 rtx last = get_last_insn ();
/* SIZE is always treated as unsigned here.  */
2419 op1 = convert_to_mode (mode, size, 1);
2420 if (insn_operand_predicate[(int) code][1] != 0
2421 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2423 op1 = copy_to_mode_reg (mode, op1);
2425 pat = GEN_FCN ((int) code) (object, op1, opalign);
/* Pattern failed; discard anything it emitted.  */
2432 delete_insns_since (last);
2437 #ifdef TARGET_MEM_FUNCTIONS
2438 /* It is incorrect to use the libcall calling conventions to call
2439 memset in this context.
2441 This could be a user call to memset and the user may wish to
2442 examine the return value from memset.
2444 For targets where libcalls and normal calls have different conventions
2445 for returning pointers, we could end up generating incorrect code.
2447 So instead of using a libcall sequence we build up a suitable
2448 CALL_EXPR and expand the call in the normal fashion. */
2449 if (fn == NULL_TREE)
2453 /* This was copied from except.c, I don't know if all this is
2454 necessary in this context or not. */
2455 fn = get_identifier ("memset");
2456 push_obstacks_nochange ();
2457 end_temporary_allocation ();
2458 fntype = build_pointer_type (void_type_node);
2459 fntype = build_function_type (fntype, NULL_TREE);
2460 fn = build_decl (FUNCTION_DECL, fn, fntype);
2461 DECL_EXTERNAL (fn) = 1;
2462 TREE_PUBLIC (fn) = 1;
2463 DECL_ARTIFICIAL (fn) = 1;
2464 make_decl_rtl (fn, NULL_PTR, 1);
2465 assemble_external (fn);
2469 /* We need to make an argument list for the function call.
2471 memset has three arguments, the first is a void * address, the
2472 second an integer with the initialization value, the last is a size_t
2473 byte count for the copy. */
2475 = build_tree_list (NULL_TREE,
2476 make_tree (build_pointer_type (void_type_node),
2478 TREE_CHAIN (arg_list)
2479 = build_tree_list (NULL_TREE,
2480 make_tree (integer_type_node, const0_rtx));
2481 TREE_CHAIN (TREE_CHAIN (arg_list))
2482 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2483 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2485 /* Now we have to build up the CALL_EXPR itself. */
2486 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2487 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2488 call_expr, arg_list, NULL_TREE);
2489 TREE_SIDE_EFFECTS (call_expr) = 1;
2491 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
/* Without TARGET_MEM_FUNCTIONS fall back to bzero (no return value).  */
2493 emit_library_call (bzero_libfunc, 0,
2495 XEXP (object, 0), Pmode,
2497 (TYPE_MODE (integer_type_node), size,
2498 TREE_UNSIGNED (integer_type_node)),
2499 TYPE_MODE (integer_type_node));
/* Non-BLKmode object: clearing is just a move of the zero constant.  */
2504 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2509 /* Generate code to copy Y into X.
2510 Both Y and X must have the same mode, except that
2511 Y can be a constant with VOIDmode.
2512 This mode cannot be BLKmode; use emit_block_move for that.
2514 Return the last instruction emitted. */
/* NOTE(review): the abort bodies and part of the address-validity
   conditions are elided in this extract.  */
2517 emit_move_insn (x, y)
2520 enum machine_mode mode = GET_MODE (x);
2522 x = protect_from_queue (x, 1);
2523 y = protect_from_queue (y, 0);
/* Mode mismatch (other than a VOIDmode constant Y) is a caller bug.  */
2525 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2528 /* Never force constant_p_rtx to memory. */
2529 if (GET_CODE (y) == CONSTANT_P_RTX)
2531 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2532 y = force_const_mem (mode, y);
2534 /* If X or Y are memory references, verify that their addresses are valid
2536 if (GET_CODE (x) == MEM
2537 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2538 && ! push_operand (x, GET_MODE (x)))
2540 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2541 x = change_address (x, VOIDmode, XEXP (x, 0));
2543 if (GET_CODE (y) == MEM
2544 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2546 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2547 y = change_address (y, VOIDmode, XEXP (y, 0));
2549 if (mode == BLKmode)
/* All checks passed; the real work happens in emit_move_insn_1.  */
2552 return emit_move_insn_1 (x, y);
2555 /* Low level part of emit_move_insn.
2556 Called just like emit_move_insn, but assumes X and Y
2557 are basically valid. */
2560 emit_move_insn_1 (x, y)
2563 enum machine_mode mode = GET_MODE (x);
2564 enum machine_mode submode;
2565 enum mode_class class = GET_MODE_CLASS (mode);
2568 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2570 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2572 /* Expand complex moves by moving real part and imag part, if possible. */
2573 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2574 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2576 (class == MODE_COMPLEX_INT
2577 ? MODE_INT : MODE_FLOAT),
2579 && (mov_optab->handlers[(int) submode].insn_code
2580 != CODE_FOR_nothing))
2582 /* Don't split destination if it is a stack push. */
2583 int stack = push_operand (x, GET_MODE (x));
2585 /* If this is a stack, push the highpart first, so it
2586 will be in the argument order.
2588 In that case, change_address is used only to convert
2589 the mode, not to change the address. */
2592 /* Note that the real part always precedes the imag part in memory
2593 regardless of machine's endianness. */
2594 #ifdef STACK_GROWS_DOWNWARD
2595 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2596 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2597 gen_imagpart (submode, y)));
2598 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2599 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2600 gen_realpart (submode, y)));
2602 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2603 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2604 gen_realpart (submode, y)));
2605 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2606 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2607 gen_imagpart (submode, y)));
2612 /* Show the output dies here. This is necessary for pseudos;
2613 hard regs shouldn't appear here except as return values.
2614 We never want to emit such a clobber after reload. */
2616 && ! (reload_in_progress || reload_completed))
2618 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2621 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2622 (gen_realpart (submode, x), gen_realpart (submode, y)));
2623 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2624 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2627 return get_last_insn ();
2630 /* This will handle any multi-word mode that lacks a move_insn pattern.
2631 However, you will get better code if you define such patterns,
2632 even if they must turn into multiple assembler instructions. */
2633 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2637 #ifdef PUSH_ROUNDING
2639 /* If X is a push on the stack, do the push now and replace
2640 X with a reference to the stack pointer. */
2641 if (push_operand (x, GET_MODE (x)))
2643 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2644 x = change_address (x, VOIDmode, stack_pointer_rtx);
2648 /* Show the output dies here. This is necessary for pseudos;
2649 hard regs shouldn't appear here except as return values.
2650 We never want to emit such a clobber after reload. */
2652 && ! (reload_in_progress || reload_completed))
2654 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2658 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2661 rtx xpart = operand_subword (x, i, 1, mode);
2662 rtx ypart = operand_subword (y, i, 1, mode);
2664 /* If we can't get a part of Y, put Y into memory if it is a
2665 constant. Otherwise, force it into a register. If we still
2666 can't get a part of Y, abort. */
2667 if (ypart == 0 && CONSTANT_P (y))
2669 y = force_const_mem (mode, y);
2670 ypart = operand_subword (y, i, 1, mode);
2672 else if (ypart == 0)
2673 ypart = operand_subword_force (y, i, mode);
2675 if (xpart == 0 || ypart == 0)
2678 last_insn = emit_move_insn (xpart, ypart);
2687 /* Pushing data onto the stack. */
2689 /* Push a block of length SIZE (perhaps variable)
2690 and return an rtx to address the beginning of the block.
2691 Note that it is not possible for the value returned to be a QUEUED.
2692 The value may be virtual_outgoing_args_rtx.
2694 EXTRA is the number of bytes of padding to push in addition to SIZE.
2695 BELOW nonzero means this padding comes at low addresses;
2696 otherwise, the padding comes at high addresses. */
2699 push_block (size, extra, below)
2705 size = convert_modes (Pmode, ptr_mode, size, 1);
2706 if (CONSTANT_P (size))
2707 anti_adjust_stack (plus_constant (size, extra));
2708 else if (GET_CODE (size) == REG && extra == 0)
2709 anti_adjust_stack (size);
2712 rtx temp = copy_to_mode_reg (Pmode, size);
2714 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2715 temp, 0, OPTAB_LIB_WIDEN);
2716 anti_adjust_stack (temp);
2719 #if defined (STACK_GROWS_DOWNWARD) \
2720 || (defined (ARGS_GROW_DOWNWARD) \
2721 && !defined (ACCUMULATE_OUTGOING_ARGS))
2723 /* Return the lowest stack address when STACK or ARGS grow downward and
2724 we are not aaccumulating outgoing arguments (the c4x port uses such
2726 temp = virtual_outgoing_args_rtx;
2727 if (extra != 0 && below)
2728 temp = plus_constant (temp, extra);
2730 if (GET_CODE (size) == CONST_INT)
2731 temp = plus_constant (virtual_outgoing_args_rtx,
2732 - INTVAL (size) - (below ? 0 : extra));
2733 else if (extra != 0 && !below)
2734 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2735 negate_rtx (Pmode, plus_constant (size, extra)));
2737 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2738 negate_rtx (Pmode, size));
2741 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2747 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2750 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2751 block of SIZE bytes. */
2754 get_push_address (size)
2759 if (STACK_PUSH_CODE == POST_DEC)
2760 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2761 else if (STACK_PUSH_CODE == POST_INC)
2762 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2764 temp = stack_pointer_rtx;
2766 return copy_to_reg (temp);
2769 /* Generate code to push X onto the stack, assuming it has mode MODE and
2771 MODE is redundant except when X is a CONST_INT (since they don't
2773 SIZE is an rtx for the size of data to be copied (in bytes),
2774 needed only if X is BLKmode.
2776 ALIGN (in bytes) is maximum alignment we can assume.
2778 If PARTIAL and REG are both nonzero, then copy that many of the first
2779 words of X into registers starting with REG, and push the rest of X.
2780 The amount of space pushed is decreased by PARTIAL words,
2781 rounded *down* to a multiple of PARM_BOUNDARY.
2782 REG must be a hard register in this case.
2783 If REG is zero but PARTIAL is not, take all other actions for an
2784 argument partially in registers, but do not actually load any
2787 EXTRA is the amount in bytes of extra space to leave next to this arg.
2788 This is ignored if an argument block has already been allocated.
2790 On a machine that lacks real push insns, ARGS_ADDR is the address of
2791 the bottom of the argument block for this call. We use indexing off there
2792 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2793 argument block has not been preallocated.
2795 ARGS_SO_FAR is the size of args previously pushed for this call.
2797 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2798 for arguments passed in registers. If nonzero, it will be the number
2799 of bytes required. */
2802 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2803 args_addr, args_so_far, reg_parm_stack_space)
2805 enum machine_mode mode;
2814 int reg_parm_stack_space;
2817 enum direction stack_direction
2818 #ifdef STACK_GROWS_DOWNWARD
2824 /* Decide where to pad the argument: `downward' for below,
2825 `upward' for above, or `none' for don't pad it.
2826 Default is below for small data on big-endian machines; else above. */
2827 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2829 /* Invert direction if stack is post-update. */
2830 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2831 if (where_pad != none)
2832 where_pad = (where_pad == downward ? upward : downward);
2834 xinner = x = protect_from_queue (x, 0);
2836 if (mode == BLKmode)
2838 /* Copy a block into the stack, entirely or partially. */
2841 int used = partial * UNITS_PER_WORD;
2842 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2850 /* USED is now the # of bytes we need not copy to the stack
2851 because registers will take care of them. */
2854 xinner = change_address (xinner, BLKmode,
2855 plus_constant (XEXP (xinner, 0), used));
2857 /* If the partial register-part of the arg counts in its stack size,
2858 skip the part of stack space corresponding to the registers.
2859 Otherwise, start copying to the beginning of the stack space,
2860 by setting SKIP to 0. */
2861 skip = (reg_parm_stack_space == 0) ? 0 : used;
2863 #ifdef PUSH_ROUNDING
2864 /* Do it with several push insns if that doesn't take lots of insns
2865 and if there is no difficulty with push insns that skip bytes
2866 on the stack for alignment purposes. */
2868 && GET_CODE (size) == CONST_INT
2870 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
2871 /* Here we avoid the case of a structure whose weak alignment
2872 forces many pushes of a small amount of data,
2873 and such small pushes do rounding that causes trouble. */
2874 && ((! SLOW_UNALIGNED_ACCESS)
2875 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2876 || PUSH_ROUNDING (align) == align)
2877 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2879 /* Push padding now if padding above and stack grows down,
2880 or if padding below and stack grows up.
2881 But if space already allocated, this has already been done. */
2882 if (extra && args_addr == 0
2883 && where_pad != none && where_pad != stack_direction)
2884 anti_adjust_stack (GEN_INT (extra));
2886 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
2887 INTVAL (size) - used, align);
2889 if (current_function_check_memory_usage && ! in_check_memory_usage)
2893 in_check_memory_usage = 1;
2894 temp = get_push_address (INTVAL(size) - used);
2895 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2896 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2898 XEXP (xinner, 0), ptr_mode,
2899 GEN_INT (INTVAL(size) - used),
2900 TYPE_MODE (sizetype));
2902 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2904 GEN_INT (INTVAL(size) - used),
2905 TYPE_MODE (sizetype),
2906 GEN_INT (MEMORY_USE_RW),
2907 TYPE_MODE (integer_type_node));
2908 in_check_memory_usage = 0;
2912 #endif /* PUSH_ROUNDING */
2914 /* Otherwise make space on the stack and copy the data
2915 to the address of that space. */
2917 /* Deduct words put into registers from the size we must copy. */
2920 if (GET_CODE (size) == CONST_INT)
2921 size = GEN_INT (INTVAL (size) - used);
2923 size = expand_binop (GET_MODE (size), sub_optab, size,
2924 GEN_INT (used), NULL_RTX, 0,
2928 /* Get the address of the stack space.
2929 In this case, we do not deal with EXTRA separately.
2930 A single stack adjust will do. */
2933 temp = push_block (size, extra, where_pad == downward);
2936 else if (GET_CODE (args_so_far) == CONST_INT)
2937 temp = memory_address (BLKmode,
2938 plus_constant (args_addr,
2939 skip + INTVAL (args_so_far)));
2941 temp = memory_address (BLKmode,
2942 plus_constant (gen_rtx_PLUS (Pmode,
2946 if (current_function_check_memory_usage && ! in_check_memory_usage)
2950 in_check_memory_usage = 1;
2951 target = copy_to_reg (temp);
2952 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2953 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2955 XEXP (xinner, 0), ptr_mode,
2956 size, TYPE_MODE (sizetype));
2958 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2960 size, TYPE_MODE (sizetype),
2961 GEN_INT (MEMORY_USE_RW),
2962 TYPE_MODE (integer_type_node));
2963 in_check_memory_usage = 0;
2966 /* TEMP is the address of the block. Copy the data there. */
2967 if (GET_CODE (size) == CONST_INT
2968 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align)))
2970 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
2971 INTVAL (size), align);
2976 rtx opalign = GEN_INT (align);
2977 enum machine_mode mode;
2978 rtx target = gen_rtx_MEM (BLKmode, temp);
2980 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2982 mode = GET_MODE_WIDER_MODE (mode))
2984 enum insn_code code = movstr_optab[(int) mode];
2986 if (code != CODE_FOR_nothing
2987 && ((GET_CODE (size) == CONST_INT
2988 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2989 <= (GET_MODE_MASK (mode) >> 1)))
2990 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2991 && (insn_operand_predicate[(int) code][0] == 0
2992 || ((*insn_operand_predicate[(int) code][0])
2994 && (insn_operand_predicate[(int) code][1] == 0
2995 || ((*insn_operand_predicate[(int) code][1])
2997 && (insn_operand_predicate[(int) code][3] == 0
2998 || ((*insn_operand_predicate[(int) code][3])
2999 (opalign, VOIDmode))))
3001 rtx op2 = convert_to_mode (mode, size, 1);
3002 rtx last = get_last_insn ();
3005 if (insn_operand_predicate[(int) code][2] != 0
3006 && ! ((*insn_operand_predicate[(int) code][2])
3008 op2 = copy_to_mode_reg (mode, op2);
3010 pat = GEN_FCN ((int) code) (target, xinner,
3018 delete_insns_since (last);
3023 #ifndef ACCUMULATE_OUTGOING_ARGS
3024 /* If the source is referenced relative to the stack pointer,
3025 copy it to another register to stabilize it. We do not need
3026 to do this if we know that we won't be changing sp. */
3028 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3029 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3030 temp = copy_to_reg (temp);
3033 /* Make inhibit_defer_pop nonzero around the library call
3034 to force it to pop the bcopy-arguments right away. */
3036 #ifdef TARGET_MEM_FUNCTIONS
3037 emit_library_call (memcpy_libfunc, 0,
3038 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3039 convert_to_mode (TYPE_MODE (sizetype),
3040 size, TREE_UNSIGNED (sizetype)),
3041 TYPE_MODE (sizetype));
3043 emit_library_call (bcopy_libfunc, 0,
3044 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3045 convert_to_mode (TYPE_MODE (integer_type_node),
3047 TREE_UNSIGNED (integer_type_node)),
3048 TYPE_MODE (integer_type_node));
3053 else if (partial > 0)
3055 /* Scalar partly in registers. */
3057 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3060 /* # words of start of argument
3061 that we must make space for but need not store. */
3062 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3063 int args_offset = INTVAL (args_so_far);
3066 /* Push padding now if padding above and stack grows down,
3067 or if padding below and stack grows up.
3068 But if space already allocated, this has already been done. */
3069 if (extra && args_addr == 0
3070 && where_pad != none && where_pad != stack_direction)
3071 anti_adjust_stack (GEN_INT (extra));
3073 /* If we make space by pushing it, we might as well push
3074 the real data. Otherwise, we can leave OFFSET nonzero
3075 and leave the space uninitialized. */
3079 /* Now NOT_STACK gets the number of words that we don't need to
3080 allocate on the stack. */
3081 not_stack = partial - offset;
3083 /* If the partial register-part of the arg counts in its stack size,
3084 skip the part of stack space corresponding to the registers.
3085 Otherwise, start copying to the beginning of the stack space,
3086 by setting SKIP to 0. */
3087 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3089 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3090 x = validize_mem (force_const_mem (mode, x));
3092 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3093 SUBREGs of such registers are not allowed. */
3094 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3095 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3096 x = copy_to_reg (x);
3098 /* Loop over all the words allocated on the stack for this arg. */
3099 /* We can do it by words, because any scalar bigger than a word
3100 has a size a multiple of a word. */
3101 #ifndef PUSH_ARGS_REVERSED
3102 for (i = not_stack; i < size; i++)
3104 for (i = size - 1; i >= not_stack; i--)
3106 if (i >= not_stack + offset)
3107 emit_push_insn (operand_subword_force (x, i, mode),
3108 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3110 GEN_INT (args_offset + ((i - not_stack + skip)
3112 reg_parm_stack_space);
3117 rtx target = NULL_RTX;
3119 /* Push padding now if padding above and stack grows down,
3120 or if padding below and stack grows up.
3121 But if space already allocated, this has already been done. */
3122 if (extra && args_addr == 0
3123 && where_pad != none && where_pad != stack_direction)
3124 anti_adjust_stack (GEN_INT (extra));
3126 #ifdef PUSH_ROUNDING
3128 addr = gen_push_operand ();
3132 if (GET_CODE (args_so_far) == CONST_INT)
3134 = memory_address (mode,
3135 plus_constant (args_addr,
3136 INTVAL (args_so_far)));
3138 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3143 emit_move_insn (gen_rtx_MEM (mode, addr), x);
3145 if (current_function_check_memory_usage && ! in_check_memory_usage)
3147 in_check_memory_usage = 1;
3149 target = get_push_address (GET_MODE_SIZE (mode));
3151 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3152 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3154 XEXP (x, 0), ptr_mode,
3155 GEN_INT (GET_MODE_SIZE (mode)),
3156 TYPE_MODE (sizetype));
3158 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3160 GEN_INT (GET_MODE_SIZE (mode)),
3161 TYPE_MODE (sizetype),
3162 GEN_INT (MEMORY_USE_RW),
3163 TYPE_MODE (integer_type_node));
3164 in_check_memory_usage = 0;
3169 /* If part should go in registers, copy that part
3170 into the appropriate registers. Do this now, at the end,
3171 since mem-to-mem copies above may do function calls. */
3172 if (partial > 0 && reg != 0)
3174 /* Handle calls that pass values in multiple non-contiguous locations.
3175 The Irix 6 ABI has examples of this. */
3176 if (GET_CODE (reg) == PARALLEL)
3177 emit_group_load (reg, x, -1, align); /* ??? size? */
3179 move_block_to_reg (REGNO (reg), x, partial, mode);
3182 if (extra && args_addr == 0 && where_pad == stack_direction)
3183 anti_adjust_stack (GEN_INT (extra));
3186 /* Expand an assignment that stores the value of FROM into TO.
3187 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3188 (This may contain a QUEUED rtx;
3189 if the value is constant, this rtx is a constant.)
3190 Otherwise, the returned value is NULL_RTX.
3192 SUGGEST_REG is no longer actually used.
3193 It used to mean, copy the value through a register
3194 and return that register, if that is possible.
3195 We now use WANT_VALUE to decide whether to do this. */
3198 expand_assignment (to, from, want_value, suggest_reg)
3203 register rtx to_rtx = 0;
3206 /* Don't crash if the lhs of the assignment was erroneous. */
3208 if (TREE_CODE (to) == ERROR_MARK)
3210 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3211 return want_value ? result : NULL_RTX;
3214 /* Assignment of a structure component needs special treatment
3215 if the structure component's rtx is not simply a MEM.
3216 Assignment of an array element at a constant index, and assignment of
3217 an array element in an unaligned packed structure field, has the same
3220 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3221 || TREE_CODE (to) == ARRAY_REF)
3223 enum machine_mode mode1;
3233 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3234 &unsignedp, &volatilep, &alignment);
3236 /* If we are going to use store_bit_field and extract_bit_field,
3237 make sure to_rtx will be safe for multiple use. */
3239 if (mode1 == VOIDmode && want_value)
3240 tem = stabilize_reference (tem);
3242 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3245 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3247 if (GET_CODE (to_rtx) != MEM)
3250 if (GET_MODE (offset_rtx) != ptr_mode)
3252 #ifdef POINTERS_EXTEND_UNSIGNED
3253 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3255 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3259 /* A constant address in TO_RTX can have VOIDmode, we must not try
3260 to call force_reg for that case. Avoid that case. */
3261 if (GET_CODE (to_rtx) == MEM
3262 && GET_MODE (to_rtx) == BLKmode
3263 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3265 && (bitpos % bitsize) == 0
3266 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3267 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3269 rtx temp = change_address (to_rtx, mode1,
3270 plus_constant (XEXP (to_rtx, 0),
3273 if (GET_CODE (XEXP (temp, 0)) == REG)
3276 to_rtx = change_address (to_rtx, mode1,
3277 force_reg (GET_MODE (XEXP (temp, 0)),
3282 to_rtx = change_address (to_rtx, VOIDmode,
3283 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3284 force_reg (ptr_mode, offset_rtx)));
3288 if (GET_CODE (to_rtx) == MEM)
3290 /* When the offset is zero, to_rtx is the address of the
3291 structure we are storing into, and hence may be shared.
3292 We must make a new MEM before setting the volatile bit. */
3294 to_rtx = copy_rtx (to_rtx);
3296 MEM_VOLATILE_P (to_rtx) = 1;
3298 #if 0 /* This was turned off because, when a field is volatile
3299 in an object which is not volatile, the object may be in a register,
3300 and then we would abort over here. */
3306 if (TREE_CODE (to) == COMPONENT_REF
3307 && TREE_READONLY (TREE_OPERAND (to, 1)))
3310 to_rtx = copy_rtx (to_rtx);
3312 RTX_UNCHANGING_P (to_rtx) = 1;
3315 /* Check the access. */
3316 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3321 enum machine_mode best_mode;
3323 best_mode = get_best_mode (bitsize, bitpos,
3324 TYPE_ALIGN (TREE_TYPE (tem)),
3326 if (best_mode == VOIDmode)
3329 best_mode_size = GET_MODE_BITSIZE (best_mode);
3330 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3331 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3332 size *= GET_MODE_SIZE (best_mode);
3334 /* Check the access right of the pointer. */
3336 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3338 GEN_INT (size), TYPE_MODE (sizetype),
3339 GEN_INT (MEMORY_USE_WO),
3340 TYPE_MODE (integer_type_node));
3343 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3345 /* Spurious cast makes HPUX compiler happy. */
3346 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3349 /* Required alignment of containing datum. */
3351 int_size_in_bytes (TREE_TYPE (tem)),
3352 get_alias_set (to));
3353 preserve_temp_slots (result);
3357 /* If the value is meaningful, convert RESULT to the proper mode.
3358 Otherwise, return nothing. */
3359 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3360 TYPE_MODE (TREE_TYPE (from)),
3362 TREE_UNSIGNED (TREE_TYPE (to)))
3366 /* If the rhs is a function call and its value is not an aggregate,
3367 call the function before we start to compute the lhs.
3368 This is needed for correct code for cases such as
3369 val = setjmp (buf) on machines where reference to val
3370 requires loading up part of an address in a separate insn.
3372 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3373 a promoted variable where the zero- or sign- extension needs to be done.
3374 Handling this in the normal way is safe because no computation is done
3376 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3377 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3378 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
3383 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3385 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3387 /* Handle calls that return values in multiple non-contiguous locations.
3388 The Irix 6 ABI has examples of this. */
3389 if (GET_CODE (to_rtx) == PARALLEL)
3390 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3391 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3392 else if (GET_MODE (to_rtx) == BLKmode)
3393 emit_block_move (to_rtx, value, expr_size (from),
3394 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3397 #ifdef POINTERS_EXTEND_UNSIGNED
3398 if (TREE_CODE (to) == REFERENCE_TYPE
3399 || TREE_CODE (to) == POINTER_TYPE)
3400 value = convert_memory_address (GET_MODE (to_rtx), value);
3402 emit_move_insn (to_rtx, value);
3404 preserve_temp_slots (to_rtx);
3407 return want_value ? to_rtx : NULL_RTX;
3410 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3411 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3415 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3416 if (GET_CODE (to_rtx) == MEM)
3417 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3420 /* Don't move directly into a return register. */
3421 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3426 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3427 emit_move_insn (to_rtx, temp);
3428 preserve_temp_slots (to_rtx);
3431 return want_value ? to_rtx : NULL_RTX;
3434 /* In case we are returning the contents of an object which overlaps
3435 the place the value is being stored, use a safe function when copying
3436 a value through a pointer into a structure value return block. */
3437 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3438 && current_function_returns_struct
3439 && !current_function_returns_pcc_struct)
3444 size = expr_size (from);
3445 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3446 EXPAND_MEMORY_USE_DONT);
3448 /* Copy the rights of the bitmap. */
3449 if (current_function_check_memory_usage)
3450 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3451 XEXP (to_rtx, 0), ptr_mode,
3452 XEXP (from_rtx, 0), ptr_mode,
3453 convert_to_mode (TYPE_MODE (sizetype),
3454 size, TREE_UNSIGNED (sizetype)),
3455 TYPE_MODE (sizetype));
3457 #ifdef TARGET_MEM_FUNCTIONS
3458 emit_library_call (memcpy_libfunc, 0,
3459 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3460 XEXP (from_rtx, 0), Pmode,
3461 convert_to_mode (TYPE_MODE (sizetype),
3462 size, TREE_UNSIGNED (sizetype)),
3463 TYPE_MODE (sizetype));
3465 emit_library_call (bcopy_libfunc, 0,
3466 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3467 XEXP (to_rtx, 0), Pmode,
3468 convert_to_mode (TYPE_MODE (integer_type_node),
3469 size, TREE_UNSIGNED (integer_type_node)),
3470 TYPE_MODE (integer_type_node));
3473 preserve_temp_slots (to_rtx);
3476 return want_value ? to_rtx : NULL_RTX;
3479 /* Compute FROM and store the value in the rtx we got. */
3482 result = store_expr (from, to_rtx, want_value);
3483 preserve_temp_slots (result);
3486 return want_value ? result : NULL_RTX;
3489 /* Generate code for computing expression EXP,
3490 and storing the value into TARGET.
3491 TARGET may contain a QUEUED rtx.
3493 If WANT_VALUE is nonzero, return a copy of the value
3494 not in TARGET, so that we can be sure to use the proper
3495 value in a containing expression even if TARGET has something
3496 else stored in it. If possible, we copy the value through a pseudo
3497 and return that pseudo. Or, if the value is constant, we try to
3498 return the constant. In some cases, we return a pseudo
3499 copied *from* TARGET.
3501 If the mode is BLKmode then we may return TARGET itself.
3502 It turns out that in BLKmode it doesn't cause a problem.
3503 because C has no operators that could combine two different
3504 assignments into the same BLKmode object with different values
3505 with no sequence point. Will other languages need this to
3508 If WANT_VALUE is 0, we return NULL, to make sure
3509 to catch quickly any cases where the caller uses the value
3510 and fails to set WANT_VALUE. */
3513 store_expr (exp, target, want_value)
3515 register rtx target;
3519 int dont_return_target = 0;
3521 if (TREE_CODE (exp) == COMPOUND_EXPR)
3523 /* Perform first part of compound expression, then assign from second
3525 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3527 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3529 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3531 /* For conditional expression, get safe form of the target. Then
3532 test the condition, doing the appropriate assignment on either
3533 side. This avoids the creation of unnecessary temporaries.
3534 For non-BLKmode, it is more efficient not to do this. */
3536 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3539 target = protect_from_queue (target, 1);
3541 do_pending_stack_adjust ();
3543 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3544 start_cleanup_deferral ();
3545 store_expr (TREE_OPERAND (exp, 1), target, 0);
3546 end_cleanup_deferral ();
3548 emit_jump_insn (gen_jump (lab2));
3551 start_cleanup_deferral ();
3552 store_expr (TREE_OPERAND (exp, 2), target, 0);
3553 end_cleanup_deferral ();
3558 return want_value ? target : NULL_RTX;
3560 else if (queued_subexp_p (target))
3561 /* If target contains a postincrement, let's not risk
3562 using it as the place to generate the rhs. */
3564 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3566 /* Expand EXP into a new pseudo. */
3567 temp = gen_reg_rtx (GET_MODE (target));
3568 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3571 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3573 /* If target is volatile, ANSI requires accessing the value
3574 *from* the target, if it is accessed. So make that happen.
3575 In no case return the target itself. */
3576 if (! MEM_VOLATILE_P (target) && want_value)
3577 dont_return_target = 1;
3579 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3580 && GET_MODE (target) != BLKmode)
3581 /* If target is in memory and caller wants value in a register instead,
3582 arrange that. Pass TARGET as target for expand_expr so that,
3583 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3584 We know expand_expr will not use the target in that case.
3585 Don't do this if TARGET is volatile because we are supposed
3586 to write it and then read it. */
3588 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3589 GET_MODE (target), 0);
3590 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3591 temp = copy_to_reg (temp);
3592 dont_return_target = 1;
3594 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3595 /* If this is an scalar in a register that is stored in a wider mode
3596 than the declared mode, compute the result into its declared mode
3597 and then convert to the wider mode. Our value is the computed
3600 /* If we don't want a value, we can do the conversion inside EXP,
3601 which will often result in some optimizations. Do the conversion
3602 in two steps: first change the signedness, if needed, then
3603 the extend. But don't do this if the type of EXP is a subtype
3604 of something else since then the conversion might involve
3605 more than just converting modes. */
3606 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3607 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3609 if (TREE_UNSIGNED (TREE_TYPE (exp))
3610 != SUBREG_PROMOTED_UNSIGNED_P (target))
3613 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3617 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3618 SUBREG_PROMOTED_UNSIGNED_P (target)),
3622 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3624 /* If TEMP is a volatile MEM and we want a result value, make
3625 the access now so it gets done only once. Likewise if
3626 it contains TARGET. */
3627 if (GET_CODE (temp) == MEM && want_value
3628 && (MEM_VOLATILE_P (temp)
3629 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3630 temp = copy_to_reg (temp);
3632 /* If TEMP is a VOIDmode constant, use convert_modes to make
3633 sure that we properly convert it. */
3634 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3635 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3636 TYPE_MODE (TREE_TYPE (exp)), temp,
3637 SUBREG_PROMOTED_UNSIGNED_P (target));
3639 convert_move (SUBREG_REG (target), temp,
3640 SUBREG_PROMOTED_UNSIGNED_P (target));
3641 return want_value ? temp : NULL_RTX;
3645 temp = expand_expr (exp, target, GET_MODE (target), 0);
3646 /* Return TARGET if it's a specified hardware register.
3647 If TARGET is a volatile mem ref, either return TARGET
3648 or return a reg copied *from* TARGET; ANSI requires this.
3650 Otherwise, if TEMP is not TARGET, return TEMP
3651 if it is constant (for efficiency),
3652 or if we really want the correct value. */
3653 if (!(target && GET_CODE (target) == REG
3654 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3655 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3656 && ! rtx_equal_p (temp, target)
3657 && (CONSTANT_P (temp) || want_value))
3658 dont_return_target = 1;
3661 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3662 the same as that of TARGET, adjust the constant. This is needed, for
3663 example, in case it is a CONST_DOUBLE and we want only a word-sized
3665 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3666 && TREE_CODE (exp) != ERROR_MARK
3667 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3668 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3669 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3671 if (current_function_check_memory_usage
3672 && GET_CODE (target) == MEM
3673 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3675 if (GET_CODE (temp) == MEM)
3676 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3677 XEXP (target, 0), ptr_mode,
3678 XEXP (temp, 0), ptr_mode,
3679 expr_size (exp), TYPE_MODE (sizetype));
3681 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3682 XEXP (target, 0), ptr_mode,
3683 expr_size (exp), TYPE_MODE (sizetype),
3684 GEN_INT (MEMORY_USE_WO),
3685 TYPE_MODE (integer_type_node));
3688 /* If value was not generated in the target, store it there.
3689 Convert the value to TARGET's type first if nec. */
3690 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3691 one or both of them are volatile memory refs, we have to distinguish
3693 - expand_expr has used TARGET. In this case, we must not generate
3694 another copy. This can be detected by TARGET being equal according
3696 - expand_expr has not used TARGET - that means that the source just
3697 happens to have the same RTX form. Since temp will have been created
3698 by expand_expr, it will compare unequal according to == .
3699 We must generate a copy in this case, to reach the correct number
3700 of volatile memory references. */
3702 if ((! rtx_equal_p (temp, target)
3703 || (temp != target && (side_effects_p (temp)
3704 || side_effects_p (target))))
3705 && TREE_CODE (exp) != ERROR_MARK)
3707 target = protect_from_queue (target, 1);
3708 if (GET_MODE (temp) != GET_MODE (target)
3709 && GET_MODE (temp) != VOIDmode)
3711 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3712 if (dont_return_target)
3714 /* In this case, we will return TEMP,
3715 so make sure it has the proper mode.
3716 But don't forget to store the value into TARGET. */
3717 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3718 emit_move_insn (target, temp);
3721 convert_move (target, temp, unsignedp);
3724 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3726 /* Handle copying a string constant into an array.
3727 The string constant may be shorter than the array.
3728 So copy just the string's actual length, and clear the rest. */
3732 /* Get the size of the data type of the string,
3733 which is actually the size of the target. */
3734 size = expr_size (exp);
3735 if (GET_CODE (size) == CONST_INT
3736 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3737 emit_block_move (target, temp, size,
3738 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3741 /* Compute the size of the data to copy from the string. */
3743 = size_binop (MIN_EXPR,
3744 make_tree (sizetype, size),
3746 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3747 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3751 /* Copy that much. */
3752 emit_block_move (target, temp, copy_size_rtx,
3753 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3755 /* Figure out how much is left in TARGET that we have to clear.
3756 Do all calculations in ptr_mode. */
3758 addr = XEXP (target, 0);
3759 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3761 if (GET_CODE (copy_size_rtx) == CONST_INT)
3763 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3764 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3768 addr = force_reg (ptr_mode, addr);
3769 addr = expand_binop (ptr_mode, add_optab, addr,
3770 copy_size_rtx, NULL_RTX, 0,
3773 size = expand_binop (ptr_mode, sub_optab, size,
3774 copy_size_rtx, NULL_RTX, 0,
3777 label = gen_label_rtx ();
3778 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
3779 GET_MODE (size), 0, 0, label);
3782 if (size != const0_rtx)
3784 /* Be sure we can write on ADDR. */
3785 if (current_function_check_memory_usage)
3786 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3788 size, TYPE_MODE (sizetype),
3789 GEN_INT (MEMORY_USE_WO),
3790 TYPE_MODE (integer_type_node));
3791 #ifdef TARGET_MEM_FUNCTIONS
3792 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3794 const0_rtx, TYPE_MODE (integer_type_node),
3795 convert_to_mode (TYPE_MODE (sizetype),
3797 TREE_UNSIGNED (sizetype)),
3798 TYPE_MODE (sizetype));
3800 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3802 convert_to_mode (TYPE_MODE (integer_type_node),
3804 TREE_UNSIGNED (integer_type_node)),
3805 TYPE_MODE (integer_type_node));
3813 /* Handle calls that return values in multiple non-contiguous locations.
3814 The Irix 6 ABI has examples of this. */
3815 else if (GET_CODE (target) == PARALLEL)
3816 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3817 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3818 else if (GET_MODE (temp) == BLKmode)
3819 emit_block_move (target, temp, expr_size (exp),
3820 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3822 emit_move_insn (target, temp);
3825 /* If we don't want a value, return NULL_RTX. */
3829 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3830 ??? The latter test doesn't seem to make sense. */
3831 else if (dont_return_target && GET_CODE (temp) != MEM)
3834 /* Return TARGET itself if it is a hard register. */
3835 else if (want_value && GET_MODE (target) != BLKmode
3836 && ! (GET_CODE (target) == REG
3837 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3838 return copy_to_reg (target);
3844 /* Return 1 if EXP just contains zeros. */
/* NOTE(review): this listing elides several original lines here (the function
   header, some case labels, and the final default/return are not visible);
   the comments below describe only what the visible code shows.  */
3852 switch (TREE_CODE (exp))
/* A NON_LVALUE_EXPR wrapper does not change the value; look through it.  */
3856 case NON_LVALUE_EXPR:
3857 return is_zeros_p (TREE_OPERAND (exp, 0));
/* Integer constant: zero iff both the low and high words are zero.  */
3860 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
/* Complex constant: zero iff both the real and imaginary parts are zero.  */
3864 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
/* Real constant: must be bit-identical to dconst0 (not merely == 0.0,
   so that -0.0 and the like are not treated as zero).  */
3867 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
/* A SET_TYPE constructor is all zeros iff it lists no ranges of set bits.  */
3870 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3871 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
/* Any other constructor: all zeros iff every element value is all zeros.  */
3872 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3873 if (! is_zeros_p (TREE_VALUE (elt)))
3883 /* Return 1 if EXP contains mostly (3/4) zeros. */
/* NOTE(review): the listing elides the return type and parameter declaration
   lines of this definition; only the body logic is annotated here.  */
3886 mostly_zeros_p (exp)
3889 if (TREE_CODE (exp) == CONSTRUCTOR)
/* Count total elements and how many of them are themselves mostly zero.  */
3891 int elts = 0, zeros = 0;
3892 tree elt = CONSTRUCTOR_ELTS (exp);
3893 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3895 /* If there are no ranges of true bits, it is all zero. */
3896 return elt == NULL_TREE;
3898 for (; elt; elt = TREE_CHAIN (elt))
3900 /* We do not handle the case where the index is a RANGE_EXPR,
3901 so the statistic will be somewhat inaccurate.
3902 We do make a more accurate count in store_constructor itself,
3903 so since this function is only used for nested array elements,
3904 this should be close enough. */
3905 if (mostly_zeros_p (TREE_VALUE (elt)))
/* "Mostly" means at least three quarters: zeros/elts >= 3/4, computed
   with integer arithmetic to avoid division.  */
3910 return 4 * zeros >= 3 * elts;
/* Non-constructor: fall back to the exact all-zeros test.  */
3913 return is_zeros_p (exp);
3916 /* Helper function for store_constructor.
3917 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3918 TYPE is the type of the CONSTRUCTOR, not the element type.
3919 CLEARED is as for store_constructor.
3921 This provides a recursive shortcut back to store_constructor when it isn't
3922 necessary to go through store_field. This is so that we can pass through
3923 the cleared field to let store_constructor know that we may not have to
3924 clear a substructure if the outer structure has already been cleared. */
/* NOTE(review): the listing elides the storage class / some parameter
   declarations of this definition.  */
3927 store_constructor_field (target, bitsize, bitpos,
3928 mode, exp, type, cleared)
3930 int bitsize, bitpos;
3931 enum machine_mode mode;
/* Fast path: a nested CONSTRUCTOR at a byte-aligned position can be stored
   by recursing into store_constructor directly (propagating CLEARED),
   rather than expanding it and going through store_field.  */
3935 if (TREE_CODE (exp) == CONSTRUCTOR
3936 && bitpos % BITS_PER_UNIT == 0
3937 /* If we have a non-zero bitpos for a register target, then we just
3938 let store_field do the bitfield handling. This is unlikely to
3939 generate unnecessary clear instructions anyways. */
3940 && (bitpos == 0 || GET_CODE (target) == MEM))
/* Rebase the MEM so the sub-constructor is stored at offset 0.  */
3943 target = change_address (target, VOIDmode,
3944 plus_constant (XEXP (target, 0),
3945 bitpos / BITS_PER_UNIT));
3946 store_constructor (exp, target, cleared);
/* Slow path: let store_field handle modes, bit-fields and alignment.  */
3949 store_field (target, bitsize, bitpos, mode, exp,
3950 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3951 int_size_in_bytes (type), 0);
3954 /* Store the value of constructor EXP into the rtx TARGET.
3955 TARGET is either a REG or a MEM.
3956 CLEARED is true if TARGET is known to have been zero'd. */
/* NOTE(review): this listing elides many original lines throughout this
   function (braces, else-arms, some declarations and sub-expressions);
   the added comments describe only the visible code and hedge elsewhere.  */
3959 store_constructor (exp, target, cleared)
3964 tree type = TREE_TYPE (exp);
3965 rtx exp_size = expr_size (exp);
3967 /* We know our target cannot conflict, since safe_from_p has been called. */
3969 /* Don't try copying piece by piece into a hard register
3970 since that is vulnerable to being clobbered by EXP.
3971 Instead, construct in a pseudo register and then copy it all. */
3972 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3974 rtx temp = gen_reg_rtx (GET_MODE (target));
3975 store_constructor (exp, temp, 0);
3976 emit_move_insn (target, temp);
/* Case 1: aggregate (struct/union) constructors.  */
3981 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3982 || TREE_CODE (type) == QUAL_UNION_TYPE)
3986 /* Inform later passes that the whole union value is dead. */
3987 if (TREE_CODE (type) == UNION_TYPE
3988 || TREE_CODE (type) == QUAL_UNION_TYPE)
3989 emit_insn (gen_rtx_CLOBBER (VOIDmode, target))
3991 /* If we are building a static constructor into a register,
3992 set the initial value as zero so we can fold the value into
3993 a constant. But if more than one register is involved,
3994 this probably loses. */
3995 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3996 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
3999 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4004 /* If the constructor has fewer fields than the structure
4005 or if we are initializing the structure to mostly zeros,
4006 clear the whole structure first. */
4007 else if ((list_length (CONSTRUCTOR_ELTS (exp))
4008 != list_length (TYPE_FIELDS (type)))
4009 || mostly_zeros_p (exp))
4012 clear_storage (target, expr_size (exp),
4013 TYPE_ALIGN (type) / BITS_PER_UNIT);
4018 /* Inform later passes that the old value is dead. */
4019 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4021 /* Store each element of the constructor into
4022 the corresponding field of TARGET. */
4024 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4026 register tree field = TREE_PURPOSE (elt);
4027 tree value = TREE_VALUE (elt);
4028 register enum machine_mode mode;
4032 tree pos, constant = 0, offset = 0;
4033 rtx to_rtx = target;
4035 /* Just ignore missing fields.
4036 We cleared the whole structure, above,
4037 if any fields are missing. */
/* If the whole target was cleared, an all-zero field needs no store.  */
4041 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4044 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
4045 unsignedp = TREE_UNSIGNED (field);
4046 mode = DECL_MODE (field);
4047 if (DECL_BIT_FIELD (field))
/* Split the field position into a constant bit offset and an
   optional variable byte offset (PLUS_EXPR with constant part).  */
4050 pos = DECL_FIELD_BITPOS (field);
4051 if (TREE_CODE (pos) == INTEGER_CST)
4053 else if (TREE_CODE (pos) == PLUS_EXPR
4054 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4055 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
4060 bitpos = TREE_INT_CST_LOW (constant);
/* A PLACEHOLDER_EXPR in the offset must be bound to this object.  */
4066 if (contains_placeholder_p (offset))
4067 offset = build (WITH_RECORD_EXPR, sizetype,
4068 offset, make_tree (TREE_TYPE (exp), target));
4070 offset = size_binop (FLOOR_DIV_EXPR, offset,
4071 size_int (BITS_PER_UNIT));
4073 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4074 if (GET_CODE (to_rtx) != MEM)
/* Offsets are computed in ptr_mode; widen or narrow as needed.  */
4077 if (GET_MODE (offset_rtx) != ptr_mode)
4079 #ifdef POINTERS_EXTEND_UNSIGNED
4080 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4082 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4087 = change_address (to_rtx, VOIDmode,
4088 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4089 force_reg (ptr_mode, offset_rtx)));
/* Mark stores to read-only fields as unchanging; copy the MEM first
   so the flag does not leak onto a shared rtx.  */
4091 if (TREE_READONLY (field))
4093 if (GET_CODE (to_rtx) == MEM)
4094 to_rtx = copy_rtx (to_rtx);
4096 RTX_UNCHANGING_P (to_rtx) = 1;
4099 #ifdef WORD_REGISTER_OPERATIONS
4100 /* If this initializes a field that is smaller than a word, at the
4101 start of a word, try to widen it to a full word.
4102 This special case allows us to output C++ member function
4103 initializations in a form that the optimizers can understand. */
4105 && GET_CODE (target) == REG
4106 && bitsize < BITS_PER_WORD
4107 && bitpos % BITS_PER_WORD == 0
4108 && GET_MODE_CLASS (mode) == MODE_INT
4109 && TREE_CODE (value) == INTEGER_CST
4110 && GET_CODE (exp_size) == CONST_INT
4111 && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT)
4113 tree type = TREE_TYPE (value);
4114 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4116 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4117 value = convert (type, value);
/* On big-endian machines the field occupies the high-order bits of
   the word, so pre-shift the constant into place.  */
4119 if (BYTES_BIG_ENDIAN)
4121 = fold (build (LSHIFT_EXPR, type, value,
4122 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4123 bitsize = BITS_PER_WORD;
4127 store_constructor_field (to_rtx, bitsize, bitpos,
4128 mode, value, type, cleared);
/* Case 2: array constructors.  */
4131 else if (TREE_CODE (type) == ARRAY_TYPE)
4136 tree domain = TYPE_DOMAIN (type);
4137 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4138 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4139 tree elttype = TREE_TYPE (type);
4141 /* If the constructor has fewer elements than the array,
4142 clear the whole array first. Similarly if this is a
4143 static constructor of a non-BLKmode object. */
4144 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4148 HOST_WIDE_INT count = 0, zero_count = 0;
4150 /* This loop is a more accurate version of the loop in
4151 mostly_zeros_p (it handles RANGE_EXPR in an index).
4152 It is also needed to check for missing elements. */
4153 for (elt = CONSTRUCTOR_ELTS (exp);
4155 elt = TREE_CHAIN (elt))
4157 tree index = TREE_PURPOSE (elt);
4158 HOST_WIDE_INT this_node_count;
4159 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4161 tree lo_index = TREE_OPERAND (index, 0);
4162 tree hi_index = TREE_OPERAND (index, 1);
4163 if (TREE_CODE (lo_index) != INTEGER_CST
4164 || TREE_CODE (hi_index) != INTEGER_CST)
/* A constant range [lo..hi] initializes hi-lo+1 elements.  */
4169 this_node_count = TREE_INT_CST_LOW (hi_index)
4170 - TREE_INT_CST_LOW (lo_index) + 1;
4173 this_node_count = 1;
4174 count += this_node_count;
4175 if (mostly_zeros_p (TREE_VALUE (elt)))
4176 zero_count += this_node_count;
4178 /* Clear the entire array first if there are any missing elements,
4179 or if the incidence of zero elements is >= 75%. */
4180 if (count < maxelt - minelt + 1
4181 || 4 * zero_count >= 3 * count)
4187 clear_storage (target, expr_size (exp),
4188 TYPE_ALIGN (type) / BITS_PER_UNIT);
4192 /* Inform later passes that the old value is dead. */
4193 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4195 /* Store each element of the constructor into
4196 the corresponding element of TARGET, determined
4197 by counting the elements. */
4198 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4200 elt = TREE_CHAIN (elt), i++)
4202 register enum machine_mode mode;
4206 tree value = TREE_VALUE (elt);
4207 tree index = TREE_PURPOSE (elt);
4208 rtx xtarget = target;
4210 if (cleared && is_zeros_p (value))
4213 mode = TYPE_MODE (elttype);
4214 bitsize = GET_MODE_BITSIZE (mode);
4215 unsignedp = TREE_UNSIGNED (elttype);
/* RANGE_EXPR index: one value replicated over [lo..hi].  */
4217 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4219 tree lo_index = TREE_OPERAND (index, 0);
4220 tree hi_index = TREE_OPERAND (index, 1);
4221 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4222 struct nesting *loop;
4223 HOST_WIDE_INT lo, hi, count;
4226 /* If the range is constant and "small", unroll the loop. */
4227 if (TREE_CODE (lo_index) == INTEGER_CST
4228 && TREE_CODE (hi_index) == INTEGER_CST
4229 && (lo = TREE_INT_CST_LOW (lo_index),
4230 hi = TREE_INT_CST_LOW (hi_index),
4231 count = hi - lo + 1,
4232 (GET_CODE (target) != MEM
4234 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4235 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
/* Rebase range bounds so index 0 is the array's first element.  */
4238 lo -= minelt; hi -= minelt;
4239 for (; lo <= hi; lo++)
4241 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4242 store_constructor_field (target, bitsize, bitpos,
4243 mode, value, type, cleared);
/* Otherwise emit a real run-time loop over the range.  */
4248 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4249 loop_top = gen_label_rtx ();
4250 loop_end = gen_label_rtx ();
4252 unsignedp = TREE_UNSIGNED (domain);
/* Synthesize a loop-index VAR_DECL living in a fresh pseudo.  */
4254 index = build_decl (VAR_DECL, NULL_TREE, domain);
4256 DECL_RTL (index) = index_r
4257 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4260 if (TREE_CODE (value) == SAVE_EXPR
4261 && SAVE_EXPR_RTL (value) == 0)
4263 /* Make sure value gets expanded once before the loop.  */
4265 expand_expr (value, const0_rtx, VOIDmode, 0);
4268 store_expr (lo_index, index_r, 0);
4269 loop = expand_start_loop (0);
4271 /* Assign value to element index. */
/* position = (index - domain_min) * (element size in bytes).  */
4272 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4273 size_int (BITS_PER_UNIT));
4274 position = size_binop (MULT_EXPR,
4275 size_binop (MINUS_EXPR, index,
4276 TYPE_MIN_VALUE (domain)),
4278 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4279 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4280 xtarget = change_address (target, mode, addr);
4281 if (TREE_CODE (value) == CONSTRUCTOR)
4282 store_constructor (value, xtarget, cleared);
4284 store_expr (value, xtarget, 0);
4286 expand_exit_loop_if_false (loop,
4287 build (LT_EXPR, integer_type_node,
4290 expand_increment (build (PREINCREMENT_EXPR,
4292 index, integer_one_node), 0, 0);
4294 emit_label (loop_end);
4296 /* Needed by stupid register allocation, to extend the
4297 lifetime of pseudo-regs used by target past the end of the loop.  */
4299 emit_insn (gen_rtx_USE (GET_MODE (target), target));
/* Variable index or variable-sized elements: compute the address
   at run time and store through it.  */
4302 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
4303 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
4309 index = size_int (i);
4312 index = size_binop (MINUS_EXPR, index,
4313 TYPE_MIN_VALUE (domain));
4314 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4315 size_int (BITS_PER_UNIT));
4316 position = size_binop (MULT_EXPR, index, position);
4317 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4318 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4319 xtarget = change_address (target, mode, addr);
4320 store_expr (value, xtarget, 0);
/* Constant index (or none): store at the statically known bit
   position; an absent index means "next element", i.e. slot i.  */
4325 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
4326 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4328 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4329 store_constructor_field (target, bitsize, bitpos,
4330 mode, value, type, cleared);
4334 /* set constructor assignments */
/* Case 3: Pascal-style SET_TYPE constructors (bit sets).  */
4335 else if (TREE_CODE (type) == SET_TYPE)
4337 tree elt = CONSTRUCTOR_ELTS (exp);
4338 int nbytes = int_size_in_bytes (type), nbits;
4339 tree domain = TYPE_DOMAIN (type);
4340 tree domain_min, domain_max, bitlength;
4342 /* The default implementation strategy is to extract the constant
4343 parts of the constructor, use that to initialize the target,
4344 and then "or" in whatever non-constant ranges we need in addition.
4346 If a large set is all zero or all ones, it is
4347 probably better to set it using memset (if available) or bzero.
4348 Also, if a large set has just a single range, it may also be
4349 better to first clear the set (using
4350 bzero/memset), and then set the bits we want. */
4352 /* Check for all zeros. */
4353 if (elt == NULL_TREE)
4356 clear_storage (target, expr_size (exp),
4357 TYPE_ALIGN (type) / BITS_PER_UNIT);
/* Bit length of the set = domain_max - domain_min + 1.  */
4361 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4362 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4363 bitlength = size_binop (PLUS_EXPR,
4364 size_binop (MINUS_EXPR, domain_max, domain_min),
4367 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4369 nbits = TREE_INT_CST_LOW (bitlength);
4371 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4372 are "complicated" (more than one range), initialize (the
4373 constant parts) by copying from a constant. */
4374 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4375 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4377 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4378 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4379 char *bit_buffer = (char *) alloca (nbits);
4380 HOST_WIDE_INT word = 0;
4383 int offset = 0; /* In bytes from beginning of set. */
/* Expand the constant bits into bit_buffer; ELT is left pointing
   at the remaining non-constant ranges, handled below.  */
4384 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4387 if (bit_buffer[ibit])
4389 if (BYTES_BIG_ENDIAN)
4390 word |= (1 << (set_word_size - 1 - bit_pos));
4392 word |= 1 << bit_pos;
/* Flush the accumulated word when it fills up or we hit the end.  */
4395 if (bit_pos >= set_word_size || ibit == nbits)
4397 if (word != 0 || ! cleared)
4399 rtx datum = GEN_INT (word);
4401 /* The assumption here is that it is safe to use
4402 XEXP if the set is multi-word, but not if
4403 it's single-word. */
4404 if (GET_CODE (target) == MEM)
4406 to_rtx = plus_constant (XEXP (target, 0), offset);
4407 to_rtx = change_address (target, mode, to_rtx);
4409 else if (offset == 0)
4413 emit_move_insn (to_rtx, datum);
4419 offset += set_word_size / BITS_PER_UNIT;
4425 /* Don't bother clearing storage if the set is all ones. */
4426 if (TREE_CHAIN (elt) != NULL_TREE
4427 || (TREE_PURPOSE (elt) == NULL_TREE
4429 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4430 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4431 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4432 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4434 clear_storage (target, expr_size (exp),
4435 TYPE_ALIGN (type) / BITS_PER_UNIT);
/* Now "or" in each remaining (non-constant) range of bits.  */
4438 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4440 /* start of range of element or NULL */
4441 tree startbit = TREE_PURPOSE (elt);
4442 /* end of range of element, or element value */
4443 tree endbit = TREE_VALUE (elt);
4444 #ifdef TARGET_MEM_FUNCTIONS
4445 HOST_WIDE_INT startb, endb;
4447 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4449 bitlength_rtx = expand_expr (bitlength,
4450 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4452 /* handle non-range tuple element like [ expr ] */
4453 if (startbit == NULL_TREE)
4455 startbit = save_expr (endbit);
/* Rebase bit numbers so the set starts at bit 0.  */
4458 startbit = convert (sizetype, startbit);
4459 endbit = convert (sizetype, endbit);
4460 if (! integer_zerop (domain_min))
4462 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4463 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4465 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4466 EXPAND_CONST_ADDRESS);
4467 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4468 EXPAND_CONST_ADDRESS);
/* __setbits needs an addressable operand: spill a register target
   to a stack temporary and copy it back afterwards.  */
4472 targetx = assign_stack_temp (GET_MODE (target),
4473 GET_MODE_SIZE (GET_MODE (target)),
4475 emit_move_insn (targetx, target);
4477 else if (GET_CODE (target) == MEM)
4482 #ifdef TARGET_MEM_FUNCTIONS
4483 /* Optimization: If startbit and endbit are
4484 constants divisible by BITS_PER_UNIT,
4485 call memset instead. */
4486 if (TREE_CODE (startbit) == INTEGER_CST
4487 && TREE_CODE (endbit) == INTEGER_CST
4488 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4489 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4491 emit_library_call (memset_libfunc, 0,
4493 plus_constant (XEXP (targetx, 0),
4494 startb / BITS_PER_UNIT),
4496 constm1_rtx, TYPE_MODE (integer_type_node),
4497 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4498 TYPE_MODE (sizetype));
/* General case: call the runtime helper __setbits.  */
4503 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4504 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4505 bitlength_rtx, TYPE_MODE (sizetype),
4506 startbit_rtx, TYPE_MODE (sizetype),
4507 endbit_rtx, TYPE_MODE (sizetype));
/* Copy the stack temporary back to the real (register) target.  */
4510 emit_move_insn (target, targetx);
4518 /* Store the value of EXP (an expression tree)
4519 into a subfield of TARGET which has mode MODE and occupies
4520 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4521 If MODE is VOIDmode, it means that we are storing into a bit-field.
4523 If VALUE_MODE is VOIDmode, return nothing in particular.
4524 UNSIGNEDP is not used in this case.
4526 Otherwise, return an rtx for the value stored. This rtx
4527 has mode VALUE_MODE if that is convenient to do.
4528 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4530 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4531 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4533 ALIAS_SET is the alias set for the destination. This value will
4534 (in general) be different from that for TARGET, since TARGET is a
4535 reference to the containing structure. */
/* NOTE(review): this listing elides scattered original lines in this
   function (some declarations, braces and else-arms); comments below
   annotate only the visible code.  */
4538 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4539 unsignedp, align, total_size, alias_set)
4541 int bitsize, bitpos;
4542 enum machine_mode mode;
4544 enum machine_mode value_mode;
4550 HOST_WIDE_INT width_mask = 0;
4552 if (TREE_CODE (exp) == ERROR_MARK)
/* Mask of the low BITSIZE bits, used to trim the returned value.  */
4555 if (bitsize < HOST_BITS_PER_WIDE_INT)
4556 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4558 /* If we are storing into an unaligned field of an aligned union that is
4559 in a register, we may have the mode of TARGET being an integer mode but
4560 MODE == BLKmode. In that case, get an aligned object whose size and
4561 alignment are the same as TARGET and store TARGET into it (we can avoid
4562 the store if the field being stored is the entire width of TARGET). Then
4563 call ourselves recursively to store the field into a BLKmode version of
4564 that object. Finally, load from the object into TARGET. This is not
4565 very efficient in general, but should only be slightly more expensive
4566 than the otherwise-required unaligned accesses. Perhaps this can be
4567 cleaned up later. */
4570 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4572 rtx object = assign_stack_temp (GET_MODE (target),
4573 GET_MODE_SIZE (GET_MODE (target)), 0);
4574 rtx blk_object = copy_rtx (object);
4576 MEM_SET_IN_STRUCT_P (object, 1);
4577 MEM_SET_IN_STRUCT_P (blk_object, 1);
4578 PUT_MODE (blk_object, BLKmode);
/* If the field is narrower than TARGET, preload the temp with the
   current value so the untouched bits survive the round trip.  */
4580 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4581 emit_move_insn (object, target);
4583 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4584 align, total_size, alias_set);
4586 /* Even though we aren't returning target, we need to
4587 give it the updated value. */
4588 emit_move_insn (target, object);
4593 /* If the structure is in a register or if the component
4594 is a bit field, we cannot use addressing to access it.
4595 Use bit-field techniques or SUBREG to store in it. */
4597 if (mode == VOIDmode
4598 || (mode != BLKmode && ! direct_store[(int) mode]
4599 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4600 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4601 || GET_CODE (target) == REG
4602 || GET_CODE (target) == SUBREG
4603 /* If the field isn't aligned enough to store as an ordinary memref,
4604 store it as a bit field. */
4605 || (SLOW_UNALIGNED_ACCESS
4606 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4607 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4609 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4611 /* If BITSIZE is narrower than the size of the type of EXP
4612 we will be narrowing TEMP. Normally, what's wanted are the
4613 low-order bits. However, if EXP's type is a record and this is
4614 big-endian machine, we want the upper BITSIZE bits. */
4615 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4616 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4617 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4618 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4619 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4623 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE.  */
4625 if (mode != VOIDmode && mode != BLKmode
4626 && mode != TYPE_MODE (TREE_TYPE (exp)))
4627 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4629 /* If the modes of TARGET and TEMP are both BLKmode, both
4630 must be in memory and BITPOS must be aligned on a byte
4631 boundary. If so, we simply do a block copy. */
4632 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4634 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4635 || bitpos % BITS_PER_UNIT != 0)
4638 target = change_address (target, VOIDmode,
4639 plus_constant (XEXP (target, 0),
4640 bitpos / BITS_PER_UNIT));
/* Copy the field rounded up to a whole number of bytes.  */
4642 emit_block_move (target, temp,
4643 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4647 return value_mode == VOIDmode ? const0_rtx : target;
4650 /* Store the value in the bitfield. */
4651 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4652 if (value_mode != VOIDmode)
4654 /* The caller wants an rtx for the value. */
4655 /* If possible, avoid refetching from the bitfield itself. */
4657 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4660 enum machine_mode tmode;
/* Unsigned result: mask TEMP down to BITSIZE bits.  */
4663 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
/* Signed result: sign-extend by a left shift then an arithmetic
   right shift of (mode width - BITSIZE) bits.  */
4664 tmode = GET_MODE (temp);
4665 if (tmode == VOIDmode)
4667 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4668 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4669 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
/* Otherwise re-read the stored bits from the field itself.  */
4671 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4672 NULL_RTX, value_mode, 0, align,
/* Addressable (memory) case: build a narrower MEM at the field's
   byte offset and let store_expr do the store.  */
4679 rtx addr = XEXP (target, 0);
4682 /* If a value is wanted, it must be the lhs;
4683 so make the address stable for multiple use. */
4685 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4686 && ! CONSTANT_ADDRESS_P (addr)
4687 /* A frame-pointer reference is already stable. */
4688 && ! (GET_CODE (addr) == PLUS
4689 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4690 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4691 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4692 addr = copy_to_reg (addr);
4694 /* Now build a reference to just the desired component. */
4696 to_rtx = copy_rtx (change_address (target, mode,
4697 plus_constant (addr,
4699 / BITS_PER_UNIT))));
4700 MEM_SET_IN_STRUCT_P (to_rtx, 1);
4701 MEM_ALIAS_SET (to_rtx) = alias_set;
4703 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4707 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4708 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4709 ARRAY_REFs and find the ultimate containing object, which we return.
4711 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4712 bit position, and *PUNSIGNEDP to the signedness of the field.
4713 If the position of the field is variable, we store a tree
4714 giving the variable offset (in units) in *POFFSET.
4715 This offset is in addition to the bit position.
4716 If the position is not variable, we store 0 in *POFFSET.
4717 We set *PALIGNMENT to the alignment in bytes of the address that will be
4718 computed. This is the alignment of the thing we return if *POFFSET
4719 is zero, but can be more less strictly aligned if *POFFSET is nonzero.
4721 If any of the extraction expressions is volatile,
4722 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4724 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4725 is a mode that can be used to access the field. In that case, *PBITSIZE
4728 If the field describes a variable-sized object, *PMODE is set to
4729 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4730 this case, but the address of the object can be found. */
/* NOTE(review): this listing is an incomplete sample of the original file --
   the embedded line numbers jump (4734 -> 4739, etc.), so declarations,
   braces and some statements are missing.  Comments below describe only
   what is visible; the contract is stated in the header comment above.  */
4733 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4734 punsignedp, pvolatilep, palignment)
4739 enum machine_mode *pmode;
/* Remember the full expression: needed later to wrap a placeholder-bearing
   offset in a WITH_RECORD_EXPR (see line 4909 below).  */
4744 tree orig_exp = exp;
4746 enum machine_mode mode = VOIDmode;
4747 tree offset = integer_zero_node;
/* Start from the most optimistic alignment; it is only ever lowered.  */
4748 unsigned int alignment = BIGGEST_ALIGNMENT;
/* Determine the size/mode/signedness of the outermost reference.  */
4750 if (TREE_CODE (exp) == COMPONENT_REF)
4752 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
/* A non-bit-field member has a usable machine mode; bit-fields keep
   mode == VOIDmode as the header comment promises.  */
4753 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4754 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4755 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4757 else if (TREE_CODE (exp) == BIT_FIELD_REF)
/* For BIT_FIELD_REF, operand 1 is the size in bits.  */
4759 size_tree = TREE_OPERAND (exp, 1);
4760 *punsignedp = TREE_UNSIGNED (exp);
4764 mode = TYPE_MODE (TREE_TYPE (exp));
/* BLKmode objects have no mode-derived size; fall back to the type size.  */
4765 if (mode == BLKmode)
4766 size_tree = TYPE_SIZE (TREE_TYPE (exp));
4768 *pbitsize = GET_MODE_BITSIZE (mode);
4769 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
/* A non-constant size means a variable-sized object: signal it with
   BLKmode and *pbitsize == -1 (see header comment).  */
4774 if (TREE_CODE (size_tree) != INTEGER_CST)
4775 mode = BLKmode, *pbitsize = -1;
4777 *pbitsize = TREE_INT_CST_LOW (size_tree);
4780 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4781 and find the ultimate containing object. */
/* Main descent loop (loop construct itself is in the missing lines):
   peel COMPONENT_REF / BIT_FIELD_REF / ARRAY_REF layers, accumulating
   *pbitpos (constant bits) and `offset' (variable units).  */
4787 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4789 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4790 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4791 : TREE_OPERAND (exp, 2));
/* Split the position into a constant part and a variable part.  */
4792 tree constant = integer_zero_node, var = pos;
4794 /* If this field hasn't been filled in yet, don't go
4795 past it. This should only happen when folding expressions
4796 made during type construction. */
4800 /* Assume here that the offset is a multiple of a unit.
4801 If not, there should be an explicitly added constant. */
4802 if (TREE_CODE (pos) == PLUS_EXPR
4803 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4804 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4805 else if (TREE_CODE (pos) == INTEGER_CST)
4806 constant = pos, var = integer_zero_node;
/* Constant bits go into *pbitpos; the variable bits are converted to
   units (EXACT_DIV by BITS_PER_UNIT) and accumulated in `offset'.  */
4808 *pbitpos += TREE_INT_CST_LOW (constant);
4809 offset = size_binop (PLUS_EXPR, offset,
4810 size_binop (EXACT_DIV_EXPR, var,
4811 size_int (BITS_PER_UNIT)));
4814 else if (TREE_CODE (exp) == ARRAY_REF)
4816 /* This code is based on the code in case ARRAY_REF in expand_expr
4817 below. We assume here that the size of an array element is
4818 always an integral multiple of BITS_PER_UNIT. */
4820 tree index = TREE_OPERAND (exp, 1);
4821 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4823 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4824 tree index_type = TREE_TYPE (index);
/* Widen/narrow the index to sizetype precision before arithmetic.  */
4827 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4829 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4831 index_type = TREE_TYPE (index);
4834 /* Optimize the special-case of a zero lower bound.
4836 We convert the low_bound to sizetype to avoid some problems
4837 with constant folding. (E.g. suppose the lower bound is 1,
4838 and its mode is QI. Without the conversion, (ARRAY
4839 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4840 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4842 But sizetype isn't quite right either (especially if
4843 the lowbound is negative). FIXME */
4845 if (! integer_zerop (low_bound))
4846 index = fold (build (MINUS_EXPR, index_type, index,
4847 convert (sizetype, low_bound)));
4849 if (TREE_CODE (index) == INTEGER_CST)
4851 index = convert (sbitsizetype, index);
4852 index_type = TREE_TYPE (index);
/* First attempt: index * element-size, computed in BITS.  */
4855 xindex = fold (build (MULT_EXPR, sbitsizetype, index,
4856 convert (sbitsizetype,
4857 TYPE_SIZE (TREE_TYPE (exp)))));
/* Fold the product into *pbitpos only if it stayed a single-word
   constant (high word zero => no overflow into the high part).  */
4859 if (TREE_CODE (xindex) == INTEGER_CST
4860 && TREE_INT_CST_HIGH (xindex) == 0)
4861 *pbitpos += TREE_INT_CST_LOW (xindex);
4864 /* Either the bit offset calculated above is not constant, or
4865 it overflowed. In either case, redo the multiplication
4866 against the size in units. This is especially important
4867 in the non-constant case to avoid a division at runtime. */
4868 xindex = fold (build (MULT_EXPR, ssizetype, index,
4870 TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
/* A PLACEHOLDER_EXPR in the size must be resolved against this
   very object, hence the WITH_RECORD_EXPR wrapper.  */
4872 if (contains_placeholder_p (xindex))
4873 xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);
4875 offset = size_binop (PLUS_EXPR, offset, xindex);
/* Stop descending at anything that is not a reference or a
   mode-preserving NOP/CONVERT (unions get special treatment).  */
4878 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4879 && ! ((TREE_CODE (exp) == NOP_EXPR
4880 || TREE_CODE (exp) == CONVERT_EXPR)
4881 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4882 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4884 && (TYPE_MODE (TREE_TYPE (exp))
4885 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4888 /* If any reference in the chain is volatile, the effect is volatile. */
4889 if (TREE_THIS_VOLATILE (exp))
4892 /* If the offset is non-constant already, then we can't assume any
4893 alignment more than the alignment here. */
4894 if (! integer_zerop (offset))
4895 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
/* Descend one level and iterate.  */
4897 exp = TREE_OPERAND (exp, 0)
4900 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4901 alignment = MIN (alignment, DECL_ALIGN (exp));
4902 else if (TREE_TYPE (exp) != 0)
4903 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
/* A zero offset is reported as *poffset == 0, per the contract above.  */
4905 if (integer_zerop (offset))
4908 if (offset != 0 && contains_placeholder_p (offset))
4909 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
/* Report alignment in BYTES (internal `alignment' was in bits).  */
4913 *palignment = alignment / BITS_PER_UNIT;
4917 /* Subroutine of expand_exp: compute memory_usage from modifier. */
/* Map an expand_modifier onto the memory_use_mode used by the
   -fcheck-memory-usage instrumentation (see callers that emit
   chkr_check_addr_libfunc calls).
   NOTE(review): listing is incomplete -- the switch head, the RO case
   label(s) and the trailing default/closing lines are missing here.  */
4918 static enum memory_use_mode
4919 get_memory_usage_from_modifier (modifier)
4920 enum expand_modifier modifier;
4926 return MEMORY_USE_RO;
4928 case EXPAND_MEMORY_USE_WO:
4929 return MEMORY_USE_WO;
4931 case EXPAND_MEMORY_USE_RW:
4932 return MEMORY_USE_RW;
4934 case EXPAND_MEMORY_USE_DONT:
4935 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
4936 MEMORY_USE_DONT, because they are modifiers to a call of
4937 expand_expr in the ADDR_EXPR case of expand_expr. */
4938 case EXPAND_CONST_ADDRESS:
4939 case EXPAND_INITIALIZER:
4940 return MEMORY_USE_DONT;
/* EXPAND_MEMORY_USE_BAD: handling (presumably an abort) is in the
   missing lines -- TODO confirm against the full source.  */
4941 case EXPAND_MEMORY_USE_BAD:
4947 /* Given an rtx VALUE that may contain additions and multiplications,
4948 return an equivalent value that just refers to a register or memory.
4949 This is done by generating instructions to perform the arithmetic
4950 and returning a pseudo-register containing the value.
4952 The returned value may be a REG, SUBREG, MEM or constant. */
/* See the header comment above: reduce an rtx containing PLUS/MINUS/MULT
   to a REG, SUBREG, MEM or constant, emitting insns as needed.
   NOTE(review): listing is incomplete -- declarations of tmp/op2 and
   several statements are missing between the visible lines.  */
4955 force_operand (value, target)
4958 register optab binoptab = 0;
4959 /* Use a temporary to force order of execution of calls to
4963 /* Use subtarget as the target for operand 0 of a binary operation. */
4964 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4966 /* Check for a PIC address load. */
/* pic_offset_table_rtx +/- SYMBOL_REF/LABEL_REF/CONST: emit it as a
   single move so the PIC load stays recognizable.  */
4968 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
4969 && XEXP (value, 0) == pic_offset_table_rtx
4970 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
4971 || GET_CODE (XEXP (value, 1)) == LABEL_REF
4972 || GET_CODE (XEXP (value, 1)) == CONST))
4975 subtarget = gen_reg_rtx (GET_MODE (value));
4976 emit_move_insn (subtarget, value);
/* Select the optab for the outermost binary operation, if any.  */
4980 if (GET_CODE (value) == PLUS)
4981 binoptab = add_optab;
4982 else if (GET_CODE (value) == MINUS)
4983 binoptab = sub_optab;
4984 else if (GET_CODE (value) == MULT)
4986 op2 = XEXP (value, 1);
/* Force op2 through a fresh pseudo unless it is constant or a register
   distinct from subtarget (avoids clobbering operand 0's target).  */
4987 if (!CONSTANT_P (op2)
4988 && !(GET_CODE (op2) == REG && op2 != subtarget))
4990 tmp = force_operand (XEXP (value, 0), subtarget);
4991 return expand_mult (GET_MODE (value), tmp,
4992 force_operand (op2, NULL_RTX),
4998 op2 = XEXP (value, 1);
4999 if (!CONSTANT_P (op2)
5000 && !(GET_CODE (op2) == REG && op2 != subtarget))
/* Canonicalize "x - const" into "x + (-const)".  */
5002 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5004 binoptab = add_optab;
5005 op2 = negate_rtx (GET_MODE (value), op2);
5008 /* Check for an addition with OP2 a constant integer and our first
5009 operand a PLUS of a virtual register and something else. In that
5010 case, we want to emit the sum of the virtual register and the
5011 constant first and then add the other value. This allows virtual
5012 register instantiation to simply modify the constant rather than
5013 creating another one around this addition. */
5014 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5015 && GET_CODE (XEXP (value, 0)) == PLUS
5016 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5017 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5018 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
/* virtual_reg + const first, then the remaining addend.  */
5020 rtx temp = expand_binop (GET_MODE (value), binoptab,
5021 XEXP (XEXP (value, 0), 0), op2,
5022 subtarget, 0, OPTAB_LIB_WIDEN);
5023 return expand_binop (GET_MODE (value), binoptab, temp,
5024 force_operand (XEXP (XEXP (value, 0), 1), 0),
5025 target, 0, OPTAB_LIB_WIDEN);
/* General binary case: recursively force each operand, then expand.  */
5028 tmp = force_operand (XEXP (value, 0), subtarget);
5029 return expand_binop (GET_MODE (value), binoptab, tmp,
5030 force_operand (op2, NULL_RTX),
5031 target, 0, OPTAB_LIB_WIDEN);
5032 /* We give UNSIGNEDP = 0 to expand_binop
5033 because the only operations we are expanding here are signed ones. */
5038 /* Subroutine of expand_expr:
5039 save the non-copied parts (LIST) of an expr (LHS), and return a list
5040 which can restore these values to their previous values,
5041 should something modify their storage. */
/* See header comment above: snapshot the non-copied parts of LHS into
   temporaries and return a TREE_LIST mapping each part (TREE_PURPOSE)
   to an RTL_EXPR holding the saved value (TREE_VALUE).
   NOTE(review): listing is incomplete -- declarations of parts/tail and
   some closing lines are missing.  */
5044 save_noncopied_parts (lhs, list)
5051 for (tail = list; tail; tail = TREE_CHAIN (tail))
/* Nested TREE_LISTs are flattened by recursing and chaining results.  */
5052 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5053 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5056 tree part = TREE_VALUE (tail);
5057 tree part_type = TREE_TYPE (part);
/* Build a reference LHS.part whose current value is to be saved.  */
5058 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5059 rtx target = assign_temp (part_type, 0, 1, 1);
/* Ensure the temporary's address is valid for this mode.  */
5060 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5061 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5062 parts = tree_cons (to_be_saved,
5063 build (RTL_EXPR, part_type, NULL_TREE,
/* Copy the current value of the part into the temporary now.  */
5066 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5071 /* Subroutine of expand_expr:
5072 record the non-copied parts (LIST) of an expr (LHS), and return a list
5073 which specifies the initial values of these parts. */
/* See header comment above: build a TREE_LIST pairing each initial value
   (TREE_PURPOSE, taken from LIST) with the lvalue LHS.part it initializes
   (TREE_VALUE).  No code is emitted here, unlike save_noncopied_parts.
   NOTE(review): listing is incomplete -- declarations and the return are
   in the missing lines.  */
5076 init_noncopied_parts (lhs, list)
5083 for (tail = list; tail; tail = TREE_CHAIN (tail))
/* Flatten nested TREE_LISTs by recursion, as in save_noncopied_parts.  */
5084 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5085 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5088 tree part = TREE_VALUE (tail);
5089 tree part_type = TREE_TYPE (part);
5090 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5091 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5096 /* Subroutine of expand_expr: return nonzero iff there is no way that
5097 EXP can reference X, which is being modified. TOP_P is nonzero if this
5098 call is going to be used to determine whether we need a temporary
5099 for EXP, as opposed to a recursive call to this function.
5101 It is always safe for this routine to return zero since it merely
5102 searches for optimization opportunities. */
/* See header comment above: nonzero iff EXP cannot reference X (which is
   being modified).  Returning 0 is always safe; 1 enables an optimization.
   NOTE(review): listing is incomplete -- the return type, local
   declarations (exp_rtl, nops, i, rtn), several case labels and closing
   braces are in the missing lines.  Comments describe visible code only.  */
5105 safe_from_p (x, exp, top_p)
/* Static scratch state for the SAVE_EXPR -> ERROR_MARK rewriting trick
   (see the long comment at line 5253 below).  Only the top-level call
   (top_p) initializes and restores this state, so recursion is safe
   within one top-level invocation; the function is NOT reentrant across
   threads, but GCC of this era is single-threaded.  */
5112 static int save_expr_count;
5113 static int save_expr_size = 0;
5114 static tree *save_expr_rewritten;
5115 static tree save_expr_trees[256];
5118 /* If EXP has varying size, we MUST use a target since we currently
5119 have no way of allocating temporaries of variable size
5120 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5121 So we assume here that something at a higher level has prevented a
5122 clash. This is somewhat bogus, but the best we can do. Only
5123 do this when X is BLKmode and when we are at the top level. */
5124 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5125 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5126 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5127 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5128 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5130 && GET_MODE (x) == BLKmode))
/* Top-level entry: set up the SAVE_EXPR bookkeeping, recurse with it
   active, then restore every rewritten node back to SAVE_EXPR.  */
5133 if (top_p && save_expr_size == 0)
5137 save_expr_count = 0;
5138 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5139 save_expr_rewritten = &save_expr_trees[0];
5141 rtn = safe_from_p (x, exp, 1);
5143 for (i = 0; i < save_expr_count; ++i)
5145 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
/* Sanity: every recorded node should still be ERROR_MARK here.  */
5147 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5155 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5156 find the underlying pseudo. */
5157 if (GET_CODE (x) == SUBREG)
5160 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5164 /* If X is a location in the outgoing argument area, it is always safe. */
5165 if (GET_CODE (x) == MEM
5166 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5167 || (GET_CODE (XEXP (x, 0)) == PLUS
5168 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
/* Dispatch on the CLASS of the tree code first ('d' decl, '1'/'2'
   unary/binary, 'e'/'r' expression/reference, etc.).  */
5171 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5174 exp_rtl = DECL_RTL (exp);
/* 'x' class: TREE_LIST is safe iff value and chain both are.  */
5181 if (TREE_CODE (exp) == TREE_LIST)
5182 return ((TREE_VALUE (exp) == 0
5183 || safe_from_p (x, TREE_VALUE (exp), 0))
5184 && (TREE_CHAIN (exp) == 0
5185 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5186 else if (TREE_CODE (exp) == ERROR_MARK)
5187 return 1; /* An already-visited SAVE_EXPR? */
/* Unary: safe iff the single operand is safe.  */
5192 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
/* Binary/comparison: safe iff both operands are safe.  */
5196 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5197 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5201 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5202 the expression. If it is set, we conflict iff we are that rtx or
5203 both are in memory. Otherwise, we check all operands of the
5204 expression recursively. */
5206 switch (TREE_CODE (exp))
/* ADDR_EXPR (label from context): taking an address is safe for
   static objects or when the operand itself is safe.  */
5209 return (staticp (TREE_OPERAND (exp, 0))
5210 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5211 || TREE_STATIC (exp));
/* INDIRECT_REF (label in missing lines): any MEM may alias it.  */
5214 if (GET_CODE (x) == MEM)
5219 exp_rtl = CALL_EXPR_RTL (exp);
5222 /* Assume that the call will clobber all hard registers and
5224 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5225 || GET_CODE (x) == MEM)
5232 /* If a sequence exists, we would have to scan every instruction
5233 in the sequence to see if it was safe. This is probably not
5235 if (RTL_EXPR_SEQUENCE (exp))
5238 exp_rtl = RTL_EXPR_RTL (exp);
5241 case WITH_CLEANUP_EXPR:
5242 exp_rtl = RTL_EXPR_RTL (exp);
5245 case CLEANUP_POINT_EXPR:
5246 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5249 exp_rtl = SAVE_EXPR_RTL (exp);
5253 /* This SAVE_EXPR might appear many times in the top-level
5254 safe_from_p() expression, and if it has a complex
5255 subexpression, examining it multiple times could result
5256 in a combinatorial explosion. E.g. on an Alpha
5257 running at least 200MHz, a Fortran test case compiled with
5258 optimization took about 28 minutes to compile -- even though
5259 it was only a few lines long, and the complicated line causing
5260 so much time to be spent in the earlier version of safe_from_p()
5261 had only 293 or so unique nodes.
5263 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5264 where it is so we can turn it back in the top-level safe_from_p()
5267 /* For now, don't bother re-sizing the array. */
5268 if (save_expr_count >= save_expr_size)
5270 save_expr_rewritten[save_expr_count++] = exp;
5272 nops = tree_code_length[(int) SAVE_EXPR];
5273 for (i = 0; i < nops; i++)
5275 tree operand = TREE_OPERAND (exp, i);
5276 if (operand == NULL_TREE)
/* Mark visited BEFORE recursing so a re-encounter of this same
   SAVE_EXPR short-circuits via the ERROR_MARK case above.  */
5278 TREE_SET_CODE (exp, ERROR_MARK);
5279 if (!safe_from_p (x, operand, 0))
/* Restore the code around the recursive call (order matters --
   do not reorder these TREE_SET_CODE calls).  */
5281 TREE_SET_CODE (exp, SAVE_EXPR);
5283 TREE_SET_CODE (exp, ERROR_MARK);
5287 /* The only operand we look at is operand 1. The rest aren't
5288 part of the expression. */
5289 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5291 case METHOD_CALL_EXPR:
5292 /* This takes a rtx argument, but shouldn't appear here. */
5299 /* If we have an rtx, we do not need to scan our operands. */
/* Default: recursively check every operand of the expression.  */
5303 nops = tree_code_length[(int) TREE_CODE (exp)];
5304 for (i = 0; i < nops; i++)
5305 if (TREE_OPERAND (exp, i) != 0
5306 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5310 /* If we have an rtl, find any enclosed object. Then see if we conflict
5314 if (GET_CODE (exp_rtl) == SUBREG)
5316 exp_rtl = SUBREG_REG (exp_rtl);
5317 if (GET_CODE (exp_rtl) == REG
5318 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5322 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5323 are memory and EXP is not readonly. */
5324 return ! (rtx_equal_p (x, exp_rtl)
5325 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5326 && ! TREE_READONLY (exp)));
5329 /* If we reach here, it is safe. */
5333 /* Subroutine of expand_expr: return nonzero iff EXP is an
5334 expression whose type is statically determinable. */
/* NOTE(review): the function header line (presumably `fixed_type_p')
   is missing from this incomplete listing -- confirm against the full
   source.  Only the visible predicate body is documented here.  */
5340 if (TREE_CODE (exp) == PARM_DECL
5341 || TREE_CODE (exp) == VAR_DECL
5342 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5343 || TREE_CODE (exp) == COMPONENT_REF
5344 || TREE_CODE (exp) == ARRAY_REF)
5349 /* Subroutine of expand_expr: return rtx if EXP is a
5350 variable or parameter; else return 0. */
/* NOTE(review): the function header (presumably `var_rtx') and the
   case labels / default return are missing from this listing -- the
   visible fragment returns DECL_RTL for the decl cases.  */
5357 switch (TREE_CODE (exp))
5361 return DECL_RTL (exp);
5367 #ifdef MAX_INTEGER_COMPUTATION_MODE
/* Reject (via fatal) any integer operation in EXP whose mode is wider
   than MAX_INTEGER_COMPUTATION_MODE.  Checks the result type of unary,
   binary and relational operations, then each operand's type.
   Only compiled when MAX_INTEGER_COMPUTATION_MODE is defined (see the
   #ifdef just above).
   NOTE(review): listing is incomplete -- the return type line and some
   braces/returns are missing.  */
5369 check_max_integer_computation_mode (exp)
5372 enum tree_code code = TREE_CODE (exp);
5373 enum machine_mode mode;
5375 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5376 if (code == NOP_EXPR
5377 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5380 /* First check the type of the overall operation. We need only look at
5381 unary, binary and relational operations. */
5382 if (TREE_CODE_CLASS (code) == '1'
5383 || TREE_CODE_CLASS (code) == '2'
5384 || TREE_CODE_CLASS (code) == '<')
5386 mode = TYPE_MODE (TREE_TYPE (exp));
/* Mode enums are ordered by width within MODE_INT, so `>' compares
   widths here -- same idiom in the three checks below.  */
5387 if (GET_MODE_CLASS (mode) == MODE_INT
5388 && mode > MAX_INTEGER_COMPUTATION_MODE)
5389 fatal ("unsupported wide integer operation");
5392 /* Check operand of a unary op. */
5393 if (TREE_CODE_CLASS (code) == '1')
5395 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5396 if (GET_MODE_CLASS (mode) == MODE_INT
5397 && mode > MAX_INTEGER_COMPUTATION_MODE)
5398 fatal ("unsupported wide integer operation");
5401 /* Check operands of a binary/comparison op. */
5402 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5404 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5405 if (GET_MODE_CLASS (mode) == MODE_INT
5406 && mode > MAX_INTEGER_COMPUTATION_MODE)
5407 fatal ("unsupported wide integer operation");
5409 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5410 if (GET_MODE_CLASS (mode) == MODE_INT
5411 && mode > MAX_INTEGER_COMPUTATION_MODE)
5412 fatal ("unsupported wide integer operation");
5418 /* expand_expr: generate code for computing expression EXP.
5419 An rtx for the computed value is returned. The value is never null.
5420 In the case of a void EXP, const0_rtx is returned.
5422 The value may be stored in TARGET if TARGET is nonzero.
5423 TARGET is just a suggestion; callers must assume that
5424 the rtx returned may not be the same as TARGET.
5426 If TARGET is CONST0_RTX, it means that the value will be ignored.
5428 If TMODE is not VOIDmode, it suggests generating the
5429 result in mode TMODE. But this is done only when convenient.
5430 Otherwise, TMODE is ignored and the value generated in its natural mode.
5431 TMODE is just a suggestion; callers must assume that
5432 the rtx returned may not have mode TMODE.
5434 Note that TARGET may have neither TMODE nor MODE. In that case, it
5435 probably will not be used.
5437 If MODIFIER is EXPAND_SUM then when EXP is an addition
5438 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5439 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5440 products as above, or REG or MEM, or constant.
5441 Ordinarily in such cases we would output mul or add instructions
5442 and then return a pseudo reg containing the sum.
5444 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5445 it also marks a label as absolutely required (it can't be dead).
5446 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5447 This is used for outputting expressions used in initializers.
5449 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5450 with a constant address even if that address is not normally legitimate.
5451 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
5454 expand_expr (exp, target, tmode, modifier)
5457 enum machine_mode tmode;
5458 enum expand_modifier modifier;
5460 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
5461 This is static so it will be accessible to our recursive callees. */
5462 static tree placeholder_list = 0;
5463 register rtx op0, op1, temp;
5464 tree type = TREE_TYPE (exp);
5465 int unsignedp = TREE_UNSIGNED (type);
5466 register enum machine_mode mode;
5467 register enum tree_code code = TREE_CODE (exp);
5469 rtx subtarget, original_target;
5472 /* Used by check-memory-usage to make modifier read only. */
5473 enum expand_modifier ro_modifier;
5475 /* Handle ERROR_MARK before anybody tries to access its type. */
5476 if (TREE_CODE (exp) == ERROR_MARK)
5478 op0 = CONST0_RTX (tmode);
5484 mode = TYPE_MODE (type);
5485 /* Use subtarget as the target for operand 0 of a binary operation. */
5486 subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5487 original_target = target;
5488 ignore = (target == const0_rtx
5489 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5490 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5491 || code == COND_EXPR)
5492 && TREE_CODE (type) == VOID_TYPE));
5494 /* Make a read-only version of the modifier. */
5495 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5496 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5497 ro_modifier = modifier;
5499 ro_modifier = EXPAND_NORMAL;
5501 /* Don't use hard regs as subtargets, because the combiner
5502 can only handle pseudo regs. */
5503 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5505 /* Avoid subtargets inside loops,
5506 since they hide some invariant expressions. */
5507 if (preserve_subexpressions_p ())
5510 /* If we are going to ignore this result, we need only do something
5511 if there is a side-effect somewhere in the expression. If there
5512 is, short-circuit the most common cases here. Note that we must
5513 not call expand_expr with anything but const0_rtx in case this
5514 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5518 if (! TREE_SIDE_EFFECTS (exp))
5521 /* Ensure we reference a volatile object even if value is ignored. */
5522 if (TREE_THIS_VOLATILE (exp)
5523 && TREE_CODE (exp) != FUNCTION_DECL
5524 && mode != VOIDmode && mode != BLKmode)
5526 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5527 if (GET_CODE (temp) == MEM)
5528 temp = copy_to_reg (temp);
5532 if (TREE_CODE_CLASS (code) == '1')
5533 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5534 VOIDmode, ro_modifier);
5535 else if (TREE_CODE_CLASS (code) == '2'
5536 || TREE_CODE_CLASS (code) == '<')
5538 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5539 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5542 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5543 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5544 /* If the second operand has no side effects, just evaluate
5546 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5547 VOIDmode, ro_modifier);
5552 #ifdef MAX_INTEGER_COMPUTATION_MODE
5554 && TREE_CODE (exp) != INTEGER_CST
5555 && TREE_CODE (exp) != PARM_DECL
5556 && TREE_CODE (exp) != ARRAY_REF
5557 && TREE_CODE (exp) != COMPONENT_REF
5558 && TREE_CODE (exp) != BIT_FIELD_REF
5559 && TREE_CODE (exp) != INDIRECT_REF
5560 && TREE_CODE (exp) != CALL_EXPR
5561 && TREE_CODE (exp) != VAR_DECL)
5563 enum machine_mode mode = GET_MODE (target);
5565 if (GET_MODE_CLASS (mode) == MODE_INT
5566 && mode > MAX_INTEGER_COMPUTATION_MODE)
5567 fatal ("unsupported wide integer operation");
5570 if (TREE_CODE (exp) != INTEGER_CST
5571 && TREE_CODE (exp) != PARM_DECL
5572 && TREE_CODE (exp) != ARRAY_REF
5573 && TREE_CODE (exp) != COMPONENT_REF
5574 && TREE_CODE (exp) != BIT_FIELD_REF
5575 && TREE_CODE (exp) != INDIRECT_REF
5576 && TREE_CODE (exp) != VAR_DECL
5577 && TREE_CODE (exp) != CALL_EXPR
5578 && GET_MODE_CLASS (tmode) == MODE_INT
5579 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5580 fatal ("unsupported wide integer operation");
5582 check_max_integer_computation_mode (exp);
5585 /* If will do cse, generate all results into pseudo registers
5586 since 1) that allows cse to find more things
5587 and 2) otherwise cse could produce an insn the machine
5590 if (! cse_not_expected && mode != BLKmode && target
5591 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5598 tree function = decl_function_context (exp);
5599 /* Handle using a label in a containing function. */
5600 if (function != current_function_decl
5601 && function != inline_function_decl && function != 0)
5603 struct function *p = find_function_data (function);
5604 /* Allocate in the memory associated with the function
5605 that the label is in. */
5606 push_obstacks (p->function_obstack,
5607 p->function_maybepermanent_obstack);
5609 p->forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5616 if (modifier == EXPAND_INITIALIZER)
5617 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5621 temp = gen_rtx_MEM (FUNCTION_MODE,
5622 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5623 if (function != current_function_decl
5624 && function != inline_function_decl && function != 0)
5625 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5630 if (DECL_RTL (exp) == 0)
5632 error_with_decl (exp, "prior parameter's size depends on `%s'");
5633 return CONST0_RTX (mode);
5636 /* ... fall through ... */
5639 /* If a static var's type was incomplete when the decl was written,
5640 but the type is complete now, lay out the decl now. */
5641 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5642 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5644 push_obstacks_nochange ();
5645 end_temporary_allocation ();
5646 layout_decl (exp, 0);
5647 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5651 /* Although static-storage variables start off initialized, according to
5652 ANSI C, a memcpy could overwrite them with uninitialized values. So
5653 we check them too. This also lets us check for read-only variables
5654 accessed via a non-const declaration, in case it won't be detected
5655 any other way (e.g., in an embedded system or OS kernel without
5658 Aggregates are not checked here; they're handled elsewhere. */
5659 if (current_function_check_memory_usage && code == VAR_DECL
5660 && GET_CODE (DECL_RTL (exp)) == MEM
5661 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5663 enum memory_use_mode memory_usage;
5664 memory_usage = get_memory_usage_from_modifier (modifier);
5666 if (memory_usage != MEMORY_USE_DONT)
5667 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5668 XEXP (DECL_RTL (exp), 0), ptr_mode,
5669 GEN_INT (int_size_in_bytes (type)),
5670 TYPE_MODE (sizetype),
5671 GEN_INT (memory_usage),
5672 TYPE_MODE (integer_type_node));
5675 /* ... fall through ... */
5679 if (DECL_RTL (exp) == 0)
5682 /* Ensure variable marked as used even if it doesn't go through
5683 a parser. If it hasn't be used yet, write out an external
5685 if (! TREE_USED (exp))
5687 assemble_external (exp);
5688 TREE_USED (exp) = 1;
5691 /* Show we haven't gotten RTL for this yet. */
5694 /* Handle variables inherited from containing functions. */
5695 context = decl_function_context (exp);
5697 /* We treat inline_function_decl as an alias for the current function
5698 because that is the inline function whose vars, types, etc.
5699 are being merged into the current function.
5700 See expand_inline_function. */
5702 if (context != 0 && context != current_function_decl
5703 && context != inline_function_decl
5704 /* If var is static, we don't need a static chain to access it. */
5705 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5706 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5710 /* Mark as non-local and addressable. */
5711 DECL_NONLOCAL (exp) = 1;
5712 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5714 mark_addressable (exp);
5715 if (GET_CODE (DECL_RTL (exp)) != MEM)
5717 addr = XEXP (DECL_RTL (exp), 0);
5718 if (GET_CODE (addr) == MEM)
5719 addr = gen_rtx_MEM (Pmode,
5720 fix_lexical_addr (XEXP (addr, 0), exp));
5722 addr = fix_lexical_addr (addr, exp);
5723 temp = change_address (DECL_RTL (exp), mode, addr);
5726 /* This is the case of an array whose size is to be determined
5727 from its initializer, while the initializer is still being parsed.
5730 else if (GET_CODE (DECL_RTL (exp)) == MEM
5731 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5732 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5733 XEXP (DECL_RTL (exp), 0));
5735 /* If DECL_RTL is memory, we are in the normal case and either
5736 the address is not valid or it is not a register and -fforce-addr
5737 is specified, get the address into a register. */
5739 else if (GET_CODE (DECL_RTL (exp)) == MEM
5740 && modifier != EXPAND_CONST_ADDRESS
5741 && modifier != EXPAND_SUM
5742 && modifier != EXPAND_INITIALIZER
5743 && (! memory_address_p (DECL_MODE (exp),
5744 XEXP (DECL_RTL (exp), 0))
5746 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5747 temp = change_address (DECL_RTL (exp), VOIDmode,
5748 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5750 /* If we got something, return it. But first, set the alignment
5751 the address is a register. */
5754 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5755 mark_reg_pointer (XEXP (temp, 0),
5756 DECL_ALIGN (exp) / BITS_PER_UNIT);
5761 /* If the mode of DECL_RTL does not match that of the decl, it
5762 must be a promoted value. We return a SUBREG of the wanted mode,
5763 but mark it so that we know that it was already extended. */
5765 if (GET_CODE (DECL_RTL (exp)) == REG
5766 && GET_MODE (DECL_RTL (exp)) != mode)
5768 /* Get the signedness used for this variable. Ensure we get the
5769 same mode we got when the variable was declared. */
5770 if (GET_MODE (DECL_RTL (exp))
5771 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5774 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
5775 SUBREG_PROMOTED_VAR_P (temp) = 1;
5776 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5780 return DECL_RTL (exp);
5783 return immed_double_const (TREE_INT_CST_LOW (exp),
5784 TREE_INT_CST_HIGH (exp),
5788 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5789 EXPAND_MEMORY_USE_BAD);
5792 /* If optimized, generate immediate CONST_DOUBLE
5793 which will be turned into memory by reload if necessary.
5795 We used to force a register so that loop.c could see it. But
5796 this does not allow gen_* patterns to perform optimizations with
5797 the constants. It also produces two insns in cases like "x = 1.0;".
5798 On most machines, floating-point constants are not permitted in
5799 many insns, so we'd end up copying it to a register in any case.
5801 Now, we do the copying in expand_binop, if appropriate. */
5802 return immed_real_const (exp);
5806 if (! TREE_CST_RTL (exp))
5807 output_constant_def (exp);
5809 /* TREE_CST_RTL probably contains a constant address.
5810 On RISC machines where a constant address isn't valid,
5811 make some insns to get that address into a register. */
5812 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5813 && modifier != EXPAND_CONST_ADDRESS
5814 && modifier != EXPAND_INITIALIZER
5815 && modifier != EXPAND_SUM
5816 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5818 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5819 return change_address (TREE_CST_RTL (exp), VOIDmode,
5820 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5821 return TREE_CST_RTL (exp);
5823 case EXPR_WITH_FILE_LOCATION:
5826 char *saved_input_filename = input_filename;
5827 int saved_lineno = lineno;
5828 input_filename = EXPR_WFL_FILENAME (exp);
5829 lineno = EXPR_WFL_LINENO (exp);
5830 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
5831 emit_line_note (input_filename, lineno);
5832 /* Possibly avoid switching back and forth here */
5833 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
5834 input_filename = saved_input_filename;
5835 lineno = saved_lineno;
5840 context = decl_function_context (exp);
5842 /* If this SAVE_EXPR was at global context, assume we are an
5843 initialization function and move it into our context. */
5845 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
5847 /* We treat inline_function_decl as an alias for the current function
5848 because that is the inline function whose vars, types, etc.
5849 are being merged into the current function.
5850 See expand_inline_function. */
5851 if (context == current_function_decl || context == inline_function_decl)
5854 /* If this is non-local, handle it. */
5857 /* The following call just exists to abort if the context is
5858 not of a containing function. */
5859 find_function_data (context);
5861 temp = SAVE_EXPR_RTL (exp);
5862 if (temp && GET_CODE (temp) == REG)
5864 put_var_into_stack (exp);
5865 temp = SAVE_EXPR_RTL (exp);
5867 if (temp == 0 || GET_CODE (temp) != MEM)
5869 return change_address (temp, mode,
5870 fix_lexical_addr (XEXP (temp, 0), exp));
5872 if (SAVE_EXPR_RTL (exp) == 0)
5874 if (mode == VOIDmode)
5877 temp = assign_temp (type, 3, 0, 0);
5879 SAVE_EXPR_RTL (exp) = temp;
5880 if (!optimize && GET_CODE (temp) == REG)
5881 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
5884 /* If the mode of TEMP does not match that of the expression, it
5885 must be a promoted value. We pass store_expr a SUBREG of the
5886 wanted mode but mark it so that we know that it was already
5887 extended. Note that `unsignedp' was modified above in
5890 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5892 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5893 SUBREG_PROMOTED_VAR_P (temp) = 1;
5894 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5897 if (temp == const0_rtx)
5898 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5899 EXPAND_MEMORY_USE_BAD);
5901 store_expr (TREE_OPERAND (exp, 0), temp, 0);
5903 TREE_USED (exp) = 1;
5906 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5907 must be a promoted value. We return a SUBREG of the wanted mode,
5908 but mark it so that we know that it was already extended. */
5910 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5911 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5913 /* Compute the signedness and make the proper SUBREG. */
5914 promote_mode (type, mode, &unsignedp, 0);
5915 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5916 SUBREG_PROMOTED_VAR_P (temp) = 1;
5917 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5921 return SAVE_EXPR_RTL (exp);
5926 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5927 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5931 case PLACEHOLDER_EXPR:
5933 tree placeholder_expr;
5935 /* If there is an object on the head of the placeholder list,
5936 see if some object in it is of type TYPE or a pointer to it. For
5937 further information, see tree.def. */
5938 for (placeholder_expr = placeholder_list;
5939 placeholder_expr != 0;
5940 placeholder_expr = TREE_CHAIN (placeholder_expr))
5942 tree need_type = TYPE_MAIN_VARIANT (type);
5944 tree old_list = placeholder_list;
5947 /* Find the outermost reference that is of the type we want.
5948 If none, see if any object has a type that is a pointer to
5949 the type we want. */
5950 for (elt = TREE_PURPOSE (placeholder_expr);
5951 elt != 0 && object == 0;
5953 = ((TREE_CODE (elt) == COMPOUND_EXPR
5954 || TREE_CODE (elt) == COND_EXPR)
5955 ? TREE_OPERAND (elt, 1)
5956 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5957 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5958 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5959 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5960 ? TREE_OPERAND (elt, 0) : 0))
5961 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5964 for (elt = TREE_PURPOSE (placeholder_expr);
5965 elt != 0 && object == 0;
5967 = ((TREE_CODE (elt) == COMPOUND_EXPR
5968 || TREE_CODE (elt) == COND_EXPR)
5969 ? TREE_OPERAND (elt, 1)
5970 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5971 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5972 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5973 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5974 ? TREE_OPERAND (elt, 0) : 0))
5975 if (POINTER_TYPE_P (TREE_TYPE (elt))
5976 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
5978 object = build1 (INDIRECT_REF, need_type, elt);
5982 /* Expand this object skipping the list entries before
5983 it was found in case it is also a PLACEHOLDER_EXPR.
5984 In that case, we want to translate it using subsequent
5986 placeholder_list = TREE_CHAIN (placeholder_expr);
5987 temp = expand_expr (object, original_target, tmode,
5989 placeholder_list = old_list;
5995 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5998 case WITH_RECORD_EXPR:
5999 /* Put the object on the placeholder list, expand our first operand,
6000 and pop the list. */
6001 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6003 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6004 tmode, ro_modifier);
6005 placeholder_list = TREE_CHAIN (placeholder_list);
6009 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6010 expand_goto (TREE_OPERAND (exp, 0));
6012 expand_computed_goto (TREE_OPERAND (exp, 0));
6016 expand_exit_loop_if_false (NULL_PTR,
6017 invert_truthvalue (TREE_OPERAND (exp, 0)));
6020 case LABELED_BLOCK_EXPR:
6021 if (LABELED_BLOCK_BODY (exp))
6022 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6023 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6026 case EXIT_BLOCK_EXPR:
6027 if (EXIT_BLOCK_RETURN (exp))
6028 sorry ("returned value in block_exit_expr");
6029 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6034 expand_start_loop (1);
6035 expand_expr_stmt (TREE_OPERAND (exp, 0));
6043 tree vars = TREE_OPERAND (exp, 0);
6044 int vars_need_expansion = 0;
6046 /* Need to open a binding contour here because
6047 if there are any cleanups they must be contained here. */
6048 expand_start_bindings (0);
6050 /* Mark the corresponding BLOCK for output in its proper place. */
6051 if (TREE_OPERAND (exp, 2) != 0
6052 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6053 insert_block (TREE_OPERAND (exp, 2));
6055 /* If VARS have not yet been expanded, expand them now. */
6058 if (DECL_RTL (vars) == 0)
6060 vars_need_expansion = 1;
6063 expand_decl_init (vars);
6064 vars = TREE_CHAIN (vars);
6067 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6069 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6075 if (RTL_EXPR_SEQUENCE (exp))
6077 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6079 emit_insns (RTL_EXPR_SEQUENCE (exp));
6080 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6082 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6083 free_temps_for_rtl_expr (exp);
6084 return RTL_EXPR_RTL (exp);
6087 /* If we don't need the result, just ensure we evaluate any
6092 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6093 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6094 EXPAND_MEMORY_USE_BAD);
6098 /* All elts simple constants => refer to a constant in memory. But
6099 if this is a non-BLKmode mode, let it store a field at a time
6100 since that should make a CONST_INT or CONST_DOUBLE when we
6101 fold. Likewise, if we have a target we can use, it is best to
6102 store directly into the target unless the type is large enough
6103 that memcpy will be used. If we are making an initializer and
6104 all operands are constant, put it in memory as well. */
6105 else if ((TREE_STATIC (exp)
6106 && ((mode == BLKmode
6107 && ! (target != 0 && safe_from_p (target, exp, 1)))
6108 || TREE_ADDRESSABLE (exp)
6109 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
6110 && (!MOVE_BY_PIECES_P
6111 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
6112 TYPE_ALIGN (type) / BITS_PER_UNIT))
6113 && ! mostly_zeros_p (exp))))
6114 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6116 rtx constructor = output_constant_def (exp);
6117 if (modifier != EXPAND_CONST_ADDRESS
6118 && modifier != EXPAND_INITIALIZER
6119 && modifier != EXPAND_SUM
6120 && (! memory_address_p (GET_MODE (constructor),
6121 XEXP (constructor, 0))
6123 && GET_CODE (XEXP (constructor, 0)) != REG)))
6124 constructor = change_address (constructor, VOIDmode,
6125 XEXP (constructor, 0));
6131 /* Handle calls that pass values in multiple non-contiguous
6132 locations. The Irix 6 ABI has examples of this. */
6133 if (target == 0 || ! safe_from_p (target, exp, 1)
6134 || GET_CODE (target) == PARALLEL)
6136 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6137 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6139 target = assign_temp (type, 0, 1, 1);
6142 if (TREE_READONLY (exp))
6144 if (GET_CODE (target) == MEM)
6145 target = copy_rtx (target);
6147 RTX_UNCHANGING_P (target) = 1;
6150 store_constructor (exp, target, 0);
6156 tree exp1 = TREE_OPERAND (exp, 0);
6159 tree string = string_constant (exp1, &index);
6162 /* Try to optimize reads from const strings. */
6164 && TREE_CODE (string) == STRING_CST
6165 && TREE_CODE (index) == INTEGER_CST
6166 && !TREE_INT_CST_HIGH (index)
6167 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
6168 && GET_MODE_CLASS (mode) == MODE_INT
6169 && GET_MODE_SIZE (mode) == 1
6170 && modifier != EXPAND_MEMORY_USE_WO)
6171 return GEN_INT (TREE_STRING_POINTER (string)[i]);
6173 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6174 op0 = memory_address (mode, op0);
6176 if (current_function_check_memory_usage && !AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6178 enum memory_use_mode memory_usage;
6179 memory_usage = get_memory_usage_from_modifier (modifier);
6181 if (memory_usage != MEMORY_USE_DONT)
6183 in_check_memory_usage = 1;
6184 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6186 GEN_INT (int_size_in_bytes (type)),
6187 TYPE_MODE (sizetype),
6188 GEN_INT (memory_usage),
6189 TYPE_MODE (integer_type_node));
6190 in_check_memory_usage = 0;
6194 temp = gen_rtx_MEM (mode, op0);
6195 /* If address was computed by addition,
6196 mark this as an element of an aggregate. */
6197 if (TREE_CODE (exp1) == PLUS_EXPR
6198 || (TREE_CODE (exp1) == SAVE_EXPR
6199 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
6200 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
6201 || (TREE_CODE (exp1) == ADDR_EXPR
6202 && (exp2 = TREE_OPERAND (exp1, 0))
6203 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
6204 MEM_SET_IN_STRUCT_P (temp, 1);
6206 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6207 MEM_ALIAS_SET (temp) = get_alias_set (exp);
6209 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6210 here, because, in C and C++, the fact that a location is accessed
6211 through a pointer to const does not mean that the value there can
6212 never change. Languages where it can never change should
6213 also set TREE_STATIC. */
6214 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6219 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6223 tree array = TREE_OPERAND (exp, 0);
6224 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6225 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6226 tree index = TREE_OPERAND (exp, 1);
6227 tree index_type = TREE_TYPE (index);
6230 /* Optimize the special-case of a zero lower bound.
6232 We convert the low_bound to sizetype to avoid some problems
6233 with constant folding. (E.g. suppose the lower bound is 1,
6234 and its mode is QI. Without the conversion, (ARRAY
6235 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6236 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
6238 But sizetype isn't quite right either (especially if
6239 the lowbound is negative). FIXME */
6241 if (! integer_zerop (low_bound))
6242 index = fold (build (MINUS_EXPR, index_type, index,
6243 convert (sizetype, low_bound)));
6245 /* Fold an expression like: "foo"[2].
6246 This is not done in fold so it won't happen inside &.
6247 Don't fold if this is for wide characters since it's too
6248 difficult to do correctly and this is a very rare case. */
6250 if (TREE_CODE (array) == STRING_CST
6251 && TREE_CODE (index) == INTEGER_CST
6252 && !TREE_INT_CST_HIGH (index)
6253 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
6254 && GET_MODE_CLASS (mode) == MODE_INT
6255 && GET_MODE_SIZE (mode) == 1)
6256 return GEN_INT (TREE_STRING_POINTER (array)[i]);
6258 /* If this is a constant index into a constant array,
6259 just get the value from the array. Handle both the cases when
6260 we have an explicit constructor and when our operand is a variable
6261 that was declared const. */
6263 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
6265 if (TREE_CODE (index) == INTEGER_CST
6266 && TREE_INT_CST_HIGH (index) == 0)
6268 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
6270 i = TREE_INT_CST_LOW (index);
6272 elem = TREE_CHAIN (elem);
6274 return expand_expr (fold (TREE_VALUE (elem)), target,
6275 tmode, ro_modifier);
6279 else if (optimize >= 1
6280 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6281 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6282 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6284 if (TREE_CODE (index) == INTEGER_CST)
6286 tree init = DECL_INITIAL (array);
6288 i = TREE_INT_CST_LOW (index);
6289 if (TREE_CODE (init) == CONSTRUCTOR)
6291 tree elem = CONSTRUCTOR_ELTS (init);
6294 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
6295 elem = TREE_CHAIN (elem);
6297 return expand_expr (fold (TREE_VALUE (elem)), target,
6298 tmode, ro_modifier);
6300 else if (TREE_CODE (init) == STRING_CST
6301 && TREE_INT_CST_HIGH (index) == 0
6302 && (TREE_INT_CST_LOW (index)
6303 < TREE_STRING_LENGTH (init)))
6305 (TREE_STRING_POINTER
6306 (init)[TREE_INT_CST_LOW (index)]));
6311 /* ... fall through ... */
6315 /* If the operand is a CONSTRUCTOR, we can just extract the
6316 appropriate field if it is present. Don't do this if we have
6317 already written the data since we want to refer to that copy
6318 and varasm.c assumes that's what we'll do. */
6319 if (code != ARRAY_REF
6320 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6321 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6325 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6326 elt = TREE_CHAIN (elt))
6327 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6328 /* We can normally use the value of the field in the
6329 CONSTRUCTOR. However, if this is a bitfield in
6330 an integral mode that we can fit in a HOST_WIDE_INT,
6331 we must mask only the number of bits in the bitfield,
6332 since this is done implicitly by the constructor. If
6333 the bitfield does not meet either of those conditions,
6334 we can't do this optimization. */
6335 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6336 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6338 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6339 <= HOST_BITS_PER_WIDE_INT))))
6341 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6342 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6344 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
6346 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6348 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6349 op0 = expand_and (op0, op1, target);
6353 enum machine_mode imode
6354 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6356 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6359 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6361 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6371 enum machine_mode mode1;
6377 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6378 &mode1, &unsignedp, &volatilep,
6381 /* If we got back the original object, something is wrong. Perhaps
6382 we are evaluating an expression too early. In any event, don't
6383 infinitely recurse. */
6387 /* If TEM's type is a union of variable size, pass TARGET to the inner
6388 computation, since it will need a temporary and TARGET is known
6389 to suffice. This occurs in unchecked conversion in Ada. */
6391 op0 = expand_expr (tem,
6392 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6393 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6395 ? target : NULL_RTX),
6397 modifier == EXPAND_INITIALIZER
6398 ? modifier : EXPAND_NORMAL);
6400 /* If this is a constant, put it into a register if it is a
6401 legitimate constant and memory if it isn't. */
6402 if (CONSTANT_P (op0))
6404 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6405 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
6406 op0 = force_reg (mode, op0);
6408 op0 = validize_mem (force_const_mem (mode, op0));
6413 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6415 if (GET_CODE (op0) != MEM)
6418 if (GET_MODE (offset_rtx) != ptr_mode)
6420 #ifdef POINTERS_EXTEND_UNSIGNED
6421 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6423 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6427 /* A constant address in TO_RTX can have VOIDmode, we must not try
6428 to call force_reg for that case. Avoid that case. */
6429 if (GET_CODE (op0) == MEM
6430 && GET_MODE (op0) == BLKmode
6431 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6433 && (bitpos % bitsize) == 0
6434 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6435 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6437 rtx temp = change_address (op0, mode1,
6438 plus_constant (XEXP (op0, 0),
6441 if (GET_CODE (XEXP (temp, 0)) == REG)
6444 op0 = change_address (op0, mode1,
6445 force_reg (GET_MODE (XEXP (temp, 0)),
6451 op0 = change_address (op0, VOIDmode,
6452 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6453 force_reg (ptr_mode, offset_rtx)));
6456 /* Don't forget about volatility even if this is a bitfield. */
6457 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6459 op0 = copy_rtx (op0);
6460 MEM_VOLATILE_P (op0) = 1;
6463 /* Check the access. */
6464 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
6466 enum memory_use_mode memory_usage;
6467 memory_usage = get_memory_usage_from_modifier (modifier);
6469 if (memory_usage != MEMORY_USE_DONT)
6474 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6475 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6477 /* Check the access right of the pointer. */
6478 if (size > BITS_PER_UNIT)
6479 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6481 GEN_INT (size / BITS_PER_UNIT),
6482 TYPE_MODE (sizetype),
6483 GEN_INT (memory_usage),
6484 TYPE_MODE (integer_type_node));
6488 /* In cases where an aligned union has an unaligned object
6489 as a field, we might be extracting a BLKmode value from
6490 an integer-mode (e.g., SImode) object. Handle this case
6491 by doing the extract into an object as wide as the field
6492 (which we know to be the width of a basic mode), then
6493 storing into memory, and changing the mode to BLKmode.
6494 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6495 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6496 if (mode1 == VOIDmode
6497 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6498 || (modifier != EXPAND_CONST_ADDRESS
6499 && modifier != EXPAND_INITIALIZER
6500 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6501 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6502 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6503 /* If the field isn't aligned enough to fetch as a memref,
6504 fetch it as a bit field. */
6505 || (SLOW_UNALIGNED_ACCESS
6506 && ((TYPE_ALIGN (TREE_TYPE (tem)) < (unsigned int) GET_MODE_ALIGNMENT (mode))
6507 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
6509 enum machine_mode ext_mode = mode;
6511 if (ext_mode == BLKmode)
6512 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6514 if (ext_mode == BLKmode)
6516 /* In this case, BITPOS must start at a byte boundary and
6517 TARGET, if specified, must be a MEM. */
6518 if (GET_CODE (op0) != MEM
6519 || (target != 0 && GET_CODE (target) != MEM)
6520 || bitpos % BITS_PER_UNIT != 0)
6523 op0 = change_address (op0, VOIDmode,
6524 plus_constant (XEXP (op0, 0),
6525 bitpos / BITS_PER_UNIT));
6527 target = assign_temp (type, 0, 1, 1);
6529 emit_block_move (target, op0,
6530 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6537 op0 = validize_mem (op0);
6539 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6540 mark_reg_pointer (XEXP (op0, 0), alignment);
6542 op0 = extract_bit_field (op0, bitsize, bitpos,
6543 unsignedp, target, ext_mode, ext_mode,
6545 int_size_in_bytes (TREE_TYPE (tem)));
6547 /* If the result is a record type and BITSIZE is narrower than
6548 the mode of OP0, an integral mode, and this is a big endian
6549 machine, we must put the field into the high-order bits. */
6550 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6551 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6552 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6553 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6554 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6558 if (mode == BLKmode)
6560 rtx new = assign_stack_temp (ext_mode,
6561 bitsize / BITS_PER_UNIT, 0);
6563 emit_move_insn (new, op0);
6564 op0 = copy_rtx (new);
6565 PUT_MODE (op0, BLKmode);
6566 MEM_SET_IN_STRUCT_P (op0, 1);
6572 /* If the result is BLKmode, use that to access the object
6574 if (mode == BLKmode)
6577 /* Get a reference to just this component. */
6578 if (modifier == EXPAND_CONST_ADDRESS
6579 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6580 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6581 (bitpos / BITS_PER_UNIT)));
6583 op0 = change_address (op0, mode1,
6584 plus_constant (XEXP (op0, 0),
6585 (bitpos / BITS_PER_UNIT)));
6587 if (GET_CODE (op0) == MEM)
6588 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6590 if (GET_CODE (XEXP (op0, 0)) == REG)
6591 mark_reg_pointer (XEXP (op0, 0), alignment);
6593 MEM_SET_IN_STRUCT_P (op0, 1);
6594 MEM_VOLATILE_P (op0) |= volatilep;
6595 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6596 || modifier == EXPAND_CONST_ADDRESS
6597 || modifier == EXPAND_INITIALIZER)
6599 else if (target == 0)
6600 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6602 convert_move (target, op0, unsignedp);
6606 /* Intended for a reference to a buffer of a file-object in Pascal.
6607 But it's not certain that a special tree code will really be
6608 necessary for these. INDIRECT_REF might work for them. */
6614 /* Pascal set IN expression.
6617 rlo = set_low - (set_low%bits_per_word);
6618 the_word = set [ (index - rlo)/bits_per_word ];
6619 bit_index = index % bits_per_word;
6620 bitmask = 1 << bit_index;
6621 return !!(the_word & bitmask); */
6623 tree set = TREE_OPERAND (exp, 0);
6624 tree index = TREE_OPERAND (exp, 1);
6625 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6626 tree set_type = TREE_TYPE (set);
6627 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6628 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6629 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6630 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6631 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6632 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6633 rtx setaddr = XEXP (setval, 0);
6634 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6636 rtx diff, quo, rem, addr, bit, result;
6638 preexpand_calls (exp);
6640 /* If domain is empty, answer is no. Likewise if index is constant
6641 and out of bounds. */
6642 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6643 && TREE_CODE (set_low_bound) == INTEGER_CST
6644 && tree_int_cst_lt (set_high_bound, set_low_bound))
6645 || (TREE_CODE (index) == INTEGER_CST
6646 && TREE_CODE (set_low_bound) == INTEGER_CST
6647 && tree_int_cst_lt (index, set_low_bound))
6648 || (TREE_CODE (set_high_bound) == INTEGER_CST
6649 && TREE_CODE (index) == INTEGER_CST
6650 && tree_int_cst_lt (set_high_bound, index))))
6654 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6656 /* If we get here, we have to generate the code for both cases
6657 (in range and out of range). */
6659 op0 = gen_label_rtx ();
6660 op1 = gen_label_rtx ();
6662 if (! (GET_CODE (index_val) == CONST_INT
6663 && GET_CODE (lo_r) == CONST_INT))
6665 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6666 GET_MODE (index_val), iunsignedp, 0, op1);
6669 if (! (GET_CODE (index_val) == CONST_INT
6670 && GET_CODE (hi_r) == CONST_INT))
6672 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6673 GET_MODE (index_val), iunsignedp, 0, op1);
6676 /* Calculate the element number of bit zero in the first word
6678 if (GET_CODE (lo_r) == CONST_INT)
6679 rlow = GEN_INT (INTVAL (lo_r)
6680 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
6682 rlow = expand_binop (index_mode, and_optab, lo_r,
6683 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
6684 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6686 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6687 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6689 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6690 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6691 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6692 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6694 addr = memory_address (byte_mode,
6695 expand_binop (index_mode, add_optab, diff,
6696 setaddr, NULL_RTX, iunsignedp,
6699 /* Extract the bit we want to examine */
6700 bit = expand_shift (RSHIFT_EXPR, byte_mode,
6701 gen_rtx_MEM (byte_mode, addr),
6702 make_tree (TREE_TYPE (index), rem),
6704 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6705 GET_MODE (target) == byte_mode ? target : 0,
6706 1, OPTAB_LIB_WIDEN);
6708 if (result != target)
6709 convert_move (target, result, 1);
6711 /* Output the code to handle the out-of-range case. */
6714 emit_move_insn (target, const0_rtx);
6719 case WITH_CLEANUP_EXPR:
6720 if (RTL_EXPR_RTL (exp) == 0)
6723 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6724 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6726 /* That's it for this cleanup. */
6727 TREE_OPERAND (exp, 2) = 0;
6729 return RTL_EXPR_RTL (exp);
6731 case CLEANUP_POINT_EXPR:
6733 /* Start a new binding layer that will keep track of all cleanup
6734 actions to be performed. */
6735 expand_start_bindings (0);
6737 target_temp_slot_level = temp_slot_level;
6739 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6740 /* If we're going to use this value, load it up now. */
6742 op0 = force_not_mem (op0);
6743 preserve_temp_slots (op0);
6744 expand_end_bindings (NULL_TREE, 0, 0);
6749 /* Check for a built-in function. */
6750 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6751 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6753 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6754 return expand_builtin (exp, target, subtarget, tmode, ignore);
6756 /* If this call was expanded already by preexpand_calls,
6757 just return the result we got. */
6758 if (CALL_EXPR_RTL (exp) != 0)
6759 return CALL_EXPR_RTL (exp);
6761 return expand_call (exp, target, ignore);
6763 case NON_LVALUE_EXPR:
6766 case REFERENCE_EXPR:
6767 if (TREE_CODE (type) == UNION_TYPE)
6769 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6772 if (mode != BLKmode)
6773 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6775 target = assign_temp (type, 0, 1, 1);
6778 if (GET_CODE (target) == MEM)
6779 /* Store data into beginning of memory target. */
6780 store_expr (TREE_OPERAND (exp, 0),
6781 change_address (target, TYPE_MODE (valtype), 0), 0);
6783 else if (GET_CODE (target) == REG)
6784 /* Store this field into a union of the proper type. */
6785 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6786 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6788 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))),
6793 /* Return the entire union. */
6797 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6799 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
6802 /* If the signedness of the conversion differs and OP0 is
6803 a promoted SUBREG, clear that indication since we now
6804 have to do the proper extension. */
6805 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6806 && GET_CODE (op0) == SUBREG)
6807 SUBREG_PROMOTED_VAR_P (op0) = 0;
6812 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
6813 if (GET_MODE (op0) == mode)
6816 /* If OP0 is a constant, just convert it into the proper mode. */
6817 if (CONSTANT_P (op0))
6819 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6820 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6822 if (modifier == EXPAND_INITIALIZER)
6823 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
6827 convert_to_mode (mode, op0,
6828 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6830 convert_move (target, op0,
6831 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6835 /* We come here from MINUS_EXPR when the second operand is a
6838 this_optab = add_optab;
6840 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6841 something else, make sure we add the register to the constant and
6842 then to the other thing. This case can occur during strength
6843 reduction and doing it this way will produce better code if the
6844 frame pointer or argument pointer is eliminated.
6846 fold-const.c will ensure that the constant is always in the inner
6847 PLUS_EXPR, so the only case we need to do anything about is if
6848 sp, ap, or fp is our second argument, in which case we must swap
6849 the innermost first argument and our second argument. */
6851 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6852 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6853 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6854 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6855 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6856 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6858 tree t = TREE_OPERAND (exp, 1);
6860 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6861 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
6864 /* If the result is to be ptr_mode and we are adding an integer to
6865 something, we might be forming a constant. So try to use
6866 plus_constant. If it produces a sum and we can't accept it,
6867 use force_operand. This allows P = &ARR[const] to generate
6868 efficient code on machines where a SYMBOL_REF is not a valid
6871 If this is an EXPAND_SUM call, always return the sum. */
6872 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
6873 || mode == ptr_mode)
6875 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
6876 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6877 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
6879 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
6881 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
6882 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6883 op1 = force_operand (op1, target);
6887 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6888 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
6889 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
6891 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6893 if (! CONSTANT_P (op0))
6895 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6896 VOIDmode, modifier);
6897 /* Don't go to both_summands if modifier
6898 says it's not right to return a PLUS. */
6899 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6903 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
6904 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6905 op0 = force_operand (op0, target);
6910 /* No sense saving up arithmetic to be done
6911 if it's all in the wrong mode to form part of an address.
6912 And force_operand won't know whether to sign-extend or
6914 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6915 || mode != ptr_mode)
6918 preexpand_calls (exp);
6919 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6922 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
6923 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
6926 /* Make sure any term that's a sum with a constant comes last. */
6927 if (GET_CODE (op0) == PLUS
6928 && CONSTANT_P (XEXP (op0, 1)))
6934 /* If adding to a sum including a constant,
6935 associate it to put the constant outside. */
6936 if (GET_CODE (op1) == PLUS
6937 && CONSTANT_P (XEXP (op1, 1)))
6939 rtx constant_term = const0_rtx;
6941 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
6944 /* Ensure that MULT comes first if there is one. */
6945 else if (GET_CODE (op0) == MULT)
6946 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
6948 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
6950 /* Let's also eliminate constants from op0 if possible. */
6951 op0 = eliminate_constant_term (op0, &constant_term);
6953 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
6954 their sum should be a constant. Form it into OP1, since the
6955 result we want will then be OP0 + OP1. */
6957 temp = simplify_binary_operation (PLUS, mode, constant_term,
6962 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
6965 /* Put a constant term last and put a multiplication first. */
6966 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
6967 temp = op1, op1 = op0, op0 = temp;
6969 temp = simplify_binary_operation (PLUS, mode, op0, op1);
6970 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
6973 /* For initializers, we are allowed to return a MINUS of two
6974 symbolic constants. Here we handle all cases when both operands
6976 /* Handle difference of two symbolic constants,
6977 for the sake of an initializer. */
6978 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6979 && really_constant_p (TREE_OPERAND (exp, 0))
6980 && really_constant_p (TREE_OPERAND (exp, 1)))
6982 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
6983 VOIDmode, ro_modifier);
6984 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6985 VOIDmode, ro_modifier);
6987 /* If the last operand is a CONST_INT, use plus_constant of
6988 the negated constant. Else make the MINUS. */
6989 if (GET_CODE (op1) == CONST_INT)
6990 return plus_constant (op0, - INTVAL (op1));
6992 return gen_rtx_MINUS (mode, op0, op1);
6994 /* Convert A - const to A + (-const). */
6995 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6997 tree negated = fold (build1 (NEGATE_EXPR, type,
6998 TREE_OPERAND (exp, 1)));
7000 /* Deal with the case where we can't negate the constant
7002 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7004 tree newtype = signed_type (type);
7005 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
7006 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
7007 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
7009 if (! TREE_OVERFLOW (newneg))
7010 return expand_expr (convert (type,
7011 build (PLUS_EXPR, newtype,
7013 target, tmode, ro_modifier);
7017 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7021 this_optab = sub_optab;
7025 preexpand_calls (exp);
7026 /* If first operand is constant, swap them.
7027 Thus the following special case checks need only
7028 check the second operand. */
7029 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7031 register tree t1 = TREE_OPERAND (exp, 0);
7032 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7033 TREE_OPERAND (exp, 1) = t1;
7036 /* Attempt to return something suitable for generating an
7037 indexed address, for machines that support that. */
7039 if (modifier == EXPAND_SUM && mode == ptr_mode
7040 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7041 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7043 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7046 /* Apply distributive law if OP0 is x+c. */
7047 if (GET_CODE (op0) == PLUS
7048 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7049 return gen_rtx_PLUS (mode,
7050 gen_rtx_MULT (mode, XEXP (op0, 0),
7051 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7052 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7053 * INTVAL (XEXP (op0, 1))));
7055 if (GET_CODE (op0) != REG)
7056 op0 = force_operand (op0, NULL_RTX);
7057 if (GET_CODE (op0) != REG)
7058 op0 = copy_to_mode_reg (mode, op0);
7060 return gen_rtx_MULT (mode, op0,
7061 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7064 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7067 /* Check for multiplying things that have been extended
7068 from a narrower type. If this machine supports multiplying
7069 in that narrower type with a result in the desired type,
7070 do it that way, and avoid the explicit type-conversion. */
7071 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7072 && TREE_CODE (type) == INTEGER_TYPE
7073 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7074 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7075 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7076 && int_fits_type_p (TREE_OPERAND (exp, 1),
7077 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7078 /* Don't use a widening multiply if a shift will do. */
7079 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7080 > HOST_BITS_PER_WIDE_INT)
7081 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7083 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7084 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7086 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7087 /* If both operands are extended, they must either both
7088 be zero-extended or both be sign-extended. */
7089 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7091 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7093 enum machine_mode innermode
7094 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7095 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7096 ? smul_widen_optab : umul_widen_optab);
7097 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7098 ? umul_widen_optab : smul_widen_optab);
7099 if (mode == GET_MODE_WIDER_MODE (innermode))
7101 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7103 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7104 NULL_RTX, VOIDmode, 0);
7105 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7106 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7109 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7110 NULL_RTX, VOIDmode, 0);
7113 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7114 && innermode == word_mode)
7117 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7118 NULL_RTX, VOIDmode, 0);
7119 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7120 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7123 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7124 NULL_RTX, VOIDmode, 0);
7125 temp = expand_binop (mode, other_optab, op0, op1, target,
7126 unsignedp, OPTAB_LIB_WIDEN);
7127 htem = expand_mult_highpart_adjust (innermode,
7128 gen_highpart (innermode, temp),
7130 gen_highpart (innermode, temp),
7132 emit_move_insn (gen_highpart (innermode, temp), htem);
7137 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7138 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7139 return expand_mult (mode, op0, op1, target, unsignedp);
7141 case TRUNC_DIV_EXPR:
7142 case FLOOR_DIV_EXPR:
7144 case ROUND_DIV_EXPR:
7145 case EXACT_DIV_EXPR:
7146 preexpand_calls (exp);
7147 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7149 /* Possible optimization: compute the dividend with EXPAND_SUM
7150 then if the divisor is constant can optimize the case
7151 where some terms of the dividend have coeffs divisible by it. */
7152 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7153 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7154 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7157 this_optab = flodiv_optab;
7160 case TRUNC_MOD_EXPR:
7161 case FLOOR_MOD_EXPR:
7163 case ROUND_MOD_EXPR:
7164 preexpand_calls (exp);
7165 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7167 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7168 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7169 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7171 case FIX_ROUND_EXPR:
7172 case FIX_FLOOR_EXPR:
7174 abort (); /* Not used for C. */
7176 case FIX_TRUNC_EXPR:
7177 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7179 target = gen_reg_rtx (mode);
7180 expand_fix (target, op0, unsignedp);
7184 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7186 target = gen_reg_rtx (mode);
7187 /* expand_float can't figure out what to do if FROM has VOIDmode.
7188 So give it the correct mode. With -O, cse will optimize this. */
7189 if (GET_MODE (op0) == VOIDmode)
7190 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7192 expand_float (target, op0,
7193 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7197 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7198 temp = expand_unop (mode, neg_optab, op0, target, 0);
7204 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7206 /* Handle complex values specially. */
7207 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7208 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7209 return expand_complex_abs (mode, op0, target, unsignedp);
7211 /* Unsigned abs is simply the operand. Testing here means we don't
7212 risk generating incorrect code below. */
7213 if (TREE_UNSIGNED (type))
7216 return expand_abs (mode, op0, target,
7217 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7221 target = original_target;
7222 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7223 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7224 || GET_MODE (target) != mode
7225 || (GET_CODE (target) == REG
7226 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7227 target = gen_reg_rtx (mode);
7228 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7229 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7231 /* First try to do it with a special MIN or MAX instruction.
7232 If that does not win, use a conditional jump to select the proper
7234 this_optab = (TREE_UNSIGNED (type)
7235 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7236 : (code == MIN_EXPR ? smin_optab : smax_optab));
7238 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7243 /* At this point, a MEM target is no longer useful; we will get better
7246 if (GET_CODE (target) == MEM)
7247 target = gen_reg_rtx (mode);
7250 emit_move_insn (target, op0);
7252 op0 = gen_label_rtx ();
7254 /* If this mode is an integer too wide to compare properly,
7255 compare word by word. Rely on cse to optimize constant cases. */
7256 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
7258 if (code == MAX_EXPR)
7259 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7260 target, op1, NULL_RTX, op0);
7262 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7263 op1, target, NULL_RTX, op0);
7264 emit_move_insn (target, op1);
7268 if (code == MAX_EXPR)
7269 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
7270 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
7271 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
7273 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
7274 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
7275 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
7276 if (temp == const0_rtx)
7277 emit_move_insn (target, op1);
7278 else if (temp != const_true_rtx)
7280 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
7281 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
7284 emit_move_insn (target, op1);
7291 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7292 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7298 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7299 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7304 /* ??? Can optimize bitwise operations with one arg constant.
7305 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7306 and (a bitwise1 b) bitwise2 b (etc)
7307 but that is probably not worth while. */
7309 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7310 boolean values when we want in all cases to compute both of them. In
7311 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7312 as actual zero-or-1 values and then bitwise anding. In cases where
7313 there cannot be any side effects, better code would be made by
7314 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7315 how to recognize those cases. */
7317 case TRUTH_AND_EXPR:
7319 this_optab = and_optab;
7324 this_optab = ior_optab;
7327 case TRUTH_XOR_EXPR:
7329 this_optab = xor_optab;
7336 preexpand_calls (exp);
7337 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7339 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7340 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7343 /* Could determine the answer when only additive constants differ. Also,
7344 the addition of one can be handled by changing the condition. */
7351 preexpand_calls (exp);
7352 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7356 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7357 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7359 && GET_CODE (original_target) == REG
7360 && (GET_MODE (original_target)
7361 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7363 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7366 if (temp != original_target)
7367 temp = copy_to_reg (temp);
7369 op1 = gen_label_rtx ();
7370 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7371 GET_MODE (temp), unsignedp, 0, op1);
7372 emit_move_insn (temp, const1_rtx);
7377 /* If no set-flag instruction, must generate a conditional
7378 store into a temporary variable. Drop through
7379 and handle this like && and ||. */
7381 case TRUTH_ANDIF_EXPR:
7382 case TRUTH_ORIF_EXPR:
7384 && (target == 0 || ! safe_from_p (target, exp, 1)
7385 /* Make sure we don't have a hard reg (such as function's return
7386 value) live across basic blocks, if not optimizing. */
7387 || (!optimize && GET_CODE (target) == REG
7388 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7389 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7392 emit_clr_insn (target);
7394 op1 = gen_label_rtx ();
7395 jumpifnot (exp, op1);
7398 emit_0_to_1_insn (target);
7401 return ignore ? const0_rtx : target;
7403 case TRUTH_NOT_EXPR:
7404 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7405 /* The parser is careful to generate TRUTH_NOT_EXPR
7406 only with operands that are always zero or one. */
7407 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7408 target, 1, OPTAB_LIB_WIDEN);
7414 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7416 return expand_expr (TREE_OPERAND (exp, 1),
7417 (ignore ? const0_rtx : target),
7421 /* If we would have a "singleton" (see below) were it not for a
7422 conversion in each arm, bring that conversion back out. */
7423 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7424 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7425 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7426 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7428 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7429 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7431 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7432 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7433 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7434 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7435 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7436 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7437 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7438 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7439 return expand_expr (build1 (NOP_EXPR, type,
7440 build (COND_EXPR, TREE_TYPE (true),
7441 TREE_OPERAND (exp, 0),
7443 target, tmode, modifier);
7447 /* Note that COND_EXPRs whose type is a structure or union
7448 are required to be constructed to contain assignments of
7449 a temporary variable, so that we can evaluate them here
7450 for side effect only. If type is void, we must do likewise. */
7452 /* If an arm of the branch requires a cleanup,
7453 only that cleanup is performed. */
7456 tree binary_op = 0, unary_op = 0;
7458 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7459 convert it to our mode, if necessary. */
7460 if (integer_onep (TREE_OPERAND (exp, 1))
7461 && integer_zerop (TREE_OPERAND (exp, 2))
7462 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7466 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7471 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7472 if (GET_MODE (op0) == mode)
7476 target = gen_reg_rtx (mode);
7477 convert_move (target, op0, unsignedp);
7481 /* Check for X ? A + B : A. If we have this, we can copy A to the
7482 output and conditionally add B. Similarly for unary operations.
7483 Don't do this if X has side-effects because those side effects
7484 might affect A or B and the "?" operation is a sequence point in
7485 ANSI. (operand_equal_p tests for side effects.) */
7487 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7488 && operand_equal_p (TREE_OPERAND (exp, 2),
7489 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7490 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7491 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7492 && operand_equal_p (TREE_OPERAND (exp, 1),
7493 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7494 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7495 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7496 && operand_equal_p (TREE_OPERAND (exp, 2),
7497 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7498 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7499 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7500 && operand_equal_p (TREE_OPERAND (exp, 1),
7501 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7502 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7504 /* If we are not to produce a result, we have no target. Otherwise,
7505 if a target was specified use it; it will not be used as an
7506 intermediate target unless it is safe. If no target, use a
7511 else if (original_target
7512 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7513 || (singleton && GET_CODE (original_target) == REG
7514 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7515 && original_target == var_rtx (singleton)))
7516 && GET_MODE (original_target) == mode
7517 #ifdef HAVE_conditional_move
7518 && (! can_conditionally_move_p (mode)
7519 || GET_CODE (original_target) == REG
7520 || TREE_ADDRESSABLE (type))
7522 && ! (GET_CODE (original_target) == MEM
7523 && MEM_VOLATILE_P (original_target)))
7524 temp = original_target;
7525 else if (TREE_ADDRESSABLE (type))
7528 temp = assign_temp (type, 0, 0, 1);
7530 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7531 do the test of X as a store-flag operation, do this as
7532 A + ((X != 0) << log C). Similarly for other simple binary
7533 operators. Only do for C == 1 if BRANCH_COST is low. */
7534 if (temp && singleton && binary_op
7535 && (TREE_CODE (binary_op) == PLUS_EXPR
7536 || TREE_CODE (binary_op) == MINUS_EXPR
7537 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7538 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7539 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7540 : integer_onep (TREE_OPERAND (binary_op, 1)))
7541 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7544 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7545 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7546 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7549 /* If we had X ? A : A + 1, do this as A + (X == 0).
7551 We have to invert the truth value here and then put it
7552 back later if do_store_flag fails. We cannot simply copy
7553 TREE_OPERAND (exp, 0) to another variable and modify that
7554 because invert_truthvalue can modify the tree pointed to
7556 if (singleton == TREE_OPERAND (exp, 1))
7557 TREE_OPERAND (exp, 0)
7558 = invert_truthvalue (TREE_OPERAND (exp, 0));
7560 result = do_store_flag (TREE_OPERAND (exp, 0),
7561 (safe_from_p (temp, singleton, 1)
7563 mode, BRANCH_COST <= 1);
7565 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7566 result = expand_shift (LSHIFT_EXPR, mode, result,
7567 build_int_2 (tree_log2
7571 (safe_from_p (temp, singleton, 1)
7572 ? temp : NULL_RTX), 0);
7576 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7577 return expand_binop (mode, boptab, op1, result, temp,
7578 unsignedp, OPTAB_LIB_WIDEN);
7580 else if (singleton == TREE_OPERAND (exp, 1))
7581 TREE_OPERAND (exp, 0)
7582 = invert_truthvalue (TREE_OPERAND (exp, 0));
7585 do_pending_stack_adjust ();
7587 op0 = gen_label_rtx ();
7589 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7593 /* If the target conflicts with the other operand of the
7594 binary op, we can't use it. Also, we can't use the target
7595 if it is a hard register, because evaluating the condition
7596 might clobber it. */
7598 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7599 || (GET_CODE (temp) == REG
7600 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7601 temp = gen_reg_rtx (mode);
7602 store_expr (singleton, temp, 0);
7605 expand_expr (singleton,
7606 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7607 if (singleton == TREE_OPERAND (exp, 1))
7608 jumpif (TREE_OPERAND (exp, 0), op0);
7610 jumpifnot (TREE_OPERAND (exp, 0), op0);
7612 start_cleanup_deferral ();
7613 if (binary_op && temp == 0)
7614 /* Just touch the other operand. */
7615 expand_expr (TREE_OPERAND (binary_op, 1),
7616 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7618 store_expr (build (TREE_CODE (binary_op), type,
7619 make_tree (type, temp),
7620 TREE_OPERAND (binary_op, 1)),
7623 store_expr (build1 (TREE_CODE (unary_op), type,
7624 make_tree (type, temp)),
7628 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7629 comparison operator. If we have one of these cases, set the
7630 output to A, branch on A (cse will merge these two references),
7631 then set the output to FOO. */
7633 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7634 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7635 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7636 TREE_OPERAND (exp, 1), 0)
7637 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7638 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7639 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7641 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7642 temp = gen_reg_rtx (mode);
7643 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7644 jumpif (TREE_OPERAND (exp, 0), op0);
7646 start_cleanup_deferral ();
7647 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7651 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7652 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7653 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7654 TREE_OPERAND (exp, 2), 0)
7655 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7656 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7657 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7659 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7660 temp = gen_reg_rtx (mode);
7661 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7662 jumpifnot (TREE_OPERAND (exp, 0), op0);
7664 start_cleanup_deferral ();
7665 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7670 op1 = gen_label_rtx ();
7671 jumpifnot (TREE_OPERAND (exp, 0), op0);
7673 start_cleanup_deferral ();
7675 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7677 expand_expr (TREE_OPERAND (exp, 1),
7678 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7679 end_cleanup_deferral ();
7681 emit_jump_insn (gen_jump (op1));
7684 start_cleanup_deferral ();
7686 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7688 expand_expr (TREE_OPERAND (exp, 2),
7689 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7692 end_cleanup_deferral ();
7703 /* Something needs to be initialized, but we didn't know
7704 where that thing was when building the tree. For example,
7705 it could be the return value of a function, or a parameter
7706 to a function which lays down in the stack, or a temporary
7707 variable which must be passed by reference.
7709 We guarantee that the expression will either be constructed
7710 or copied into our original target. */
7712 tree slot = TREE_OPERAND (exp, 0);
7713 tree cleanups = NULL_TREE;
7716 if (TREE_CODE (slot) != VAR_DECL)
7720 target = original_target;
7724 if (DECL_RTL (slot) != 0)
7726 target = DECL_RTL (slot);
7727 /* If we have already expanded the slot, so don't do
7729 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7734 target = assign_temp (type, 2, 0, 1);
7735 /* All temp slots at this level must not conflict. */
7736 preserve_temp_slots (target);
7737 DECL_RTL (slot) = target;
7738 if (TREE_ADDRESSABLE (slot))
7740 TREE_ADDRESSABLE (slot) = 0;
7741 mark_addressable (slot);
7744 /* Since SLOT is not known to the called function
7745 to belong to its stack frame, we must build an explicit
7746 cleanup. This case occurs when we must build up a reference
7747 to pass the reference as an argument. In this case,
7748 it is very likely that such a reference need not be
7751 if (TREE_OPERAND (exp, 2) == 0)
7752 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
7753 cleanups = TREE_OPERAND (exp, 2);
7758 /* This case does occur, when expanding a parameter which
7759 needs to be constructed on the stack. The target
7760 is the actual stack address that we want to initialize.
7761 The function we call will perform the cleanup in this case. */
7763 /* If we have already assigned it space, use that space,
7764 not target that we were passed in, as our target
7765 parameter is only a hint. */
7766 if (DECL_RTL (slot) != 0)
7768 target = DECL_RTL (slot);
7769 /* If we have already expanded the slot, so don't do
7771 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7776 DECL_RTL (slot) = target;
7777 /* If we must have an addressable slot, then make sure that
7778 the RTL that we just stored in slot is OK. */
7779 if (TREE_ADDRESSABLE (slot))
7781 TREE_ADDRESSABLE (slot) = 0;
7782 mark_addressable (slot);
7787 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
7788 /* Mark it as expanded. */
7789 TREE_OPERAND (exp, 1) = NULL_TREE;
7791 TREE_USED (slot) = 1;
7792 store_expr (exp1, target, 0);
7794 expand_decl_cleanup (NULL_TREE, cleanups);
7801 tree lhs = TREE_OPERAND (exp, 0);
7802 tree rhs = TREE_OPERAND (exp, 1);
7803 tree noncopied_parts = 0;
7804 tree lhs_type = TREE_TYPE (lhs);
7806 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7807 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7808 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7809 TYPE_NONCOPIED_PARTS (lhs_type));
7810 while (noncopied_parts != 0)
7812 expand_assignment (TREE_VALUE (noncopied_parts),
7813 TREE_PURPOSE (noncopied_parts), 0, 0);
7814 noncopied_parts = TREE_CHAIN (noncopied_parts);
7821 /* If lhs is complex, expand calls in rhs before computing it.
7822 That's so we don't compute a pointer and save it over a call.
7823 If lhs is simple, compute it first so we can give it as a
7824 target if the rhs is just a call. This avoids an extra temp and copy
7825 and that prevents a partial-subsumption which makes bad code.
7826 Actually we could treat component_ref's of vars like vars. */
7828 tree lhs = TREE_OPERAND (exp, 0);
7829 tree rhs = TREE_OPERAND (exp, 1);
7830 tree noncopied_parts = 0;
7831 tree lhs_type = TREE_TYPE (lhs);
7835 if (TREE_CODE (lhs) != VAR_DECL
7836 && TREE_CODE (lhs) != RESULT_DECL
7837 && TREE_CODE (lhs) != PARM_DECL
7838 && ! (TREE_CODE (lhs) == INDIRECT_REF
7839 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
7840 preexpand_calls (exp);
7842 /* Check for |= or &= of a bitfield of size one into another bitfield
7843 of size 1. In this case, (unless we need the result of the
7844 assignment) we can do this more efficiently with a
7845 test followed by an assignment, if necessary.
7847 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7848 things change so we do, this code should be enhanced to
7851 && TREE_CODE (lhs) == COMPONENT_REF
7852 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7853 || TREE_CODE (rhs) == BIT_AND_EXPR)
7854 && TREE_OPERAND (rhs, 0) == lhs
7855 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7856 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7857 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7859 rtx label = gen_label_rtx ();
7861 do_jump (TREE_OPERAND (rhs, 1),
7862 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7863 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7864 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7865 (TREE_CODE (rhs) == BIT_IOR_EXPR
7867 : integer_zero_node)),
7869 do_pending_stack_adjust ();
7874 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7875 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7876 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7877 TYPE_NONCOPIED_PARTS (lhs_type));
7879 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7880 while (noncopied_parts != 0)
7882 expand_assignment (TREE_PURPOSE (noncopied_parts),
7883 TREE_VALUE (noncopied_parts), 0, 0);
7884 noncopied_parts = TREE_CHAIN (noncopied_parts);
7890 if (!TREE_OPERAND (exp, 0))
7891 expand_null_return ();
7893 expand_return (TREE_OPERAND (exp, 0));
7896 case PREINCREMENT_EXPR:
7897 case PREDECREMENT_EXPR:
7898 return expand_increment (exp, 0, ignore);
7900 case POSTINCREMENT_EXPR:
7901 case POSTDECREMENT_EXPR:
7902 /* Faster to treat as pre-increment if result is not used. */
7903 return expand_increment (exp, ! ignore, ignore);
7906 /* If nonzero, TEMP will be set to the address of something that might
7907 be a MEM corresponding to a stack slot. */
7910 /* Are we taking the address of a nested function? */
7911 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
7912 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
7913 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
7914 && ! TREE_STATIC (exp))
7916 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7917 op0 = force_operand (op0, target);
7919 /* If we are taking the address of something erroneous, just
7921 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7925 /* We make sure to pass const0_rtx down if we came in with
7926 ignore set, to avoid doing the cleanups twice for something. */
7927 op0 = expand_expr (TREE_OPERAND (exp, 0),
7928 ignore ? const0_rtx : NULL_RTX, VOIDmode,
7929 (modifier == EXPAND_INITIALIZER
7930 ? modifier : EXPAND_CONST_ADDRESS));
7932 /* If we are going to ignore the result, OP0 will have been set
7933 to const0_rtx, so just return it. Don't get confused and
7934 think we are taking the address of the constant. */
7938 op0 = protect_from_queue (op0, 0);
7940 /* We would like the object in memory. If it is a constant,
7941 we can have it be statically allocated into memory. For
7942 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
7943 memory and store the value into it. */
7945 if (CONSTANT_P (op0))
7946 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7948 else if (GET_CODE (op0) == MEM)
7950 mark_temp_addr_taken (op0);
7951 temp = XEXP (op0, 0);
7954 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7955 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7957 /* If this object is in a register, it must be not
7959 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7960 rtx memloc = assign_temp (inner_type, 1, 1, 1);
7962 mark_temp_addr_taken (memloc);
7963 emit_move_insn (memloc, op0);
7967 if (GET_CODE (op0) != MEM)
7970 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7972 temp = XEXP (op0, 0);
7973 #ifdef POINTERS_EXTEND_UNSIGNED
7974 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7975 && mode == ptr_mode)
7976 temp = convert_memory_address (ptr_mode, temp);
7981 op0 = force_operand (XEXP (op0, 0), target);
7984 if (flag_force_addr && GET_CODE (op0) != REG)
7985 op0 = force_reg (Pmode, op0);
7987 if (GET_CODE (op0) == REG
7988 && ! REG_USERVAR_P (op0))
7989 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
7991 /* If we might have had a temp slot, add an equivalent address
7994 update_temp_slot_address (temp, op0);
7996 #ifdef POINTERS_EXTEND_UNSIGNED
7997 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7998 && mode == ptr_mode)
7999 op0 = convert_memory_address (ptr_mode, op0);
8004 case ENTRY_VALUE_EXPR:
8007 /* COMPLEX type for Extended Pascal & Fortran */
8010 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8013 /* Get the rtx code of the operands. */
8014 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8015 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8018 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8022 /* Move the real (op0) and imaginary (op1) parts to their location. */
8023 emit_move_insn (gen_realpart (mode, target), op0);
8024 emit_move_insn (gen_imagpart (mode, target), op1);
8026 insns = get_insns ();
8029 /* Complex construction should appear as a single unit. */
8030 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8031 each with a separate pseudo as destination.
8032 It's not correct for flow to treat them as a unit. */
8033 if (GET_CODE (target) != CONCAT)
8034 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8042 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8043 return gen_realpart (mode, op0);
8046 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8047 return gen_imagpart (mode, op0);
8051 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8055 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8058 target = gen_reg_rtx (mode);
8062 /* Store the realpart and the negated imagpart to target. */
8063 emit_move_insn (gen_realpart (partmode, target),
8064 gen_realpart (partmode, op0));
8066 imag_t = gen_imagpart (partmode, target);
8067 temp = expand_unop (partmode, neg_optab,
8068 gen_imagpart (partmode, op0), imag_t, 0);
8070 emit_move_insn (imag_t, temp);
8072 insns = get_insns ();
8075 /* Conjugate should appear as a single unit
8076 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8077 each with a separate pseudo as destination.
8078 It's not correct for flow to treat them as a unit. */
8079 if (GET_CODE (target) != CONCAT)
8080 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8087 case TRY_CATCH_EXPR:
8089 tree handler = TREE_OPERAND (exp, 1);
8091 expand_eh_region_start ();
8093 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8095 expand_eh_region_end (handler);
8100 case TRY_FINALLY_EXPR:
8102 tree try_block = TREE_OPERAND (exp, 0);
8103 tree finally_block = TREE_OPERAND (exp, 1);
8104 rtx finally_label = gen_label_rtx ();
8105 rtx done_label = gen_label_rtx ();
8106 rtx return_link = gen_reg_rtx (Pmode);
8107 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8108 (tree) finally_label, (tree) return_link);
8109 TREE_SIDE_EFFECTS (cleanup) = 1;
8111 /* Start a new binding layer that will keep track of all cleanup
8112 actions to be performed. */
8113 expand_start_bindings (0);
8115 target_temp_slot_level = temp_slot_level;
8117 expand_decl_cleanup (NULL_TREE, cleanup);
8118 op0 = expand_expr (try_block, target, tmode, modifier);
8120 preserve_temp_slots (op0);
8121 expand_end_bindings (NULL_TREE, 0, 0);
8122 emit_jump (done_label);
8123 emit_label (finally_label);
8124 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8125 emit_indirect_jump (return_link);
8126 emit_label (done_label);
8130 case GOTO_SUBROUTINE_EXPR:
8132 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8133 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8134 rtx return_address = gen_label_rtx ();
8135 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8137 emit_label (return_address);
8143 rtx dcc = get_dynamic_cleanup_chain ();
8144 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8150 rtx dhc = get_dynamic_handler_chain ();
8151 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8156 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8159 /* Here to do an ordinary binary operator, generating an instruction
8160 from the optab already placed in `this_optab'. */
8162 preexpand_calls (exp);
8163 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8165 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8166 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8168 temp = expand_binop (mode, this_optab, op0, op1, target,
8169 unsignedp, OPTAB_LIB_WIDEN);
8177 /* Return the alignment in bits of EXP, a pointer valued expression.
8178 But don't return more than MAX_ALIGN no matter what.
8179 The alignment returned is, by default, the alignment of the thing that
8180 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
8182 Otherwise, look at the expression to see if we can do better, i.e., if the
8183 expression is actually pointing at an object whose alignment is tighter. */
/* NOTE(review): this listing elides lines between the numbers shown
   (K&R parameter declarations, braces and some statements are missing);
   the comments below describe only what is visible.  */
8186 get_pointer_alignment (exp, max_align)
8190 unsigned align, inner;
/* A non-pointer expression tells us nothing about alignment.  */
8192 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
/* Start from the declared alignment of the pointed-to type,
   capped at MAX_ALIGN.  */
8195 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
8196 align = MIN (align, max_align);
/* Walk down through conversions and pointer arithmetic, tightening
   the bound where the inner expression allows it.  */
8200 switch (TREE_CODE (exp))
8204 case NON_LVALUE_EXPR:
/* Conversion: the inner pointer's pointed-to type may be stricter.  */
8205 exp = TREE_OPERAND (exp, 0);
8206 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
8208 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
8209 align = MIN (inner, max_align);
8213 /* If sum of pointer + int, restrict our maximum alignment to that
8214 imposed by the integer. If not, we can't do any better than
8216 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
/* Reduce the cap until it divides the constant byte offset
   (converted to bits).  */
8219 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
8224 exp = TREE_OPERAND (exp, 0);
8228 /* See what we are pointing at and look at its alignment. */
8229 exp = TREE_OPERAND (exp, 0);
8230 if (TREE_CODE (exp) == FUNCTION_DECL)
/* Functions are aligned to the target's code boundary.  */
8231 align = FUNCTION_BOUNDARY;
8232 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
/* Declarations carry their own alignment.  */
8233 align = DECL_ALIGN (exp);
8234 #ifdef CONSTANT_ALIGNMENT
/* Targets may over-align constants (e.g. string literals).  */
8235 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
8236 align = CONSTANT_ALIGNMENT (exp, align);
8238 return MIN (align, max_align);
8246 /* Return the tree node and offset if a given argument corresponds to
8247 a string constant. */
/* Recognizes either the address of a STRING_CST directly, or a
   PLUS_EXPR with the string address as one operand; the other operand
   becomes the offset stored through PTR_OFFSET.
   NOTE(review): some lines are elided in this listing (the K&R
   parameter lines and the offset-store statements in the PLUS_EXPR
   arms are missing between the numbered lines).  */
8250 string_constant (arg, ptr_offset)
/* Direct case: &"literal" -- offset is zero.  */
8256 if (TREE_CODE (arg) == ADDR_EXPR
8257 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8259 *ptr_offset = integer_zero_node;
8260 return TREE_OPERAND (arg, 0);
/* Sum case: the string address may be either operand of the PLUS.  */
8262 else if (TREE_CODE (arg) == PLUS_EXPR)
8264 tree arg0 = TREE_OPERAND (arg, 0);
8265 tree arg1 = TREE_OPERAND (arg, 1);
8270 if (TREE_CODE (arg0) == ADDR_EXPR
8271 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8274 return TREE_OPERAND (arg0, 0);
8276 else if (TREE_CODE (arg1) == ADDR_EXPR
8277 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8280 return TREE_OPERAND (arg1, 0);
8287 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
8288 way, because it could contain a zero byte in the middle.
8289 TREE_STRING_LENGTH is the size of the character array, not the string.
8291 Unfortunately, string_constant can't access the values of const char
8292 arrays with initializers, so neither can we do so here. */
/* NOTE(review): the function header line itself (presumably the
   `c_strlen' definition) is elided from this listing, along with other
   lines between the numbers shown.  Returns a tree for the string
   length, or falls through (elided returns) when it cannot be
   determined at compile time.  */
8302 src = string_constant (src, &offset_node);
/* MAX is the size of the character array backing the literal;
   PTR is the host-side copy of its bytes.  */
8305 max = TREE_STRING_LENGTH (src);
8306 ptr = TREE_STRING_POINTER (src);
/* Variable offset into the string: we can only proceed if the string
   has no embedded NUL, in which case length = MAX - offset.  */
8307 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
8309 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
8310 compute the offset to the following null if we don't know where to
8311 start searching for it. */
/* Scan for an embedded NUL (loop body elided in this listing).  */
8313 for (i = 0; i < max; i++)
8316 /* We don't know the starting offset, but we do know that the string
8317 has no internal zero bytes. We can assume that the offset falls
8318 within the bounds of the string; otherwise, the programmer deserves
8319 what he gets. Subtract the offset from the length of the string,
8321 /* This would perhaps not be valid if we were dealing with named
8322 arrays in addition to literal string constants. */
8323 return size_binop (MINUS_EXPR, size_int (max), offset_node);
8326 /* We have a known offset into the string. Start searching there for
8327 a null character. */
8328 if (offset_node == 0)
8332 /* Did we get a long long offset? If so, punt. */
8333 if (TREE_INT_CST_HIGH (offset_node) != 0)
8335 offset = TREE_INT_CST_LOW (offset_node);
8337 /* If the offset is known to be out of bounds, warn, and call strlen at
8339 if (offset < 0 || offset > max)
8341 warning ("offset outside bounds of constant string");
8344 /* Use strlen to search for the first zero byte. Since any strings
8345 constructed with build_string will have nulls appended, we win even
8346 if we get handed something like (char[4])"abcd".
8348 Since OFFSET is our starting index into the string, no further
8349 calculation is needed. */
8350 return size_int (strlen (ptr + offset));
/* Emit RTL computing __builtin_return_address or __builtin_frame_address
   (selected by FNDECL_CODE) for the frame COUNT levels up, starting from
   the frame address TEM.  Returns the resulting rtx.
   NOTE(review): lines are elided in this listing between the numbers
   shown (remaining parameter declarations, braces and some returns).  */
8354 expand_builtin_return_addr (fndecl_code, count, tem)
8355 enum built_in_function fndecl_code;
8361 /* Some machines need special handling before we can access
8362 arbitrary frames. For example, on the sparc, we must first flush
8363 all register windows to the stack. */
8364 #ifdef SETUP_FRAME_ADDRESSES
8366 SETUP_FRAME_ADDRESSES ();
8369 /* On the sparc, the return address is not in the frame, it is in a
8370 register. There is no way to access it off of the current frame
8371 pointer, but it can be accessed off the previous frame pointer by
8372 reading the value from the register window save area. */
8373 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
/* On such targets, fetching the return address requires walking one
   extra frame (adjustment elided in this listing).  */
8374 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
8378 /* Scan back COUNT frames to the specified frame. */
8379 for (i = 0; i < count; i++)
8381 /* Assume the dynamic chain pointer is in the word that the
8382 frame address points to, unless otherwise specified. */
8383 #ifdef DYNAMIC_CHAIN_ADDRESS
8384 tem = DYNAMIC_CHAIN_ADDRESS (tem);
/* Follow one link of the frame chain: load the saved FP.  */
8386 tem = memory_address (Pmode, tem);
8387 tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
8390 /* For __builtin_frame_address, return what we've got. */
8391 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
8394 /* For __builtin_return_address, Get the return address from that
8396 #ifdef RETURN_ADDR_RTX
/* Target knows exactly where the return address lives.  */
8397 tem = RETURN_ADDR_RTX (count, tem);
/* Default: assume the return address is the word just past the
   saved frame pointer.  */
8399 tem = memory_address (Pmode,
8400 plus_constant (tem, GET_MODE_SIZE (Pmode)));
8401 tem = gen_rtx_MEM (Pmode, tem);
8406 /* __builtin_setjmp is passed a pointer to an array of five words (not
8407 all will be used on all machines). It operates similarly to the C
8408 library function of the same name, but is more efficient. Much of
8409 the code below (and for longjmp) is copied from the handling of
8412 NOTE: This is intended for use by GNAT and the exception handling
8413 scheme in the compiler and will only work in the method used by
/* Buffer layout established below: word 0 = frame value, word 1 =
   receiver label, words 2.. = machine-dependent stack save area.
   NOTE(review): this listing elides lines between the numbers shown
   (declarations, braces, #else/#endif lines and some statements).  */
8417 expand_builtin_setjmp (buf_addr, target, first_label, next_label)
8420 rtx first_label, next_label;
/* LAB1 is the receiver point that a later longjmp lands on.  */
8422 rtx lab1 = gen_label_rtx ();
/* Mode in which this target saves the stack pointer for a
   nonlocal save area.  */
8423 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
8424 enum machine_mode value_mode;
8427 value_mode = TYPE_MODE (integer_type_node);
8429 #ifdef POINTERS_EXTEND_UNSIGNED
8430 buf_addr = convert_memory_address (Pmode, buf_addr);
8433 buf_addr = force_reg (Pmode, buf_addr);
/* TARGET must be a pseudo register; make a fresh one otherwise.  */
8435 if (target == 0 || GET_CODE (target) != REG
8436 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8437 target = gen_reg_rtx (value_mode);
8441 /* We store the frame pointer and the address of lab1 in the buffer
8442 and use the rest of it for the stack save area, which is
8443 machine-dependent. */
8445 #ifndef BUILTIN_SETJMP_FRAME_VALUE
8446 #define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
/* Word 0 of the buffer: the frame value (virtual_stack_vars_rtx by
   default, target-overridable).  */
8449 emit_move_insn (gen_rtx_MEM (Pmode, buf_addr),
8450 BUILTIN_SETJMP_FRAME_VALUE);
/* Word 1: the address of the receiver label LAB1.  */
8451 emit_move_insn (validize_mem
8452 (gen_rtx_MEM (Pmode,
8453 plus_constant (buf_addr,
8454 GET_MODE_SIZE (Pmode)))),
8455 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, lab1)));
/* Words 2 onward: machine-dependent stack save area.  */
8457 stack_save = gen_rtx_MEM (sa_mode,
8458 plus_constant (buf_addr,
8459 2 * GET_MODE_SIZE (Pmode)));
8460 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
8462 /* If there is further processing to do, do it. */
8463 #ifdef HAVE_builtin_setjmp_setup
8464 if (HAVE_builtin_setjmp_setup)
8465 emit_insn (gen_builtin_setjmp_setup (buf_addr));
8468 /* Set TARGET to zero and branch to the first-time-through label. */
8469 emit_move_insn (target, const0_rtx);
8470 emit_jump_insn (gen_jump (first_label));
8474 /* Tell flow about the strange goings on. Putting `lab1' on
8475 `nonlocal_goto_handler_labels' to indicates that function
8476 calls may traverse the arc back to this label. */
8478 current_function_has_nonlocal_label = 1;
8479 nonlocal_goto_handler_labels =
8480 gen_rtx_EXPR_LIST (VOIDmode, lab1, nonlocal_goto_handler_labels);
8482 /* Clobber the FP when we get here, so we have to make sure it's
8483 marked as used by this function. */
8484 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
8486 /* Mark the static chain as clobbered here so life information
8487 doesn't get messed up for it. */
8488 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
8490 /* Now put in the code to restore the frame pointer, and argument
8491 pointer, if needed. The code below is from expand_end_bindings
8492 in stmt.c; see detailed documentation there. */
8493 #ifdef HAVE_nonlocal_goto
8494 if (! HAVE_nonlocal_goto)
8496 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8498 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8499 if (fixed_regs[ARG_POINTER_REGNUM])
8501 #ifdef ELIMINABLE_REGS
/* If the arg pointer is eliminated in favor of the (hard) frame
   pointer, it needs no explicit restore: check the target's
   elimination table for that pair.  */
8503 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
8505 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8506 if (elim_regs[i].from == ARG_POINTER_REGNUM
8507 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
/* Loop fell through: no such elimination, so restore by hand.  */
8510 if (i == sizeof elim_regs / sizeof elim_regs [0])
8513 /* Now restore our arg pointer from the address at which it
8514 was saved in our stack frame.
8515 If there hasn't be space allocated for it yet, make
8517 if (arg_pointer_save_area == 0)
8518 arg_pointer_save_area
8519 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8520 emit_move_insn (virtual_incoming_args_rtx,
8521 copy_to_reg (arg_pointer_save_area));
/* Give targets a hook at the receiver point.  */
8526 #ifdef HAVE_builtin_setjmp_receiver
8527 if (HAVE_builtin_setjmp_receiver)
8528 emit_insn (gen_builtin_setjmp_receiver (lab1));
8531 #ifdef HAVE_nonlocal_goto_receiver
8532 if (HAVE_nonlocal_goto_receiver)
8533 emit_insn (gen_nonlocal_goto_receiver ());
8540 /* Set TARGET, and branch to the next-time-through label. */
8541 emit_move_insn (target, const1_rtx);
8542 emit_jump_insn (gen_jump (next_label));
/* Emit RTL for __builtin_longjmp: jump back through the setjmp buffer
   at BUF_ADDR, restoring frame pointer and stack pointer from it.
   VALUE must be const1_rtx (enforced below) since that is what
   builtin_setjmp returns on the longjmp path.
   NOTE(review): this listing elides lines between the numbers shown
   (declarations, braces and #else/#endif lines).  */
8549 expand_builtin_longjmp (buf_addr, value)
8550 rtx buf_addr, value;
8553 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
8555 #ifdef POINTERS_EXTEND_UNSIGNED
8556 buf_addr = convert_memory_address (Pmode, buf_addr);
8558 buf_addr = force_reg (Pmode, buf_addr);
8560 /* We used to store value in static_chain_rtx, but that fails if pointers
8561 are smaller than integers. We instead require that the user must pass
8562 a second argument of 1, because that is what builtin_setjmp will
8563 return. This also makes EH slightly more efficient, since we are no
8564 longer copying around a value that we don't care about. */
8565 if (value != const1_rtx)
/* If the target has a dedicated pattern, use it and skip the
   generic expansion below.  */
8568 #ifdef HAVE_builtin_longjmp
8569 if (HAVE_builtin_longjmp)
8570 emit_insn (gen_builtin_longjmp (buf_addr));
/* Buffer layout matches expand_builtin_setjmp: word 0 = FP,
   word 1 = receiver label, words 2.. = stack save area.  */
8574 fp = gen_rtx_MEM (Pmode, buf_addr);
8575 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
8576 GET_MODE_SIZE (Pmode)));
8578 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
8579 2 * GET_MODE_SIZE (Pmode)));
8581 /* Pick up FP, label, and SP from the block and jump. This code is
8582 from expand_goto in stmt.c; see there for detailed comments. */
/* NOTE(review): `#if' here vs `#ifdef HAVE_nonlocal_goto' elsewhere in
   this file -- looks inconsistent; confirm which form the target
   headers expect before changing.  */
8583 #if HAVE_nonlocal_goto
8584 if (HAVE_nonlocal_goto)
8585 /* We have to pass a value to the nonlocal_goto pattern that will
8586 get copied into the static_chain pointer, but it does not matter
8587 what that value is, because builtin_setjmp does not use it. */
8588 emit_insn (gen_nonlocal_goto (value, fp, stack, lab));
/* Generic path: load the label before clobbering FP/SP, then
   restore both and jump.  */
8592 lab = copy_to_reg (lab);
8594 emit_move_insn (hard_frame_pointer_rtx, fp);
8595 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
/* USEs keep the restored FP/SP live up to the jump.  */
8597 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
8598 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
8599 emit_indirect_jump (lab);
/* Build a BLKmode MEM for the object that the pointer expression EXP
   addresses, propagating readonly and in-struct flags from the tree.
   NOTE(review): lines are elided in this listing between the numbers
   shown (declarations, braces and the loop-exit statements).  */
8605 get_memory_rtx (exp)
8611 mem = gen_rtx_MEM (BLKmode,
8612 memory_address (BLKmode,
8613 expand_expr (exp, NULL_RTX,
8614 ptr_mode, EXPAND_SUM)));
/* Mark the MEM unchanging if the tree says the object is readonly.  */
8616 RTX_UNCHANGING_P (mem) = TREE_READONLY (exp);
8618 /* Figure out the type of the object pointed to. Set MEM_IN_STRUCT_P
8619 if the value is the address of a structure or if the expression is
8620 cast to a pointer to structure type. */
/* Strip NOP casts, noting whether any cast was to a pointer to an
   aggregate type.  */
8623 while (TREE_CODE (exp) == NOP_EXPR)
8625 tree cast_type = TREE_TYPE (exp);
8626 if (TREE_CODE (cast_type) == POINTER_TYPE
8627 && AGGREGATE_TYPE_P (TREE_TYPE (cast_type)))
8632 exp = TREE_OPERAND (exp, 0);
/* No aggregate cast found; inspect the underlying expression.  */
8635 if (is_aggregate == 0)
8639 if (TREE_CODE (exp) == ADDR_EXPR)
8640 /* If this is the address of an object, check whether the
8641 object is an array. */
8642 type = TREE_TYPE (TREE_OPERAND (exp, 0));
/* Otherwise use the pointed-to type of the pointer expression.  */
8644 type = TREE_TYPE (TREE_TYPE (exp));
8645 is_aggregate = AGGREGATE_TYPE_P (type);
8648 MEM_SET_IN_STRUCT_P (mem, is_aggregate);
8653 /* Expand an expression EXP that calls a built-in function,
8654 with result going to TARGET if that's convenient
8655 (and in mode MODE if that's convenient).
8656 SUBTARGET may be used as the target for computing one of EXP's operands.
8657 IGNORE is nonzero if the value is to be ignored. */
/* Nonzero if NODE (a FUNCTION_DECL) was called under its
   "__builtin_"-prefixed name; used below to decide whether to expand
   the builtin inline even when not optimizing.  */
8659 #define CALLED_AS_BUILT_IN(NODE) \
8660 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
8663 expand_builtin (exp, target, subtarget, mode, ignore)
8667 enum machine_mode mode;
8670 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8671 tree arglist = TREE_OPERAND (exp, 1);
8674 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
8675 optab builtin_optab;
8677 switch (DECL_FUNCTION_CODE (fndecl))
8682 /* build_function_call changes these into ABS_EXPR. */
8687 /* Treat these like sqrt, but only if the user asks for them. */
8688 if (! flag_fast_math)
8690 case BUILT_IN_FSQRT:
8691 /* If not optimizing, call the library function. */
8696 /* Arg could be wrong type if user redeclared this fcn wrong. */
8697 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
8700 /* Stabilize and compute the argument. */
8701 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
8702 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
8704 exp = copy_node (exp);
8705 arglist = copy_node (arglist);
8706 TREE_OPERAND (exp, 1) = arglist;
8707 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
8709 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8711 /* Make a suitable register to place result in. */
8712 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8717 switch (DECL_FUNCTION_CODE (fndecl))
8720 builtin_optab = sin_optab; break;
8722 builtin_optab = cos_optab; break;
8723 case BUILT_IN_FSQRT:
8724 builtin_optab = sqrt_optab; break;
8729 /* Compute into TARGET.
8730 Set TARGET to wherever the result comes back. */
8731 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8732 builtin_optab, op0, target, 0);
8734 /* If we were unable to expand via the builtin, stop the
8735 sequence (without outputting the insns) and break, causing
8736 a call to the library function. */
8743 /* Check the results by default. But if flag_fast_math is turned on,
8744 then assume sqrt will always be called with valid arguments. */
8746 if (! flag_fast_math)
8748 /* Don't define the builtin FP instructions
8749 if your machine is not IEEE. */
8750 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8753 lab1 = gen_label_rtx ();
8755 /* Test the result; if it is NaN, set errno=EDOM because
8756 the argument was not in the domain. */
8757 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
8762 #ifdef GEN_ERRNO_RTX
8763 rtx errno_rtx = GEN_ERRNO_RTX;
8766 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
8769 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8772 /* We can't set errno=EDOM directly; let the library call do it.
8773 Pop the arguments right away in case the call gets deleted. */
8775 expand_call (exp, target, 0);
8782 /* Output the entire sequence. */
8783 insns = get_insns ();
8792 /* __builtin_apply_args returns block of memory allocated on
8793 the stack into which is stored the arg pointer, structure
8794 value address, static chain, and all the registers that might
8795 possibly be used in performing a function call. The code is
8796 moved to the start of the function so the incoming values are
8798 case BUILT_IN_APPLY_ARGS:
8799 /* Don't do __builtin_apply_args more than once in a function.
8800 Save the result of the first call and reuse it. */
8801 if (apply_args_value != 0)
8802 return apply_args_value;
8804 /* When this function is called, it means that registers must be
8805 saved on entry to this function. So we migrate the
8806 call to the first insn of this function. */
8811 temp = expand_builtin_apply_args ();
8815 apply_args_value = temp;
8817 /* Put the sequence after the NOTE that starts the function.
8818 If this is inside a SEQUENCE, make the outer-level insn
8819 chain current, so the code is placed at the start of the
8821 push_topmost_sequence ();
8822 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8823 pop_topmost_sequence ();
8827 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8828 FUNCTION with a copy of the parameters described by
8829 ARGUMENTS, and ARGSIZE. It returns a block of memory
8830 allocated on the stack into which is stored all the registers
8831 that might possibly be used for returning the result of a
8832 function. ARGUMENTS is the value returned by
8833 __builtin_apply_args. ARGSIZE is the number of bytes of
8834 arguments that must be copied. ??? How should this value be
8835 computed? We'll also need a safe worst case value for varargs
8837 case BUILT_IN_APPLY:
8839 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8840 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
8841 || TREE_CHAIN (arglist) == 0
8842 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8843 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8844 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8852 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8853 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
8855 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8858 /* __builtin_return (RESULT) causes the function to return the
8859 value described by RESULT. RESULT is address of the block of
8860 memory returned by __builtin_apply. */
8861 case BUILT_IN_RETURN:
8863 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8864 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8865 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8866 NULL_RTX, VOIDmode, 0));
8869 case BUILT_IN_SAVEREGS:
8870 /* Don't do __builtin_saveregs more than once in a function.
8871 Save the result of the first call and reuse it. */
8872 if (saveregs_value != 0)
8873 return saveregs_value;
8875 /* When this function is called, it means that registers must be
8876 saved on entry to this function. So we migrate the
8877 call to the first insn of this function. */
8881 /* Now really call the function. `expand_call' does not call
8882 expand_builtin, so there is no danger of infinite recursion here. */
8885 #ifdef EXPAND_BUILTIN_SAVEREGS
8886 /* Do whatever the machine needs done in this case. */
8887 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8889 /* The register where the function returns its value
8890 is likely to have something else in it, such as an argument.
8891 So preserve that register around the call. */
8893 if (value_mode != VOIDmode)
8895 rtx valreg = hard_libcall_value (value_mode);
8896 rtx saved_valreg = gen_reg_rtx (value_mode);
8898 emit_move_insn (saved_valreg, valreg);
8899 temp = expand_call (exp, target, ignore);
8900 emit_move_insn (valreg, saved_valreg);
8903 /* Generate the call, putting the value in a pseudo. */
8904 temp = expand_call (exp, target, ignore);
8910 saveregs_value = temp;
8912 /* Put the sequence after the NOTE that starts the function.
8913 If this is inside a SEQUENCE, make the outer-level insn
8914 chain current, so the code is placed at the start of the
8916 push_topmost_sequence ();
8917 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8918 pop_topmost_sequence ();
8922 /* __builtin_args_info (N) returns word N of the arg space info
8923 for the current function. The number and meanings of words
8924 is controlled by the definition of CUMULATIVE_ARGS. */
8925 case BUILT_IN_ARGS_INFO:
8927 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
8928 int *word_ptr = (int *) ¤t_function_args_info;
8930 /* These are used by the code below that is if 0'ed away */
8932 tree type, elts, result;
8935 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8936 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8937 __FILE__, __LINE__);
8941 tree arg = TREE_VALUE (arglist);
8942 if (TREE_CODE (arg) != INTEGER_CST)
8943 error ("argument of `__builtin_args_info' must be constant");
8946 int wordnum = TREE_INT_CST_LOW (arg);
8948 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8949 error ("argument of `__builtin_args_info' out of range");
8951 return GEN_INT (word_ptr[wordnum]);
8955 error ("missing argument in `__builtin_args_info'");
8960 for (i = 0; i < nwords; i++)
8961 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
8963 type = build_array_type (integer_type_node,
8964 build_index_type (build_int_2 (nwords, 0)));
8965 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8966 TREE_CONSTANT (result) = 1;
8967 TREE_STATIC (result) = 1;
8968 result = build (INDIRECT_REF, build_pointer_type (type), result);
8969 TREE_CONSTANT (result) = 1;
8970 return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
8974 /* Return the address of the first anonymous stack arg. */
8975 case BUILT_IN_NEXT_ARG:
8977 tree fntype = TREE_TYPE (current_function_decl);
8979 if ((TYPE_ARG_TYPES (fntype) == 0
8980 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8982 && ! current_function_varargs)
8984 error ("`va_start' used in function with fixed args");
8990 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8991 tree arg = TREE_VALUE (arglist);
8993 /* Strip off all nops for the sake of the comparison. This
8994 is not quite the same as STRIP_NOPS. It does more.
8995 We must also strip off INDIRECT_EXPR for C++ reference
8997 while (TREE_CODE (arg) == NOP_EXPR
8998 || TREE_CODE (arg) == CONVERT_EXPR
8999 || TREE_CODE (arg) == NON_LVALUE_EXPR
9000 || TREE_CODE (arg) == INDIRECT_REF)
9001 arg = TREE_OPERAND (arg, 0);
9002 if (arg != last_parm)
9003 warning ("second parameter of `va_start' not last named argument");
9005 else if (! current_function_varargs)
9006 /* Evidently an out of date version of <stdarg.h>; can't validate
9007 va_start's second argument, but can still work as intended. */
9008 warning ("`__builtin_next_arg' called without an argument");
9011 return expand_binop (Pmode, add_optab,
9012 current_function_internal_arg_pointer,
9013 current_function_arg_offset_rtx,
9014 NULL_RTX, 0, OPTAB_LIB_WIDEN);
9016 case BUILT_IN_CLASSIFY_TYPE:
9019 tree type = TREE_TYPE (TREE_VALUE (arglist));
9020 enum tree_code code = TREE_CODE (type);
9021 if (code == VOID_TYPE)
9022 return GEN_INT (void_type_class);
9023 if (code == INTEGER_TYPE)
9024 return GEN_INT (integer_type_class);
9025 if (code == CHAR_TYPE)
9026 return GEN_INT (char_type_class);
9027 if (code == ENUMERAL_TYPE)
9028 return GEN_INT (enumeral_type_class);
9029 if (code == BOOLEAN_TYPE)
9030 return GEN_INT (boolean_type_class);
9031 if (code == POINTER_TYPE)
9032 return GEN_INT (pointer_type_class);
9033 if (code == REFERENCE_TYPE)
9034 return GEN_INT (reference_type_class);
9035 if (code == OFFSET_TYPE)
9036 return GEN_INT (offset_type_class);
9037 if (code == REAL_TYPE)
9038 return GEN_INT (real_type_class);
9039 if (code == COMPLEX_TYPE)
9040 return GEN_INT (complex_type_class);
9041 if (code == FUNCTION_TYPE)
9042 return GEN_INT (function_type_class);
9043 if (code == METHOD_TYPE)
9044 return GEN_INT (method_type_class);
9045 if (code == RECORD_TYPE)
9046 return GEN_INT (record_type_class);
9047 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
9048 return GEN_INT (union_type_class);
9049 if (code == ARRAY_TYPE)
9051 if (TYPE_STRING_FLAG (type))
9052 return GEN_INT (string_type_class);
9054 return GEN_INT (array_type_class);
9056 if (code == SET_TYPE)
9057 return GEN_INT (set_type_class);
9058 if (code == FILE_TYPE)
9059 return GEN_INT (file_type_class);
9060 if (code == LANG_TYPE)
9061 return GEN_INT (lang_type_class);
9063 return GEN_INT (no_type_class);
9065 case BUILT_IN_CONSTANT_P:
9070 tree arg = TREE_VALUE (arglist);
9073 /* We return 1 for a numeric type that's known to be a constant
9074 value at compile-time or for an aggregate type that's a
9075 literal constant. */
9078 /* If we know this is a constant, emit the constant of one. */
9079 if (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
9080 || (TREE_CODE (arg) == CONSTRUCTOR
9081 && TREE_CONSTANT (arg))
9082 || (TREE_CODE (arg) == ADDR_EXPR
9083 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST))
9086 /* If we aren't going to be running CSE or this expression
9087 has side effects, show we don't know it to be a constant.
9088 Likewise if it's a pointer or aggregate type since in those
9089 case we only want literals, since those are only optimized
9090 when generating RTL, not later. */
9091 if (TREE_SIDE_EFFECTS (arg) || cse_not_expected
9092 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
9093 || POINTER_TYPE_P (TREE_TYPE (arg)))
9096 /* Otherwise, emit (constant_p_rtx (ARG)) and let CSE get a
9097 chance to see if it can deduce whether ARG is constant. */
9099 tmp = expand_expr (arg, NULL_RTX, VOIDmode, 0);
9100 tmp = gen_rtx_CONSTANT_P_RTX (value_mode, tmp);
9104 case BUILT_IN_FRAME_ADDRESS:
9105 /* The argument must be a nonnegative integer constant.
9106 It counts the number of frames to scan up the stack.
9107 The value is the address of that frame. */
9108 case BUILT_IN_RETURN_ADDRESS:
9109 /* The argument must be a nonnegative integer constant.
9110 It counts the number of frames to scan up the stack.
9111 The value is the return address saved in that frame. */
9113 /* Warning about missing arg was already issued. */
9115 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
9116 || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
9118 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9119 error ("invalid arg to `__builtin_frame_address'");
9121 error ("invalid arg to `__builtin_return_address'");
9126 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
9127 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
9128 hard_frame_pointer_rtx);
9130 /* Some ports cannot access arbitrary stack frames. */
9133 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9134 warning ("unsupported arg to `__builtin_frame_address'");
9136 warning ("unsupported arg to `__builtin_return_address'");
9140 /* For __builtin_frame_address, return what we've got. */
9141 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9144 if (GET_CODE (tem) != REG
9145 && ! CONSTANT_P (tem))
9146 tem = copy_to_mode_reg (Pmode, tem);
9150 /* Returns the address of the area where the structure is returned.
9152 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
9154 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
9155 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
9158 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
9160 case BUILT_IN_ALLOCA:
9162 /* Arg could be non-integer if user redeclared this fcn wrong. */
9163 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
9166 /* Compute the argument. */
9167 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
9169 /* Allocate the desired space. */
9170 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
9173 /* If not optimizing, call the library function. */
9174 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9178 /* Arg could be non-integer if user redeclared this fcn wrong. */
9179 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
9182 /* Compute the argument. */
9183 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
9184 /* Compute ffs, into TARGET if possible.
9185 Set TARGET to wherever the result comes back. */
9186 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
9187 ffs_optab, op0, target, 1);
9192 case BUILT_IN_STRLEN:
9193 /* If not optimizing, call the library function. */
9194 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9198 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9199 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9203 tree src = TREE_VALUE (arglist);
9204 tree len = c_strlen (src);
9207 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9209 rtx result, src_rtx, char_rtx;
9210 enum machine_mode insn_mode = value_mode, char_mode;
9211 enum insn_code icode;
9213 /* If the length is known, just return it. */
9215 return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);
9217 /* If SRC is not a pointer type, don't do this operation inline. */
9221 /* Call a function if we can't compute strlen in the right mode. */
9223 while (insn_mode != VOIDmode)
9225 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
9226 if (icode != CODE_FOR_nothing)
9229 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
9231 if (insn_mode == VOIDmode)
9234 /* Make a place to write the result of the instruction. */
9237 && GET_CODE (result) == REG
9238 && GET_MODE (result) == insn_mode
9239 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9240 result = gen_reg_rtx (insn_mode);
9242 /* Make sure the operands are acceptable to the predicates. */
9244 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
9245 result = gen_reg_rtx (insn_mode);
9246 src_rtx = memory_address (BLKmode,
9247 expand_expr (src, NULL_RTX, ptr_mode,
9250 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
9251 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
9253 /* Check the string is readable and has an end. */
9254 if (current_function_check_memory_usage)
9255 emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
9257 GEN_INT (MEMORY_USE_RO),
9258 TYPE_MODE (integer_type_node));
9260 char_rtx = const0_rtx;
9261 char_mode = insn_operand_mode[(int)icode][2];
9262 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
9263 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
9265 emit_insn (GEN_FCN (icode) (result,
9266 gen_rtx_MEM (BLKmode, src_rtx),
9267 char_rtx, GEN_INT (align)));
9269 /* Return the value in the proper mode for this function. */
9270 if (GET_MODE (result) == value_mode)
9272 else if (target != 0)
9274 convert_move (target, result, 0);
9278 return convert_to_mode (value_mode, result, 0);
9281 case BUILT_IN_STRCPY:
9282 /* If not optimizing, call the library function. */
9283 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9287 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9288 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9289 || TREE_CHAIN (arglist) == 0
9290 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
9294 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
9299 len = size_binop (PLUS_EXPR, len, integer_one_node);
9301 chainon (arglist, build_tree_list (NULL_TREE, len));
9305 case BUILT_IN_MEMCPY:
9306 /* If not optimizing, call the library function. */
9307 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9311 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9312 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9313 || TREE_CHAIN (arglist) == 0
9314 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9316 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9317 || (TREE_CODE (TREE_TYPE (TREE_VALUE
9318 (TREE_CHAIN (TREE_CHAIN (arglist)))))
9323 tree dest = TREE_VALUE (arglist);
9324 tree src = TREE_VALUE (TREE_CHAIN (arglist));
9325 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9328 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9330 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9331 rtx dest_mem, src_mem, dest_addr, len_rtx;
9333 /* If either SRC or DEST is not a pointer type, don't do
9334 this operation in-line. */
9335 if (src_align == 0 || dest_align == 0)
9337 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
9338 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9342 dest_mem = get_memory_rtx (dest);
9343 src_mem = get_memory_rtx (src);
9344 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
9346 /* Just copy the rights of SRC to the rights of DEST. */
9347 if (current_function_check_memory_usage)
9348 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
9349 XEXP (dest_mem, 0), ptr_mode,
9350 XEXP (src_mem, 0), ptr_mode,
9351 len_rtx, TYPE_MODE (sizetype));
9353 /* Copy word part most expediently. */
9355 = emit_block_move (dest_mem, src_mem, len_rtx,
9356 MIN (src_align, dest_align));
9359 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
9364 case BUILT_IN_MEMSET:
9365 /* If not optimizing, call the library function. */
9366 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9370 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9371 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9372 || TREE_CHAIN (arglist) == 0
9373 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9375 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9377 != (TREE_CODE (TREE_TYPE
9379 (TREE_CHAIN (TREE_CHAIN (arglist))))))))
9383 tree dest = TREE_VALUE (arglist);
9384 tree val = TREE_VALUE (TREE_CHAIN (arglist));
9385 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9388 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9389 rtx dest_mem, dest_addr, len_rtx;
9391 /* If DEST is not a pointer type, don't do this
9392 operation in-line. */
9393 if (dest_align == 0)
9396 /* If the arguments have side-effects, then we can only evaluate
9397 them at most once. The following code evaluates them twice if
9398 they are not constants because we break out to expand_call
9399 in that case. They can't be constants if they have side-effects
9400 so we can check for that first. Alternatively, we could call
9401 save_expr to make multiple evaluation safe. */
9402 if (TREE_SIDE_EFFECTS (val) || TREE_SIDE_EFFECTS (len))
9405 /* If VAL is not 0, don't do this operation in-line. */
9406 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
9409 /* If LEN does not expand to a constant, don't do this
9410 operation in-line. */
9411 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
9412 if (GET_CODE (len_rtx) != CONST_INT)
9415 dest_mem = get_memory_rtx (dest);
9417 /* Just check DST is writable and mark it as readable. */
9418 if (current_function_check_memory_usage)
9419 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
9420 XEXP (dest_mem, 0), ptr_mode,
9421 len_rtx, TYPE_MODE (sizetype),
9422 GEN_INT (MEMORY_USE_WO),
9423 TYPE_MODE (integer_type_node));
9426 dest_addr = clear_storage (dest_mem, len_rtx, dest_align);
9429 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
9434 /* These comparison functions need an instruction that returns an actual
9435 index. An ordinary compare that just sets the condition codes
9437 #ifdef HAVE_cmpstrsi
9438 case BUILT_IN_STRCMP:
9439 /* If not optimizing, call the library function. */
9440 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9443 /* If we need to check memory accesses, call the library function. */
9444 if (current_function_check_memory_usage)
9448 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9449 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9450 || TREE_CHAIN (arglist) == 0
9451 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
9453 else if (!HAVE_cmpstrsi)
9456 tree arg1 = TREE_VALUE (arglist);
9457 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9460 len = c_strlen (arg1);
9462 len = size_binop (PLUS_EXPR, integer_one_node, len);
9463 len2 = c_strlen (arg2);
9465 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
9467 /* If we don't have a constant length for the first, use the length
9468 of the second, if we know it. We don't require a constant for
9469 this case; some cost analysis could be done if both are available
9470 but neither is constant. For now, assume they're equally cheap.
9472 If both strings have constant lengths, use the smaller. This
9473 could arise if optimization results in strcpy being called with
9474 two fixed strings, or if the code was machine-generated. We should
9475 add some code to the `memcmp' handler below to deal with such
9476 situations, someday. */
9477 if (!len || TREE_CODE (len) != INTEGER_CST)
9484 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
9486 if (tree_int_cst_lt (len2, len))
9490 chainon (arglist, build_tree_list (NULL_TREE, len));
9494 case BUILT_IN_MEMCMP:
9495 /* If not optimizing, call the library function. */
9496 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9499 /* If we need to check memory accesses, call the library function. */
9500 if (current_function_check_memory_usage)
9504 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9505 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9506 || TREE_CHAIN (arglist) == 0
9507 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
9508 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9509 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
9511 else if (!HAVE_cmpstrsi)
9514 tree arg1 = TREE_VALUE (arglist);
9515 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9516 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9520 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9522 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9523 enum machine_mode insn_mode
9524 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
9526 /* If we don't have POINTER_TYPE, call the function. */
9527 if (arg1_align == 0 || arg2_align == 0)
9529 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
9530 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9534 /* Make a place to write the result of the instruction. */
9537 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
9538 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9539 result = gen_reg_rtx (insn_mode);
9541 emit_insn (gen_cmpstrsi (result, get_memory_rtx (arg1),
9542 get_memory_rtx (arg2),
9543 expand_expr (len, NULL_RTX, VOIDmode, 0),
9544 GEN_INT (MIN (arg1_align, arg2_align))));
9546 /* Return the value in the proper mode for this function. */
9547 mode = TYPE_MODE (TREE_TYPE (exp));
9548 if (GET_MODE (result) == mode)
9550 else if (target != 0)
9552 convert_move (target, result, 0);
9556 return convert_to_mode (mode, result, 0);
9559 case BUILT_IN_STRCMP:
9560 case BUILT_IN_MEMCMP:
9564 case BUILT_IN_SETJMP:
9566 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9570 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9572 rtx lab = gen_label_rtx ();
9573 rtx ret = expand_builtin_setjmp (buf_addr, target, lab, lab);
9578 /* __builtin_longjmp is passed a pointer to an array of five words.
9579 It's similar to the C library longjmp function but works with
9580 __builtin_setjmp above. */
9581 case BUILT_IN_LONGJMP:
9582 if (arglist == 0 || TREE_CHAIN (arglist) == 0
9583 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9587 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9589 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
9590 NULL_RTX, VOIDmode, 0);
9592 if (value != const1_rtx)
9594 error ("__builtin_longjmp second argument must be 1");
9598 expand_builtin_longjmp (buf_addr, value);
9605 emit_insn (gen_trap ());
9608 error ("__builtin_trap not supported by this target");
9612 /* Various hooks for the DWARF 2 __throw routine. */
9613 case BUILT_IN_UNWIND_INIT:
9614 expand_builtin_unwind_init ();
9616 case BUILT_IN_DWARF_CFA:
9617 return virtual_cfa_rtx;
9618 #ifdef DWARF2_UNWIND_INFO
9619 case BUILT_IN_DWARF_FP_REGNUM:
9620 return expand_builtin_dwarf_fp_regnum ();
9621 case BUILT_IN_DWARF_REG_SIZE:
9622 return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
9624 case BUILT_IN_FROB_RETURN_ADDR:
9625 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
9626 case BUILT_IN_EXTRACT_RETURN_ADDR:
9627 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
9628 case BUILT_IN_EH_RETURN:
9629 expand_builtin_eh_return (TREE_VALUE (arglist),
9630 TREE_VALUE (TREE_CHAIN (arglist)),
9631 TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))));
9634 default: /* just do library call, if unknown builtin */
9635 error ("built-in function `%s' not currently supported",
9636 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9639 /* The switch statement above can drop through to cause the function
9640 to be called normally. */
9642 return expand_call (exp, target, ignore);
9645 /* Built-in functions to perform an untyped call and return. */
9647 /* For each register that may be used for calling a function, this
9648 gives a mode used to copy the register's value. VOIDmode indicates
9649 the register is not used for calling a function. If the machine
9650 has register windows, this gives only the outbound registers.
9651 INCOMING_REGNO gives the corresponding inbound register. */
9652 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
9654 /* For each register that may be used for returning values, this gives
9655 a mode used to copy the register's value. VOIDmode indicates the
9656 register is not used for returning values. If the machine has
9657 register windows, this gives only the outbound registers.
9658 INCOMING_REGNO gives the corresponding inbound register. */
9659 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
9661 /* For each register that may be used for calling a function, this
9662 gives the offset of that register into the block returned by
9663 __builtin_apply_args. 0 indicates that the register is not
9664 used for calling a function. */
9665 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
/* NOTE: apply_args_mode and apply_args_reg_offset are filled in by
   apply_args_size () below, and apply_result_mode by apply_result_size ();
   the tables hold no meaningful data until one of those has run.  */
9667 /* Return the offset of register REGNO into the block returned by
9668 __builtin_apply_args. This is not declared static, since it is
9669 needed in objc-act.c. */
/* NOTE(review): assumes apply_args_size () has already been called to
   populate apply_args_reg_offset -- confirm callers guarantee this.  */
9672 apply_args_register_offset (regno)
9677 /* Arguments are always put in outgoing registers (in the argument
9678 block) if such make sense. */
9679 #ifdef OUTGOING_REGNO
/* On register-window targets, translate an inbound register number to
   the outbound one actually recorded in the offset table.  */
9680 regno = OUTGOING_REGNO(regno);
9682 return apply_args_reg_offset[regno];
9685 /* Return the size required for the block returned by __builtin_apply_args,
9686 and initialize apply_args_mode. */
/* The result is cached in the static SIZE (-1 means "not yet computed");
   the layout never changes for a given target, so later calls just return
   the saved value.  */
9691 static int size = -1;
9693 enum machine_mode mode;
9695 /* The values computed by this function never change. */
9698 /* The first value is the incoming arg-pointer. */
9699 size = GET_MODE_SIZE (Pmode);
9701 /* The second value is the structure value address unless this is
9702 passed as an "invisible" first argument. */
9703 if (struct_value_rtx)
9704 size += GET_MODE_SIZE (Pmode);
/* Then one slot for each hard register that can carry an argument,
   each aligned to the mode chosen for copying it.  */
9706 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9707 if (FUNCTION_ARG_REGNO_P (regno))
9709 /* Search for the proper mode for copying this register's
9710 value. I'm not sure this is right, but it works so far. */
9711 enum machine_mode best_mode = VOIDmode;
/* First try the integer modes that fit in exactly one register.  */
9713 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9715 mode = GET_MODE_WIDER_MODE (mode))
9716 if (HARD_REGNO_MODE_OK (regno, mode)
9717 && HARD_REGNO_NREGS (regno, mode) == 1)
/* Failing that, fall back to float modes for which a move pattern
   actually exists.  */
9720 if (best_mode == VOIDmode)
9721 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9723 mode = GET_MODE_WIDER_MODE (mode))
9724 if (HARD_REGNO_MODE_OK (regno, mode)
9725 && (mov_optab->handlers[(int) mode].insn_code
9726 != CODE_FOR_nothing))
9730 if (mode == VOIDmode)
/* Round SIZE up to the chosen mode's alignment, then record this
   register's offset and mode.  */
9733 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9734 if (size % align != 0)
9735 size = CEIL (size, align) * align;
9736 apply_args_reg_offset[regno] = size;
9737 size += GET_MODE_SIZE (mode);
9738 apply_args_mode[regno] = mode;
/* Registers that cannot carry arguments get VOIDmode and offset 0.  */
9742 apply_args_mode[regno] = VOIDmode;
9743 apply_args_reg_offset[regno] = 0;
9749 /* Return the size required for the block returned by __builtin_apply,
9750 and initialize apply_result_mode. */
/* Mirrors apply_args_size: the computed size is cached in the static
   SIZE and the per-register mode table never changes for a target.  */
9753 apply_result_size ()
9755 static int size = -1;
9757 enum machine_mode mode;
9759 /* The values computed by this function never change. */
/* One slot per hard register that can hold a function value, each
   aligned to the mode chosen for copying it.  */
9764 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9765 if (FUNCTION_VALUE_REGNO_P (regno))
9767 /* Search for the proper mode for copying this register's
9768 value. I'm not sure this is right, but it works so far. */
9769 enum machine_mode best_mode = VOIDmode;
/* Try integer modes first...  */
9771 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9773 mode = GET_MODE_WIDER_MODE (mode))
9774 if (HARD_REGNO_MODE_OK (regno, mode))
/* ...then float modes for which a move pattern exists.  */
9777 if (best_mode == VOIDmode)
9778 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9780 mode = GET_MODE_WIDER_MODE (mode))
9781 if (HARD_REGNO_MODE_OK (regno, mode)
9782 && (mov_optab->handlers[(int) mode].insn_code
9783 != CODE_FOR_nothing))
9787 if (mode == VOIDmode)
9790 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9791 if (size % align != 0)
9792 size = CEIL (size, align) * align;
9793 size += GET_MODE_SIZE (mode);
9794 apply_result_mode[regno] = mode;
/* Registers that cannot hold return values get VOIDmode.  */
9797 apply_result_mode[regno] = VOIDmode;
9799 /* Allow targets that use untyped_call and untyped_return to override
9800 the size so that machine-specific information can be stored here. */
9801 #ifdef APPLY_RESULT_SIZE
9802 size = APPLY_RESULT_SIZE;
9808 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9809 /* Create a vector describing the result block RESULT. If SAVEP is true,
9810 the result block is used to save the values; otherwise it is used to
9811 restore the values. */
/* Returns a PARALLEL of SETs: (set mem reg) when saving, (set reg mem)
   when restoring.  When restoring, INCOMING_REGNO maps each outbound
   register number to the corresponding inbound one (register-window
   targets).  Offsets into RESULT follow the apply_result_size layout.  */
9814 result_vector (savep, result)
9818 int regno, size, align, nelts;
9819 enum machine_mode mode;
9821 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9824 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9825 if ((mode = apply_result_mode[regno]) != VOIDmode)
9827 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9828 if (size % align != 0)
9829 size = CEIL (size, align) * align;
9830 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
9831 mem = change_address (result, mode,
9832 plus_constant (XEXP (result, 0), size));
9833 savevec[nelts++] = (savep
9834 ? gen_rtx_SET (VOIDmode, mem, reg)
9835 : gen_rtx_SET (VOIDmode, reg, mem));
9836 size += GET_MODE_SIZE (mode);
9838 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
9840 #endif /* HAVE_untyped_call or HAVE_untyped_return */
9842 /* Save the state required to perform an untyped call with the same
9843 arguments as were passed to the current function. */
/* Emits code that copies the incoming argument registers, the incoming
   arg pointer, and (when used) the incoming structure-value address into
   a stack block laid out per apply_args_size, and returns the block's
   address in a pseudo register.  */
9846 expand_builtin_apply_args ()
9849 int size, align, regno;
9850 enum machine_mode mode;
9852 /* Create a block where the arg-pointer, structure value address,
9853 and argument registers can be saved. */
9854 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
9856 /* Walk past the arg-pointer and structure value address. */
9857 size = GET_MODE_SIZE (Pmode);
9858 if (struct_value_rtx)
9859 size += GET_MODE_SIZE (Pmode);
9861 /* Save each register used in calling a function to the block. */
9862 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9863 if ((mode = apply_args_mode[regno]) != VOIDmode)
9867 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9868 if (size % align != 0)
9869 size = CEIL (size, align) * align;
/* Read the value from the inbound register of this slot (the two
   differ on register-window targets).  */
9871 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
9874 /* For reg-stack.c's stack register housekeeping.
9875 Compare with a similar piece of code in function.c. */
9877 emit_insn (gen_rtx_USE (mode, tem));
9880 emit_move_insn (change_address (registers, mode,
9881 plus_constant (XEXP (registers, 0),
9884 size += GET_MODE_SIZE (mode);
9887 /* Save the arg pointer to the block. */
9888 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9889 copy_to_reg (virtual_incoming_args_rtx))
9890 size = GET_MODE_SIZE (Pmode);
9892 /* Save the structure value address unless this is passed as an
9893 "invisible" first argument. */
9894 if (struct_value_incoming_rtx)
9896 emit_move_insn (change_address (registers, Pmode,
9897 plus_constant (XEXP (registers, 0),
9899 copy_to_reg (struct_value_incoming_rtx));
9900 size += GET_MODE_SIZE (Pmode);
9903 /* Return the address of the block. */
9904 return copy_addr_to_reg (XEXP (registers, 0));
9907 /* Perform an untyped call and save the state required to perform an
9908 untyped return of whatever value was returned by the given function. */
/* FUNCTION is the rtx address of the function to call; ARGUMENTS is the
   address of a block in the layout produced by expand_builtin_apply_args;
   ARGSIZE is the number of bytes of stack arguments to copy.  Returns the
   address (in a pseudo) of a block holding the callee's return registers,
   laid out per apply_result_size.  */
9911 expand_builtin_apply (function, arguments, argsize)
9912 rtx function, arguments, argsize;
9914 int size, align, regno;
9915 enum machine_mode mode;
9916 rtx incoming_args, result, reg, dest, call_insn;
9917 rtx old_stack_level = 0;
9918 rtx call_fusage = 0;
9920 /* Create a block where the return registers can be saved. */
9921 result = assign_stack_local (BLKmode, apply_result_size (), -1);
9923 /* ??? The argsize value should be adjusted here. */
9925 /* Fetch the arg pointer from the ARGUMENTS block. */
9926 incoming_args = gen_reg_rtx (Pmode);
9927 emit_move_insn (incoming_args,
9928 gen_rtx_MEM (Pmode, arguments));
9929 #ifndef STACK_GROWS_DOWNWARD
/* When the stack grows upward, the saved arg pointer is past the end of
   the arguments; step back by ARGSIZE to find their start.  */
9930 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9931 incoming_args, 0, OPTAB_LIB_WIDEN);
9934 /* Perform postincrements before actually calling the function. */
9937 /* Push a new argument block and copy the arguments. */
9938 do_pending_stack_adjust ();
9940 /* Save the stack with nonlocal if available */
9941 #ifdef HAVE_save_stack_nonlocal
9942 if (HAVE_save_stack_nonlocal)
9943 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
9946 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
9948 /* Push a block of memory onto the stack to store the memory arguments.
9949 Save the address in a register, and copy the memory arguments. ??? I
9950 haven't figured out how the calling convention macros affect this,
9951 but it's likely that the source and/or destination addresses in
9952 the block copy will need updating in machine specific ways. */
9953 dest = allocate_dynamic_stack_space (argsize, 0, 0);
9954 emit_block_move (gen_rtx_MEM (BLKmode, dest),
9955 gen_rtx_MEM (BLKmode, incoming_args),
9957 PARM_BOUNDARY / BITS_PER_UNIT);
9959 /* Refer to the argument block. */
9961 arguments = gen_rtx_MEM (BLKmode, arguments);
9963 /* Walk past the arg-pointer and structure value address. */
9964 size = GET_MODE_SIZE (Pmode);
9965 if (struct_value_rtx)
9966 size += GET_MODE_SIZE (Pmode);
9968 /* Restore each of the registers previously saved. Make USE insns
9969 for each of these registers for use in making the call. */
9970 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9971 if ((mode = apply_args_mode[regno]) != VOIDmode)
9973 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9974 if (size % align != 0)
9975 size = CEIL (size, align) * align;
9976 reg = gen_rtx_REG (mode, regno);
9977 emit_move_insn (reg,
9978 change_address (arguments, mode,
9979 plus_constant (XEXP (arguments, 0),
/* Record the register in CALL_FUSAGE so the call is known to use it.  */
9982 use_reg (&call_fusage, reg);
9983 size += GET_MODE_SIZE (mode);
9986 /* Restore the structure value address unless this is passed as an
9987 "invisible" first argument. */
9988 size = GET_MODE_SIZE (Pmode);
9989 if (struct_value_rtx)
9991 rtx value = gen_reg_rtx (Pmode);
9992 emit_move_insn (value,
9993 change_address (arguments, Pmode,
9994 plus_constant (XEXP (arguments, 0),
9996 emit_move_insn (struct_value_rtx, value);
9997 if (GET_CODE (struct_value_rtx) == REG)
9998 use_reg (&call_fusage, struct_value_rtx);
9999 size += GET_MODE_SIZE (Pmode);
10002 /* All arguments and registers used for the call are set up by now! */
10003 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
10005 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
10006 and we don't want to load it into a register as an optimization,
10007 because prepare_call_address already did it if it should be done. */
10008 if (GET_CODE (function) != SYMBOL_REF)
10009 function = memory_address (FUNCTION_MODE, function);
10011 /* Generate the actual call instruction and save the return value. */
10012 #ifdef HAVE_untyped_call
/* Preferred path: the untyped_call pattern saves every possible return
   register into RESULT via the vector from result_vector.  */
10013 if (HAVE_untyped_call)
10014 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
10015 result, result_vector (1, result)));
10018 #ifdef HAVE_call_value
10019 if (HAVE_call_value)
10023 /* Locate the unique return register. It is not possible to
10024 express a call that sets more than one return register using
10025 call_value; use untyped_call for that. In fact, untyped_call
10026 only needs to save the return registers in the given block. */
10027 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
10028 if ((mode = apply_result_mode[regno]) != VOIDmode)
10031 abort (); /* HAVE_untyped_call required. */
10032 valreg = gen_rtx_REG (mode, regno);
10035 emit_call_insn (gen_call_value (valreg,
10036 gen_rtx_MEM (FUNCTION_MODE, function),
10037 const0_rtx, NULL_RTX, const0_rtx));
10039 emit_move_insn (change_address (result, GET_MODE (valreg),
10047 /* Find the CALL insn we just emitted. */
10048 for (call_insn = get_last_insn ();
10049 call_insn && GET_CODE (call_insn) != CALL_INSN;
10050 call_insn = PREV_INSN (call_insn))
10056 /* Put the register usage information on the CALL. If there is already
10057 some usage information, put ours at the end. */
10058 if (CALL_INSN_FUNCTION_USAGE (call_insn))
10062 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
10063 link = XEXP (link, 1))
10066 XEXP (link, 1) = call_fusage;
10069 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
10071 /* Restore the stack. */
10072 #ifdef HAVE_save_stack_nonlocal
10073 if (HAVE_save_stack_nonlocal)
10074 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
10077 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
10079 /* Return the address of the result block. */
10080 return copy_addr_to_reg (XEXP (result, 0));
10083 /* Perform an untyped return. */
/* RESULT is the address of a block holding the saved return registers
   (the value produced by expand_builtin_apply).  If the target has an
   untyped_return pattern, emit it directly; otherwise copy each saved
   value back into its return register, mark them used, and emit a null
   return.  */
10086 expand_builtin_return (result)
10089 int size, align, regno;
10090 enum machine_mode mode;
10092 rtx call_fusage = 0;
/* Make sure apply_result_mode has been initialized before it is read
   below.  */
10094 apply_result_size ();
10095 result = gen_rtx_MEM (BLKmode, result);
10097 #ifdef HAVE_untyped_return
10098 if (HAVE_untyped_return)
10100 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
10106 /* Restore the return value and note that each value is used. */
10108 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
10109 if ((mode = apply_result_mode[regno]) != VOIDmode)
10111 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
10112 if (size % align != 0)
10113 size = CEIL (size, align) * align;
10114 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
10115 emit_move_insn (reg,
10116 change_address (result, mode,
10117 plus_constant (XEXP (result, 0),
/* Accumulate a USE of each restored register so the return is known
   to depend on it.  */
10120 push_to_sequence (call_fusage);
10121 emit_insn (gen_rtx_USE (VOIDmode, reg));
10122 call_fusage = get_insns ();
10124 size += GET_MODE_SIZE (mode);
10127 /* Put the USE insns before the return. */
10128 emit_insns (call_fusage);
10130 /* Return whatever value was restored by jumping directly to the end
10131 of the function. */
10132 expand_null_return ();
10135 /* Expand code for a post- or pre- increment or decrement
10136 and return the RTX for the result.
10137 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
10140 expand_increment (exp, post, ignore)
10144 register rtx op0, op1;
10145 register rtx temp, value;
10146 register tree incremented = TREE_OPERAND (exp, 0);
10147 optab this_optab = add_optab;
10149 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
10150 int op0_is_copy = 0;
10151 int single_insn = 0;
10152 /* 1 means we can't store into OP0 directly,
10153 because it is a subreg narrower than a word,
10154 and we don't dare clobber the rest of the word. */
10155 int bad_subreg = 0;
10157 /* Stabilize any component ref that might need to be
10158 evaluated more than once below. */
10160 || TREE_CODE (incremented) == BIT_FIELD_REF
10161 || (TREE_CODE (incremented) == COMPONENT_REF
10162 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
10163 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
10164 incremented = stabilize_reference (incremented);
10165 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
10166 ones into save exprs so that they don't accidentally get evaluated
10167 more than once by the code below. */
10168 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
10169 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
10170 incremented = save_expr (incremented);
10172 /* Compute the operands as RTX.
10173 Note whether OP0 is the actual lvalue or a copy of it:
10174 I believe it is a copy iff it is a register or subreg
10175 and insns were generated in computing it. */
10177 temp = get_last_insn ();
10178 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
10180 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
10181 in place but instead must do sign- or zero-extension during assignment,
10182 so we copy it into a new register and let the code below use it as
10185 Note that we can safely modify this SUBREG since it is know not to be
10186 shared (it was made by the expand_expr call above). */
10188 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
10191 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
10195 else if (GET_CODE (op0) == SUBREG
10196 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
10198 /* We cannot increment this SUBREG in place. If we are
10199 post-incrementing, get a copy of the old value. Otherwise,
10200 just mark that we cannot increment in place. */
10202 op0 = copy_to_reg (op0);
10207 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
10208 && temp != get_last_insn ());
10209 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
10210 EXPAND_MEMORY_USE_BAD);
10212 /* Decide whether incrementing or decrementing. */
10213 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
10214 || TREE_CODE (exp) == PREDECREMENT_EXPR)
10215 this_optab = sub_optab;
10217 /* Convert decrement by a constant into a negative increment. */
10218 if (this_optab == sub_optab
10219 && GET_CODE (op1) == CONST_INT)
10221 op1 = GEN_INT (- INTVAL (op1));
10222 this_optab = add_optab;
10225 /* For a preincrement, see if we can do this with a single instruction. */
10228 icode = (int) this_optab->handlers[(int) mode].insn_code;
10229 if (icode != (int) CODE_FOR_nothing
10230 /* Make sure that OP0 is valid for operands 0 and 1
10231 of the insn we want to queue. */
10232 && (*insn_operand_predicate[icode][0]) (op0, mode)
10233 && (*insn_operand_predicate[icode][1]) (op0, mode)
10234 && (*insn_operand_predicate[icode][2]) (op1, mode))
10238 /* If OP0 is not the actual lvalue, but rather a copy in a register,
10239 then we cannot just increment OP0. We must therefore contrive to
10240 increment the original value. Then, for postincrement, we can return
10241 OP0 since it is a copy of the old value. For preincrement, expand here
10242 unless we can do it with a single insn.
10244 Likewise if storing directly into OP0 would clobber high bits
10245 we need to preserve (bad_subreg). */
10246 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
10248 /* This is the easiest way to increment the value wherever it is.
10249 Problems with multiple evaluation of INCREMENTED are prevented
10250 because either (1) it is a component_ref or preincrement,
10251 in which case it was stabilized above, or (2) it is an array_ref
10252 with constant index in an array in a register, which is
10253 safe to reevaluate. */
10254 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
10255 || TREE_CODE (exp) == PREDECREMENT_EXPR)
10256 ? MINUS_EXPR : PLUS_EXPR),
10259 TREE_OPERAND (exp, 1));
10261 while (TREE_CODE (incremented) == NOP_EXPR
10262 || TREE_CODE (incremented) == CONVERT_EXPR)
10264 newexp = convert (TREE_TYPE (incremented), newexp);
10265 incremented = TREE_OPERAND (incremented, 0);
10268 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
10269 return post ? op0 : temp;
10274 /* We have a true reference to the value in OP0.
10275 If there is an insn to add or subtract in this mode, queue it.
10276 Queueing the increment insn avoids the register shuffling
10277 that often results if we must increment now and first save
10278 the old value for subsequent use. */
10280 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
10281 op0 = stabilize (op0);
10284 icode = (int) this_optab->handlers[(int) mode].insn_code;
10285 if (icode != (int) CODE_FOR_nothing
10286 /* Make sure that OP0 is valid for operands 0 and 1
10287 of the insn we want to queue. */
10288 && (*insn_operand_predicate[icode][0]) (op0, mode)
10289 && (*insn_operand_predicate[icode][1]) (op0, mode))
10291 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
10292 op1 = force_reg (mode, op1);
10294 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
10296 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
10298 rtx addr = (general_operand (XEXP (op0, 0), mode)
10299 ? force_reg (Pmode, XEXP (op0, 0))
10300 : copy_to_reg (XEXP (op0, 0)));
10303 op0 = change_address (op0, VOIDmode, addr);
10304 temp = force_reg (GET_MODE (op0), op0);
10305 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
10306 op1 = force_reg (mode, op1);
10308 /* The increment queue is LIFO, thus we have to `queue'
10309 the instructions in reverse order. */
10310 enqueue_insn (op0, gen_move_insn (op0, temp));
10311 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
10316 /* Preincrement, or we can't increment with one simple insn. */
10318 /* Save a copy of the value before inc or dec, to return it later. */
10319 temp = value = copy_to_reg (op0);
10321 /* Arrange to return the incremented value. */
10322 /* Copy the rtx because expand_binop will protect from the queue,
10323 and the results of that would be invalid for us to return
10324 if our caller does emit_queue before using our result. */
10325 temp = copy_rtx (value = op0);
10327 /* Increment however we can. */
10328 op1 = expand_binop (mode, this_optab, value, op1,
10329 current_function_check_memory_usage ? NULL_RTX : op0,
10330 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
10331 /* Make sure the value is stored into OP0. */
10333 emit_move_insn (op0, op1);
10338 /* Expand all function calls contained within EXP, innermost ones first.
10339 But don't look within expressions that have sequence points.
10340 For each CALL_EXPR, record the rtx for its value
10341 in the CALL_EXPR_RTL field. */
/* NOTE(review): this extract elides some original source lines (the embedded
   numbering is discontinuous); only the visible lines are annotated.  */
10344 preexpand_calls (exp)
10347 register int nops, i;
/* Classify EXP once; only certain tree-code classes can contain calls.  */
10348 int type = TREE_CODE_CLASS (TREE_CODE (exp));
/* Global switch: when preexpansion is disabled, do nothing.  */
10350 if (! do_preexpand_calls)
10353 /* Only expressions and references can contain calls. */
10355 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
10358 switch (TREE_CODE (exp))
10361 /* Do nothing if already expanded. */
10362 if (CALL_EXPR_RTL (exp) != 0
10363 /* Do nothing if the call returns a variable-sized object. */
10364 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
10365 /* Do nothing to built-in functions. */
10366 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
10367 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
10369 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
/* Expand the call now and remember its value rtx on the CALL_EXPR node.  */
10372 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
10375 case COMPOUND_EXPR:
10377 case TRUTH_ANDIF_EXPR:
10378 case TRUTH_ORIF_EXPR:
10379 /* If we find one of these, then we can be sure
10380 the adjust will be done for it (since it makes jumps).
10381 Do it now, so that if this is inside an argument
10382 of a function, we don't get the stack adjustment
10383 after some other args have already been pushed. */
10384 do_pending_stack_adjust ();
10389 case WITH_CLEANUP_EXPR:
10390 case CLEANUP_POINT_EXPR:
10391 case TRY_CATCH_EXPR:
/* A SAVE_EXPR that already has an rtx needs no further work.  */
10395 if (SAVE_EXPR_RTL (exp) != 0)
/* Default: recurse into each operand that can itself contain calls.  */
10402 nops = tree_code_length[(int) TREE_CODE (exp)];
10403 for (i = 0; i < nops; i++)
10404 if (TREE_OPERAND (exp, i) != 0)
10406 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
10407 if (type == 'e' || type == '<' || type == '1' || type == '2'
10409 preexpand_calls (TREE_OPERAND (exp, i));
10413 /* At the start of a function, record that we have no previously-pushed
10414 arguments waiting to be popped. */
10417 init_pending_stack_adjust ()
/* Reset the global count of bytes of pushed-but-unpopped arguments.  */
10419 pending_stack_adjust = 0;
10422 /* When exiting from function, if safe, clear out any pending stack adjust
10423 so the adjustment won't get done.
10425 Note, if the current function calls alloca, then it must have a
10426 frame pointer regardless of the value of flag_omit_frame_pointer. */
10429 clear_pending_stack_adjust ()
/* Only applies on targets whose epilogue ignores the stack pointer.  */
10431 #ifdef EXIT_IGNORE_STACK
/* The inline-related tests keep the adjustment when this function might
   be inlined elsewhere — presumably inlined copies still need it; the
   leading condition is on an elided line, verify against full source.  */
10433 && (! flag_omit_frame_pointer || current_function_calls_alloca)
10434 && EXIT_IGNORE_STACK
10435 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
10436 && ! flag_inline_functions)
10437 pending_stack_adjust = 0;
10441 /* Pop any previously-pushed arguments that have not been popped yet. */
10444 do_pending_stack_adjust ()
/* Do nothing while popping is inhibited (inhibit_defer_pop nonzero).  */
10446 if (inhibit_defer_pop == 0)
10448 if (pending_stack_adjust != 0)
10449 adjust_stack (GEN_INT (pending_stack_adjust));
/* The adjustment has been emitted; nothing is pending any more.  */
10450 pending_stack_adjust = 0;
10454 /* Expand conditional expressions. */
10456 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
10457 LABEL is an rtx of code CODE_LABEL, in this function and all the
10461 jumpifnot (exp, label)
/* Pass LABEL in the false-label slot: do_jump takes it when EXP is zero.  */
10465 do_jump (exp, label, NULL_RTX);
10468 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
10471 jumpif (exp, label)
/* Pass LABEL in the true-label slot: do_jump takes it when EXP is nonzero.  */
10475 do_jump (exp, NULL_RTX, label);
10478 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
10479 the result is zero, or IF_TRUE_LABEL if the result is one.
10480 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
10481 meaning fall through in that case.
10483 do_jump always does any pending stack adjust except when it does not
10484 actually perform a jump. An example where there is no jump
10485 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
10487 This function is responsible for optimizing cases such as
10488 &&, || and comparison operators in EXP. */
/* NOTE(review): this extract elides many original lines, including several
   `case' labels of the big switch; annotations cover only what is visible.  */
10491 do_jump (exp, if_false_label, if_true_label)
10493 rtx if_false_label, if_true_label;
10495 register enum tree_code code = TREE_CODE (exp);
10496 /* Some cases need to create a label to jump to
10497 in order to properly fall through.
10498 These cases set DROP_THROUGH_LABEL nonzero. */
10499 rtx drop_through_label = 0;
/* COMPARISON, when set, is a condition rtx branched on after the switch.  */
10501 rtx comparison = 0;
10504 enum machine_mode mode;
10506 #ifdef MAX_INTEGER_COMPUTATION_MODE
10507 check_max_integer_computation_mode (exp);
/* Constant EXP: the target label is known at compile time.  */
10518 temp = integer_zerop (exp) ? if_false_label : if_true_label;
10524 /* This is not true with #pragma weak */
10526 /* The address of something can never be zero. */
10528 emit_jump (if_true_label);
/* Conversion case: test whether the operand is a reference (the case
   label itself is on an elided line — verify against full source).  */
10533 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
10534 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
10535 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
10538 /* If we are narrowing the operand, we have to do the compare in the
10540 if ((TYPE_PRECISION (TREE_TYPE (exp))
10541 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10543 case NON_LVALUE_EXPR:
10544 case REFERENCE_EXPR:
10549 /* These cannot change zero->non-zero or vice versa. */
10550 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10554 /* This is never less insns than evaluating the PLUS_EXPR followed by
10555 a test and can be longer if the test is eliminated. */
10557 /* Reduce to minus. */
10558 exp = build (MINUS_EXPR, TREE_TYPE (exp),
10559 TREE_OPERAND (exp, 0),
10560 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
10561 TREE_OPERAND (exp, 1))));
10562 /* Process as MINUS. */
10566 /* Non-zero iff operands of minus differ. */
10567 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
10568 TREE_OPERAND (exp, 0),
10569 TREE_OPERAND (exp, 1)),
10574 /* If we are AND'ing with a small constant, do this comparison in the
10575 smallest type that fits. If the machine doesn't have comparisons
10576 that small, it will be converted back to the wider comparison.
10577 This helps if we are testing the sign bit of a narrower object.
10578 combine can't do this for us because it can't know whether a
10579 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
10581 if (! SLOW_BYTE_ACCESS
10582 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
10583 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
10584 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
10585 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
10586 && (type = type_for_mode (mode, 1)) != 0
10587 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10588 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10589 != CODE_FOR_nothing))
10591 do_jump (convert (type, exp), if_false_label, if_true_label);
10596 case TRUTH_NOT_EXPR:
/* Logical NOT: simply swap the two target labels.  */
10597 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10600 case TRUTH_ANDIF_EXPR:
/* Short-circuit AND: the first operand false decides the result, so
   make sure there is a false label to jump to.  */
10601 if (if_false_label == 0)
10602 if_false_label = drop_through_label = gen_label_rtx ();
10603 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
10604 start_cleanup_deferral ();
10605 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10606 end_cleanup_deferral ();
10609 case TRUTH_ORIF_EXPR:
/* Short-circuit OR: symmetric to ANDIF, with the true label.  */
10610 if (if_true_label == 0)
10611 if_true_label = drop_through_label = gen_label_rtx ();
10612 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
10613 start_cleanup_deferral ();
10614 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10615 end_cleanup_deferral ();
10618 case COMPOUND_EXPR:
/* Evaluate the first operand for side effects only, then jump on
   the second operand.  */
10619 push_temp_slots ();
10620 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
10621 preserve_temp_slots (NULL_RTX);
10622 free_temp_slots ();
10625 do_pending_stack_adjust ();
10626 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10629 case COMPONENT_REF:
10630 case BIT_FIELD_REF:
10633 int bitsize, bitpos, unsignedp;
10634 enum machine_mode mode;
10640 /* Get description of this reference. We don't actually care
10641 about the underlying object here. */
10642 get_inner_reference (exp, &bitsize, &bitpos, &offset,
10643 &mode, &unsignedp, &volatilep,
/* If the field fits a narrower type the machine can compare directly,
   do the test in that narrower type.  */
10646 type = type_for_size (bitsize, unsignedp);
10647 if (! SLOW_BYTE_ACCESS
10648 && type != 0 && bitsize >= 0
10649 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10650 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10651 != CODE_FOR_nothing))
10653 do_jump (convert (type, exp), if_false_label, if_true_label);
10660 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10661 if (integer_onep (TREE_OPERAND (exp, 1))
10662 && integer_zerop (TREE_OPERAND (exp, 2)))
10663 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10665 else if (integer_zerop (TREE_OPERAND (exp, 1))
10666 && integer_onep (TREE_OPERAND (exp, 2)))
10667 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
/* General conditional: branch on the condition, then do_jump each arm.  */
10671 register rtx label1 = gen_label_rtx ();
10672 drop_through_label = gen_label_rtx ();
10674 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10676 start_cleanup_deferral ();
10677 /* Now the THEN-expression. */
10678 do_jump (TREE_OPERAND (exp, 1),
10679 if_false_label ? if_false_label : drop_through_label,
10680 if_true_label ? if_true_label : drop_through_label);
10681 /* In case the do_jump just above never jumps. */
10682 do_pending_stack_adjust ();
10683 emit_label (label1);
10685 /* Now the ELSE-expression. */
10686 do_jump (TREE_OPERAND (exp, 2),
10687 if_false_label ? if_false_label : drop_through_label,
10688 if_true_label ? if_true_label : drop_through_label);
10689 end_cleanup_deferral ();
/* Equality test: complex values are equal iff both the real and the
   imaginary parts are equal, built here as an ANDIF of two EQ_EXPRs.  */
10695 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10697 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10698 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10700 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10701 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10704 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10705 fold (build (EQ_EXPR, TREE_TYPE (exp),
10706 fold (build1 (REALPART_EXPR,
10707 TREE_TYPE (inner_type),
10709 fold (build1 (REALPART_EXPR,
10710 TREE_TYPE (inner_type),
10712 fold (build (EQ_EXPR, TREE_TYPE (exp),
10713 fold (build1 (IMAGPART_EXPR,
10714 TREE_TYPE (inner_type),
10716 fold (build1 (IMAGPART_EXPR,
10717 TREE_TYPE (inner_type),
10719 if_false_label, if_true_label);
/* Comparison against zero reduces to a plain truth test, labels swapped.  */
10722 else if (integer_zerop (TREE_OPERAND (exp, 1)))
10723 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
/* Wide integers with no single compare insn go word by word.  */
10725 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10726 && !can_compare_p (TYPE_MODE (inner_type)))
10727 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10729 comparison = compare (exp, EQ, EQ);
/* Inequality test: mirror image of the equality case above, with ORIF
   of two NE_EXPRs for the complex case.  */
10735 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10737 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10738 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10740 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10741 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10744 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10745 fold (build (NE_EXPR, TREE_TYPE (exp),
10746 fold (build1 (REALPART_EXPR,
10747 TREE_TYPE (inner_type),
10749 fold (build1 (REALPART_EXPR,
10750 TREE_TYPE (inner_type),
10752 fold (build (NE_EXPR, TREE_TYPE (exp),
10753 fold (build1 (IMAGPART_EXPR,
10754 TREE_TYPE (inner_type),
10756 fold (build1 (IMAGPART_EXPR,
10757 TREE_TYPE (inner_type),
10759 if_false_label, if_true_label);
10762 else if (integer_zerop (TREE_OPERAND (exp, 1)))
10763 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10765 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10766 && !can_compare_p (TYPE_MODE (inner_type)))
10767 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10769 comparison = compare (exp, NE, NE);
/* Ordering comparisons (LT/LE/GT/GE — the case labels are on elided
   lines): wide integers go word by word, otherwise build a condition rtx
   choosing the signed or unsigned variant.  */
10774 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10776 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10777 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10779 comparison = compare (exp, LT, LTU);
10783 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10785 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10786 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10788 comparison = compare (exp, LE, LEU);
10792 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10794 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10795 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10797 comparison = compare (exp, GT, GTU);
10801 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10803 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10804 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10806 comparison = compare (exp, GE, GEU);
/* Default: evaluate EXP and test its value against zero.  */
10811 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10813 /* This is not needed any more and causes poor code since it causes
10814 comparisons and tests from non-SI objects to have different code
10816 /* Copy to register to avoid generating bad insns by cse
10817 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10818 if (!cse_not_expected && GET_CODE (temp) == MEM)
10819 temp = copy_to_reg (temp);
10821 do_pending_stack_adjust ();
10822 if (GET_CODE (temp) == CONST_INT)
10823 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10824 else if (GET_CODE (temp) == LABEL_REF)
10825 comparison = const_true_rtx;
10826 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10827 && !can_compare_p (GET_MODE (temp)))
10828 /* Note swapping the labels gives us not-equal. */
10829 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10830 else if (GET_MODE (temp) != VOIDmode)
10831 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
10832 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10833 GET_MODE (temp), NULL_RTX, 0);
10838 /* Do any postincrements in the expression that was tested. */
10841 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10842 straight into a conditional jump instruction as the jump condition.
10843 Otherwise, all the work has been done already. */
10845 if (comparison == const_true_rtx)
10848 emit_jump (if_true_label);
10850 else if (comparison == const0_rtx)
10852 if (if_false_label)
10853 emit_jump (if_false_label);
10855 else if (comparison)
10856 do_jump_for_compare (comparison, if_false_label, if_true_label);
10858 if (drop_through_label)
10860 /* If do_jump produces code that might be jumped around,
10861 do any stack adjusts from that code, before the place
10862 where control merges in. */
10863 do_pending_stack_adjust ();
10864 emit_label (drop_through_label);
10868 /* Given a comparison expression EXP for values too wide to be compared
10869 with one insn, test the comparison and jump to the appropriate label.
10870 The code of EXP is ignored; we always test GT if SWAP is 0,
10871 and LT if SWAP is 1. */
10874 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10877 rtx if_false_label, if_true_label;
/* SWAP selects which operand is expanded as OP0; swapping the operands
   is what turns the GT test into an LT test.  */
10879 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10880 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10881 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10882 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10883 rtx drop_through_label = 0;
10884 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
/* Supply a fall-through label for whichever target label is missing.  */
10887 if (! if_true_label || ! if_false_label)
10888 drop_through_label = gen_label_rtx ();
10889 if (! if_true_label)
10890 if_true_label = drop_through_label;
10891 if (! if_false_label)
10892 if_false_label = drop_through_label;
10894 /* Compare a word at a time, high order first. */
10895 for (i = 0; i < nwords; i++)
10898 rtx op0_word, op1_word;
10900 if (WORDS_BIG_ENDIAN)
10902 op0_word = operand_subword_force (op0, i, mode);
10903 op1_word = operand_subword_force (op1, i, mode);
/* Little-endian: the high-order word is at the highest index.  */
10907 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10908 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10911 /* All but high-order word must be compared as unsigned. */
10912 comp = compare_from_rtx (op0_word, op1_word,
10913 (unsignedp || i > 0) ? GTU : GT,
10914 unsignedp, word_mode, NULL_RTX, 0);
10915 if (comp == const_true_rtx)
10916 emit_jump (if_true_label);
10917 else if (comp != const0_rtx)
10918 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10920 /* Consider lower words only if these are equal. */
10921 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10923 if (comp == const_true_rtx)
10924 emit_jump (if_false_label);
10925 else if (comp != const0_rtx)
10926 do_jump_for_compare (comp, NULL_RTX, if_false_label);
/* Every word compared equal, so the strict comparison is false.  */
10929 if (if_false_label)
10930 emit_jump (if_false_label);
10931 if (drop_through_label)
10932 emit_label (drop_through_label);
10935 /* Compare OP0 with OP1, word at a time, in mode MODE.
10936 UNSIGNEDP says to do unsigned comparison.
10937 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
/* rtx-level twin of do_jump_by_parts_greater above; the two loop bodies
   are intentionally identical.  */
10940 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10941 enum machine_mode mode;
10944 rtx if_false_label, if_true_label;
10946 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10947 rtx drop_through_label = 0;
/* Supply a fall-through label for whichever target label is missing.  */
10950 if (! if_true_label || ! if_false_label)
10951 drop_through_label = gen_label_rtx ();
10952 if (! if_true_label)
10953 if_true_label = drop_through_label;
10954 if (! if_false_label)
10955 if_false_label = drop_through_label;
10957 /* Compare a word at a time, high order first. */
10958 for (i = 0; i < nwords; i++)
10961 rtx op0_word, op1_word;
10963 if (WORDS_BIG_ENDIAN)
10965 op0_word = operand_subword_force (op0, i, mode);
10966 op1_word = operand_subword_force (op1, i, mode);
/* Little-endian: the high-order word is at the highest index.  */
10970 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10971 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10974 /* All but high-order word must be compared as unsigned. */
10975 comp = compare_from_rtx (op0_word, op1_word,
10976 (unsignedp || i > 0) ? GTU : GT,
10977 unsignedp, word_mode, NULL_RTX, 0);
10978 if (comp == const_true_rtx)
10979 emit_jump (if_true_label);
10980 else if (comp != const0_rtx)
10981 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10983 /* Consider lower words only if these are equal. */
10984 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10986 if (comp == const_true_rtx)
10987 emit_jump (if_false_label);
10988 else if (comp != const0_rtx)
10989 do_jump_for_compare (comp, NULL_RTX, if_false_label);
/* Every word compared equal, so OP0 is not greater than OP1.  */
10992 if (if_false_label)
10993 emit_jump (if_false_label);
10994 if (drop_through_label)
10995 emit_label (drop_through_label);
10998 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10999 with one insn, test the comparison and jump to the appropriate label. */
11002 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
11004 rtx if_false_label, if_true_label;
11006 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
11007 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
11008 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
11009 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
11011 rtx drop_through_label = 0;
/* With no false label, an unequal result simply falls through.  */
11013 if (! if_false_label)
11014 drop_through_label = if_false_label = gen_label_rtx ();
/* Test the operands word by word; a failing word test routes to the
   false label.  NOTE(review): the polarity of the folded (const) branches
   is hard to confirm from this elided extract — verify against full source.  */
11016 for (i = 0; i < nwords; i++)
11018 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
11019 operand_subword_force (op1, i, mode),
11020 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
11021 word_mode, NULL_RTX, 0);
11022 if (comp == const_true_rtx)
11023 emit_jump (if_false_label);
11024 else if (comp != const0_rtx)
11025 do_jump_for_compare (comp, if_false_label, NULL_RTX);
/* Control reaches here only when no word test diverted to the false
   label, i.e. the values are equal.  */
11029 emit_jump (if_true_label);
11030 if (drop_through_label)
11031 emit_label (drop_through_label);
11034 /* Jump according to whether OP0 is 0.
11035 We assume that OP0 has an integer mode that is too wide
11036 for the available compare insns. */
11039 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
11041 rtx if_false_label, if_true_label;
11043 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
11046 rtx drop_through_label = 0;
11048 /* The fastest way of doing this comparison on almost any machine is to
11049 "or" all the words and compare the result. If all have to be loaded
11050 from memory and this is a very wide item, it's possible this may
11051 be slower, but that's highly unlikely. */
11053 part = gen_reg_rtx (word_mode);
11054 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
/* Fold the remaining words in with inclusive-or.  expand_binop may
   return 0, which abandons this strategy (tested by PART below).  */
11055 for (i = 1; i < nwords && part != 0; i++)
11056 part = expand_binop (word_mode, ior_optab, part,
11057 operand_subword_force (op0, i, GET_MODE (op0)),
11058 part, 1, OPTAB_WIDEN);
/* Compare the OR of all words against zero in a single test.
   NOTE(review): the label polarity in the folded branches below is hard
   to confirm from this elided extract — verify against the full source.  */
11062 rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
11065 if (comp == const_true_rtx)
11066 emit_jump (if_false_label);
11067 else if (comp == const0_rtx)
11068 emit_jump (if_true_label);
11070 do_jump_for_compare (comp, if_false_label, if_true_label);
11075 /* If we couldn't do the "or" simply, do this with a series of compares. */
11076 if (! if_false_label)
11077 drop_through_label = if_false_label = gen_label_rtx ();
11079 for (i = 0; i < nwords; i++)
11081 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
11083 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
11084 if (comp == const_true_rtx)
11085 emit_jump (if_false_label);
11086 else if (comp != const0_rtx)
11087 do_jump_for_compare (comp, if_false_label, NULL_RTX);
/* No word test diverted control, so take the true label.  */
11091 emit_jump (if_true_label);
11093 if (drop_through_label)
11094 emit_label (drop_through_label);
11097 /* Given a comparison expression in rtl form, output conditional branches to
11098 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
11101 do_jump_for_compare (comparison, if_false_label, if_true_label)
11102 rtx comparison, if_false_label, if_true_label;
/* Easy case: a branch-on-condition generator exists for this rtx code;
   its target operand is on an elided line (presumably IF_TRUE_LABEL —
   verify against the full source).  */
11106 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
11107 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])
/* When both labels are wanted, the conditional branch handles one and
   an unconditional jump handles the false label.  */
11112 if (if_false_label)
11113 emit_jump (if_false_label);
11115 else if (if_false_label)
11117 rtx first = get_last_insn (), insn, branch;
11120 /* Output the branch with the opposite condition. Then try to invert
11121 what is generated. If more than one insn is a branch, or if the
11122 branch is not the last insn written, abort. If we can't invert
11123 the branch, emit make a true label, redirect this jump to that,
11124 emit a jump to the false label and define the true label. */
11125 /* ??? Note that we wouldn't have to do any of this nonsense if
11126 we passed both labels into a combined compare-and-branch.
11127 Ah well, jump threading does a good job of repairing the damage. */
11129 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
11130 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])
11135 /* Here we get the first insn that was just emitted. It used to be the
11136 case that, on some machines, emitting the branch would discard
11137 the previous compare insn and emit a replacement. This isn't
11138 done anymore, but abort if we see that FIRST is deleted. */
11141 first = get_insns ();
11142 else if (INSN_DELETED_P (first))
11145 first = NEXT_INSN (first);
11147 /* Look for multiple branches in this sequence, as might be generated
11148 for a multi-word integer comparison. */
11152 for (insn = first; insn ; insn = NEXT_INSN (insn))
11153 if (GET_CODE (insn) == JUMP_INSN)
11159 /* If we've got one branch at the end of the sequence,
11160 we can try to reverse it. */
11162 if (br_count == 1 && NEXT_INSN (branch) == NULL_RTX)
/* Make sure the jump's JUMP_LABEL field is filled in before inverting.  */
11165 insn_label = XEXP (condjump_label (branch), 0);
11166 JUMP_LABEL (branch) = insn_label;
11168 if (insn_label != if_false_label)
11171 if (invert_jump (branch, if_false_label))
11175 /* Multiple branches, or reversion failed. Convert to branches
11176 around an unconditional jump. */
11178 if_true_label = gen_label_rtx ();
11179 for (insn = first; insn; insn = NEXT_INSN (insn))
11180 if (GET_CODE (insn) == JUMP_INSN)
11183 insn_label = XEXP (condjump_label (insn), 0);
11184 JUMP_LABEL (insn) = insn_label;
/* Retarget branches aimed at the false label so they skip the new
   unconditional jump below.  */
11186 if (insn_label == if_false_label)
11187 redirect_jump (insn, if_true_label);
11189 emit_jump (if_false_label);
11190 emit_label (if_true_label);
11194 /* Generate code for a comparison expression EXP
11195 (including code to compute the values to be compared)
11196 and set (CC0) according to the result.
11197 SIGNED_CODE should be the rtx operation for this comparison for
11198 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
11200 We force a stack adjustment unless there are currently
11201 things pushed on the stack that aren't yet used. */
11204 compare (exp, signed_code, unsigned_code)
11206 enum rtx_code signed_code, unsigned_code;
/* Expand both operands; the signedness of operand 0's type selects
   between SIGNED_CODE and UNSIGNED_CODE.  */
11209 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
11211 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
11212 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
11213 register enum machine_mode mode = TYPE_MODE (type);
11214 int unsignedp = TREE_UNSIGNED (type);
11215 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
11217 #ifdef HAVE_canonicalize_funcptr_for_compare
11218 /* If function pointers need to be "canonicalized" before they can
11219 be reliably compared, then canonicalize them. */
11220 if (HAVE_canonicalize_funcptr_for_compare
11221 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
11222 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
11225 rtx new_op0 = gen_reg_rtx (mode);
11227 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
/* Same canonicalization for the second operand.  */
11231 if (HAVE_canonicalize_funcptr_for_compare
11232 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
11233 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
11236 rtx new_op1 = gen_reg_rtx (mode);
11238 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
/* Delegate to the rtx-level routine; a size rtx is passed only under a
   condition on an elided line (presumably MODE == BLKmode — verify).  */
11243 return compare_from_rtx (op0, op1, code, unsignedp, mode,
11245 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
11246 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
11249 /* Like compare but expects the values to compare as two rtx's.
11250 The decision as to signed or unsigned comparison must be made by the caller.
11252 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
11255 If ALIGN is non-zero, it is the alignment of this type; if zero, the
11256 size of MODE should be used. */
11259 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
11260 register rtx op0, op1;
11261 enum rtx_code code;
11263 enum machine_mode mode;
11269 /* If one operand is constant, make it the second one. Only do this
11270 if the other operand is not constant as well. */
11272 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
11273 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
/* Swapping the operands requires swapping the comparison code too
   (LT becomes GT, etc.).  */
11278 code = swap_condition (code);
/* -fforce-mem: load memory operands into registers before comparing.  */
11281 if (flag_force_mem)
11283 op0 = force_not_mem (op0);
11284 op1 = force_not_mem (op1);
11287 do_pending_stack_adjust ();
/* Two integer constants: fold the comparison at compile time.  */
11289 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
11290 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
11294 /* There's no need to do this now that combine.c can eliminate lots of
11295 sign extensions. This can be less efficient in certain cases on other
11298 /* If this is a signed equality comparison, we can do it as an
11299 unsigned comparison since zero-extension is cheaper than sign
11300 extension and comparisons with zero are done as unsigned. This is
11301 the case even on machines that can do fast sign extension, since
11302 zero-extension is easier to combine with other operations than
11303 sign-extension is. If we are comparing against a constant, we must
11304 convert it to what it would look like unsigned. */
11305 if ((code == EQ || code == NE) && ! unsignedp
11306 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
/* Mask a constant operand so it looks like its unsigned interpretation
   in OP0's mode.  */
11308 if (GET_CODE (op1) == CONST_INT
11309 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
11310 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
/* Emit the compare insn and return a (CODE cc0 0) rtx describing the
   condition just set.  */
11315 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
11317 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
11320 /* Generate code to calculate EXP using a store-flag instruction
11321 and return an rtx for the result. EXP is either a comparison
11322 or a TRUTH_NOT_EXPR whose operand is a comparison.
11324 If TARGET is nonzero, store the result there if convenient.
11326 If ONLY_CHEAP is non-zero, only do this if it is likely to be very cheap.
11329 Return zero if there is no suitable set-flag instruction
11330 available on this machine.
11332 Once expand_expr has been called on the arguments of the comparison,
11333 we are committed to doing the store flag, since it is not safe to
11334 re-evaluate the expression. We emit the store-flag insn by calling
11335 emit_store_flag, but only expand the arguments if we have a reason
11336 to believe that emit_store_flag will be successful. If we think that
11337 it will, but it isn't, we have to simulate the store-flag with a
11338 set/jump/set sequence. */
11341 do_store_flag (exp, target, mode, only_cheap)
11344 enum machine_mode mode;
11347 enum rtx_code code;
11348 tree arg0, arg1, type;
11350 enum machine_mode operand_mode;
11354 enum insn_code icode;
11355 rtx subtarget = target;
11358 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
11359 result at the end. We can't simply invert the test since it would
11360 have already been inverted if it were valid. This case occurs for
11361 some floating-point comparisons. */
11363 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
11364 invert = 1, exp = TREE_OPERAND (exp, 0);
11366 arg0 = TREE_OPERAND (exp, 0);
11367 arg1 = TREE_OPERAND (exp, 1);
11368 type = TREE_TYPE (arg0);
11369 operand_mode = TYPE_MODE (type);
11370 unsignedp = TREE_UNSIGNED (type);
11372 /* We won't bother with BLKmode store-flag operations because it would mean
11373 passing a lot of information to emit_store_flag. */
11374 if (operand_mode == BLKmode)
11377 /* We won't bother with store-flag operations involving function pointers
11378 when function pointers must be canonicalized before comparisons. */
11379 #ifdef HAVE_canonicalize_funcptr_for_compare
11380 if (HAVE_canonicalize_funcptr_for_compare
11381 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
11382 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
11384 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
11385 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
11386 == FUNCTION_TYPE))))
11393 /* Get the rtx comparison code to use. We know that EXP is a comparison
11394 operation of some type. Some comparisons against 1 and -1 can be
11395 converted to comparisons with zero. Do so here so that the tests
11396 below will be aware that we have a comparison with zero. These
11397 tests will not catch constants in the first operand, but constants
11398 are rarely passed as the first operand. */
11400 switch (TREE_CODE (exp))
11409 if (integer_onep (arg1))
11410 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
11412 code = unsignedp ? LTU : LT;
11415 if (! unsignedp && integer_all_onesp (arg1))
11416 arg1 = integer_zero_node, code = LT;
11418 code = unsignedp ? LEU : LE;
11421 if (! unsignedp && integer_all_onesp (arg1))
11422 arg1 = integer_zero_node, code = GE;
11424 code = unsignedp ? GTU : GT;
11427 if (integer_onep (arg1))
11428 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
11430 code = unsignedp ? GEU : GE;
11436 /* Put a constant second. */
11437 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
11439 tem = arg0; arg0 = arg1; arg1 = tem;
11440 code = swap_condition (code);
11443 /* If this is an equality or inequality test of a single bit, we can
11444 do this by shifting the bit being tested to the low-order bit and
11445 masking the result with the constant 1. If the condition was EQ,
11446 we xor it with 1. This does not require an scc insn and is faster
11447 than an scc insn even if we have it. */
11449 if ((code == NE || code == EQ)
11450 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
11451 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11453 tree inner = TREE_OPERAND (arg0, 0);
11454 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
11457 /* If INNER is a right shift of a constant and it plus BITNUM does
11458 not overflow, adjust BITNUM and INNER. */
11460 if (TREE_CODE (inner) == RSHIFT_EXPR
11461 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
11462 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
11463 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
11464 < TYPE_PRECISION (type)))
11466 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
11467 inner = TREE_OPERAND (inner, 0);
11470 /* If we are going to be able to omit the AND below, we must do our
11471 operations as unsigned. If we must use the AND, we have a choice.
11472 Normally unsigned is faster, but for some machines signed is. */
11473 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
11474 #ifdef LOAD_EXTEND_OP
11475 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
11481 if (subtarget == 0 || GET_CODE (subtarget) != REG
11482 || GET_MODE (subtarget) != operand_mode
11483 || ! safe_from_p (subtarget, inner, 1))
11486 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
11489 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
11490 size_int (bitnum), subtarget, ops_unsignedp);
11492 if (GET_MODE (op0) != mode)
11493 op0 = convert_to_mode (mode, op0, ops_unsignedp);
11495 if ((code == EQ && ! invert) || (code == NE && invert))
11496 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
11497 ops_unsignedp, OPTAB_LIB_WIDEN);
11499 /* Put the AND last so it can combine with more things. */
11500 if (bitnum != TYPE_PRECISION (type) - 1)
11501 op0 = expand_and (op0, const1_rtx, subtarget);
11506 /* Now see if we are likely to be able to do this. Return if not. */
11507 if (! can_compare_p (operand_mode))
11509 icode = setcc_gen_code[(int) code];
11510 if (icode == CODE_FOR_nothing
11511 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
11513 /* We can only do this if it is one of the special cases that
11514 can be handled without an scc insn. */
11515 if ((code == LT && integer_zerop (arg1))
11516 || (! only_cheap && code == GE && integer_zerop (arg1)))
11518 else if (BRANCH_COST >= 0
11519 && ! only_cheap && (code == NE || code == EQ)
11520 && TREE_CODE (type) != REAL_TYPE
11521 && ((abs_optab->handlers[(int) operand_mode].insn_code
11522 != CODE_FOR_nothing)
11523 || (ffs_optab->handlers[(int) operand_mode].insn_code
11524 != CODE_FOR_nothing)))
11530 preexpand_calls (exp);
11531 if (subtarget == 0 || GET_CODE (subtarget) != REG
11532 || GET_MODE (subtarget) != operand_mode
11533 || ! safe_from_p (subtarget, arg1, 1))
11536 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
11537 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
11540 target = gen_reg_rtx (mode);
11542 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
11543 because, if the emit_store_flag does anything it will succeed and
11544 OP0 and OP1 will not be used subsequently. */
11546 result = emit_store_flag (target, code,
11547 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
11548 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
11549 operand_mode, unsignedp, 1);
11554 result = expand_binop (mode, xor_optab, result, const1_rtx,
11555 result, 0, OPTAB_LIB_WIDEN);
11559 /* If this failed, we have to do this with set/compare/jump/set code. */
11560 if (GET_CODE (target) != REG
11561 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
11562 target = gen_reg_rtx (GET_MODE (target));
11564 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
11565 result = compare_from_rtx (op0, op1, code, unsignedp,
11566 operand_mode, NULL_RTX, 0);
11567 if (GET_CODE (result) == CONST_INT)
11568 return (((result == const0_rtx && ! invert)
11569 || (result != const0_rtx && invert))
11570 ? const0_rtx : const1_rtx);
11572 label = gen_label_rtx ();
11573 if (bcc_gen_fctn[(int) code] == 0)
11576 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
11577 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
11578 emit_label (label);
11583 /* Generate a tablejump instruction (used for switch statements). */
11585 #ifdef HAVE_tablejump
11587 /* INDEX is the value being switched on, with the lowest value
11588 in the table already subtracted.
11589 MODE is its expected mode (needed if INDEX is constant).
11590 RANGE is the length of the jump table.
11591 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11593 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11594 index value is out of range. */
11597 do_tablejump (index, mode, range, table_label, default_label)
11598 rtx index, range, table_label, default_label;
11599 enum machine_mode mode;
11601 register rtx temp, vector;
11603 /* Do an unsigned comparison (in the proper mode) between the index
11604 expression and the value which represents the length of the range.
11605 Since we just finished subtracting the lower bound of the range
11606 from the index expression, this comparison allows us to simultaneously
11607 check that the original index expression value is both greater than
11608 or equal to the minimum value of the range and less than or equal to
11609 the maximum value of the range. */
11611 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11614 /* If index is in range, it must fit in Pmode.
11615 Convert to Pmode so we can index with it. */
11617 index = convert_to_mode (Pmode, index, 1);
11619 /* Don't let a MEM slip thru, because then INDEX that comes
11620 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11621 and break_out_memory_refs will go to work on it and mess it up. */
11622 #ifdef PIC_CASE_VECTOR_ADDRESS
11623 if (flag_pic && GET_CODE (index) != REG)
11624 index = copy_to_mode_reg (Pmode, index);
11627 /* If flag_force_addr were to affect this address
11628 it could interfere with the tricky assumptions made
11629 about addresses that contain label-refs,
11630 which may be valid only very near the tablejump itself. */
11631 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11632 GET_MODE_SIZE, because this indicates how large insns are. The other
11633 uses should all be Pmode, because they are addresses. This code
11634 could fail if addresses and insns are not the same size. */
11635 index = gen_rtx_PLUS (Pmode,
11636 gen_rtx_MULT (Pmode, index,
11637 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
11638 gen_rtx_LABEL_REF (Pmode, table_label));
11639 #ifdef PIC_CASE_VECTOR_ADDRESS
11641 index = PIC_CASE_VECTOR_ADDRESS (index);
11644 index = memory_address_noforce (CASE_VECTOR_MODE, index);
11645 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11646 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
11647 RTX_UNCHANGING_P (vector) = 1;
11648 convert_move (temp, vector, 0);
11650 emit_jump_insn (gen_tablejump (temp, table_label));
11652 /* If we are generating PIC code or if the table is PC-relative, the
11653 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11654 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11658 #endif /* HAVE_tablejump */