1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
30 #include "hard-reg-set.h"
33 #include "insn-config.h"
34 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
39 #include "typeclass.h"
45 /* Decide whether a function's arguments should be processed
46 from first to last or from last to first.
48 They should if the stack and args grow in opposite directions, but
49 only if we have push insns. */
53 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
54 #define PUSH_ARGS_REVERSED /* If it's last to first. */
59 #ifndef STACK_PUSH_CODE
60 #ifdef STACK_GROWS_DOWNWARD
61 #define STACK_PUSH_CODE PRE_DEC
63 #define STACK_PUSH_CODE PRE_INC
67 /* Assume that case vectors are not pc-relative. */
68 #ifndef CASE_VECTOR_PC_RELATIVE
69 #define CASE_VECTOR_PC_RELATIVE 0
72 /* Hook called by safe_from_p for language-specific tree codes. It is
73 up to the language front-end to install a hook if it has any such
74 codes that safe_from_p needs to know about. Since same_from_p will
75 recursively explore the TREE_OPERANDs of an expression, this hook
76 should not reexamine those pieces. This routine may recursively
77 call safe_from_p; it should always pass `0' as the TOP_P
79 int (*lang_safe_from_p) PARAMS ((rtx, tree));
81 /* If this is nonzero, we do not bother generating VOLATILE
82 around volatile memory references, and we are willing to
83 output indirect addresses. If cse is to follow, we reject
84 indirect addresses so a useful potential cse is generated;
85 if it is used only once, instruction combination will produce
86 the same indirect address eventually. */
89 /* Don't check memory usage, since code is being emitted to check a memory
90 usage. Used when current_function_check_memory_usage is true, to avoid
91 infinite recursion. */
92 static int in_check_memory_usage;
94 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
95 static tree placeholder_list = 0;
97 /* This structure is used by move_by_pieces to describe the move to
108 int explicit_inc_from;
109 unsigned HOST_WIDE_INT len;
110 HOST_WIDE_INT offset;
114 /* This structure is used by store_by_pieces to describe the clear to
117 struct store_by_pieces
123 unsigned HOST_WIDE_INT len;
124 HOST_WIDE_INT offset;
125 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
130 extern struct obstack permanent_obstack;
132 static rtx get_push_address PARAMS ((int));
134 static rtx enqueue_insn PARAMS ((rtx, rtx));
135 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
136 PARAMS ((unsigned HOST_WIDE_INT,
138 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
139 struct move_by_pieces *));
140 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
142 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
144 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
146 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
148 struct store_by_pieces *));
149 static rtx get_subtarget PARAMS ((rtx));
150 static int is_zeros_p PARAMS ((tree));
151 static int mostly_zeros_p PARAMS ((tree));
152 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
153 HOST_WIDE_INT, enum machine_mode,
154 tree, tree, unsigned int, int,
156 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
158 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
159 HOST_WIDE_INT, enum machine_mode,
160 tree, enum machine_mode, int,
161 unsigned int, HOST_WIDE_INT, int));
162 static enum memory_use_mode
163 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
164 static tree save_noncopied_parts PARAMS ((tree, tree));
165 static tree init_noncopied_parts PARAMS ((tree, tree));
166 static int fixed_type_p PARAMS ((tree));
167 static rtx var_rtx PARAMS ((tree));
168 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
169 static rtx expand_increment PARAMS ((tree, int, int));
170 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
171 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
172 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
174 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
175 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
177 /* Record for each mode whether we can move a register directly to or
178 from an object of that mode in memory. If we can't, we won't try
179 to use that mode directly when accessing a field of that mode. */
181 static char direct_load[NUM_MACHINE_MODES];
182 static char direct_store[NUM_MACHINE_MODES];
184 /* If a memory-to-memory move would take MOVE_RATIO or more simple
185 move-instruction sequences, we will do a movstr or libcall instead. */
188 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
191 /* If we are optimizing for space (-Os), cut down the default move ratio. */
192 #define MOVE_RATIO (optimize_size ? 3 : 15)
196 /* This macro is used to determine whether move_by_pieces should be called
197 to perform a structure copy. */
198 #ifndef MOVE_BY_PIECES_P
199 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
200 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
203 /* This array records the insn_code of insns to perform block moves. */
204 enum insn_code movstr_optab[NUM_MACHINE_MODES];
206 /* This array records the insn_code of insns to perform block clears. */
207 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
209 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
211 #ifndef SLOW_UNALIGNED_ACCESS
212 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
215 /* This is run once per compilation to set up which modes can be used
216 directly in memory and to initialize the block move optab. */
222 enum machine_mode mode;
228 /* Try indexing by frame ptr and try by stack ptr.
229 It is known that on the Convex the stack ptr isn't a valid index.
230 With luck, one or the other is valid on any machine. */
231 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
232 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
234 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
235 pat = PATTERN (insn);
237 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
238 mode = (enum machine_mode) ((int) mode + 1))
243 direct_load[(int) mode] = direct_store[(int) mode] = 0;
244 PUT_MODE (mem, mode);
245 PUT_MODE (mem1, mode);
247 /* See if there is some register that can be used in this mode and
248 directly loaded or stored from memory. */
250 if (mode != VOIDmode && mode != BLKmode)
251 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
252 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
255 if (! HARD_REGNO_MODE_OK (regno, mode))
258 reg = gen_rtx_REG (mode, regno);
261 SET_DEST (pat) = reg;
262 if (recog (pat, insn, &num_clobbers) >= 0)
263 direct_load[(int) mode] = 1;
265 SET_SRC (pat) = mem1;
266 SET_DEST (pat) = reg;
267 if (recog (pat, insn, &num_clobbers) >= 0)
268 direct_load[(int) mode] = 1;
271 SET_DEST (pat) = mem;
272 if (recog (pat, insn, &num_clobbers) >= 0)
273 direct_store[(int) mode] = 1;
276 SET_DEST (pat) = mem1;
277 if (recog (pat, insn, &num_clobbers) >= 0)
278 direct_store[(int) mode] = 1;
285 /* This is run at the start of compiling a function. */
290 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
293 pending_stack_adjust = 0;
294 stack_pointer_delta = 0;
295 inhibit_defer_pop = 0;
297 apply_args_value = 0;
303 struct expr_status *p;
308 ggc_mark_rtx (p->x_saveregs_value);
309 ggc_mark_rtx (p->x_apply_args_value);
310 ggc_mark_rtx (p->x_forced_labels);
321 /* Small sanity check that the queue is empty at the end of a function. */
324 finish_expr_for_function ()
330 /* Manage the queue of increment instructions to be output
331 for POSTINCREMENT_EXPR expressions, etc. */
333 /* Queue up to increment (or change) VAR later. BODY says how:
334 BODY should be the same thing you would pass to emit_insn
335 to increment right away. It will go to emit_insn later on.
337 The value is a QUEUED expression to be used in place of VAR
338 where you want to guarantee the pre-incrementation value of VAR. */
341 enqueue_insn (var, body)
344 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
345 body, pending_chain);
346 return pending_chain;
349 /* Use protect_from_queue to convert a QUEUED expression
350 into something that you can put immediately into an instruction.
351 If the queued incrementation has not happened yet,
352 protect_from_queue returns the variable itself.
353 If the incrementation has happened, protect_from_queue returns a temp
354 that contains a copy of the old value of the variable.
356 Any time an rtx which might possibly be a QUEUED is to be put
357 into an instruction, it must be passed through protect_from_queue first.
358 QUEUED expressions are not meaningful in instructions.
360 Do not pass a value through protect_from_queue and then hold
361 on to it for a while before putting it in an instruction!
362 If the queue is flushed in between, incorrect code will result. */
365 protect_from_queue (x, modify)
369 register RTX_CODE code = GET_CODE (x);
371 #if 0 /* A QUEUED can hang around after the queue is forced out. */
372 /* Shortcut for most common case. */
373 if (pending_chain == 0)
379 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
380 use of autoincrement. Make a copy of the contents of the memory
381 location rather than a copy of the address, but not if the value is
382 of mode BLKmode. Don't modify X in place since it might be
384 if (code == MEM && GET_MODE (x) != BLKmode
385 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
387 register rtx y = XEXP (x, 0);
388 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
390 MEM_COPY_ATTRIBUTES (new, x);
394 register rtx temp = gen_reg_rtx (GET_MODE (new));
395 emit_insn_before (gen_move_insn (temp, new),
401 /* Otherwise, recursively protect the subexpressions of all
402 the kinds of rtx's that can contain a QUEUED. */
405 rtx tem = protect_from_queue (XEXP (x, 0), 0);
406 if (tem != XEXP (x, 0))
412 else if (code == PLUS || code == MULT)
414 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
415 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
416 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
425 /* If the increment has not happened, use the variable itself. */
426 if (QUEUED_INSN (x) == 0)
427 return QUEUED_VAR (x);
428 /* If the increment has happened and a pre-increment copy exists,
430 if (QUEUED_COPY (x) != 0)
431 return QUEUED_COPY (x);
432 /* The increment has happened but we haven't set up a pre-increment copy.
433 Set one up now, and use it. */
434 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
435 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
437 return QUEUED_COPY (x);
440 /* Return nonzero if X contains a QUEUED expression:
441 if it contains anything that will be altered by a queued increment.
442 We handle only combinations of MEM, PLUS, MINUS and MULT operators
443 since memory addresses generally contain only those. */
449 register enum rtx_code code = GET_CODE (x);
455 return queued_subexp_p (XEXP (x, 0));
459 return (queued_subexp_p (XEXP (x, 0))
460 || queued_subexp_p (XEXP (x, 1)));
466 /* Perform all the pending incrementations. */
472 while ((p = pending_chain))
474 rtx body = QUEUED_BODY (p);
476 if (GET_CODE (body) == SEQUENCE)
478 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
479 emit_insn (QUEUED_BODY (p));
482 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
483 pending_chain = QUEUED_NEXT (p);
487 /* Copy data from FROM to TO, where the machine modes are not the same.
488 Both modes may be integer, or both may be floating.
489 UNSIGNEDP should be nonzero if FROM is an unsigned type.
490 This causes zero-extension instead of sign-extension. */
493 convert_move (to, from, unsignedp)
494 register rtx to, from;
497 enum machine_mode to_mode = GET_MODE (to);
498 enum machine_mode from_mode = GET_MODE (from);
499 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
500 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
504 /* rtx code for making an equivalent value. */
505 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
507 to = protect_from_queue (to, 1);
508 from = protect_from_queue (from, 0);
510 if (to_real != from_real)
513 /* If FROM is a SUBREG that indicates that we have already done at least
514 the required extension, strip it. We don't handle such SUBREGs as
517 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
518 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
519 >= GET_MODE_SIZE (to_mode))
520 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
521 from = gen_lowpart (to_mode, from), from_mode = to_mode;
523 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
526 if (to_mode == from_mode
527 || (from_mode == VOIDmode && CONSTANT_P (from)))
529 emit_move_insn (to, from);
533 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
535 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
538 if (VECTOR_MODE_P (to_mode))
539 from = gen_rtx_SUBREG (to_mode, from, 0);
541 to = gen_rtx_SUBREG (from_mode, to, 0);
543 emit_move_insn (to, from);
547 if (to_real != from_real)
554 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
556 /* Try converting directly if the insn is supported. */
557 if ((code = can_extend_p (to_mode, from_mode, 0))
560 emit_unop_insn (code, to, from, UNKNOWN);
565 #ifdef HAVE_trunchfqf2
566 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
568 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
572 #ifdef HAVE_trunctqfqf2
573 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
575 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
579 #ifdef HAVE_truncsfqf2
580 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
582 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
586 #ifdef HAVE_truncdfqf2
587 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
589 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
593 #ifdef HAVE_truncxfqf2
594 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
596 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
600 #ifdef HAVE_trunctfqf2
601 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
603 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
608 #ifdef HAVE_trunctqfhf2
609 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
611 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
615 #ifdef HAVE_truncsfhf2
616 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
618 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
622 #ifdef HAVE_truncdfhf2
623 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
625 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
629 #ifdef HAVE_truncxfhf2
630 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
632 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
636 #ifdef HAVE_trunctfhf2
637 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
639 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
644 #ifdef HAVE_truncsftqf2
645 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
647 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
651 #ifdef HAVE_truncdftqf2
652 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
654 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
658 #ifdef HAVE_truncxftqf2
659 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
661 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
665 #ifdef HAVE_trunctftqf2
666 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
668 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
673 #ifdef HAVE_truncdfsf2
674 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
676 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
680 #ifdef HAVE_truncxfsf2
681 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
683 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
687 #ifdef HAVE_trunctfsf2
688 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
690 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
694 #ifdef HAVE_truncxfdf2
695 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
697 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
701 #ifdef HAVE_trunctfdf2
702 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
704 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
716 libcall = extendsfdf2_libfunc;
720 libcall = extendsfxf2_libfunc;
724 libcall = extendsftf2_libfunc;
736 libcall = truncdfsf2_libfunc;
740 libcall = extenddfxf2_libfunc;
744 libcall = extenddftf2_libfunc;
756 libcall = truncxfsf2_libfunc;
760 libcall = truncxfdf2_libfunc;
772 libcall = trunctfsf2_libfunc;
776 libcall = trunctfdf2_libfunc;
788 if (libcall == (rtx) 0)
789 /* This conversion is not implemented yet. */
793 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
795 insns = get_insns ();
797 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
802 /* Now both modes are integers. */
804 /* Handle expanding beyond a word. */
805 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
806 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
813 enum machine_mode lowpart_mode;
814 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
816 /* Try converting directly if the insn is supported. */
817 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
820 /* If FROM is a SUBREG, put it into a register. Do this
821 so that we always generate the same set of insns for
822 better cse'ing; if an intermediate assignment occurred,
823 we won't be doing the operation directly on the SUBREG. */
824 if (optimize > 0 && GET_CODE (from) == SUBREG)
825 from = force_reg (from_mode, from);
826 emit_unop_insn (code, to, from, equiv_code);
829 /* Next, try converting via full word. */
830 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
831 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
832 != CODE_FOR_nothing))
834 if (GET_CODE (to) == REG)
835 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
836 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
837 emit_unop_insn (code, to,
838 gen_lowpart (word_mode, to), equiv_code);
842 /* No special multiword conversion insn; do it by hand. */
845 /* Since we will turn this into a no conflict block, we must ensure
846 that the source does not overlap the target. */
848 if (reg_overlap_mentioned_p (to, from))
849 from = force_reg (from_mode, from);
851 /* Get a copy of FROM widened to a word, if necessary. */
852 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
853 lowpart_mode = word_mode;
855 lowpart_mode = from_mode;
857 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
859 lowpart = gen_lowpart (lowpart_mode, to);
860 emit_move_insn (lowpart, lowfrom);
862 /* Compute the value to put in each remaining word. */
864 fill_value = const0_rtx;
869 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
870 && STORE_FLAG_VALUE == -1)
872 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
874 fill_value = gen_reg_rtx (word_mode);
875 emit_insn (gen_slt (fill_value));
881 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
882 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
884 fill_value = convert_to_mode (word_mode, fill_value, 1);
888 /* Fill the remaining words. */
889 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
891 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
892 rtx subword = operand_subword (to, index, 1, to_mode);
897 if (fill_value != subword)
898 emit_move_insn (subword, fill_value);
901 insns = get_insns ();
904 emit_no_conflict_block (insns, to, from, NULL_RTX,
905 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
909 /* Truncating multi-word to a word or less. */
910 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
911 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
913 if (!((GET_CODE (from) == MEM
914 && ! MEM_VOLATILE_P (from)
915 && direct_load[(int) to_mode]
916 && ! mode_dependent_address_p (XEXP (from, 0)))
917 || GET_CODE (from) == REG
918 || GET_CODE (from) == SUBREG))
919 from = force_reg (from_mode, from);
920 convert_move (to, gen_lowpart (word_mode, from), 0);
924 /* Handle pointer conversion. */ /* SPEE 900220. */
925 if (to_mode == PQImode)
927 if (from_mode != QImode)
928 from = convert_to_mode (QImode, from, unsignedp);
930 #ifdef HAVE_truncqipqi2
931 if (HAVE_truncqipqi2)
933 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
936 #endif /* HAVE_truncqipqi2 */
940 if (from_mode == PQImode)
942 if (to_mode != QImode)
944 from = convert_to_mode (QImode, from, unsignedp);
949 #ifdef HAVE_extendpqiqi2
950 if (HAVE_extendpqiqi2)
952 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
955 #endif /* HAVE_extendpqiqi2 */
960 if (to_mode == PSImode)
962 if (from_mode != SImode)
963 from = convert_to_mode (SImode, from, unsignedp);
965 #ifdef HAVE_truncsipsi2
966 if (HAVE_truncsipsi2)
968 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
971 #endif /* HAVE_truncsipsi2 */
975 if (from_mode == PSImode)
977 if (to_mode != SImode)
979 from = convert_to_mode (SImode, from, unsignedp);
984 #ifdef HAVE_extendpsisi2
985 if (! unsignedp && HAVE_extendpsisi2)
987 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
990 #endif /* HAVE_extendpsisi2 */
991 #ifdef HAVE_zero_extendpsisi2
992 if (unsignedp && HAVE_zero_extendpsisi2)
994 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
997 #endif /* HAVE_zero_extendpsisi2 */
1002 if (to_mode == PDImode)
1004 if (from_mode != DImode)
1005 from = convert_to_mode (DImode, from, unsignedp);
1007 #ifdef HAVE_truncdipdi2
1008 if (HAVE_truncdipdi2)
1010 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1013 #endif /* HAVE_truncdipdi2 */
1017 if (from_mode == PDImode)
1019 if (to_mode != DImode)
1021 from = convert_to_mode (DImode, from, unsignedp);
1026 #ifdef HAVE_extendpdidi2
1027 if (HAVE_extendpdidi2)
1029 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1032 #endif /* HAVE_extendpdidi2 */
1037 /* Now follow all the conversions between integers
1038 no more than a word long. */
1040 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1041 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1042 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1043 GET_MODE_BITSIZE (from_mode)))
1045 if (!((GET_CODE (from) == MEM
1046 && ! MEM_VOLATILE_P (from)
1047 && direct_load[(int) to_mode]
1048 && ! mode_dependent_address_p (XEXP (from, 0)))
1049 || GET_CODE (from) == REG
1050 || GET_CODE (from) == SUBREG))
1051 from = force_reg (from_mode, from);
1052 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1053 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1054 from = copy_to_reg (from);
1055 emit_move_insn (to, gen_lowpart (to_mode, from));
1059 /* Handle extension. */
1060 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1062 /* Convert directly if that works. */
1063 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1064 != CODE_FOR_nothing)
1066 emit_unop_insn (code, to, from, equiv_code);
1071 enum machine_mode intermediate;
1075 /* Search for a mode to convert via. */
1076 for (intermediate = from_mode; intermediate != VOIDmode;
1077 intermediate = GET_MODE_WIDER_MODE (intermediate))
1078 if (((can_extend_p (to_mode, intermediate, unsignedp)
1079 != CODE_FOR_nothing)
1080 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1081 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1082 GET_MODE_BITSIZE (intermediate))))
1083 && (can_extend_p (intermediate, from_mode, unsignedp)
1084 != CODE_FOR_nothing))
1086 convert_move (to, convert_to_mode (intermediate, from,
1087 unsignedp), unsignedp);
1091 /* No suitable intermediate mode.
1092 Generate what we need with shifts. */
1093 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1094 - GET_MODE_BITSIZE (from_mode), 0);
1095 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1096 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1098 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1101 emit_move_insn (to, tmp);
1106 /* Support special truncate insns for certain modes. */
1108 if (from_mode == DImode && to_mode == SImode)
1110 #ifdef HAVE_truncdisi2
1111 if (HAVE_truncdisi2)
1113 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1117 convert_move (to, force_reg (from_mode, from), unsignedp);
1121 if (from_mode == DImode && to_mode == HImode)
1123 #ifdef HAVE_truncdihi2
1124 if (HAVE_truncdihi2)
1126 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1130 convert_move (to, force_reg (from_mode, from), unsignedp);
1134 if (from_mode == DImode && to_mode == QImode)
1136 #ifdef HAVE_truncdiqi2
1137 if (HAVE_truncdiqi2)
1139 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1143 convert_move (to, force_reg (from_mode, from), unsignedp);
1147 if (from_mode == SImode && to_mode == HImode)
1149 #ifdef HAVE_truncsihi2
1150 if (HAVE_truncsihi2)
1152 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1156 convert_move (to, force_reg (from_mode, from), unsignedp);
1160 if (from_mode == SImode && to_mode == QImode)
1162 #ifdef HAVE_truncsiqi2
1163 if (HAVE_truncsiqi2)
1165 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1169 convert_move (to, force_reg (from_mode, from), unsignedp);
1173 if (from_mode == HImode && to_mode == QImode)
1175 #ifdef HAVE_trunchiqi2
1176 if (HAVE_trunchiqi2)
1178 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1182 convert_move (to, force_reg (from_mode, from), unsignedp);
1186 if (from_mode == TImode && to_mode == DImode)
1188 #ifdef HAVE_trunctidi2
1189 if (HAVE_trunctidi2)
1191 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1195 convert_move (to, force_reg (from_mode, from), unsignedp);
1199 if (from_mode == TImode && to_mode == SImode)
1201 #ifdef HAVE_trunctisi2
1202 if (HAVE_trunctisi2)
1204 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1208 convert_move (to, force_reg (from_mode, from), unsignedp);
1212 if (from_mode == TImode && to_mode == HImode)
1214 #ifdef HAVE_trunctihi2
1215 if (HAVE_trunctihi2)
1217 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1221 convert_move (to, force_reg (from_mode, from), unsignedp);
1225 if (from_mode == TImode && to_mode == QImode)
1227 #ifdef HAVE_trunctiqi2
1228 if (HAVE_trunctiqi2)
1230 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1234 convert_move (to, force_reg (from_mode, from), unsignedp);
1238 /* Handle truncation of volatile memrefs, and so on;
1239 the things that couldn't be truncated directly,
1240 and for which there was no special instruction. */
1241 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1243 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1244 emit_move_insn (to, temp);
1248 /* Mode combination is not recognized. */
1252 /* Return an rtx for a value that would result
1253 from converting X to mode MODE.
1254 Both X and MODE may be floating, or both integer.
1255 UNSIGNEDP is nonzero if X is an unsigned value.
1256 This can be done by referring to a part of X in place
1257 or by copying to a new temporary with conversion.
1259 This function *must not* call protect_from_queue
1260 except when putting X into an insn (in which case convert_move does it). */
1263 convert_to_mode (mode, x, unsignedp)
1264 enum machine_mode mode;
1268 return convert_modes (mode, VOIDmode, x, unsignedp);
1271 /* Return an rtx for a value that would result
1272 from converting X from mode OLDMODE to mode MODE.
1273 Both modes may be floating, or both integer.
1274 UNSIGNEDP is nonzero if X is an unsigned value.
1276 This can be done by referring to a part of X in place
1277 or by copying to a new temporary with conversion.
1279 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1281 This function *must not* call protect_from_queue
1282 except when putting X into an insn (in which case convert_move does it). */
1285 convert_modes (mode, oldmode, x, unsignedp)
1286 enum machine_mode mode, oldmode;
1292 /* If FROM is a SUBREG that indicates that we have already done at least
1293 the required extension, strip it. */
1295 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1296 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1297 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1298 x = gen_lowpart (mode, x);
1300 if (GET_MODE (x) != VOIDmode)
1301 oldmode = GET_MODE (x);
1303 if (mode == oldmode)
1306 /* There is one case that we must handle specially: If we are converting
1307 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1308 we are to interpret the constant as unsigned, gen_lowpart will do
1309 the wrong if the constant appears negative. What we want to do is
1310 make the high-order word of the constant zero, not all ones. */
1312 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1313 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1314 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1316 HOST_WIDE_INT val = INTVAL (x);
1318 if (oldmode != VOIDmode
1319 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1321 int width = GET_MODE_BITSIZE (oldmode);
1323 /* We need to zero extend VAL. */
1324 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1327 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1330 /* We can do this with a gen_lowpart if both desired and current modes
1331 are integer, and this is either a constant integer, a register, or a
1332 non-volatile MEM. Except for the constant case where MODE is no
1333 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1335 if ((GET_CODE (x) == CONST_INT
1336 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1337 || (GET_MODE_CLASS (mode) == MODE_INT
1338 && GET_MODE_CLASS (oldmode) == MODE_INT
1339 && (GET_CODE (x) == CONST_DOUBLE
1340 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1341 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1342 && direct_load[(int) mode])
1343 || (GET_CODE (x) == REG
1344 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1345 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1347 /* ?? If we don't know OLDMODE, we have to assume here that
1348 X does not need sign- or zero-extension. This may not be
1349 the case, but it's the best we can do. */
1350 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1351 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1353 HOST_WIDE_INT val = INTVAL (x);
1354 int width = GET_MODE_BITSIZE (oldmode);
1356 /* We must sign or zero-extend in this case. Start by
1357 zero-extending, then sign extend if we need to. */
1358 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1360 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1361 val |= (HOST_WIDE_INT) (-1) << width;
1363 return GEN_INT (trunc_int_for_mode (val, mode));
1366 return gen_lowpart (mode, x);
1369 temp = gen_reg_rtx (mode);
1370 convert_move (temp, x, unsignedp);
1374 /* This macro is used to determine what the largest unit size that
1375 move_by_pieces can use is. */
1377 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1378 move efficiently, as opposed to MOVE_MAX which is the maximum
1379 number of bytes we can move with a single instruction. */
1381 #ifndef MOVE_MAX_PIECES
1382 #define MOVE_MAX_PIECES MOVE_MAX
1385 /* Generate several move instructions to copy LEN bytes
1386 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1387 The caller must pass FROM and TO
1388 through protect_from_queue before calling.
1390 When TO is NULL, the emit_single_push_insn is used to push the
1393 ALIGN is maximum alignment we can assume. */
1396 move_by_pieces (to, from, len, align)
1398 unsigned HOST_WIDE_INT len;
1401 struct move_by_pieces data;
1402 rtx to_addr, from_addr = XEXP (from, 0);
1403 unsigned int max_size = MOVE_MAX_PIECES + 1;
1404 enum machine_mode mode = VOIDmode, tmode;
1405 enum insn_code icode;
1408 data.from_addr = from_addr;
1411 to_addr = XEXP (to, 0);
1414 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1415 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1417 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1424 #ifdef STACK_GROWS_DOWNWARD
1430 data.to_addr = to_addr;
1433 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1434 || GET_CODE (from_addr) == POST_INC
1435 || GET_CODE (from_addr) == POST_DEC);
1437 data.explicit_inc_from = 0;
1438 data.explicit_inc_to = 0;
1439 if (data.reverse) data.offset = len;
1442 /* If copying requires more than two move insns,
1443 copy addresses to registers (to make displacements shorter)
1444 and use post-increment if available. */
1445 if (!(data.autinc_from && data.autinc_to)
1446 && move_by_pieces_ninsns (len, align) > 2)
1448 /* Find the mode of the largest move... */
1449 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1450 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1451 if (GET_MODE_SIZE (tmode) < max_size)
1454 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1456 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1457 data.autinc_from = 1;
1458 data.explicit_inc_from = -1;
1460 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1462 data.from_addr = copy_addr_to_reg (from_addr);
1463 data.autinc_from = 1;
1464 data.explicit_inc_from = 1;
1466 if (!data.autinc_from && CONSTANT_P (from_addr))
1467 data.from_addr = copy_addr_to_reg (from_addr);
1468 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1470 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1472 data.explicit_inc_to = -1;
1474 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1476 data.to_addr = copy_addr_to_reg (to_addr);
1478 data.explicit_inc_to = 1;
1480 if (!data.autinc_to && CONSTANT_P (to_addr))
1481 data.to_addr = copy_addr_to_reg (to_addr);
1484 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1485 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1486 align = MOVE_MAX * BITS_PER_UNIT;
1488 /* First move what we can in the largest integer mode, then go to
1489 successively smaller modes. */
1491 while (max_size > 1)
1493 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1494 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1495 if (GET_MODE_SIZE (tmode) < max_size)
1498 if (mode == VOIDmode)
1501 icode = mov_optab->handlers[(int) mode].insn_code;
1502 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1503 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1505 max_size = GET_MODE_SIZE (mode);
1508 /* The code above should have handled everything. */
1513 /* Return number of insns required to move L bytes by pieces.
1514 ALIGN (in bytes) is maximum alignment we can assume. */
1516 static unsigned HOST_WIDE_INT
1517 move_by_pieces_ninsns (l, align)
1518 unsigned HOST_WIDE_INT l;
1521 unsigned HOST_WIDE_INT n_insns = 0;
1522 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1524 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1525 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1526 align = MOVE_MAX * BITS_PER_UNIT;
1528 while (max_size > 1)
1530 enum machine_mode mode = VOIDmode, tmode;
1531 enum insn_code icode;
1533 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1534 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1535 if (GET_MODE_SIZE (tmode) < max_size)
1538 if (mode == VOIDmode)
1541 icode = mov_optab->handlers[(int) mode].insn_code;
1542 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1543 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1545 max_size = GET_MODE_SIZE (mode);
1553 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1554 with move instructions for mode MODE. GENFUN is the gen_... function
1555 to make a move insn for that mode. DATA has all the other info. */
1558 move_by_pieces_1 (genfun, mode, data)
1559 rtx (*genfun) PARAMS ((rtx, ...));
1560 enum machine_mode mode;
1561 struct move_by_pieces *data;
1563 unsigned int size = GET_MODE_SIZE (mode);
1566 while (data->len >= size)
1569 data->offset -= size;
1573 if (data->autinc_to)
1575 to1 = gen_rtx_MEM (mode, data->to_addr);
1576 MEM_COPY_ATTRIBUTES (to1, data->to);
1579 to1 = change_address (data->to, mode,
1580 plus_constant (data->to_addr, data->offset));
1583 if (data->autinc_from)
1585 from1 = gen_rtx_MEM (mode, data->from_addr);
1586 MEM_COPY_ATTRIBUTES (from1, data->from);
1589 from1 = change_address (data->from, mode,
1590 plus_constant (data->from_addr, data->offset));
1592 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1593 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1594 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1595 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1598 emit_insn ((*genfun) (to1, from1));
1600 emit_single_push_insn (mode, from1, NULL);
1602 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1603 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1604 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1605 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1607 if (! data->reverse)
1608 data->offset += size;
1614 /* Emit code to move a block Y to a block X.
1615 This may be done with string-move instructions,
1616 with multiple scalar move instructions, or with a library call.
1618 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1620 SIZE is an rtx that says how long they are.
1621 ALIGN is the maximum alignment we can assume they have.
1623 Return the address of the new block, if memcpy is called and returns it,
1627 emit_block_move (x, y, size, align)
1633 #ifdef TARGET_MEM_FUNCTIONS
1635 tree call_expr, arg_list;
1638 if (GET_MODE (x) != BLKmode)
1641 if (GET_MODE (y) != BLKmode)
1644 x = protect_from_queue (x, 1);
1645 y = protect_from_queue (y, 0);
1646 size = protect_from_queue (size, 0);
1648 if (GET_CODE (x) != MEM)
1650 if (GET_CODE (y) != MEM)
1655 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1656 move_by_pieces (x, y, INTVAL (size), align);
1659 /* Try the most limited insn first, because there's no point
1660 including more than one in the machine description unless
1661 the more limited one has some advantage. */
1663 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1664 enum machine_mode mode;
1666 /* Since this is a move insn, we don't care about volatility. */
1669 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1670 mode = GET_MODE_WIDER_MODE (mode))
1672 enum insn_code code = movstr_optab[(int) mode];
1673 insn_operand_predicate_fn pred;
1675 if (code != CODE_FOR_nothing
1676 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1677 here because if SIZE is less than the mode mask, as it is
1678 returned by the macro, it will definitely be less than the
1679 actual mode mask. */
1680 && ((GET_CODE (size) == CONST_INT
1681 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1682 <= (GET_MODE_MASK (mode) >> 1)))
1683 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1684 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1685 || (*pred) (x, BLKmode))
1686 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1687 || (*pred) (y, BLKmode))
1688 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1689 || (*pred) (opalign, VOIDmode)))
1692 rtx last = get_last_insn ();
1695 op2 = convert_to_mode (mode, size, 1);
1696 pred = insn_data[(int) code].operand[2].predicate;
1697 if (pred != 0 && ! (*pred) (op2, mode))
1698 op2 = copy_to_mode_reg (mode, op2);
1700 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1708 delete_insns_since (last);
1714 /* X, Y, or SIZE may have been passed through protect_from_queue.
1716 It is unsafe to save the value generated by protect_from_queue
1717 and reuse it later. Consider what happens if emit_queue is
1718 called before the return value from protect_from_queue is used.
1720 Expansion of the CALL_EXPR below will call emit_queue before
1721 we are finished emitting RTL for argument setup. So if we are
1722 not careful we could get the wrong value for an argument.
1724 To avoid this problem we go ahead and emit code to copy X, Y &
1725 SIZE into new pseudos. We can then place those new pseudos
1726 into an RTL_EXPR and use them later, even after a call to
1729 Note this is not strictly needed for library calls since they
1730 do not call emit_queue before loading their arguments. However,
1731 we may need to have library calls call emit_queue in the future
1732 since failing to do so could cause problems for targets which
1733 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1734 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1735 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1737 #ifdef TARGET_MEM_FUNCTIONS
1738 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1740 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1741 TREE_UNSIGNED (integer_type_node));
1742 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1745 #ifdef TARGET_MEM_FUNCTIONS
1746 /* It is incorrect to use the libcall calling conventions to call
1747 memcpy in this context.
1749 This could be a user call to memcpy and the user may wish to
1750 examine the return value from memcpy.
1752 For targets where libcalls and normal calls have different conventions
1753 for returning pointers, we could end up generating incorrect code.
1755 So instead of using a libcall sequence we build up a suitable
1756 CALL_EXPR and expand the call in the normal fashion. */
1757 if (fn == NULL_TREE)
1761 /* This was copied from except.c, I don't know if all this is
1762 necessary in this context or not. */
1763 fn = get_identifier ("memcpy");
1764 fntype = build_pointer_type (void_type_node);
1765 fntype = build_function_type (fntype, NULL_TREE);
1766 fn = build_decl (FUNCTION_DECL, fn, fntype);
1767 ggc_add_tree_root (&fn, 1);
1768 DECL_EXTERNAL (fn) = 1;
1769 TREE_PUBLIC (fn) = 1;
1770 DECL_ARTIFICIAL (fn) = 1;
1771 make_decl_rtl (fn, NULL);
1772 assemble_external (fn);
1775 /* We need to make an argument list for the function call.
1777 memcpy has three arguments, the first two are void * addresses and
1778 the last is a size_t byte count for the copy. */
1780 = build_tree_list (NULL_TREE,
1781 make_tree (build_pointer_type (void_type_node), x));
1782 TREE_CHAIN (arg_list)
1783 = build_tree_list (NULL_TREE,
1784 make_tree (build_pointer_type (void_type_node), y));
1785 TREE_CHAIN (TREE_CHAIN (arg_list))
1786 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1787 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1789 /* Now we have to build up the CALL_EXPR itself. */
1790 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1791 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1792 call_expr, arg_list, NULL_TREE);
1793 TREE_SIDE_EFFECTS (call_expr) = 1;
1795 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1797 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1798 VOIDmode, 3, y, Pmode, x, Pmode,
1799 convert_to_mode (TYPE_MODE (integer_type_node), size,
1800 TREE_UNSIGNED (integer_type_node)),
1801 TYPE_MODE (integer_type_node));
1808 /* Copy all or part of a value X into registers starting at REGNO.
1809 The number of registers to be filled is NREGS. */
1812 move_block_to_reg (regno, x, nregs, mode)
1816 enum machine_mode mode;
1819 #ifdef HAVE_load_multiple
1827 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1828 x = validize_mem (force_const_mem (mode, x));
1830 /* See if the machine can do this with a load multiple insn. */
1831 #ifdef HAVE_load_multiple
1832 if (HAVE_load_multiple)
1834 last = get_last_insn ();
1835 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1843 delete_insns_since (last);
1847 for (i = 0; i < nregs; i++)
1848 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1849 operand_subword_force (x, i, mode));
1852 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1853 The number of registers to be filled is NREGS. SIZE indicates the number
1854 of bytes in the object X. */
1857 move_block_from_reg (regno, x, nregs, size)
1864 #ifdef HAVE_store_multiple
1868 enum machine_mode mode;
1873 /* If SIZE is that of a mode no bigger than a word, just use that
1874 mode's store operation. */
1875 if (size <= UNITS_PER_WORD
1876 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1878 emit_move_insn (change_address (x, mode, NULL),
1879 gen_rtx_REG (mode, regno));
1883 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1884 to the left before storing to memory. Note that the previous test
1885 doesn't handle all cases (e.g. SIZE == 3). */
1886 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1888 rtx tem = operand_subword (x, 0, 1, BLKmode);
1894 shift = expand_shift (LSHIFT_EXPR, word_mode,
1895 gen_rtx_REG (word_mode, regno),
1896 build_int_2 ((UNITS_PER_WORD - size)
1897 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1898 emit_move_insn (tem, shift);
1902 /* See if the machine can do this with a store multiple insn. */
1903 #ifdef HAVE_store_multiple
1904 if (HAVE_store_multiple)
1906 last = get_last_insn ();
1907 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1915 delete_insns_since (last);
1919 for (i = 0; i < nregs; i++)
1921 rtx tem = operand_subword (x, i, 1, BLKmode);
1926 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1930 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1931 registers represented by a PARALLEL. SSIZE represents the total size of
1932 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1934 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatent assumption that
1935 the balance will be in what would be the low-order memory addresses, i.e.
1936 left justified for big endian, right justified for little endian. This
1937 happens to be true for the targets currently using this support. If this
1938 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1942 emit_group_load (dst, orig_src, ssize, align)
1950 if (GET_CODE (dst) != PARALLEL)
1953 /* Check for a NULL entry, used to indicate that the parameter goes
1954 both on the stack and in registers. */
1955 if (XEXP (XVECEXP (dst, 0, 0), 0))
1960 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1962 /* Process the pieces. */
1963 for (i = start; i < XVECLEN (dst, 0); i++)
1965 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1966 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1967 unsigned int bytelen = GET_MODE_SIZE (mode);
1970 /* Handle trailing fragments that run over the size of the struct. */
1971 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1973 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1974 bytelen = ssize - bytepos;
1979 /* If we won't be loading directly from memory, protect the real source
1980 from strange tricks we might play; but make sure that the source can
1981 be loaded directly into the destination. */
1983 if (GET_CODE (orig_src) != MEM
1984 && (!CONSTANT_P (orig_src)
1985 || (GET_MODE (orig_src) != mode
1986 && GET_MODE (orig_src) != VOIDmode)))
1988 if (GET_MODE (orig_src) == VOIDmode)
1989 src = gen_reg_rtx (mode);
1991 src = gen_reg_rtx (GET_MODE (orig_src));
1992 emit_move_insn (src, orig_src);
1995 /* Optimize the access just a bit. */
1996 if (GET_CODE (src) == MEM
1997 && align >= GET_MODE_ALIGNMENT (mode)
1998 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1999 && bytelen == GET_MODE_SIZE (mode))
2001 tmps[i] = gen_reg_rtx (mode);
2002 emit_move_insn (tmps[i],
2003 change_address (src, mode,
2004 plus_constant (XEXP (src, 0),
2007 else if (GET_CODE (src) == CONCAT)
2010 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2011 tmps[i] = XEXP (src, 0);
2012 else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
2013 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
2014 tmps[i] = XEXP (src, 1);
2018 else if (CONSTANT_P (src)
2019 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2022 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2023 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2024 mode, mode, align, ssize);
2026 if (BYTES_BIG_ENDIAN && shift)
2027 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2028 tmps[i], 0, OPTAB_WIDEN);
2033 /* Copy the extracted pieces into the proper (probable) hard regs. */
2034 for (i = start; i < XVECLEN (dst, 0); i++)
2035 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2038 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2039 registers represented by a PARALLEL. SSIZE represents the total size of
2040 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
2043 emit_group_store (orig_dst, src, ssize, align)
2051 if (GET_CODE (src) != PARALLEL)
2054 /* Check for a NULL entry, used to indicate that the parameter goes
2055 both on the stack and in registers. */
2056 if (XEXP (XVECEXP (src, 0, 0), 0))
2061 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2063 /* Copy the (probable) hard regs into pseudos. */
2064 for (i = start; i < XVECLEN (src, 0); i++)
2066 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2067 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2068 emit_move_insn (tmps[i], reg);
2072 /* If we won't be storing directly into memory, protect the real destination
2073 from strange tricks we might play. */
2075 if (GET_CODE (dst) == PARALLEL)
2079 /* We can get a PARALLEL dst if there is a conditional expression in
2080 a return statement. In that case, the dst and src are the same,
2081 so no action is necessary. */
2082 if (rtx_equal_p (dst, src))
2085 /* It is unclear if we can ever reach here, but we may as well handle
2086 it. Allocate a temporary, and split this into a store/load to/from
2089 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2090 emit_group_store (temp, src, ssize, align);
2091 emit_group_load (dst, temp, ssize, align);
2094 else if (GET_CODE (dst) != MEM)
2096 dst = gen_reg_rtx (GET_MODE (orig_dst));
2097 /* Make life a bit easier for combine. */
2098 emit_move_insn (dst, const0_rtx);
2101 /* Process the pieces. */
2102 for (i = start; i < XVECLEN (src, 0); i++)
2104 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2105 enum machine_mode mode = GET_MODE (tmps[i]);
2106 unsigned int bytelen = GET_MODE_SIZE (mode);
2108 /* Handle trailing fragments that run over the size of the struct. */
2109 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2111 if (BYTES_BIG_ENDIAN)
2113 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2114 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2115 tmps[i], 0, OPTAB_WIDEN);
2117 bytelen = ssize - bytepos;
2120 /* Optimize the access just a bit. */
2121 if (GET_CODE (dst) == MEM
2122 && align >= GET_MODE_ALIGNMENT (mode)
2123 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2124 && bytelen == GET_MODE_SIZE (mode))
2125 emit_move_insn (change_address (dst, mode,
2126 plus_constant (XEXP (dst, 0),
2130 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2131 mode, tmps[i], align, ssize);
2136 /* Copy from the pseudo into the (probable) hard reg. */
2137 if (GET_CODE (dst) == REG)
2138 emit_move_insn (orig_dst, dst);
2141 /* Generate code to copy a BLKmode object of TYPE out of a
2142 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2143 is null, a stack temporary is created. TGTBLK is returned.
2145 The primary purpose of this routine is to handle functions
2146 that return BLKmode structures in registers. Some machines
2147 (the PA for example) want to return all small structures
2148 in registers regardless of the structure's alignment. */
2151 copy_blkmode_from_reg (tgtblk, srcreg, type)
2156 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2157 rtx src = NULL, dst = NULL;
2158 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2159 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2163 tgtblk = assign_temp (build_qualified_type (type,
2165 | TYPE_QUAL_CONST)),
2167 preserve_temp_slots (tgtblk);
2170 /* This code assumes srcreg is at least a full word. If it isn't,
2171 copy it into a new pseudo which is a full word. */
2172 if (GET_MODE (srcreg) != BLKmode
2173 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2174 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2176 /* Structures whose size is not a multiple of a word are aligned
2177 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2178 machine, this means we must skip the empty high order bytes when
2179 calculating the bit offset. */
2180 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2181 big_endian_correction
2182 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2184 /* Copy the structure BITSIZE bites at a time.
2186 We could probably emit more efficient code for machines which do not use
2187 strict alignment, but it doesn't seem worth the effort at the current
2189 for (bitpos = 0, xbitpos = big_endian_correction;
2190 bitpos < bytes * BITS_PER_UNIT;
2191 bitpos += bitsize, xbitpos += bitsize)
2193 /* We need a new source operand each time xbitpos is on a
2194 word boundary and when xbitpos == big_endian_correction
2195 (the first time through). */
2196 if (xbitpos % BITS_PER_WORD == 0
2197 || xbitpos == big_endian_correction)
2198 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, BLKmode);
2200 /* We need a new destination operand each time bitpos is on
2202 if (bitpos % BITS_PER_WORD == 0)
2203 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2205 /* Use xbitpos for the source extraction (right justified) and
2206 xbitpos for the destination store (left justified). */
2207 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2208 extract_bit_field (src, bitsize,
2209 xbitpos % BITS_PER_WORD, 1,
2210 NULL_RTX, word_mode, word_mode,
2211 bitsize, BITS_PER_WORD),
2212 bitsize, BITS_PER_WORD);
2218 /* Add a USE expression for REG to the (possibly empty) list pointed
2219 to by CALL_FUSAGE. REG must denote a hard register. */
2222 use_reg (call_fusage, reg)
2223 rtx *call_fusage, reg;
2225 if (GET_CODE (reg) != REG
2226 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2230 = gen_rtx_EXPR_LIST (VOIDmode,
2231 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2234 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2235 starting at REGNO. All of these registers must be hard registers. */
2238 use_regs (call_fusage, regno, nregs)
2245 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2248 for (i = 0; i < nregs; i++)
2249 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2252 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2253 PARALLEL REGS. This is for calls that pass values in multiple
2254 non-contiguous locations. The Irix 6 ABI has examples of this. */
2257 use_group_regs (call_fusage, regs)
2263 for (i = 0; i < XVECLEN (regs, 0); i++)
2265 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2267 /* A NULL entry means the parameter goes both on the stack and in
2268 registers. This can also be a MEM for targets that pass values
2269 partially on the stack and partially in registers. */
2270 if (reg != 0 && GET_CODE (reg) == REG)
2271 use_reg (call_fusage, reg);
2277 can_store_by_pieces (len, constfun, constfundata, align)
2278 unsigned HOST_WIDE_INT len;
2279 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2283 unsigned HOST_WIDE_INT max_size, l;
2284 HOST_WIDE_INT offset = 0;
2285 enum machine_mode mode, tmode;
2286 enum insn_code icode;
2290 if (! MOVE_BY_PIECES_P (len, align))
2293 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2294 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2295 align = MOVE_MAX * BITS_PER_UNIT;
2297 /* We would first store what we can in the largest integer mode, then go to
2298 successively smaller modes. */
2301 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2306 max_size = MOVE_MAX_PIECES + 1;
2307 while (max_size > 1)
2309 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2310 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2311 if (GET_MODE_SIZE (tmode) < max_size)
2314 if (mode == VOIDmode)
2317 icode = mov_optab->handlers[(int) mode].insn_code;
2318 if (icode != CODE_FOR_nothing
2319 && align >= GET_MODE_ALIGNMENT (mode))
2321 unsigned int size = GET_MODE_SIZE (mode);
2328 cst = (*constfun) (constfundata, offset, mode);
2329 if (!LEGITIMATE_CONSTANT_P (cst))
2339 max_size = GET_MODE_SIZE (mode);
2342 /* The code above should have handled everything. */
2350 /* Generate several move instructions to store LEN bytes generated by
2351 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2352 pointer which will be passed as argument in every CONSTFUN call.
2353 ALIGN is maximum alignment we can assume. */
2356 store_by_pieces (to, len, constfun, constfundata, align)
2358 unsigned HOST_WIDE_INT len;
2359 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2363 struct store_by_pieces data;
2365 if (! MOVE_BY_PIECES_P (len, align))
2367 to = protect_from_queue (to, 1);
2368 data.constfun = constfun;
2369 data.constfundata = constfundata;
2372 store_by_pieces_1 (&data, align);
2375 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2376 rtx with BLKmode). The caller must pass TO through protect_from_queue
2377 before calling. ALIGN is maximum alignment we can assume. */
2380 clear_by_pieces (to, len, align)
2382 unsigned HOST_WIDE_INT len;
2385 struct store_by_pieces data;
2387 data.constfun = clear_by_pieces_1;
2388 data.constfundata = NULL;
2391 store_by_pieces_1 (&data, align);
2394 /* Callback routine for clear_by_pieces.
2395 Return const0_rtx unconditionally. */
2398 clear_by_pieces_1 (data, offset, mode)
2399 PTR data ATTRIBUTE_UNUSED;
2400 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2401 enum machine_mode mode ATTRIBUTE_UNUSED;
2406 /* Subroutine of clear_by_pieces and store_by_pieces.
2407 Generate several move instructions to store LEN bytes of block TO. (A MEM
2408 rtx with BLKmode). The caller must pass TO through protect_from_queue
2409 before calling. ALIGN is maximum alignment we can assume. */
2412 store_by_pieces_1 (data, align)
2413 struct store_by_pieces *data;
2416 rtx to_addr = XEXP (data->to, 0);
2417 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2418 enum machine_mode mode = VOIDmode, tmode;
2419 enum insn_code icode;
2422 data->to_addr = to_addr;
2424 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2425 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2427 data->explicit_inc_to = 0;
2429 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2431 data->offset = data->len;
2433 /* If storing requires more than two move insns,
2434 copy addresses to registers (to make displacements shorter)
2435 and use post-increment if available. */
2436 if (!data->autinc_to
2437 && move_by_pieces_ninsns (data->len, align) > 2)
2439 /* Determine the main mode we'll be using. */
2440 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2441 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2442 if (GET_MODE_SIZE (tmode) < max_size)
2445 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2447 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2448 data->autinc_to = 1;
2449 data->explicit_inc_to = -1;
2452 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2453 && ! data->autinc_to)
2455 data->to_addr = copy_addr_to_reg (to_addr);
2456 data->autinc_to = 1;
2457 data->explicit_inc_to = 1;
2460 if ( !data->autinc_to && CONSTANT_P (to_addr))
2461 data->to_addr = copy_addr_to_reg (to_addr);
2464 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2465 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2466 align = MOVE_MAX * BITS_PER_UNIT;
2468 /* First store what we can in the largest integer mode, then go to
2469 successively smaller modes. */
2471 while (max_size > 1)
2473 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2474 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2475 if (GET_MODE_SIZE (tmode) < max_size)
2478 if (mode == VOIDmode)
2481 icode = mov_optab->handlers[(int) mode].insn_code;
2482 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2483 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2485 max_size = GET_MODE_SIZE (mode);
2488 /* The code above should have handled everything. */
2493 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2494 with move instructions for mode MODE. GENFUN is the gen_... function
2495 to make a move insn for that mode. DATA has all the other info. */
2498 store_by_pieces_2 (genfun, mode, data)
2499 rtx (*genfun) PARAMS ((rtx, ...));
2500 enum machine_mode mode;
2501 struct store_by_pieces *data;
2503 unsigned int size = GET_MODE_SIZE (mode);
2506 while (data->len >= size)
2509 data->offset -= size;
2511 if (data->autinc_to)
2513 to1 = gen_rtx_MEM (mode, data->to_addr);
2514 MEM_COPY_ATTRIBUTES (to1, data->to);
2517 to1 = change_address (data->to, mode,
2518 plus_constant (data->to_addr, data->offset));
2520 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2521 emit_insn (gen_add2_insn (data->to_addr,
2522 GEN_INT (-(HOST_WIDE_INT) size)));
2524 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2525 emit_insn ((*genfun) (to1, cst));
2527 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2528 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2530 if (! data->reverse)
2531 data->offset += size;
2537 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2538 its length in bytes and ALIGN is the maximum alignment we can is has.
2540 If we call a function that returns the length of the block, return it. */
2543 clear_storage (object, size, align)
2548 #ifdef TARGET_MEM_FUNCTIONS
2550 tree call_expr, arg_list;
2554 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2555 just move a zero. Otherwise, do this a piece at a time. */
2556 if (GET_MODE (object) != BLKmode
2557 && GET_CODE (size) == CONST_INT
2558 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2559 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2562 object = protect_from_queue (object, 1);
2563 size = protect_from_queue (size, 0);
2565 if (GET_CODE (size) == CONST_INT
2566 && MOVE_BY_PIECES_P (INTVAL (size), align))
2567 clear_by_pieces (object, INTVAL (size), align);
2570 /* Try the most limited insn first, because there's no point
2571 including more than one in the machine description unless
2572 the more limited one has some advantage. */
2574 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2575 enum machine_mode mode;
2577 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2578 mode = GET_MODE_WIDER_MODE (mode))
2580 enum insn_code code = clrstr_optab[(int) mode];
2581 insn_operand_predicate_fn pred;
2583 if (code != CODE_FOR_nothing
2584 /* We don't need MODE to be narrower than
2585 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2586 the mode mask, as it is returned by the macro, it will
2587 definitely be less than the actual mode mask. */
2588 && ((GET_CODE (size) == CONST_INT
2589 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2590 <= (GET_MODE_MASK (mode) >> 1)))
2591 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2592 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2593 || (*pred) (object, BLKmode))
2594 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2595 || (*pred) (opalign, VOIDmode)))
2598 rtx last = get_last_insn ();
2601 op1 = convert_to_mode (mode, size, 1);
2602 pred = insn_data[(int) code].operand[1].predicate;
2603 if (pred != 0 && ! (*pred) (op1, mode))
2604 op1 = copy_to_mode_reg (mode, op1);
2606 pat = GEN_FCN ((int) code) (object, op1, opalign);
2613 delete_insns_since (last);
2617 /* OBJECT or SIZE may have been passed through protect_from_queue.
2619 It is unsafe to save the value generated by protect_from_queue
2620 and reuse it later. Consider what happens if emit_queue is
2621 called before the return value from protect_from_queue is used.
2623 Expansion of the CALL_EXPR below will call emit_queue before
2624 we are finished emitting RTL for argument setup. So if we are
2625 not careful we could get the wrong value for an argument.
2627 To avoid this problem we go ahead and emit code to copy OBJECT
2628 and SIZE into new pseudos. We can then place those new pseudos
2629 into an RTL_EXPR and use them later, even after a call to
2632 Note this is not strictly needed for library calls since they
2633 do not call emit_queue before loading their arguments. However,
2634 we may need to have library calls call emit_queue in the future
2635 since failing to do so could cause problems for targets which
2636 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2637 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2639 #ifdef TARGET_MEM_FUNCTIONS
2640 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2642 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2643 TREE_UNSIGNED (integer_type_node));
2644 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2647 #ifdef TARGET_MEM_FUNCTIONS
2648 /* It is incorrect to use the libcall calling conventions to call
2649 memset in this context.
2651 This could be a user call to memset and the user may wish to
2652 examine the return value from memset.
2654 For targets where libcalls and normal calls have different
2655 conventions for returning pointers, we could end up generating
2658 So instead of using a libcall sequence we build up a suitable
2659 CALL_EXPR and expand the call in the normal fashion. */
2660 if (fn == NULL_TREE)
2664 /* This was copied from except.c, I don't know if all this is
2665 necessary in this context or not. */
2666 fn = get_identifier ("memset");
2667 fntype = build_pointer_type (void_type_node);
2668 fntype = build_function_type (fntype, NULL_TREE);
2669 fn = build_decl (FUNCTION_DECL, fn, fntype);
2670 ggc_add_tree_root (&fn, 1);
2671 DECL_EXTERNAL (fn) = 1;
2672 TREE_PUBLIC (fn) = 1;
2673 DECL_ARTIFICIAL (fn) = 1;
2674 make_decl_rtl (fn, NULL);
2675 assemble_external (fn);
2678 /* We need to make an argument list for the function call.
2680 memset has three arguments, the first is a void * addresses, the
2681 second a integer with the initialization value, the last is a
2682 size_t byte count for the copy. */
2684 = build_tree_list (NULL_TREE,
2685 make_tree (build_pointer_type (void_type_node),
2687 TREE_CHAIN (arg_list)
2688 = build_tree_list (NULL_TREE,
2689 make_tree (integer_type_node, const0_rtx));
2690 TREE_CHAIN (TREE_CHAIN (arg_list))
2691 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2692 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2694 /* Now we have to build up the CALL_EXPR itself. */
2695 call_expr = build1 (ADDR_EXPR,
2696 build_pointer_type (TREE_TYPE (fn)), fn);
2697 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2698 call_expr, arg_list, NULL_TREE);
2699 TREE_SIDE_EFFECTS (call_expr) = 1;
2701 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2703 emit_library_call (bzero_libfunc, LCT_NORMAL,
2704 VOIDmode, 2, object, Pmode, size,
2705 TYPE_MODE (integer_type_node));
2713 /* Generate code to copy Y into X.
2714 Both Y and X must have the same mode, except that
2715 Y can be a constant with VOIDmode.
2716 This mode cannot be BLKmode; use emit_block_move for that.
2718 Return the last instruction emitted. */
2721 emit_move_insn (x, y)
2724 enum machine_mode mode = GET_MODE (x);
2725 rtx y_cst = NULL_RTX;
2728 x = protect_from_queue (x, 1);
2729 y = protect_from_queue (y, 0);
2731 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2734 /* Never force constant_p_rtx to memory. */
2735 if (GET_CODE (y) == CONSTANT_P_RTX)
2737 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2740 y = force_const_mem (mode, y);
2743 /* If X or Y are memory references, verify that their addresses are valid
2745 if (GET_CODE (x) == MEM
2746 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2747 && ! push_operand (x, GET_MODE (x)))
2749 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2750 x = change_address (x, VOIDmode, XEXP (x, 0));
2752 if (GET_CODE (y) == MEM
2753 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2755 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2756 y = change_address (y, VOIDmode, XEXP (y, 0));
2758 if (mode == BLKmode)
2761 last_insn = emit_move_insn_1 (x, y);
2763 if (y_cst && GET_CODE (x) == REG)
2764 REG_NOTES (last_insn)
2765 = gen_rtx_EXPR_LIST (REG_EQUAL, y_cst, REG_NOTES (last_insn));
2770 /* Low level part of emit_move_insn.
2771 Called just like emit_move_insn, but assumes X and Y
2772 are basically valid. */
2775 emit_move_insn_1 (x, y)
2778 enum machine_mode mode = GET_MODE (x);
2779 enum machine_mode submode;
2780 enum mode_class class = GET_MODE_CLASS (mode);
2783 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2786 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2788 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2790 /* Expand complex moves by moving real part and imag part, if possible. */
2791 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2792 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2794 (class == MODE_COMPLEX_INT
2795 ? MODE_INT : MODE_FLOAT),
2797 && (mov_optab->handlers[(int) submode].insn_code
2798 != CODE_FOR_nothing))
2800 /* Don't split destination if it is a stack push. */
2801 int stack = push_operand (x, GET_MODE (x));
2803 #ifdef PUSH_ROUNDING
2804 /* In case we output to the stack, but the size is smaller machine can
2805 push exactly, we need to use move instructions. */
2807 && PUSH_ROUNDING (GET_MODE_SIZE (submode)) != GET_MODE_SIZE (submode))
2810 int offset1, offset2;
2812 /* Do not use anti_adjust_stack, since we don't want to update
2813 stack_pointer_delta. */
2814 temp = expand_binop (Pmode,
2815 #ifdef STACK_GROWS_DOWNWARD
2822 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2826 if (temp != stack_pointer_rtx)
2827 emit_move_insn (stack_pointer_rtx, temp);
2828 #ifdef STACK_GROWS_DOWNWARD
2830 offset2 = GET_MODE_SIZE (submode);
2832 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2833 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2834 + GET_MODE_SIZE (submode));
2836 emit_move_insn (change_address (x, submode,
2837 gen_rtx_PLUS (Pmode,
2839 GEN_INT (offset1))),
2840 gen_realpart (submode, y));
2841 emit_move_insn (change_address (x, submode,
2842 gen_rtx_PLUS (Pmode,
2844 GEN_INT (offset2))),
2845 gen_imagpart (submode, y));
2849 /* If this is a stack, push the highpart first, so it
2850 will be in the argument order.
2852 In that case, change_address is used only to convert
2853 the mode, not to change the address. */
2856 /* Note that the real part always precedes the imag part in memory
2857 regardless of machine's endianness. */
2858 #ifdef STACK_GROWS_DOWNWARD
2859 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2860 (gen_rtx_MEM (submode, XEXP (x, 0)),
2861 gen_imagpart (submode, y)));
2862 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2863 (gen_rtx_MEM (submode, XEXP (x, 0)),
2864 gen_realpart (submode, y)));
2866 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2867 (gen_rtx_MEM (submode, XEXP (x, 0)),
2868 gen_realpart (submode, y)));
2869 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2870 (gen_rtx_MEM (submode, XEXP (x, 0)),
2871 gen_imagpart (submode, y)));
2876 rtx realpart_x, realpart_y;
2877 rtx imagpart_x, imagpart_y;
2879 /* If this is a complex value with each part being smaller than a
2880 word, the usual calling sequence will likely pack the pieces into
2881 a single register. Unfortunately, SUBREG of hard registers only
2882 deals in terms of words, so we have a problem converting input
2883 arguments to the CONCAT of two registers that is used elsewhere
2884 for complex values. If this is before reload, we can copy it into
2885 memory and reload. FIXME, we should see about using extract and
2886 insert on integer registers, but complex short and complex char
2887 variables should be rarely used. */
2888 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2889 && (reload_in_progress | reload_completed) == 0)
2891 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2892 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2894 if (packed_dest_p || packed_src_p)
2896 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2897 ? MODE_FLOAT : MODE_INT);
2899 enum machine_mode reg_mode
2900 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2902 if (reg_mode != BLKmode)
2904 rtx mem = assign_stack_temp (reg_mode,
2905 GET_MODE_SIZE (mode), 0);
2906 rtx cmem = change_address (mem, mode, NULL_RTX);
2909 = N_("function using short complex types cannot be inline");
2913 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2914 emit_move_insn_1 (cmem, y);
2915 return emit_move_insn_1 (sreg, mem);
2919 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2920 emit_move_insn_1 (mem, sreg);
2921 return emit_move_insn_1 (x, cmem);
2927 realpart_x = gen_realpart (submode, x);
2928 realpart_y = gen_realpart (submode, y);
2929 imagpart_x = gen_imagpart (submode, x);
2930 imagpart_y = gen_imagpart (submode, y);
2932 /* Show the output dies here. This is necessary for SUBREGs
2933 of pseudos since we cannot track their lifetimes correctly;
2934 hard regs shouldn't appear here except as return values.
2935 We never want to emit such a clobber after reload. */
2937 && ! (reload_in_progress || reload_completed)
2938 && (GET_CODE (realpart_x) == SUBREG
2939 || GET_CODE (imagpart_x) == SUBREG))
2941 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2944 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2945 (realpart_x, realpart_y));
2946 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2947 (imagpart_x, imagpart_y));
2950 return get_last_insn ();
2953 /* This will handle any multi-word mode that lacks a move_insn pattern.
2954 However, you will get better code if you define such patterns,
2955 even if they must turn into multiple assembler instructions. */
2956 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2962 #ifdef PUSH_ROUNDING
2964 /* If X is a push on the stack, do the push now and replace
2965 X with a reference to the stack pointer. */
2966 if (push_operand (x, GET_MODE (x)))
2968 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2969 x = change_address (x, VOIDmode, stack_pointer_rtx);
2973 /* If we are in reload, see if either operand is a MEM whose address
2974 is scheduled for replacement. */
2975 if (reload_in_progress && GET_CODE (x) == MEM
2976 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2978 rtx new = gen_rtx_MEM (GET_MODE (x), inner);
2980 MEM_COPY_ATTRIBUTES (new, x);
2983 if (reload_in_progress && GET_CODE (y) == MEM
2984 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2986 rtx new = gen_rtx_MEM (GET_MODE (y), inner);
2988 MEM_COPY_ATTRIBUTES (new, y);
2996 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2999 rtx xpart = operand_subword (x, i, 1, mode);
3000 rtx ypart = operand_subword (y, i, 1, mode);
3002 /* If we can't get a part of Y, put Y into memory if it is a
3003 constant. Otherwise, force it into a register. If we still
3004 can't get a part of Y, abort. */
3005 if (ypart == 0 && CONSTANT_P (y))
3007 y = force_const_mem (mode, y);
3008 ypart = operand_subword (y, i, 1, mode);
3010 else if (ypart == 0)
3011 ypart = operand_subword_force (y, i, mode);
3013 if (xpart == 0 || ypart == 0)
3016 need_clobber |= (GET_CODE (xpart) == SUBREG);
3018 last_insn = emit_move_insn (xpart, ypart);
3021 seq = gen_sequence ();
3024 /* Show the output dies here. This is necessary for SUBREGs
3025 of pseudos since we cannot track their lifetimes correctly;
3026 hard regs shouldn't appear here except as return values.
3027 We never want to emit such a clobber after reload. */
3029 && ! (reload_in_progress || reload_completed)
3030 && need_clobber != 0)
3032 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3043 /* Pushing data onto the stack. */
3045 /* Push a block of length SIZE (perhaps variable)
3046 and return an rtx to address the beginning of the block.
3047 Note that it is not possible for the value returned to be a QUEUED.
3048 The value may be virtual_outgoing_args_rtx.
3050 EXTRA is the number of bytes of padding to push in addition to SIZE.
3051 BELOW nonzero means this padding comes at low addresses;
3052 otherwise, the padding comes at high addresses. */
3055 push_block (size, extra, below)
3061 size = convert_modes (Pmode, ptr_mode, size, 1);
3062 if (CONSTANT_P (size))
3063 anti_adjust_stack (plus_constant (size, extra));
3064 else if (GET_CODE (size) == REG && extra == 0)
3065 anti_adjust_stack (size);
3068 temp = copy_to_mode_reg (Pmode, size);
3070 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3071 temp, 0, OPTAB_LIB_WIDEN);
3072 anti_adjust_stack (temp);
3075 #ifndef STACK_GROWS_DOWNWARD
3076 #ifdef ARGS_GROW_DOWNWARD
3077 if (!ACCUMULATE_OUTGOING_ARGS)
3085 /* Return the lowest stack address when STACK or ARGS grow downward and
3086 we are not aaccumulating outgoing arguments (the c4x port uses such
3088 temp = virtual_outgoing_args_rtx;
3089 if (extra != 0 && below)
3090 temp = plus_constant (temp, extra);
3094 if (GET_CODE (size) == CONST_INT)
3095 temp = plus_constant (virtual_outgoing_args_rtx,
3096 -INTVAL (size) - (below ? 0 : extra));
3097 else if (extra != 0 && !below)
3098 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3099 negate_rtx (Pmode, plus_constant (size, extra)));
3101 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3102 negate_rtx (Pmode, size));
3105 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3109 /* Return an rtx for the address of the beginning of a as-if-it-was-pushed
3110 block of SIZE bytes. */
3113 get_push_address (size)
3118 if (STACK_PUSH_CODE == POST_DEC)
3119 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3120 else if (STACK_PUSH_CODE == POST_INC)
3121 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3123 temp = stack_pointer_rtx;
3125 return copy_to_reg (temp);
3128 /* Emit single push insn. */
3130 emit_single_push_insn (mode, x, type)
3132 enum machine_mode mode;
3135 #ifdef PUSH_ROUNDING
3137 int rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3140 if (GET_MODE_SIZE (mode) == rounded_size)
3141 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3144 #ifdef STACK_GROWS_DOWNWARD
3145 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3146 GEN_INT (-rounded_size));
3148 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3149 GEN_INT (rounded_size));
3151 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3154 dest = gen_rtx_MEM (mode, dest_addr);
3156 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3160 set_mem_attributes (dest, type, 1);
3161 /* Function incoming arguments may overlap with sibling call
3162 outgoing arguments and we cannot allow reordering of reads
3163 from function arguments with stores to outgoing arguments
3164 of sibling calls. */
3165 MEM_ALIAS_SET (dest) = 0;
3167 emit_move_insn (dest, x);
3173 /* Generate code to push X onto the stack, assuming it has mode MODE and
3175 MODE is redundant except when X is a CONST_INT (since they don't
3177 SIZE is an rtx for the size of data to be copied (in bytes),
3178 needed only if X is BLKmode.
3180 ALIGN is maximum alignment we can assume.
3182 If PARTIAL and REG are both nonzero, then copy that many of the first
3183 words of X into registers starting with REG, and push the rest of X.
3184 The amount of space pushed is decreased by PARTIAL words,
3185 rounded *down* to a multiple of PARM_BOUNDARY.
3186 REG must be a hard register in this case.
3187 If REG is zero but PARTIAL is not, take any all others actions for an
3188 argument partially in registers, but do not actually load any
3191 EXTRA is the amount in bytes of extra space to leave next to this arg.
3192 This is ignored if an argument block has already been allocated.
3194 On a machine that lacks real push insns, ARGS_ADDR is the address of
3195 the bottom of the argument block for this call. We use indexing off there
3196 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
3197 argument block has not been preallocated.
3199 ARGS_SO_FAR is the size of args previously pushed for this call.
3201 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3202 for arguments passed in registers. If nonzero, it will be the number
3203 of bytes required. */
3206 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3207 args_addr, args_so_far, reg_parm_stack_space,
3210 enum machine_mode mode;
3219 int reg_parm_stack_space;
3223 enum direction stack_direction
3224 #ifdef STACK_GROWS_DOWNWARD
3230 /* Decide where to pad the argument: `downward' for below,
3231 `upward' for above, or `none' for don't pad it.
3232 Default is below for small data on big-endian machines; else above. */
3233 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3235 /* Invert direction if stack is post-update. */
3236 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
3237 if (where_pad != none)
3238 where_pad = (where_pad == downward ? upward : downward);
3240 xinner = x = protect_from_queue (x, 0);
3242 if (mode == BLKmode)
3244 /* Copy a block into the stack, entirely or partially. */
3247 int used = partial * UNITS_PER_WORD;
3248 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3256 /* USED is now the # of bytes we need not copy to the stack
3257 because registers will take care of them. */
3260 xinner = change_address (xinner, BLKmode,
3261 plus_constant (XEXP (xinner, 0), used));
3263 /* If the partial register-part of the arg counts in its stack size,
3264 skip the part of stack space corresponding to the registers.
3265 Otherwise, start copying to the beginning of the stack space,
3266 by setting SKIP to 0. */
3267 skip = (reg_parm_stack_space == 0) ? 0 : used;
3269 #ifdef PUSH_ROUNDING
3270 /* Do it with several push insns if that doesn't take lots of insns
3271 and if there is no difficulty with push insns that skip bytes
3272 on the stack for alignment purposes. */
3275 && GET_CODE (size) == CONST_INT
3277 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3278 /* Here we avoid the case of a structure whose weak alignment
3279 forces many pushes of a small amount of data,
3280 and such small pushes do rounding that causes trouble. */
3281 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3282 || align >= BIGGEST_ALIGNMENT
3283 || PUSH_ROUNDING (align) == align)
3284 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3286 /* Push padding now if padding above and stack grows down,
3287 or if padding below and stack grows up.
3288 But if space already allocated, this has already been done. */
3289 if (extra && args_addr == 0
3290 && where_pad != none && where_pad != stack_direction)
3291 anti_adjust_stack (GEN_INT (extra));
3293 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3295 if (current_function_check_memory_usage && ! in_check_memory_usage)
3299 in_check_memory_usage = 1;
3300 temp = get_push_address (INTVAL (size) - used);
3301 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3302 emit_library_call (chkr_copy_bitmap_libfunc,
3303 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3304 Pmode, XEXP (xinner, 0), Pmode,
3305 GEN_INT (INTVAL (size) - used),
3306 TYPE_MODE (sizetype));
3308 emit_library_call (chkr_set_right_libfunc,
3309 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3310 Pmode, GEN_INT (INTVAL (size) - used),
3311 TYPE_MODE (sizetype),
3312 GEN_INT (MEMORY_USE_RW),
3313 TYPE_MODE (integer_type_node));
3314 in_check_memory_usage = 0;
3318 #endif /* PUSH_ROUNDING */
3322 /* Otherwise make space on the stack and copy the data
3323 to the address of that space. */
3325 /* Deduct words put into registers from the size we must copy. */
3328 if (GET_CODE (size) == CONST_INT)
3329 size = GEN_INT (INTVAL (size) - used);
3331 size = expand_binop (GET_MODE (size), sub_optab, size,
3332 GEN_INT (used), NULL_RTX, 0,
3336 /* Get the address of the stack space.
3337 In this case, we do not deal with EXTRA separately.
3338 A single stack adjust will do. */
3341 temp = push_block (size, extra, where_pad == downward);
3344 else if (GET_CODE (args_so_far) == CONST_INT)
3345 temp = memory_address (BLKmode,
3346 plus_constant (args_addr,
3347 skip + INTVAL (args_so_far)));
3349 temp = memory_address (BLKmode,
3350 plus_constant (gen_rtx_PLUS (Pmode,
3354 if (current_function_check_memory_usage && ! in_check_memory_usage)
3356 in_check_memory_usage = 1;
3357 target = copy_to_reg (temp);
3358 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3359 emit_library_call (chkr_copy_bitmap_libfunc,
3360 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3362 XEXP (xinner, 0), Pmode,
3363 size, TYPE_MODE (sizetype));
3365 emit_library_call (chkr_set_right_libfunc,
3366 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3368 size, TYPE_MODE (sizetype),
3369 GEN_INT (MEMORY_USE_RW),
3370 TYPE_MODE (integer_type_node));
3371 in_check_memory_usage = 0;
3374 target = gen_rtx_MEM (BLKmode, temp);
3378 set_mem_attributes (target, type, 1);
3379 /* Function incoming arguments may overlap with sibling call
3380 outgoing arguments and we cannot allow reordering of reads
3381 from function arguments with stores to outgoing arguments
3382 of sibling calls. */
3383 MEM_ALIAS_SET (target) = 0;
3386 /* TEMP is the address of the block. Copy the data there. */
3387 if (GET_CODE (size) == CONST_INT
3388 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3390 move_by_pieces (target, xinner, INTVAL (size), align);
3395 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3396 enum machine_mode mode;
3398 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3400 mode = GET_MODE_WIDER_MODE (mode))
3402 enum insn_code code = movstr_optab[(int) mode];
3403 insn_operand_predicate_fn pred;
3405 if (code != CODE_FOR_nothing
3406 && ((GET_CODE (size) == CONST_INT
3407 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3408 <= (GET_MODE_MASK (mode) >> 1)))
3409 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3410 && (!(pred = insn_data[(int) code].operand[0].predicate)
3411 || ((*pred) (target, BLKmode)))
3412 && (!(pred = insn_data[(int) code].operand[1].predicate)
3413 || ((*pred) (xinner, BLKmode)))
3414 && (!(pred = insn_data[(int) code].operand[3].predicate)
3415 || ((*pred) (opalign, VOIDmode))))
3417 rtx op2 = convert_to_mode (mode, size, 1);
3418 rtx last = get_last_insn ();
3421 pred = insn_data[(int) code].operand[2].predicate;
3422 if (pred != 0 && ! (*pred) (op2, mode))
3423 op2 = copy_to_mode_reg (mode, op2);
3425 pat = GEN_FCN ((int) code) (target, xinner,
3433 delete_insns_since (last);
3438 if (!ACCUMULATE_OUTGOING_ARGS)
3440 /* If the source is referenced relative to the stack pointer,
3441 copy it to another register to stabilize it. We do not need
3442 to do this if we know that we won't be changing sp. */
3444 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3445 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3446 temp = copy_to_reg (temp);
3449 /* Make inhibit_defer_pop nonzero around the library call
3450 to force it to pop the bcopy-arguments right away. */
3452 #ifdef TARGET_MEM_FUNCTIONS
3453 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3454 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3455 convert_to_mode (TYPE_MODE (sizetype),
3456 size, TREE_UNSIGNED (sizetype)),
3457 TYPE_MODE (sizetype));
3459 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3460 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3461 convert_to_mode (TYPE_MODE (integer_type_node),
3463 TREE_UNSIGNED (integer_type_node)),
3464 TYPE_MODE (integer_type_node));
3469 else if (partial > 0)
3471 /* Scalar partly in registers. */
3473 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3476 /* # words of start of argument
3477 that we must make space for but need not store. */
3478 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3479 int args_offset = INTVAL (args_so_far);
3482 /* Push padding now if padding above and stack grows down,
3483 or if padding below and stack grows up.
3484 But if space already allocated, this has already been done. */
3485 if (extra && args_addr == 0
3486 && where_pad != none && where_pad != stack_direction)
3487 anti_adjust_stack (GEN_INT (extra));
3489 /* If we make space by pushing it, we might as well push
3490 the real data. Otherwise, we can leave OFFSET nonzero
3491 and leave the space uninitialized. */
3495 /* Now NOT_STACK gets the number of words that we don't need to
3496 allocate on the stack. */
3497 not_stack = partial - offset;
3499 /* If the partial register-part of the arg counts in its stack size,
3500 skip the part of stack space corresponding to the registers.
3501 Otherwise, start copying to the beginning of the stack space,
3502 by setting SKIP to 0. */
3503 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3505 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3506 x = validize_mem (force_const_mem (mode, x));
3508 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3509 SUBREGs of such registers are not allowed. */
3510 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3511 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3512 x = copy_to_reg (x);
3514 /* Loop over all the words allocated on the stack for this arg. */
3515 /* We can do it by words, because any scalar bigger than a word
3516 has a size a multiple of a word. */
3517 #ifndef PUSH_ARGS_REVERSED
3518 for (i = not_stack; i < size; i++)
3520 for (i = size - 1; i >= not_stack; i--)
3522 if (i >= not_stack + offset)
3523 emit_push_insn (operand_subword_force (x, i, mode),
3524 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3526 GEN_INT (args_offset + ((i - not_stack + skip)
3528 reg_parm_stack_space, alignment_pad);
3533 rtx target = NULL_RTX;
3536 /* Push padding now if padding above and stack grows down,
3537 or if padding below and stack grows up.
3538 But if space already allocated, this has already been done. */
3539 if (extra && args_addr == 0
3540 && where_pad != none && where_pad != stack_direction)
3541 anti_adjust_stack (GEN_INT (extra));
3543 #ifdef PUSH_ROUNDING
3544 if (args_addr == 0 && PUSH_ARGS)
3545 emit_single_push_insn (mode, x, type);
3549 if (GET_CODE (args_so_far) == CONST_INT)
3551 = memory_address (mode,
3552 plus_constant (args_addr,
3553 INTVAL (args_so_far)));
3555 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3558 dest = gen_rtx_MEM (mode, addr);
3561 set_mem_attributes (dest, type, 1);
3562 /* Function incoming arguments may overlap with sibling call
3563 outgoing arguments and we cannot allow reordering of reads
3564 from function arguments with stores to outgoing arguments
3565 of sibling calls. */
3566 MEM_ALIAS_SET (dest) = 0;
3569 emit_move_insn (dest, x);
3573 if (current_function_check_memory_usage && ! in_check_memory_usage)
3575 in_check_memory_usage = 1;
3577 target = get_push_address (GET_MODE_SIZE (mode));
3579 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3580 emit_library_call (chkr_copy_bitmap_libfunc,
3581 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3582 Pmode, XEXP (x, 0), Pmode,
3583 GEN_INT (GET_MODE_SIZE (mode)),
3584 TYPE_MODE (sizetype));
3586 emit_library_call (chkr_set_right_libfunc,
3587 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3588 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3589 TYPE_MODE (sizetype),
3590 GEN_INT (MEMORY_USE_RW),
3591 TYPE_MODE (integer_type_node));
3592 in_check_memory_usage = 0;
3597 /* If part should go in registers, copy that part
3598 into the appropriate registers. Do this now, at the end,
3599 since mem-to-mem copies above may do function calls. */
3600 if (partial > 0 && reg != 0)
3602 /* Handle calls that pass values in multiple non-contiguous locations.
3603 The Irix 6 ABI has examples of this. */
3604 if (GET_CODE (reg) == PARALLEL)
3605 emit_group_load (reg, x, -1, align); /* ??? size? */
3607 move_block_to_reg (REGNO (reg), x, partial, mode);
3610 if (extra && args_addr == 0 && where_pad == stack_direction)
3611 anti_adjust_stack (GEN_INT (extra));
3613 if (alignment_pad && args_addr == 0)
3614 anti_adjust_stack (alignment_pad);
3617 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3625 /* Only registers can be subtargets. */
3626 || GET_CODE (x) != REG
3627 /* If the register is readonly, it can't be set more than once. */
3628 || RTX_UNCHANGING_P (x)
3629 /* Don't use hard regs to avoid extending their life. */
3630 || REGNO (x) < FIRST_PSEUDO_REGISTER
3631 /* Avoid subtargets inside loops,
3632 since they hide some invariant expressions. */
3633 || preserve_subexpressions_p ())
3637 /* Expand an assignment that stores the value of FROM into TO.
3638 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3639 (This may contain a QUEUED rtx;
3640 if the value is constant, this rtx is a constant.)
3641 Otherwise, the returned value is NULL_RTX.
3643 SUGGEST_REG is no longer actually used.
3644 It used to mean, copy the value through a register
3645 and return that register, if that is possible.
3646 We now use WANT_VALUE to decide whether to do this. */
3649 expand_assignment (to, from, want_value, suggest_reg)
3652 int suggest_reg ATTRIBUTE_UNUSED;
3654 register rtx to_rtx = 0;
3657 /* Don't crash if the lhs of the assignment was erroneous. */
3659 if (TREE_CODE (to) == ERROR_MARK)
3661 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3662 return want_value ? result : NULL_RTX;
3665 /* Assignment of a structure component needs special treatment
3666 if the structure component's rtx is not simply a MEM.
3667 Assignment of an array element at a constant index, and assignment of
3668 an array element in an unaligned packed structure field, has the same
3671 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3672 || TREE_CODE (to) == ARRAY_REF)
3674 enum machine_mode mode1;
3675 HOST_WIDE_INT bitsize, bitpos;
3680 unsigned int alignment;
3683 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3684 &unsignedp, &volatilep, &alignment);
3686 /* If we are going to use store_bit_field and extract_bit_field,
3687 make sure to_rtx will be safe for multiple use. */
3689 if (mode1 == VOIDmode && want_value)
3690 tem = stabilize_reference (tem);
3692 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3695 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3697 if (GET_CODE (to_rtx) != MEM)
3700 if (GET_MODE (offset_rtx) != ptr_mode)
3702 #ifdef POINTERS_EXTEND_UNSIGNED
3703 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3705 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3709 /* A constant address in TO_RTX can have VOIDmode, we must not try
3710 to call force_reg for that case. Avoid that case. */
3711 if (GET_CODE (to_rtx) == MEM
3712 && GET_MODE (to_rtx) == BLKmode
3713 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3715 && (bitpos % bitsize) == 0
3716 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3717 && alignment == GET_MODE_ALIGNMENT (mode1))
3719 rtx temp = change_address (to_rtx, mode1,
3720 plus_constant (XEXP (to_rtx, 0),
3723 if (GET_CODE (XEXP (temp, 0)) == REG)
3726 to_rtx = change_address (to_rtx, mode1,
3727 force_reg (GET_MODE (XEXP (temp, 0)),
3732 to_rtx = change_address (to_rtx, VOIDmode,
3733 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3734 force_reg (ptr_mode,
3740 if (GET_CODE (to_rtx) == MEM)
3742 /* When the offset is zero, to_rtx is the address of the
3743 structure we are storing into, and hence may be shared.
3744 We must make a new MEM before setting the volatile bit. */
3746 to_rtx = copy_rtx (to_rtx);
3748 MEM_VOLATILE_P (to_rtx) = 1;
3750 #if 0 /* This was turned off because, when a field is volatile
3751 in an object which is not volatile, the object may be in a register,
3752 and then we would abort over here. */
3758 if (TREE_CODE (to) == COMPONENT_REF
3759 && TREE_READONLY (TREE_OPERAND (to, 1)))
3762 to_rtx = copy_rtx (to_rtx);
3764 RTX_UNCHANGING_P (to_rtx) = 1;
3767 /* Check the access. */
3768 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3773 enum machine_mode best_mode;
3775 best_mode = get_best_mode (bitsize, bitpos,
3776 TYPE_ALIGN (TREE_TYPE (tem)),
3778 if (best_mode == VOIDmode)
3781 best_mode_size = GET_MODE_BITSIZE (best_mode);
3782 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3783 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3784 size *= GET_MODE_SIZE (best_mode);
3786 /* Check the access right of the pointer. */
3787 in_check_memory_usage = 1;
3789 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3790 VOIDmode, 3, to_addr, Pmode,
3791 GEN_INT (size), TYPE_MODE (sizetype),
3792 GEN_INT (MEMORY_USE_WO),
3793 TYPE_MODE (integer_type_node));
3794 in_check_memory_usage = 0;
3797 /* If this is a varying-length object, we must get the address of
3798 the source and do an explicit block move. */
3801 unsigned int from_align;
3802 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3804 = change_address (to_rtx, VOIDmode,
3805 plus_constant (XEXP (to_rtx, 0),
3806 bitpos / BITS_PER_UNIT));
3808 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3809 MIN (alignment, from_align));
3816 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3818 /* Spurious cast for HPUX compiler. */
3819 ? ((enum machine_mode)
3820 TYPE_MODE (TREE_TYPE (to)))
3824 int_size_in_bytes (TREE_TYPE (tem)),
3825 get_alias_set (to));
3827 preserve_temp_slots (result);
3831 /* If the value is meaningful, convert RESULT to the proper mode.
3832 Otherwise, return nothing. */
3833 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3834 TYPE_MODE (TREE_TYPE (from)),
3836 TREE_UNSIGNED (TREE_TYPE (to)))
3841 /* If the rhs is a function call and its value is not an aggregate,
3842 call the function before we start to compute the lhs.
3843 This is needed for correct code for cases such as
3844 val = setjmp (buf) on machines where reference to val
3845 requires loading up part of an address in a separate insn.
3847 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3848 since it might be a promoted variable where the zero- or sign- extension
3849 needs to be done. Handling this in the normal way is safe because no
3850 computation is done before the call. */
3851 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3852 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3853 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3854 && GET_CODE (DECL_RTL (to)) == REG))
3859 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3861 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3863 /* Handle calls that return values in multiple non-contiguous locations.
3864 The Irix 6 ABI has examples of this. */
3865 if (GET_CODE (to_rtx) == PARALLEL)
3866 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3867 TYPE_ALIGN (TREE_TYPE (from)));
3868 else if (GET_MODE (to_rtx) == BLKmode)
3869 emit_block_move (to_rtx, value, expr_size (from),
3870 TYPE_ALIGN (TREE_TYPE (from)));
3873 #ifdef POINTERS_EXTEND_UNSIGNED
3874 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3875 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3876 value = convert_memory_address (GET_MODE (to_rtx), value);
3878 emit_move_insn (to_rtx, value);
3880 preserve_temp_slots (to_rtx);
3883 return want_value ? to_rtx : NULL_RTX;
3886 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3887 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3891 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3892 if (GET_CODE (to_rtx) == MEM)
3893 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3896 /* Don't move directly into a return register. */
3897 if (TREE_CODE (to) == RESULT_DECL
3898 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3903 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3905 if (GET_CODE (to_rtx) == PARALLEL)
3906 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3907 TYPE_ALIGN (TREE_TYPE (from)));
3909 emit_move_insn (to_rtx, temp);
3911 preserve_temp_slots (to_rtx);
3914 return want_value ? to_rtx : NULL_RTX;
3917 /* In case we are returning the contents of an object which overlaps
3918 the place the value is being stored, use a safe function when copying
3919 a value through a pointer into a structure value return block. */
3920 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3921 && current_function_returns_struct
3922 && !current_function_returns_pcc_struct)
3927 size = expr_size (from);
3928 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3929 EXPAND_MEMORY_USE_DONT);
3931 /* Copy the rights of the bitmap. */
3932 if (current_function_check_memory_usage)
3933 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3934 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3935 XEXP (from_rtx, 0), Pmode,
3936 convert_to_mode (TYPE_MODE (sizetype),
3937 size, TREE_UNSIGNED (sizetype)),
3938 TYPE_MODE (sizetype));
3940 #ifdef TARGET_MEM_FUNCTIONS
3941 emit_library_call (memmove_libfunc, LCT_NORMAL,
3942 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3943 XEXP (from_rtx, 0), Pmode,
3944 convert_to_mode (TYPE_MODE (sizetype),
3945 size, TREE_UNSIGNED (sizetype)),
3946 TYPE_MODE (sizetype));
3948 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3949 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3950 XEXP (to_rtx, 0), Pmode,
3951 convert_to_mode (TYPE_MODE (integer_type_node),
3952 size, TREE_UNSIGNED (integer_type_node)),
3953 TYPE_MODE (integer_type_node));
3956 preserve_temp_slots (to_rtx);
3959 return want_value ? to_rtx : NULL_RTX;
3962 /* Compute FROM and store the value in the rtx we got. */
3965 result = store_expr (from, to_rtx, want_value);
3966 preserve_temp_slots (result);
3969 return want_value ? result : NULL_RTX;
3972 /* Generate code for computing expression EXP,
3973 and storing the value into TARGET.
3974 TARGET may contain a QUEUED rtx.
3976 If WANT_VALUE is nonzero, return a copy of the value
3977 not in TARGET, so that we can be sure to use the proper
3978 value in a containing expression even if TARGET has something
3979 else stored in it. If possible, we copy the value through a pseudo
3980 and return that pseudo. Or, if the value is constant, we try to
3981 return the constant. In some cases, we return a pseudo
3982 copied *from* TARGET.
3984 If the mode is BLKmode then we may return TARGET itself.
3985 It turns out that in BLKmode it doesn't cause a problem.
3986 because C has no operators that could combine two different
3987 assignments into the same BLKmode object with different values
3988 with no sequence point. Will other languages need this to
3991 If WANT_VALUE is 0, we return NULL, to make sure
3992 to catch quickly any cases where the caller uses the value
3993 and fails to set WANT_VALUE. */
3996 store_expr (exp, target, want_value)
3998 register rtx target;
4002 int dont_return_target = 0;
4003 int dont_store_target = 0;
4005 if (TREE_CODE (exp) == COMPOUND_EXPR)
4007 /* Perform first part of compound expression, then assign from second
4009 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4011 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4013 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4015 /* For conditional expression, get safe form of the target. Then
4016 test the condition, doing the appropriate assignment on either
4017 side. This avoids the creation of unnecessary temporaries.
4018 For non-BLKmode, it is more efficient not to do this. */
4020 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4023 target = protect_from_queue (target, 1);
4025 do_pending_stack_adjust ();
4027 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4028 start_cleanup_deferral ();
4029 store_expr (TREE_OPERAND (exp, 1), target, 0);
4030 end_cleanup_deferral ();
4032 emit_jump_insn (gen_jump (lab2));
4035 start_cleanup_deferral ();
4036 store_expr (TREE_OPERAND (exp, 2), target, 0);
4037 end_cleanup_deferral ();
4042 return want_value ? target : NULL_RTX;
4044 else if (queued_subexp_p (target))
4045 /* If target contains a postincrement, let's not risk
4046 using it as the place to generate the rhs. */
4048 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4050 /* Expand EXP into a new pseudo. */
4051 temp = gen_reg_rtx (GET_MODE (target));
4052 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4055 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4057 /* If target is volatile, ANSI requires accessing the value
4058 *from* the target, if it is accessed. So make that happen.
4059 In no case return the target itself. */
4060 if (! MEM_VOLATILE_P (target) && want_value)
4061 dont_return_target = 1;
4063 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4064 && GET_MODE (target) != BLKmode)
4065 /* If target is in memory and caller wants value in a register instead,
4066 arrange that. Pass TARGET as target for expand_expr so that,
4067 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4068 We know expand_expr will not use the target in that case.
4069 Don't do this if TARGET is volatile because we are supposed
4070 to write it and then read it. */
4072 temp = expand_expr (exp, target, GET_MODE (target), 0);
4073 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4075 /* If TEMP is already in the desired TARGET, only copy it from
4076 memory and don't store it there again. */
4078 || (rtx_equal_p (temp, target)
4079 && ! side_effects_p (temp) && ! side_effects_p (target)))
4080 dont_store_target = 1;
4081 temp = copy_to_reg (temp);
4083 dont_return_target = 1;
4085 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4086 /* If this is an scalar in a register that is stored in a wider mode
4087 than the declared mode, compute the result into its declared mode
4088 and then convert to the wider mode. Our value is the computed
4091 /* If we don't want a value, we can do the conversion inside EXP,
4092 which will often result in some optimizations. Do the conversion
4093 in two steps: first change the signedness, if needed, then
4094 the extend. But don't do this if the type of EXP is a subtype
4095 of something else since then the conversion might involve
4096 more than just converting modes. */
4097 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4098 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4100 if (TREE_UNSIGNED (TREE_TYPE (exp))
4101 != SUBREG_PROMOTED_UNSIGNED_P (target))
4104 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4108 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4109 SUBREG_PROMOTED_UNSIGNED_P (target)),
4113 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4115 /* If TEMP is a volatile MEM and we want a result value, make
4116 the access now so it gets done only once. Likewise if
4117 it contains TARGET. */
4118 if (GET_CODE (temp) == MEM && want_value
4119 && (MEM_VOLATILE_P (temp)
4120 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4121 temp = copy_to_reg (temp);
4123 /* If TEMP is a VOIDmode constant, use convert_modes to make
4124 sure that we properly convert it. */
4125 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4126 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4127 TYPE_MODE (TREE_TYPE (exp)), temp,
4128 SUBREG_PROMOTED_UNSIGNED_P (target));
4130 convert_move (SUBREG_REG (target), temp,
4131 SUBREG_PROMOTED_UNSIGNED_P (target));
4133 /* If we promoted a constant, change the mode back down to match
4134 target. Otherwise, the caller might get confused by a result whose
4135 mode is larger than expected. */
4137 if (want_value && GET_MODE (temp) != GET_MODE (target)
4138 && GET_MODE (temp) != VOIDmode)
4140 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4141 SUBREG_PROMOTED_VAR_P (temp) = 1;
4142 SUBREG_PROMOTED_UNSIGNED_P (temp)
4143 = SUBREG_PROMOTED_UNSIGNED_P (target);
4146 return want_value ? temp : NULL_RTX;
4150 temp = expand_expr (exp, target, GET_MODE (target), 0);
4151 /* Return TARGET if it's a specified hardware register.
4152 If TARGET is a volatile mem ref, either return TARGET
4153 or return a reg copied *from* TARGET; ANSI requires this.
4155 Otherwise, if TEMP is not TARGET, return TEMP
4156 if it is constant (for efficiency),
4157 or if we really want the correct value. */
4158 if (!(target && GET_CODE (target) == REG
4159 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4160 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4161 && ! rtx_equal_p (temp, target)
4162 && (CONSTANT_P (temp) || want_value))
4163 dont_return_target = 1;
4166 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4167 the same as that of TARGET, adjust the constant. This is needed, for
4168 example, in case it is a CONST_DOUBLE and we want only a word-sized
4170 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4171 && TREE_CODE (exp) != ERROR_MARK
4172 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4173 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4174 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4176 if (current_function_check_memory_usage
4177 && GET_CODE (target) == MEM
4178 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4180 in_check_memory_usage = 1;
4181 if (GET_CODE (temp) == MEM)
4182 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
4183 VOIDmode, 3, XEXP (target, 0), Pmode,
4184 XEXP (temp, 0), Pmode,
4185 expr_size (exp), TYPE_MODE (sizetype));
4187 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
4188 VOIDmode, 3, XEXP (target, 0), Pmode,
4189 expr_size (exp), TYPE_MODE (sizetype),
4190 GEN_INT (MEMORY_USE_WO),
4191 TYPE_MODE (integer_type_node));
4192 in_check_memory_usage = 0;
4195 /* If value was not generated in the target, store it there.
4196 Convert the value to TARGET's type first if nec. */
4197 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
4198 one or both of them are volatile memory refs, we have to distinguish
4200 - expand_expr has used TARGET. In this case, we must not generate
4201 another copy. This can be detected by TARGET being equal according
4203 - expand_expr has not used TARGET - that means that the source just
4204 happens to have the same RTX form. Since temp will have been created
4205 by expand_expr, it will compare unequal according to == .
4206 We must generate a copy in this case, to reach the correct number
4207 of volatile memory references. */
4209 if ((! rtx_equal_p (temp, target)
4210 || (temp != target && (side_effects_p (temp)
4211 || side_effects_p (target))))
4212 && TREE_CODE (exp) != ERROR_MARK
4213 && ! dont_store_target)
4215 target = protect_from_queue (target, 1);
4216 if (GET_MODE (temp) != GET_MODE (target)
4217 && GET_MODE (temp) != VOIDmode)
4219 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4220 if (dont_return_target)
4222 /* In this case, we will return TEMP,
4223 so make sure it has the proper mode.
4224 But don't forget to store the value into TARGET. */
4225 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4226 emit_move_insn (target, temp);
4229 convert_move (target, temp, unsignedp);
4232 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4234 /* Handle copying a string constant into an array.
4235 The string constant may be shorter than the array.
4236 So copy just the string's actual length, and clear the rest. */
4240 /* Get the size of the data type of the string,
4241 which is actually the size of the target. */
4242 size = expr_size (exp);
4243 if (GET_CODE (size) == CONST_INT
4244 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4245 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
4248 /* Compute the size of the data to copy from the string. */
4250 = size_binop (MIN_EXPR,
4251 make_tree (sizetype, size),
4252 size_int (TREE_STRING_LENGTH (exp)));
4253 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
4254 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4258 /* Copy that much. */
4259 emit_block_move (target, temp, copy_size_rtx,
4260 TYPE_ALIGN (TREE_TYPE (exp)));
4262 /* Figure out how much is left in TARGET that we have to clear.
4263 Do all calculations in ptr_mode. */
4265 addr = XEXP (target, 0);
4266 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4268 if (GET_CODE (copy_size_rtx) == CONST_INT)
4270 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4271 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4273 (unsigned int) (BITS_PER_UNIT
4274 * (INTVAL (copy_size_rtx)
4275 & - INTVAL (copy_size_rtx))));
4279 addr = force_reg (ptr_mode, addr);
4280 addr = expand_binop (ptr_mode, add_optab, addr,
4281 copy_size_rtx, NULL_RTX, 0,
4284 size = expand_binop (ptr_mode, sub_optab, size,
4285 copy_size_rtx, NULL_RTX, 0,
4288 align = BITS_PER_UNIT;
4289 label = gen_label_rtx ();
4290 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4291 GET_MODE (size), 0, 0, label);
4293 align = MIN (align, expr_align (copy_size));
4295 if (size != const0_rtx)
4297 rtx dest = gen_rtx_MEM (BLKmode, addr);
4299 MEM_COPY_ATTRIBUTES (dest, target);
4301 /* Be sure we can write on ADDR. */
4302 in_check_memory_usage = 1;
4303 if (current_function_check_memory_usage)
4304 emit_library_call (chkr_check_addr_libfunc,
4305 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4307 size, TYPE_MODE (sizetype),
4308 GEN_INT (MEMORY_USE_WO),
4309 TYPE_MODE (integer_type_node));
4310 in_check_memory_usage = 0;
4311 clear_storage (dest, size, align);
4318 /* Handle calls that return values in multiple non-contiguous locations.
4319 The Irix 6 ABI has examples of this. */
4320 else if (GET_CODE (target) == PARALLEL)
4321 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4322 TYPE_ALIGN (TREE_TYPE (exp)));
4323 else if (GET_MODE (temp) == BLKmode)
4324 emit_block_move (target, temp, expr_size (exp),
4325 TYPE_ALIGN (TREE_TYPE (exp)));
4327 emit_move_insn (target, temp);
4330 /* If we don't want a value, return NULL_RTX. */
4334 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4335 ??? The latter test doesn't seem to make sense. */
4336 else if (dont_return_target && GET_CODE (temp) != MEM)
4339 /* Return TARGET itself if it is a hard register. */
4340 else if (want_value && GET_MODE (target) != BLKmode
4341 && ! (GET_CODE (target) == REG
4342 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4343 return copy_to_reg (target);
4349 /* Return 1 if EXP just contains zeros. */
4357 switch (TREE_CODE (exp))
4361 case NON_LVALUE_EXPR:
4362 return is_zeros_p (TREE_OPERAND (exp, 0));
4365 return integer_zerop (exp);
4369 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4372 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4375 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4376 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4377 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4378 if (! is_zeros_p (TREE_VALUE (elt)))
4388 /* Return 1 if EXP contains mostly (3/4) zeros. */
4391 mostly_zeros_p (exp)
4394 if (TREE_CODE (exp) == CONSTRUCTOR)
4396 int elts = 0, zeros = 0;
4397 tree elt = CONSTRUCTOR_ELTS (exp);
4398 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4400 /* If there are no ranges of true bits, it is all zero. */
4401 return elt == NULL_TREE;
4403 for (; elt; elt = TREE_CHAIN (elt))
4405 /* We do not handle the case where the index is a RANGE_EXPR,
4406 so the statistic will be somewhat inaccurate.
4407 We do make a more accurate count in store_constructor itself,
4408 so since this function is only used for nested array elements,
4409 this should be close enough. */
4410 if (mostly_zeros_p (TREE_VALUE (elt)))
4415 return 4 * zeros >= 3 * elts;
4418 return is_zeros_p (exp);
4421 /* Helper function for store_constructor.
4422 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4423 TYPE is the type of the CONSTRUCTOR, not the element type.
4424 ALIGN and CLEARED are as for store_constructor.
4425 ALIAS_SET is the alias set to use for any stores.
4427 This provides a recursive shortcut back to store_constructor when it isn't
4428 necessary to go through store_field. This is so that we can pass through
4429 the cleared field to let store_constructor know that we may not have to
4430 clear a substructure if the outer structure has already been cleared. */
4433 store_constructor_field (target, bitsize, bitpos,
4434 mode, exp, type, align, cleared, alias_set)
4436 unsigned HOST_WIDE_INT bitsize;
4437 HOST_WIDE_INT bitpos;
4438 enum machine_mode mode;
4444 if (TREE_CODE (exp) == CONSTRUCTOR
4445 && bitpos % BITS_PER_UNIT == 0
4446 /* If we have a non-zero bitpos for a register target, then we just
4447 let store_field do the bitfield handling. This is unlikely to
4448 generate unnecessary clear instructions anyways. */
4449 && (bitpos == 0 || GET_CODE (target) == MEM))
4453 = change_address (target,
4454 GET_MODE (target) == BLKmode
4456 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4457 ? BLKmode : VOIDmode,
4458 plus_constant (XEXP (target, 0),
4459 bitpos / BITS_PER_UNIT));
4462 /* Show the alignment may no longer be what it was and update the alias
4463 set, if required. */
4465 align = MIN (align, (unsigned int) bitpos & - bitpos);
4466 if (GET_CODE (target) == MEM)
4467 MEM_ALIAS_SET (target) = alias_set;
4469 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4472 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4473 int_size_in_bytes (type), alias_set);
4476 /* Store the value of constructor EXP into the rtx TARGET.
4477 TARGET is either a REG or a MEM.
4478 ALIGN is the maximum known alignment for TARGET.
4479 CLEARED is true if TARGET is known to have been zero'd.
4480 SIZE is the number of bytes of TARGET we are allowed to modify: this
4481 may not be the same as the size of EXP if we are assigning to a field
4482 which has been packed to exclude padding bits. */
4485 store_constructor (exp, target, align, cleared, size)
4492 tree type = TREE_TYPE (exp);
4493 #ifdef WORD_REGISTER_OPERATIONS
4494 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4497 /* We know our target cannot conflict, since safe_from_p has been called. */
4499 /* Don't try copying piece by piece into a hard register
4500 since that is vulnerable to being clobbered by EXP.
4501 Instead, construct in a pseudo register and then copy it all. */
4502 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4504 rtx temp = gen_reg_rtx (GET_MODE (target));
4505 store_constructor (exp, temp, align, cleared, size);
4506 emit_move_insn (target, temp);
4511 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4512 || TREE_CODE (type) == QUAL_UNION_TYPE)
4516 /* Inform later passes that the whole union value is dead. */
4517 if ((TREE_CODE (type) == UNION_TYPE
4518 || TREE_CODE (type) == QUAL_UNION_TYPE)
4521 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4523 /* If the constructor is empty, clear the union. */
4524 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4525 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4528 /* If we are building a static constructor into a register,
4529 set the initial value as zero so we can fold the value into
4530 a constant. But if more than one register is involved,
4531 this probably loses. */
4532 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4533 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4536 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4541 /* If the constructor has fewer fields than the structure
4542 or if we are initializing the structure to mostly zeros,
4543 clear the whole structure first. Don't do this is TARGET is
4544 register whose mode size isn't equal to SIZE since clear_storage
4545 can't handle this case. */
4547 && ((list_length (CONSTRUCTOR_ELTS (exp))
4548 != fields_length (type))
4549 || mostly_zeros_p (exp))
4550 && (GET_CODE (target) != REG
4551 || (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) == size))
4554 clear_storage (target, GEN_INT (size), align);
4559 /* Inform later passes that the old value is dead. */
4560 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4562 /* Store each element of the constructor into
4563 the corresponding field of TARGET. */
4565 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4567 register tree field = TREE_PURPOSE (elt);
4568 #ifdef WORD_REGISTER_OPERATIONS
4569 tree value = TREE_VALUE (elt);
4571 register enum machine_mode mode;
4572 HOST_WIDE_INT bitsize;
4573 HOST_WIDE_INT bitpos = 0;
4576 rtx to_rtx = target;
4578 /* Just ignore missing fields.
4579 We cleared the whole structure, above,
4580 if any fields are missing. */
4584 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4587 if (host_integerp (DECL_SIZE (field), 1))
4588 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4592 unsignedp = TREE_UNSIGNED (field);
4593 mode = DECL_MODE (field);
4594 if (DECL_BIT_FIELD (field))
4597 offset = DECL_FIELD_OFFSET (field);
4598 if (host_integerp (offset, 0)
4599 && host_integerp (bit_position (field), 0))
4601 bitpos = int_bit_position (field);
4605 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4611 if (contains_placeholder_p (offset))
4612 offset = build (WITH_RECORD_EXPR, sizetype,
4613 offset, make_tree (TREE_TYPE (exp), target));
4615 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4616 if (GET_CODE (to_rtx) != MEM)
4619 if (GET_MODE (offset_rtx) != ptr_mode)
4621 #ifdef POINTERS_EXTEND_UNSIGNED
4622 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4624 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4629 = change_address (to_rtx, VOIDmode,
4630 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4631 force_reg (ptr_mode,
4633 align = DECL_OFFSET_ALIGN (field);
4636 if (TREE_READONLY (field))
4638 if (GET_CODE (to_rtx) == MEM)
4639 to_rtx = copy_rtx (to_rtx);
4641 RTX_UNCHANGING_P (to_rtx) = 1;
4644 #ifdef WORD_REGISTER_OPERATIONS
4645 /* If this initializes a field that is smaller than a word, at the
4646 start of a word, try to widen it to a full word.
4647 This special case allows us to output C++ member function
4648 initializations in a form that the optimizers can understand. */
4649 if (GET_CODE (target) == REG
4650 && bitsize < BITS_PER_WORD
4651 && bitpos % BITS_PER_WORD == 0
4652 && GET_MODE_CLASS (mode) == MODE_INT
4653 && TREE_CODE (value) == INTEGER_CST
4655 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4657 tree type = TREE_TYPE (value);
4658 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4660 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4661 value = convert (type, value);
4663 if (BYTES_BIG_ENDIAN)
4665 = fold (build (LSHIFT_EXPR, type, value,
4666 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4667 bitsize = BITS_PER_WORD;
4671 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4672 TREE_VALUE (elt), type, align, cleared,
4673 (DECL_NONADDRESSABLE_P (field)
4674 && GET_CODE (to_rtx) == MEM)
4675 ? MEM_ALIAS_SET (to_rtx)
4676 : get_alias_set (TREE_TYPE (field)));
4679 else if (TREE_CODE (type) == ARRAY_TYPE)
4684 tree domain = TYPE_DOMAIN (type);
4685 tree elttype = TREE_TYPE (type);
4686 int const_bounds_p = (host_integerp (TYPE_MIN_VALUE (domain), 0)
4687 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4688 HOST_WIDE_INT minelt;
4689 HOST_WIDE_INT maxelt;
4691 /* If we have constant bounds for the range of the type, get them. */
4694 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4695 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4698 /* If the constructor has fewer elements than the array,
4699 clear the whole array first. Similarly if this is
4700 static constructor of a non-BLKmode object. */
4701 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4705 HOST_WIDE_INT count = 0, zero_count = 0;
4706 need_to_clear = ! const_bounds_p;
4708 /* This loop is a more accurate version of the loop in
4709 mostly_zeros_p (it handles RANGE_EXPR in an index).
4710 It is also needed to check for missing elements. */
4711 for (elt = CONSTRUCTOR_ELTS (exp);
4712 elt != NULL_TREE && ! need_to_clear;
4713 elt = TREE_CHAIN (elt))
4715 tree index = TREE_PURPOSE (elt);
4716 HOST_WIDE_INT this_node_count;
4718 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4720 tree lo_index = TREE_OPERAND (index, 0);
4721 tree hi_index = TREE_OPERAND (index, 1);
4723 if (! host_integerp (lo_index, 1)
4724 || ! host_integerp (hi_index, 1))
4730 this_node_count = (tree_low_cst (hi_index, 1)
4731 - tree_low_cst (lo_index, 1) + 1);
4734 this_node_count = 1;
4736 count += this_node_count;
4737 if (mostly_zeros_p (TREE_VALUE (elt)))
4738 zero_count += this_node_count;
4741 /* Clear the entire array first if there are any missing elements,
4742 or if the incidence of zero elements is >= 75%. */
4744 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4748 if (need_to_clear && size > 0)
4751 clear_storage (target, GEN_INT (size), align);
4755 /* Inform later passes that the old value is dead. */
4756 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4758 /* Store each element of the constructor into
4759 the corresponding element of TARGET, determined
4760 by counting the elements. */
4761 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4763 elt = TREE_CHAIN (elt), i++)
4765 register enum machine_mode mode;
4766 HOST_WIDE_INT bitsize;
4767 HOST_WIDE_INT bitpos;
4769 tree value = TREE_VALUE (elt);
4770 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4771 tree index = TREE_PURPOSE (elt);
4772 rtx xtarget = target;
4774 if (cleared && is_zeros_p (value))
4777 unsignedp = TREE_UNSIGNED (elttype);
4778 mode = TYPE_MODE (elttype);
4779 if (mode == BLKmode)
4780 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4781 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4784 bitsize = GET_MODE_BITSIZE (mode);
4786 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4788 tree lo_index = TREE_OPERAND (index, 0);
4789 tree hi_index = TREE_OPERAND (index, 1);
4790 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4791 struct nesting *loop;
4792 HOST_WIDE_INT lo, hi, count;
4795 /* If the range is constant and "small", unroll the loop. */
4797 && host_integerp (lo_index, 0)
4798 && host_integerp (hi_index, 0)
4799 && (lo = tree_low_cst (lo_index, 0),
4800 hi = tree_low_cst (hi_index, 0),
4801 count = hi - lo + 1,
4802 (GET_CODE (target) != MEM
4804 || (host_integerp (TYPE_SIZE (elttype), 1)
4805 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4808 lo -= minelt; hi -= minelt;
4809 for (; lo <= hi; lo++)
4811 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4812 store_constructor_field
4813 (target, bitsize, bitpos, mode, value, type, align,
4815 TYPE_NONALIASED_COMPONENT (type)
4816 ? MEM_ALIAS_SET (target) : get_alias_set (elttype));
4821 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4822 loop_top = gen_label_rtx ();
4823 loop_end = gen_label_rtx ();
4825 unsignedp = TREE_UNSIGNED (domain);
4827 index = build_decl (VAR_DECL, NULL_TREE, domain);
4830 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4832 SET_DECL_RTL (index, index_r);
4833 if (TREE_CODE (value) == SAVE_EXPR
4834 && SAVE_EXPR_RTL (value) == 0)
4836 /* Make sure value gets expanded once before the
4838 expand_expr (value, const0_rtx, VOIDmode, 0);
4841 store_expr (lo_index, index_r, 0);
4842 loop = expand_start_loop (0);
4844 /* Assign value to element index. */
4846 = convert (ssizetype,
4847 fold (build (MINUS_EXPR, TREE_TYPE (index),
4848 index, TYPE_MIN_VALUE (domain))));
4849 position = size_binop (MULT_EXPR, position,
4851 TYPE_SIZE_UNIT (elttype)));
4853 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4854 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4855 xtarget = change_address (target, mode, addr);
4856 if (TREE_CODE (value) == CONSTRUCTOR)
4857 store_constructor (value, xtarget, align, cleared,
4858 bitsize / BITS_PER_UNIT);
4860 store_expr (value, xtarget, 0);
4862 expand_exit_loop_if_false (loop,
4863 build (LT_EXPR, integer_type_node,
4866 expand_increment (build (PREINCREMENT_EXPR,
4868 index, integer_one_node), 0, 0);
4870 emit_label (loop_end);
4873 else if ((index != 0 && ! host_integerp (index, 0))
4874 || ! host_integerp (TYPE_SIZE (elttype), 1))
4880 index = ssize_int (1);
4883 index = convert (ssizetype,
4884 fold (build (MINUS_EXPR, index,
4885 TYPE_MIN_VALUE (domain))));
4887 position = size_binop (MULT_EXPR, index,
4889 TYPE_SIZE_UNIT (elttype)));
4890 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4891 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4892 xtarget = change_address (target, mode, addr);
4893 store_expr (value, xtarget, 0);
4898 bitpos = ((tree_low_cst (index, 0) - minelt)
4899 * tree_low_cst (TYPE_SIZE (elttype), 1));
4901 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4903 store_constructor_field (target, bitsize, bitpos, mode, value,
4904 type, align, cleared,
4905 TYPE_NONALIASED_COMPONENT (type)
4906 && GET_CODE (target) == MEM
4907 ? MEM_ALIAS_SET (target) :
4908 get_alias_set (elttype));
4914 /* Set constructor assignments. */
4915 else if (TREE_CODE (type) == SET_TYPE)
4917 tree elt = CONSTRUCTOR_ELTS (exp);
4918 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4919 tree domain = TYPE_DOMAIN (type);
4920 tree domain_min, domain_max, bitlength;
4922 /* The default implementation strategy is to extract the constant
4923 parts of the constructor, use that to initialize the target,
4924 and then "or" in whatever non-constant ranges we need in addition.
4926 If a large set is all zero or all ones, it is
4927 probably better to set it using memset (if available) or bzero.
4928 Also, if a large set has just a single range, it may also be
4929 better to first clear all the first clear the set (using
4930 bzero/memset), and set the bits we want. */
4932 /* Check for all zeros. */
4933 if (elt == NULL_TREE && size > 0)
4936 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4940 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4941 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4942 bitlength = size_binop (PLUS_EXPR,
4943 size_diffop (domain_max, domain_min),
4946 nbits = tree_low_cst (bitlength, 1);
4948 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4949 are "complicated" (more than one range), initialize (the
4950 constant parts) by copying from a constant. */
4951 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4952 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4954 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4955 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4956 char *bit_buffer = (char *) alloca (nbits);
4957 HOST_WIDE_INT word = 0;
4958 unsigned int bit_pos = 0;
4959 unsigned int ibit = 0;
4960 unsigned int offset = 0; /* In bytes from beginning of set. */
4962 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4965 if (bit_buffer[ibit])
4967 if (BYTES_BIG_ENDIAN)
4968 word |= (1 << (set_word_size - 1 - bit_pos));
4970 word |= 1 << bit_pos;
4974 if (bit_pos >= set_word_size || ibit == nbits)
4976 if (word != 0 || ! cleared)
4978 rtx datum = GEN_INT (word);
4981 /* The assumption here is that it is safe to use
4982 XEXP if the set is multi-word, but not if
4983 it's single-word. */
4984 if (GET_CODE (target) == MEM)
4986 to_rtx = plus_constant (XEXP (target, 0), offset);
4987 to_rtx = change_address (target, mode, to_rtx);
4989 else if (offset == 0)
4993 emit_move_insn (to_rtx, datum);
5000 offset += set_word_size / BITS_PER_UNIT;
5005 /* Don't bother clearing storage if the set is all ones. */
5006 if (TREE_CHAIN (elt) != NULL_TREE
5007 || (TREE_PURPOSE (elt) == NULL_TREE
5009 : ( ! host_integerp (TREE_VALUE (elt), 0)
5010 || ! host_integerp (TREE_PURPOSE (elt), 0)
5011 || (tree_low_cst (TREE_VALUE (elt), 0)
5012 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5013 != (HOST_WIDE_INT) nbits))))
5014 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
5016 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5018 /* Start of range of element or NULL. */
5019 tree startbit = TREE_PURPOSE (elt);
5020 /* End of range of element, or element value. */
5021 tree endbit = TREE_VALUE (elt);
5022 #ifdef TARGET_MEM_FUNCTIONS
5023 HOST_WIDE_INT startb, endb;
5025 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5027 bitlength_rtx = expand_expr (bitlength,
5028 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5030 /* Handle non-range tuple element like [ expr ]. */
5031 if (startbit == NULL_TREE)
5033 startbit = save_expr (endbit);
5037 startbit = convert (sizetype, startbit);
5038 endbit = convert (sizetype, endbit);
5039 if (! integer_zerop (domain_min))
5041 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5042 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5044 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5045 EXPAND_CONST_ADDRESS);
5046 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5047 EXPAND_CONST_ADDRESS);
5053 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
5056 emit_move_insn (targetx, target);
5059 else if (GET_CODE (target) == MEM)
5064 #ifdef TARGET_MEM_FUNCTIONS
5065 /* Optimization: If startbit and endbit are
5066 constants divisible by BITS_PER_UNIT,
5067 call memset instead. */
5068 if (TREE_CODE (startbit) == INTEGER_CST
5069 && TREE_CODE (endbit) == INTEGER_CST
5070 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5071 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5073 emit_library_call (memset_libfunc, LCT_NORMAL,
5075 plus_constant (XEXP (targetx, 0),
5076 startb / BITS_PER_UNIT),
5078 constm1_rtx, TYPE_MODE (integer_type_node),
5079 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5080 TYPE_MODE (sizetype));
5084 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5085 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5086 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5087 startbit_rtx, TYPE_MODE (sizetype),
5088 endbit_rtx, TYPE_MODE (sizetype));
5091 emit_move_insn (target, targetx);
5099 /* Store the value of EXP (an expression tree)
5100 into a subfield of TARGET which has mode MODE and occupies
5101 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5102 If MODE is VOIDmode, it means that we are storing into a bit-field.
5104 If VALUE_MODE is VOIDmode, return nothing in particular.
5105 UNSIGNEDP is not used in this case.
5107 Otherwise, return an rtx for the value stored. This rtx
5108 has mode VALUE_MODE if that is convenient to do.
5109 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5111 ALIGN is the alignment that TARGET is known to have.
5112 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
5114 ALIAS_SET is the alias set for the destination. This value will
5115 (in general) be different from that for TARGET, since TARGET is a
5116 reference to the containing structure. */
5119 store_field (target, bitsize, bitpos, mode, exp, value_mode,
5120 unsignedp, align, total_size, alias_set)
5122 HOST_WIDE_INT bitsize;
5123 HOST_WIDE_INT bitpos;
5124 enum machine_mode mode;
5126 enum machine_mode value_mode;
5129 HOST_WIDE_INT total_size;
5132 HOST_WIDE_INT width_mask = 0;
5134 if (TREE_CODE (exp) == ERROR_MARK)
5137 /* If we have nothing to store, do nothing unless the expression has
5140 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5142 if (bitsize < HOST_BITS_PER_WIDE_INT)
5143 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5145 /* If we are storing into an unaligned field of an aligned union that is
5146 in a register, we may have the mode of TARGET being an integer mode but
5147 MODE == BLKmode. In that case, get an aligned object whose size and
5148 alignment are the same as TARGET and store TARGET into it (we can avoid
5149 the store if the field being stored is the entire width of TARGET). Then
5150 call ourselves recursively to store the field into a BLKmode version of
5151 that object. Finally, load from the object into TARGET. This is not
5152 very efficient in general, but should only be slightly more expensive
5153 than the otherwise-required unaligned accesses. Perhaps this can be
5154 cleaned up later. */
5157 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5161 (build_qualified_type (type_for_mode (GET_MODE (target), 0),
5164 rtx blk_object = copy_rtx (object);
5166 PUT_MODE (blk_object, BLKmode);
5168 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5169 emit_move_insn (object, target);
5171 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
5172 align, total_size, alias_set);
5174 /* Even though we aren't returning target, we need to
5175 give it the updated value. */
5176 emit_move_insn (target, object);
5181 if (GET_CODE (target) == CONCAT)
5183 /* We're storing into a struct containing a single __complex. */
5187 return store_expr (exp, target, 0);
5190 /* If the structure is in a register or if the component
5191 is a bit field, we cannot use addressing to access it.
5192 Use bit-field techniques or SUBREG to store in it. */
5194 if (mode == VOIDmode
5195 || (mode != BLKmode && ! direct_store[(int) mode]
5196 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5197 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5198 || GET_CODE (target) == REG
5199 || GET_CODE (target) == SUBREG
5200 /* If the field isn't aligned enough to store as an ordinary memref,
5201 store it as a bit field. */
5202 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5203 && (align < GET_MODE_ALIGNMENT (mode)
5204 || bitpos % GET_MODE_ALIGNMENT (mode)))
5205 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5206 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
5207 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
5208 /* If the RHS and field are a constant size and the size of the
5209 RHS isn't the same size as the bitfield, we must use bitfield
5212 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5213 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5215 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5217 /* If BITSIZE is narrower than the size of the type of EXP
5218 we will be narrowing TEMP. Normally, what's wanted are the
5219 low-order bits. However, if EXP's type is a record and this is
5220 big-endian machine, we want the upper BITSIZE bits. */
5221 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5222 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5223 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5224 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5225 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5229 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5231 if (mode != VOIDmode && mode != BLKmode
5232 && mode != TYPE_MODE (TREE_TYPE (exp)))
5233 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5235 /* If the modes of TARGET and TEMP are both BLKmode, both
5236 must be in memory and BITPOS must be aligned on a byte
5237 boundary. If so, we simply do a block copy. */
5238 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5240 unsigned int exp_align = expr_align (exp);
5242 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5243 || bitpos % BITS_PER_UNIT != 0)
5246 target = change_address (target, VOIDmode,
5247 plus_constant (XEXP (target, 0),
5248 bitpos / BITS_PER_UNIT));
5250 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
5251 align = MIN (exp_align, align);
5253 /* Find an alignment that is consistent with the bit position. */
5254 while ((bitpos % align) != 0)
5257 emit_block_move (target, temp,
5258 bitsize == -1 ? expr_size (exp)
5259 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5263 return value_mode == VOIDmode ? const0_rtx : target;
5266 /* Store the value in the bitfield. */
5267 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
5268 if (value_mode != VOIDmode)
5270 /* The caller wants an rtx for the value. */
5271 /* If possible, avoid refetching from the bitfield itself. */
5273 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5276 enum machine_mode tmode;
5279 return expand_and (temp,
5283 GET_MODE (temp) == VOIDmode
5285 : GET_MODE (temp))), NULL_RTX);
5286 tmode = GET_MODE (temp);
5287 if (tmode == VOIDmode)
5289 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5290 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5291 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5293 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5294 NULL_RTX, value_mode, 0, align,
5301 rtx addr = XEXP (target, 0);
5304 /* If a value is wanted, it must be the lhs;
5305 so make the address stable for multiple use. */
5307 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5308 && ! CONSTANT_ADDRESS_P (addr)
5309 /* A frame-pointer reference is already stable. */
5310 && ! (GET_CODE (addr) == PLUS
5311 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5312 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5313 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5314 addr = copy_to_reg (addr);
5316 /* Now build a reference to just the desired component. */
5318 to_rtx = copy_rtx (change_address (target, mode,
5319 plus_constant (addr,
5321 / BITS_PER_UNIT))));
5322 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5323 MEM_ALIAS_SET (to_rtx) = alias_set;
5325 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5329 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5330 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
5331 ARRAY_REFs and find the ultimate containing object, which we return.
5333 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5334 bit position, and *PUNSIGNEDP to the signedness of the field.
5335 If the position of the field is variable, we store a tree
5336 giving the variable offset (in units) in *POFFSET.
5337 This offset is in addition to the bit position.
5338 If the position is not variable, we store 0 in *POFFSET.
5339 We set *PALIGNMENT to the alignment of the address that will be
5340 computed. This is the alignment of the thing we return if *POFFSET
5341 is zero, but can be more less strictly aligned if *POFFSET is nonzero.
5343 If any of the extraction expressions is volatile,
5344 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5346 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5347 is a mode that can be used to access the field. In that case, *PBITSIZE
5350 If the field describes a variable-sized object, *PMODE is set to
5351 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5352 this case, but the address of the object can be found. */
5355 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5356 punsignedp, pvolatilep, palignment)
5358 HOST_WIDE_INT *pbitsize;
5359 HOST_WIDE_INT *pbitpos;
5361 enum machine_mode *pmode;
5364 unsigned int *palignment;
5367 enum machine_mode mode = VOIDmode;
5368 tree offset = size_zero_node;
5369 tree bit_offset = bitsize_zero_node;
5370 unsigned int alignment = BIGGEST_ALIGNMENT;
5373 /* First get the mode, signedness, and size. We do this from just the
5374 outermost expression. */
5375 if (TREE_CODE (exp) == COMPONENT_REF)
5377 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5378 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5379 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5381 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5383 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5385 size_tree = TREE_OPERAND (exp, 1);
5386 *punsignedp = TREE_UNSIGNED (exp);
5390 mode = TYPE_MODE (TREE_TYPE (exp));
5391 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5393 if (mode == BLKmode)
5394 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5396 *pbitsize = GET_MODE_BITSIZE (mode);
5401 if (! host_integerp (size_tree, 1))
5402 mode = BLKmode, *pbitsize = -1;
5404 *pbitsize = tree_low_cst (size_tree, 1);
5407 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5408 and find the ultimate containing object. */
5411 if (TREE_CODE (exp) == BIT_FIELD_REF)
5412 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5413 else if (TREE_CODE (exp) == COMPONENT_REF)
5415 tree field = TREE_OPERAND (exp, 1);
5416 tree this_offset = DECL_FIELD_OFFSET (field);
5418 /* If this field hasn't been filled in yet, don't go
5419 past it. This should only happen when folding expressions
5420 made during type construction. */
5421 if (this_offset == 0)
5423 else if (! TREE_CONSTANT (this_offset)
5424 && contains_placeholder_p (this_offset))
5425 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5427 offset = size_binop (PLUS_EXPR, offset, this_offset);
5428 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5429 DECL_FIELD_BIT_OFFSET (field));
5431 if (! host_integerp (offset, 0))
5432 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5435 else if (TREE_CODE (exp) == ARRAY_REF)
5437 tree index = TREE_OPERAND (exp, 1);
5438 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5439 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5440 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (exp));
5442 /* We assume all arrays have sizes that are a multiple of a byte.
5443 First subtract the lower bound, if any, in the type of the
5444 index, then convert to sizetype and multiply by the size of the
5446 if (low_bound != 0 && ! integer_zerop (low_bound))
5447 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5450 /* If the index has a self-referential type, pass it to a
5451 WITH_RECORD_EXPR; if the component size is, pass our
5452 component to one. */
5453 if (! TREE_CONSTANT (index)
5454 && contains_placeholder_p (index))
5455 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5456 if (! TREE_CONSTANT (unit_size)
5457 && contains_placeholder_p (unit_size))
5458 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size,
5459 TREE_OPERAND (exp, 0));
5461 offset = size_binop (PLUS_EXPR, offset,
5462 size_binop (MULT_EXPR,
5463 convert (sizetype, index),
5467 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5468 && ! ((TREE_CODE (exp) == NOP_EXPR
5469 || TREE_CODE (exp) == CONVERT_EXPR)
5470 && (TYPE_MODE (TREE_TYPE (exp))
5471 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5474 /* If any reference in the chain is volatile, the effect is volatile. */
5475 if (TREE_THIS_VOLATILE (exp))
5478 /* If the offset is non-constant already, then we can't assume any
5479 alignment more than the alignment here. */
5480 if (! TREE_CONSTANT (offset))
5481 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5483 exp = TREE_OPERAND (exp, 0);
5487 alignment = MIN (alignment, DECL_ALIGN (exp));
5488 else if (TREE_TYPE (exp) != 0)
5489 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5491 /* If OFFSET is constant, see if we can return the whole thing as a
5492 constant bit position. Otherwise, split it up. */
5493 if (host_integerp (offset, 0)
5494 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5496 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5497 && host_integerp (tem, 0))
5498 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5500 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5503 *palignment = alignment;
5507 /* Subroutine of expand_exp: compute memory_usage from modifier. */
5509 static enum memory_use_mode
5510 get_memory_usage_from_modifier (modifier)
5511 enum expand_modifier modifier;
5517 return MEMORY_USE_RO;
5519 case EXPAND_MEMORY_USE_WO:
5520 return MEMORY_USE_WO;
5522 case EXPAND_MEMORY_USE_RW:
5523 return MEMORY_USE_RW;
5525 case EXPAND_MEMORY_USE_DONT:
5526 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5527 MEMORY_USE_DONT, because they are modifiers to a call of
5528 expand_expr in the ADDR_EXPR case of expand_expr. */
5529 case EXPAND_CONST_ADDRESS:
5530 case EXPAND_INITIALIZER:
5531 return MEMORY_USE_DONT;
5532 case EXPAND_MEMORY_USE_BAD:
5538 /* Given an rtx VALUE that may contain additions and multiplications, return
5539 an equivalent value that just refers to a register, memory, or constant.
5540 This is done by generating instructions to perform the arithmetic and
5541 returning a pseudo-register containing the value.
5543 The returned value may be a REG, SUBREG, MEM or constant. */
5546 force_operand (value, target)
5549 register optab binoptab = 0;
5550 /* Use a temporary to force order of execution of calls to
5554 /* Use subtarget as the target for operand 0 of a binary operation. */
5555 register rtx subtarget = get_subtarget (target);
5557 /* Check for a PIC address load. */
5559 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5560 && XEXP (value, 0) == pic_offset_table_rtx
5561 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5562 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5563 || GET_CODE (XEXP (value, 1)) == CONST))
5566 subtarget = gen_reg_rtx (GET_MODE (value));
5567 emit_move_insn (subtarget, value);
5571 if (GET_CODE (value) == PLUS)
5572 binoptab = add_optab;
5573 else if (GET_CODE (value) == MINUS)
5574 binoptab = sub_optab;
5575 else if (GET_CODE (value) == MULT)
5577 op2 = XEXP (value, 1);
5578 if (!CONSTANT_P (op2)
5579 && !(GET_CODE (op2) == REG && op2 != subtarget))
5581 tmp = force_operand (XEXP (value, 0), subtarget);
5582 return expand_mult (GET_MODE (value), tmp,
5583 force_operand (op2, NULL_RTX),
5589 op2 = XEXP (value, 1);
5590 if (!CONSTANT_P (op2)
5591 && !(GET_CODE (op2) == REG && op2 != subtarget))
5593 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5595 binoptab = add_optab;
5596 op2 = negate_rtx (GET_MODE (value), op2);
5599 /* Check for an addition with OP2 a constant integer and our first
5600 operand a PLUS of a virtual register and something else. In that
5601 case, we want to emit the sum of the virtual register and the
5602 constant first and then add the other value. This allows virtual
5603 register instantiation to simply modify the constant rather than
5604 creating another one around this addition. */
5605 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5606 && GET_CODE (XEXP (value, 0)) == PLUS
5607 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5608 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5609 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5611 rtx temp = expand_binop (GET_MODE (value), binoptab,
5612 XEXP (XEXP (value, 0), 0), op2,
5613 subtarget, 0, OPTAB_LIB_WIDEN);
5614 return expand_binop (GET_MODE (value), binoptab, temp,
5615 force_operand (XEXP (XEXP (value, 0), 1), 0),
5616 target, 0, OPTAB_LIB_WIDEN);
5619 tmp = force_operand (XEXP (value, 0), subtarget);
5620 return expand_binop (GET_MODE (value), binoptab, tmp,
5621 force_operand (op2, NULL_RTX),
5622 target, 0, OPTAB_LIB_WIDEN);
5623 /* We give UNSIGNEDP = 0 to expand_binop
5624 because the only operations we are expanding here are signed ones. */
5629 /* Subroutine of expand_expr:
5630 save the non-copied parts (LIST) of an expr (LHS), and return a list
5631 which can restore these values to their previous values,
5632 should something modify their storage. */
5635 save_noncopied_parts (lhs, list)
5642 for (tail = list; tail; tail = TREE_CHAIN (tail))
5643 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5644 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5647 tree part = TREE_VALUE (tail);
5648 tree part_type = TREE_TYPE (part);
5649 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5651 = assign_temp (build_qualified_type (part_type,
5652 (TYPE_QUALS (part_type)
5653 | TYPE_QUAL_CONST)),
5656 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5657 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5658 parts = tree_cons (to_be_saved,
5659 build (RTL_EXPR, part_type, NULL_TREE,
5662 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5667 /* Subroutine of expand_expr:
5668 record the non-copied parts (LIST) of an expr (LHS), and return a list
5669 which specifies the initial values of these parts. */
5672 init_noncopied_parts (lhs, list)
5679 for (tail = list; tail; tail = TREE_CHAIN (tail))
5680 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5681 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5682 else if (TREE_PURPOSE (tail))
5684 tree part = TREE_VALUE (tail);
5685 tree part_type = TREE_TYPE (part);
5686 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5687 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5692 /* Subroutine of expand_expr: return nonzero iff there is no way that
5693 EXP can reference X, which is being modified. TOP_P is nonzero if this
5694 call is going to be used to determine whether we need a temporary
5695 for EXP, as opposed to a recursive call to this function.
5697 It is always safe for this routine to return zero since it merely
5698 searches for optimization opportunities. */
5701 safe_from_p (x, exp, top_p)
5708 static tree save_expr_list;
5711 /* If EXP has varying size, we MUST use a target since we currently
5712 have no way of allocating temporaries of variable size
5713 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5714 So we assume here that something at a higher level has prevented a
5715 clash. This is somewhat bogus, but the best we can do. Only
5716 do this when X is BLKmode and when we are at the top level. */
5717 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5718 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5719 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5720 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5721 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5723 && GET_MODE (x) == BLKmode)
5724 /* If X is in the outgoing argument area, it is always safe. */
5725 || (GET_CODE (x) == MEM
5726 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5727 || (GET_CODE (XEXP (x, 0)) == PLUS
5728 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5731 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5732 find the underlying pseudo. */
5733 if (GET_CODE (x) == SUBREG)
5736 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5740 /* A SAVE_EXPR might appear many times in the expression passed to the
5741 top-level safe_from_p call, and if it has a complex subexpression,
5742 examining it multiple times could result in a combinatorial explosion.
5743 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5744 with optimization took about 28 minutes to compile -- even though it was
5745 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5746 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5747 we have processed. Note that the only test of top_p was above. */
5756 rtn = safe_from_p (x, exp, 0);
5758 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5759 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5764 /* Now look at our tree code and possibly recurse. */
5765 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5768 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5775 if (TREE_CODE (exp) == TREE_LIST)
5776 return ((TREE_VALUE (exp) == 0
5777 || safe_from_p (x, TREE_VALUE (exp), 0))
5778 && (TREE_CHAIN (exp) == 0
5779 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5780 else if (TREE_CODE (exp) == ERROR_MARK)
5781 return 1; /* An already-visited SAVE_EXPR? */
5786 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5790 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5791 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5795 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5796 the expression. If it is set, we conflict iff we are that rtx or
5797 both are in memory. Otherwise, we check all operands of the
5798 expression recursively. */
5800 switch (TREE_CODE (exp))
5803 return (staticp (TREE_OPERAND (exp, 0))
5804 || TREE_STATIC (exp)
5805 || safe_from_p (x, TREE_OPERAND (exp, 0), 0));
5808 if (GET_CODE (x) == MEM
5809 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5810 get_alias_set (exp)))
5815 /* Assume that the call will clobber all hard registers and
5817 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5818 || GET_CODE (x) == MEM)
5823 /* If a sequence exists, we would have to scan every instruction
5824 in the sequence to see if it was safe. This is probably not
5826 if (RTL_EXPR_SEQUENCE (exp))
5829 exp_rtl = RTL_EXPR_RTL (exp);
5832 case WITH_CLEANUP_EXPR:
5833 exp_rtl = RTL_EXPR_RTL (exp);
5836 case CLEANUP_POINT_EXPR:
5837 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5840 exp_rtl = SAVE_EXPR_RTL (exp);
5844 /* If we've already scanned this, don't do it again. Otherwise,
5845 show we've scanned it and record for clearing the flag if we're
5847 if (TREE_PRIVATE (exp))
5850 TREE_PRIVATE (exp) = 1;
5851 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5853 TREE_PRIVATE (exp) = 0;
5857 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5861 /* The only operand we look at is operand 1. The rest aren't
5862 part of the expression. */
5863 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5865 case METHOD_CALL_EXPR:
5866 /* This takes a rtx argument, but shouldn't appear here. */
5873 /* If we have an rtx, we do not need to scan our operands. */
5877 nops = first_rtl_op (TREE_CODE (exp));
5878 for (i = 0; i < nops; i++)
5879 if (TREE_OPERAND (exp, i) != 0
5880 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5883 /* If this is a language-specific tree code, it may require
5884 special handling. */
5885 if ((unsigned int) TREE_CODE (exp)
5886 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5888 && !(*lang_safe_from_p) (x, exp))
5892 /* If we have an rtl, find any enclosed object. Then see if we conflict
5896 if (GET_CODE (exp_rtl) == SUBREG)
5898 exp_rtl = SUBREG_REG (exp_rtl);
5899 if (GET_CODE (exp_rtl) == REG
5900 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5904 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5905 are memory and they conflict. */
5906 return ! (rtx_equal_p (x, exp_rtl)
5907 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5908 && true_dependence (exp_rtl, GET_MODE (x), x,
5909 rtx_addr_varies_p)));
5912 /* If we reach here, it is safe. */
5916 /* Subroutine of expand_expr: return nonzero iff EXP is an
5917 expression whose type is statically determinable. */
5923 if (TREE_CODE (exp) == PARM_DECL
5924 || TREE_CODE (exp) == VAR_DECL
5925 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5926 || TREE_CODE (exp) == COMPONENT_REF
5927 || TREE_CODE (exp) == ARRAY_REF)
5932 /* Subroutine of expand_expr: return rtx if EXP is a
5933 variable or parameter; else return 0. */
5940 switch (TREE_CODE (exp))
5944 return DECL_RTL (exp);
5950 #ifdef MAX_INTEGER_COMPUTATION_MODE
5953 check_max_integer_computation_mode (exp)
5956 enum tree_code code;
5957 enum machine_mode mode;
5959 /* Strip any NOPs that don't change the mode. */
5961 code = TREE_CODE (exp);
5963 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5964 if (code == NOP_EXPR
5965 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5968 /* First check the type of the overall operation. We need only look at
5969 unary, binary and relational operations. */
5970 if (TREE_CODE_CLASS (code) == '1'
5971 || TREE_CODE_CLASS (code) == '2'
5972 || TREE_CODE_CLASS (code) == '<')
5974 mode = TYPE_MODE (TREE_TYPE (exp));
5975 if (GET_MODE_CLASS (mode) == MODE_INT
5976 && mode > MAX_INTEGER_COMPUTATION_MODE)
5977 internal_error ("unsupported wide integer operation");
5980 /* Check operand of a unary op. */
5981 if (TREE_CODE_CLASS (code) == '1')
5983 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5984 if (GET_MODE_CLASS (mode) == MODE_INT
5985 && mode > MAX_INTEGER_COMPUTATION_MODE)
5986 internal_error ("unsupported wide integer operation");
5989 /* Check operands of a binary/comparison op. */
5990 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5992 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5993 if (GET_MODE_CLASS (mode) == MODE_INT
5994 && mode > MAX_INTEGER_COMPUTATION_MODE)
5995 internal_error ("unsupported wide integer operation");
5997 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5998 if (GET_MODE_CLASS (mode) == MODE_INT
5999 && mode > MAX_INTEGER_COMPUTATION_MODE)
6000 internal_error ("unsupported wide integer operation");
6005 /* expand_expr: generate code for computing expression EXP.
6006 An rtx for the computed value is returned. The value is never null.
6007 In the case of a void EXP, const0_rtx is returned.
6009 The value may be stored in TARGET if TARGET is nonzero.
6010 TARGET is just a suggestion; callers must assume that
6011 the rtx returned may not be the same as TARGET.
6013 If TARGET is CONST0_RTX, it means that the value will be ignored.
6015 If TMODE is not VOIDmode, it suggests generating the
6016 result in mode TMODE. But this is done only when convenient.
6017 Otherwise, TMODE is ignored and the value generated in its natural mode.
6018 TMODE is just a suggestion; callers must assume that
6019 the rtx returned may not have mode TMODE.
6021 Note that TARGET may have neither TMODE nor MODE. In that case, it
6022 probably will not be used.
6024 If MODIFIER is EXPAND_SUM then when EXP is an addition
6025 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6026 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6027 products as above, or REG or MEM, or constant.
6028 Ordinarily in such cases we would output mul or add instructions
6029 and then return a pseudo reg containing the sum.
6031 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6032 it also marks a label as absolutely required (it can't be dead).
6033 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6034 This is used for outputting expressions used in initializers.
6036 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6037 with a constant address even if that address is not normally legitimate.
6038 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
6041 expand_expr (exp, target, tmode, modifier)
6044 enum machine_mode tmode;
6045 enum expand_modifier modifier;
6047 register rtx op0, op1, temp;
6048 tree type = TREE_TYPE (exp);
6049 int unsignedp = TREE_UNSIGNED (type);
6050 register enum machine_mode mode;
6051 register enum tree_code code = TREE_CODE (exp);
6053 rtx subtarget, original_target;
6056 /* Used by check-memory-usage to make modifier read only. */
6057 enum expand_modifier ro_modifier;
6059 /* Handle ERROR_MARK before anybody tries to access its type. */
6060 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6062 op0 = CONST0_RTX (tmode);
6068 mode = TYPE_MODE (type);
6069 /* Use subtarget as the target for operand 0 of a binary operation. */
6070 subtarget = get_subtarget (target);
6071 original_target = target;
6072 ignore = (target == const0_rtx
6073 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6074 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6075 || code == COND_EXPR)
6076 && TREE_CODE (type) == VOID_TYPE));
6078 /* Make a read-only version of the modifier. */
6079 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
6080 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
6081 ro_modifier = modifier;
6083 ro_modifier = EXPAND_NORMAL;
6085 /* If we are going to ignore this result, we need only do something
6086 if there is a side-effect somewhere in the expression. If there
6087 is, short-circuit the most common cases here. Note that we must
6088 not call expand_expr with anything but const0_rtx in case this
6089 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6093 if (! TREE_SIDE_EFFECTS (exp))
6096 /* Ensure we reference a volatile object even if value is ignored, but
6097 don't do this if all we are doing is taking its address. */
6098 if (TREE_THIS_VOLATILE (exp)
6099 && TREE_CODE (exp) != FUNCTION_DECL
6100 && mode != VOIDmode && mode != BLKmode
6101 && modifier != EXPAND_CONST_ADDRESS)
6103 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
6104 if (GET_CODE (temp) == MEM)
6105 temp = copy_to_reg (temp);
6109 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6110 || code == INDIRECT_REF || code == BUFFER_REF)
6111 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6112 VOIDmode, ro_modifier);
6113 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6114 || code == ARRAY_REF)
6116 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
6117 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
6120 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6121 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6122 /* If the second operand has no side effects, just evaluate
6124 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6125 VOIDmode, ro_modifier);
6126 else if (code == BIT_FIELD_REF)
6128 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
6129 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
6130 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
6137 #ifdef MAX_INTEGER_COMPUTATION_MODE
6138 /* Only check stuff here if the mode we want is different from the mode
6139 of the expression; if it's the same, check_max_integer_computiation_mode
6140 will handle it. Do we really need to check this stuff at all? */
6143 && GET_MODE (target) != mode
6144 && TREE_CODE (exp) != INTEGER_CST
6145 && TREE_CODE (exp) != PARM_DECL
6146 && TREE_CODE (exp) != ARRAY_REF
6147 && TREE_CODE (exp) != COMPONENT_REF
6148 && TREE_CODE (exp) != BIT_FIELD_REF
6149 && TREE_CODE (exp) != INDIRECT_REF
6150 && TREE_CODE (exp) != CALL_EXPR
6151 && TREE_CODE (exp) != VAR_DECL
6152 && TREE_CODE (exp) != RTL_EXPR)
6154 enum machine_mode mode = GET_MODE (target);
6156 if (GET_MODE_CLASS (mode) == MODE_INT
6157 && mode > MAX_INTEGER_COMPUTATION_MODE)
6158 internal_error ("unsupported wide integer operation");
6162 && TREE_CODE (exp) != INTEGER_CST
6163 && TREE_CODE (exp) != PARM_DECL
6164 && TREE_CODE (exp) != ARRAY_REF
6165 && TREE_CODE (exp) != COMPONENT_REF
6166 && TREE_CODE (exp) != BIT_FIELD_REF
6167 && TREE_CODE (exp) != INDIRECT_REF
6168 && TREE_CODE (exp) != VAR_DECL
6169 && TREE_CODE (exp) != CALL_EXPR
6170 && TREE_CODE (exp) != RTL_EXPR
6171 && GET_MODE_CLASS (tmode) == MODE_INT
6172 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6173 internal_error ("unsupported wide integer operation");
6175 check_max_integer_computation_mode (exp);
6178 /* If will do cse, generate all results into pseudo registers
6179 since 1) that allows cse to find more things
6180 and 2) otherwise cse could produce an insn the machine
6183 if (! cse_not_expected && mode != BLKmode && target
6184 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
6191 tree function = decl_function_context (exp);
6192 /* Handle using a label in a containing function. */
6193 if (function != current_function_decl
6194 && function != inline_function_decl && function != 0)
6196 struct function *p = find_function_data (function);
6197 p->expr->x_forced_labels
6198 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6199 p->expr->x_forced_labels);
6203 if (modifier == EXPAND_INITIALIZER)
6204 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6209 temp = gen_rtx_MEM (FUNCTION_MODE,
6210 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6211 if (function != current_function_decl
6212 && function != inline_function_decl && function != 0)
6213 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6218 if (DECL_RTL (exp) == 0)
6220 error_with_decl (exp, "prior parameter's size depends on `%s'");
6221 return CONST0_RTX (mode);
6224 /* ... fall through ... */
6227 /* If a static var's type was incomplete when the decl was written,
6228 but the type is complete now, lay out the decl now. */
6229 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6230 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6232 layout_decl (exp, 0);
6233 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
6236 /* Although static-storage variables start off initialized, according to
6237 ANSI C, a memcpy could overwrite them with uninitialized values. So
6238 we check them too. This also lets us check for read-only variables
6239 accessed via a non-const declaration, in case it won't be detected
6240 any other way (e.g., in an embedded system or OS kernel without
6243 Aggregates are not checked here; they're handled elsewhere. */
6244 if (cfun && current_function_check_memory_usage
6246 && GET_CODE (DECL_RTL (exp)) == MEM
6247 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6249 enum memory_use_mode memory_usage;
6250 memory_usage = get_memory_usage_from_modifier (modifier);
6252 in_check_memory_usage = 1;
6253 if (memory_usage != MEMORY_USE_DONT)
6254 emit_library_call (chkr_check_addr_libfunc,
6255 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6256 XEXP (DECL_RTL (exp), 0), Pmode,
6257 GEN_INT (int_size_in_bytes (type)),
6258 TYPE_MODE (sizetype),
6259 GEN_INT (memory_usage),
6260 TYPE_MODE (integer_type_node));
6261 in_check_memory_usage = 0;
6264 /* ... fall through ... */
6268 if (DECL_RTL (exp) == 0)
6271 /* Ensure variable marked as used even if it doesn't go through
6272 a parser. If it hasn't be used yet, write out an external
6274 if (! TREE_USED (exp))
6276 assemble_external (exp);
6277 TREE_USED (exp) = 1;
6280 /* Show we haven't gotten RTL for this yet. */
6283 /* Handle variables inherited from containing functions. */
6284 context = decl_function_context (exp);
6286 /* We treat inline_function_decl as an alias for the current function
6287 because that is the inline function whose vars, types, etc.
6288 are being merged into the current function.
6289 See expand_inline_function. */
6291 if (context != 0 && context != current_function_decl
6292 && context != inline_function_decl
6293 /* If var is static, we don't need a static chain to access it. */
6294 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6295 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6299 /* Mark as non-local and addressable. */
6300 DECL_NONLOCAL (exp) = 1;
6301 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6303 mark_addressable (exp);
6304 if (GET_CODE (DECL_RTL (exp)) != MEM)
6306 addr = XEXP (DECL_RTL (exp), 0);
6307 if (GET_CODE (addr) == MEM)
6308 addr = change_address (addr, Pmode,
6309 fix_lexical_addr (XEXP (addr, 0), exp));
6311 addr = fix_lexical_addr (addr, exp);
6313 temp = change_address (DECL_RTL (exp), mode, addr);
6316 /* This is the case of an array whose size is to be determined
6317 from its initializer, while the initializer is still being parsed.
6320 else if (GET_CODE (DECL_RTL (exp)) == MEM
6321 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6322 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
6323 XEXP (DECL_RTL (exp), 0));
6325 /* If DECL_RTL is memory, we are in the normal case and either
6326 the address is not valid or it is not a register and -fforce-addr
6327 is specified, get the address into a register. */
6329 else if (GET_CODE (DECL_RTL (exp)) == MEM
6330 && modifier != EXPAND_CONST_ADDRESS
6331 && modifier != EXPAND_SUM
6332 && modifier != EXPAND_INITIALIZER
6333 && (! memory_address_p (DECL_MODE (exp),
6334 XEXP (DECL_RTL (exp), 0))
6336 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6337 temp = change_address (DECL_RTL (exp), VOIDmode,
6338 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6340 /* If we got something, return it. But first, set the alignment
6341 the address is a register. */
6344 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6345 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6350 /* If the mode of DECL_RTL does not match that of the decl, it
6351 must be a promoted value. We return a SUBREG of the wanted mode,
6352 but mark it so that we know that it was already extended. */
6354 if (GET_CODE (DECL_RTL (exp)) == REG
6355 && GET_MODE (DECL_RTL (exp)) != mode)
6357 /* Get the signedness used for this variable. Ensure we get the
6358 same mode we got when the variable was declared. */
6359 if (GET_MODE (DECL_RTL (exp))
6360 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6363 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6364 SUBREG_PROMOTED_VAR_P (temp) = 1;
6365 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6369 return DECL_RTL (exp);
6372 return immed_double_const (TREE_INT_CST_LOW (exp),
6373 TREE_INT_CST_HIGH (exp), mode);
6376 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6377 EXPAND_MEMORY_USE_BAD);
6380 /* If optimized, generate immediate CONST_DOUBLE
6381 which will be turned into memory by reload if necessary.
6383 We used to force a register so that loop.c could see it. But
6384 this does not allow gen_* patterns to perform optimizations with
6385 the constants. It also produces two insns in cases like "x = 1.0;".
6386 On most machines, floating-point constants are not permitted in
6387 many insns, so we'd end up copying it to a register in any case.
6389 Now, we do the copying in expand_binop, if appropriate. */
6390 return immed_real_const (exp);
6394 if (! TREE_CST_RTL (exp))
6395 output_constant_def (exp, 1);
6397 /* TREE_CST_RTL probably contains a constant address.
6398 On RISC machines where a constant address isn't valid,
6399 make some insns to get that address into a register. */
6400 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6401 && modifier != EXPAND_CONST_ADDRESS
6402 && modifier != EXPAND_INITIALIZER
6403 && modifier != EXPAND_SUM
6404 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6406 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6407 return change_address (TREE_CST_RTL (exp), VOIDmode,
6408 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6409 return TREE_CST_RTL (exp);
6411 case EXPR_WITH_FILE_LOCATION:
6414 const char *saved_input_filename = input_filename;
6415 int saved_lineno = lineno;
6416 input_filename = EXPR_WFL_FILENAME (exp);
6417 lineno = EXPR_WFL_LINENO (exp);
6418 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6419 emit_line_note (input_filename, lineno);
6420 /* Possibly avoid switching back and force here. */
6421 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6422 input_filename = saved_input_filename;
6423 lineno = saved_lineno;
6428 context = decl_function_context (exp);
6430 /* If this SAVE_EXPR was at global context, assume we are an
6431 initialization function and move it into our context. */
6433 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6435 /* We treat inline_function_decl as an alias for the current function
6436 because that is the inline function whose vars, types, etc.
6437 are being merged into the current function.
6438 See expand_inline_function. */
6439 if (context == current_function_decl || context == inline_function_decl)
6442 /* If this is non-local, handle it. */
6445 /* The following call just exists to abort if the context is
6446 not of a containing function. */
6447 find_function_data (context);
6449 temp = SAVE_EXPR_RTL (exp);
6450 if (temp && GET_CODE (temp) == REG)
6452 put_var_into_stack (exp);
6453 temp = SAVE_EXPR_RTL (exp);
6455 if (temp == 0 || GET_CODE (temp) != MEM)
6457 return change_address (temp, mode,
6458 fix_lexical_addr (XEXP (temp, 0), exp));
6460 if (SAVE_EXPR_RTL (exp) == 0)
6462 if (mode == VOIDmode)
6465 temp = assign_temp (build_qualified_type (type,
6467 | TYPE_QUAL_CONST)),
6470 SAVE_EXPR_RTL (exp) = temp;
6471 if (!optimize && GET_CODE (temp) == REG)
6472 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6475 /* If the mode of TEMP does not match that of the expression, it
6476 must be a promoted value. We pass store_expr a SUBREG of the
6477 wanted mode but mark it so that we know that it was already
6478 extended. Note that `unsignedp' was modified above in
6481 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6483 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6484 SUBREG_PROMOTED_VAR_P (temp) = 1;
6485 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6488 if (temp == const0_rtx)
6489 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6490 EXPAND_MEMORY_USE_BAD);
6492 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6494 TREE_USED (exp) = 1;
6497 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6498 must be a promoted value. We return a SUBREG of the wanted mode,
6499 but mark it so that we know that it was already extended. */
6501 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6502 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6504 /* Compute the signedness and make the proper SUBREG. */
6505 promote_mode (type, mode, &unsignedp, 0);
6506 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6507 SUBREG_PROMOTED_VAR_P (temp) = 1;
6508 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6512 return SAVE_EXPR_RTL (exp);
6517 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6518 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6522 case PLACEHOLDER_EXPR:
6524 tree placeholder_expr;
6526 /* If there is an object on the head of the placeholder list,
6527 see if some object in it of type TYPE or a pointer to it. For
6528 further information, see tree.def. */
6529 for (placeholder_expr = placeholder_list;
6530 placeholder_expr != 0;
6531 placeholder_expr = TREE_CHAIN (placeholder_expr))
6533 tree need_type = TYPE_MAIN_VARIANT (type);
6535 tree old_list = placeholder_list;
6538 /* Find the outermost reference that is of the type we want.
6539 If none, see if any object has a type that is a pointer to
6540 the type we want. */
6541 for (elt = TREE_PURPOSE (placeholder_expr);
6542 elt != 0 && object == 0;
6544 = ((TREE_CODE (elt) == COMPOUND_EXPR
6545 || TREE_CODE (elt) == COND_EXPR)
6546 ? TREE_OPERAND (elt, 1)
6547 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6548 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6549 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6550 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6551 ? TREE_OPERAND (elt, 0) : 0))
6552 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6555 for (elt = TREE_PURPOSE (placeholder_expr);
6556 elt != 0 && object == 0;
6558 = ((TREE_CODE (elt) == COMPOUND_EXPR
6559 || TREE_CODE (elt) == COND_EXPR)
6560 ? TREE_OPERAND (elt, 1)
6561 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6562 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6563 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6564 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6565 ? TREE_OPERAND (elt, 0) : 0))
6566 if (POINTER_TYPE_P (TREE_TYPE (elt))
6567 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6569 object = build1 (INDIRECT_REF, need_type, elt);
6573 /* Expand this object skipping the list entries before
6574 it was found in case it is also a PLACEHOLDER_EXPR.
6575 In that case, we want to translate it using subsequent
6577 placeholder_list = TREE_CHAIN (placeholder_expr);
6578 temp = expand_expr (object, original_target, tmode,
6580 placeholder_list = old_list;
6586 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6589 case WITH_RECORD_EXPR:
6590 /* Put the object on the placeholder list, expand our first operand,
6591 and pop the list. */
6592 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6594 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6595 tmode, ro_modifier);
6596 placeholder_list = TREE_CHAIN (placeholder_list);
6600 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6601 expand_goto (TREE_OPERAND (exp, 0));
6603 expand_computed_goto (TREE_OPERAND (exp, 0));
6607 expand_exit_loop_if_false (NULL,
6608 invert_truthvalue (TREE_OPERAND (exp, 0)));
6611 case LABELED_BLOCK_EXPR:
6612 if (LABELED_BLOCK_BODY (exp))
6613 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6614 /* Should perhaps use expand_label, but this is simpler and safer. */
6615 do_pending_stack_adjust ();
6616 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6619 case EXIT_BLOCK_EXPR:
6620 if (EXIT_BLOCK_RETURN (exp))
6621 sorry ("returned value in block_exit_expr");
6622 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6627 expand_start_loop (1);
6628 expand_expr_stmt (TREE_OPERAND (exp, 0));
6636 tree vars = TREE_OPERAND (exp, 0);
6637 int vars_need_expansion = 0;
6639 /* Need to open a binding contour here because
6640 if there are any cleanups they must be contained here. */
6641 expand_start_bindings (2);
6643 /* Mark the corresponding BLOCK for output in its proper place. */
6644 if (TREE_OPERAND (exp, 2) != 0
6645 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6646 insert_block (TREE_OPERAND (exp, 2));
6648 /* If VARS have not yet been expanded, expand them now. */
6651 if (!DECL_RTL_SET_P (vars))
6653 vars_need_expansion = 1;
6656 expand_decl_init (vars);
6657 vars = TREE_CHAIN (vars);
6660 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6662 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6668 if (RTL_EXPR_SEQUENCE (exp))
6670 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6672 emit_insns (RTL_EXPR_SEQUENCE (exp));
6673 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6675 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6676 free_temps_for_rtl_expr (exp);
6677 return RTL_EXPR_RTL (exp);
6680 /* If we don't need the result, just ensure we evaluate any
6685 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6686 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6687 EXPAND_MEMORY_USE_BAD);
6691 /* All elts simple constants => refer to a constant in memory. But
6692 if this is a non-BLKmode mode, let it store a field at a time
6693 since that should make a CONST_INT or CONST_DOUBLE when we
6694 fold. Likewise, if we have a target we can use, it is best to
6695 store directly into the target unless the type is large enough
6696 that memcpy will be used. If we are making an initializer and
6697 all operands are constant, put it in memory as well. */
6698 else if ((TREE_STATIC (exp)
6699 && ((mode == BLKmode
6700 && ! (target != 0 && safe_from_p (target, exp, 1)))
6701 || TREE_ADDRESSABLE (exp)
6702 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6703 && (! MOVE_BY_PIECES_P
6704 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6706 && ! mostly_zeros_p (exp))))
6707 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6709 rtx constructor = output_constant_def (exp, 1);
6711 if (modifier != EXPAND_CONST_ADDRESS
6712 && modifier != EXPAND_INITIALIZER
6713 && modifier != EXPAND_SUM
6714 && (! memory_address_p (GET_MODE (constructor),
6715 XEXP (constructor, 0))
6717 && GET_CODE (XEXP (constructor, 0)) != REG)))
6718 constructor = change_address (constructor, VOIDmode,
6719 XEXP (constructor, 0));
6724 /* Handle calls that pass values in multiple non-contiguous
6725 locations. The Irix 6 ABI has examples of this. */
6726 if (target == 0 || ! safe_from_p (target, exp, 1)
6727 || GET_CODE (target) == PARALLEL)
6729 = assign_temp (build_qualified_type (type,
6731 | (TREE_READONLY (exp)
6732 * TYPE_QUAL_CONST))),
6733 TREE_ADDRESSABLE (exp), 1, 1);
6735 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6736 int_size_in_bytes (TREE_TYPE (exp)));
6742 tree exp1 = TREE_OPERAND (exp, 0);
6744 tree string = string_constant (exp1, &index);
6746 /* Try to optimize reads from const strings. */
6748 && TREE_CODE (string) == STRING_CST
6749 && TREE_CODE (index) == INTEGER_CST
6750 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6751 && GET_MODE_CLASS (mode) == MODE_INT
6752 && GET_MODE_SIZE (mode) == 1
6753 && modifier != EXPAND_MEMORY_USE_WO)
6755 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6757 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6758 op0 = memory_address (mode, op0);
6760 if (cfun && current_function_check_memory_usage
6761 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6763 enum memory_use_mode memory_usage;
6764 memory_usage = get_memory_usage_from_modifier (modifier);
6766 if (memory_usage != MEMORY_USE_DONT)
6768 in_check_memory_usage = 1;
6769 emit_library_call (chkr_check_addr_libfunc,
6770 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6771 Pmode, GEN_INT (int_size_in_bytes (type)),
6772 TYPE_MODE (sizetype),
6773 GEN_INT (memory_usage),
6774 TYPE_MODE (integer_type_node));
6775 in_check_memory_usage = 0;
6779 temp = gen_rtx_MEM (mode, op0);
6780 set_mem_attributes (temp, exp, 0);
6782 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6783 here, because, in C and C++, the fact that a location is accessed
6784 through a pointer to const does not mean that the value there can
6785 never change. Languages where it can never change should
6786 also set TREE_STATIC. */
6787 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6789 /* If we are writing to this object and its type is a record with
6790 readonly fields, we must mark it as readonly so it will
6791 conflict with readonly references to those fields. */
6792 if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
6793 RTX_UNCHANGING_P (temp) = 1;
6799 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6803 tree array = TREE_OPERAND (exp, 0);
6804 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6805 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6806 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6809 /* Optimize the special-case of a zero lower bound.
6811 We convert the low_bound to sizetype to avoid some problems
6812 with constant folding. (E.g. suppose the lower bound is 1,
6813 and its mode is QI. Without the conversion, (ARRAY
6814 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6815 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6817 if (! integer_zerop (low_bound))
6818 index = size_diffop (index, convert (sizetype, low_bound));
6820 /* Fold an expression like: "foo"[2].
6821 This is not done in fold so it won't happen inside &.
6822 Don't fold if this is for wide characters since it's too
6823 difficult to do correctly and this is a very rare case. */
6825 if (TREE_CODE (array) == STRING_CST
6826 && TREE_CODE (index) == INTEGER_CST
6827 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6828 && GET_MODE_CLASS (mode) == MODE_INT
6829 && GET_MODE_SIZE (mode) == 1)
6831 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6833 /* If this is a constant index into a constant array,
6834 just get the value from the array. Handle both the cases when
6835 we have an explicit constructor and when our operand is a variable
6836 that was declared const. */
6838 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6839 && TREE_CODE (index) == INTEGER_CST
6840 && 0 > compare_tree_int (index,
6841 list_length (CONSTRUCTOR_ELTS
6842 (TREE_OPERAND (exp, 0)))))
6846 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6847 i = TREE_INT_CST_LOW (index);
6848 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6852 return expand_expr (fold (TREE_VALUE (elem)), target,
6853 tmode, ro_modifier);
6856 else if (optimize >= 1
6857 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6858 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6859 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6861 if (TREE_CODE (index) == INTEGER_CST)
6863 tree init = DECL_INITIAL (array);
6865 if (TREE_CODE (init) == CONSTRUCTOR)
6869 for (elem = CONSTRUCTOR_ELTS (init);
6871 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6872 elem = TREE_CHAIN (elem))
6876 return expand_expr (fold (TREE_VALUE (elem)), target,
6877 tmode, ro_modifier);
6879 else if (TREE_CODE (init) == STRING_CST
6880 && 0 > compare_tree_int (index,
6881 TREE_STRING_LENGTH (init)))
6883 tree type = TREE_TYPE (TREE_TYPE (init));
6884 enum machine_mode mode = TYPE_MODE (type);
6886 if (GET_MODE_CLASS (mode) == MODE_INT
6887 && GET_MODE_SIZE (mode) == 1)
6889 (TREE_STRING_POINTER
6890 (init)[TREE_INT_CST_LOW (index)]));
6899 /* If the operand is a CONSTRUCTOR, we can just extract the
6900 appropriate field if it is present. Don't do this if we have
6901 already written the data since we want to refer to that copy
6902 and varasm.c assumes that's what we'll do. */
6903 if (code != ARRAY_REF
6904 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6905 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6909 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6910 elt = TREE_CHAIN (elt))
6911 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6912 /* We can normally use the value of the field in the
6913 CONSTRUCTOR. However, if this is a bitfield in
6914 an integral mode that we can fit in a HOST_WIDE_INT,
6915 we must mask only the number of bits in the bitfield,
6916 since this is done implicitly by the constructor. If
6917 the bitfield does not meet either of those conditions,
6918 we can't do this optimization. */
6919 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6920 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6922 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6923 <= HOST_BITS_PER_WIDE_INT))))
6925 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6926 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6928 HOST_WIDE_INT bitsize
6929 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6931 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6933 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6934 op0 = expand_and (op0, op1, target);
6938 enum machine_mode imode
6939 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6941 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6944 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6946 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6956 enum machine_mode mode1;
6957 HOST_WIDE_INT bitsize, bitpos;
6960 unsigned int alignment;
6961 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6962 &mode1, &unsignedp, &volatilep,
6965 /* If we got back the original object, something is wrong. Perhaps
6966 we are evaluating an expression too early. In any event, don't
6967 infinitely recurse. */
6971 /* If TEM's type is a union of variable size, pass TARGET to the inner
6972 computation, since it will need a temporary and TARGET is known
6973 to have to do. This occurs in unchecked conversion in Ada. */
6975 op0 = expand_expr (tem,
6976 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6977 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6979 ? target : NULL_RTX),
6981 (modifier == EXPAND_INITIALIZER
6982 || modifier == EXPAND_CONST_ADDRESS)
6983 ? modifier : EXPAND_NORMAL);
6985 /* If this is a constant, put it into a register if it is a
6986 legitimate constant and OFFSET is 0 and memory if it isn't. */
6987 if (CONSTANT_P (op0))
6989 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6990 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6992 op0 = force_reg (mode, op0);
6994 op0 = validize_mem (force_const_mem (mode, op0));
6999 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7001 /* If this object is in memory, put it into a register.
7002 This case can't occur in C, but can in Ada if we have
7003 unchecked conversion of an expression from a scalar type to
7004 an array or record type. */
7005 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7006 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7008 tree nt = build_qualified_type (TREE_TYPE (tem),
7009 (TYPE_QUALS (TREE_TYPE (tem))
7010 | TYPE_QUAL_CONST));
7011 rtx memloc = assign_temp (nt, 1, 1, 1);
7013 mark_temp_addr_taken (memloc);
7014 emit_move_insn (memloc, op0);
7018 if (GET_CODE (op0) != MEM)
7021 if (GET_MODE (offset_rtx) != ptr_mode)
7023 #ifdef POINTERS_EXTEND_UNSIGNED
7024 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
7026 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7030 /* A constant address in OP0 can have VOIDmode, we must not try
7031 to call force_reg for that case. Avoid that case. */
7032 if (GET_CODE (op0) == MEM
7033 && GET_MODE (op0) == BLKmode
7034 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7036 && (bitpos % bitsize) == 0
7037 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7038 && alignment == GET_MODE_ALIGNMENT (mode1))
7040 rtx temp = change_address (op0, mode1,
7041 plus_constant (XEXP (op0, 0),
7044 if (GET_CODE (XEXP (temp, 0)) == REG)
7047 op0 = change_address (op0, mode1,
7048 force_reg (GET_MODE (XEXP (temp, 0)),
7053 op0 = change_address (op0, VOIDmode,
7054 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
7055 force_reg (ptr_mode,
7059 /* Don't forget about volatility even if this is a bitfield. */
7060 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7062 op0 = copy_rtx (op0);
7063 MEM_VOLATILE_P (op0) = 1;
7066 /* Check the access. */
7067 if (cfun != 0 && current_function_check_memory_usage
7068 && GET_CODE (op0) == MEM)
7070 enum memory_use_mode memory_usage;
7071 memory_usage = get_memory_usage_from_modifier (modifier);
7073 if (memory_usage != MEMORY_USE_DONT)
7078 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
7079 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
7081 /* Check the access right of the pointer. */
7082 in_check_memory_usage = 1;
7083 if (size > BITS_PER_UNIT)
7084 emit_library_call (chkr_check_addr_libfunc,
7085 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
7086 Pmode, GEN_INT (size / BITS_PER_UNIT),
7087 TYPE_MODE (sizetype),
7088 GEN_INT (memory_usage),
7089 TYPE_MODE (integer_type_node));
7090 in_check_memory_usage = 0;
7094 /* In cases where an aligned union has an unaligned object
7095 as a field, we might be extracting a BLKmode value from
7096 an integer-mode (e.g., SImode) object. Handle this case
7097 by doing the extract into an object as wide as the field
7098 (which we know to be the width of a basic mode), then
7099 storing into memory, and changing the mode to BLKmode.
7100 If we ultimately want the address (EXPAND_CONST_ADDRESS or
7101 EXPAND_INITIALIZER), then we must not copy to a temporary. */
7102 if (mode1 == VOIDmode
7103 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7104 || (modifier != EXPAND_CONST_ADDRESS
7105 && modifier != EXPAND_INITIALIZER
7106 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
7107 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7108 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
7109 /* If the field isn't aligned enough to fetch as a memref,
7110 fetch it as a bit field. */
7111 || (mode1 != BLKmode
7112 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
7113 && ((TYPE_ALIGN (TREE_TYPE (tem))
7114 < GET_MODE_ALIGNMENT (mode))
7115 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7116 /* If the type and the field are a constant size and the
7117 size of the type isn't the same size as the bitfield,
7118 we must use bitfield operations. */
7120 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7122 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7124 || (modifier != EXPAND_CONST_ADDRESS
7125 && modifier != EXPAND_INITIALIZER
7127 && SLOW_UNALIGNED_ACCESS (mode, alignment)
7128 && (TYPE_ALIGN (type) > alignment
7129 || bitpos % TYPE_ALIGN (type) != 0)))
7131 enum machine_mode ext_mode = mode;
7133 if (ext_mode == BLKmode
7134 && ! (target != 0 && GET_CODE (op0) == MEM
7135 && GET_CODE (target) == MEM
7136 && bitpos % BITS_PER_UNIT == 0))
7137 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7139 if (ext_mode == BLKmode)
7141 /* In this case, BITPOS must start at a byte boundary and
7142 TARGET, if specified, must be a MEM. */
7143 if (GET_CODE (op0) != MEM
7144 || (target != 0 && GET_CODE (target) != MEM)
7145 || bitpos % BITS_PER_UNIT != 0)
7148 op0 = change_address (op0, VOIDmode,
7149 plus_constant (XEXP (op0, 0),
7150 bitpos / BITS_PER_UNIT));
7152 target = assign_temp (type, 0, 1, 1);
7154 emit_block_move (target, op0,
7155 bitsize == -1 ? expr_size (exp)
7156 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7163 op0 = validize_mem (op0);
7165 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7166 mark_reg_pointer (XEXP (op0, 0), alignment);
7168 op0 = extract_bit_field (op0, bitsize, bitpos,
7169 unsignedp, target, ext_mode, ext_mode,
7171 int_size_in_bytes (TREE_TYPE (tem)));
7173 /* If the result is a record type and BITSIZE is narrower than
7174 the mode of OP0, an integral mode, and this is a big endian
7175 machine, we must put the field into the high-order bits. */
7176 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7177 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7178 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
7179 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7180 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7184 if (mode == BLKmode)
7186 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
7188 rtx new = assign_temp (nt, 0, 1, 1);
7190 emit_move_insn (new, op0);
7191 op0 = copy_rtx (new);
7192 PUT_MODE (op0, BLKmode);
7198 /* If the result is BLKmode, use that to access the object
7200 if (mode == BLKmode)
7203 /* Get a reference to just this component. */
7204 if (modifier == EXPAND_CONST_ADDRESS
7205 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7207 rtx new = gen_rtx_MEM (mode1,
7208 plus_constant (XEXP (op0, 0),
7209 (bitpos / BITS_PER_UNIT)));
7211 MEM_COPY_ATTRIBUTES (new, op0);
7215 op0 = change_address (op0, mode1,
7216 plus_constant (XEXP (op0, 0),
7217 (bitpos / BITS_PER_UNIT)));
7219 set_mem_attributes (op0, exp, 0);
7220 if (GET_CODE (XEXP (op0, 0)) == REG)
7221 mark_reg_pointer (XEXP (op0, 0), alignment);
7223 MEM_VOLATILE_P (op0) |= volatilep;
7224 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7225 || modifier == EXPAND_CONST_ADDRESS
7226 || modifier == EXPAND_INITIALIZER)
7228 else if (target == 0)
7229 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7231 convert_move (target, op0, unsignedp);
7235 /* Intended for a reference to a buffer of a file-object in Pascal.
7236 But it's not certain that a special tree code will really be
7237 necessary for these. INDIRECT_REF might work for them. */
7243 /* Pascal set IN expression.
7246 rlo = set_low - (set_low%bits_per_word);
7247 the_word = set [ (index - rlo)/bits_per_word ];
7248 bit_index = index % bits_per_word;
7249 bitmask = 1 << bit_index;
7250 return !!(the_word & bitmask); */
7252 tree set = TREE_OPERAND (exp, 0);
7253 tree index = TREE_OPERAND (exp, 1);
7254 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7255 tree set_type = TREE_TYPE (set);
7256 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7257 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7258 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7259 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7260 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7261 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7262 rtx setaddr = XEXP (setval, 0);
7263 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7265 rtx diff, quo, rem, addr, bit, result;
7267 /* If domain is empty, answer is no. Likewise if index is constant
7268 and out of bounds. */
7269 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7270 && TREE_CODE (set_low_bound) == INTEGER_CST
7271 && tree_int_cst_lt (set_high_bound, set_low_bound))
7272 || (TREE_CODE (index) == INTEGER_CST
7273 && TREE_CODE (set_low_bound) == INTEGER_CST
7274 && tree_int_cst_lt (index, set_low_bound))
7275 || (TREE_CODE (set_high_bound) == INTEGER_CST
7276 && TREE_CODE (index) == INTEGER_CST
7277 && tree_int_cst_lt (set_high_bound, index))))
7281 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7283 /* If we get here, we have to generate the code for both cases
7284 (in range and out of range). */
7286 op0 = gen_label_rtx ();
7287 op1 = gen_label_rtx ();
7289 if (! (GET_CODE (index_val) == CONST_INT
7290 && GET_CODE (lo_r) == CONST_INT))
7292 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7293 GET_MODE (index_val), iunsignedp, 0, op1);
7296 if (! (GET_CODE (index_val) == CONST_INT
7297 && GET_CODE (hi_r) == CONST_INT))
7299 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7300 GET_MODE (index_val), iunsignedp, 0, op1);
7303 /* Calculate the element number of bit zero in the first word
7305 if (GET_CODE (lo_r) == CONST_INT)
7306 rlow = GEN_INT (INTVAL (lo_r)
7307 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7309 rlow = expand_binop (index_mode, and_optab, lo_r,
7310 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7311 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7313 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7314 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7316 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7317 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7318 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7319 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7321 addr = memory_address (byte_mode,
7322 expand_binop (index_mode, add_optab, diff,
7323 setaddr, NULL_RTX, iunsignedp,
7326 /* Extract the bit we want to examine. */
7327 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7328 gen_rtx_MEM (byte_mode, addr),
7329 make_tree (TREE_TYPE (index), rem),
7331 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7332 GET_MODE (target) == byte_mode ? target : 0,
7333 1, OPTAB_LIB_WIDEN);
7335 if (result != target)
7336 convert_move (target, result, 1);
7338 /* Output the code to handle the out-of-range case. */
7341 emit_move_insn (target, const0_rtx);
7346 case WITH_CLEANUP_EXPR:
7347 if (RTL_EXPR_RTL (exp) == 0)
7350 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7351 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
7353 /* That's it for this cleanup. */
7354 TREE_OPERAND (exp, 2) = 0;
7356 return RTL_EXPR_RTL (exp);
7358 case CLEANUP_POINT_EXPR:
7360 /* Start a new binding layer that will keep track of all cleanup
7361 actions to be performed. */
7362 expand_start_bindings (2);
7364 target_temp_slot_level = temp_slot_level;
7366 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7367 /* If we're going to use this value, load it up now. */
7369 op0 = force_not_mem (op0);
7370 preserve_temp_slots (op0);
7371 expand_end_bindings (NULL_TREE, 0, 0);
7376 /* Check for a built-in function. */
7377 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7378 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7380 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7382 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7383 == BUILT_IN_FRONTEND)
7384 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7386 return expand_builtin (exp, target, subtarget, tmode, ignore);
7389 return expand_call (exp, target, ignore);
7391 case NON_LVALUE_EXPR:
7394 case REFERENCE_EXPR:
7395 if (TREE_OPERAND (exp, 0) == error_mark_node)
7398 if (TREE_CODE (type) == UNION_TYPE)
7400 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7402 /* If both input and output are BLKmode, this conversion
7403 isn't actually doing anything unless we need to make the
7404 alignment stricter. */
7405 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7406 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7407 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7408 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7412 target = assign_temp (type, 0, 1, 1);
7414 if (GET_CODE (target) == MEM)
7415 /* Store data into beginning of memory target. */
7416 store_expr (TREE_OPERAND (exp, 0),
7417 change_address (target, TYPE_MODE (valtype), 0), 0);
7419 else if (GET_CODE (target) == REG)
7420 /* Store this field into a union of the proper type. */
7421 store_field (target,
7422 MIN ((int_size_in_bytes (TREE_TYPE
7423 (TREE_OPERAND (exp, 0)))
7425 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7426 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7427 VOIDmode, 0, BITS_PER_UNIT,
7428 int_size_in_bytes (type), 0);
7432 /* Return the entire union. */
7436 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7438 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7441 /* If the signedness of the conversion differs and OP0 is
7442 a promoted SUBREG, clear that indication since we now
7443 have to do the proper extension. */
7444 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7445 && GET_CODE (op0) == SUBREG)
7446 SUBREG_PROMOTED_VAR_P (op0) = 0;
7451 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7452 if (GET_MODE (op0) == mode)
7455 /* If OP0 is a constant, just convert it into the proper mode. */
7456 if (CONSTANT_P (op0))
7458 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7459 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7461 if (modifier == EXPAND_INITIALIZER)
7462 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7466 convert_to_mode (mode, op0,
7467 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7469 convert_move (target, op0,
7470 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7474 /* We come here from MINUS_EXPR when the second operand is a
7477 this_optab = ! unsignedp && flag_trapv
7478 && (GET_MODE_CLASS(mode) == MODE_INT)
7479 ? addv_optab : add_optab;
7481 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7482 something else, make sure we add the register to the constant and
7483 then to the other thing. This case can occur during strength
7484 reduction and doing it this way will produce better code if the
7485 frame pointer or argument pointer is eliminated.
7487 fold-const.c will ensure that the constant is always in the inner
7488 PLUS_EXPR, so the only case we need to do anything about is if
7489 sp, ap, or fp is our second argument, in which case we must swap
7490 the innermost first argument and our second argument. */
7492 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7493 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7494 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7495 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7496 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7497 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7499 tree t = TREE_OPERAND (exp, 1);
7501 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7502 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7505 /* If the result is to be ptr_mode and we are adding an integer to
7506 something, we might be forming a constant. So try to use
7507 plus_constant. If it produces a sum and we can't accept it,
7508 use force_operand. This allows P = &ARR[const] to generate
7509 efficient code on machines where a SYMBOL_REF is not a valid
7512 If this is an EXPAND_SUM call, always return the sum. */
7513 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7514 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7516 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7517 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7518 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7522 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7524 /* Use immed_double_const to ensure that the constant is
7525 truncated according to the mode of OP1, then sign extended
7526 to a HOST_WIDE_INT. Using the constant directly can result
7527 in non-canonical RTL in a 64x32 cross compile. */
7529 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7531 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7532 op1 = plus_constant (op1, INTVAL (constant_part));
7533 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7534 op1 = force_operand (op1, target);
7538 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7539 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7540 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7544 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7546 if (! CONSTANT_P (op0))
7548 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7549 VOIDmode, modifier);
7550 /* Don't go to both_summands if modifier
7551 says it's not right to return a PLUS. */
7552 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7556 /* Use immed_double_const to ensure that the constant is
7557 truncated according to the mode of OP1, then sign extended
7558 to a HOST_WIDE_INT. Using the constant directly can result
7559 in non-canonical RTL in a 64x32 cross compile. */
7561 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7563 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7564 op0 = plus_constant (op0, INTVAL (constant_part));
7565 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7566 op0 = force_operand (op0, target);
7571 /* No sense saving up arithmetic to be done
7572 if it's all in the wrong mode to form part of an address.
7573 And force_operand won't know whether to sign-extend or
7575 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7576 || mode != ptr_mode)
7579 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7582 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7583 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7586 /* Make sure any term that's a sum with a constant comes last. */
7587 if (GET_CODE (op0) == PLUS
7588 && CONSTANT_P (XEXP (op0, 1)))
7594 /* If adding to a sum including a constant,
7595 associate it to put the constant outside. */
7596 if (GET_CODE (op1) == PLUS
7597 && CONSTANT_P (XEXP (op1, 1)))
7599 rtx constant_term = const0_rtx;
7601 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7604 /* Ensure that MULT comes first if there is one. */
7605 else if (GET_CODE (op0) == MULT)
7606 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7608 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7610 /* Let's also eliminate constants from op0 if possible. */
7611 op0 = eliminate_constant_term (op0, &constant_term);
7613 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7614 their sum should be a constant. Form it into OP1, since the
7615 result we want will then be OP0 + OP1. */
7617 temp = simplify_binary_operation (PLUS, mode, constant_term,
7622 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7625 /* Put a constant term last and put a multiplication first. */
7626 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7627 temp = op1, op1 = op0, op0 = temp;
7629 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7630 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7633 /* For initializers, we are allowed to return a MINUS of two
7634 symbolic constants. Here we handle all cases when both operands
7636 /* Handle difference of two symbolic constants,
7637 for the sake of an initializer. */
7638 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7639 && really_constant_p (TREE_OPERAND (exp, 0))
7640 && really_constant_p (TREE_OPERAND (exp, 1)))
7642 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7643 VOIDmode, ro_modifier);
7644 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7645 VOIDmode, ro_modifier);
7647 /* If the last operand is a CONST_INT, use plus_constant of
7648 the negated constant. Else make the MINUS. */
7649 if (GET_CODE (op1) == CONST_INT)
7650 return plus_constant (op0, - INTVAL (op1));
7652 return gen_rtx_MINUS (mode, op0, op1);
7654 /* Convert A - const to A + (-const). */
7655 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7657 tree negated = fold (build1 (NEGATE_EXPR, type,
7658 TREE_OPERAND (exp, 1)));
7660 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7661 /* If we can't negate the constant in TYPE, leave it alone and
7662 expand_binop will negate it for us. We used to try to do it
7663 here in the signed version of TYPE, but that doesn't work
7664 on POINTER_TYPEs. */;
7667 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7671 this_optab = ! unsignedp && flag_trapv
7672 && (GET_MODE_CLASS(mode) == MODE_INT)
7673 ? subv_optab : sub_optab;
7677 /* If first operand is constant, swap them.
7678 Thus the following special case checks need only
7679 check the second operand. */
7680 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7682 register tree t1 = TREE_OPERAND (exp, 0);
7683 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7684 TREE_OPERAND (exp, 1) = t1;
7687 /* Attempt to return something suitable for generating an
7688 indexed address, for machines that support that. */
7690 if (modifier == EXPAND_SUM && mode == ptr_mode
7691 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7692 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7694 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7697 /* Apply distributive law if OP0 is x+c. */
7698 if (GET_CODE (op0) == PLUS
7699 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7704 (mode, XEXP (op0, 0),
7705 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7706 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7707 * INTVAL (XEXP (op0, 1))));
7709 if (GET_CODE (op0) != REG)
7710 op0 = force_operand (op0, NULL_RTX);
7711 if (GET_CODE (op0) != REG)
7712 op0 = copy_to_mode_reg (mode, op0);
7715 gen_rtx_MULT (mode, op0,
7716 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7719 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7722 /* Check for multiplying things that have been extended
7723 from a narrower type. If this machine supports multiplying
7724 in that narrower type with a result in the desired type,
7725 do it that way, and avoid the explicit type-conversion. */
7726 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7727 && TREE_CODE (type) == INTEGER_TYPE
7728 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7729 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7730 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7731 && int_fits_type_p (TREE_OPERAND (exp, 1),
7732 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7733 /* Don't use a widening multiply if a shift will do. */
7734 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7735 > HOST_BITS_PER_WIDE_INT)
7736 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7738 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7739 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7741 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7742 /* If both operands are extended, they must either both
7743 be zero-extended or both be sign-extended. */
7744 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7746 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7748 enum machine_mode innermode
7749 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7750 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7751 ? smul_widen_optab : umul_widen_optab);
7752 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7753 ? umul_widen_optab : smul_widen_optab);
7754 if (mode == GET_MODE_WIDER_MODE (innermode))
7756 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7758 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7759 NULL_RTX, VOIDmode, 0);
7760 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7761 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7764 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7765 NULL_RTX, VOIDmode, 0);
7768 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7769 && innermode == word_mode)
7772 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7773 NULL_RTX, VOIDmode, 0);
7774 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7775 op1 = convert_modes (innermode, mode,
7776 expand_expr (TREE_OPERAND (exp, 1),
7777 NULL_RTX, VOIDmode, 0),
7780 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7781 NULL_RTX, VOIDmode, 0);
7782 temp = expand_binop (mode, other_optab, op0, op1, target,
7783 unsignedp, OPTAB_LIB_WIDEN);
7784 htem = expand_mult_highpart_adjust (innermode,
7785 gen_highpart (innermode, temp),
7787 gen_highpart (innermode, temp),
7789 emit_move_insn (gen_highpart (innermode, temp), htem);
7794 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7795 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7796 return expand_mult (mode, op0, op1, target, unsignedp);
7798 case TRUNC_DIV_EXPR:
7799 case FLOOR_DIV_EXPR:
7801 case ROUND_DIV_EXPR:
7802 case EXACT_DIV_EXPR:
7803 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7805 /* Possible optimization: compute the dividend with EXPAND_SUM
7806 then if the divisor is constant can optimize the case
7807 where some terms of the dividend have coeffs divisible by it. */
7808 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7809 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7810 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7813 this_optab = flodiv_optab;
7816 case TRUNC_MOD_EXPR:
7817 case FLOOR_MOD_EXPR:
7819 case ROUND_MOD_EXPR:
7820 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7822 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7823 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7824 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7826 case FIX_ROUND_EXPR:
7827 case FIX_FLOOR_EXPR:
7829 abort (); /* Not used for C. */
7831 case FIX_TRUNC_EXPR:
7832 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7834 target = gen_reg_rtx (mode);
7835 expand_fix (target, op0, unsignedp);
7839 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7841 target = gen_reg_rtx (mode);
7842 /* expand_float can't figure out what to do if FROM has VOIDmode.
7843 So give it the correct mode. With -O, cse will optimize this. */
7844 if (GET_MODE (op0) == VOIDmode)
7845 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7847 expand_float (target, op0,
7848 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7852 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7853 temp = expand_unop (mode,
7854 ! unsignedp && flag_trapv
7855 && (GET_MODE_CLASS(mode) == MODE_INT)
7856 ? negv_optab : neg_optab, op0, target, 0);
7862 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7864 /* Handle complex values specially. */
7865 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7866 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7867 return expand_complex_abs (mode, op0, target, unsignedp);
7869 /* Unsigned abs is simply the operand. Testing here means we don't
7870 risk generating incorrect code below. */
7871 if (TREE_UNSIGNED (type))
7874 return expand_abs (mode, op0, target, unsignedp,
7875 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7879 target = original_target;
7880 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7881 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7882 || GET_MODE (target) != mode
7883 || (GET_CODE (target) == REG
7884 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7885 target = gen_reg_rtx (mode);
7886 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7887 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7889 /* First try to do it with a special MIN or MAX instruction.
7890 If that does not win, use a conditional jump to select the proper
7892 this_optab = (TREE_UNSIGNED (type)
7893 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7894 : (code == MIN_EXPR ? smin_optab : smax_optab));
7896 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7901 /* At this point, a MEM target is no longer useful; we will get better
7904 if (GET_CODE (target) == MEM)
7905 target = gen_reg_rtx (mode);
7908 emit_move_insn (target, op0);
7910 op0 = gen_label_rtx ();
7912 /* If this mode is an integer too wide to compare properly,
7913 compare word by word. Rely on cse to optimize constant cases. */
7914 if (GET_MODE_CLASS (mode) == MODE_INT
7915 && ! can_compare_p (GE, mode, ccp_jump))
7917 if (code == MAX_EXPR)
7918 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7919 target, op1, NULL_RTX, op0);
7921 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7922 op1, target, NULL_RTX, op0);
7926 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7927 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7928 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7931 emit_move_insn (target, op1);
7936 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7937 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7943 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7944 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7949 /* ??? Can optimize bitwise operations with one arg constant.
7950 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7951 and (a bitwise1 b) bitwise2 b (etc)
7952 but that is probably not worth while. */
7954 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7955 boolean values when we want in all cases to compute both of them. In
7956 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7957 as actual zero-or-1 values and then bitwise anding. In cases where
7958 there cannot be any side effects, better code would be made by
7959 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7960 how to recognize those cases. */
7962 case TRUTH_AND_EXPR:
7964 this_optab = and_optab;
7969 this_optab = ior_optab;
7972 case TRUTH_XOR_EXPR:
7974 this_optab = xor_optab;
7981 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7983 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7984 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7987 /* Could determine the answer when only additive constants differ. Also,
7988 the addition of one can be handled by changing the condition. */
7995 case UNORDERED_EXPR:
8002 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8006 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8007 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8009 && GET_CODE (original_target) == REG
8010 && (GET_MODE (original_target)
8011 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8013 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8016 if (temp != original_target)
8017 temp = copy_to_reg (temp);
8019 op1 = gen_label_rtx ();
8020 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8021 GET_MODE (temp), unsignedp, 0, op1);
8022 emit_move_insn (temp, const1_rtx);
8027 /* If no set-flag instruction, must generate a conditional
8028 store into a temporary variable. Drop through
8029 and handle this like && and ||. */
8031 case TRUTH_ANDIF_EXPR:
8032 case TRUTH_ORIF_EXPR:
8034 && (target == 0 || ! safe_from_p (target, exp, 1)
8035 /* Make sure we don't have a hard reg (such as function's return
8036 value) live across basic blocks, if not optimizing. */
8037 || (!optimize && GET_CODE (target) == REG
8038 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8039 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8042 emit_clr_insn (target);
8044 op1 = gen_label_rtx ();
8045 jumpifnot (exp, op1);
8048 emit_0_to_1_insn (target);
8051 return ignore ? const0_rtx : target;
8053 case TRUTH_NOT_EXPR:
8054 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8055 /* The parser is careful to generate TRUTH_NOT_EXPR
8056 only with operands that are always zero or one. */
8057 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8058 target, 1, OPTAB_LIB_WIDEN);
8064 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8066 return expand_expr (TREE_OPERAND (exp, 1),
8067 (ignore ? const0_rtx : target),
8071 /* If we would have a "singleton" (see below) were it not for a
8072 conversion in each arm, bring that conversion back out. */
8073 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8074 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8075 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8076 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8078 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8079 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8081 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8082 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8083 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8084 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8085 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8086 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8087 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8088 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8089 return expand_expr (build1 (NOP_EXPR, type,
8090 build (COND_EXPR, TREE_TYPE (iftrue),
8091 TREE_OPERAND (exp, 0),
8093 target, tmode, modifier);
8097 /* Note that COND_EXPRs whose type is a structure or union
8098 are required to be constructed to contain assignments of
8099 a temporary variable, so that we can evaluate them here
8100 for side effect only. If type is void, we must do likewise. */
8102 /* If an arm of the branch requires a cleanup,
8103 only that cleanup is performed. */
8106 tree binary_op = 0, unary_op = 0;
8108 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8109 convert it to our mode, if necessary. */
8110 if (integer_onep (TREE_OPERAND (exp, 1))
8111 && integer_zerop (TREE_OPERAND (exp, 2))
8112 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8116 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8121 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
8122 if (GET_MODE (op0) == mode)
8126 target = gen_reg_rtx (mode);
8127 convert_move (target, op0, unsignedp);
8131 /* Check for X ? A + B : A. If we have this, we can copy A to the
8132 output and conditionally add B. Similarly for unary operations.
8133 Don't do this if X has side-effects because those side effects
8134 might affect A or B and the "?" operation is a sequence point in
8135 ANSI. (operand_equal_p tests for side effects.) */
8137 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8138 && operand_equal_p (TREE_OPERAND (exp, 2),
8139 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8140 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8141 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8142 && operand_equal_p (TREE_OPERAND (exp, 1),
8143 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8144 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8145 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8146 && operand_equal_p (TREE_OPERAND (exp, 2),
8147 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8148 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8149 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8150 && operand_equal_p (TREE_OPERAND (exp, 1),
8151 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8152 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8154 /* If we are not to produce a result, we have no target. Otherwise,
8155 if a target was specified use it; it will not be used as an
8156 intermediate target unless it is safe. If no target, use a
8161 else if (original_target
8162 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8163 || (singleton && GET_CODE (original_target) == REG
8164 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8165 && original_target == var_rtx (singleton)))
8166 && GET_MODE (original_target) == mode
8167 #ifdef HAVE_conditional_move
8168 && (! can_conditionally_move_p (mode)
8169 || GET_CODE (original_target) == REG
8170 || TREE_ADDRESSABLE (type))
8172 && ! (GET_CODE (original_target) == MEM
8173 && MEM_VOLATILE_P (original_target)))
8174 temp = original_target;
8175 else if (TREE_ADDRESSABLE (type))
8178 temp = assign_temp (type, 0, 0, 1);
8180 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8181 do the test of X as a store-flag operation, do this as
8182 A + ((X != 0) << log C). Similarly for other simple binary
8183 operators. Only do for C == 1 if BRANCH_COST is low. */
8184 if (temp && singleton && binary_op
8185 && (TREE_CODE (binary_op) == PLUS_EXPR
8186 || TREE_CODE (binary_op) == MINUS_EXPR
8187 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8188 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8189 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8190 : integer_onep (TREE_OPERAND (binary_op, 1)))
8191 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8194 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8195 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8196 ? addv_optab : add_optab)
8197 : TREE_CODE (binary_op) == MINUS_EXPR
8198 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8199 ? subv_optab : sub_optab)
8200 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8203 /* If we had X ? A : A + 1, do this as A + (X == 0).
8205 We have to invert the truth value here and then put it
8206 back later if do_store_flag fails. We cannot simply copy
8207 TREE_OPERAND (exp, 0) to another variable and modify that
8208 because invert_truthvalue can modify the tree pointed to
8210 if (singleton == TREE_OPERAND (exp, 1))
8211 TREE_OPERAND (exp, 0)
8212 = invert_truthvalue (TREE_OPERAND (exp, 0));
8214 result = do_store_flag (TREE_OPERAND (exp, 0),
8215 (safe_from_p (temp, singleton, 1)
8217 mode, BRANCH_COST <= 1);
8219 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8220 result = expand_shift (LSHIFT_EXPR, mode, result,
8221 build_int_2 (tree_log2
8225 (safe_from_p (temp, singleton, 1)
8226 ? temp : NULL_RTX), 0);
8230 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8231 return expand_binop (mode, boptab, op1, result, temp,
8232 unsignedp, OPTAB_LIB_WIDEN);
8234 else if (singleton == TREE_OPERAND (exp, 1))
8235 TREE_OPERAND (exp, 0)
8236 = invert_truthvalue (TREE_OPERAND (exp, 0));
8239 do_pending_stack_adjust ();
8241 op0 = gen_label_rtx ();
8243 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8247 /* If the target conflicts with the other operand of the
8248 binary op, we can't use it. Also, we can't use the target
8249 if it is a hard register, because evaluating the condition
8250 might clobber it. */
8252 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8253 || (GET_CODE (temp) == REG
8254 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8255 temp = gen_reg_rtx (mode);
8256 store_expr (singleton, temp, 0);
8259 expand_expr (singleton,
8260 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8261 if (singleton == TREE_OPERAND (exp, 1))
8262 jumpif (TREE_OPERAND (exp, 0), op0);
8264 jumpifnot (TREE_OPERAND (exp, 0), op0);
8266 start_cleanup_deferral ();
8267 if (binary_op && temp == 0)
8268 /* Just touch the other operand. */
8269 expand_expr (TREE_OPERAND (binary_op, 1),
8270 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8272 store_expr (build (TREE_CODE (binary_op), type,
8273 make_tree (type, temp),
8274 TREE_OPERAND (binary_op, 1)),
8277 store_expr (build1 (TREE_CODE (unary_op), type,
8278 make_tree (type, temp)),
8282 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8283 comparison operator. If we have one of these cases, set the
8284 output to A, branch on A (cse will merge these two references),
8285 then set the output to FOO. */
8287 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8288 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8289 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8290 TREE_OPERAND (exp, 1), 0)
8291 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8292 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8293 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8295 if (GET_CODE (temp) == REG
8296 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8297 temp = gen_reg_rtx (mode);
8298 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8299 jumpif (TREE_OPERAND (exp, 0), op0);
8301 start_cleanup_deferral ();
8302 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8306 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8307 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8308 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8309 TREE_OPERAND (exp, 2), 0)
8310 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8311 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8312 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8314 if (GET_CODE (temp) == REG
8315 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8316 temp = gen_reg_rtx (mode);
8317 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8318 jumpifnot (TREE_OPERAND (exp, 0), op0);
8320 start_cleanup_deferral ();
8321 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8326 op1 = gen_label_rtx ();
8327 jumpifnot (TREE_OPERAND (exp, 0), op0);
8329 start_cleanup_deferral ();
8331 /* One branch of the cond can be void, if it never returns. For
8332 example A ? throw : E */
8334 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8335 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8337 expand_expr (TREE_OPERAND (exp, 1),
8338 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8339 end_cleanup_deferral ();
8341 emit_jump_insn (gen_jump (op1));
8344 start_cleanup_deferral ();
8346 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8347 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8349 expand_expr (TREE_OPERAND (exp, 2),
8350 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8353 end_cleanup_deferral ();
8364 /* Something needs to be initialized, but we didn't know
8365 where that thing was when building the tree. For example,
8366 it could be the return value of a function, or a parameter
8367 to a function which lays down in the stack, or a temporary
8368 variable which must be passed by reference.
8370 We guarantee that the expression will either be constructed
8371 or copied into our original target. */
8373 tree slot = TREE_OPERAND (exp, 0);
8374 tree cleanups = NULL_TREE;
8377 if (TREE_CODE (slot) != VAR_DECL)
8381 target = original_target;
8383 /* Set this here so that if we get a target that refers to a
8384 register variable that's already been used, put_reg_into_stack
8385 knows that it should fix up those uses. */
8386 TREE_USED (slot) = 1;
8390 if (DECL_RTL_SET_P (slot))
8392 target = DECL_RTL (slot);
8393 /* If we have already expanded the slot, so don't do
8395 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8400 target = assign_temp (type, 2, 0, 1);
8401 /* All temp slots at this level must not conflict. */
8402 preserve_temp_slots (target);
8403 SET_DECL_RTL (slot, target);
8404 if (TREE_ADDRESSABLE (slot))
8405 put_var_into_stack (slot);
8407 /* Since SLOT is not known to the called function
8408 to belong to its stack frame, we must build an explicit
8409 cleanup. This case occurs when we must build up a reference
8410 to pass the reference as an argument. In this case,
8411 it is very likely that such a reference need not be
8414 if (TREE_OPERAND (exp, 2) == 0)
8415 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8416 cleanups = TREE_OPERAND (exp, 2);
8421 /* This case does occur, when expanding a parameter which
8422 needs to be constructed on the stack. The target
8423 is the actual stack address that we want to initialize.
8424 The function we call will perform the cleanup in this case. */
8426 /* If we have already assigned it space, use that space,
8427 not target that we were passed in, as our target
8428 parameter is only a hint. */
8429 if (DECL_RTL_SET_P (slot))
8431 target = DECL_RTL (slot);
8432 /* If we have already expanded the slot, so don't do
8434 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8439 SET_DECL_RTL (slot, target);
8440 /* If we must have an addressable slot, then make sure that
8441 the RTL that we just stored in slot is OK. */
8442 if (TREE_ADDRESSABLE (slot))
8443 put_var_into_stack (slot);
8447 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8448 /* Mark it as expanded. */
8449 TREE_OPERAND (exp, 1) = NULL_TREE;
8451 store_expr (exp1, target, 0);
8453 expand_decl_cleanup (NULL_TREE, cleanups);
8460 tree lhs = TREE_OPERAND (exp, 0);
8461 tree rhs = TREE_OPERAND (exp, 1);
8462 tree noncopied_parts = 0;
8463 tree lhs_type = TREE_TYPE (lhs);
8465 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8466 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8467 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8468 TYPE_NONCOPIED_PARTS (lhs_type));
8469 while (noncopied_parts != 0)
8471 expand_assignment (TREE_VALUE (noncopied_parts),
8472 TREE_PURPOSE (noncopied_parts), 0, 0);
8473 noncopied_parts = TREE_CHAIN (noncopied_parts);
8480 /* If lhs is complex, expand calls in rhs before computing it.
8481 That's so we don't compute a pointer and save it over a call.
8482 If lhs is simple, compute it first so we can give it as a
8483 target if the rhs is just a call. This avoids an extra temp and copy
8484 and that prevents a partial-subsumption which makes bad code.
8485 Actually we could treat component_ref's of vars like vars. */
8487 tree lhs = TREE_OPERAND (exp, 0);
8488 tree rhs = TREE_OPERAND (exp, 1);
8489 tree noncopied_parts = 0;
8490 tree lhs_type = TREE_TYPE (lhs);
8494 /* Check for |= or &= of a bitfield of size one into another bitfield
8495 of size 1. In this case, (unless we need the result of the
8496 assignment) we can do this more efficiently with a
8497 test followed by an assignment, if necessary.
8499 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8500 things change so we do, this code should be enhanced to
8503 && TREE_CODE (lhs) == COMPONENT_REF
8504 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8505 || TREE_CODE (rhs) == BIT_AND_EXPR)
8506 && TREE_OPERAND (rhs, 0) == lhs
8507 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8508 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8509 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8511 rtx label = gen_label_rtx ();
8513 do_jump (TREE_OPERAND (rhs, 1),
8514 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8515 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8516 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8517 (TREE_CODE (rhs) == BIT_IOR_EXPR
8519 : integer_zero_node)),
8521 do_pending_stack_adjust ();
8526 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8527 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8528 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8529 TYPE_NONCOPIED_PARTS (lhs_type));
8531 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8532 while (noncopied_parts != 0)
8534 expand_assignment (TREE_PURPOSE (noncopied_parts),
8535 TREE_VALUE (noncopied_parts), 0, 0);
8536 noncopied_parts = TREE_CHAIN (noncopied_parts);
8542 if (!TREE_OPERAND (exp, 0))
8543 expand_null_return ();
8545 expand_return (TREE_OPERAND (exp, 0));
8548 case PREINCREMENT_EXPR:
8549 case PREDECREMENT_EXPR:
8550 return expand_increment (exp, 0, ignore);
8552 case POSTINCREMENT_EXPR:
8553 case POSTDECREMENT_EXPR:
8554 /* Faster to treat as pre-increment if result is not used. */
8555 return expand_increment (exp, ! ignore, ignore);
8558 /* If nonzero, TEMP will be set to the address of something that might
8559 be a MEM corresponding to a stack slot. */
8562 /* Are we taking the address of a nested function? */
8563 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8564 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8565 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8566 && ! TREE_STATIC (exp))
8568 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8569 op0 = force_operand (op0, target);
8571 /* If we are taking the address of something erroneous, just
8573 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8577 /* We make sure to pass const0_rtx down if we came in with
8578 ignore set, to avoid doing the cleanups twice for something. */
8579 op0 = expand_expr (TREE_OPERAND (exp, 0),
8580 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8581 (modifier == EXPAND_INITIALIZER
8582 ? modifier : EXPAND_CONST_ADDRESS));
8584 /* If we are going to ignore the result, OP0 will have been set
8585 to const0_rtx, so just return it. Don't get confused and
8586 think we are taking the address of the constant. */
8590 op0 = protect_from_queue (op0, 0);
8592 /* We would like the object in memory. If it is a constant, we can
8593 have it be statically allocated into memory. For a non-constant,
8594 we need to allocate some memory and store the value into it. */
8596 if (CONSTANT_P (op0))
8597 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8599 else if (GET_CODE (op0) == MEM)
8601 mark_temp_addr_taken (op0);
8602 temp = XEXP (op0, 0);
8605 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8606 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8607 || GET_CODE (op0) == PARALLEL)
8609 /* If this object is in a register, it must be not
8611 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8612 tree nt = build_qualified_type (inner_type,
8613 (TYPE_QUALS (inner_type)
8614 | TYPE_QUAL_CONST));
8615 rtx memloc = assign_temp (nt, 1, 1, 1);
8617 mark_temp_addr_taken (memloc);
8618 if (GET_CODE (op0) == PARALLEL)
8619 /* Handle calls that pass values in multiple non-contiguous
8620 locations. The Irix 6 ABI has examples of this. */
8621 emit_group_store (memloc, op0,
8622 int_size_in_bytes (inner_type),
8623 TYPE_ALIGN (inner_type));
8625 emit_move_insn (memloc, op0);
8629 if (GET_CODE (op0) != MEM)
8632 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8634 temp = XEXP (op0, 0);
8635 #ifdef POINTERS_EXTEND_UNSIGNED
8636 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8637 && mode == ptr_mode)
8638 temp = convert_memory_address (ptr_mode, temp);
8643 op0 = force_operand (XEXP (op0, 0), target);
8646 if (flag_force_addr && GET_CODE (op0) != REG)
8647 op0 = force_reg (Pmode, op0);
8649 if (GET_CODE (op0) == REG
8650 && ! REG_USERVAR_P (op0))
8651 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8653 /* If we might have had a temp slot, add an equivalent address
8656 update_temp_slot_address (temp, op0);
8658 #ifdef POINTERS_EXTEND_UNSIGNED
8659 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8660 && mode == ptr_mode)
8661 op0 = convert_memory_address (ptr_mode, op0);
8666 case ENTRY_VALUE_EXPR:
8669 /* COMPLEX type for Extended Pascal & Fortran */
8672 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8675 /* Get the rtx code of the operands. */
8676 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8677 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8680 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8684 /* Move the real (op0) and imaginary (op1) parts to their location. */
8685 emit_move_insn (gen_realpart (mode, target), op0);
8686 emit_move_insn (gen_imagpart (mode, target), op1);
8688 insns = get_insns ();
8691 /* Complex construction should appear as a single unit. */
8692 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8693 each with a separate pseudo as destination.
8694 It's not correct for flow to treat them as a unit. */
8695 if (GET_CODE (target) != CONCAT)
8696 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8704 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8705 return gen_realpart (mode, op0);
8708 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8709 return gen_imagpart (mode, op0);
8713 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8717 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8720 target = gen_reg_rtx (mode);
8724 /* Store the realpart and the negated imagpart to target. */
8725 emit_move_insn (gen_realpart (partmode, target),
8726 gen_realpart (partmode, op0));
8728 imag_t = gen_imagpart (partmode, target);
8729 temp = expand_unop (partmode,
8730 ! unsignedp && flag_trapv
8731 && (GET_MODE_CLASS(partmode) == MODE_INT)
8732 ? negv_optab : neg_optab,
8733 gen_imagpart (partmode, op0), imag_t, 0);
8735 emit_move_insn (imag_t, temp);
8737 insns = get_insns ();
8740 /* Conjugate should appear as a single unit
8741 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8742 each with a separate pseudo as destination.
8743 It's not correct for flow to treat them as a unit. */
8744 if (GET_CODE (target) != CONCAT)
8745 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8752 case TRY_CATCH_EXPR:
8754 tree handler = TREE_OPERAND (exp, 1);
8756 expand_eh_region_start ();
8758 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8760 expand_eh_region_end_cleanup (handler);
8765 case TRY_FINALLY_EXPR:
8767 tree try_block = TREE_OPERAND (exp, 0);
8768 tree finally_block = TREE_OPERAND (exp, 1);
8769 rtx finally_label = gen_label_rtx ();
8770 rtx done_label = gen_label_rtx ();
8771 rtx return_link = gen_reg_rtx (Pmode);
8772 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8773 (tree) finally_label, (tree) return_link);
8774 TREE_SIDE_EFFECTS (cleanup) = 1;
8776 /* Start a new binding layer that will keep track of all cleanup
8777 actions to be performed. */
8778 expand_start_bindings (2);
8780 target_temp_slot_level = temp_slot_level;
8782 expand_decl_cleanup (NULL_TREE, cleanup);
8783 op0 = expand_expr (try_block, target, tmode, modifier);
8785 preserve_temp_slots (op0);
8786 expand_end_bindings (NULL_TREE, 0, 0);
8787 emit_jump (done_label);
8788 emit_label (finally_label);
8789 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8790 emit_indirect_jump (return_link);
8791 emit_label (done_label);
8795 case GOTO_SUBROUTINE_EXPR:
8797 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8798 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8799 rtx return_address = gen_label_rtx ();
8800 emit_move_insn (return_link,
8801 gen_rtx_LABEL_REF (Pmode, return_address));
8803 emit_label (return_address);
8808 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8811 return get_exception_pointer ();
8814 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8817 /* Here to do an ordinary binary operator, generating an instruction
8818 from the optab already placed in `this_optab'. */
8820 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8822 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8823 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8825 temp = expand_binop (mode, this_optab, op0, op1, target,
8826 unsignedp, OPTAB_LIB_WIDEN);
8832 /* Similar to expand_expr, except that we don't specify a target, target
8833 mode, or modifier and we return the alignment of the inner type. This is
8834 used in cases where it is not necessary to align the result to the
8835 alignment of its type as long as we know the alignment of the result, for
8836 example for comparisons of BLKmode values. */
8839 expand_expr_unaligned (exp, palign)
8841 unsigned int *palign;
8844 tree type = TREE_TYPE (exp);
8845 register enum machine_mode mode = TYPE_MODE (type);
8847 /* Default the alignment we return to that of the type. */
8848 *palign = TYPE_ALIGN (type);
8850 /* The only cases in which we do anything special is if the resulting mode
8852 if (mode != BLKmode)
8853 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8855 switch (TREE_CODE (exp))
8859 case NON_LVALUE_EXPR:
8860 /* Conversions between BLKmode values don't change the underlying
8861 alignment or value. */
8862 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8863 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8867 /* Much of the code for this case is copied directly from expand_expr.
8868 We need to duplicate it here because we will do something different
8869 in the fall-through case, so we need to handle the same exceptions
8872 tree array = TREE_OPERAND (exp, 0);
8873 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8874 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8875 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8878 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8881 /* Optimize the special-case of a zero lower bound.
8883 We convert the low_bound to sizetype to avoid some problems
8884 with constant folding. (E.g. suppose the lower bound is 1,
8885 and its mode is QI. Without the conversion, (ARRAY
8886 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8887 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8889 if (! integer_zerop (low_bound))
8890 index = size_diffop (index, convert (sizetype, low_bound));
8892 /* If this is a constant index into a constant array,
8893 just get the value from the array. Handle both the cases when
8894 we have an explicit constructor and when our operand is a variable
8895 that was declared const. */
8897 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8898 && host_integerp (index, 0)
8899 && 0 > compare_tree_int (index,
8900 list_length (CONSTRUCTOR_ELTS
8901 (TREE_OPERAND (exp, 0)))))
8905 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8906 i = tree_low_cst (index, 0);
8907 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8911 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8914 else if (optimize >= 1
8915 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8916 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8917 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8919 if (TREE_CODE (index) == INTEGER_CST)
8921 tree init = DECL_INITIAL (array);
8923 if (TREE_CODE (init) == CONSTRUCTOR)
8927 for (elem = CONSTRUCTOR_ELTS (init);
8928 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8929 elem = TREE_CHAIN (elem))
8933 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8943 /* If the operand is a CONSTRUCTOR, we can just extract the
8944 appropriate field if it is present. Don't do this if we have
8945 already written the data since we want to refer to that copy
8946 and varasm.c assumes that's what we'll do. */
8947 if (TREE_CODE (exp) != ARRAY_REF
8948 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8949 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8953 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8954 elt = TREE_CHAIN (elt))
8955 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8956 /* Note that unlike the case in expand_expr, we know this is
8957 BLKmode and hence not an integer. */
8958 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8962 enum machine_mode mode1;
8963 HOST_WIDE_INT bitsize, bitpos;
8966 unsigned int alignment;
8968 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8969 &mode1, &unsignedp, &volatilep,
8972 /* If we got back the original object, something is wrong. Perhaps
8973 we are evaluating an expression too early. In any event, don't
8974 infinitely recurse. */
8978 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8980 /* If this is a constant, put it into a register if it is a
8981 legitimate constant and OFFSET is 0 and memory if it isn't. */
8982 if (CONSTANT_P (op0))
8984 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8986 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8988 op0 = force_reg (inner_mode, op0);
8990 op0 = validize_mem (force_const_mem (inner_mode, op0));
8995 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8997 /* If this object is in a register, put it into memory.
8998 This case can't occur in C, but can in Ada if we have
8999 unchecked conversion of an expression from a scalar type to
9000 an array or record type. */
9001 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9002 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
9004 tree nt = build_qualified_type (TREE_TYPE (tem),
9005 (TYPE_QUALS (TREE_TYPE (tem))
9006 | TYPE_QUAL_CONST));
9007 rtx memloc = assign_temp (nt, 1, 1, 1);
9009 mark_temp_addr_taken (memloc);
9010 emit_move_insn (memloc, op0);
9014 if (GET_CODE (op0) != MEM)
9017 if (GET_MODE (offset_rtx) != ptr_mode)
9019 #ifdef POINTERS_EXTEND_UNSIGNED
9020 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
9022 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
9026 op0 = change_address (op0, VOIDmode,
9027 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
9028 force_reg (ptr_mode,
9032 /* Don't forget about volatility even if this is a bitfield. */
9033 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
9035 op0 = copy_rtx (op0);
9036 MEM_VOLATILE_P (op0) = 1;
9039 /* Check the access. */
9040 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
9045 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
9046 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
9048 /* Check the access right of the pointer. */
9049 in_check_memory_usage = 1;
9050 if (size > BITS_PER_UNIT)
9051 emit_library_call (chkr_check_addr_libfunc,
9052 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
9053 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
9054 TYPE_MODE (sizetype),
9055 GEN_INT (MEMORY_USE_RO),
9056 TYPE_MODE (integer_type_node));
9057 in_check_memory_usage = 0;
9060 /* In cases where an aligned union has an unaligned object
9061 as a field, we might be extracting a BLKmode value from
9062 an integer-mode (e.g., SImode) object. Handle this case
9063 by doing the extract into an object as wide as the field
9064 (which we know to be the width of a basic mode), then
9065 storing into memory, and changing the mode to BLKmode.
9066 If we ultimately want the address (EXPAND_CONST_ADDRESS or
9067 EXPAND_INITIALIZER), then we must not copy to a temporary. */
9068 if (mode1 == VOIDmode
9069 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9070 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
9071 && (TYPE_ALIGN (type) > alignment
9072 || bitpos % TYPE_ALIGN (type) != 0)))
9074 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9076 if (ext_mode == BLKmode)
9078 /* In this case, BITPOS must start at a byte boundary. */
9079 if (GET_CODE (op0) != MEM
9080 || bitpos % BITS_PER_UNIT != 0)
9083 op0 = change_address (op0, VOIDmode,
9084 plus_constant (XEXP (op0, 0),
9085 bitpos / BITS_PER_UNIT));
9089 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
9091 rtx new = assign_temp (nt, 0, 1, 1);
9093 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
9094 unsignedp, NULL_RTX, ext_mode,
9095 ext_mode, alignment,
9096 int_size_in_bytes (TREE_TYPE (tem)));
9098 /* If the result is a record type and BITSIZE is narrower than
9099 the mode of OP0, an integral mode, and this is a big endian
9100 machine, we must put the field into the high-order bits. */
9101 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9102 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9103 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
9104 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9105 size_int (GET_MODE_BITSIZE
9110 emit_move_insn (new, op0);
9111 op0 = copy_rtx (new);
9112 PUT_MODE (op0, BLKmode);
9116 /* Get a reference to just this component. */
9117 op0 = change_address (op0, mode1,
9118 plus_constant (XEXP (op0, 0),
9119 (bitpos / BITS_PER_UNIT)));
9121 MEM_ALIAS_SET (op0) = get_alias_set (exp);
9123 /* Adjust the alignment in case the bit position is not
9124 a multiple of the alignment of the inner object. */
9125 while (bitpos % alignment != 0)
9128 if (GET_CODE (XEXP (op0, 0)) == REG)
9129 mark_reg_pointer (XEXP (op0, 0), alignment);
9131 MEM_IN_STRUCT_P (op0) = 1;
9132 MEM_VOLATILE_P (op0) |= volatilep;
9134 *palign = alignment;
9143 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9146 /* Return the tree node if a ARG corresponds to a string constant or zero
9147 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9148 in bytes within the string that ARG is accessing. The type of the
9149 offset will be `sizetype'. */
9152 string_constant (arg, ptr_offset)
9158 if (TREE_CODE (arg) == ADDR_EXPR
9159 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9161 *ptr_offset = size_zero_node;
9162 return TREE_OPERAND (arg, 0);
9164 else if (TREE_CODE (arg) == PLUS_EXPR)
9166 tree arg0 = TREE_OPERAND (arg, 0);
9167 tree arg1 = TREE_OPERAND (arg, 1);
9172 if (TREE_CODE (arg0) == ADDR_EXPR
9173 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9175 *ptr_offset = convert (sizetype, arg1);
9176 return TREE_OPERAND (arg0, 0);
9178 else if (TREE_CODE (arg1) == ADDR_EXPR
9179 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9181 *ptr_offset = convert (sizetype, arg0);
9182 return TREE_OPERAND (arg1, 0);
9189 /* Expand code for a post- or pre- increment or decrement
9190 and return the RTX for the result.
9191 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9194 expand_increment (exp, post, ignore)
9198 register rtx op0, op1;
9199 register rtx temp, value;
9200 register tree incremented = TREE_OPERAND (exp, 0);
9201 optab this_optab = add_optab;
9203 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9204 int op0_is_copy = 0;
9205 int single_insn = 0;
9206 /* 1 means we can't store into OP0 directly,
9207 because it is a subreg narrower than a word,
9208 and we don't dare clobber the rest of the word. */
9211 /* Stabilize any component ref that might need to be
9212 evaluated more than once below. */
9214 || TREE_CODE (incremented) == BIT_FIELD_REF
9215 || (TREE_CODE (incremented) == COMPONENT_REF
9216 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9217 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9218 incremented = stabilize_reference (incremented);
9219 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9220 ones into save exprs so that they don't accidentally get evaluated
9221 more than once by the code below. */
9222 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9223 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9224 incremented = save_expr (incremented);
9226 /* Compute the operands as RTX.
9227 Note whether OP0 is the actual lvalue or a copy of it:
9228 I believe it is a copy iff it is a register or subreg
9229 and insns were generated in computing it. */
9231 temp = get_last_insn ();
9232 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9234 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9235 in place but instead must do sign- or zero-extension during assignment,
9236 so we copy it into a new register and let the code below use it as
9239 Note that we can safely modify this SUBREG since it is know not to be
9240 shared (it was made by the expand_expr call above). */
9242 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9245 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9249 else if (GET_CODE (op0) == SUBREG
9250 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9252 /* We cannot increment this SUBREG in place. If we are
9253 post-incrementing, get a copy of the old value. Otherwise,
9254 just mark that we cannot increment in place. */
9256 op0 = copy_to_reg (op0);
9261 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9262 && temp != get_last_insn ());
9263 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9264 EXPAND_MEMORY_USE_BAD);
9266 /* Decide whether incrementing or decrementing. */
9267 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9268 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9269 this_optab = sub_optab;
9271 /* Convert decrement by a constant into a negative increment. */
9272 if (this_optab == sub_optab
9273 && GET_CODE (op1) == CONST_INT)
9275 op1 = GEN_INT (-INTVAL (op1));
9276 this_optab = add_optab;
9279 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9280 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9282 /* For a preincrement, see if we can do this with a single instruction. */
9285 icode = (int) this_optab->handlers[(int) mode].insn_code;
9286 if (icode != (int) CODE_FOR_nothing
9287 /* Make sure that OP0 is valid for operands 0 and 1
9288 of the insn we want to queue. */
9289 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9290 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9291 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9295 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9296 then we cannot just increment OP0. We must therefore contrive to
9297 increment the original value. Then, for postincrement, we can return
9298 OP0 since it is a copy of the old value. For preincrement, expand here
9299 unless we can do it with a single insn.
9301 Likewise if storing directly into OP0 would clobber high bits
9302 we need to preserve (bad_subreg). */
9303 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9305 /* This is the easiest way to increment the value wherever it is.
9306 Problems with multiple evaluation of INCREMENTED are prevented
9307 because either (1) it is a component_ref or preincrement,
9308 in which case it was stabilized above, or (2) it is an array_ref
9309 with constant index in an array in a register, which is
9310 safe to reevaluate. */
9311 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9312 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9313 ? MINUS_EXPR : PLUS_EXPR),
9316 TREE_OPERAND (exp, 1));
9318 while (TREE_CODE (incremented) == NOP_EXPR
9319 || TREE_CODE (incremented) == CONVERT_EXPR)
9321 newexp = convert (TREE_TYPE (incremented), newexp);
9322 incremented = TREE_OPERAND (incremented, 0);
9325 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9326 return post ? op0 : temp;
9331 /* We have a true reference to the value in OP0.
9332 If there is an insn to add or subtract in this mode, queue it.
9333 Queueing the increment insn avoids the register shuffling
9334 that often results if we must increment now and first save
9335 the old value for subsequent use. */
9337 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9338 op0 = stabilize (op0);
9341 icode = (int) this_optab->handlers[(int) mode].insn_code;
9342 if (icode != (int) CODE_FOR_nothing
9343 /* Make sure that OP0 is valid for operands 0 and 1
9344 of the insn we want to queue. */
9345 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9346 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9348 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9349 op1 = force_reg (mode, op1);
9351 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9353 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9355 rtx addr = (general_operand (XEXP (op0, 0), mode)
9356 ? force_reg (Pmode, XEXP (op0, 0))
9357 : copy_to_reg (XEXP (op0, 0)));
9360 op0 = change_address (op0, VOIDmode, addr);
9361 temp = force_reg (GET_MODE (op0), op0);
9362 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9363 op1 = force_reg (mode, op1);
9365 /* The increment queue is LIFO, thus we have to `queue'
9366 the instructions in reverse order. */
9367 enqueue_insn (op0, gen_move_insn (op0, temp));
9368 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9373 /* Preincrement, or we can't increment with one simple insn. */
9375 /* Save a copy of the value before inc or dec, to return it later. */
9376 temp = value = copy_to_reg (op0);
9378 /* Arrange to return the incremented value. */
9379 /* Copy the rtx because expand_binop will protect from the queue,
9380 and the results of that would be invalid for us to return
9381 if our caller does emit_queue before using our result. */
9382 temp = copy_rtx (value = op0);
9384 /* Increment however we can. */
9385 op1 = expand_binop (mode, this_optab, value, op1,
9386 current_function_check_memory_usage ? NULL_RTX : op0,
9387 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9388 /* Make sure the value is stored into OP0. */
9390 emit_move_insn (op0, op1);
9395 /* At the start of a function, record that we have no previously-pushed
9396 arguments waiting to be popped. */
9399 init_pending_stack_adjust ()
9401 pending_stack_adjust = 0;
9404 /* When exiting from function, if safe, clear out any pending stack adjust
9405 so the adjustment won't get done.
9407 Note, if the current function calls alloca, then it must have a
9408 frame pointer regardless of the value of flag_omit_frame_pointer. */
9411 clear_pending_stack_adjust ()
9413 #ifdef EXIT_IGNORE_STACK
9415 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9416 && EXIT_IGNORE_STACK
9417 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9418 && ! flag_inline_functions)
9420 stack_pointer_delta -= pending_stack_adjust,
9421 pending_stack_adjust = 0;
9426 /* Pop any previously-pushed arguments that have not been popped yet. */
9429 do_pending_stack_adjust ()
9431 if (inhibit_defer_pop == 0)
9433 if (pending_stack_adjust != 0)
9434 adjust_stack (GEN_INT (pending_stack_adjust));
9435 pending_stack_adjust = 0;
9439 /* Expand conditional expressions. */
9441 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9442 LABEL is an rtx of code CODE_LABEL, in this function and all the
9446 jumpifnot (exp, label)
9450 do_jump (exp, label, NULL_RTX);
9453 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9460 do_jump (exp, NULL_RTX, label);
9463 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9464 the result is zero, or IF_TRUE_LABEL if the result is one.
9465 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9466 meaning fall through in that case.
9468 do_jump always does any pending stack adjust except when it does not
9469 actually perform a jump. An example where there is no jump
9470 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9472 This function is responsible for optimizing cases such as
9473 &&, || and comparison operators in EXP. */
9476 do_jump (exp, if_false_label, if_true_label)
9478 rtx if_false_label, if_true_label;
9480 register enum tree_code code = TREE_CODE (exp);
9481 /* Some cases need to create a label to jump to
9482 in order to properly fall through.
9483 These cases set DROP_THROUGH_LABEL nonzero. */
9484 rtx drop_through_label = 0;
9488 enum machine_mode mode;
9490 #ifdef MAX_INTEGER_COMPUTATION_MODE
9491 check_max_integer_computation_mode (exp);
9502 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9508 /* This is not true with #pragma weak */
9510 /* The address of something can never be zero. */
9512 emit_jump (if_true_label);
9517 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9518 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9519 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9522 /* If we are narrowing the operand, we have to do the compare in the
9524 if ((TYPE_PRECISION (TREE_TYPE (exp))
9525 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9527 case NON_LVALUE_EXPR:
9528 case REFERENCE_EXPR:
9533 /* These cannot change zero->non-zero or vice versa. */
9534 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9537 case WITH_RECORD_EXPR:
9538 /* Put the object on the placeholder list, recurse through our first
9539 operand, and pop the list. */
9540 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9542 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9543 placeholder_list = TREE_CHAIN (placeholder_list);
9547 /* This is never less insns than evaluating the PLUS_EXPR followed by
9548 a test and can be longer if the test is eliminated. */
9550 /* Reduce to minus. */
9551 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9552 TREE_OPERAND (exp, 0),
9553 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9554 TREE_OPERAND (exp, 1))));
9555 /* Process as MINUS. */
9559 /* Non-zero iff operands of minus differ. */
9560 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9561 TREE_OPERAND (exp, 0),
9562 TREE_OPERAND (exp, 1)),
9563 NE, NE, if_false_label, if_true_label);
9567 /* If we are AND'ing with a small constant, do this comparison in the
9568 smallest type that fits. If the machine doesn't have comparisons
9569 that small, it will be converted back to the wider comparison.
9570 This helps if we are testing the sign bit of a narrower object.
9571 combine can't do this for us because it can't know whether a
9572 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9574 if (! SLOW_BYTE_ACCESS
9575 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9576 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9577 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9578 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9579 && (type = type_for_mode (mode, 1)) != 0
9580 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9581 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9582 != CODE_FOR_nothing))
9584 do_jump (convert (type, exp), if_false_label, if_true_label);
9589 case TRUTH_NOT_EXPR:
9590 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9593 case TRUTH_ANDIF_EXPR:
9594 if (if_false_label == 0)
9595 if_false_label = drop_through_label = gen_label_rtx ();
9596 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9597 start_cleanup_deferral ();
9598 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9599 end_cleanup_deferral ();
9602 case TRUTH_ORIF_EXPR:
9603 if (if_true_label == 0)
9604 if_true_label = drop_through_label = gen_label_rtx ();
9605 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9606 start_cleanup_deferral ();
9607 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9608 end_cleanup_deferral ();
9613 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9614 preserve_temp_slots (NULL_RTX);
9618 do_pending_stack_adjust ();
9619 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9626 HOST_WIDE_INT bitsize, bitpos;
9628 enum machine_mode mode;
9632 unsigned int alignment;
9634 /* Get description of this reference. We don't actually care
9635 about the underlying object here. */
9636 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9637 &unsignedp, &volatilep, &alignment);
9639 type = type_for_size (bitsize, unsignedp);
9640 if (! SLOW_BYTE_ACCESS
9641 && type != 0 && bitsize >= 0
9642 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9643 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9644 != CODE_FOR_nothing))
9646 do_jump (convert (type, exp), if_false_label, if_true_label);
9653 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9654 if (integer_onep (TREE_OPERAND (exp, 1))
9655 && integer_zerop (TREE_OPERAND (exp, 2)))
9656 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9658 else if (integer_zerop (TREE_OPERAND (exp, 1))
9659 && integer_onep (TREE_OPERAND (exp, 2)))
9660 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9664 register rtx label1 = gen_label_rtx ();
9665 drop_through_label = gen_label_rtx ();
9667 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9669 start_cleanup_deferral ();
9670 /* Now the THEN-expression. */
9671 do_jump (TREE_OPERAND (exp, 1),
9672 if_false_label ? if_false_label : drop_through_label,
9673 if_true_label ? if_true_label : drop_through_label);
9674 /* In case the do_jump just above never jumps. */
9675 do_pending_stack_adjust ();
9676 emit_label (label1);
9678 /* Now the ELSE-expression. */
9679 do_jump (TREE_OPERAND (exp, 2),
9680 if_false_label ? if_false_label : drop_through_label,
9681 if_true_label ? if_true_label : drop_through_label);
9682 end_cleanup_deferral ();
9688 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9690 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9691 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9693 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9694 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9697 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9698 fold (build (EQ_EXPR, TREE_TYPE (exp),
9699 fold (build1 (REALPART_EXPR,
9700 TREE_TYPE (inner_type),
9702 fold (build1 (REALPART_EXPR,
9703 TREE_TYPE (inner_type),
9705 fold (build (EQ_EXPR, TREE_TYPE (exp),
9706 fold (build1 (IMAGPART_EXPR,
9707 TREE_TYPE (inner_type),
9709 fold (build1 (IMAGPART_EXPR,
9710 TREE_TYPE (inner_type),
9712 if_false_label, if_true_label);
9715 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9716 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9718 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9719 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9720 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9722 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9728 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9730 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9731 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9733 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9734 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9737 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9738 fold (build (NE_EXPR, TREE_TYPE (exp),
9739 fold (build1 (REALPART_EXPR,
9740 TREE_TYPE (inner_type),
9742 fold (build1 (REALPART_EXPR,
9743 TREE_TYPE (inner_type),
9745 fold (build (NE_EXPR, TREE_TYPE (exp),
9746 fold (build1 (IMAGPART_EXPR,
9747 TREE_TYPE (inner_type),
9749 fold (build1 (IMAGPART_EXPR,
9750 TREE_TYPE (inner_type),
9752 if_false_label, if_true_label);
9755 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9756 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9758 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9759 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9760 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9762 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9767 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9768 if (GET_MODE_CLASS (mode) == MODE_INT
9769 && ! can_compare_p (LT, mode, ccp_jump))
9770 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9772 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9776 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9777 if (GET_MODE_CLASS (mode) == MODE_INT
9778 && ! can_compare_p (LE, mode, ccp_jump))
9779 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9781 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9785 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9786 if (GET_MODE_CLASS (mode) == MODE_INT
9787 && ! can_compare_p (GT, mode, ccp_jump))
9788 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9790 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9794 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9795 if (GET_MODE_CLASS (mode) == MODE_INT
9796 && ! can_compare_p (GE, mode, ccp_jump))
9797 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9799 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9802 case UNORDERED_EXPR:
9805 enum rtx_code cmp, rcmp;
9808 if (code == UNORDERED_EXPR)
9809 cmp = UNORDERED, rcmp = ORDERED;
9811 cmp = ORDERED, rcmp = UNORDERED;
9812 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9815 if (! can_compare_p (cmp, mode, ccp_jump)
9816 && (can_compare_p (rcmp, mode, ccp_jump)
9817 /* If the target doesn't provide either UNORDERED or ORDERED
9818 comparisons, canonicalize on UNORDERED for the library. */
9819 || rcmp == UNORDERED))
9823 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9825 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9830 enum rtx_code rcode1;
9831 enum tree_code tcode2;
9855 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9856 if (can_compare_p (rcode1, mode, ccp_jump))
9857 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9861 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9862 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9865 /* If the target doesn't support combined unordered
9866 compares, decompose into UNORDERED + comparison. */
9867 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9868 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9869 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9870 do_jump (exp, if_false_label, if_true_label);
9877 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9879 /* This is not needed any more and causes poor code since it causes
9880 comparisons and tests from non-SI objects to have different code
9882 /* Copy to register to avoid generating bad insns by cse
9883 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9884 if (!cse_not_expected && GET_CODE (temp) == MEM)
9885 temp = copy_to_reg (temp);
9887 do_pending_stack_adjust ();
9888 /* Do any postincrements in the expression that was tested. */
9891 if (GET_CODE (temp) == CONST_INT
9892 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9893 || GET_CODE (temp) == LABEL_REF)
9895 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9899 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9900 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9901 /* Note swapping the labels gives us not-equal. */
9902 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9903 else if (GET_MODE (temp) != VOIDmode)
9904 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9905 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9906 GET_MODE (temp), NULL_RTX, 0,
9907 if_false_label, if_true_label);
9912 if (drop_through_label)
9914 /* If do_jump produces code that might be jumped around,
9915 do any stack adjusts from that code, before the place
9916 where control merges in. */
9917 do_pending_stack_adjust ();
9918 emit_label (drop_through_label);
9922 /* Given a comparison expression EXP for values too wide to be compared
9923 with one insn, test the comparison and jump to the appropriate label.
9924 The code of EXP is ignored; we always test GT if SWAP is 0,
9925 and LT if SWAP is 1. */
9928 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9931 rtx if_false_label, if_true_label;
9933 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9934 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9935 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9936 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9938 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9941 /* Compare OP0 with OP1, word at a time, in mode MODE.
9942 UNSIGNEDP says to do unsigned comparison.
9943 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9946 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9947 enum machine_mode mode;
9950 rtx if_false_label, if_true_label;
9952 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9953 rtx drop_through_label = 0;
9956 if (! if_true_label || ! if_false_label)
9957 drop_through_label = gen_label_rtx ();
9958 if (! if_true_label)
9959 if_true_label = drop_through_label;
9960 if (! if_false_label)
9961 if_false_label = drop_through_label;
9963 /* Compare a word at a time, high order first. */
9964 for (i = 0; i < nwords; i++)
9966 rtx op0_word, op1_word;
9968 if (WORDS_BIG_ENDIAN)
9970 op0_word = operand_subword_force (op0, i, mode);
9971 op1_word = operand_subword_force (op1, i, mode);
9975 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9976 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9979 /* All but high-order word must be compared as unsigned. */
9980 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9981 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9982 NULL_RTX, if_true_label);
9984 /* Consider lower words only if these are equal. */
9985 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9986 NULL_RTX, 0, NULL_RTX, if_false_label);
9990 emit_jump (if_false_label);
9991 if (drop_through_label)
9992 emit_label (drop_through_label);
9995 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9996 with one insn, test the comparison and jump to the appropriate label. */
9999 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10001 rtx if_false_label, if_true_label;
10003 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10004 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10005 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10006 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10008 rtx drop_through_label = 0;
10010 if (! if_false_label)
10011 drop_through_label = if_false_label = gen_label_rtx ();
10013 for (i = 0; i < nwords; i++)
10014 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
10015 operand_subword_force (op1, i, mode),
10016 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10017 word_mode, NULL_RTX, 0, if_false_label,
10021 emit_jump (if_true_label);
10022 if (drop_through_label)
10023 emit_label (drop_through_label);
10026 /* Jump according to whether OP0 is 0.
10027 We assume that OP0 has an integer mode that is too wide
10028 for the available compare insns. */
10031 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10033 rtx if_false_label, if_true_label;
10035 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10038 rtx drop_through_label = 0;
10040 /* The fastest way of doing this comparison on almost any machine is to
10041 "or" all the words and compare the result. If all have to be loaded
10042 from memory and this is a very wide item, it's possible this may
10043 be slower, but that's highly unlikely. */
10045 part = gen_reg_rtx (word_mode);
10046 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10047 for (i = 1; i < nwords && part != 0; i++)
10048 part = expand_binop (word_mode, ior_optab, part,
10049 operand_subword_force (op0, i, GET_MODE (op0)),
10050 part, 1, OPTAB_WIDEN);
10054 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
10055 NULL_RTX, 0, if_false_label, if_true_label);
10060 /* If we couldn't do the "or" simply, do this with a series of compares. */
10061 if (! if_false_label)
10062 drop_through_label = if_false_label = gen_label_rtx ();
10064 for (i = 0; i < nwords; i++)
10065 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
10066 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
10067 if_false_label, NULL_RTX);
10070 emit_jump (if_true_label);
10072 if (drop_through_label)
10073 emit_label (drop_through_label);
10076 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
10077 (including code to compute the values to be compared)
10078 and set (CC0) according to the result.
10079 The decision as to signed or unsigned comparison must be made by the caller.
10081 We force a stack adjustment unless there are currently
10082 things pushed on the stack that aren't yet used.
10084 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10087 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10088 size of MODE should be used. */
10091 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10092 register rtx op0, op1;
10093 enum rtx_code code;
10095 enum machine_mode mode;
10097 unsigned int align;
10101 /* If one operand is constant, make it the second one. Only do this
10102 if the other operand is not constant as well. */
10104 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10105 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10110 code = swap_condition (code);
10113 if (flag_force_mem)
10115 op0 = force_not_mem (op0);
10116 op1 = force_not_mem (op1);
10119 do_pending_stack_adjust ();
10121 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10122 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10126 /* There's no need to do this now that combine.c can eliminate lots of
10127 sign extensions. This can be less efficient in certain cases on other
10130 /* If this is a signed equality comparison, we can do it as an
10131 unsigned comparison since zero-extension is cheaper than sign
10132 extension and comparisons with zero are done as unsigned. This is
10133 the case even on machines that can do fast sign extension, since
10134 zero-extension is easier to combine with other operations than
10135 sign-extension is. If we are comparing against a constant, we must
10136 convert it to what it would look like unsigned. */
10137 if ((code == EQ || code == NE) && ! unsignedp
10138 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10140 if (GET_CODE (op1) == CONST_INT
10141 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10142 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10147 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10149 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10152 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10153 The decision as to signed or unsigned comparison must be made by the caller.
10155 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10158 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10159 size of MODE should be used. */
10162 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
10163 if_false_label, if_true_label)
10164 register rtx op0, op1;
10165 enum rtx_code code;
10167 enum machine_mode mode;
10169 unsigned int align;
10170 rtx if_false_label, if_true_label;
10173 int dummy_true_label = 0;
10175 /* Reverse the comparison if that is safe and we want to jump if it is
10177 if (! if_true_label && ! FLOAT_MODE_P (mode))
10179 if_true_label = if_false_label;
10180 if_false_label = 0;
10181 code = reverse_condition (code);
10184 /* If one operand is constant, make it the second one. Only do this
10185 if the other operand is not constant as well. */
10187 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10188 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10193 code = swap_condition (code);
10196 if (flag_force_mem)
10198 op0 = force_not_mem (op0);
10199 op1 = force_not_mem (op1);
10202 do_pending_stack_adjust ();
10204 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10205 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10207 if (tem == const_true_rtx)
10210 emit_jump (if_true_label);
10214 if (if_false_label)
10215 emit_jump (if_false_label);
10221 /* There's no need to do this now that combine.c can eliminate lots of
10222 sign extensions. This can be less efficient in certain cases on other
10225 /* If this is a signed equality comparison, we can do it as an
10226 unsigned comparison since zero-extension is cheaper than sign
10227 extension and comparisons with zero are done as unsigned. This is
10228 the case even on machines that can do fast sign extension, since
10229 zero-extension is easier to combine with other operations than
10230 sign-extension is. If we are comparing against a constant, we must
10231 convert it to what it would look like unsigned. */
10232 if ((code == EQ || code == NE) && ! unsignedp
10233 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10235 if (GET_CODE (op1) == CONST_INT
10236 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10237 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10242 if (! if_true_label)
10244 dummy_true_label = 1;
10245 if_true_label = gen_label_rtx ();
10248 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
10251 if (if_false_label)
10252 emit_jump (if_false_label);
10253 if (dummy_true_label)
10254 emit_label (if_true_label);
10257 /* Generate code for a comparison expression EXP (including code to compute
10258 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10259 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10260 generated code will drop through.
10261 SIGNED_CODE should be the rtx operation for this comparison for
10262 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10264 We force a stack adjustment unless there are currently
10265 things pushed on the stack that aren't yet used. */
10268 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10271 enum rtx_code signed_code, unsigned_code;
10272 rtx if_false_label, if_true_label;
10274 unsigned int align0, align1;
10275 register rtx op0, op1;
10276 register tree type;
10277 register enum machine_mode mode;
10279 enum rtx_code code;
10281 /* Don't crash if the comparison was erroneous. */
10282 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10283 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10286 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10287 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10290 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10291 mode = TYPE_MODE (type);
10292 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10293 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10294 || (GET_MODE_BITSIZE (mode)
10295 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10298 /* op0 might have been replaced by promoted constant, in which
10299 case the type of second argument should be used. */
10300 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10301 mode = TYPE_MODE (type);
10303 unsignedp = TREE_UNSIGNED (type);
10304 code = unsignedp ? unsigned_code : signed_code;
10306 #ifdef HAVE_canonicalize_funcptr_for_compare
10307 /* If function pointers need to be "canonicalized" before they can
10308 be reliably compared, then canonicalize them. */
10309 if (HAVE_canonicalize_funcptr_for_compare
10310 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10311 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10314 rtx new_op0 = gen_reg_rtx (mode);
10316 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10320 if (HAVE_canonicalize_funcptr_for_compare
10321 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10322 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10325 rtx new_op1 = gen_reg_rtx (mode);
10327 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10332 /* Do any postincrements in the expression that was tested. */
10335 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10337 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10338 MIN (align0, align1),
10339 if_false_label, if_true_label);
10342 /* Generate code to calculate EXP using a store-flag instruction
10343 and return an rtx for the result. EXP is either a comparison
10344 or a TRUTH_NOT_EXPR whose operand is a comparison.
10346 If TARGET is nonzero, store the result there if convenient.
10348 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10351 Return zero if there is no suitable set-flag instruction
10352 available on this machine.
10354 Once expand_expr has been called on the arguments of the comparison,
10355 we are committed to doing the store flag, since it is not safe to
10356 re-evaluate the expression. We emit the store-flag insn by calling
10357 emit_store_flag, but only expand the arguments if we have a reason
10358 to believe that emit_store_flag will be successful. If we think that
10359 it will, but it isn't, we have to simulate the store-flag with a
10360 set/jump/set sequence. */
10363 do_store_flag (exp, target, mode, only_cheap)
10366 enum machine_mode mode;
10369 enum rtx_code code;
10370 tree arg0, arg1, type;
10372 enum machine_mode operand_mode;
10376 enum insn_code icode;
10377 rtx subtarget = target;
10380 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10381 result at the end. We can't simply invert the test since it would
10382 have already been inverted if it were valid. This case occurs for
10383 some floating-point comparisons. */
10385 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10386 invert = 1, exp = TREE_OPERAND (exp, 0);
10388 arg0 = TREE_OPERAND (exp, 0);
10389 arg1 = TREE_OPERAND (exp, 1);
10391 /* Don't crash if the comparison was erroneous. */
10392 if (arg0 == error_mark_node || arg1 == error_mark_node)
10395 type = TREE_TYPE (arg0);
10396 operand_mode = TYPE_MODE (type);
10397 unsignedp = TREE_UNSIGNED (type);
10399 /* We won't bother with BLKmode store-flag operations because it would mean
10400 passing a lot of information to emit_store_flag. */
10401 if (operand_mode == BLKmode)
10404 /* We won't bother with store-flag operations involving function pointers
10405 when function pointers must be canonicalized before comparisons. */
10406 #ifdef HAVE_canonicalize_funcptr_for_compare
10407 if (HAVE_canonicalize_funcptr_for_compare
10408 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10409 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10411 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10412 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10413 == FUNCTION_TYPE))))
10420 /* Get the rtx comparison code to use. We know that EXP is a comparison
10421 operation of some type. Some comparisons against 1 and -1 can be
10422 converted to comparisons with zero. Do so here so that the tests
10423 below will be aware that we have a comparison with zero. These
10424 tests will not catch constants in the first operand, but constants
10425 are rarely passed as the first operand. */
10427 switch (TREE_CODE (exp))
10436 if (integer_onep (arg1))
10437 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10439 code = unsignedp ? LTU : LT;
10442 if (! unsignedp && integer_all_onesp (arg1))
10443 arg1 = integer_zero_node, code = LT;
10445 code = unsignedp ? LEU : LE;
10448 if (! unsignedp && integer_all_onesp (arg1))
10449 arg1 = integer_zero_node, code = GE;
10451 code = unsignedp ? GTU : GT;
10454 if (integer_onep (arg1))
10455 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10457 code = unsignedp ? GEU : GE;
10460 case UNORDERED_EXPR:
10486 /* Put a constant second. */
10487 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10489 tem = arg0; arg0 = arg1; arg1 = tem;
10490 code = swap_condition (code);
10493 /* If this is an equality or inequality test of a single bit, we can
10494 do this by shifting the bit being tested to the low-order bit and
10495 masking the result with the constant 1. If the condition was EQ,
10496 we xor it with 1. This does not require an scc insn and is faster
10497 than an scc insn even if we have it. */
10499 if ((code == NE || code == EQ)
10500 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10501 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10503 tree inner = TREE_OPERAND (arg0, 0);
10504 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10507 /* If INNER is a right shift of a constant and it plus BITNUM does
10508 not overflow, adjust BITNUM and INNER. */
10510 if (TREE_CODE (inner) == RSHIFT_EXPR
10511 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10512 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10513 && bitnum < TYPE_PRECISION (type)
10514 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10515 bitnum - TYPE_PRECISION (type)))
10517 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10518 inner = TREE_OPERAND (inner, 0);
10521 /* If we are going to be able to omit the AND below, we must do our
10522 operations as unsigned. If we must use the AND, we have a choice.
10523 Normally unsigned is faster, but for some machines signed is. */
10524 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10525 #ifdef LOAD_EXTEND_OP
10526 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10532 if (! get_subtarget (subtarget)
10533 || GET_MODE (subtarget) != operand_mode
10534 || ! safe_from_p (subtarget, inner, 1))
10537 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10540 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10541 size_int (bitnum), subtarget, ops_unsignedp);
10543 if (GET_MODE (op0) != mode)
10544 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10546 if ((code == EQ && ! invert) || (code == NE && invert))
10547 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10548 ops_unsignedp, OPTAB_LIB_WIDEN);
10550 /* Put the AND last so it can combine with more things. */
10551 if (bitnum != TYPE_PRECISION (type) - 1)
10552 op0 = expand_and (op0, const1_rtx, subtarget);
10557 /* Now see if we are likely to be able to do this. Return if not. */
10558 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10561 icode = setcc_gen_code[(int) code];
10562 if (icode == CODE_FOR_nothing
10563 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10565 /* We can only do this if it is one of the special cases that
10566 can be handled without an scc insn. */
10567 if ((code == LT && integer_zerop (arg1))
10568 || (! only_cheap && code == GE && integer_zerop (arg1)))
10570 else if (BRANCH_COST >= 0
10571 && ! only_cheap && (code == NE || code == EQ)
10572 && TREE_CODE (type) != REAL_TYPE
10573 && ((abs_optab->handlers[(int) operand_mode].insn_code
10574 != CODE_FOR_nothing)
10575 || (ffs_optab->handlers[(int) operand_mode].insn_code
10576 != CODE_FOR_nothing)))
10582 if (! get_subtarget (target)
10583 || GET_MODE (subtarget) != operand_mode
10584 || ! safe_from_p (subtarget, arg1, 1))
10587 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10588 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10591 target = gen_reg_rtx (mode);
10593 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10594 because, if the emit_store_flag does anything it will succeed and
10595 OP0 and OP1 will not be used subsequently. */
10597 result = emit_store_flag (target, code,
10598 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10599 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10600 operand_mode, unsignedp, 1);
10605 result = expand_binop (mode, xor_optab, result, const1_rtx,
10606 result, 0, OPTAB_LIB_WIDEN);
10610 /* If this failed, we have to do this with set/compare/jump/set code. */
10611 if (GET_CODE (target) != REG
10612 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10613 target = gen_reg_rtx (GET_MODE (target));
10615 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10616 result = compare_from_rtx (op0, op1, code, unsignedp,
10617 operand_mode, NULL_RTX, 0);
10618 if (GET_CODE (result) == CONST_INT)
10619 return (((result == const0_rtx && ! invert)
10620 || (result != const0_rtx && invert))
10621 ? const0_rtx : const1_rtx);
10623 label = gen_label_rtx ();
10624 if (bcc_gen_fctn[(int) code] == 0)
10627 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10628 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10629 emit_label (label);
10634 /* Generate a tablejump instruction (used for switch statements). */
10636 #ifdef HAVE_tablejump
10638 /* INDEX is the value being switched on, with the lowest value
10639 in the table already subtracted.
10640 MODE is its expected mode (needed if INDEX is constant).
10641 RANGE is the length of the jump table.
10642 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10644 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10645 index value is out of range. */
10648 do_tablejump (index, mode, range, table_label, default_label)
10649 rtx index, range, table_label, default_label;
10650 enum machine_mode mode;
10652 register rtx temp, vector;
10654 /* Do an unsigned comparison (in the proper mode) between the index
10655 expression and the value which represents the length of the range.
10656 Since we just finished subtracting the lower bound of the range
10657 from the index expression, this comparison allows us to simultaneously
10658 check that the original index expression value is both greater than
10659 or equal to the minimum value of the range and less than or equal to
10660 the maximum value of the range. */
10662 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10665 /* If index is in range, it must fit in Pmode.
10666 Convert to Pmode so we can index with it. */
10668 index = convert_to_mode (Pmode, index, 1);
10670 /* Don't let a MEM slip thru, because then INDEX that comes
10671 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10672 and break_out_memory_refs will go to work on it and mess it up. */
10673 #ifdef PIC_CASE_VECTOR_ADDRESS
10674 if (flag_pic && GET_CODE (index) != REG)
10675 index = copy_to_mode_reg (Pmode, index);
10678 /* If flag_force_addr were to affect this address
10679 it could interfere with the tricky assumptions made
10680 about addresses that contain label-refs,
10681 which may be valid only very near the tablejump itself. */
10682 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10683 GET_MODE_SIZE, because this indicates how large insns are. The other
10684 uses should all be Pmode, because they are addresses. This code
10685 could fail if addresses and insns are not the same size. */
10686 index = gen_rtx_PLUS (Pmode,
10687 gen_rtx_MULT (Pmode, index,
10688 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10689 gen_rtx_LABEL_REF (Pmode, table_label));
10690 #ifdef PIC_CASE_VECTOR_ADDRESS
10692 index = PIC_CASE_VECTOR_ADDRESS (index);
10695 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10696 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10697 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10698 RTX_UNCHANGING_P (vector) = 1;
10699 convert_move (temp, vector, 0);
10701 emit_jump_insn (gen_tablejump (temp, table_label));
10703 /* If we are generating PIC code or if the table is PC-relative, the
10704 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10705 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10709 #endif /* HAVE_tablejump */