1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
30 #include "hard-reg-set.h"
33 #include "insn-config.h"
34 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
39 #include "typeclass.h"
45 /* Decide whether a function's arguments should be processed
46 from first to last or from last to first.
48 They should if the stack and args grow in opposite directions, but
49 only if we have push insns. */
53 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
54 #define PUSH_ARGS_REVERSED /* If it's last to first. */
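/* For example, on a target where STACK_GROWS_DOWNWARD is defined but
   ARGS_GROW_DOWNWARD is not, exactly one of the two macros is defined, so
   PUSH_ARGS_REVERSED is defined and arguments are processed last to first.  */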
59 #ifndef STACK_PUSH_CODE
60 #ifdef STACK_GROWS_DOWNWARD
61 #define STACK_PUSH_CODE PRE_DEC
63 #define STACK_PUSH_CODE PRE_INC
67 /* Assume that case vectors are not pc-relative. */
68 #ifndef CASE_VECTOR_PC_RELATIVE
69 #define CASE_VECTOR_PC_RELATIVE 0
72 /* Hook called by safe_from_p for language-specific tree codes.  It is
73    up to the language front-end to install a hook if it has any such
74    codes that safe_from_p needs to know about.  Since safe_from_p will
75    recursively explore the TREE_OPERANDs of an expression, this hook
76    should not reexamine those pieces.  This routine may recursively
77    call safe_from_p; it should always pass `0' as the TOP_P
78    parameter.  */
79 int (*lang_safe_from_p) PARAMS ((rtx, tree));
81 /* If this is nonzero, we do not bother generating VOLATILE
82 around volatile memory references, and we are willing to
83 output indirect addresses. If cse is to follow, we reject
84 indirect addresses so a useful potential cse is generated;
85 if it is used only once, instruction combination will produce
86 the same indirect address eventually. */
89 /* Don't check memory usage, since code is being emitted to check memory
90    usage.  Used when current_function_check_memory_usage is true, to avoid
91 infinite recursion. */
92 static int in_check_memory_usage;
94 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
95 static tree placeholder_list = 0;
97 /* This structure is used by move_by_pieces to describe the move to
98    be performed.  */
108 int explicit_inc_from;
109 unsigned HOST_WIDE_INT len;
110 HOST_WIDE_INT offset;
114 /* This structure is used by store_by_pieces to describe the clear to
115    be performed.  */
117 struct store_by_pieces
123 unsigned HOST_WIDE_INT len;
124 HOST_WIDE_INT offset;
125 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
130 extern struct obstack permanent_obstack;
132 static rtx get_push_address PARAMS ((int));
134 static rtx enqueue_insn PARAMS ((rtx, rtx));
135 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
136 PARAMS ((unsigned HOST_WIDE_INT,
138 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
139 struct move_by_pieces *));
140 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
142 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
144 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
146 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
148 struct store_by_pieces *));
149 static rtx get_subtarget PARAMS ((rtx));
150 static int is_zeros_p PARAMS ((tree));
151 static int mostly_zeros_p PARAMS ((tree));
152 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
153 HOST_WIDE_INT, enum machine_mode,
154 tree, tree, unsigned int, int,
156 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
158 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
159 HOST_WIDE_INT, enum machine_mode,
160 tree, enum machine_mode, int,
161 unsigned int, HOST_WIDE_INT, int));
162 static enum memory_use_mode
163 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
164 static tree save_noncopied_parts PARAMS ((tree, tree));
165 static tree init_noncopied_parts PARAMS ((tree, tree));
166 static int fixed_type_p PARAMS ((tree));
167 static rtx var_rtx PARAMS ((tree));
168 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
169 static rtx expand_increment PARAMS ((tree, int, int));
170 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
171 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
172 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
174 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
175 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
177 /* Record for each mode whether we can move a register directly to or
178 from an object of that mode in memory. If we can't, we won't try
179 to use that mode directly when accessing a field of that mode. */
181 static char direct_load[NUM_MACHINE_MODES];
182 static char direct_store[NUM_MACHINE_MODES];
184 /* If a memory-to-memory move would take MOVE_RATIO or more simple
185 move-instruction sequences, we will do a movstr or libcall instead. */
188 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
191 /* If we are optimizing for space (-Os), cut down the default move ratio. */
192 #define MOVE_RATIO (optimize_size ? 3 : 15)
196 /* This macro is used to determine whether move_by_pieces should be called
197 to perform a structure copy. */
198 #ifndef MOVE_BY_PIECES_P
199 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
200 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
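/* For instance, with MOVE_RATIO defined as (optimize_size ? 3 : 15), a
   64-byte copy that move_by_pieces_ninsns counts as 16 word-sized moves on
   a 32-bit target is not done by pieces; a movstr pattern or a library
   call is used instead.  With -Os the threshold drops to 3 moves.  */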
203 /* This array records the insn_code of insns to perform block moves. */
204 enum insn_code movstr_optab[NUM_MACHINE_MODES];
206 /* This array records the insn_code of insns to perform block clears. */
207 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
209 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
211 #ifndef SLOW_UNALIGNED_ACCESS
212 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
215 /* This is run once per compilation to set up which modes can be used
216 directly in memory and to initialize the block move optab. */
222 enum machine_mode mode;
228 /* Try indexing by frame ptr and try by stack ptr.
229 It is known that on the Convex the stack ptr isn't a valid index.
230 With luck, one or the other is valid on any machine. */
231 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
232 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
234 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
235 pat = PATTERN (insn);
237 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
238 mode = (enum machine_mode) ((int) mode + 1))
243 direct_load[(int) mode] = direct_store[(int) mode] = 0;
244 PUT_MODE (mem, mode);
245 PUT_MODE (mem1, mode);
247 /* See if there is some register that can be used in this mode and
248 directly loaded or stored from memory. */
250 if (mode != VOIDmode && mode != BLKmode)
251 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
252 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
255 if (! HARD_REGNO_MODE_OK (regno, mode))
258 reg = gen_rtx_REG (mode, regno);
261 SET_DEST (pat) = reg;
262 if (recog (pat, insn, &num_clobbers) >= 0)
263 direct_load[(int) mode] = 1;
265 SET_SRC (pat) = mem1;
266 SET_DEST (pat) = reg;
267 if (recog (pat, insn, &num_clobbers) >= 0)
268 direct_load[(int) mode] = 1;
271 SET_DEST (pat) = mem;
272 if (recog (pat, insn, &num_clobbers) >= 0)
273 direct_store[(int) mode] = 1;
276 SET_DEST (pat) = mem1;
277 if (recog (pat, insn, &num_clobbers) >= 0)
278 direct_store[(int) mode] = 1;
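/* Illustrative note (not in the original source): after the probing above,
   direct_load[(int) SImode] is 1 exactly when some hard register makes the
   target's movsi pattern recognize a (mem:SI ...) source, so SImode fields
   can then be accessed in memory directly.  */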
285 /* This is run at the start of compiling a function. */
290 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
293 pending_stack_adjust = 0;
294 stack_pointer_delta = 0;
295 inhibit_defer_pop = 0;
297 apply_args_value = 0;
303 struct expr_status *p;
308 ggc_mark_rtx (p->x_saveregs_value);
309 ggc_mark_rtx (p->x_apply_args_value);
310 ggc_mark_rtx (p->x_forced_labels);
321 /* Small sanity check that the queue is empty at the end of a function. */
324 finish_expr_for_function ()
330 /* Manage the queue of increment instructions to be output
331 for POSTINCREMENT_EXPR expressions, etc. */
333 /* Queue up to increment (or change) VAR later. BODY says how:
334 BODY should be the same thing you would pass to emit_insn
335 to increment right away. It will go to emit_insn later on.
337 The value is a QUEUED expression to be used in place of VAR
338 where you want to guarantee the pre-incrementation value of VAR. */
341 enqueue_insn (var, body)
344 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
345 body, pending_chain);
346 return pending_chain;
349 /* Use protect_from_queue to convert a QUEUED expression
350 into something that you can put immediately into an instruction.
351 If the queued incrementation has not happened yet,
352 protect_from_queue returns the variable itself.
353 If the incrementation has happened, protect_from_queue returns a temp
354 that contains a copy of the old value of the variable.
356 Any time an rtx which might possibly be a QUEUED is to be put
357 into an instruction, it must be passed through protect_from_queue first.
358 QUEUED expressions are not meaningful in instructions.
360 Do not pass a value through protect_from_queue and then hold
361 on to it for a while before putting it in an instruction!
362 If the queue is flushed in between, incorrect code will result. */
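/* Illustrative sketch (not part of the original file; X and TARGET are
   hypothetical operands): protect an operand and emit it immediately,
   with no intervening emit_queue.  */
#if 0
  rtx op = protect_from_queue (x, 0);
  emit_insn (gen_move_insn (target, op));
#endif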
365 protect_from_queue (x, modify)
369 register RTX_CODE code = GET_CODE (x);
371 #if 0 /* A QUEUED can hang around after the queue is forced out. */
372 /* Shortcut for most common case. */
373 if (pending_chain == 0)
379 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
380 use of autoincrement. Make a copy of the contents of the memory
381 location rather than a copy of the address, but not if the value is
382      of mode BLKmode.  Don't modify X in place since it might be
383      shared.  */
384 if (code == MEM && GET_MODE (x) != BLKmode
385 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
387 register rtx y = XEXP (x, 0);
388 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
390 MEM_COPY_ATTRIBUTES (new, x);
394 register rtx temp = gen_reg_rtx (GET_MODE (new));
395 emit_insn_before (gen_move_insn (temp, new),
399 /* Copy the address into a pseudo, so that the returned value
400 remains correct across calls to emit_queue. */
401 XEXP (new, 0) = copy_to_reg (XEXP (new, 0));
404 /* Otherwise, recursively protect the subexpressions of all
405 the kinds of rtx's that can contain a QUEUED. */
408 rtx tem = protect_from_queue (XEXP (x, 0), 0);
409 if (tem != XEXP (x, 0))
415 else if (code == PLUS || code == MULT)
417 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
418 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
419 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
428 /* If the increment has not happened, use the variable itself. Copy it
429      into a new pseudo so that the value remains correct across calls to
430      emit_queue.  */
431 if (QUEUED_INSN (x) == 0)
432 return copy_to_reg (QUEUED_VAR (x));
433   /* If the increment has happened and a pre-increment copy exists,
434      use that copy.  */
435 if (QUEUED_COPY (x) != 0)
436 return QUEUED_COPY (x);
437 /* The increment has happened but we haven't set up a pre-increment copy.
438 Set one up now, and use it. */
439 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
440 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
442 return QUEUED_COPY (x);
445 /* Return nonzero if X contains a QUEUED expression:
446 if it contains anything that will be altered by a queued increment.
447 We handle only combinations of MEM, PLUS, MINUS and MULT operators
448 since memory addresses generally contain only those. */
454 register enum rtx_code code = GET_CODE (x);
460 return queued_subexp_p (XEXP (x, 0));
464 return (queued_subexp_p (XEXP (x, 0))
465 || queued_subexp_p (XEXP (x, 1)));
471 /* Perform all the pending incrementations. */
477 while ((p = pending_chain))
479 rtx body = QUEUED_BODY (p);
481 if (GET_CODE (body) == SEQUENCE)
483 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
484 emit_insn (QUEUED_BODY (p));
487 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
488 pending_chain = QUEUED_NEXT (p);
492 /* Copy data from FROM to TO, where the machine modes are not the same.
493 Both modes may be integer, or both may be floating.
494 UNSIGNEDP should be nonzero if FROM is an unsigned type.
495 This causes zero-extension instead of sign-extension. */
498 convert_move (to, from, unsignedp)
499 register rtx to, from;
502 enum machine_mode to_mode = GET_MODE (to);
503 enum machine_mode from_mode = GET_MODE (from);
504 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
505 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
509 /* rtx code for making an equivalent value. */
510 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
512 to = protect_from_queue (to, 1);
513 from = protect_from_queue (from, 0);
515 if (to_real != from_real)
518 /* If FROM is a SUBREG that indicates that we have already done at least
519      the required extension, strip it.  We don't handle such SUBREGs as
520      TO here.  */
522 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
523 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
524 >= GET_MODE_SIZE (to_mode))
525 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
526 from = gen_lowpart (to_mode, from), from_mode = to_mode;
528 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
531 if (to_mode == from_mode
532 || (from_mode == VOIDmode && CONSTANT_P (from)))
534 emit_move_insn (to, from);
538 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
540 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
543 if (VECTOR_MODE_P (to_mode))
544 from = gen_rtx_SUBREG (to_mode, from, 0);
546 to = gen_rtx_SUBREG (from_mode, to, 0);
548 emit_move_insn (to, from);
552 if (to_real != from_real)
559 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
561 /* Try converting directly if the insn is supported. */
562 if ((code = can_extend_p (to_mode, from_mode, 0))
565 emit_unop_insn (code, to, from, UNKNOWN);
570 #ifdef HAVE_trunchfqf2
571 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
573 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
577 #ifdef HAVE_trunctqfqf2
578 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
580 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
584 #ifdef HAVE_truncsfqf2
585 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
587 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
591 #ifdef HAVE_truncdfqf2
592 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
594 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
598 #ifdef HAVE_truncxfqf2
599 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
601 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
605 #ifdef HAVE_trunctfqf2
606 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
608 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
613 #ifdef HAVE_trunctqfhf2
614 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
616 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
620 #ifdef HAVE_truncsfhf2
621 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
623 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
627 #ifdef HAVE_truncdfhf2
628 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
630 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
634 #ifdef HAVE_truncxfhf2
635 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
637 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
641 #ifdef HAVE_trunctfhf2
642 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
644 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
649 #ifdef HAVE_truncsftqf2
650 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
652 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
656 #ifdef HAVE_truncdftqf2
657 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
659 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
663 #ifdef HAVE_truncxftqf2
664 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
666 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
670 #ifdef HAVE_trunctftqf2
671 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
673 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
678 #ifdef HAVE_truncdfsf2
679 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
681 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
685 #ifdef HAVE_truncxfsf2
686 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
688 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
692 #ifdef HAVE_trunctfsf2
693 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
695 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
699 #ifdef HAVE_truncxfdf2
700 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
702 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
706 #ifdef HAVE_trunctfdf2
707 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
709 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
721 libcall = extendsfdf2_libfunc;
725 libcall = extendsfxf2_libfunc;
729 libcall = extendsftf2_libfunc;
741 libcall = truncdfsf2_libfunc;
745 libcall = extenddfxf2_libfunc;
749 libcall = extenddftf2_libfunc;
761 libcall = truncxfsf2_libfunc;
765 libcall = truncxfdf2_libfunc;
777 libcall = trunctfsf2_libfunc;
781 libcall = trunctfdf2_libfunc;
793 if (libcall == (rtx) 0)
794 /* This conversion is not implemented yet. */
798 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
800 insns = get_insns ();
802 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
807 /* Now both modes are integers. */
809 /* Handle expanding beyond a word. */
810 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
811 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
818 enum machine_mode lowpart_mode;
819 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
821 /* Try converting directly if the insn is supported. */
822 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
825 /* If FROM is a SUBREG, put it into a register. Do this
826 so that we always generate the same set of insns for
827 better cse'ing; if an intermediate assignment occurred,
828 we won't be doing the operation directly on the SUBREG. */
829 if (optimize > 0 && GET_CODE (from) == SUBREG)
830 from = force_reg (from_mode, from);
831 emit_unop_insn (code, to, from, equiv_code);
834 /* Next, try converting via full word. */
835 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
836 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
837 != CODE_FOR_nothing))
839 if (GET_CODE (to) == REG)
840 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
841 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
842 emit_unop_insn (code, to,
843 gen_lowpart (word_mode, to), equiv_code);
847 /* No special multiword conversion insn; do it by hand. */
850 /* Since we will turn this into a no conflict block, we must ensure
851 that the source does not overlap the target. */
853 if (reg_overlap_mentioned_p (to, from))
854 from = force_reg (from_mode, from);
856 /* Get a copy of FROM widened to a word, if necessary. */
857 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
858 lowpart_mode = word_mode;
860 lowpart_mode = from_mode;
862 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
864 lowpart = gen_lowpart (lowpart_mode, to);
865 emit_move_insn (lowpart, lowfrom);
867 /* Compute the value to put in each remaining word. */
869 fill_value = const0_rtx;
874 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
875 && STORE_FLAG_VALUE == -1)
877 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
879 fill_value = gen_reg_rtx (word_mode);
880 emit_insn (gen_slt (fill_value));
886 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
887 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
889 fill_value = convert_to_mode (word_mode, fill_value, 1);
893 /* Fill the remaining words. */
894 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
896 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
897 rtx subword = operand_subword (to, index, 1, to_mode);
902 if (fill_value != subword)
903 emit_move_insn (subword, fill_value);
906 insns = get_insns ();
909 emit_no_conflict_block (insns, to, from, NULL_RTX,
910 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
914 /* Truncating multi-word to a word or less. */
915 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
916 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
918 if (!((GET_CODE (from) == MEM
919 && ! MEM_VOLATILE_P (from)
920 && direct_load[(int) to_mode]
921 && ! mode_dependent_address_p (XEXP (from, 0)))
922 || GET_CODE (from) == REG
923 || GET_CODE (from) == SUBREG))
924 from = force_reg (from_mode, from);
925 convert_move (to, gen_lowpart (word_mode, from), 0);
929 /* Handle pointer conversion. */ /* SPEE 900220. */
930 if (to_mode == PQImode)
932 if (from_mode != QImode)
933 from = convert_to_mode (QImode, from, unsignedp);
935 #ifdef HAVE_truncqipqi2
936 if (HAVE_truncqipqi2)
938 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
941 #endif /* HAVE_truncqipqi2 */
945 if (from_mode == PQImode)
947 if (to_mode != QImode)
949 from = convert_to_mode (QImode, from, unsignedp);
954 #ifdef HAVE_extendpqiqi2
955 if (HAVE_extendpqiqi2)
957 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
960 #endif /* HAVE_extendpqiqi2 */
965 if (to_mode == PSImode)
967 if (from_mode != SImode)
968 from = convert_to_mode (SImode, from, unsignedp);
970 #ifdef HAVE_truncsipsi2
971 if (HAVE_truncsipsi2)
973 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
976 #endif /* HAVE_truncsipsi2 */
980 if (from_mode == PSImode)
982 if (to_mode != SImode)
984 from = convert_to_mode (SImode, from, unsignedp);
989 #ifdef HAVE_extendpsisi2
990 if (! unsignedp && HAVE_extendpsisi2)
992 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
995 #endif /* HAVE_extendpsisi2 */
996 #ifdef HAVE_zero_extendpsisi2
997 if (unsignedp && HAVE_zero_extendpsisi2)
999 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1002 #endif /* HAVE_zero_extendpsisi2 */
1007 if (to_mode == PDImode)
1009 if (from_mode != DImode)
1010 from = convert_to_mode (DImode, from, unsignedp);
1012 #ifdef HAVE_truncdipdi2
1013 if (HAVE_truncdipdi2)
1015 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1018 #endif /* HAVE_truncdipdi2 */
1022 if (from_mode == PDImode)
1024 if (to_mode != DImode)
1026 from = convert_to_mode (DImode, from, unsignedp);
1031 #ifdef HAVE_extendpdidi2
1032 if (HAVE_extendpdidi2)
1034 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1037 #endif /* HAVE_extendpdidi2 */
1042 /* Now follow all the conversions between integers
1043 no more than a word long. */
1045 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1046 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1047 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1048 GET_MODE_BITSIZE (from_mode)))
1050 if (!((GET_CODE (from) == MEM
1051 && ! MEM_VOLATILE_P (from)
1052 && direct_load[(int) to_mode]
1053 && ! mode_dependent_address_p (XEXP (from, 0)))
1054 || GET_CODE (from) == REG
1055 || GET_CODE (from) == SUBREG))
1056 from = force_reg (from_mode, from);
1057 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1058 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1059 from = copy_to_reg (from);
1060 emit_move_insn (to, gen_lowpart (to_mode, from));
1064 /* Handle extension. */
1065 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1067 /* Convert directly if that works. */
1068 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1069 != CODE_FOR_nothing)
1071 emit_unop_insn (code, to, from, equiv_code);
1076 enum machine_mode intermediate;
1080 /* Search for a mode to convert via. */
1081 for (intermediate = from_mode; intermediate != VOIDmode;
1082 intermediate = GET_MODE_WIDER_MODE (intermediate))
1083 if (((can_extend_p (to_mode, intermediate, unsignedp)
1084 != CODE_FOR_nothing)
1085 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1086 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1087 GET_MODE_BITSIZE (intermediate))))
1088 && (can_extend_p (intermediate, from_mode, unsignedp)
1089 != CODE_FOR_nothing))
1091 convert_move (to, convert_to_mode (intermediate, from,
1092 unsignedp), unsignedp);
1096 /* No suitable intermediate mode.
1097 Generate what we need with shifts. */
1098 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1099 - GET_MODE_BITSIZE (from_mode), 0);
1100 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1101 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1103 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1106 emit_move_insn (to, tmp);
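/* Worked example (illustrative): extending QImode to SImode with no
   extendqisi2 pattern and no usable intermediate mode.  SHIFT_AMOUNT is
   32 - 8 = 24, so the value is shifted left 24 bits and then back right
   24 bits (arithmetically for a signed extension), leaving the extended
   result in TO.  */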
1111 /* Support special truncate insns for certain modes. */
1113 if (from_mode == DImode && to_mode == SImode)
1115 #ifdef HAVE_truncdisi2
1116 if (HAVE_truncdisi2)
1118 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1122 convert_move (to, force_reg (from_mode, from), unsignedp);
1126 if (from_mode == DImode && to_mode == HImode)
1128 #ifdef HAVE_truncdihi2
1129 if (HAVE_truncdihi2)
1131 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1135 convert_move (to, force_reg (from_mode, from), unsignedp);
1139 if (from_mode == DImode && to_mode == QImode)
1141 #ifdef HAVE_truncdiqi2
1142 if (HAVE_truncdiqi2)
1144 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1148 convert_move (to, force_reg (from_mode, from), unsignedp);
1152 if (from_mode == SImode && to_mode == HImode)
1154 #ifdef HAVE_truncsihi2
1155 if (HAVE_truncsihi2)
1157 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1161 convert_move (to, force_reg (from_mode, from), unsignedp);
1165 if (from_mode == SImode && to_mode == QImode)
1167 #ifdef HAVE_truncsiqi2
1168 if (HAVE_truncsiqi2)
1170 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1174 convert_move (to, force_reg (from_mode, from), unsignedp);
1178 if (from_mode == HImode && to_mode == QImode)
1180 #ifdef HAVE_trunchiqi2
1181 if (HAVE_trunchiqi2)
1183 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1187 convert_move (to, force_reg (from_mode, from), unsignedp);
1191 if (from_mode == TImode && to_mode == DImode)
1193 #ifdef HAVE_trunctidi2
1194 if (HAVE_trunctidi2)
1196 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1200 convert_move (to, force_reg (from_mode, from), unsignedp);
1204 if (from_mode == TImode && to_mode == SImode)
1206 #ifdef HAVE_trunctisi2
1207 if (HAVE_trunctisi2)
1209 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1213 convert_move (to, force_reg (from_mode, from), unsignedp);
1217 if (from_mode == TImode && to_mode == HImode)
1219 #ifdef HAVE_trunctihi2
1220 if (HAVE_trunctihi2)
1222 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1226 convert_move (to, force_reg (from_mode, from), unsignedp);
1230 if (from_mode == TImode && to_mode == QImode)
1232 #ifdef HAVE_trunctiqi2
1233 if (HAVE_trunctiqi2)
1235 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1239 convert_move (to, force_reg (from_mode, from), unsignedp);
1243 /* Handle truncation of volatile memrefs, and so on;
1244 the things that couldn't be truncated directly,
1245 and for which there was no special instruction. */
1246 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1248 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1249 emit_move_insn (to, temp);
1253 /* Mode combination is not recognized. */
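/* Usage sketch (illustrative, not from the original source): widen an
   unsigned QImode value SRC into a fresh SImode register.  */
#if 0
  rtx dst = gen_reg_rtx (SImode);
  convert_move (dst, src, 1);	/* UNSIGNEDP = 1: zero-extend.  */
#endif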
1257 /* Return an rtx for a value that would result
1258 from converting X to mode MODE.
1259 Both X and MODE may be floating, or both integer.
1260 UNSIGNEDP is nonzero if X is an unsigned value.
1261 This can be done by referring to a part of X in place
1262 or by copying to a new temporary with conversion.
1264 This function *must not* call protect_from_queue
1265 except when putting X into an insn (in which case convert_move does it). */
1268 convert_to_mode (mode, x, unsignedp)
1269 enum machine_mode mode;
1273 return convert_modes (mode, VOIDmode, x, unsignedp);
1276 /* Return an rtx for a value that would result
1277 from converting X from mode OLDMODE to mode MODE.
1278 Both modes may be floating, or both integer.
1279 UNSIGNEDP is nonzero if X is an unsigned value.
1281 This can be done by referring to a part of X in place
1282 or by copying to a new temporary with conversion.
1284 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1286 This function *must not* call protect_from_queue
1287 except when putting X into an insn (in which case convert_move does it). */
1290 convert_modes (mode, oldmode, x, unsignedp)
1291 enum machine_mode mode, oldmode;
1297   /* If X is a SUBREG that indicates that we have already done at least
1298      the required extension, strip it.  */
1300 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1301 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1302 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1303 x = gen_lowpart (mode, x);
1305 if (GET_MODE (x) != VOIDmode)
1306 oldmode = GET_MODE (x);
1308 if (mode == oldmode)
1311 /* There is one case that we must handle specially: If we are converting
1312 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1313 we are to interpret the constant as unsigned, gen_lowpart will do
1314    the wrong thing if the constant appears negative.  What we want to do is
1315 make the high-order word of the constant zero, not all ones. */
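/* Concrete instance (illustrative): with a 32-bit HOST_WIDE_INT,
   converting (const_int -1) -- 32 one bits -- to an unsigned 64-bit mode
   must yield 0x00000000ffffffff, whereas gen_lowpart would produce a
   constant that reads back as all ones in 64 bits.  */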
1317 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1318 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1319 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1321 HOST_WIDE_INT val = INTVAL (x);
1323 if (oldmode != VOIDmode
1324 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1326 int width = GET_MODE_BITSIZE (oldmode);
1328 /* We need to zero extend VAL. */
1329 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1332 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1335 /* We can do this with a gen_lowpart if both desired and current modes
1336 are integer, and this is either a constant integer, a register, or a
1337 non-volatile MEM. Except for the constant case where MODE is no
1338 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1340 if ((GET_CODE (x) == CONST_INT
1341 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1342 || (GET_MODE_CLASS (mode) == MODE_INT
1343 && GET_MODE_CLASS (oldmode) == MODE_INT
1344 && (GET_CODE (x) == CONST_DOUBLE
1345 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1346 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1347 && direct_load[(int) mode])
1348 || (GET_CODE (x) == REG
1349 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1350 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1352 /* ?? If we don't know OLDMODE, we have to assume here that
1353 X does not need sign- or zero-extension. This may not be
1354 the case, but it's the best we can do. */
1355 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1356 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1358 HOST_WIDE_INT val = INTVAL (x);
1359 int width = GET_MODE_BITSIZE (oldmode);
1361 /* We must sign or zero-extend in this case. Start by
1362 zero-extending, then sign extend if we need to. */
1363 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1364 	  if (! unsignedp
1365 	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1366 val |= (HOST_WIDE_INT) (-1) << width;
1368 return GEN_INT (trunc_int_for_mode (val, mode));
1371 return gen_lowpart (mode, x);
1374 temp = gen_reg_rtx (mode);
1375 convert_move (temp, x, unsignedp);
1379 /* This macro is used to determine the largest unit size that
1380    move_by_pieces can use.  */
1382 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1383 move efficiently, as opposed to MOVE_MAX which is the maximum
1384 number of bytes we can move with a single instruction. */
1386 #ifndef MOVE_MAX_PIECES
1387 #define MOVE_MAX_PIECES MOVE_MAX
1390 /* Generate several move instructions to copy LEN bytes
1391 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1392 The caller must pass FROM and TO
1393 through protect_from_queue before calling.
1395    When TO is NULL, emit_single_push_insn is used to push the
1396    FROM data onto the stack.
1398 ALIGN is maximum alignment we can assume. */
1401 move_by_pieces (to, from, len, align)
1403 unsigned HOST_WIDE_INT len;
1406 struct move_by_pieces data;
1407 rtx to_addr, from_addr = XEXP (from, 0);
1408 unsigned int max_size = MOVE_MAX_PIECES + 1;
1409 enum machine_mode mode = VOIDmode, tmode;
1410 enum insn_code icode;
1413 data.from_addr = from_addr;
1416 to_addr = XEXP (to, 0);
1419 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1420 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1422 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1429 #ifdef STACK_GROWS_DOWNWARD
1435 data.to_addr = to_addr;
1438 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1439 || GET_CODE (from_addr) == POST_INC
1440 || GET_CODE (from_addr) == POST_DEC);
1442 data.explicit_inc_from = 0;
1443 data.explicit_inc_to = 0;
1444 if (data.reverse) data.offset = len;
1447 /* If copying requires more than two move insns,
1448 copy addresses to registers (to make displacements shorter)
1449 and use post-increment if available. */
1450 if (!(data.autinc_from && data.autinc_to)
1451 && move_by_pieces_ninsns (len, align) > 2)
1453 /* Find the mode of the largest move... */
1454 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1455 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1456 if (GET_MODE_SIZE (tmode) < max_size)
1459 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1461 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1462 data.autinc_from = 1;
1463 data.explicit_inc_from = -1;
1465 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1467 data.from_addr = copy_addr_to_reg (from_addr);
1468 data.autinc_from = 1;
1469 data.explicit_inc_from = 1;
1471 if (!data.autinc_from && CONSTANT_P (from_addr))
1472 data.from_addr = copy_addr_to_reg (from_addr);
1473 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1475 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1477 data.explicit_inc_to = -1;
1479 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1481 data.to_addr = copy_addr_to_reg (to_addr);
1483 data.explicit_inc_to = 1;
1485 if (!data.autinc_to && CONSTANT_P (to_addr))
1486 data.to_addr = copy_addr_to_reg (to_addr);
1489 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1490 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1491 align = MOVE_MAX * BITS_PER_UNIT;
1493 /* First move what we can in the largest integer mode, then go to
1494 successively smaller modes. */
1496 while (max_size > 1)
1498 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1499 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1500 if (GET_MODE_SIZE (tmode) < max_size)
1503 if (mode == VOIDmode)
1506 icode = mov_optab->handlers[(int) mode].insn_code;
1507 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1508 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1510 max_size = GET_MODE_SIZE (mode);
1513 /* The code above should have handled everything. */
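/* Worked example (illustrative): with MOVE_MAX_PIECES == 8, an 11-byte
   copy becomes one DImode move (8 bytes), one HImode move (2 bytes), and
   one QImode move (1 byte) -- the same largest-to-smallest walk that
   move_by_pieces_ninsns below uses to count the 3 insns.  */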
1518 /* Return number of insns required to move L bytes by pieces.
1519 ALIGN (in bits) is maximum alignment we can assume. */
1521 static unsigned HOST_WIDE_INT
1522 move_by_pieces_ninsns (l, align)
1523 unsigned HOST_WIDE_INT l;
1526 unsigned HOST_WIDE_INT n_insns = 0;
1527 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1529 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1530 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1531 align = MOVE_MAX * BITS_PER_UNIT;
1533 while (max_size > 1)
1535 enum machine_mode mode = VOIDmode, tmode;
1536 enum insn_code icode;
1538 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1539 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1540 if (GET_MODE_SIZE (tmode) < max_size)
1543 if (mode == VOIDmode)
1546 icode = mov_optab->handlers[(int) mode].insn_code;
1547 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1548 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1550 max_size = GET_MODE_SIZE (mode);
1558 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1559 with move instructions for mode MODE. GENFUN is the gen_... function
1560 to make a move insn for that mode. DATA has all the other info. */
1563 move_by_pieces_1 (genfun, mode, data)
1564 rtx (*genfun) PARAMS ((rtx, ...));
1565 enum machine_mode mode;
1566 struct move_by_pieces *data;
1568 unsigned int size = GET_MODE_SIZE (mode);
1571 while (data->len >= size)
1574 data->offset -= size;
1578 if (data->autinc_to)
1580 to1 = gen_rtx_MEM (mode, data->to_addr);
1581 MEM_COPY_ATTRIBUTES (to1, data->to);
1584 to1 = change_address (data->to, mode,
1585 plus_constant (data->to_addr, data->offset));
1588 if (data->autinc_from)
1590 from1 = gen_rtx_MEM (mode, data->from_addr);
1591 MEM_COPY_ATTRIBUTES (from1, data->from);
1594 from1 = change_address (data->from, mode,
1595 plus_constant (data->from_addr, data->offset));
1597 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1598 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1599 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1600 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1603 emit_insn ((*genfun) (to1, from1));
1605 emit_single_push_insn (mode, from1, NULL);
1607 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1608 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1609 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1610 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1612 if (! data->reverse)
1613 data->offset += size;
1619 /* Emit code to move a block Y to a block X.
1620 This may be done with string-move instructions,
1621 with multiple scalar move instructions, or with a library call.
1623    Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1624    with mode BLKmode.
1625 SIZE is an rtx that says how long they are.
1626 ALIGN is the maximum alignment we can assume they have.
1628    Return the address of the new block, if memcpy is called and returns it,
1629    0 otherwise.  */
1632 emit_block_move (x, y, size, align)
1638 #ifdef TARGET_MEM_FUNCTIONS
1640 tree call_expr, arg_list;
1643 if (GET_MODE (x) != BLKmode)
1646 if (GET_MODE (y) != BLKmode)
1649 x = protect_from_queue (x, 1);
1650 y = protect_from_queue (y, 0);
1651 size = protect_from_queue (size, 0);
1653 if (GET_CODE (x) != MEM)
1655 if (GET_CODE (y) != MEM)
1660 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1661 move_by_pieces (x, y, INTVAL (size), align);
1664 /* Try the most limited insn first, because there's no point
1665 including more than one in the machine description unless
1666 the more limited one has some advantage. */
1668 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1669 enum machine_mode mode;
1671 /* Since this is a move insn, we don't care about volatility. */
1674 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1675 mode = GET_MODE_WIDER_MODE (mode))
1677 enum insn_code code = movstr_optab[(int) mode];
1678 insn_operand_predicate_fn pred;
1680 if (code != CODE_FOR_nothing
1681 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1682 here because if SIZE is less than the mode mask, as it is
1683 returned by the macro, it will definitely be less than the
1684 actual mode mask. */
1685 && ((GET_CODE (size) == CONST_INT
1686 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1687 <= (GET_MODE_MASK (mode) >> 1)))
1688 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1689 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1690 || (*pred) (x, BLKmode))
1691 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1692 || (*pred) (y, BLKmode))
1693 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1694 || (*pred) (opalign, VOIDmode)))
1697 rtx last = get_last_insn ();
1700 op2 = convert_to_mode (mode, size, 1);
1701 pred = insn_data[(int) code].operand[2].predicate;
1702 if (pred != 0 && ! (*pred) (op2, mode))
1703 op2 = copy_to_mode_reg (mode, op2);
1705 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1713 delete_insns_since (last);
1719 /* X, Y, or SIZE may have been passed through protect_from_queue.
1721 It is unsafe to save the value generated by protect_from_queue
1722 and reuse it later. Consider what happens if emit_queue is
1723 called before the return value from protect_from_queue is used.
1725 Expansion of the CALL_EXPR below will call emit_queue before
1726 we are finished emitting RTL for argument setup. So if we are
1727 not careful we could get the wrong value for an argument.
1729 To avoid this problem we go ahead and emit code to copy X, Y &
1730 SIZE into new pseudos. We can then place those new pseudos
1731    into an RTL_EXPR and use them later, even after a call to
1732    emit_queue.
1734 Note this is not strictly needed for library calls since they
1735 do not call emit_queue before loading their arguments. However,
1736 we may need to have library calls call emit_queue in the future
1737 since failing to do so could cause problems for targets which
1738 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1739 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1740 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1742 #ifdef TARGET_MEM_FUNCTIONS
1743 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1745 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1746 TREE_UNSIGNED (integer_type_node));
1747 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1750 #ifdef TARGET_MEM_FUNCTIONS
1751 /* It is incorrect to use the libcall calling conventions to call
1752 memcpy in this context.
1754 This could be a user call to memcpy and the user may wish to
1755 examine the return value from memcpy.
1757 For targets where libcalls and normal calls have different conventions
1758 for returning pointers, we could end up generating incorrect code.
1760 So instead of using a libcall sequence we build up a suitable
1761 CALL_EXPR and expand the call in the normal fashion. */
1762 if (fn == NULL_TREE)
1766       /* This was copied from except.c; it is unclear whether all of
1767 	 this is necessary in this context.  */
1768 fn = get_identifier ("memcpy");
1769 fntype = build_pointer_type (void_type_node);
1770 fntype = build_function_type (fntype, NULL_TREE);
1771 fn = build_decl (FUNCTION_DECL, fn, fntype);
1772 ggc_add_tree_root (&fn, 1);
1773 DECL_EXTERNAL (fn) = 1;
1774 TREE_PUBLIC (fn) = 1;
1775 DECL_ARTIFICIAL (fn) = 1;
1776 make_decl_rtl (fn, NULL);
1777 assemble_external (fn);
1780 /* We need to make an argument list for the function call.
1782 memcpy has three arguments, the first two are void * addresses and
1783 the last is a size_t byte count for the copy. */
1785 = build_tree_list (NULL_TREE,
1786 make_tree (build_pointer_type (void_type_node), x));
1787 TREE_CHAIN (arg_list)
1788 = build_tree_list (NULL_TREE,
1789 make_tree (build_pointer_type (void_type_node), y));
1790 TREE_CHAIN (TREE_CHAIN (arg_list))
1791 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1792 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1794 /* Now we have to build up the CALL_EXPR itself. */
1795 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1796 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1797 call_expr, arg_list, NULL_TREE);
1798 TREE_SIDE_EFFECTS (call_expr) = 1;
1800 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1802 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1803 VOIDmode, 3, y, Pmode, x, Pmode,
1804 convert_to_mode (TYPE_MODE (integer_type_node), size,
1805 TREE_UNSIGNED (integer_type_node)),
1806 TYPE_MODE (integer_type_node));
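/* Usage sketch (illustrative; DST and SRC are hypothetical BLKmode MEMs):
   copy a 16-byte block whose word alignment is known.  ALIGN is given in
   bits in this interface.  */
#if 0
  emit_block_move (dst, src, GEN_INT (16), BITS_PER_WORD);
#endif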
1813 /* Copy all or part of a value X into registers starting at REGNO.
1814 The number of registers to be filled is NREGS. */
1817 move_block_to_reg (regno, x, nregs, mode)
1821 enum machine_mode mode;
1824 #ifdef HAVE_load_multiple
1832 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1833 x = validize_mem (force_const_mem (mode, x));
1835 /* See if the machine can do this with a load multiple insn. */
1836 #ifdef HAVE_load_multiple
1837 if (HAVE_load_multiple)
1839 last = get_last_insn ();
1840 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1848 delete_insns_since (last);
1852 for (i = 0; i < nregs; i++)
1853 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1854 operand_subword_force (x, i, mode));
1857 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1858 The number of registers to be filled is NREGS. SIZE indicates the number
1859 of bytes in the object X. */
1862 move_block_from_reg (regno, x, nregs, size)
1869 #ifdef HAVE_store_multiple
1873 enum machine_mode mode;
1878 /* If SIZE is that of a mode no bigger than a word, just use that
1879 mode's store operation. */
1880 if (size <= UNITS_PER_WORD
1881 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1883 emit_move_insn (change_address (x, mode, NULL),
1884 gen_rtx_REG (mode, regno));
1888 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1889 to the left before storing to memory. Note that the previous test
1890 doesn't handle all cases (e.g. SIZE == 3). */
1891 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1893 rtx tem = operand_subword (x, 0, 1, BLKmode);
1899 shift = expand_shift (LSHIFT_EXPR, word_mode,
1900 gen_rtx_REG (word_mode, regno),
1901 build_int_2 ((UNITS_PER_WORD - size)
1902 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1903 emit_move_insn (tem, shift);
1907 /* See if the machine can do this with a store multiple insn. */
1908 #ifdef HAVE_store_multiple
1909 if (HAVE_store_multiple)
1911 last = get_last_insn ();
1912 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1920 delete_insns_since (last);
1924 for (i = 0; i < nregs; i++)
1926 rtx tem = operand_subword (x, i, 1, BLKmode);
1931 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1935 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1936    registers represented by a PARALLEL.  SSIZE represents the total size of
1937    block SRC in bytes, or -1 if not known.  ALIGN is the known alignment of
1938    SRC.  */
1939 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1940    the balance will be in what would be the low-order memory addresses, i.e.
1941    left justified for big endian, right justified for little endian.  This
1942    happens to be true for the targets currently using this support.  If this
1943    ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1944    would be necessary.  */
1947 emit_group_load (dst, orig_src, ssize, align)
1955 if (GET_CODE (dst) != PARALLEL)
1958 /* Check for a NULL entry, used to indicate that the parameter goes
1959 both on the stack and in registers. */
1960 if (XEXP (XVECEXP (dst, 0, 0), 0))
1965 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1967 /* Process the pieces. */
1968 for (i = start; i < XVECLEN (dst, 0); i++)
1970 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1971 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1972 unsigned int bytelen = GET_MODE_SIZE (mode);
1975 /* Handle trailing fragments that run over the size of the struct. */
1976 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1978 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1979 bytelen = ssize - bytepos;
1984 /* If we won't be loading directly from memory, protect the real source
1985 from strange tricks we might play; but make sure that the source can
1986 be loaded directly into the destination. */
1988 if (GET_CODE (orig_src) != MEM
1989 && (!CONSTANT_P (orig_src)
1990 || (GET_MODE (orig_src) != mode
1991 && GET_MODE (orig_src) != VOIDmode)))
1993 if (GET_MODE (orig_src) == VOIDmode)
1994 src = gen_reg_rtx (mode);
1996 src = gen_reg_rtx (GET_MODE (orig_src));
1997 emit_move_insn (src, orig_src);
2000 /* Optimize the access just a bit. */
2001 if (GET_CODE (src) == MEM
2002 && align >= GET_MODE_ALIGNMENT (mode)
2003 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2004 && bytelen == GET_MODE_SIZE (mode))
2006 tmps[i] = gen_reg_rtx (mode);
2007 emit_move_insn (tmps[i],
2008 change_address (src, mode,
2009 plus_constant (XEXP (src, 0),
2012 else if (GET_CODE (src) == CONCAT)
2015 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2016 tmps[i] = XEXP (src, 0);
2017 else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
2018 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
2019 tmps[i] = XEXP (src, 1);
2023 else if (CONSTANT_P (src)
2024 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2027 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2028 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2029 mode, mode, align, ssize);
2031 if (BYTES_BIG_ENDIAN && shift)
2032 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2033 tmps[i], 0, OPTAB_WIDEN);
2038 /* Copy the extracted pieces into the proper (probable) hard regs. */
2039 for (i = start; i < XVECLEN (dst, 0); i++)
2040 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
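/* Illustrative sketch (not in the original source): a 16-byte structure
   returned in two hypothetical DImode hard registers might be described as

	(parallel [(expr_list (reg:DI 4) (const_int 0))
		   (expr_list (reg:DI 5) (const_int 8))])

   where each const_int is the byte offset of its register within the
   block; emit_group_load fills such a PARALLEL from ORIG_SRC.  */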
2043 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2044 registers represented by a PARALLEL. SSIZE represents the total size of
2045 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
2048 emit_group_store (orig_dst, src, ssize, align)
2056 if (GET_CODE (src) != PARALLEL)
2059 /* Check for a NULL entry, used to indicate that the parameter goes
2060 both on the stack and in registers. */
2061 if (XEXP (XVECEXP (src, 0, 0), 0))
2066 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2068 /* Copy the (probable) hard regs into pseudos. */
2069 for (i = start; i < XVECLEN (src, 0); i++)
2071 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2072 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2073 emit_move_insn (tmps[i], reg);
2077 /* If we won't be storing directly into memory, protect the real destination
2078 from strange tricks we might play. */
2080 if (GET_CODE (dst) == PARALLEL)
2084 /* We can get a PARALLEL dst if there is a conditional expression in
2085 a return statement. In that case, the dst and src are the same,
2086 so no action is necessary. */
2087 if (rtx_equal_p (dst, src))
2090 /* It is unclear if we can ever reach here, but we may as well handle
2091 	 it.  Allocate a temporary, and split this into a store/load to/from
2092 	 memory.  */
2094 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2095 emit_group_store (temp, src, ssize, align);
2096 emit_group_load (dst, temp, ssize, align);
2099 else if (GET_CODE (dst) != MEM)
2101 dst = gen_reg_rtx (GET_MODE (orig_dst));
2102 /* Make life a bit easier for combine. */
2103 emit_move_insn (dst, const0_rtx);
2106 /* Process the pieces. */
2107 for (i = start; i < XVECLEN (src, 0); i++)
2109 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2110 enum machine_mode mode = GET_MODE (tmps[i]);
2111 unsigned int bytelen = GET_MODE_SIZE (mode);
2113 /* Handle trailing fragments that run over the size of the struct. */
2114 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2116 if (BYTES_BIG_ENDIAN)
2118 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2119 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2120 tmps[i], 0, OPTAB_WIDEN);
2122 bytelen = ssize - bytepos;
2125 /* Optimize the access just a bit. */
2126 if (GET_CODE (dst) == MEM
2127 && align >= GET_MODE_ALIGNMENT (mode)
2128 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2129 && bytelen == GET_MODE_SIZE (mode))
2130 emit_move_insn (change_address (dst, mode,
2131 plus_constant (XEXP (dst, 0),
2135 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2136 mode, tmps[i], align, ssize);
2141 /* Copy from the pseudo into the (probable) hard reg. */
2142 if (GET_CODE (dst) == REG)
2143 emit_move_insn (orig_dst, dst);
2146 /* Generate code to copy a BLKmode object of TYPE out of a
2147 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2148 is null, a stack temporary is created. TGTBLK is returned.
2150 The primary purpose of this routine is to handle functions
2151 that return BLKmode structures in registers. Some machines
2152 (the PA for example) want to return all small structures
2153 in registers regardless of the structure's alignment. */
2156 copy_blkmode_from_reg (tgtblk, srcreg, type)
2161 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2162 rtx src = NULL, dst = NULL;
2163 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2164 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2168 tgtblk = assign_temp (build_qualified_type (type,
2170 | TYPE_QUAL_CONST)),
2172 preserve_temp_slots (tgtblk);
2175 /* This code assumes srcreg is at least a full word. If it isn't,
2176 copy it into a new pseudo which is a full word. */
2177 if (GET_MODE (srcreg) != BLKmode
2178 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2179 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2181 /* Structures whose size is not a multiple of a word are aligned
2182 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2183 machine, this means we must skip the empty high order bytes when
2184 calculating the bit offset. */
2185 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2186 big_endian_correction
2187 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
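/* For example (illustrative): with 32-bit words and a 3-byte structure,
   big_endian_correction is 32 - 24 = 8, so the extraction below starts 8
   bits into the first source word while the store still begins at bit 0
   of TGTBLK.  */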
2189   /* Copy the structure BITSIZE bits at a time.
2191      We could probably emit more efficient code for machines which do not use
2192      strict alignment, but it doesn't seem worth the effort at the current
2193      time.  */
2194 for (bitpos = 0, xbitpos = big_endian_correction;
2195 bitpos < bytes * BITS_PER_UNIT;
2196 bitpos += bitsize, xbitpos += bitsize)
2198 /* We need a new source operand each time xbitpos is on a
2199 word boundary and when xbitpos == big_endian_correction
2200 (the first time through). */
2201 if (xbitpos % BITS_PER_WORD == 0
2202 || xbitpos == big_endian_correction)
2203 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2206       /* We need a new destination operand each time bitpos is on
2207 	 a word boundary.  */
2208 if (bitpos % BITS_PER_WORD == 0)
2209 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2211       /* Use xbitpos for the source extraction (right justified) and
2212 	 bitpos for the destination store (left justified).  */
2213 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2214 extract_bit_field (src, bitsize,
2215 xbitpos % BITS_PER_WORD, 1,
2216 NULL_RTX, word_mode, word_mode,
2217 bitsize, BITS_PER_WORD),
2218 bitsize, BITS_PER_WORD);
2224 /* Add a USE expression for REG to the (possibly empty) list pointed
2225 to by CALL_FUSAGE. REG must denote a hard register. */
2228 use_reg (call_fusage, reg)
2229 rtx *call_fusage, reg;
2231 if (GET_CODE (reg) != REG
2232 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2236 = gen_rtx_EXPR_LIST (VOIDmode,
2237 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2240 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2241 starting at REGNO. All of these registers must be hard registers. */
2244 use_regs (call_fusage, regno, nregs)
2251 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2254 for (i = 0; i < nregs; i++)
2255 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2258 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2259 PARALLEL REGS. This is for calls that pass values in multiple
2260 non-contiguous locations. The Irix 6 ABI has examples of this. */
2263 use_group_regs (call_fusage, regs)
2269 for (i = 0; i < XVECLEN (regs, 0); i++)
2271 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2273 /* A NULL entry means the parameter goes both on the stack and in
2274 registers. This can also be a MEM for targets that pass values
2275 partially on the stack and partially in registers. */
2276 if (reg != 0 && GET_CODE (reg) == REG)
2277 use_reg (call_fusage, reg);
2283 can_store_by_pieces (len, constfun, constfundata, align)
2284 unsigned HOST_WIDE_INT len;
2285 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2289 unsigned HOST_WIDE_INT max_size, l;
2290 HOST_WIDE_INT offset = 0;
2291 enum machine_mode mode, tmode;
2292 enum insn_code icode;
2296 if (! MOVE_BY_PIECES_P (len, align))
2299 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2300 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2301 align = MOVE_MAX * BITS_PER_UNIT;
2303 /* We would first store what we can in the largest integer mode, then go to
2304 successively smaller modes. */
2307 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2312 max_size = MOVE_MAX_PIECES + 1;
2313 while (max_size > 1)
2315 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2316 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2317 if (GET_MODE_SIZE (tmode) < max_size)
2320 if (mode == VOIDmode)
2323 icode = mov_optab->handlers[(int) mode].insn_code;
2324 if (icode != CODE_FOR_nothing
2325 && align >= GET_MODE_ALIGNMENT (mode))
2327 unsigned int size = GET_MODE_SIZE (mode);
2334 cst = (*constfun) (constfundata, offset, mode);
2335 if (!LEGITIMATE_CONSTANT_P (cst))
2345 max_size = GET_MODE_SIZE (mode);
2348 /* The code above should have handled everything. */
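/* Illustrative sketch (not part of GNU CC): how the widest-first mode
   scan above carves LEN bytes into power-of-two pieces.  demo_ninsns
   is hypothetical; MAX_PIECE (a power of two) plays the role of
   MOVE_MAX_PIECES.  */
#if 0
static unsigned int
demo_ninsns (unsigned int len, unsigned int max_piece)
{
  unsigned int n = 0, size;

  /* Use the widest piece that still fits, then retry the remainder
     with successively narrower pieces, as the loop above does with
     integer modes.  */
  for (size = max_piece; size >= 1; size /= 2)
    {
      n += len / size;
      len %= size;
    }
  return n;  /* demo_ninsns (7, 4) == 3: one 4-, one 2-, one 1-byte piece */
}
#endif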
2356 /* Generate several move instructions to store LEN bytes generated by
2357 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2358 pointer which will be passed as argument in every CONSTFUN call.
2359 ALIGN is maximum alignment we can assume. */
2362 store_by_pieces (to, len, constfun, constfundata, align)
2364 unsigned HOST_WIDE_INT len;
2365 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2369 struct store_by_pieces data;
2371 if (! MOVE_BY_PIECES_P (len, align))
2373 to = protect_from_queue (to, 1);
2374 data.constfun = constfun;
2375 data.constfundata = constfundata;
2378 store_by_pieces_1 (&data, align);
2381 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2382 rtx with BLKmode). The caller must pass TO through protect_from_queue
2383 before calling. ALIGN is maximum alignment we can assume. */
2386 clear_by_pieces (to, len, align)
2388 unsigned HOST_WIDE_INT len;
2391 struct store_by_pieces data;
2393 data.constfun = clear_by_pieces_1;
2394 data.constfundata = NULL;
2397 store_by_pieces_1 (&data, align);
2400 /* Callback routine for clear_by_pieces.
2401 Return const0_rtx unconditionally. */
2404 clear_by_pieces_1 (data, offset, mode)
2405 PTR data ATTRIBUTE_UNUSED;
2406 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2407 enum machine_mode mode ATTRIBUTE_UNUSED;
2412 /* Subroutine of clear_by_pieces and store_by_pieces.
2413 Generate several move instructions to store LEN bytes of block TO. (A MEM
2414 rtx with BLKmode). The caller must pass TO through protect_from_queue
2415 before calling. ALIGN is maximum alignment we can assume. */
2418 store_by_pieces_1 (data, align)
2419 struct store_by_pieces *data;
2422 rtx to_addr = XEXP (data->to, 0);
2423 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2424 enum machine_mode mode = VOIDmode, tmode;
2425 enum insn_code icode;
2428 data->to_addr = to_addr;
2430 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2431 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2433 data->explicit_inc_to = 0;
2435 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2437 data->offset = data->len;
2439 /* If storing requires more than two move insns,
2440 copy addresses to registers (to make displacements shorter)
2441 and use post-increment if available. */
2442 if (!data->autinc_to
2443 && move_by_pieces_ninsns (data->len, align) > 2)
2445 /* Determine the main mode we'll be using. */
2446 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2447 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2448 if (GET_MODE_SIZE (tmode) < max_size)
2451 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2453 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2454 data->autinc_to = 1;
2455 data->explicit_inc_to = -1;
2458 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2459 && ! data->autinc_to)
2461 data->to_addr = copy_addr_to_reg (to_addr);
2462 data->autinc_to = 1;
2463 data->explicit_inc_to = 1;
2466 if (!data->autinc_to && CONSTANT_P (to_addr))
2467 data->to_addr = copy_addr_to_reg (to_addr);
2470 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2471 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2472 align = MOVE_MAX * BITS_PER_UNIT;
2474 /* First store what we can in the largest integer mode, then go to
2475 successively smaller modes. */
2477 while (max_size > 1)
2479 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2480 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2481 if (GET_MODE_SIZE (tmode) < max_size)
2484 if (mode == VOIDmode)
2487 icode = mov_optab->handlers[(int) mode].insn_code;
2488 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2489 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2491 max_size = GET_MODE_SIZE (mode);
2494 /* The code above should have handled everything. */
2499 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2500 with move instructions for mode MODE. GENFUN is the gen_... function
2501 to make a move insn for that mode. DATA has all the other info. */
2504 store_by_pieces_2 (genfun, mode, data)
2505 rtx (*genfun) PARAMS ((rtx, ...));
2506 enum machine_mode mode;
2507 struct store_by_pieces *data;
2509 unsigned int size = GET_MODE_SIZE (mode);
2512 while (data->len >= size)
2515 data->offset -= size;
2517 if (data->autinc_to)
2519 to1 = gen_rtx_MEM (mode, data->to_addr);
2520 MEM_COPY_ATTRIBUTES (to1, data->to);
2523 to1 = change_address (data->to, mode,
2524 plus_constant (data->to_addr, data->offset));
2526 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2527 emit_insn (gen_add2_insn (data->to_addr,
2528 GEN_INT (-(HOST_WIDE_INT) size)));
2530 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2531 emit_insn ((*genfun) (to1, cst));
2533 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2534 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2536 if (! data->reverse)
2537 data->offset += size;
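/* Illustrative sketch (not part of GNU CC): the offset bookkeeping of
   the loop above in plain C.  A reverse (pre-decrement style) walk
   adjusts the offset before the store; a forward (post-increment
   style) walk adjusts it afterwards.  */
#if 0
#include <stddef.h>
#include <string.h>

static void
demo_store_piece_loop (unsigned char *to, size_t len, size_t size,
                       int reverse)
{
  size_t offset = reverse ? len : 0;

  while (len >= size)
    {
      if (reverse)
        offset -= size;

      /* Stand-in for emitting one move insn of the constant.  */
      memset (to + offset, 0xAB, size);

      if (! reverse)
        offset += size;
      len -= size;
    }
}
#endif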
2543 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2544 its length in bytes and ALIGN is the maximum alignment we can assume it has.
2546 If we call a function that returns the length of the block, return it. */
2549 clear_storage (object, size, align)
2554 #ifdef TARGET_MEM_FUNCTIONS
2556 tree call_expr, arg_list;
2560 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2561 just move a zero. Otherwise, do this a piece at a time. */
2562 if (GET_MODE (object) != BLKmode
2563 && GET_CODE (size) == CONST_INT
2564 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2565 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2568 object = protect_from_queue (object, 1);
2569 size = protect_from_queue (size, 0);
2571 if (GET_CODE (size) == CONST_INT
2572 && MOVE_BY_PIECES_P (INTVAL (size), align))
2573 clear_by_pieces (object, INTVAL (size), align);
2576 /* Try the most limited insn first, because there's no point
2577 including more than one in the machine description unless
2578 the more limited one has some advantage. */
2580 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2581 enum machine_mode mode;
2583 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2584 mode = GET_MODE_WIDER_MODE (mode))
2586 enum insn_code code = clrstr_optab[(int) mode];
2587 insn_operand_predicate_fn pred;
2589 if (code != CODE_FOR_nothing
2590 /* We don't need MODE to be narrower than
2591 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2592 the mode mask, as it is returned by the macro, it will
2593 definitely be less than the actual mode mask. */
2594 && ((GET_CODE (size) == CONST_INT
2595 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2596 <= (GET_MODE_MASK (mode) >> 1)))
2597 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2598 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2599 || (*pred) (object, BLKmode))
2600 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2601 || (*pred) (opalign, VOIDmode)))
2604 rtx last = get_last_insn ();
2607 op1 = convert_to_mode (mode, size, 1);
2608 pred = insn_data[(int) code].operand[1].predicate;
2609 if (pred != 0 && ! (*pred) (op1, mode))
2610 op1 = copy_to_mode_reg (mode, op1);
2612 pat = GEN_FCN ((int) code) (object, op1, opalign);
2619 delete_insns_since (last);
2623 /* OBJECT or SIZE may have been passed through protect_from_queue.
2625 It is unsafe to save the value generated by protect_from_queue
2626 and reuse it later. Consider what happens if emit_queue is
2627 called before the return value from protect_from_queue is used.
2629 Expansion of the CALL_EXPR below will call emit_queue before
2630 we are finished emitting RTL for argument setup. So if we are
2631 not careful we could get the wrong value for an argument.
2633 To avoid this problem we go ahead and emit code to copy OBJECT
2634 and SIZE into new pseudos. We can then place those new pseudos
2635 into an RTL_EXPR and use them later, even after a call to
2638 Note this is not strictly needed for library calls since they
2639 do not call emit_queue before loading their arguments. However,
2640 we may need to have library calls call emit_queue in the future
2641 since failing to do so could cause problems for targets which
2642 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2643 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2645 #ifdef TARGET_MEM_FUNCTIONS
2646 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2648 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2649 TREE_UNSIGNED (integer_type_node));
2650 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2653 #ifdef TARGET_MEM_FUNCTIONS
2654 /* It is incorrect to use the libcall calling conventions to call
2655 memset in this context.
2657 This could be a user call to memset and the user may wish to
2658 examine the return value from memset.
2660 For targets where libcalls and normal calls have different
2661 conventions for returning pointers, we could end up generating
2664 So instead of using a libcall sequence we build up a suitable
2665 CALL_EXPR and expand the call in the normal fashion. */
2666 if (fn == NULL_TREE)
2670 /* This was copied from except.c; I don't know if all this is
2671 necessary in this context or not. */
2672 fn = get_identifier ("memset");
2673 fntype = build_pointer_type (void_type_node);
2674 fntype = build_function_type (fntype, NULL_TREE);
2675 fn = build_decl (FUNCTION_DECL, fn, fntype);
2676 ggc_add_tree_root (&fn, 1);
2677 DECL_EXTERNAL (fn) = 1;
2678 TREE_PUBLIC (fn) = 1;
2679 DECL_ARTIFICIAL (fn) = 1;
2680 make_decl_rtl (fn, NULL);
2681 assemble_external (fn);
2684 /* We need to make an argument list for the function call.
2686 memset has three arguments, the first is a void * address, the
2687 second an integer with the initialization value, the last is a
2688 size_t byte count for the copy. */
2690 = build_tree_list (NULL_TREE,
2691 make_tree (build_pointer_type (void_type_node),
2693 TREE_CHAIN (arg_list)
2694 = build_tree_list (NULL_TREE,
2695 make_tree (integer_type_node, const0_rtx));
2696 TREE_CHAIN (TREE_CHAIN (arg_list))
2697 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2698 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2700 /* Now we have to build up the CALL_EXPR itself. */
2701 call_expr = build1 (ADDR_EXPR,
2702 build_pointer_type (TREE_TYPE (fn)), fn);
2703 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2704 call_expr, arg_list, NULL_TREE);
2705 TREE_SIDE_EFFECTS (call_expr) = 1;
2707 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2709 emit_library_call (bzero_libfunc, LCT_NORMAL,
2710 VOIDmode, 2, object, Pmode, size,
2711 TYPE_MODE (integer_type_node));
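/* Illustrative sketch (not part of GNU CC): the same strategy cascade
   as clear_storage, in standalone C.  The threshold is a stand-in for
   MOVE_BY_PIECES_P; plain C has no machine clrstr pattern, so the
   second branch goes straight to the library call.  */
#if 0
#include <stddef.h>
#include <string.h>

#define DEMO_PIECES_THRESHOLD 16	/* hypothetical cutoff */

static void
demo_clear_storage (unsigned char *object, size_t size)
{
  if (size <= DEMO_PIECES_THRESHOLD)
    {
      /* "Clear by pieces": open-coded stores, no call overhead.  */
      size_t i;
      for (i = 0; i < size; i++)
        object[i] = 0;
    }
  else
    memset (object, 0, size);	/* the memset/bzero fallback */
}
#endif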
2719 /* Generate code to copy Y into X.
2720 Both Y and X must have the same mode, except that
2721 Y can be a constant with VOIDmode.
2722 This mode cannot be BLKmode; use emit_block_move for that.
2724 Return the last instruction emitted. */
2727 emit_move_insn (x, y)
2730 enum machine_mode mode = GET_MODE (x);
2731 rtx y_cst = NULL_RTX;
2734 x = protect_from_queue (x, 1);
2735 y = protect_from_queue (y, 0);
2737 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2740 /* Never force constant_p_rtx to memory. */
2741 if (GET_CODE (y) == CONSTANT_P_RTX)
2743 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2746 y = force_const_mem (mode, y);
2749 /* If X or Y are memory references, verify that their addresses are valid
2750 for the machine. */
2751 if (GET_CODE (x) == MEM
2752 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2753 && ! push_operand (x, GET_MODE (x)))
2755 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2756 x = change_address (x, VOIDmode, XEXP (x, 0));
2758 if (GET_CODE (y) == MEM
2759 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2761 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2762 y = change_address (y, VOIDmode, XEXP (y, 0));
2764 if (mode == BLKmode)
2767 last_insn = emit_move_insn_1 (x, y);
2769 if (y_cst && GET_CODE (x) == REG)
2770 REG_NOTES (last_insn)
2771 = gen_rtx_EXPR_LIST (REG_EQUAL, y_cst, REG_NOTES (last_insn));
2776 /* Low level part of emit_move_insn.
2777 Called just like emit_move_insn, but assumes X and Y
2778 are basically valid. */
2781 emit_move_insn_1 (x, y)
2784 enum machine_mode mode = GET_MODE (x);
2785 enum machine_mode submode;
2786 enum mode_class class = GET_MODE_CLASS (mode);
2789 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2792 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2794 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2796 /* Expand complex moves by moving real part and imag part, if possible. */
2797 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2798 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2800 (class == MODE_COMPLEX_INT
2801 ? MODE_INT : MODE_FLOAT),
2803 && (mov_optab->handlers[(int) submode].insn_code
2804 != CODE_FOR_nothing))
2806 /* Don't split destination if it is a stack push. */
2807 int stack = push_operand (x, GET_MODE (x));
2809 #ifdef PUSH_ROUNDING
2810 /* In case we output to the stack, but the size is smaller than the machine
2811 can push exactly, we need to use move instructions. */
2813 && PUSH_ROUNDING (GET_MODE_SIZE (submode)) != GET_MODE_SIZE (submode))
2816 int offset1, offset2;
2818 /* Do not use anti_adjust_stack, since we don't want to update
2819 stack_pointer_delta. */
2820 temp = expand_binop (Pmode,
2821 #ifdef STACK_GROWS_DOWNWARD
2828 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2832 if (temp != stack_pointer_rtx)
2833 emit_move_insn (stack_pointer_rtx, temp);
2834 #ifdef STACK_GROWS_DOWNWARD
2836 offset2 = GET_MODE_SIZE (submode);
2838 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2839 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2840 + GET_MODE_SIZE (submode));
2842 emit_move_insn (change_address (x, submode,
2843 gen_rtx_PLUS (Pmode,
2845 GEN_INT (offset1))),
2846 gen_realpart (submode, y));
2847 emit_move_insn (change_address (x, submode,
2848 gen_rtx_PLUS (Pmode,
2850 GEN_INT (offset2))),
2851 gen_imagpart (submode, y));
2855 /* If this is a stack, push the highpart first, so it
2856 will be in the argument order.
2858 In that case, change_address is used only to convert
2859 the mode, not to change the address. */
2862 /* Note that the real part always precedes the imag part in memory
2863 regardless of the machine's endianness. */
2864 #ifdef STACK_GROWS_DOWNWARD
2865 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2866 (gen_rtx_MEM (submode, XEXP (x, 0)),
2867 gen_imagpart (submode, y)));
2868 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2869 (gen_rtx_MEM (submode, XEXP (x, 0)),
2870 gen_realpart (submode, y)));
2872 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2873 (gen_rtx_MEM (submode, XEXP (x, 0)),
2874 gen_realpart (submode, y)));
2875 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2876 (gen_rtx_MEM (submode, XEXP (x, 0)),
2877 gen_imagpart (submode, y)));
2882 rtx realpart_x, realpart_y;
2883 rtx imagpart_x, imagpart_y;
2885 /* If this is a complex value with each part being smaller than a
2886 word, the usual calling sequence will likely pack the pieces into
2887 a single register. Unfortunately, SUBREG of hard registers only
2888 deals in terms of words, so we have a problem converting input
2889 arguments to the CONCAT of two registers that is used elsewhere
2890 for complex values. If this is before reload, we can copy it into
2891 memory and reload. FIXME, we should see about using extract and
2892 insert on integer registers, but complex short and complex char
2893 variables should be rarely used. */
2894 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2895 && (reload_in_progress | reload_completed) == 0)
2897 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2898 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2900 if (packed_dest_p || packed_src_p)
2902 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2903 ? MODE_FLOAT : MODE_INT);
2905 enum machine_mode reg_mode
2906 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2908 if (reg_mode != BLKmode)
2910 rtx mem = assign_stack_temp (reg_mode,
2911 GET_MODE_SIZE (mode), 0);
2912 rtx cmem = change_address (mem, mode, NULL_RTX);
2915 = N_("function using short complex types cannot be inline");
2919 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2920 emit_move_insn_1 (cmem, y);
2921 return emit_move_insn_1 (sreg, mem);
2925 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2926 emit_move_insn_1 (mem, sreg);
2927 return emit_move_insn_1 (x, cmem);
2933 realpart_x = gen_realpart (submode, x);
2934 realpart_y = gen_realpart (submode, y);
2935 imagpart_x = gen_imagpart (submode, x);
2936 imagpart_y = gen_imagpart (submode, y);
2938 /* Show the output dies here. This is necessary for SUBREGs
2939 of pseudos since we cannot track their lifetimes correctly;
2940 hard regs shouldn't appear here except as return values.
2941 We never want to emit such a clobber after reload. */
2943 && ! (reload_in_progress || reload_completed)
2944 && (GET_CODE (realpart_x) == SUBREG
2945 || GET_CODE (imagpart_x) == SUBREG))
2947 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2950 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2951 (realpart_x, realpart_y));
2952 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2953 (imagpart_x, imagpart_y));
2956 return get_last_insn ();
2959 /* This will handle any multi-word mode that lacks a move_insn pattern.
2960 However, you will get better code if you define such patterns,
2961 even if they must turn into multiple assembler instructions. */
2962 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2968 #ifdef PUSH_ROUNDING
2970 /* If X is a push on the stack, do the push now and replace
2971 X with a reference to the stack pointer. */
2972 if (push_operand (x, GET_MODE (x)))
2974 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2975 x = change_address (x, VOIDmode, stack_pointer_rtx);
2979 /* If we are in reload, see if either operand is a MEM whose address
2980 is scheduled for replacement. */
2981 if (reload_in_progress && GET_CODE (x) == MEM
2982 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2984 rtx new = gen_rtx_MEM (GET_MODE (x), inner);
2986 MEM_COPY_ATTRIBUTES (new, x);
2989 if (reload_in_progress && GET_CODE (y) == MEM
2990 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2992 rtx new = gen_rtx_MEM (GET_MODE (y), inner);
2994 MEM_COPY_ATTRIBUTES (new, y);
3002 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3005 rtx xpart = operand_subword (x, i, 1, mode);
3006 rtx ypart = operand_subword (y, i, 1, mode);
3008 /* If we can't get a part of Y, put Y into memory if it is a
3009 constant. Otherwise, force it into a register. If we still
3010 can't get a part of Y, abort. */
3011 if (ypart == 0 && CONSTANT_P (y))
3013 y = force_const_mem (mode, y);
3014 ypart = operand_subword (y, i, 1, mode);
3016 else if (ypart == 0)
3017 ypart = operand_subword_force (y, i, mode);
3019 if (xpart == 0 || ypart == 0)
3022 need_clobber |= (GET_CODE (xpart) == SUBREG);
3024 last_insn = emit_move_insn (xpart, ypart);
3027 seq = gen_sequence ();
3030 /* Show the output dies here. This is necessary for SUBREGs
3031 of pseudos since we cannot track their lifetimes correctly;
3032 hard regs shouldn't appear here except as return values.
3033 We never want to emit such a clobber after reload. */
3035 && ! (reload_in_progress || reload_completed)
3036 && need_clobber != 0)
3038 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
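/* Illustrative sketch (not part of GNU CC): splitting a complex move
   into two submode moves, as the MODE_COMPLEX_* branch above does when
   no move pattern exists for the whole complex mode.  */
#if 0
typedef struct { double re, im; } demo_complex;

static void
demo_move_complex (demo_complex *x, const demo_complex *y)
{
  /* Two submode-sized moves instead of one full-mode move; the real
     part always precedes the imaginary part in memory.  */
  x->re = y->re;
  x->im = y->im;
}
#endif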
3049 /* Pushing data onto the stack. */
3051 /* Push a block of length SIZE (perhaps variable)
3052 and return an rtx to address the beginning of the block.
3053 Note that it is not possible for the value returned to be a QUEUED.
3054 The value may be virtual_outgoing_args_rtx.
3056 EXTRA is the number of bytes of padding to push in addition to SIZE.
3057 BELOW nonzero means this padding comes at low addresses;
3058 otherwise, the padding comes at high addresses. */
3061 push_block (size, extra, below)
3067 size = convert_modes (Pmode, ptr_mode, size, 1);
3068 if (CONSTANT_P (size))
3069 anti_adjust_stack (plus_constant (size, extra));
3070 else if (GET_CODE (size) == REG && extra == 0)
3071 anti_adjust_stack (size);
3074 temp = copy_to_mode_reg (Pmode, size);
3076 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3077 temp, 0, OPTAB_LIB_WIDEN);
3078 anti_adjust_stack (temp);
3081 #ifndef STACK_GROWS_DOWNWARD
3082 #ifdef ARGS_GROW_DOWNWARD
3083 if (!ACCUMULATE_OUTGOING_ARGS)
3091 /* Return the lowest stack address when STACK or ARGS grow downward and
3092 we are not accumulating outgoing arguments (the c4x port uses such
3093 conventions). */
3094 temp = virtual_outgoing_args_rtx;
3095 if (extra != 0 && below)
3096 temp = plus_constant (temp, extra);
3100 if (GET_CODE (size) == CONST_INT)
3101 temp = plus_constant (virtual_outgoing_args_rtx,
3102 -INTVAL (size) - (below ? 0 : extra));
3103 else if (extra != 0 && !below)
3104 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3105 negate_rtx (Pmode, plus_constant (size, extra)));
3107 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3108 negate_rtx (Pmode, size));
3111 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
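/* Illustrative sketch (not part of GNU CC): the constant-size address
   arithmetic above for a downward-growing stack, in plain C.  */
#if 0
static unsigned long
demo_push_block_addr (unsigned long outgoing_args, unsigned long size,
                      unsigned long extra, int below)
{
  /* The stack was adjusted by SIZE + EXTRA; the block starts SIZE
     bytes below the old outgoing-args address, and padding placed
     above the block (BELOW == 0) pushes it down by EXTRA more.  */
  return outgoing_args - size - (below ? 0 : extra);
}
#endif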
3115 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
3116 block of SIZE bytes. */
3119 get_push_address (size)
3124 if (STACK_PUSH_CODE == POST_DEC)
3125 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3126 else if (STACK_PUSH_CODE == POST_INC)
3127 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3129 temp = stack_pointer_rtx;
3131 return copy_to_reg (temp);
3134 /* Emit single push insn. */
3136 emit_single_push_insn (mode, x, type)
3138 enum machine_mode mode;
3141 #ifdef PUSH_ROUNDING
3143 int rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3146 if (GET_MODE_SIZE (mode) == rounded_size)
3147 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3150 #ifdef STACK_GROWS_DOWNWARD
3151 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3152 GEN_INT (-rounded_size));
3154 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3155 GEN_INT (rounded_size));
3157 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3160 dest = gen_rtx_MEM (mode, dest_addr);
3162 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3166 set_mem_attributes (dest, type, 1);
3167 /* Function incoming arguments may overlap with sibling call
3168 outgoing arguments and we cannot allow reordering of reads
3169 from function arguments with stores to outgoing arguments
3170 of sibling calls. */
3171 MEM_ALIAS_SET (dest) = 0;
3173 emit_move_insn (dest, x);
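/* Illustrative sketch (not part of GNU CC): what PUSH_ROUNDING and the
   pre-decrement address above amount to on a downward-growing stack.
   The 4-byte slot size is a demo value, not a target macro.  */
#if 0
#define DEMO_SLOT 4	/* hypothetical push granularity */

/* Round a push of SIZE bytes up to the amount the push insn really
   moves the stack pointer by.  */
static unsigned long
demo_push_rounding (unsigned long size)
{
  return (size + DEMO_SLOT - 1) & ~(unsigned long) (DEMO_SLOT - 1);
}

/* The pushed data lands at the new stack pointer, which drops by the
   rounded size.  */
static unsigned long
demo_push_dest (unsigned long sp, unsigned long size)
{
  return sp - demo_push_rounding (size);
}
#endif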
3179 /* Generate code to push X onto the stack, assuming it has mode MODE and
3181 MODE is redundant except when X is a CONST_INT (since they don't
3182 carry mode info).
3183 SIZE is an rtx for the size of data to be copied (in bytes),
3184 needed only if X is BLKmode.
3186 ALIGN (in bits) is maximum alignment we can assume.
3188 If PARTIAL and REG are both nonzero, then copy that many of the first
3189 words of X into registers starting with REG, and push the rest of X.
3190 The amount of space pushed is decreased by PARTIAL words,
3191 rounded *down* to a multiple of PARM_BOUNDARY.
3192 REG must be a hard register in this case.
3193 If REG is zero but PARTIAL is not, take all other actions for an
3194 argument partially in registers, but do not actually load any
3195 registers.
3197 EXTRA is the amount in bytes of extra space to leave next to this arg.
3198 This is ignored if an argument block has already been allocated.
3200 On a machine that lacks real push insns, ARGS_ADDR is the address of
3201 the bottom of the argument block for this call. We use indexing off there
3202 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3203 argument block has not been preallocated.
3205 ARGS_SO_FAR is the size of args previously pushed for this call.
3207 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3208 for arguments passed in registers. If nonzero, it will be the number
3209 of bytes required. */
3212 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3213 args_addr, args_so_far, reg_parm_stack_space,
3216 enum machine_mode mode;
3225 int reg_parm_stack_space;
3229 enum direction stack_direction
3230 #ifdef STACK_GROWS_DOWNWARD
3236 /* Decide where to pad the argument: `downward' for below,
3237 `upward' for above, or `none' for don't pad it.
3238 Default is below for small data on big-endian machines; else above. */
3239 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3241 /* Invert direction if stack is post-update. */
3242 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
3243 if (where_pad != none)
3244 where_pad = (where_pad == downward ? upward : downward);
3246 xinner = x = protect_from_queue (x, 0);
3248 if (mode == BLKmode)
3250 /* Copy a block into the stack, entirely or partially. */
3253 int used = partial * UNITS_PER_WORD;
3254 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3262 /* USED is now the # of bytes we need not copy to the stack
3263 because registers will take care of them. */
3266 xinner = change_address (xinner, BLKmode,
3267 plus_constant (XEXP (xinner, 0), used));
3269 /* If the partial register-part of the arg counts in its stack size,
3270 skip the part of stack space corresponding to the registers.
3271 Otherwise, start copying to the beginning of the stack space,
3272 by setting SKIP to 0. */
3273 skip = (reg_parm_stack_space == 0) ? 0 : used;
3275 #ifdef PUSH_ROUNDING
3276 /* Do it with several push insns if that doesn't take lots of insns
3277 and if there is no difficulty with push insns that skip bytes
3278 on the stack for alignment purposes. */
3281 && GET_CODE (size) == CONST_INT
3283 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3284 /* Here we avoid the case of a structure whose weak alignment
3285 forces many pushes of a small amount of data,
3286 and such small pushes do rounding that causes trouble. */
3287 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3288 || align >= BIGGEST_ALIGNMENT
3289 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3290 == (align / BITS_PER_UNIT)))
3291 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3293 /* Push padding now if padding above and stack grows down,
3294 or if padding below and stack grows up.
3295 But if space already allocated, this has already been done. */
3296 if (extra && args_addr == 0
3297 && where_pad != none && where_pad != stack_direction)
3298 anti_adjust_stack (GEN_INT (extra));
3300 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3302 if (current_function_check_memory_usage && ! in_check_memory_usage)
3306 in_check_memory_usage = 1;
3307 temp = get_push_address (INTVAL (size) - used);
3308 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3309 emit_library_call (chkr_copy_bitmap_libfunc,
3310 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3311 Pmode, XEXP (xinner, 0), Pmode,
3312 GEN_INT (INTVAL (size) - used),
3313 TYPE_MODE (sizetype));
3315 emit_library_call (chkr_set_right_libfunc,
3316 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3317 Pmode, GEN_INT (INTVAL (size) - used),
3318 TYPE_MODE (sizetype),
3319 GEN_INT (MEMORY_USE_RW),
3320 TYPE_MODE (integer_type_node));
3321 in_check_memory_usage = 0;
3325 #endif /* PUSH_ROUNDING */
3329 /* Otherwise make space on the stack and copy the data
3330 to the address of that space. */
3332 /* Deduct words put into registers from the size we must copy. */
3335 if (GET_CODE (size) == CONST_INT)
3336 size = GEN_INT (INTVAL (size) - used);
3338 size = expand_binop (GET_MODE (size), sub_optab, size,
3339 GEN_INT (used), NULL_RTX, 0,
3343 /* Get the address of the stack space.
3344 In this case, we do not deal with EXTRA separately.
3345 A single stack adjust will do. */
3348 temp = push_block (size, extra, where_pad == downward);
3351 else if (GET_CODE (args_so_far) == CONST_INT)
3352 temp = memory_address (BLKmode,
3353 plus_constant (args_addr,
3354 skip + INTVAL (args_so_far)));
3356 temp = memory_address (BLKmode,
3357 plus_constant (gen_rtx_PLUS (Pmode,
3361 if (current_function_check_memory_usage && ! in_check_memory_usage)
3363 in_check_memory_usage = 1;
3364 target = copy_to_reg (temp);
3365 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3366 emit_library_call (chkr_copy_bitmap_libfunc,
3367 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3369 XEXP (xinner, 0), Pmode,
3370 size, TYPE_MODE (sizetype));
3372 emit_library_call (chkr_set_right_libfunc,
3373 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3375 size, TYPE_MODE (sizetype),
3376 GEN_INT (MEMORY_USE_RW),
3377 TYPE_MODE (integer_type_node));
3378 in_check_memory_usage = 0;
3381 target = gen_rtx_MEM (BLKmode, temp);
3385 set_mem_attributes (target, type, 1);
3386 /* Function incoming arguments may overlap with sibling call
3387 outgoing arguments and we cannot allow reordering of reads
3388 from function arguments with stores to outgoing arguments
3389 of sibling calls. */
3390 MEM_ALIAS_SET (target) = 0;
3393 /* TEMP is the address of the block. Copy the data there. */
3394 if (GET_CODE (size) == CONST_INT
3395 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3397 move_by_pieces (target, xinner, INTVAL (size), align);
3402 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3403 enum machine_mode mode;
3405 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3407 mode = GET_MODE_WIDER_MODE (mode))
3409 enum insn_code code = movstr_optab[(int) mode];
3410 insn_operand_predicate_fn pred;
3412 if (code != CODE_FOR_nothing
3413 && ((GET_CODE (size) == CONST_INT
3414 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3415 <= (GET_MODE_MASK (mode) >> 1)))
3416 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3417 && (!(pred = insn_data[(int) code].operand[0].predicate)
3418 || ((*pred) (target, BLKmode)))
3419 && (!(pred = insn_data[(int) code].operand[1].predicate)
3420 || ((*pred) (xinner, BLKmode)))
3421 && (!(pred = insn_data[(int) code].operand[3].predicate)
3422 || ((*pred) (opalign, VOIDmode))))
3424 rtx op2 = convert_to_mode (mode, size, 1);
3425 rtx last = get_last_insn ();
3428 pred = insn_data[(int) code].operand[2].predicate;
3429 if (pred != 0 && ! (*pred) (op2, mode))
3430 op2 = copy_to_mode_reg (mode, op2);
3432 pat = GEN_FCN ((int) code) (target, xinner,
3440 delete_insns_since (last);
3445 if (!ACCUMULATE_OUTGOING_ARGS)
3447 /* If the source is referenced relative to the stack pointer,
3448 copy it to another register to stabilize it. We do not need
3449 to do this if we know that we won't be changing sp. */
3451 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3452 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3453 temp = copy_to_reg (temp);
3456 /* Make inhibit_defer_pop nonzero around the library call
3457 to force it to pop the bcopy-arguments right away. */
3459 #ifdef TARGET_MEM_FUNCTIONS
3460 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3461 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3462 convert_to_mode (TYPE_MODE (sizetype),
3463 size, TREE_UNSIGNED (sizetype)),
3464 TYPE_MODE (sizetype));
3466 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3467 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3468 convert_to_mode (TYPE_MODE (integer_type_node),
3470 TREE_UNSIGNED (integer_type_node)),
3471 TYPE_MODE (integer_type_node));
3476 else if (partial > 0)
3478 /* Scalar partly in registers. */
3480 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3483 /* # words of start of argument
3484 that we must make space for but need not store. */
3485 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3486 int args_offset = INTVAL (args_so_far);
3489 /* Push padding now if padding above and stack grows down,
3490 or if padding below and stack grows up.
3491 But if space already allocated, this has already been done. */
3492 if (extra && args_addr == 0
3493 && where_pad != none && where_pad != stack_direction)
3494 anti_adjust_stack (GEN_INT (extra));
3496 /* If we make space by pushing it, we might as well push
3497 the real data. Otherwise, we can leave OFFSET nonzero
3498 and leave the space uninitialized. */
3502 /* Now NOT_STACK gets the number of words that we don't need to
3503 allocate on the stack. */
3504 not_stack = partial - offset;
3506 /* If the partial register-part of the arg counts in its stack size,
3507 skip the part of stack space corresponding to the registers.
3508 Otherwise, start copying to the beginning of the stack space,
3509 by setting SKIP to 0. */
3510 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3512 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3513 x = validize_mem (force_const_mem (mode, x));
3515 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3516 SUBREGs of such registers are not allowed. */
3517 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3518 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3519 x = copy_to_reg (x);
3521 /* Loop over all the words allocated on the stack for this arg. */
3522 /* We can do it by words, because any scalar bigger than a word
3523 has a size a multiple of a word. */
3524 #ifndef PUSH_ARGS_REVERSED
3525 for (i = not_stack; i < size; i++)
3527 for (i = size - 1; i >= not_stack; i--)
3529 if (i >= not_stack + offset)
3530 emit_push_insn (operand_subword_force (x, i, mode),
3531 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3533 GEN_INT (args_offset + ((i - not_stack + skip)
3535 reg_parm_stack_space, alignment_pad);
3540 rtx target = NULL_RTX;
3543 /* Push padding now if padding above and stack grows down,
3544 or if padding below and stack grows up.
3545 But if space already allocated, this has already been done. */
3546 if (extra && args_addr == 0
3547 && where_pad != none && where_pad != stack_direction)
3548 anti_adjust_stack (GEN_INT (extra));
3550 #ifdef PUSH_ROUNDING
3551 if (args_addr == 0 && PUSH_ARGS)
3552 emit_single_push_insn (mode, x, type);
3556 if (GET_CODE (args_so_far) == CONST_INT)
3558 = memory_address (mode,
3559 plus_constant (args_addr,
3560 INTVAL (args_so_far)));
3562 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3565 dest = gen_rtx_MEM (mode, addr);
3568 set_mem_attributes (dest, type, 1);
3569 /* Function incoming arguments may overlap with sibling call
3570 outgoing arguments and we cannot allow reordering of reads
3571 from function arguments with stores to outgoing arguments
3572 of sibling calls. */
3573 MEM_ALIAS_SET (dest) = 0;
3576 emit_move_insn (dest, x);
3580 if (current_function_check_memory_usage && ! in_check_memory_usage)
3582 in_check_memory_usage = 1;
3584 target = get_push_address (GET_MODE_SIZE (mode));
3586 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3587 emit_library_call (chkr_copy_bitmap_libfunc,
3588 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3589 Pmode, XEXP (x, 0), Pmode,
3590 GEN_INT (GET_MODE_SIZE (mode)),
3591 TYPE_MODE (sizetype));
3593 emit_library_call (chkr_set_right_libfunc,
3594 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3595 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3596 TYPE_MODE (sizetype),
3597 GEN_INT (MEMORY_USE_RW),
3598 TYPE_MODE (integer_type_node));
3599 in_check_memory_usage = 0;
3604 /* If part should go in registers, copy that part
3605 into the appropriate registers. Do this now, at the end,
3606 since mem-to-mem copies above may do function calls. */
3607 if (partial > 0 && reg != 0)
3609 /* Handle calls that pass values in multiple non-contiguous locations.
3610 The Irix 6 ABI has examples of this. */
3611 if (GET_CODE (reg) == PARALLEL)
3612 emit_group_load (reg, x, -1, align); /* ??? size? */
3614 move_block_to_reg (REGNO (reg), x, partial, mode);
3617 if (extra && args_addr == 0 && where_pad == stack_direction)
3618 anti_adjust_stack (GEN_INT (extra));
3620 if (alignment_pad && args_addr == 0)
3621 anti_adjust_stack (alignment_pad);
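/* Illustrative sketch (not part of GNU CC): the bookkeeping of the
   scalar partly-in-registers case above, in plain C.  The word count
   for PARM_BOUNDARY is a demo value, not the target macro.  */
#if 0
#include <stdio.h>

#define DEMO_PARM_BOUNDARY_WORDS 2	/* PARM_BOUNDARY in words */

static void
demo_partial_arg (int partial, int reg_parm_stack_space)
{
  /* Words at the start of the argument we must make space for on
     the stack but need not store.  */
  int offset = partial % DEMO_PARM_BOUNDARY_WORDS;

  /* Words that never touch the stack at all.  */
  int not_stack = partial - offset;

  /* Skip the register part of the stack space only if the target
     reserves stack space for register parameters.  */
  int skip = reg_parm_stack_space ? not_stack : 0;

  printf ("offset=%d not_stack=%d skip=%d\n", offset, not_stack, skip);
}
#endif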
3624 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3625 operations. */
3632 /* Only registers can be subtargets. */
3633 || GET_CODE (x) != REG
3634 /* If the register is readonly, it can't be set more than once. */
3635 || RTX_UNCHANGING_P (x)
3636 /* Don't use hard regs to avoid extending their life. */
3637 || REGNO (x) < FIRST_PSEUDO_REGISTER
3638 /* Avoid subtargets inside loops,
3639 since they hide some invariant expressions. */
3640 || preserve_subexpressions_p ())
3644 /* Expand an assignment that stores the value of FROM into TO.
3645 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3646 (This may contain a QUEUED rtx;
3647 if the value is constant, this rtx is a constant.)
3648 Otherwise, the returned value is NULL_RTX.
3650 SUGGEST_REG is no longer actually used.
3651 It used to mean, copy the value through a register
3652 and return that register, if that is possible.
3653 We now use WANT_VALUE to decide whether to do this. */
3656 expand_assignment (to, from, want_value, suggest_reg)
3659 int suggest_reg ATTRIBUTE_UNUSED;
3661 register rtx to_rtx = 0;
3664 /* Don't crash if the lhs of the assignment was erroneous. */
3666 if (TREE_CODE (to) == ERROR_MARK)
3668 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3669 return want_value ? result : NULL_RTX;
3672 /* Assignment of a structure component needs special treatment
3673 if the structure component's rtx is not simply a MEM.
3674 Assignment of an array element at a constant index, and assignment of
3675 an array element in an unaligned packed structure field, have the same
3676 problem. */
3678 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3679 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3681 enum machine_mode mode1;
3682 HOST_WIDE_INT bitsize, bitpos;
3687 unsigned int alignment;
3690 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3691 &unsignedp, &volatilep, &alignment);
3693 /* If we are going to use store_bit_field and extract_bit_field,
3694 make sure to_rtx will be safe for multiple use. */
3696 if (mode1 == VOIDmode && want_value)
3697 tem = stabilize_reference (tem);
3699 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3702 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3704 if (GET_CODE (to_rtx) != MEM)
3707 if (GET_MODE (offset_rtx) != ptr_mode)
3709 #ifdef POINTERS_EXTEND_UNSIGNED
3710 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3712 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3716 /* A constant address in TO_RTX can have VOIDmode; we must not try
3717 to call force_reg for that case. Avoid that case. */
3718 if (GET_CODE (to_rtx) == MEM
3719 && GET_MODE (to_rtx) == BLKmode
3720 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3722 && (bitpos % bitsize) == 0
3723 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3724 && alignment == GET_MODE_ALIGNMENT (mode1))
3726 rtx temp = change_address (to_rtx, mode1,
3727 plus_constant (XEXP (to_rtx, 0),
3730 if (GET_CODE (XEXP (temp, 0)) == REG)
3733 to_rtx = change_address (to_rtx, mode1,
3734 force_reg (GET_MODE (XEXP (temp, 0)),
3739 to_rtx = change_address (to_rtx, VOIDmode,
3740 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3741 force_reg (ptr_mode,
3747 if (GET_CODE (to_rtx) == MEM)
3749 /* When the offset is zero, to_rtx is the address of the
3750 structure we are storing into, and hence may be shared.
3751 We must make a new MEM before setting the volatile bit. */
3753 to_rtx = copy_rtx (to_rtx);
3755 MEM_VOLATILE_P (to_rtx) = 1;
3757 #if 0 /* This was turned off because, when a field is volatile
3758 in an object which is not volatile, the object may be in a register,
3759 and then we would abort over here. */
3765 if (TREE_CODE (to) == COMPONENT_REF
3766 && TREE_READONLY (TREE_OPERAND (to, 1)))
3769 to_rtx = copy_rtx (to_rtx);
3771 RTX_UNCHANGING_P (to_rtx) = 1;
3774 /* Check the access. */
3775 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3780 enum machine_mode best_mode;
3782 best_mode = get_best_mode (bitsize, bitpos,
3783 TYPE_ALIGN (TREE_TYPE (tem)),
3785 if (best_mode == VOIDmode)
3788 best_mode_size = GET_MODE_BITSIZE (best_mode);
3789 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3790 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3791 size *= GET_MODE_SIZE (best_mode);
3793 /* Check the access right of the pointer. */
3794 in_check_memory_usage = 1;
3796 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3797 VOIDmode, 3, to_addr, Pmode,
3798 GEN_INT (size), TYPE_MODE (sizetype),
3799 GEN_INT (MEMORY_USE_WO),
3800 TYPE_MODE (integer_type_node));
3801 in_check_memory_usage = 0;
3804 /* If this is a varying-length object, we must get the address of
3805 the source and do an explicit block move. */
3808 unsigned int from_align;
3809 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3811 = change_address (to_rtx, VOIDmode,
3812 plus_constant (XEXP (to_rtx, 0),
3813 bitpos / BITS_PER_UNIT));
3815 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3816 MIN (alignment, from_align));
3823 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3825 /* Spurious cast for HPUX compiler. */
3826 ? ((enum machine_mode)
3827 TYPE_MODE (TREE_TYPE (to)))
3831 int_size_in_bytes (TREE_TYPE (tem)),
3832 get_alias_set (to));
3834 preserve_temp_slots (result);
3838 /* If the value is meaningful, convert RESULT to the proper mode.
3839 Otherwise, return nothing. */
3840 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3841 TYPE_MODE (TREE_TYPE (from)),
3843 TREE_UNSIGNED (TREE_TYPE (to)))
3848 /* If the rhs is a function call and its value is not an aggregate,
3849 call the function before we start to compute the lhs.
3850 This is needed for correct code for cases such as
3851 val = setjmp (buf) on machines where reference to val
3852 requires loading up part of an address in a separate insn.
3854 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3855 since it might be a promoted variable where the zero- or sign- extension
3856 needs to be done. Handling this in the normal way is safe because no
3857 computation is done before the call. */
3858 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3859 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3860 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3861 && GET_CODE (DECL_RTL (to)) == REG))
3866 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3868 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3870 /* Handle calls that return values in multiple non-contiguous locations.
3871 The Irix 6 ABI has examples of this. */
3872 if (GET_CODE (to_rtx) == PARALLEL)
3873 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3874 TYPE_ALIGN (TREE_TYPE (from)));
3875 else if (GET_MODE (to_rtx) == BLKmode)
3876 emit_block_move (to_rtx, value, expr_size (from),
3877 TYPE_ALIGN (TREE_TYPE (from)));
3880 #ifdef POINTERS_EXTEND_UNSIGNED
3881 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3882 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3883 value = convert_memory_address (GET_MODE (to_rtx), value);
3885 emit_move_insn (to_rtx, value);
3887 preserve_temp_slots (to_rtx);
3890 return want_value ? to_rtx : NULL_RTX;
3893 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3894 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3898 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3899 if (GET_CODE (to_rtx) == MEM)
3900 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3903 /* Don't move directly into a return register. */
3904 if (TREE_CODE (to) == RESULT_DECL
3905 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3910 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3912 if (GET_CODE (to_rtx) == PARALLEL)
3913 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3914 TYPE_ALIGN (TREE_TYPE (from)));
3916 emit_move_insn (to_rtx, temp);
3918 preserve_temp_slots (to_rtx);
3921 return want_value ? to_rtx : NULL_RTX;
3924 /* In case we are returning the contents of an object which overlaps
3925 the place the value is being stored, use a safe function when copying
3926 a value through a pointer into a structure value return block. */
3927 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3928 && current_function_returns_struct
3929 && !current_function_returns_pcc_struct)
3934 size = expr_size (from);
3935 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3936 EXPAND_MEMORY_USE_DONT);
3938 /* Copy the rights of the bitmap. */
3939 if (current_function_check_memory_usage)
3940 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3941 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3942 XEXP (from_rtx, 0), Pmode,
3943 convert_to_mode (TYPE_MODE (sizetype),
3944 size, TREE_UNSIGNED (sizetype)),
3945 TYPE_MODE (sizetype));
3947 #ifdef TARGET_MEM_FUNCTIONS
3948 emit_library_call (memmove_libfunc, LCT_NORMAL,
3949 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3950 XEXP (from_rtx, 0), Pmode,
3951 convert_to_mode (TYPE_MODE (sizetype),
3952 size, TREE_UNSIGNED (sizetype)),
3953 TYPE_MODE (sizetype));
3955 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3956 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3957 XEXP (to_rtx, 0), Pmode,
3958 convert_to_mode (TYPE_MODE (integer_type_node),
3959 size, TREE_UNSIGNED (integer_type_node)),
3960 TYPE_MODE (integer_type_node));
3963 preserve_temp_slots (to_rtx);
3966 return want_value ? to_rtx : NULL_RTX;
3969 /* Compute FROM and store the value in the rtx we got. */
3972 result = store_expr (from, to_rtx, want_value);
3973 preserve_temp_slots (result);
3976 return want_value ? result : NULL_RTX;
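/* Illustrative sketch (not part of GNU CC): the effect of a
   store_field-style bit-field store, reduced to plain C over a
   little-endian byte buffer.  demo_store_bits is hypothetical and
   only illustrates the bitpos/bitsize interface.  */
#if 0
#include <stddef.h>

static void
demo_store_bits (unsigned char *buf, size_t bitpos, size_t bitsize,
                 unsigned long value)
{
  size_t i;

  for (i = 0; i < bitsize; i++)
    {
      size_t pos = bitpos + i;
      unsigned char mask = (unsigned char) (1u << (pos % 8));

      if ((value >> i) & 1)
        buf[pos / 8] |= mask;
      else
        buf[pos / 8] &= (unsigned char) ~mask;
    }
}
#endif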
3979 /* Generate code for computing expression EXP,
3980 and storing the value into TARGET.
3981 TARGET may contain a QUEUED rtx.
3983 If WANT_VALUE is nonzero, return a copy of the value
3984 not in TARGET, so that we can be sure to use the proper
3985 value in a containing expression even if TARGET has something
3986 else stored in it. If possible, we copy the value through a pseudo
3987 and return that pseudo. Or, if the value is constant, we try to
3988 return the constant. In some cases, we return a pseudo
3989 copied *from* TARGET.
3991 If the mode is BLKmode then we may return TARGET itself.
3992 It turns out that in BLKmode it doesn't cause a problem,
3993 because C has no operators that could combine two different
3994 assignments into the same BLKmode object with different values
3995 with no sequence point. Will other languages need this to
3996 be more careful?
3998 If WANT_VALUE is 0, we return NULL, to make sure
3999 to catch quickly any cases where the caller uses the value
4000 and fails to set WANT_VALUE. */
4003 store_expr (exp, target, want_value)
4005 register rtx target;
4009 int dont_return_target = 0;
4010 int dont_store_target = 0;
4012 if (TREE_CODE (exp) == COMPOUND_EXPR)
4014 /* Perform first part of compound expression, then assign from second
4015 part. */
4016 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4018 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4020 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4022 /* For conditional expression, get safe form of the target. Then
4023 test the condition, doing the appropriate assignment on either
4024 side. This avoids the creation of unnecessary temporaries.
4025 For non-BLKmode, it is more efficient not to do this. */
4027 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4030 target = protect_from_queue (target, 1);
4032 do_pending_stack_adjust ();
4034 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4035 start_cleanup_deferral ();
4036 store_expr (TREE_OPERAND (exp, 1), target, 0);
4037 end_cleanup_deferral ();
4039 emit_jump_insn (gen_jump (lab2));
4042 start_cleanup_deferral ();
4043 store_expr (TREE_OPERAND (exp, 2), target, 0);
4044 end_cleanup_deferral ();
4049 return want_value ? target : NULL_RTX;
4051 else if (queued_subexp_p (target))
4052 /* If target contains a postincrement, let's not risk
4053 using it as the place to generate the rhs. */
4055 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4057 /* Expand EXP into a new pseudo. */
4058 temp = gen_reg_rtx (GET_MODE (target));
4059 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4062 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4064 /* If target is volatile, ANSI requires accessing the value
4065 *from* the target, if it is accessed. So make that happen.
4066 In no case return the target itself. */
4067 if (! MEM_VOLATILE_P (target) && want_value)
4068 dont_return_target = 1;
4070 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4071 && GET_MODE (target) != BLKmode)
4072 /* If target is in memory and caller wants value in a register instead,
4073 arrange that. Pass TARGET as target for expand_expr so that,
4074 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4075 We know expand_expr will not use the target in that case.
4076 Don't do this if TARGET is volatile because we are supposed
4077 to write it and then read it. */
4079 temp = expand_expr (exp, target, GET_MODE (target), 0);
4080 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4082 /* If TEMP is already in the desired TARGET, only copy it from
4083 memory and don't store it there again. */
4085 || (rtx_equal_p (temp, target)
4086 && ! side_effects_p (temp) && ! side_effects_p (target)))
4087 dont_store_target = 1;
4088 temp = copy_to_reg (temp);
4090 dont_return_target = 1;
4092 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4093 /* If this is a scalar in a register that is stored in a wider mode
4094 than the declared mode, compute the result into its declared mode
4095 and then convert to the wider mode. Our value is the computed
4096 expression. */
4098 /* If we don't want a value, we can do the conversion inside EXP,
4099 which will often result in some optimizations. Do the conversion
4100 in two steps: first change the signedness, if needed, then
4101 the extend. But don't do this if the type of EXP is a subtype
4102 of something else since then the conversion might involve
4103 more than just converting modes. */
4104 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4105 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4107 if (TREE_UNSIGNED (TREE_TYPE (exp))
4108 != SUBREG_PROMOTED_UNSIGNED_P (target))
4111 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4115 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4116 SUBREG_PROMOTED_UNSIGNED_P (target)),
4120 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4122 /* If TEMP is a volatile MEM and we want a result value, make
4123 the access now so it gets done only once. Likewise if
4124 it contains TARGET. */
4125 if (GET_CODE (temp) == MEM && want_value
4126 && (MEM_VOLATILE_P (temp)
4127 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4128 temp = copy_to_reg (temp);
4130 /* If TEMP is a VOIDmode constant, use convert_modes to make
4131 sure that we properly convert it. */
4132 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4133 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4134 TYPE_MODE (TREE_TYPE (exp)), temp,
4135 SUBREG_PROMOTED_UNSIGNED_P (target));
4137 convert_move (SUBREG_REG (target), temp,
4138 SUBREG_PROMOTED_UNSIGNED_P (target));
4140 /* If we promoted a constant, change the mode back down to match
4141 target. Otherwise, the caller might get confused by a result whose
4142 mode is larger than expected. */
4144 if (want_value && GET_MODE (temp) != GET_MODE (target)
4145 && GET_MODE (temp) != VOIDmode)
4147 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4148 SUBREG_PROMOTED_VAR_P (temp) = 1;
4149 SUBREG_PROMOTED_UNSIGNED_P (temp)
4150 = SUBREG_PROMOTED_UNSIGNED_P (target);
4153 return want_value ? temp : NULL_RTX;
4157 temp = expand_expr (exp, target, GET_MODE (target), 0);
4158 /* Return TARGET if it's a specified hardware register.
4159 If TARGET is a volatile mem ref, either return TARGET
4160 or return a reg copied *from* TARGET; ANSI requires this.
4162 Otherwise, if TEMP is not TARGET, return TEMP
4163 if it is constant (for efficiency),
4164 or if we really want the correct value. */
4165 if (!(target && GET_CODE (target) == REG
4166 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4167 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4168 && ! rtx_equal_p (temp, target)
4169 && (CONSTANT_P (temp) || want_value))
4170 dont_return_target = 1;
4173 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4174 the same as that of TARGET, adjust the constant. This is needed, for
4175 example, in case it is a CONST_DOUBLE and we want only a word-sized
4176 value. */
4177 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4178 && TREE_CODE (exp) != ERROR_MARK
4179 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4180 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4181 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4183 if (current_function_check_memory_usage
4184 && GET_CODE (target) == MEM
4185 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4187 in_check_memory_usage = 1;
4188 if (GET_CODE (temp) == MEM)
4189 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
4190 VOIDmode, 3, XEXP (target, 0), Pmode,
4191 XEXP (temp, 0), Pmode,
4192 expr_size (exp), TYPE_MODE (sizetype));
4194 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
4195 VOIDmode, 3, XEXP (target, 0), Pmode,
4196 expr_size (exp), TYPE_MODE (sizetype),
4197 GEN_INT (MEMORY_USE_WO),
4198 TYPE_MODE (integer_type_node));
4199 in_check_memory_usage = 0;
4202 /* If value was not generated in the target, store it there.
4203 Convert the value to TARGET's type first if necessary. */
4204 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
4205 one or both of them are volatile memory refs, we have to distinguish
4207 - expand_expr has used TARGET. In this case, we must not generate
4208 another copy. This can be detected by TARGET being equal according
4209 to ==.
4210 - expand_expr has not used TARGET - that means that the source just
4211 happens to have the same RTX form. Since temp will have been created
4212 by expand_expr, it will compare unequal according to == .
4213 We must generate a copy in this case, to reach the correct number
4214 of volatile memory references. */
4216 if ((! rtx_equal_p (temp, target)
4217 || (temp != target && (side_effects_p (temp)
4218 || side_effects_p (target))))
4219 && TREE_CODE (exp) != ERROR_MARK
4220 && ! dont_store_target)
4222 target = protect_from_queue (target, 1);
4223 if (GET_MODE (temp) != GET_MODE (target)
4224 && GET_MODE (temp) != VOIDmode)
4226 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4227 if (dont_return_target)
4229 /* In this case, we will return TEMP,
4230 so make sure it has the proper mode.
4231 But don't forget to store the value into TARGET. */
4232 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4233 emit_move_insn (target, temp);
4236 convert_move (target, temp, unsignedp);
4239 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4241 /* Handle copying a string constant into an array.
4242 The string constant may be shorter than the array.
4243 So copy just the string's actual length, and clear the rest. */
4247 /* Get the size of the data type of the string,
4248 which is actually the size of the target. */
4249 size = expr_size (exp);
4250 if (GET_CODE (size) == CONST_INT
4251 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4252 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
4255 /* Compute the size of the data to copy from the string. */
4257 = size_binop (MIN_EXPR,
4258 make_tree (sizetype, size),
4259 size_int (TREE_STRING_LENGTH (exp)));
4260 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
4261 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4265 /* Copy that much. */
4266 emit_block_move (target, temp, copy_size_rtx,
4267 TYPE_ALIGN (TREE_TYPE (exp)));
4269 /* Figure out how much is left in TARGET that we have to clear.
4270 Do all calculations in ptr_mode. */
4272 addr = XEXP (target, 0);
4273 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4275 if (GET_CODE (copy_size_rtx) == CONST_INT)
4277 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4278 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4280 (unsigned int) (BITS_PER_UNIT
4281 * (INTVAL (copy_size_rtx)
4282 & - INTVAL (copy_size_rtx))));
4286 addr = force_reg (ptr_mode, addr);
4287 addr = expand_binop (ptr_mode, add_optab, addr,
4288 copy_size_rtx, NULL_RTX, 0,
4291 size = expand_binop (ptr_mode, sub_optab, size,
4292 copy_size_rtx, NULL_RTX, 0,
4295 align = BITS_PER_UNIT;
4296 label = gen_label_rtx ();
4297 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4298 GET_MODE (size), 0, 0, label);
4300 align = MIN (align, expr_align (copy_size));
4302 if (size != const0_rtx)
4304 rtx dest = gen_rtx_MEM (BLKmode, addr);
4306 MEM_COPY_ATTRIBUTES (dest, target);
4308 /* Be sure we can write on ADDR. */
4309 in_check_memory_usage = 1;
4310 if (current_function_check_memory_usage)
4311 emit_library_call (chkr_check_addr_libfunc,
4312 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4314 size, TYPE_MODE (sizetype),
4315 GEN_INT (MEMORY_USE_WO),
4316 TYPE_MODE (integer_type_node));
4317 in_check_memory_usage = 0;
4318 clear_storage (dest, size, align);
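/* To illustrate the effect (a sketch, not compiler code): for a C
   initializer such as

       char buf[8] = "abc";

   the string constant, including its terminating NUL, is 4 bytes while
   the array is 8, so the code above amounts to

       memcpy (buf, "abc", 4);
       memset (buf + 4, 0, 4);

   with the clearing step skipped when the copied size already covers
   the whole target.  */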
4325 /* Handle calls that return values in multiple non-contiguous locations.
4326 The Irix 6 ABI has examples of this. */
4327 else if (GET_CODE (target) == PARALLEL)
4328 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4329 TYPE_ALIGN (TREE_TYPE (exp)));
4330 else if (GET_MODE (temp) == BLKmode)
4331 emit_block_move (target, temp, expr_size (exp),
4332 TYPE_ALIGN (TREE_TYPE (exp)));
4334 emit_move_insn (target, temp);
4337 /* If we don't want a value, return NULL_RTX. */
4341 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4342 ??? The latter test doesn't seem to make sense. */
4343 else if (dont_return_target && GET_CODE (temp) != MEM)
4346 /* Return TARGET itself if it is a hard register. */
4347 else if (want_value && GET_MODE (target) != BLKmode
4348 && ! (GET_CODE (target) == REG
4349 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4350 return copy_to_reg (target);
4356 /* Return 1 if EXP just contains zeros. */
4364 switch (TREE_CODE (exp))
4368 case NON_LVALUE_EXPR:
4369 return is_zeros_p (TREE_OPERAND (exp, 0));
4372 return integer_zerop (exp);
4376 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4379 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4382 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4383 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4384 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4385 if (! is_zeros_p (TREE_VALUE (elt)))
4395 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
4398 mostly_zeros_p (exp)
4401 if (TREE_CODE (exp) == CONSTRUCTOR)
4403 int elts = 0, zeros = 0;
4404 tree elt = CONSTRUCTOR_ELTS (exp);
4405 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4407 /* If there are no ranges of true bits, it is all zero. */
4408 return elt == NULL_TREE;
4410 for (; elt; elt = TREE_CHAIN (elt))
4412 /* We do not handle the case where the index is a RANGE_EXPR,
4413 so the statistic will be somewhat inaccurate.
4414 We do make a more accurate count in store_constructor itself,
4415 and since this function is only used for nested array elements,
4416 this should be close enough. */
4417 if (mostly_zeros_p (TREE_VALUE (elt)))
4422 return 4 * zeros >= 3 * elts;
4425 return is_zeros_p (exp);
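/* A standalone sketch of the 3/4 test used above (for illustration
   only; not used by the compiler): the comparison
   `4 * zeros >= 3 * elts' is `zeros / elts >= 3/4' rearranged to stay
   in integer arithmetic.  */
#if 0
#include <assert.h>

static int
example_mostly_zeros (int zeros, int elts)
{
  return 4 * zeros >= 3 * elts;
}

static void
example_mostly_zeros_check (void)
{
  assert (example_mostly_zeros (3, 4));   /* Exactly 3/4 qualifies.  */
  assert (! example_mostly_zeros (2, 4)); /* 1/2 does not.  */
}
#endif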
4428 /* Helper function for store_constructor.
4429 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4430 TYPE is the type of the CONSTRUCTOR, not the element type.
4431 ALIGN and CLEARED are as for store_constructor.
4432 ALIAS_SET is the alias set to use for any stores.
4434 This provides a recursive shortcut back to store_constructor when it isn't
4435 necessary to go through store_field. This is so that we can pass through
4436 the cleared field to let store_constructor know that we may not have to
4437 clear a substructure if the outer structure has already been cleared. */
4440 store_constructor_field (target, bitsize, bitpos,
4441 mode, exp, type, align, cleared, alias_set)
4443 unsigned HOST_WIDE_INT bitsize;
4444 HOST_WIDE_INT bitpos;
4445 enum machine_mode mode;
4451 if (TREE_CODE (exp) == CONSTRUCTOR
4452 && bitpos % BITS_PER_UNIT == 0
4453 /* If we have a non-zero bitpos for a register target, then we just
4454 let store_field do the bitfield handling. This is unlikely to
4455 generate unnecessary clear instructions anyway. */
4456 && (bitpos == 0 || GET_CODE (target) == MEM))
4460 = change_address (target,
4461 GET_MODE (target) == BLKmode
4463 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4464 ? BLKmode : VOIDmode,
4465 plus_constant (XEXP (target, 0),
4466 bitpos / BITS_PER_UNIT));
4469 /* Show the alignment may no longer be what it was and update the alias
4470 set, if required. */
4472 align = MIN (align, (unsigned int) bitpos & - bitpos);
4473 if (GET_CODE (target) == MEM)
4474 MEM_ALIAS_SET (target) = alias_set;
4476 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4479 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4480 int_size_in_bytes (type), alias_set);
4483 /* Store the value of constructor EXP into the rtx TARGET.
4484 TARGET is either a REG or a MEM.
4485 ALIGN is the maximum known alignment for TARGET.
4486 CLEARED is true if TARGET is known to have been zero'd.
4487 SIZE is the number of bytes of TARGET we are allowed to modify: this
4488 may not be the same as the size of EXP if we are assigning to a field
4489 which has been packed to exclude padding bits. */
4492 store_constructor (exp, target, align, cleared, size)
4499 tree type = TREE_TYPE (exp);
4500 #ifdef WORD_REGISTER_OPERATIONS
4501 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4504 /* We know our target cannot conflict, since safe_from_p has been called. */
4506 /* Don't try copying piece by piece into a hard register
4507 since that is vulnerable to being clobbered by EXP.
4508 Instead, construct in a pseudo register and then copy it all. */
4509 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4511 rtx temp = gen_reg_rtx (GET_MODE (target));
4512 store_constructor (exp, temp, align, cleared, size);
4513 emit_move_insn (target, temp);
4518 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4519 || TREE_CODE (type) == QUAL_UNION_TYPE)
4523 /* Inform later passes that the whole union value is dead. */
4524 if ((TREE_CODE (type) == UNION_TYPE
4525 || TREE_CODE (type) == QUAL_UNION_TYPE)
4528 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4530 /* If the constructor is empty, clear the union. */
4531 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4532 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4535 /* If we are building a static constructor into a register,
4536 set the initial value as zero so we can fold the value into
4537 a constant. But if more than one register is involved,
4538 this probably loses. */
4539 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4540 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4543 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4548 /* If the constructor has fewer fields than the structure
4549 or if we are initializing the structure to mostly zeros,
4550 clear the whole structure first. Don't do this if TARGET is a
4551 register whose mode size isn't equal to SIZE since clear_storage
4552 can't handle this case. */
4554 && ((list_length (CONSTRUCTOR_ELTS (exp))
4555 != fields_length (type))
4556 || mostly_zeros_p (exp))
4557 && (GET_CODE (target) != REG
4558 || (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) == size))
4561 clear_storage (target, GEN_INT (size), align);
4566 /* Inform later passes that the old value is dead. */
4567 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4569 /* Store each element of the constructor into
4570 the corresponding field of TARGET. */
4572 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4574 register tree field = TREE_PURPOSE (elt);
4575 #ifdef WORD_REGISTER_OPERATIONS
4576 tree value = TREE_VALUE (elt);
4578 register enum machine_mode mode;
4579 HOST_WIDE_INT bitsize;
4580 HOST_WIDE_INT bitpos = 0;
4583 rtx to_rtx = target;
4585 /* Just ignore missing fields.
4586 We cleared the whole structure, above,
4587 if any fields are missing. */
4591 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4594 if (host_integerp (DECL_SIZE (field), 1))
4595 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4599 unsignedp = TREE_UNSIGNED (field);
4600 mode = DECL_MODE (field);
4601 if (DECL_BIT_FIELD (field))
4604 offset = DECL_FIELD_OFFSET (field);
4605 if (host_integerp (offset, 0)
4606 && host_integerp (bit_position (field), 0))
4608 bitpos = int_bit_position (field);
4612 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4618 if (contains_placeholder_p (offset))
4619 offset = build (WITH_RECORD_EXPR, sizetype,
4620 offset, make_tree (TREE_TYPE (exp), target));
4622 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4623 if (GET_CODE (to_rtx) != MEM)
4626 if (GET_MODE (offset_rtx) != ptr_mode)
4628 #ifdef POINTERS_EXTEND_UNSIGNED
4629 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4631 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4636 = change_address (to_rtx, VOIDmode,
4637 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4638 force_reg (ptr_mode,
4640 align = DECL_OFFSET_ALIGN (field);
4643 if (TREE_READONLY (field))
4645 if (GET_CODE (to_rtx) == MEM)
4646 to_rtx = copy_rtx (to_rtx);
4648 RTX_UNCHANGING_P (to_rtx) = 1;
4651 #ifdef WORD_REGISTER_OPERATIONS
4652 /* If this initializes a field that is smaller than a word, at the
4653 start of a word, try to widen it to a full word.
4654 This special case allows us to output C++ member function
4655 initializations in a form that the optimizers can understand. */
4656 if (GET_CODE (target) == REG
4657 && bitsize < BITS_PER_WORD
4658 && bitpos % BITS_PER_WORD == 0
4659 && GET_MODE_CLASS (mode) == MODE_INT
4660 && TREE_CODE (value) == INTEGER_CST
4662 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4664 tree type = TREE_TYPE (value);
4665 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4667 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4668 value = convert (type, value);
4670 if (BYTES_BIG_ENDIAN)
4672 = fold (build (LSHIFT_EXPR, type, value,
4673 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4674 bitsize = BITS_PER_WORD;
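/* For example (illustration only): with BITS_PER_WORD == 32, an 8-bit
   field at bit position 0 holding 0x12 is widened to
   0x12 << (32 - 8) == 0x12000000 on a big-endian target, where the
   field occupies the most significant bits of the word; on a
   little-endian target 0x00000012 is stored unshifted.  */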
4678 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4679 TREE_VALUE (elt), type, align, cleared,
4680 (DECL_NONADDRESSABLE_P (field)
4681 && GET_CODE (to_rtx) == MEM)
4682 ? MEM_ALIAS_SET (to_rtx)
4683 : get_alias_set (TREE_TYPE (field)));
4686 else if (TREE_CODE (type) == ARRAY_TYPE)
4691 tree domain = TYPE_DOMAIN (type);
4692 tree elttype = TREE_TYPE (type);
4693 int const_bounds_p = (host_integerp (TYPE_MIN_VALUE (domain), 0)
4694 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4695 HOST_WIDE_INT minelt;
4696 HOST_WIDE_INT maxelt;
4698 /* If we have constant bounds for the range of the type, get them. */
4701 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4702 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4705 /* If the constructor has fewer elements than the array,
4706 clear the whole array first. Similarly if this is
4707 a static constructor of a non-BLKmode object. */
4708 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4712 HOST_WIDE_INT count = 0, zero_count = 0;
4713 need_to_clear = ! const_bounds_p;
4715 /* This loop is a more accurate version of the loop in
4716 mostly_zeros_p (it handles RANGE_EXPR in an index).
4717 It is also needed to check for missing elements. */
4718 for (elt = CONSTRUCTOR_ELTS (exp);
4719 elt != NULL_TREE && ! need_to_clear;
4720 elt = TREE_CHAIN (elt))
4722 tree index = TREE_PURPOSE (elt);
4723 HOST_WIDE_INT this_node_count;
4725 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4727 tree lo_index = TREE_OPERAND (index, 0);
4728 tree hi_index = TREE_OPERAND (index, 1);
4730 if (! host_integerp (lo_index, 1)
4731 || ! host_integerp (hi_index, 1))
4737 this_node_count = (tree_low_cst (hi_index, 1)
4738 - tree_low_cst (lo_index, 1) + 1);
4741 this_node_count = 1;
4743 count += this_node_count;
4744 if (mostly_zeros_p (TREE_VALUE (elt)))
4745 zero_count += this_node_count;
4748 /* Clear the entire array first if there are any missing elements,
4749 or if the incidence of zero elements is >= 75%. */
4751 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4755 if (need_to_clear && size > 0)
4758 clear_storage (target, GEN_INT (size), align);
4762 /* Inform later passes that the old value is dead. */
4763 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4765 /* Store each element of the constructor into
4766 the corresponding element of TARGET, determined
4767 by counting the elements. */
4768 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4770 elt = TREE_CHAIN (elt), i++)
4772 register enum machine_mode mode;
4773 HOST_WIDE_INT bitsize;
4774 HOST_WIDE_INT bitpos;
4776 tree value = TREE_VALUE (elt);
4777 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4778 tree index = TREE_PURPOSE (elt);
4779 rtx xtarget = target;
4781 if (cleared && is_zeros_p (value))
4784 unsignedp = TREE_UNSIGNED (elttype);
4785 mode = TYPE_MODE (elttype);
4786 if (mode == BLKmode)
4787 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4788 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4791 bitsize = GET_MODE_BITSIZE (mode);
4793 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4795 tree lo_index = TREE_OPERAND (index, 0);
4796 tree hi_index = TREE_OPERAND (index, 1);
4797 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4798 struct nesting *loop;
4799 HOST_WIDE_INT lo, hi, count;
4802 /* If the range is constant and "small", unroll the loop. */
4804 && host_integerp (lo_index, 0)
4805 && host_integerp (hi_index, 0)
4806 && (lo = tree_low_cst (lo_index, 0),
4807 hi = tree_low_cst (hi_index, 0),
4808 count = hi - lo + 1,
4809 (GET_CODE (target) != MEM
4811 || (host_integerp (TYPE_SIZE (elttype), 1)
4812 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4815 lo -= minelt; hi -= minelt;
4816 for (; lo <= hi; lo++)
4818 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4819 store_constructor_field
4820 (target, bitsize, bitpos, mode, value, type, align,
4822 TYPE_NONALIASED_COMPONENT (type)
4823 ? MEM_ALIAS_SET (target) : get_alias_set (elttype));
4828 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4829 loop_top = gen_label_rtx ();
4830 loop_end = gen_label_rtx ();
4832 unsignedp = TREE_UNSIGNED (domain);
4834 index = build_decl (VAR_DECL, NULL_TREE, domain);
4837 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4839 SET_DECL_RTL (index, index_r);
4840 if (TREE_CODE (value) == SAVE_EXPR
4841 && SAVE_EXPR_RTL (value) == 0)
4843 /* Make sure value gets expanded once before the loop. */
4845 expand_expr (value, const0_rtx, VOIDmode, 0);
4848 store_expr (lo_index, index_r, 0);
4849 loop = expand_start_loop (0);
4851 /* Assign value to element index. */
4853 = convert (ssizetype,
4854 fold (build (MINUS_EXPR, TREE_TYPE (index),
4855 index, TYPE_MIN_VALUE (domain))));
4856 position = size_binop (MULT_EXPR, position,
4858 TYPE_SIZE_UNIT (elttype)));
4860 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4861 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4862 xtarget = change_address (target, mode, addr);
4863 if (TREE_CODE (value) == CONSTRUCTOR)
4864 store_constructor (value, xtarget, align, cleared,
4865 bitsize / BITS_PER_UNIT);
4867 store_expr (value, xtarget, 0);
4869 expand_exit_loop_if_false (loop,
4870 build (LT_EXPR, integer_type_node,
4873 expand_increment (build (PREINCREMENT_EXPR,
4875 index, integer_one_node), 0, 0);
4877 emit_label (loop_end);
4880 else if ((index != 0 && ! host_integerp (index, 0))
4881 || ! host_integerp (TYPE_SIZE (elttype), 1))
4887 index = ssize_int (1);
4890 index = convert (ssizetype,
4891 fold (build (MINUS_EXPR, index,
4892 TYPE_MIN_VALUE (domain))));
4894 position = size_binop (MULT_EXPR, index,
4896 TYPE_SIZE_UNIT (elttype)));
4897 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4898 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4899 xtarget = change_address (target, mode, addr);
4900 store_expr (value, xtarget, 0);
4905 bitpos = ((tree_low_cst (index, 0) - minelt)
4906 * tree_low_cst (TYPE_SIZE (elttype), 1));
4908 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4910 store_constructor_field (target, bitsize, bitpos, mode, value,
4911 type, align, cleared,
4912 TYPE_NONALIASED_COMPONENT (type)
4913 && GET_CODE (target) == MEM
4914 ? MEM_ALIAS_SET (target) :
4915 get_alias_set (elttype));
4921 /* Set constructor assignments. */
4922 else if (TREE_CODE (type) == SET_TYPE)
4924 tree elt = CONSTRUCTOR_ELTS (exp);
4925 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4926 tree domain = TYPE_DOMAIN (type);
4927 tree domain_min, domain_max, bitlength;
4929 /* The default implementation strategy is to extract the constant
4930 parts of the constructor, use that to initialize the target,
4931 and then "or" in whatever non-constant ranges we need in addition.
4933 If a large set is all zero or all ones, it is
4934 probably better to set it using memset (if available) or bzero.
4935 Also, if a large set has just a single range, it may be better
4936 to first clear the set (using bzero/memset) and then set the
4937 bits we want. */
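/* A worked illustration of this strategy (not compiler code): for a
   set over the domain 0..31, the constructor { [2..5] } is entirely
   constant, so its bits form the single word

       ((1 << 4) - 1) << 2  ==  0x0000003c

   which can be stored with one move; a non-constant range such as
   [i..j] would instead be OR'd into the target afterwards.  */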
4939 /* Check for all zeros. */
4940 if (elt == NULL_TREE && size > 0)
4943 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4947 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4948 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4949 bitlength = size_binop (PLUS_EXPR,
4950 size_diffop (domain_max, domain_min),
4953 nbits = tree_low_cst (bitlength, 1);
4955 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4956 are "complicated" (more than one range), initialize (the
4957 constant parts) by copying from a constant. */
4958 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4959 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4961 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4962 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4963 char *bit_buffer = (char *) alloca (nbits);
4964 HOST_WIDE_INT word = 0;
4965 unsigned int bit_pos = 0;
4966 unsigned int ibit = 0;
4967 unsigned int offset = 0; /* In bytes from beginning of set. */
4969 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4972 if (bit_buffer[ibit])
4974 if (BYTES_BIG_ENDIAN)
4975 word |= (1 << (set_word_size - 1 - bit_pos));
4977 word |= 1 << bit_pos;
4981 if (bit_pos >= set_word_size || ibit == nbits)
4983 if (word != 0 || ! cleared)
4985 rtx datum = GEN_INT (word);
4988 /* The assumption here is that it is safe to use
4989 XEXP if the set is multi-word, but not if
4990 it's single-word. */
4991 if (GET_CODE (target) == MEM)
4993 to_rtx = plus_constant (XEXP (target, 0), offset);
4994 to_rtx = change_address (target, mode, to_rtx);
4996 else if (offset == 0)
5000 emit_move_insn (to_rtx, datum);
5007 offset += set_word_size / BITS_PER_UNIT;
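/* Illustration (assuming set_word_size == 32): with only bits 0 and 2
   of the set true, the word just stored is 0x00000005 when
   ! BYTES_BIG_ENDIAN but 0xa0000000 when BYTES_BIG_ENDIAN, since bit 0
   then maps to the most significant position.  */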
5012 /* Don't bother clearing storage if the set is all ones. */
5013 if (TREE_CHAIN (elt) != NULL_TREE
5014 || (TREE_PURPOSE (elt) == NULL_TREE
5016 : ( ! host_integerp (TREE_VALUE (elt), 0)
5017 || ! host_integerp (TREE_PURPOSE (elt), 0)
5018 || (tree_low_cst (TREE_VALUE (elt), 0)
5019 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5020 != (HOST_WIDE_INT) nbits))))
5021 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
5023 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5025 /* Start of range of element or NULL. */
5026 tree startbit = TREE_PURPOSE (elt);
5027 /* End of range of element, or element value. */
5028 tree endbit = TREE_VALUE (elt);
5029 #ifdef TARGET_MEM_FUNCTIONS
5030 HOST_WIDE_INT startb, endb;
5032 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5034 bitlength_rtx = expand_expr (bitlength,
5035 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5037 /* Handle non-range tuple element like [ expr ]. */
5038 if (startbit == NULL_TREE)
5040 startbit = save_expr (endbit);
5044 startbit = convert (sizetype, startbit);
5045 endbit = convert (sizetype, endbit);
5046 if (! integer_zerop (domain_min))
5048 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5049 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5051 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5052 EXPAND_CONST_ADDRESS);
5053 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5054 EXPAND_CONST_ADDRESS);
5060 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
5063 emit_move_insn (targetx, target);
5066 else if (GET_CODE (target) == MEM)
5071 #ifdef TARGET_MEM_FUNCTIONS
5072 /* Optimization: If startbit and endbit are
5073 constants divisible by BITS_PER_UNIT,
5074 call memset instead. */
5075 if (TREE_CODE (startbit) == INTEGER_CST
5076 && TREE_CODE (endbit) == INTEGER_CST
5077 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5078 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5080 emit_library_call (memset_libfunc, LCT_NORMAL,
5082 plus_constant (XEXP (targetx, 0),
5083 startb / BITS_PER_UNIT),
5085 constm1_rtx, TYPE_MODE (integer_type_node),
5086 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5087 TYPE_MODE (sizetype));
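/* For instance (illustration only, with BITS_PER_UNIT == 8): the range
   [8..23] gives startb == 8 and endb == 24, both multiples of 8, so
   the two bytes at byte offset 1 are set by the equivalent of
   memset (ptr + 1, -1, 2) rather than by a __setbits call.  */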
5091 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5092 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5093 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5094 startbit_rtx, TYPE_MODE (sizetype),
5095 endbit_rtx, TYPE_MODE (sizetype));
5098 emit_move_insn (target, targetx);
5106 /* Store the value of EXP (an expression tree)
5107 into a subfield of TARGET which has mode MODE and occupies
5108 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5109 If MODE is VOIDmode, it means that we are storing into a bit-field.
5111 If VALUE_MODE is VOIDmode, return nothing in particular.
5112 UNSIGNEDP is not used in this case.
5114 Otherwise, return an rtx for the value stored. This rtx
5115 has mode VALUE_MODE if that is convenient to do.
5116 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5118 ALIGN is the alignment that TARGET is known to have.
5119 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
5121 ALIAS_SET is the alias set for the destination. This value will
5122 (in general) be different from that for TARGET, since TARGET is a
5123 reference to the containing structure. */
5126 store_field (target, bitsize, bitpos, mode, exp, value_mode,
5127 unsignedp, align, total_size, alias_set)
5129 HOST_WIDE_INT bitsize;
5130 HOST_WIDE_INT bitpos;
5131 enum machine_mode mode;
5133 enum machine_mode value_mode;
5136 HOST_WIDE_INT total_size;
5139 HOST_WIDE_INT width_mask = 0;
5141 if (TREE_CODE (exp) == ERROR_MARK)
5144 /* If we have nothing to store, do nothing unless the expression has side-effects. */
5147 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5149 if (bitsize < HOST_BITS_PER_WIDE_INT)
5150 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5152 /* If we are storing into an unaligned field of an aligned union that is
5153 in a register, we may have the mode of TARGET being an integer mode but
5154 MODE == BLKmode. In that case, get an aligned object whose size and
5155 alignment are the same as TARGET and store TARGET into it (we can avoid
5156 the store if the field being stored is the entire width of TARGET). Then
5157 call ourselves recursively to store the field into a BLKmode version of
5158 that object. Finally, load from the object into TARGET. This is not
5159 very efficient in general, but should only be slightly more expensive
5160 than the otherwise-required unaligned accesses. Perhaps this can be
5161 cleaned up later. */
5164 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5168 (build_qualified_type (type_for_mode (GET_MODE (target), 0),
5171 rtx blk_object = copy_rtx (object);
5173 PUT_MODE (blk_object, BLKmode);
5175 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5176 emit_move_insn (object, target);
5178 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
5179 align, total_size, alias_set);
5181 /* Even though we aren't returning target, we need to
5182 give it the updated value. */
5183 emit_move_insn (target, object);
5188 if (GET_CODE (target) == CONCAT)
5190 /* We're storing into a struct containing a single __complex. */
5194 return store_expr (exp, target, 0);
5197 /* If the structure is in a register or if the component
5198 is a bit field, we cannot use addressing to access it.
5199 Use bit-field techniques or SUBREG to store in it. */
5201 if (mode == VOIDmode
5202 || (mode != BLKmode && ! direct_store[(int) mode]
5203 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5204 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5205 || GET_CODE (target) == REG
5206 || GET_CODE (target) == SUBREG
5207 /* If the field isn't aligned enough to store as an ordinary memref,
5208 store it as a bit field. */
5209 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5210 && (align < GET_MODE_ALIGNMENT (mode)
5211 || bitpos % GET_MODE_ALIGNMENT (mode)))
5212 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5213 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
5214 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
5215 /* If the RHS and field are a constant size and the size of the
5216 RHS isn't the same size as the bitfield, we must use bitfield operations. */
5219 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5220 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5222 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5224 /* If BITSIZE is narrower than the size of the type of EXP
5225 we will be narrowing TEMP. Normally, what's wanted are the
5226 low-order bits. However, if EXP's type is a record and this is
5227 a big-endian machine, we want the upper BITSIZE bits. */
5228 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5229 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5230 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5231 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5232 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5236 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
5238 if (mode != VOIDmode && mode != BLKmode
5239 && mode != TYPE_MODE (TREE_TYPE (exp)))
5240 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5242 /* If the modes of TARGET and TEMP are both BLKmode, both
5243 must be in memory and BITPOS must be aligned on a byte
5244 boundary. If so, we simply do a block copy. */
5245 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5247 unsigned int exp_align = expr_align (exp);
5249 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5250 || bitpos % BITS_PER_UNIT != 0)
5253 target = change_address (target, VOIDmode,
5254 plus_constant (XEXP (target, 0),
5255 bitpos / BITS_PER_UNIT));
5257 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
5258 align = MIN (exp_align, align);
5260 /* Find an alignment that is consistent with the bit position. */
5261 while ((bitpos % align) != 0)
5264 emit_block_move (target, temp,
5265 bitsize == -1 ? expr_size (exp)
5266 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5270 return value_mode == VOIDmode ? const0_rtx : target;
5273 /* Store the value in the bitfield. */
5274 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
5275 if (value_mode != VOIDmode)
5277 /* The caller wants an rtx for the value. */
5278 /* If possible, avoid refetching from the bitfield itself. */
5280 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5283 enum machine_mode tmode;
5286 return expand_and (temp,
5290 GET_MODE (temp) == VOIDmode
5292 : GET_MODE (temp))), NULL_RTX);
5293 tmode = GET_MODE (temp);
5294 if (tmode == VOIDmode)
5296 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5297 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5298 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
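/* E.g. (illustration only): refetching a signed 8-bit field from a
   32-bit TEMP shifts left by 24 and then right by 24, replicating the
   field's sign bit through the upper bits of the result.  */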
5300 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5301 NULL_RTX, value_mode, 0, align,
5308 rtx addr = XEXP (target, 0);
5311 /* If a value is wanted, it must be the lhs;
5312 so make the address stable for multiple use. */
5314 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5315 && ! CONSTANT_ADDRESS_P (addr)
5316 /* A frame-pointer reference is already stable. */
5317 && ! (GET_CODE (addr) == PLUS
5318 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5319 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5320 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5321 addr = copy_to_reg (addr);
5323 /* Now build a reference to just the desired component. */
5325 to_rtx = copy_rtx (change_address (target, mode,
5326 plus_constant (addr,
5328 / BITS_PER_UNIT))));
5329 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5330 /* If the address of the structure varies, then it might be on
5331 the stack. And, stack slots may be shared across scopes.
5332 So, two different structures, of different types, can end up
5333 at the same location. We will give the structures alias set
5334 zero; here we must be careful not to give non-zero alias sets to their pieces. */
5336 if (!rtx_varies_p (addr, /*for_alias=*/0))
5337 MEM_ALIAS_SET (to_rtx) = alias_set;
5339 MEM_ALIAS_SET (to_rtx) = 0;
5341 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5345 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5346 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5347 codes and find the ultimate containing object, which we return.
5349 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5350 bit position, and *PUNSIGNEDP to the signedness of the field.
5351 If the position of the field is variable, we store a tree
5352 giving the variable offset (in units) in *POFFSET.
5353 This offset is in addition to the bit position.
5354 If the position is not variable, we store 0 in *POFFSET.
5355 We set *PALIGNMENT to the alignment of the address that will be
5356 computed. This is the alignment of the thing we return if *POFFSET
5357 is zero, but can be less strictly aligned if *POFFSET is nonzero.
5359 If any of the extraction expressions is volatile,
5360 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5362 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5363 is a mode that can be used to access the field. In that case, *PBITSIZE is redundant.
5366 If the field describes a variable-sized object, *PMODE is set to
5367 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5368 this case, but the address of the object can be found. */
5371 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5372 punsignedp, pvolatilep, palignment)
5374 HOST_WIDE_INT *pbitsize;
5375 HOST_WIDE_INT *pbitpos;
5377 enum machine_mode *pmode;
5380 unsigned int *palignment;
5383 enum machine_mode mode = VOIDmode;
5384 tree offset = size_zero_node;
5385 tree bit_offset = bitsize_zero_node;
5386 unsigned int alignment = BIGGEST_ALIGNMENT;
5389 /* First get the mode, signedness, and size. We do this from just the
5390 outermost expression. */
5391 if (TREE_CODE (exp) == COMPONENT_REF)
5393 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5394 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5395 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5397 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5399 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5401 size_tree = TREE_OPERAND (exp, 1);
5402 *punsignedp = TREE_UNSIGNED (exp);
5406 mode = TYPE_MODE (TREE_TYPE (exp));
5407 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5409 if (mode == BLKmode)
5410 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5412 *pbitsize = GET_MODE_BITSIZE (mode);
5417 if (! host_integerp (size_tree, 1))
5418 mode = BLKmode, *pbitsize = -1;
5420 *pbitsize = tree_low_cst (size_tree, 1);
5423 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5424 and find the ultimate containing object. */
5427 if (TREE_CODE (exp) == BIT_FIELD_REF)
5428 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5429 else if (TREE_CODE (exp) == COMPONENT_REF)
5431 tree field = TREE_OPERAND (exp, 1);
5432 tree this_offset = DECL_FIELD_OFFSET (field);
5434 /* If this field hasn't been filled in yet, don't go
5435 past it. This should only happen when folding expressions
5436 made during type construction. */
5437 if (this_offset == 0)
5439 else if (! TREE_CONSTANT (this_offset)
5440 && contains_placeholder_p (this_offset))
5441 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5443 offset = size_binop (PLUS_EXPR, offset, this_offset);
5444 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5445 DECL_FIELD_BIT_OFFSET (field));
5447 if (! host_integerp (offset, 0))
5448 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5451 else if (TREE_CODE (exp) == ARRAY_REF
5452 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5454 tree index = TREE_OPERAND (exp, 1);
5455 tree array = TREE_OPERAND (exp, 0);
5456 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5457 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5458 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5460 /* We assume all arrays have sizes that are a multiple of a byte.
5461 First subtract the lower bound, if any, in the type of the
5462 index, then convert to sizetype and multiply by the size of the array element. */
5464 if (low_bound != 0 && ! integer_zerop (low_bound))
5465 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5468 /* If the index has a self-referential type, wrap it in a
5469 WITH_RECORD_EXPR; if the component size does, wrap our
5470 array in one. */
5471 if (! TREE_CONSTANT (index)
5472 && contains_placeholder_p (index))
5473 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5474 if (! TREE_CONSTANT (unit_size)
5475 && contains_placeholder_p (unit_size))
5476 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5478 offset = size_binop (PLUS_EXPR, offset,
5479 size_binop (MULT_EXPR,
5480 convert (sizetype, index),
5484 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5485 && ! ((TREE_CODE (exp) == NOP_EXPR
5486 || TREE_CODE (exp) == CONVERT_EXPR)
5487 && (TYPE_MODE (TREE_TYPE (exp))
5488 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5491 /* If any reference in the chain is volatile, the effect is volatile. */
5492 if (TREE_THIS_VOLATILE (exp))
5495 /* If the offset is non-constant already, then we can't assume any
5496 alignment more than the alignment here. */
5497 if (! TREE_CONSTANT (offset))
5498 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5500 exp = TREE_OPERAND (exp, 0);
5504 alignment = MIN (alignment, DECL_ALIGN (exp));
5505 else if (TREE_TYPE (exp) != 0)
5506 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5508 /* If OFFSET is constant, see if we can return the whole thing as a
5509 constant bit position. Otherwise, split it up. */
5510 if (host_integerp (offset, 0)
5511 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5513 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5514 && host_integerp (tem, 0))
5515 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5517 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5520 *palignment = alignment;
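/* A hypothetical caller (sketch only; EXP and the locals below are
   placeholders) would decompose a reference such as `s.f' this way:  */
#if 0
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode1;
  int unsignedp, volatilep = 0;
  unsigned int alignment;
  tree inner;

  inner = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
                               &unsignedp, &volatilep, &alignment);

  if (offset == 0)
    /* The field lies BITPOS bits into INNER and is BITSIZE bits wide.  */
    ;
}
#endif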
5524 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5526 static enum memory_use_mode
5527 get_memory_usage_from_modifier (modifier)
5528 enum expand_modifier modifier;
5534 return MEMORY_USE_RO;
5536 case EXPAND_MEMORY_USE_WO:
5537 return MEMORY_USE_WO;
5539 case EXPAND_MEMORY_USE_RW:
5540 return MEMORY_USE_RW;
5542 case EXPAND_MEMORY_USE_DONT:
5543 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5544 MEMORY_USE_DONT, because they are modifiers to a call of
5545 expand_expr in the ADDR_EXPR case of expand_expr. */
5546 case EXPAND_CONST_ADDRESS:
5547 case EXPAND_INITIALIZER:
5548 return MEMORY_USE_DONT;
5549 case EXPAND_MEMORY_USE_BAD:
5555 /* Given an rtx VALUE that may contain additions and multiplications, return
5556 an equivalent value that just refers to a register, memory, or constant.
5557 This is done by generating instructions to perform the arithmetic and
5558 returning a pseudo-register containing the value.
5560 The returned value may be a REG, SUBREG, MEM or constant. */
5563 force_operand (value, target)
5566 register optab binoptab = 0;
5567 /* Use a temporary to force order of execution of calls to `force_operand'. */
5571 /* Use subtarget as the target for operand 0 of a binary operation. */
5572 register rtx subtarget = get_subtarget (target);
5574 /* Check for a PIC address load. */
5576 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5577 && XEXP (value, 0) == pic_offset_table_rtx
5578 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5579 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5580 || GET_CODE (XEXP (value, 1)) == CONST))
5583 subtarget = gen_reg_rtx (GET_MODE (value));
5584 emit_move_insn (subtarget, value);
5588 if (GET_CODE (value) == PLUS)
5589 binoptab = add_optab;
5590 else if (GET_CODE (value) == MINUS)
5591 binoptab = sub_optab;
5592 else if (GET_CODE (value) == MULT)
5594 op2 = XEXP (value, 1);
5595 if (!CONSTANT_P (op2)
5596 && !(GET_CODE (op2) == REG && op2 != subtarget))
5598 tmp = force_operand (XEXP (value, 0), subtarget);
5599 return expand_mult (GET_MODE (value), tmp,
5600 force_operand (op2, NULL_RTX),
5606 op2 = XEXP (value, 1);
5607 if (!CONSTANT_P (op2)
5608 && !(GET_CODE (op2) == REG && op2 != subtarget))
5610 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5612 binoptab = add_optab;
5613 op2 = negate_rtx (GET_MODE (value), op2);
5616 /* Check for an addition with OP2 a constant integer and our first
5617 operand a PLUS of a virtual register and something else. In that
5618 case, we want to emit the sum of the virtual register and the
5619 constant first and then add the other value. This allows virtual
5620 register instantiation to simply modify the constant rather than
5621 creating another one around this addition. */
5622 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5623 && GET_CODE (XEXP (value, 0)) == PLUS
5624 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5625 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5626 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5628 rtx temp = expand_binop (GET_MODE (value), binoptab,
5629 XEXP (XEXP (value, 0), 0), op2,
5630 subtarget, 0, OPTAB_LIB_WIDEN);
5631 return expand_binop (GET_MODE (value), binoptab, temp,
5632 force_operand (XEXP (XEXP (value, 0), 1), 0),
5633 target, 0, OPTAB_LIB_WIDEN);
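/* Concretely (illustration only): given
   (plus (plus (reg virtual-stack-vars) (const_int 8)) (reg R))
   we first form virtual-stack-vars + 8, which instantiation can later
   rewrite by merely adjusting the constant, and only then add R.  */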
5636 tmp = force_operand (XEXP (value, 0), subtarget);
5637 return expand_binop (GET_MODE (value), binoptab, tmp,
5638 force_operand (op2, NULL_RTX),
5639 target, 0, OPTAB_LIB_WIDEN);
5640 /* We give UNSIGNEDP = 0 to expand_binop
5641 because the only operations we are expanding here are signed ones. */
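/* A typical use of force_operand (a sketch; the two pseudos are made
   up for the example): reduce an arithmetic rtx to a single register
   so it can be used directly as an operand.  */
#if 0
{
  rtx reg_a = gen_reg_rtx (SImode);
  rtx reg_b = gen_reg_rtx (SImode);
  rtx sum = gen_rtx_PLUS (SImode, reg_a, reg_b);

  /* SUM itself is not a register, memory, or constant; force_operand
     emits the addition and returns a pseudo holding the result.  */
  rtx op = force_operand (sum, NULL_RTX);
}
#endif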
5646 /* Subroutine of expand_expr:
5647 save the non-copied parts (LIST) of an expr (LHS), and return a list
5648 which can restore these values to their previous values,
5649 should something modify their storage. */
5652 save_noncopied_parts (lhs, list)
5659 for (tail = list; tail; tail = TREE_CHAIN (tail))
5660 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5661 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5664 tree part = TREE_VALUE (tail);
5665 tree part_type = TREE_TYPE (part);
5666 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5668 = assign_temp (build_qualified_type (part_type,
5669 (TYPE_QUALS (part_type)
5670 | TYPE_QUAL_CONST)),
5673 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5674 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5675 parts = tree_cons (to_be_saved,
5676 build (RTL_EXPR, part_type, NULL_TREE,
5679 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5684 /* Subroutine of expand_expr:
5685 record the non-copied parts (LIST) of an expr (LHS), and return a list
5686 which specifies the initial values of these parts. */
5689 init_noncopied_parts (lhs, list)
5696 for (tail = list; tail; tail = TREE_CHAIN (tail))
5697 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5698 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5699 else if (TREE_PURPOSE (tail))
5701 tree part = TREE_VALUE (tail);
5702 tree part_type = TREE_TYPE (part);
5703 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5704 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5709 /* Subroutine of expand_expr: return nonzero iff there is no way that
5710 EXP can reference X, which is being modified. TOP_P is nonzero if this
5711 call is going to be used to determine whether we need a temporary
5712 for EXP, as opposed to a recursive call to this function.
5714 It is always safe for this routine to return zero since it merely
5715 searches for optimization opportunities. */
5718 safe_from_p (x, exp, top_p)
5725 static tree save_expr_list;
5728 /* If EXP has varying size, we MUST use a target since we currently
5729 have no way of allocating temporaries of variable size
5730 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5731 So we assume here that something at a higher level has prevented a
5732 clash. This is somewhat bogus, but the best we can do. Only
5733 do this when X is BLKmode and when we are at the top level. */
5734 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5735 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5736 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5737 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5738 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5740 && GET_MODE (x) == BLKmode)
5741 /* If X is in the outgoing argument area, it is always safe. */
5742 || (GET_CODE (x) == MEM
5743 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5744 || (GET_CODE (XEXP (x, 0)) == PLUS
5745 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5748 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5749 find the underlying pseudo. */
5750 if (GET_CODE (x) == SUBREG)
5753 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5757 /* A SAVE_EXPR might appear many times in the expression passed to the
5758 top-level safe_from_p call, and if it has a complex subexpression,
5759 examining it multiple times could result in a combinatorial explosion.
5760 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5761 with optimization took about 28 minutes to compile -- even though it was
5762 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5763 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5764 we have processed. Note that the only test of top_p was above. */
5773 rtn = safe_from_p (x, exp, 0);
5775 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5776 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5781 /* Now look at our tree code and possibly recurse. */
5782 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5785 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5792 if (TREE_CODE (exp) == TREE_LIST)
5793 return ((TREE_VALUE (exp) == 0
5794 || safe_from_p (x, TREE_VALUE (exp), 0))
5795 && (TREE_CHAIN (exp) == 0
5796 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5797 else if (TREE_CODE (exp) == ERROR_MARK)
5798 return 1; /* An already-visited SAVE_EXPR? */
5803 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5807 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5808 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5812 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5813 the expression. If it is set, we conflict iff we are that rtx or
5814 both are in memory. Otherwise, we check all operands of the
5815 expression recursively. */
5817 switch (TREE_CODE (exp))
5820 return (staticp (TREE_OPERAND (exp, 0))
5821 || TREE_STATIC (exp)
5822 || safe_from_p (x, TREE_OPERAND (exp, 0), 0));
5825 if (GET_CODE (x) == MEM
5826 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5827 get_alias_set (exp)))
5832 /* Assume that the call will clobber all hard registers and all of memory. */
5834 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5835 || GET_CODE (x) == MEM)
5840 /* If a sequence exists, we would have to scan every instruction
5841 in the sequence to see if it was safe. This is probably not worthwhile. */
5843 if (RTL_EXPR_SEQUENCE (exp))
5846 exp_rtl = RTL_EXPR_RTL (exp);
5849 case WITH_CLEANUP_EXPR:
5850 exp_rtl = RTL_EXPR_RTL (exp);
5853 case CLEANUP_POINT_EXPR:
5854 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5857 exp_rtl = SAVE_EXPR_RTL (exp);
5861 /* If we've already scanned this, don't do it again. Otherwise,
5862 show we've scanned it and record for clearing the flag if we're going to do it again. */
5864 if (TREE_PRIVATE (exp))
5867 TREE_PRIVATE (exp) = 1;
5868 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5870 TREE_PRIVATE (exp) = 0;
5874 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5878 /* The only operand we look at is operand 1. The rest aren't
5879 part of the expression. */
5880 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5882 case METHOD_CALL_EXPR:
5883 /* This takes an rtx argument, but shouldn't appear here. */
5890 /* If we have an rtx, we do not need to scan our operands. */
5894 nops = first_rtl_op (TREE_CODE (exp));
5895 for (i = 0; i < nops; i++)
5896 if (TREE_OPERAND (exp, i) != 0
5897 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5900 /* If this is a language-specific tree code, it may require
5901 special handling. */
5902 if ((unsigned int) TREE_CODE (exp)
5903 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5905 && !(*lang_safe_from_p) (x, exp))
5909 /* If we have an rtl, find any enclosed object. Then see if we conflict with it. */
5913 if (GET_CODE (exp_rtl) == SUBREG)
5915 exp_rtl = SUBREG_REG (exp_rtl);
5916 if (GET_CODE (exp_rtl) == REG
5917 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5921 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5922 are memory and they conflict. */
5923 return ! (rtx_equal_p (x, exp_rtl)
5924 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5925 && true_dependence (exp_rtl, GET_MODE (x), x,
5926 rtx_addr_varies_p)));
5929 /* If we reach here, it is safe. */
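/* A typical use (sketch; TARGET and EXP are placeholders): a caller
   that wants to evaluate EXP directly into TARGET first checks that
   the evaluation cannot clobber TARGET, and otherwise lets expand_expr
   choose a fresh temporary.  */
#if 0
if (target != 0 && ! safe_from_p (target, exp, 1))
  target = 0;
#endif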
5933 /* Subroutine of expand_expr: return nonzero iff EXP is an
5934 expression whose type is statically determinable. */
5940 if (TREE_CODE (exp) == PARM_DECL
5941 || TREE_CODE (exp) == VAR_DECL
5942 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5943 || TREE_CODE (exp) == COMPONENT_REF
5944 || TREE_CODE (exp) == ARRAY_REF)
5949 /* Subroutine of expand_expr: return rtx if EXP is a
5950 variable or parameter; else return 0. */
5957 switch (TREE_CODE (exp))
5961 return DECL_RTL (exp);
5967 #ifdef MAX_INTEGER_COMPUTATION_MODE
5970 check_max_integer_computation_mode (exp)
5973 enum tree_code code;
5974 enum machine_mode mode;
5976 /* Strip any NOPs that don't change the mode. */
5978 code = TREE_CODE (exp);
5980 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5981 if (code == NOP_EXPR
5982 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5985 /* First check the type of the overall operation. We need only look at
5986 unary, binary and relational operations. */
5987 if (TREE_CODE_CLASS (code) == '1'
5988 || TREE_CODE_CLASS (code) == '2'
5989 || TREE_CODE_CLASS (code) == '<')
5991 mode = TYPE_MODE (TREE_TYPE (exp));
5992 if (GET_MODE_CLASS (mode) == MODE_INT
5993 && mode > MAX_INTEGER_COMPUTATION_MODE)
5994 internal_error ("unsupported wide integer operation");
5997 /* Check operand of a unary op. */
5998 if (TREE_CODE_CLASS (code) == '1')
6000 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6001 if (GET_MODE_CLASS (mode) == MODE_INT
6002 && mode > MAX_INTEGER_COMPUTATION_MODE)
6003 internal_error ("unsupported wide integer operation");
6006 /* Check operands of a binary/comparison op. */
6007 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6009 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6010 if (GET_MODE_CLASS (mode) == MODE_INT
6011 && mode > MAX_INTEGER_COMPUTATION_MODE)
6012 internal_error ("unsupported wide integer operation");
6014 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6015 if (GET_MODE_CLASS (mode) == MODE_INT
6016 && mode > MAX_INTEGER_COMPUTATION_MODE)
6017 internal_error ("unsupported wide integer operation");
6022 /* expand_expr: generate code for computing expression EXP.
6023 An rtx for the computed value is returned. The value is never null.
6024 In the case of a void EXP, const0_rtx is returned.
6026 The value may be stored in TARGET if TARGET is nonzero.
6027 TARGET is just a suggestion; callers must assume that
6028 the rtx returned may not be the same as TARGET.
6030 If TARGET is CONST0_RTX, it means that the value will be ignored.
6032 If TMODE is not VOIDmode, it suggests generating the
6033 result in mode TMODE. But this is done only when convenient.
6034 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6035 TMODE is just a suggestion; callers must assume that
6036 the rtx returned may not have mode TMODE.
6038 Note that TARGET may have neither TMODE nor MODE. In that case, it
6039 probably will not be used.
6041 If MODIFIER is EXPAND_SUM then when EXP is an addition
6042 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6043 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6044 products as above, or REG or MEM, or constant.
6045 Ordinarily in such cases we would output mul or add instructions
6046 and then return a pseudo reg containing the sum.
6048 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6049 it also marks a label as absolutely required (it can't be dead).
6050 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6051 This is used for outputting expressions used in initializers.
6053 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6054 with a constant address even if that address is not normally legitimate.
6055 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
6058 expand_expr (exp, target, tmode, modifier)
6061 enum machine_mode tmode;
6062 enum expand_modifier modifier;
6064 register rtx op0, op1, temp;
6065 tree type = TREE_TYPE (exp);
6066 int unsignedp = TREE_UNSIGNED (type);
6067 register enum machine_mode mode;
6068 register enum tree_code code = TREE_CODE (exp);
6070 rtx subtarget, original_target;
6073 /* Used by check-memory-usage to make modifier read only. */
6074 enum expand_modifier ro_modifier;
6076 /* Handle ERROR_MARK before anybody tries to access its type. */
6077 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6079 op0 = CONST0_RTX (tmode);
6085 mode = TYPE_MODE (type);
6086 /* Use subtarget as the target for operand 0 of a binary operation. */
6087 subtarget = get_subtarget (target);
6088 original_target = target;
6089 ignore = (target == const0_rtx
6090 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6091 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6092 || code == COND_EXPR)
6093 && TREE_CODE (type) == VOID_TYPE));
6095 /* Make a read-only version of the modifier. */
6096 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
6097 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
6098 ro_modifier = modifier;
6100 ro_modifier = EXPAND_NORMAL;
6102 /* If we are going to ignore this result, we need only do something
6103 if there is a side-effect somewhere in the expression. If there
6104 is, short-circuit the most common cases here. Note that we must
6105 not call expand_expr with anything but const0_rtx in case this
6106 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6110 if (! TREE_SIDE_EFFECTS (exp))
6113 /* Ensure we reference a volatile object even if value is ignored, but
6114 don't do this if all we are doing is taking its address. */
6115 if (TREE_THIS_VOLATILE (exp)
6116 && TREE_CODE (exp) != FUNCTION_DECL
6117 && mode != VOIDmode && mode != BLKmode
6118 && modifier != EXPAND_CONST_ADDRESS)
6120 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
6121 if (GET_CODE (temp) == MEM)
6122 temp = copy_to_reg (temp);
6126 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6127 || code == INDIRECT_REF || code == BUFFER_REF)
6128 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6129 VOIDmode, ro_modifier);
6130 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6131 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6133 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6135 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6139 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6140 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6141 /* If the second operand has no side effects, just evaluate the first. */
6143 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6144 VOIDmode, ro_modifier);
6145 else if (code == BIT_FIELD_REF)
6147 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6149 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6151 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode,
6159 #ifdef MAX_INTEGER_COMPUTATION_MODE
6160 /* Only check stuff here if the mode we want is different from the mode
6161 of the expression; if it's the same, check_max_integer_computation_mode
6162 will handle it. Do we really need to check this stuff at all? */
6165 && GET_MODE (target) != mode
6166 && TREE_CODE (exp) != INTEGER_CST
6167 && TREE_CODE (exp) != PARM_DECL
6168 && TREE_CODE (exp) != ARRAY_REF
6169 && TREE_CODE (exp) != ARRAY_RANGE_REF
6170 && TREE_CODE (exp) != COMPONENT_REF
6171 && TREE_CODE (exp) != BIT_FIELD_REF
6172 && TREE_CODE (exp) != INDIRECT_REF
6173 && TREE_CODE (exp) != CALL_EXPR
6174 && TREE_CODE (exp) != VAR_DECL
6175 && TREE_CODE (exp) != RTL_EXPR)
6177 enum machine_mode mode = GET_MODE (target);
6179 if (GET_MODE_CLASS (mode) == MODE_INT
6180 && mode > MAX_INTEGER_COMPUTATION_MODE)
6181 internal_error ("unsupported wide integer operation");
6185 && TREE_CODE (exp) != INTEGER_CST
6186 && TREE_CODE (exp) != PARM_DECL
6187 && TREE_CODE (exp) != ARRAY_REF
6188 && TREE_CODE (exp) != ARRAY_RANGE_REF
6189 && TREE_CODE (exp) != COMPONENT_REF
6190 && TREE_CODE (exp) != BIT_FIELD_REF
6191 && TREE_CODE (exp) != INDIRECT_REF
6192 && TREE_CODE (exp) != VAR_DECL
6193 && TREE_CODE (exp) != CALL_EXPR
6194 && TREE_CODE (exp) != RTL_EXPR
6195 && GET_MODE_CLASS (tmode) == MODE_INT
6196 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6197 internal_error ("unsupported wide integer operation");
6199 check_max_integer_computation_mode (exp);
6202 /* If we will do cse, generate all results into pseudo registers
6203 since 1) that allows cse to find more things
6204 and 2) otherwise cse could produce an insn the machine cannot support. */
6207 if (! cse_not_expected && mode != BLKmode && target
6208 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
6215 tree function = decl_function_context (exp);
6216 /* Handle using a label in a containing function. */
6217 if (function != current_function_decl
6218 && function != inline_function_decl && function != 0)
6220 struct function *p = find_function_data (function);
6221 p->expr->x_forced_labels
6222 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6223 p->expr->x_forced_labels);
6227 if (modifier == EXPAND_INITIALIZER)
6228 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6233 temp = gen_rtx_MEM (FUNCTION_MODE,
6234 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6235 if (function != current_function_decl
6236 && function != inline_function_decl && function != 0)
6237 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6242 if (DECL_RTL (exp) == 0)
6244 error_with_decl (exp, "prior parameter's size depends on `%s'");
6245 return CONST0_RTX (mode);
6248 /* ... fall through ... */
6251 /* If a static var's type was incomplete when the decl was written,
6252 but the type is complete now, lay out the decl now. */
6253 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6254 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6256 layout_decl (exp, 0);
6257 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
6260 /* Although static-storage variables start off initialized, according to
6261 ANSI C, a memcpy could overwrite them with uninitialized values. So
6262 we check them too. This also lets us check for read-only variables
6263 accessed via a non-const declaration, in case it won't be detected
6264 any other way (e.g., in an embedded system or OS kernel without memory protection).
6267 Aggregates are not checked here; they're handled elsewhere. */
6268 if (cfun && current_function_check_memory_usage
6270 && GET_CODE (DECL_RTL (exp)) == MEM
6271 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6273 enum memory_use_mode memory_usage;
6274 memory_usage = get_memory_usage_from_modifier (modifier);
6276 in_check_memory_usage = 1;
6277 if (memory_usage != MEMORY_USE_DONT)
6278 emit_library_call (chkr_check_addr_libfunc,
6279 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6280 XEXP (DECL_RTL (exp), 0), Pmode,
6281 GEN_INT (int_size_in_bytes (type)),
6282 TYPE_MODE (sizetype),
6283 GEN_INT (memory_usage),
6284 TYPE_MODE (integer_type_node));
6285 in_check_memory_usage = 0;
6288 /* ... fall through ... */
6292 if (DECL_RTL (exp) == 0)
6295 /* Ensure the variable is marked as used even if it doesn't go through
6296 a parser. If it hasn't been used yet, write out an external definition.  */
6298 if (! TREE_USED (exp))
6300 assemble_external (exp);
6301 TREE_USED (exp) = 1;
6304 /* Show we haven't gotten RTL for this yet. */
6307 /* Handle variables inherited from containing functions. */
6308 context = decl_function_context (exp);
6310 /* We treat inline_function_decl as an alias for the current function
6311 because that is the inline function whose vars, types, etc.
6312 are being merged into the current function.
6313 See expand_inline_function. */
6315 if (context != 0 && context != current_function_decl
6316 && context != inline_function_decl
6317 /* If var is static, we don't need a static chain to access it. */
6318 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6319 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6323 /* Mark as non-local and addressable. */
6324 DECL_NONLOCAL (exp) = 1;
6325 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6327 mark_addressable (exp);
6328 if (GET_CODE (DECL_RTL (exp)) != MEM)
6330 addr = XEXP (DECL_RTL (exp), 0);
6331 if (GET_CODE (addr) == MEM)
6332 addr = change_address (addr, Pmode,
6333 fix_lexical_addr (XEXP (addr, 0), exp));
6335 addr = fix_lexical_addr (addr, exp);
6337 temp = change_address (DECL_RTL (exp), mode, addr);
6340 /* This is the case of an array whose size is to be determined
6341 from its initializer, while the initializer is still being parsed.
6344 else if (GET_CODE (DECL_RTL (exp)) == MEM
6345 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6346 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
6347 XEXP (DECL_RTL (exp), 0));
6349 /* If DECL_RTL is memory, we are in the normal case and either
6350 the address is not valid or it is not a register and -fforce-addr
6351 is specified, get the address into a register. */
6353 else if (GET_CODE (DECL_RTL (exp)) == MEM
6354 && modifier != EXPAND_CONST_ADDRESS
6355 && modifier != EXPAND_SUM
6356 && modifier != EXPAND_INITIALIZER
6357 && (! memory_address_p (DECL_MODE (exp),
6358 XEXP (DECL_RTL (exp), 0))
6360 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6361 temp = change_address (DECL_RTL (exp), VOIDmode,
6362 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6364 /* If we got something, return it. But first, set the alignment
6365 if the address is a register. */
6368 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6369 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6374 /* If the mode of DECL_RTL does not match that of the decl, it
6375 must be a promoted value. We return a SUBREG of the wanted mode,
6376 but mark it so that we know that it was already extended. */
6378 if (GET_CODE (DECL_RTL (exp)) == REG
6379 && GET_MODE (DECL_RTL (exp)) != mode)
6381 /* Get the signedness used for this variable. Ensure we get the
6382 same mode we got when the variable was declared. */
6383 if (GET_MODE (DECL_RTL (exp))
6384 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6387 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6388 SUBREG_PROMOTED_VAR_P (temp) = 1;
6389 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6393 return DECL_RTL (exp);
6396 return immed_double_const (TREE_INT_CST_LOW (exp),
6397 TREE_INT_CST_HIGH (exp), mode);
6400 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6401 EXPAND_MEMORY_USE_BAD);
6404 /* If optimized, generate immediate CONST_DOUBLE
6405 which will be turned into memory by reload if necessary.
6407 We used to force a register so that loop.c could see it. But
6408 this does not allow gen_* patterns to perform optimizations with
6409 the constants. It also produces two insns in cases like "x = 1.0;".
6410 On most machines, floating-point constants are not permitted in
6411 many insns, so we'd end up copying it to a register in any case.
6413 Now, we do the copying in expand_binop, if appropriate. */
6414 return immed_real_const (exp);
6418 if (! TREE_CST_RTL (exp))
6419 output_constant_def (exp, 1);
6421 /* TREE_CST_RTL probably contains a constant address.
6422 On RISC machines where a constant address isn't valid,
6423 make some insns to get that address into a register. */
6424 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6425 && modifier != EXPAND_CONST_ADDRESS
6426 && modifier != EXPAND_INITIALIZER
6427 && modifier != EXPAND_SUM
6428 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6430 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6431 return change_address (TREE_CST_RTL (exp), VOIDmode,
6432 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6433 return TREE_CST_RTL (exp);
6435 case EXPR_WITH_FILE_LOCATION:
6438 const char *saved_input_filename = input_filename;
6439 int saved_lineno = lineno;
6440 input_filename = EXPR_WFL_FILENAME (exp);
6441 lineno = EXPR_WFL_LINENO (exp);
6442 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6443 emit_line_note (input_filename, lineno);
6444 /* Possibly avoid switching back and forth here.  */
6445 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6446 input_filename = saved_input_filename;
6447 lineno = saved_lineno;
6452 context = decl_function_context (exp);
6454 /* If this SAVE_EXPR was at global context, assume we are an
6455 initialization function and move it into our context. */
6457 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6459 /* We treat inline_function_decl as an alias for the current function
6460 because that is the inline function whose vars, types, etc.
6461 are being merged into the current function.
6462 See expand_inline_function. */
6463 if (context == current_function_decl || context == inline_function_decl)
6466 /* If this is non-local, handle it. */
6469 /* The following call just exists to abort if the context is
6470 not of a containing function. */
6471 find_function_data (context);
6473 temp = SAVE_EXPR_RTL (exp);
6474 if (temp && GET_CODE (temp) == REG)
6476 put_var_into_stack (exp);
6477 temp = SAVE_EXPR_RTL (exp);
6479 if (temp == 0 || GET_CODE (temp) != MEM)
6481 return change_address (temp, mode,
6482 fix_lexical_addr (XEXP (temp, 0), exp));
6484 if (SAVE_EXPR_RTL (exp) == 0)
6486 if (mode == VOIDmode)
6489 temp = assign_temp (build_qualified_type (type,
6491 | TYPE_QUAL_CONST)),
6494 SAVE_EXPR_RTL (exp) = temp;
6495 if (!optimize && GET_CODE (temp) == REG)
6496 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6499 /* If the mode of TEMP does not match that of the expression, it
6500 must be a promoted value. We pass store_expr a SUBREG of the
6501 wanted mode but mark it so that we know that it was already
6502 extended. Note that `unsignedp' was modified above in this case.  */
6505 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6507 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6508 SUBREG_PROMOTED_VAR_P (temp) = 1;
6509 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6512 if (temp == const0_rtx)
6513 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6514 EXPAND_MEMORY_USE_BAD);
6516 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6518 TREE_USED (exp) = 1;
6521 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6522 must be a promoted value. We return a SUBREG of the wanted mode,
6523 but mark it so that we know that it was already extended. */
6525 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6526 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6528 /* Compute the signedness and make the proper SUBREG. */
6529 promote_mode (type, mode, &unsignedp, 0);
6530 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6531 SUBREG_PROMOTED_VAR_P (temp) = 1;
6532 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6536 return SAVE_EXPR_RTL (exp);
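/* Editor's illustration (hypothetical tree): if a front end wraps
   `f ()' in a SAVE_EXPR that appears twice, as in
   SAVE_EXPR <f ()> * SAVE_EXPR <f ()>, the call is expanded only the
   first time; the value is cached in SAVE_EXPR_RTL, and later uses
   return that rtx (or a promoted SUBREG of it).  */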
6541 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6542 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6546 case PLACEHOLDER_EXPR:
6548 tree placeholder_expr;
6550 /* If there is an object at the head of the placeholder list,
6551 see if some object in it is of type TYPE or a pointer to it. For
6552 further information, see tree.def.  */
6553 for (placeholder_expr = placeholder_list;
6554 placeholder_expr != 0;
6555 placeholder_expr = TREE_CHAIN (placeholder_expr))
6557 tree need_type = TYPE_MAIN_VARIANT (type);
6559 tree old_list = placeholder_list;
6562 /* Find the outermost reference that is of the type we want.
6563 If none, see if any object has a type that is a pointer to
6564 the type we want. */
6565 for (elt = TREE_PURPOSE (placeholder_expr);
6566 elt != 0 && object == 0;
6568 = ((TREE_CODE (elt) == COMPOUND_EXPR
6569 || TREE_CODE (elt) == COND_EXPR)
6570 ? TREE_OPERAND (elt, 1)
6571 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6572 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6573 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6574 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6575 ? TREE_OPERAND (elt, 0) : 0))
6576 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6579 for (elt = TREE_PURPOSE (placeholder_expr);
6580 elt != 0 && object == 0;
6582 = ((TREE_CODE (elt) == COMPOUND_EXPR
6583 || TREE_CODE (elt) == COND_EXPR)
6584 ? TREE_OPERAND (elt, 1)
6585 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6586 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6587 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6588 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6589 ? TREE_OPERAND (elt, 0) : 0))
6590 if (POINTER_TYPE_P (TREE_TYPE (elt))
6591 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6593 object = build1 (INDIRECT_REF, need_type, elt);
6597 /* Expand this object skipping the list entries before
6598 it was found in case it is also a PLACEHOLDER_EXPR.
6599 In that case, we want to translate it using subsequent entries.  */
6601 placeholder_list = TREE_CHAIN (placeholder_expr);
6602 temp = expand_expr (object, original_target, tmode,
6604 placeholder_list = old_list;
6610 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6613 case WITH_RECORD_EXPR:
6614 /* Put the object on the placeholder list, expand our first operand,
6615 and pop the list. */
6616 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6618 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6619 tmode, ro_modifier);
6620 placeholder_list = TREE_CHAIN (placeholder_list);
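/* Editor's illustration (hypothetical Ada-style record): for a
   self-referential field such as REC.DATA (1 .. REC.LEN), the type's
   size tree contains a PLACEHOLDER_EXPR standing for "the record
   being accessed"; WITH_RECORD_EXPR pushes REC onto placeholder_list
   so the PLACEHOLDER_EXPR case above can substitute it.  */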
6624 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6625 expand_goto (TREE_OPERAND (exp, 0));
6627 expand_computed_goto (TREE_OPERAND (exp, 0));
6631 expand_exit_loop_if_false (NULL,
6632 invert_truthvalue (TREE_OPERAND (exp, 0)));
6635 case LABELED_BLOCK_EXPR:
6636 if (LABELED_BLOCK_BODY (exp))
6637 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6638 /* Should perhaps use expand_label, but this is simpler and safer. */
6639 do_pending_stack_adjust ();
6640 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6643 case EXIT_BLOCK_EXPR:
6644 if (EXIT_BLOCK_RETURN (exp))
6645 sorry ("returned value in block_exit_expr");
6646 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6651 expand_start_loop (1);
6652 expand_expr_stmt (TREE_OPERAND (exp, 0));
6660 tree vars = TREE_OPERAND (exp, 0);
6661 int vars_need_expansion = 0;
6663 /* Need to open a binding contour here because
6664 if there are any cleanups they must be contained here. */
6665 expand_start_bindings (2);
6667 /* Mark the corresponding BLOCK for output in its proper place. */
6668 if (TREE_OPERAND (exp, 2) != 0
6669 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6670 insert_block (TREE_OPERAND (exp, 2));
6672 /* If VARS have not yet been expanded, expand them now. */
6675 if (!DECL_RTL_SET_P (vars))
6677 vars_need_expansion = 1;
6680 expand_decl_init (vars);
6681 vars = TREE_CHAIN (vars);
6684 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6686 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6692 if (RTL_EXPR_SEQUENCE (exp))
6694 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6696 emit_insns (RTL_EXPR_SEQUENCE (exp));
6697 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6699 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6700 free_temps_for_rtl_expr (exp);
6701 return RTL_EXPR_RTL (exp);
6704 /* If we don't need the result, just ensure we evaluate any subexpressions.  */
6709 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6710 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6711 EXPAND_MEMORY_USE_BAD);
6715 /* All elts simple constants => refer to a constant in memory. But
6716 if this is a non-BLKmode mode, let it store a field at a time
6717 since that should make a CONST_INT or CONST_DOUBLE when we
6718 fold. Likewise, if we have a target we can use, it is best to
6719 store directly into the target unless the type is large enough
6720 that memcpy will be used. If we are making an initializer and
6721 all operands are constant, put it in memory as well. */
6722 else if ((TREE_STATIC (exp)
6723 && ((mode == BLKmode
6724 && ! (target != 0 && safe_from_p (target, exp, 1)))
6725 || TREE_ADDRESSABLE (exp)
6726 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6727 && (! MOVE_BY_PIECES_P
6728 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6730 && ! mostly_zeros_p (exp))))
6731 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6733 rtx constructor = output_constant_def (exp, 1);
6735 if (modifier != EXPAND_CONST_ADDRESS
6736 && modifier != EXPAND_INITIALIZER
6737 && modifier != EXPAND_SUM
6738 && (! memory_address_p (GET_MODE (constructor),
6739 XEXP (constructor, 0))
6741 && GET_CODE (XEXP (constructor, 0)) != REG)))
6742 constructor = change_address (constructor, VOIDmode,
6743 XEXP (constructor, 0));
6748 /* Handle calls that pass values in multiple non-contiguous
6749 locations. The Irix 6 ABI has examples of this. */
6750 if (target == 0 || ! safe_from_p (target, exp, 1)
6751 || GET_CODE (target) == PARALLEL)
6753 = assign_temp (build_qualified_type (type,
6755 | (TREE_READONLY (exp)
6756 * TYPE_QUAL_CONST))),
6757 TREE_ADDRESSABLE (exp), 1, 1);
6759 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6760 int_size_in_bytes (TREE_TYPE (exp)));
6766 tree exp1 = TREE_OPERAND (exp, 0);
6768 tree string = string_constant (exp1, &index);
6770 /* Try to optimize reads from const strings. */
6772 && TREE_CODE (string) == STRING_CST
6773 && TREE_CODE (index) == INTEGER_CST
6774 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6775 && GET_MODE_CLASS (mode) == MODE_INT
6776 && GET_MODE_SIZE (mode) == 1
6777 && modifier != EXPAND_MEMORY_USE_WO)
6779 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6781 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6782 op0 = memory_address (mode, op0);
6784 if (cfun && current_function_check_memory_usage
6785 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6787 enum memory_use_mode memory_usage;
6788 memory_usage = get_memory_usage_from_modifier (modifier);
6790 if (memory_usage != MEMORY_USE_DONT)
6792 in_check_memory_usage = 1;
6793 emit_library_call (chkr_check_addr_libfunc,
6794 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6795 Pmode, GEN_INT (int_size_in_bytes (type)),
6796 TYPE_MODE (sizetype),
6797 GEN_INT (memory_usage),
6798 TYPE_MODE (integer_type_node));
6799 in_check_memory_usage = 0;
6803 temp = gen_rtx_MEM (mode, op0);
6804 set_mem_attributes (temp, exp, 0);
6806 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6807 here, because, in C and C++, the fact that a location is accessed
6808 through a pointer to const does not mean that the value there can
6809 never change. Languages where it can never change should
6810 also set TREE_STATIC. */
6811 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
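/* Editor's note: e.g. `const int *p' in C only forbids writing
   through p itself; *p may still change through some other lvalue,
   which is why TREE_READONLY alone must not imply RTX_UNCHANGING_P.  */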
6813 /* If we are writing to this object and its type is a record with
6814 readonly fields, we must mark it as readonly so it will
6815 conflict with readonly references to those fields. */
6816 if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
6817 RTX_UNCHANGING_P (temp) = 1;
6823 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6827 tree array = TREE_OPERAND (exp, 0);
6828 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6829 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6830 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6833 /* Optimize the special case of a zero lower bound.
6835 We convert the low_bound to sizetype to avoid some problems
6836 with constant folding. (E.g. suppose the lower bound is 1,
6837 and its mode is QI. Without the conversion, (ARRAY
6838 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6839 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6841 if (! integer_zerop (low_bound))
6842 index = size_diffop (index, convert (sizetype, low_bound));
6844 /* Fold an expression like: "foo"[2].
6845 This is not done in fold so it won't happen inside &.
6846 Don't fold if this is for wide characters since it's too
6847 difficult to do correctly and this is a very rare case. */
6849 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6850 && TREE_CODE (array) == STRING_CST
6851 && TREE_CODE (index) == INTEGER_CST
6852 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6853 && GET_MODE_CLASS (mode) == MODE_INT
6854 && GET_MODE_SIZE (mode) == 1)
6856 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
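/* Editor's illustration: "foo"[2] with a QImode result folds straight
   to the CONST_INT for 'o'; no memory reference is emitted.  */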
6858 /* If this is a constant index into a constant array,
6859 just get the value from the array. Handle both the cases when
6860 we have an explicit constructor and when our operand is a variable
6861 that was declared const. */
6863 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6864 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6865 && TREE_CODE (index) == INTEGER_CST
6866 && 0 > compare_tree_int (index,
6867 list_length (CONSTRUCTOR_ELTS
6868 (TREE_OPERAND (exp, 0)))))
6872 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6873 i = TREE_INT_CST_LOW (index);
6874 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6878 return expand_expr (fold (TREE_VALUE (elem)), target,
6879 tmode, ro_modifier);
6882 else if (optimize >= 1
6883 && modifier != EXPAND_CONST_ADDRESS
6884 && modifier != EXPAND_INITIALIZER
6885 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6886 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6887 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6889 if (TREE_CODE (index) == INTEGER_CST)
6891 tree init = DECL_INITIAL (array);
6893 if (TREE_CODE (init) == CONSTRUCTOR)
6897 for (elem = CONSTRUCTOR_ELTS (init);
6899 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6900 elem = TREE_CHAIN (elem))
6903 if (elem && !TREE_SIDE_EFFECTS (elem))
6904 return expand_expr (fold (TREE_VALUE (elem)), target,
6905 tmode, ro_modifier);
6907 else if (TREE_CODE (init) == STRING_CST
6908 && 0 > compare_tree_int (index,
6909 TREE_STRING_LENGTH (init)))
6911 tree type = TREE_TYPE (TREE_TYPE (init));
6912 enum machine_mode mode = TYPE_MODE (type);
6914 if (GET_MODE_CLASS (mode) == MODE_INT
6915 && GET_MODE_SIZE (mode) == 1)
6917 (TREE_STRING_POINTER
6918 (init)[TREE_INT_CST_LOW (index)]));
6927 case ARRAY_RANGE_REF:
6928 /* If the operand is a CONSTRUCTOR, we can just extract the
6929 appropriate field if it is present. Don't do this if we have
6930 already written the data since we want to refer to that copy
6931 and varasm.c assumes that's what we'll do. */
6932 if (code == COMPONENT_REF
6933 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6934 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6938 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6939 elt = TREE_CHAIN (elt))
6940 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6941 /* We can normally use the value of the field in the
6942 CONSTRUCTOR. However, if this is a bitfield in
6943 an integral mode that we can fit in a HOST_WIDE_INT,
6944 we must mask only the number of bits in the bitfield,
6945 since this is done implicitly by the constructor. If
6946 the bitfield does not meet either of those conditions,
6947 we can't do this optimization. */
6948 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6949 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6951 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6952 <= HOST_BITS_PER_WIDE_INT))))
6954 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6955 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6957 HOST_WIDE_INT bitsize
6958 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6960 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6962 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6963 op0 = expand_and (op0, op1, target);
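/* Editor's illustration: for a 3-bit unsigned bitfield, OP1 is
   (1 << 3) - 1 == 7, so the AND keeps only the low three bits of
   the value taken from the constructor.  */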
6967 enum machine_mode imode
6968 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6970 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6973 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6975 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6985 enum machine_mode mode1;
6986 HOST_WIDE_INT bitsize, bitpos;
6989 unsigned int alignment;
6990 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6991 &mode1, &unsignedp, &volatilep,
6994 /* If we got back the original object, something is wrong. Perhaps
6995 we are evaluating an expression too early. In any event, don't
6996 infinitely recurse. */
7000 /* If TEM's type is a union of variable size, pass TARGET to the inner
7001 computation, since it will need a temporary and TARGET is known
7002 to be usable as one. This occurs in unchecked conversion in Ada.  */
7004 op0 = expand_expr (tem,
7005 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7006 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7008 ? target : NULL_RTX),
7010 (modifier == EXPAND_INITIALIZER
7011 || modifier == EXPAND_CONST_ADDRESS)
7012 ? modifier : EXPAND_NORMAL);
7014 /* If this is a constant, put it into a register if it is a
7015 legitimate constant and OFFSET is 0 and memory if it isn't. */
7016 if (CONSTANT_P (op0))
7018 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7019 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7021 op0 = force_reg (mode, op0);
7023 op0 = validize_mem (force_const_mem (mode, op0));
7028 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7030 /* If this object is in a register, put it into memory.
7031 This case can't occur in C, but can in Ada if we have
7032 unchecked conversion of an expression from a scalar type to
7033 an array or record type. */
7034 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7035 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7037 tree nt = build_qualified_type (TREE_TYPE (tem),
7038 (TYPE_QUALS (TREE_TYPE (tem))
7039 | TYPE_QUAL_CONST));
7040 rtx memloc = assign_temp (nt, 1, 1, 1);
7042 mark_temp_addr_taken (memloc);
7043 emit_move_insn (memloc, op0);
7047 if (GET_CODE (op0) != MEM)
7050 if (GET_MODE (offset_rtx) != ptr_mode)
7052 #ifdef POINTERS_EXTEND_UNSIGNED
7053 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
7055 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7059 /* A constant address in OP0 can have VOIDmode; we must not
7060 call force_reg in that case, so avoid it.  */
7061 if (GET_CODE (op0) == MEM
7062 && GET_MODE (op0) == BLKmode
7063 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7065 && (bitpos % bitsize) == 0
7066 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7067 && alignment == GET_MODE_ALIGNMENT (mode1))
7069 rtx temp = change_address (op0, mode1,
7070 plus_constant (XEXP (op0, 0),
7073 if (GET_CODE (XEXP (temp, 0)) == REG)
7076 op0 = change_address (op0, mode1,
7077 force_reg (GET_MODE (XEXP (temp, 0)),
7082 op0 = change_address (op0, VOIDmode,
7083 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
7084 force_reg (ptr_mode,
7088 /* Don't forget about volatility even if this is a bitfield. */
7089 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7091 op0 = copy_rtx (op0);
7092 MEM_VOLATILE_P (op0) = 1;
7095 /* Check the access. */
7096 if (cfun != 0 && current_function_check_memory_usage
7097 && GET_CODE (op0) == MEM)
7099 enum memory_use_mode memory_usage;
7100 memory_usage = get_memory_usage_from_modifier (modifier);
7102 if (memory_usage != MEMORY_USE_DONT)
7107 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
7108 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
7110 /* Check the access rights of the pointer.  */
7111 in_check_memory_usage = 1;
7112 if (size > BITS_PER_UNIT)
7113 emit_library_call (chkr_check_addr_libfunc,
7114 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
7115 Pmode, GEN_INT (size / BITS_PER_UNIT),
7116 TYPE_MODE (sizetype),
7117 GEN_INT (memory_usage),
7118 TYPE_MODE (integer_type_node));
7119 in_check_memory_usage = 0;
7123 /* In cases where an aligned union has an unaligned object
7124 as a field, we might be extracting a BLKmode value from
7125 an integer-mode (e.g., SImode) object. Handle this case
7126 by doing the extract into an object as wide as the field
7127 (which we know to be the width of a basic mode), then
7128 storing into memory, and changing the mode to BLKmode. */
7129 if (mode1 == VOIDmode
7130 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7131 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7132 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7133 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
7134 /* If the field isn't aligned enough to fetch as a memref,
7135 fetch it as a bit field. */
7136 || (mode1 != BLKmode
7137 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
7138 && ((TYPE_ALIGN (TREE_TYPE (tem))
7139 < GET_MODE_ALIGNMENT (mode))
7140 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7141 /* If the type and the field are a constant size and the
7142 size of the type isn't the same size as the bitfield,
7143 we must use bitfield operations. */
7145 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7147 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7150 && SLOW_UNALIGNED_ACCESS (mode, alignment)
7151 && (TYPE_ALIGN (type) > alignment
7152 || bitpos % TYPE_ALIGN (type) != 0)))
7154 enum machine_mode ext_mode = mode;
7156 if (ext_mode == BLKmode
7157 && ! (target != 0 && GET_CODE (op0) == MEM
7158 && GET_CODE (target) == MEM
7159 && bitpos % BITS_PER_UNIT == 0))
7160 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7162 if (ext_mode == BLKmode)
7164 /* In this case, BITPOS must start at a byte boundary and
7165 TARGET, if specified, must be a MEM. */
7166 if (GET_CODE (op0) != MEM
7167 || (target != 0 && GET_CODE (target) != MEM)
7168 || bitpos % BITS_PER_UNIT != 0)
7171 op0 = change_address (op0, VOIDmode,
7172 plus_constant (XEXP (op0, 0),
7173 bitpos / BITS_PER_UNIT));
7175 target = assign_temp (type, 0, 1, 1);
7177 emit_block_move (target, op0,
7178 bitsize == -1 ? expr_size (exp)
7179 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7186 op0 = validize_mem (op0);
7188 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7189 mark_reg_pointer (XEXP (op0, 0), alignment);
7191 op0 = extract_bit_field (op0, bitsize, bitpos,
7192 unsignedp, target, ext_mode, ext_mode,
7194 int_size_in_bytes (TREE_TYPE (tem)));
7196 /* If the result is a record type and BITSIZE is narrower than
7197 the mode of OP0, an integral mode, and this is a big endian
7198 machine, we must put the field into the high-order bits. */
7199 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7200 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7201 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
7202 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7203 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7207 if (mode == BLKmode)
7209 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
7211 rtx new = assign_temp (nt, 0, 1, 1);
7213 emit_move_insn (new, op0);
7214 op0 = copy_rtx (new);
7215 PUT_MODE (op0, BLKmode);
7221 /* If the result is BLKmode, use that to access the object now as well.  */
7223 if (mode == BLKmode)
7226 /* Get a reference to just this component. */
7227 if (modifier == EXPAND_CONST_ADDRESS
7228 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7230 rtx new = gen_rtx_MEM (mode1,
7231 plus_constant (XEXP (op0, 0),
7232 (bitpos / BITS_PER_UNIT)));
7234 MEM_COPY_ATTRIBUTES (new, op0);
7238 op0 = change_address (op0, mode1,
7239 plus_constant (XEXP (op0, 0),
7240 (bitpos / BITS_PER_UNIT)));
7242 set_mem_attributes (op0, exp, 0);
7243 if (GET_CODE (XEXP (op0, 0)) == REG)
7244 mark_reg_pointer (XEXP (op0, 0), alignment);
7246 MEM_VOLATILE_P (op0) |= volatilep;
7247 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7248 || modifier == EXPAND_CONST_ADDRESS
7249 || modifier == EXPAND_INITIALIZER)
7251 else if (target == 0)
7252 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7254 convert_move (target, op0, unsignedp);
7258 /* Intended for a reference to a buffer of a file-object in Pascal.
7259 But it's not certain that a special tree code will really be
7260 necessary for these. INDIRECT_REF might work for them. */
7266 /* Pascal set IN expression.
7269 rlo = set_low - (set_low%bits_per_word);
7270 the_word = set [ (index - rlo)/bits_per_word ];
7271 bit_index = index % bits_per_word;
7272 bitmask = 1 << bit_index;
7273 return !!(the_word & bitmask); */
7275 tree set = TREE_OPERAND (exp, 0);
7276 tree index = TREE_OPERAND (exp, 1);
7277 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7278 tree set_type = TREE_TYPE (set);
7279 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7280 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7281 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7282 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7283 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7284 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7285 rtx setaddr = XEXP (setval, 0);
7286 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7288 rtx diff, quo, rem, addr, bit, result;
7290 /* If domain is empty, answer is no. Likewise if index is constant
7291 and out of bounds. */
7292 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7293 && TREE_CODE (set_low_bound) == INTEGER_CST
7294 && tree_int_cst_lt (set_high_bound, set_low_bound))
7295 || (TREE_CODE (index) == INTEGER_CST
7296 && TREE_CODE (set_low_bound) == INTEGER_CST
7297 && tree_int_cst_lt (index, set_low_bound))
7298 || (TREE_CODE (set_high_bound) == INTEGER_CST
7299 && TREE_CODE (index) == INTEGER_CST
7300 && tree_int_cst_lt (set_high_bound, index))))
7304 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7306 /* If we get here, we have to generate the code for both cases
7307 (in range and out of range). */
7309 op0 = gen_label_rtx ();
7310 op1 = gen_label_rtx ();
7312 if (! (GET_CODE (index_val) == CONST_INT
7313 && GET_CODE (lo_r) == CONST_INT))
7315 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7316 GET_MODE (index_val), iunsignedp, 0, op1);
7319 if (! (GET_CODE (index_val) == CONST_INT
7320 && GET_CODE (hi_r) == CONST_INT))
7322 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7323 GET_MODE (index_val), iunsignedp, 0, op1);
7326 /* Calculate the element number of bit zero in the first word of the set.  */
7328 if (GET_CODE (lo_r) == CONST_INT)
7329 rlow = GEN_INT (INTVAL (lo_r)
7330 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7332 rlow = expand_binop (index_mode, and_optab, lo_r,
7333 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7334 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7336 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7337 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7339 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7340 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7341 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7342 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7344 addr = memory_address (byte_mode,
7345 expand_binop (index_mode, add_optab, diff,
7346 setaddr, NULL_RTX, iunsignedp,
7349 /* Extract the bit we want to examine. */
7350 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7351 gen_rtx_MEM (byte_mode, addr),
7352 make_tree (TREE_TYPE (index), rem),
7354 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7355 GET_MODE (target) == byte_mode ? target : 0,
7356 1, OPTAB_LIB_WIDEN);
7358 if (result != target)
7359 convert_move (target, result, 1);
7361 /* Output the code to handle the out-of-range case. */
7364 emit_move_insn (target, const0_rtx);
7369 case WITH_CLEANUP_EXPR:
7370 if (RTL_EXPR_RTL (exp) == 0)
7373 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7374 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
7376 /* That's it for this cleanup. */
7377 TREE_OPERAND (exp, 2) = 0;
7379 return RTL_EXPR_RTL (exp);
7381 case CLEANUP_POINT_EXPR:
7383 /* Start a new binding layer that will keep track of all cleanup
7384 actions to be performed. */
7385 expand_start_bindings (2);
7387 target_temp_slot_level = temp_slot_level;
7389 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7390 /* If we're going to use this value, load it up now. */
7392 op0 = force_not_mem (op0);
7393 preserve_temp_slots (op0);
7394 expand_end_bindings (NULL_TREE, 0, 0);
7399 /* Check for a built-in function. */
7400 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7401 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7403 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7405 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7406 == BUILT_IN_FRONTEND)
7407 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7409 return expand_builtin (exp, target, subtarget, tmode, ignore);
7412 return expand_call (exp, target, ignore);
7414 case NON_LVALUE_EXPR:
7417 case REFERENCE_EXPR:
7418 if (TREE_OPERAND (exp, 0) == error_mark_node)
7421 if (TREE_CODE (type) == UNION_TYPE)
7423 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7425 /* If both input and output are BLKmode, this conversion
7426 isn't actually doing anything unless we need to make the
7427 alignment stricter. */
7428 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7429 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7430 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7431 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7435 target = assign_temp (type, 0, 1, 1);
7437 if (GET_CODE (target) == MEM)
7438 /* Store data into beginning of memory target. */
7439 store_expr (TREE_OPERAND (exp, 0),
7440 change_address (target, TYPE_MODE (valtype), 0), 0);
7442 else if (GET_CODE (target) == REG)
7443 /* Store this field into a union of the proper type. */
7444 store_field (target,
7445 MIN ((int_size_in_bytes (TREE_TYPE
7446 (TREE_OPERAND (exp, 0)))
7448 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7449 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7450 VOIDmode, 0, BITS_PER_UNIT,
7451 int_size_in_bytes (type), 0);
7455 /* Return the entire union. */
7459 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7461 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7464 /* If the signedness of the conversion differs and OP0 is
7465 a promoted SUBREG, clear that indication since we now
7466 have to do the proper extension. */
7467 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7468 && GET_CODE (op0) == SUBREG)
7469 SUBREG_PROMOTED_VAR_P (op0) = 0;
7474 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7475 if (GET_MODE (op0) == mode)
7478 /* If OP0 is a constant, just convert it into the proper mode. */
7479 if (CONSTANT_P (op0))
7481 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7482 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7484 if (modifier == EXPAND_INITIALIZER)
7485 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7489 convert_to_mode (mode, op0,
7490 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7492 convert_move (target, op0,
7493 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7497 /* We come here from MINUS_EXPR when the second operand is a constant.  */
7500 this_optab = ! unsignedp && flag_trapv
7501 && (GET_MODE_CLASS(mode) == MODE_INT)
7502 ? addv_optab : add_optab;
7504 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7505 something else, make sure we add the register to the constant and
7506 then to the other thing. This case can occur during strength
7507 reduction and doing it this way will produce better code if the
7508 frame pointer or argument pointer is eliminated.
7510 fold-const.c will ensure that the constant is always in the inner
7511 PLUS_EXPR, so the only case we need to do anything about is if
7512 sp, ap, or fp is our second argument, in which case we must swap
7513 the innermost first argument and our second argument. */
7515 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7516 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7517 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7518 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7519 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7520 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7522 tree t = TREE_OPERAND (exp, 1);
7524 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7525 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7528 /* If the result is to be ptr_mode and we are adding an integer to
7529 something, we might be forming a constant. So try to use
7530 plus_constant. If it produces a sum and we can't accept it,
7531 use force_operand. This allows P = &ARR[const] to generate
7532 efficient code on machines where a SYMBOL_REF is not a valid address.
7535 If this is an EXPAND_SUM call, always return the sum.  */
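/* Editor's illustration: for P = &ARR[5] with 4-byte elements,
   plus_constant can fold the address to (plus (symbol_ref ARR)
   (const_int 20)); if the target cannot accept that sum directly,
   force_operand reduces it to a valid operand, loading it into a
   register if needed.  */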
7536 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7537 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7539 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7540 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7541 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7545 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7547 /* Use immed_double_const to ensure that the constant is
7548 truncated according to the mode of OP1, then sign extended
7549 to a HOST_WIDE_INT. Using the constant directly can result
7550 in non-canonical RTL in a 64x32 cross compile. */
7552 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7554 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7555 op1 = plus_constant (op1, INTVAL (constant_part));
7556 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7557 op1 = force_operand (op1, target);
7561 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7562 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7563 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7567 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7569 if (! CONSTANT_P (op0))
7571 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7572 VOIDmode, modifier);
7573 /* Don't go to both_summands if modifier
7574 says it's not right to return a PLUS. */
7575 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7579 /* Use immed_double_const to ensure that the constant is
7580 truncated according to the mode of OP1, then sign extended
7581 to a HOST_WIDE_INT. Using the constant directly can result
7582 in non-canonical RTL in a 64x32 cross compile. */
7584 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7586 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7587 op0 = plus_constant (op0, INTVAL (constant_part));
7588 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7589 op0 = force_operand (op0, target);
7594 /* No sense saving up arithmetic to be done
7595 if it's all in the wrong mode to form part of an address.
7596 And force_operand won't know whether to sign-extend or zero-extend.  */
7598 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7599 || mode != ptr_mode)
7602 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7605 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7606 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7609 /* Make sure any term that's a sum with a constant comes last. */
7610 if (GET_CODE (op0) == PLUS
7611 && CONSTANT_P (XEXP (op0, 1)))
7617 /* If adding to a sum including a constant,
7618 associate it to put the constant outside. */
7619 if (GET_CODE (op1) == PLUS
7620 && CONSTANT_P (XEXP (op1, 1)))
7622 rtx constant_term = const0_rtx;
7624 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7627 /* Ensure that MULT comes first if there is one. */
7628 else if (GET_CODE (op0) == MULT)
7629 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7631 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7633 /* Let's also eliminate constants from op0 if possible. */
7634 op0 = eliminate_constant_term (op0, &constant_term);
7636 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7637 their sum should be a constant. Form it into OP1, since the
7638 result we want will then be OP0 + OP1. */
7640 temp = simplify_binary_operation (PLUS, mode, constant_term,
7645 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7648 /* Put a constant term last and put a multiplication first. */
7649 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7650 temp = op1, op1 = op0, op0 = temp;
7652 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7653 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7656 /* For initializers, we are allowed to return a MINUS of two
7657 symbolic constants. Here we handle all cases when both operands are constant.  */
7659 /* Handle difference of two symbolic constants,
7660 for the sake of an initializer. */
7661 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7662 && really_constant_p (TREE_OPERAND (exp, 0))
7663 && really_constant_p (TREE_OPERAND (exp, 1)))
7665 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7666 VOIDmode, ro_modifier);
7667 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7668 VOIDmode, ro_modifier);
7670 /* If the last operand is a CONST_INT, use plus_constant of
7671 the negated constant. Else make the MINUS. */
7672 if (GET_CODE (op1) == CONST_INT)
7673 return plus_constant (op0, - INTVAL (op1));
7675 return gen_rtx_MINUS (mode, op0, op1);
7677 /* Convert A - const to A + (-const). */
7678 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7680 tree negated = fold (build1 (NEGATE_EXPR, type,
7681 TREE_OPERAND (exp, 1)));
7683 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7684 /* If we can't negate the constant in TYPE, leave it alone and
7685 expand_binop will negate it for us. We used to try to do it
7686 here in the signed version of TYPE, but that doesn't work
7687 on POINTER_TYPEs. */;
7690 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
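/* Editor's illustration: x - 5 is rebuilt here as x + (-5), so the
   PLUS_EXPR code above can fold the constant into an address or
   combine it with other constant terms.  */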
7694 this_optab = ! unsignedp && flag_trapv
7695 && (GET_MODE_CLASS(mode) == MODE_INT)
7696 ? subv_optab : sub_optab;
7700 /* If first operand is constant, swap them.
7701 Thus the following special case checks need only
7702 check the second operand. */
7703 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7705 register tree t1 = TREE_OPERAND (exp, 0);
7706 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7707 TREE_OPERAND (exp, 1) = t1;
7710 /* Attempt to return something suitable for generating an
7711 indexed address, for machines that support that. */
7713 if (modifier == EXPAND_SUM && mode == ptr_mode
7714 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7715 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7717 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7720 /* Apply distributive law if OP0 is x+c. */
7721 if (GET_CODE (op0) == PLUS
7722 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7727 (mode, XEXP (op0, 0),
7728 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7729 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7730 * INTVAL (XEXP (op0, 1))));
7732 if (GET_CODE (op0) != REG)
7733 op0 = force_operand (op0, NULL_RTX);
7734 if (GET_CODE (op0) != REG)
7735 op0 = copy_to_mode_reg (mode, op0);
7738 gen_rtx_MULT (mode, op0,
7739 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7742 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7745 /* Check for multiplying things that have been extended
7746 from a narrower type. If this machine supports multiplying
7747 in that narrower type with a result in the desired type,
7748 do it that way, and avoid the explicit type-conversion. */
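/* Editor's illustration (assuming a target with a widening multiply
   pattern such as mulhisi3, on a typical 32-bit machine):
   (int) (short) a * (int) (short) b can be expanded with
   smul_widen_optab directly on the HImode operands, producing the
   SImode product without first widening each operand.  */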
7749 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7750 && TREE_CODE (type) == INTEGER_TYPE
7751 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7752 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7753 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7754 && int_fits_type_p (TREE_OPERAND (exp, 1),
7755 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7756 /* Don't use a widening multiply if a shift will do. */
7757 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7758 > HOST_BITS_PER_WIDE_INT)
7759 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7761 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7762 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7764 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7765 /* If both operands are extended, they must either both
7766 be zero-extended or both be sign-extended. */
7767 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7769 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7771 enum machine_mode innermode
7772 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7773 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7774 ? smul_widen_optab : umul_widen_optab);
7775 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7776 ? umul_widen_optab : smul_widen_optab);
7777 if (mode == GET_MODE_WIDER_MODE (innermode))
7779 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7781 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7782 NULL_RTX, VOIDmode, 0);
7783 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7784 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7787 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7788 NULL_RTX, VOIDmode, 0);
7791 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7792 && innermode == word_mode)
7795 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7796 NULL_RTX, VOIDmode, 0);
7797 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7798 op1 = convert_modes (innermode, mode,
7799 expand_expr (TREE_OPERAND (exp, 1),
7800 NULL_RTX, VOIDmode, 0),
7803 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7804 NULL_RTX, VOIDmode, 0);
7805 temp = expand_binop (mode, other_optab, op0, op1, target,
7806 unsignedp, OPTAB_LIB_WIDEN);
7807 htem = expand_mult_highpart_adjust (innermode,
7808 gen_highpart (innermode, temp),
7810 gen_highpart (innermode, temp),
7812 emit_move_insn (gen_highpart (innermode, temp), htem);
7817 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7818 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7819 return expand_mult (mode, op0, op1, target, unsignedp);
7821 case TRUNC_DIV_EXPR:
7822 case FLOOR_DIV_EXPR:
7824 case ROUND_DIV_EXPR:
7825 case EXACT_DIV_EXPR:
7826 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7828 /* Possible optimization: compute the dividend with EXPAND_SUM;
7829 then, if the divisor is constant, we can optimize the case
7830 where some terms of the dividend have coefficients divisible by it.  */
7831 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7832 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7833 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7836 this_optab = flodiv_optab;
7839 case TRUNC_MOD_EXPR:
7840 case FLOOR_MOD_EXPR:
7842 case ROUND_MOD_EXPR:
7843 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7845 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7846 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7847 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7849 case FIX_ROUND_EXPR:
7850 case FIX_FLOOR_EXPR:
7852 abort (); /* Not used for C. */
7854 case FIX_TRUNC_EXPR:
7855 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7857 target = gen_reg_rtx (mode);
7858 expand_fix (target, op0, unsignedp);
7862 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7864 target = gen_reg_rtx (mode);
7865 /* expand_float can't figure out what to do if FROM has VOIDmode.
7866 So give it the correct mode. With -O, cse will optimize this. */
7867 if (GET_MODE (op0) == VOIDmode)
7868 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7870 expand_float (target, op0,
7871 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7875 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7876 temp = expand_unop (mode,
7877 ! unsignedp && flag_trapv
7878 && (GET_MODE_CLASS(mode) == MODE_INT)
7879 ? negv_optab : neg_optab, op0, target, 0);
7885 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7887 /* Handle complex values specially. */
7888 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7889 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7890 return expand_complex_abs (mode, op0, target, unsignedp);
7892 /* Unsigned abs is simply the operand. Testing here means we don't
7893 risk generating incorrect code below. */
7894 if (TREE_UNSIGNED (type))
7897 return expand_abs (mode, op0, target, unsignedp,
7898 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7902 target = original_target;
7903 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7904 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7905 || GET_MODE (target) != mode
7906 || (GET_CODE (target) == REG
7907 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7908 target = gen_reg_rtx (mode);
7909 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7910 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7912 /* First try to do it with a special MIN or MAX instruction.
7913 If that does not win, use a conditional jump to select the proper value.  */
7915 this_optab = (TREE_UNSIGNED (type)
7916 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7917 : (code == MIN_EXPR ? smin_optab : smax_optab));
7919 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7924 /* At this point, a MEM target is no longer useful; we will get better code without it.  */
7927 if (GET_CODE (target) == MEM)
7928 target = gen_reg_rtx (mode);
7931 emit_move_insn (target, op0);
7933 op0 = gen_label_rtx ();
7935 /* If this mode is an integer too wide to compare properly,
7936 compare word by word. Rely on cse to optimize constant cases. */
7937 if (GET_MODE_CLASS (mode) == MODE_INT
7938 && ! can_compare_p (GE, mode, ccp_jump))
7940 if (code == MAX_EXPR)
7941 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7942 target, op1, NULL_RTX, op0);
7944 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7945 op1, target, NULL_RTX, op0);
7949 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7950 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7951 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7954 emit_move_insn (target, op1);
7959 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7960 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7966 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7967 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7972 /* ??? Can optimize bitwise operations with one arg constant.
7973 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7974 and (a bitwise1 b) bitwise2 b (etc)
7975 but that is probably not worthwhile.  */
7977 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7978 boolean values when we want in all cases to compute both of them. In
7979 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7980 as actual zero-or-1 values and then bitwise anding. In cases where
7981 there cannot be any side effects, better code would be made by
7982 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7983 how to recognize those cases. */
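/* Editor's note: TRUTH_ANDIF_EXPR (C's `a && b') must not evaluate
   its second operand when the first is zero; TRUTH_AND_EXPR carries
   no such guarantee, so both operands can be computed as 0-or-1
   values and combined with a bitwise AND.  */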
7985 case TRUTH_AND_EXPR:
7987 this_optab = and_optab;
7992 this_optab = ior_optab;
7995 case TRUTH_XOR_EXPR:
7997 this_optab = xor_optab;
8004 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8006 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8007 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8010 /* Could determine the answer when only additive constants differ. Also,
8011 the addition of one can be handled by changing the condition. */
8018 case UNORDERED_EXPR:
8025 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8029 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8030 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8032 && GET_CODE (original_target) == REG
8033 && (GET_MODE (original_target)
8034 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8036 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8039 if (temp != original_target)
8040 temp = copy_to_reg (temp);
8042 op1 = gen_label_rtx ();
8043 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8044 GET_MODE (temp), unsignedp, 0, op1);
8045 emit_move_insn (temp, const1_rtx);
8050 /* If no set-flag instruction, must generate a conditional
8051 store into a temporary variable. Drop through
8052 and handle this like && and ||. */
8054 case TRUTH_ANDIF_EXPR:
8055 case TRUTH_ORIF_EXPR:
8057 && (target == 0 || ! safe_from_p (target, exp, 1)
8058 /* Make sure we don't have a hard reg (such as function's return
8059 value) live across basic blocks, if not optimizing. */
8060 || (!optimize && GET_CODE (target) == REG
8061 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8062 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8065 emit_clr_insn (target);
8067 op1 = gen_label_rtx ();
8068 jumpifnot (exp, op1);
8071 emit_0_to_1_insn (target);
8074 return ignore ? const0_rtx : target;
8076 case TRUTH_NOT_EXPR:
8077 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8078 /* The parser is careful to generate TRUTH_NOT_EXPR
8079 only with operands that are always zero or one. */
8080 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8081 target, 1, OPTAB_LIB_WIDEN);
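/* Editor's note: since OP0 is known here to be 0 or 1, !x is computed
   simply as x ^ 1; no comparison or branch is needed.  */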
8087 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8089 return expand_expr (TREE_OPERAND (exp, 1),
8090 (ignore ? const0_rtx : target),
8094 /* If we would have a "singleton" (see below) were it not for a
8095 conversion in each arm, bring that conversion back out. */
8096 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8097 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8098 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8099 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8101 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8102 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8104 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8105 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8106 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8107 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8108 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8109 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8110 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8111 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8112 return expand_expr (build1 (NOP_EXPR, type,
8113 build (COND_EXPR, TREE_TYPE (iftrue),
8114 TREE_OPERAND (exp, 0),
8116 target, tmode, modifier);
8120 /* Note that COND_EXPRs whose type is a structure or union
8121 are required to be constructed to contain assignments of
8122 a temporary variable, so that we can evaluate them here
8123 for side effect only. If type is void, we must do likewise. */
8125 /* If an arm of the branch requires a cleanup,
8126 only that cleanup is performed. */
8129 tree binary_op = 0, unary_op = 0;
8131 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8132 convert it to our mode, if necessary. */
8133 if (integer_onep (TREE_OPERAND (exp, 1))
8134 && integer_zerop (TREE_OPERAND (exp, 2))
8135 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8139 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
8144 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
8145 if (GET_MODE (op0) == mode)
8149 target = gen_reg_rtx (mode);
8150 convert_move (target, op0, unsignedp);
8154 /* Check for X ? A + B : A. If we have this, we can copy A to the
8155 output and conditionally add B. Similarly for unary operations.
8156 Don't do this if X has side-effects because those side effects
8157 might affect A or B and the "?" operation is a sequence point in
8158 ANSI. (operand_equal_p tests for side effects.) */
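/* For example, in "x ? a + 8 : a" the singleton is "a" and binary_op is
   "a + 8": "a" can be computed unconditionally and the addition done only
   when X is true, or done branch-free since 8 is a power of 2 (see the
   store-flag code below). */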
8160 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8161 && operand_equal_p (TREE_OPERAND (exp, 2),
8162 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8163 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8164 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8165 && operand_equal_p (TREE_OPERAND (exp, 1),
8166 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8167 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8168 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8169 && operand_equal_p (TREE_OPERAND (exp, 2),
8170 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8171 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8172 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8173 && operand_equal_p (TREE_OPERAND (exp, 1),
8174 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8175 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8177 /* If we are not to produce a result, we have no target. Otherwise,
8178 if a target was specified use it; it will not be used as an
8179 intermediate target unless it is safe. If no target, use a temporary. */
8184 else if (original_target
8185 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8186 || (singleton && GET_CODE (original_target) == REG
8187 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8188 && original_target == var_rtx (singleton)))
8189 && GET_MODE (original_target) == mode
8190 #ifdef HAVE_conditional_move
8191 && (! can_conditionally_move_p (mode)
8192 || GET_CODE (original_target) == REG
8193 || TREE_ADDRESSABLE (type))
8195 && ! (GET_CODE (original_target) == MEM
8196 && MEM_VOLATILE_P (original_target)))
8197 temp = original_target;
8198 else if (TREE_ADDRESSABLE (type))
8201 temp = assign_temp (type, 0, 0, 1);
8203 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8204 do the test of X as a store-flag operation, do this as
8205 A + ((X != 0) << log C). Similarly for other simple binary
8206 operators. Only do for C == 1 if BRANCH_COST is low. */
8207 if (temp && singleton && binary_op
8208 && (TREE_CODE (binary_op) == PLUS_EXPR
8209 || TREE_CODE (binary_op) == MINUS_EXPR
8210 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8211 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8212 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8213 : integer_onep (TREE_OPERAND (binary_op, 1)))
8214 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8217 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8218 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8219 ? addv_optab : add_optab)
8220 : TREE_CODE (binary_op) == MINUS_EXPR
8221 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8222 ? subv_optab : sub_optab)
8223 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab : xor_optab);
8226 /* If we had X ? A : A + 1, do this as A + (X == 0).
8228 We have to invert the truth value here and then put it
8229 back later if do_store_flag fails. We cannot simply copy
8230 TREE_OPERAND (exp, 0) to another variable and modify that
8231 because invert_truthvalue can modify the tree pointed to by its argument. */
8233 if (singleton == TREE_OPERAND (exp, 1))
8234 TREE_OPERAND (exp, 0)
8235 = invert_truthvalue (TREE_OPERAND (exp, 0));
8237 result = do_store_flag (TREE_OPERAND (exp, 0),
8238 (safe_from_p (temp, singleton, 1) ? temp : NULL_RTX),
8240 mode, BRANCH_COST <= 1);
8242 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8243 result = expand_shift (LSHIFT_EXPR, mode, result,
8244 build_int_2 (tree_log2 (TREE_OPERAND (binary_op, 1)), 0),
8248 (safe_from_p (temp, singleton, 1)
8249 ? temp : NULL_RTX), 0);
8253 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8254 return expand_binop (mode, boptab, op1, result, temp,
8255 unsignedp, OPTAB_LIB_WIDEN);
8257 else if (singleton == TREE_OPERAND (exp, 1))
8258 TREE_OPERAND (exp, 0)
8259 = invert_truthvalue (TREE_OPERAND (exp, 0));
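/* A standalone sketch of the branch-free rewrite attempted above, assuming
   a 32-bit int target; the names are illustrative only, not compiler code:

       #include <stdint.h>

       // x ? a + (1 << k) : a   rewritten as   a + ((x != 0) << k)
       static int32_t cond_add_pow2 (int32_t a, int x, int k)
       {
         return a + ((int32_t) (x != 0) << k);
       }

   The X ? A : A + C form is handled by inverting X first, as above. */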
8262 do_pending_stack_adjust ();
8264 op0 = gen_label_rtx ();
8266 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8270 /* If the target conflicts with the other operand of the
8271 binary op, we can't use it. Also, we can't use the target
8272 if it is a hard register, because evaluating the condition
8273 might clobber it. */
8275 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8276 || (GET_CODE (temp) == REG
8277 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8278 temp = gen_reg_rtx (mode);
8279 store_expr (singleton, temp, 0);
8282 expand_expr (singleton,
8283 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8284 if (singleton == TREE_OPERAND (exp, 1))
8285 jumpif (TREE_OPERAND (exp, 0), op0);
8287 jumpifnot (TREE_OPERAND (exp, 0), op0);
8289 start_cleanup_deferral ();
8290 if (binary_op && temp == 0)
8291 /* Just touch the other operand. */
8292 expand_expr (TREE_OPERAND (binary_op, 1),
8293 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8295 store_expr (build (TREE_CODE (binary_op), type,
8296 make_tree (type, temp),
8297 TREE_OPERAND (binary_op, 1)),
8300 store_expr (build1 (TREE_CODE (unary_op), type,
8301 make_tree (type, temp)),
8305 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8306 comparison operator. If we have one of these cases, set the
8307 output to A, branch on A (cse will merge these two references),
8308 then set the output to FOO. */
8310 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8311 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8312 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8313 TREE_OPERAND (exp, 1), 0)
8314 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8315 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8316 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8318 if (GET_CODE (temp) == REG
8319 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8320 temp = gen_reg_rtx (mode);
8321 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8322 jumpif (TREE_OPERAND (exp, 0), op0);
8324 start_cleanup_deferral ();
8325 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8329 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8330 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8331 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8332 TREE_OPERAND (exp, 2), 0)
8333 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8334 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8335 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8337 if (GET_CODE (temp) == REG
8338 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8339 temp = gen_reg_rtx (mode);
8340 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8341 jumpifnot (TREE_OPERAND (exp, 0), op0);
8343 start_cleanup_deferral ();
8344 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8349 op1 = gen_label_rtx ();
8350 jumpifnot (TREE_OPERAND (exp, 0), op0);
8352 start_cleanup_deferral ();
8354 /* One branch of the cond can be void, if it never returns. For
8355 example A ? throw : E */
8357 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8358 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8360 expand_expr (TREE_OPERAND (exp, 1),
8361 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8362 end_cleanup_deferral ();
8364 emit_jump_insn (gen_jump (op1));
8367 start_cleanup_deferral ();
8369 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8370 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8372 expand_expr (TREE_OPERAND (exp, 2),
8373 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8376 end_cleanup_deferral ();
8387 /* Something needs to be initialized, but we didn't know
8388 where that thing was when building the tree. For example,
8389 it could be the return value of a function, or a parameter
8390 to a function which is laid down on the stack, or a temporary
8391 variable which must be passed by reference.
8393 We guarantee that the expression will either be constructed
8394 or copied into our original target. */
8396 tree slot = TREE_OPERAND (exp, 0);
8397 tree cleanups = NULL_TREE;
8400 if (TREE_CODE (slot) != VAR_DECL)
8404 target = original_target;
8406 /* Set this here so that if we get a target that refers to a
8407 register variable that's already been used, put_reg_into_stack
8408 knows that it should fix up those uses. */
8409 TREE_USED (slot) = 1;
8413 if (DECL_RTL_SET_P (slot))
8415 target = DECL_RTL (slot);
8416 /* We have already expanded the slot, so don't do it again. */
8418 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8423 target = assign_temp (type, 2, 0, 1);
8424 /* All temp slots at this level must not conflict. */
8425 preserve_temp_slots (target);
8426 SET_DECL_RTL (slot, target);
8427 if (TREE_ADDRESSABLE (slot))
8428 put_var_into_stack (slot);
8430 /* Since SLOT is not known to the called function
8431 to belong to its stack frame, we must build an explicit
8432 cleanup. This case occurs when we must build up a reference
8433 to pass the reference as an argument. In this case,
8434 it is very likely that such a reference need not be built here. */
8437 if (TREE_OPERAND (exp, 2) == 0)
8438 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8439 cleanups = TREE_OPERAND (exp, 2);
8444 /* This case does occur, when expanding a parameter which
8445 needs to be constructed on the stack. The target
8446 is the actual stack address that we want to initialize.
8447 The function we call will perform the cleanup in this case. */
8449 /* If we have already assigned it space, use that space,
8450 not the target that we were passed in, as our target
8451 parameter is only a hint. */
8452 if (DECL_RTL_SET_P (slot))
8454 target = DECL_RTL (slot);
8455 /* We have already expanded the slot, so don't do it again. */
8457 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8462 SET_DECL_RTL (slot, target);
8463 /* If we must have an addressable slot, then make sure that
8464 the RTL that we just stored in slot is OK. */
8465 if (TREE_ADDRESSABLE (slot))
8466 put_var_into_stack (slot);
8470 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8471 /* Mark it as expanded. */
8472 TREE_OPERAND (exp, 1) = NULL_TREE;
8474 store_expr (exp1, target, 0);
8476 expand_decl_cleanup (NULL_TREE, cleanups);
8483 tree lhs = TREE_OPERAND (exp, 0);
8484 tree rhs = TREE_OPERAND (exp, 1);
8485 tree noncopied_parts = 0;
8486 tree lhs_type = TREE_TYPE (lhs);
8488 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8489 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8491 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8492 TYPE_NONCOPIED_PARTS (lhs_type));
8494 while (noncopied_parts != 0)
8496 expand_assignment (TREE_VALUE (noncopied_parts),
8497 TREE_PURPOSE (noncopied_parts), 0, 0);
8498 noncopied_parts = TREE_CHAIN (noncopied_parts);
8505 /* If lhs is complex, expand calls in rhs before computing it.
8506 That's so we don't compute a pointer and save it over a call.
8507 If lhs is simple, compute it first so we can give it as a
8508 target if the rhs is just a call. This avoids an extra temp and copy
8509 and that prevents a partial-subsumption which makes bad code.
8510 Actually we could treat component_ref's of vars like vars. */
8512 tree lhs = TREE_OPERAND (exp, 0);
8513 tree rhs = TREE_OPERAND (exp, 1);
8514 tree noncopied_parts = 0;
8515 tree lhs_type = TREE_TYPE (lhs);
8519 /* Check for |= or &= of a bitfield of size one into another bitfield
8520 of size 1. In this case, (unless we need the result of the
8521 assignment) we can do this more efficiently with a
8522 test followed by an assignment, if necessary.
8524 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8525 things change so we do, this code should be enhanced to handle it. */
8528 && TREE_CODE (lhs) == COMPONENT_REF
8529 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8530 || TREE_CODE (rhs) == BIT_AND_EXPR)
8531 && TREE_OPERAND (rhs, 0) == lhs
8532 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8533 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8534 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8536 rtx label = gen_label_rtx ();
8538 do_jump (TREE_OPERAND (rhs, 1),
8539 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8540 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8541 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8542 (TREE_CODE (rhs) == BIT_IOR_EXPR ? integer_one_node
8544 : integer_zero_node)),
8546 do_pending_stack_adjust ();
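/* For example, with one-bit fields the code above turns "s.a |= s.b" into
   the equivalent of

       if (s.b) s.a = 1;

   and "s.a &= s.b" into

       if (!s.b) s.a = 0;

   avoiding a read-modify-write of the destination bit. */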
8551 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8552 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8554 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8555 TYPE_NONCOPIED_PARTS (lhs_type));
8557 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8558 while (noncopied_parts != 0)
8560 expand_assignment (TREE_PURPOSE (noncopied_parts),
8561 TREE_VALUE (noncopied_parts), 0, 0);
8562 noncopied_parts = TREE_CHAIN (noncopied_parts);
8568 if (!TREE_OPERAND (exp, 0))
8569 expand_null_return ();
8571 expand_return (TREE_OPERAND (exp, 0));
8574 case PREINCREMENT_EXPR:
8575 case PREDECREMENT_EXPR:
8576 return expand_increment (exp, 0, ignore);
8578 case POSTINCREMENT_EXPR:
8579 case POSTDECREMENT_EXPR:
8580 /* Faster to treat as pre-increment if result is not used. */
8581 return expand_increment (exp, ! ignore, ignore);
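/* E.g. in "for (...; ...; i++)" the value of "i++" is not used, so it is
   expanded exactly like "++i", saving the copy of the old value. */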
8584 /* If nonzero, TEMP will be set to the address of something that might
8585 be a MEM corresponding to a stack slot. */
8588 /* Are we taking the address of a nested function? */
8589 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8590 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8591 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8592 && ! TREE_STATIC (exp))
8594 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8595 op0 = force_operand (op0, target);
8597 /* If we are taking the address of something erroneous, just return a zero. */
8599 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8603 /* We make sure to pass const0_rtx down if we came in with
8604 ignore set, to avoid doing the cleanups twice for something. */
8605 op0 = expand_expr (TREE_OPERAND (exp, 0),
8606 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8607 (modifier == EXPAND_INITIALIZER
8608 ? modifier : EXPAND_CONST_ADDRESS));
8610 /* If we are going to ignore the result, OP0 will have been set
8611 to const0_rtx, so just return it. Don't get confused and
8612 think we are taking the address of the constant. */
8616 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8617 clever and returns a REG when given a MEM. */
8618 op0 = protect_from_queue (op0, 1);
8620 /* We would like the object in memory. If it is a constant, we can
8621 have it be statically allocated into memory. For a non-constant,
8622 we need to allocate some memory and store the value into it. */
8624 if (CONSTANT_P (op0))
8625 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8627 else if (GET_CODE (op0) == MEM)
8629 mark_temp_addr_taken (op0);
8630 temp = XEXP (op0, 0);
8633 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8634 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8635 || GET_CODE (op0) == PARALLEL)
8637 /* If this object is in a register, it must not be BLKmode; copy it into a memory temporary. */
8639 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8640 tree nt = build_qualified_type (inner_type,
8641 (TYPE_QUALS (inner_type)
8642 | TYPE_QUAL_CONST));
8643 rtx memloc = assign_temp (nt, 1, 1, 1);
8645 mark_temp_addr_taken (memloc);
8646 if (GET_CODE (op0) == PARALLEL)
8647 /* Handle calls that pass values in multiple non-contiguous
8648 locations. The Irix 6 ABI has examples of this. */
8649 emit_group_store (memloc, op0,
8650 int_size_in_bytes (inner_type),
8651 TYPE_ALIGN (inner_type));
8653 emit_move_insn (memloc, op0);
8657 if (GET_CODE (op0) != MEM)
8660 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8662 temp = XEXP (op0, 0);
8663 #ifdef POINTERS_EXTEND_UNSIGNED
8664 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8665 && mode == ptr_mode)
8666 temp = convert_memory_address (ptr_mode, temp);
8671 op0 = force_operand (XEXP (op0, 0), target);
8674 if (flag_force_addr && GET_CODE (op0) != REG)
8675 op0 = force_reg (Pmode, op0);
8677 if (GET_CODE (op0) == REG
8678 && ! REG_USERVAR_P (op0))
8679 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8681 /* If we might have had a temp slot, add an equivalent address for it. */
8684 update_temp_slot_address (temp, op0);
8686 #ifdef POINTERS_EXTEND_UNSIGNED
8687 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8688 && mode == ptr_mode)
8689 op0 = convert_memory_address (ptr_mode, op0);
8694 case ENTRY_VALUE_EXPR:
8697 /* COMPLEX type for Extended Pascal & Fortran */
8700 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8703 /* Get the rtx code of the operands. */
8704 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8705 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8708 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8712 /* Move the real (op0) and imaginary (op1) parts to their location. */
8713 emit_move_insn (gen_realpart (mode, target), op0);
8714 emit_move_insn (gen_imagpart (mode, target), op1);
8716 insns = get_insns ();
8719 /* Complex construction should appear as a single unit. */
8720 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8721 each with a separate pseudo as destination.
8722 It's not correct for flow to treat them as a unit. */
8723 if (GET_CODE (target) != CONCAT)
8724 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8732 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8733 return gen_realpart (mode, op0);
8736 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8737 return gen_imagpart (mode, op0);
8741 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8745 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8748 target = gen_reg_rtx (mode);
8752 /* Store the realpart and the negated imagpart to target. */
8753 emit_move_insn (gen_realpart (partmode, target),
8754 gen_realpart (partmode, op0));
8756 imag_t = gen_imagpart (partmode, target);
8757 temp = expand_unop (partmode,
8758 ! unsignedp && flag_trapv
8759 && (GET_MODE_CLASS(partmode) == MODE_INT)
8760 ? negv_optab : neg_optab,
8761 gen_imagpart (partmode, op0), imag_t, 0);
8763 emit_move_insn (imag_t, temp);
8765 insns = get_insns ();
8768 /* Conjugate should appear as a single unit
8769 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8770 each with a separate pseudo as destination.
8771 It's not correct for flow to treat them as a unit. */
8772 if (GET_CODE (target) != CONCAT)
8773 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8780 case TRY_CATCH_EXPR:
8782 tree handler = TREE_OPERAND (exp, 1);
8784 expand_eh_region_start ();
8786 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8788 expand_eh_region_end_cleanup (handler);
8793 case TRY_FINALLY_EXPR:
8795 tree try_block = TREE_OPERAND (exp, 0);
8796 tree finally_block = TREE_OPERAND (exp, 1);
8797 rtx finally_label = gen_label_rtx ();
8798 rtx done_label = gen_label_rtx ();
8799 rtx return_link = gen_reg_rtx (Pmode);
8800 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8801 (tree) finally_label, (tree) return_link);
8802 TREE_SIDE_EFFECTS (cleanup) = 1;
8804 /* Start a new binding layer that will keep track of all cleanup
8805 actions to be performed. */
8806 expand_start_bindings (2);
8808 target_temp_slot_level = temp_slot_level;
8810 expand_decl_cleanup (NULL_TREE, cleanup);
8811 op0 = expand_expr (try_block, target, tmode, modifier);
8813 preserve_temp_slots (op0);
8814 expand_end_bindings (NULL_TREE, 0, 0);
8815 emit_jump (done_label);
8816 emit_label (finally_label);
8817 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8818 emit_indirect_jump (return_link);
8819 emit_label (done_label);
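/* The control flow generated above for "try { B } finally { F }" is
   roughly

       expand B (with F registered as a cleanup; every exit path sets
                 return_link and jumps to finally_label)
       goto done;
     finally_label:
       expand F
       goto *return_link;      // resume wherever we came from
     done: ;

   shown here only as a sketch of the emitted structure. */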
8823 case GOTO_SUBROUTINE_EXPR:
8825 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8826 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8827 rtx return_address = gen_label_rtx ();
8828 emit_move_insn (return_link,
8829 gen_rtx_LABEL_REF (Pmode, return_address));
8831 emit_label (return_address);
8836 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8839 return get_exception_pointer (cfun);
8842 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8845 /* Here to do an ordinary binary operator, generating an instruction
8846 from the optab already placed in `this_optab'. */
8848 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8850 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8851 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8853 temp = expand_binop (mode, this_optab, op0, op1, target,
8854 unsignedp, OPTAB_LIB_WIDEN);
8860 /* Similar to expand_expr, except that we don't specify a target, target
8861 mode, or modifier and we return the alignment of the inner type. This is
8862 used in cases where it is not necessary to align the result to the
8863 alignment of its type as long as we know the alignment of the result, for
8864 example for comparisons of BLKmode values. */
8867 expand_expr_unaligned (exp, palign)
8869 unsigned int *palign;
8872 tree type = TREE_TYPE (exp);
8873 register enum machine_mode mode = TYPE_MODE (type);
8875 /* Default the alignment we return to that of the type. */
8876 *palign = TYPE_ALIGN (type);
8878 /* The only case in which we do anything special is if the resulting mode is BLKmode. */
8880 if (mode != BLKmode)
8881 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8883 switch (TREE_CODE (exp))
8887 case NON_LVALUE_EXPR:
8888 /* Conversions between BLKmode values don't change the underlying
8889 alignment or value. */
8890 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8891 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8895 /* Much of the code for this case is copied directly from expand_expr.
8896 We need to duplicate it here because we will do something different
8897 in the fall-through case, so we need to handle the same exceptions it does. */
8900 tree array = TREE_OPERAND (exp, 0);
8901 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8902 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8903 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8906 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8909 /* Optimize the special-case of a zero lower bound.
8911 We convert the low_bound to sizetype to avoid some problems
8912 with constant folding. (E.g. suppose the lower bound is 1,
8913 and its mode is QI. Without the conversion, (ARRAY
8914 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8915 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8917 if (! integer_zerop (low_bound))
8918 index = size_diffop (index, convert (sizetype, low_bound));
8920 /* If this is a constant index into a constant array,
8921 just get the value from the array. Handle both the cases when
8922 we have an explicit constructor and when our operand is a variable
8923 that was declared const. */
8925 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8926 && host_integerp (index, 0)
8927 && 0 > compare_tree_int (index,
8928 list_length (CONSTRUCTOR_ELTS
8929 (TREE_OPERAND (exp, 0)))))
8933 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8934 i = tree_low_cst (index, 0);
8935 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8939 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8942 else if (optimize >= 1
8943 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8944 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8945 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8947 if (TREE_CODE (index) == INTEGER_CST)
8949 tree init = DECL_INITIAL (array);
8951 if (TREE_CODE (init) == CONSTRUCTOR)
8955 for (elem = CONSTRUCTOR_ELTS (init);
8956 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8957 elem = TREE_CHAIN (elem))
8961 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
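/* For example, with "static const int primes[] = { 2, 3, 5 };" the
   reference primes[1] folds to the constant 3 here, through the const
   VAR_DECL's DECL_INITIAL constructor. */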
8971 case ARRAY_RANGE_REF:
8972 /* If the operand is a CONSTRUCTOR, we can just extract the
8973 appropriate field if it is present. Don't do this if we have
8974 already written the data since we want to refer to that copy
8975 and varasm.c assumes that's what we'll do. */
8976 if (TREE_CODE (exp) == COMPONENT_REF
8977 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8978 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8982 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8983 elt = TREE_CHAIN (elt))
8984 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8985 /* Note that unlike the case in expand_expr, we know this is
8986 BLKmode and hence not an integer. */
8987 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8991 enum machine_mode mode1;
8992 HOST_WIDE_INT bitsize, bitpos;
8995 unsigned int alignment;
8997 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8998 &mode1, &unsignedp, &volatilep,
9001 /* If we got back the original object, something is wrong. Perhaps
9002 we are evaluating an expression too early. In any event, don't
9003 infinitely recurse. */
9007 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9009 /* If this is a constant, put it into a register if it is a
9010 legitimate constant and OFFSET is 0; otherwise put it into memory. */
9011 if (CONSTANT_P (op0))
9013 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
9015 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
9017 op0 = force_reg (inner_mode, op0);
9019 op0 = validize_mem (force_const_mem (inner_mode, op0));
9024 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
9026 /* If this object is in a register, put it into memory.
9027 This case can't occur in C, but can in Ada if we have
9028 unchecked conversion of an expression from a scalar type to
9029 an array or record type. */
9030 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9031 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
9033 tree nt = build_qualified_type (TREE_TYPE (tem),
9034 (TYPE_QUALS (TREE_TYPE (tem))
9035 | TYPE_QUAL_CONST));
9036 rtx memloc = assign_temp (nt, 1, 1, 1);
9038 mark_temp_addr_taken (memloc);
9039 emit_move_insn (memloc, op0);
9043 if (GET_CODE (op0) != MEM)
9046 if (GET_MODE (offset_rtx) != ptr_mode)
9048 #ifdef POINTERS_EXTEND_UNSIGNED
9049 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
9051 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
9055 op0 = change_address (op0, VOIDmode,
9056 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
9057 force_reg (ptr_mode,
9061 /* Don't forget about volatility even if this is a bitfield. */
9062 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
9064 op0 = copy_rtx (op0);
9065 MEM_VOLATILE_P (op0) = 1;
9068 /* Check the access. */
9069 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
9074 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
9075 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
9077 /* Check the access right of the pointer. */
9078 in_check_memory_usage = 1;
9079 if (size > BITS_PER_UNIT)
9080 emit_library_call (chkr_check_addr_libfunc,
9081 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
9082 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
9083 TYPE_MODE (sizetype),
9084 GEN_INT (MEMORY_USE_RO),
9085 TYPE_MODE (integer_type_node));
9086 in_check_memory_usage = 0;
9089 /* In cases where an aligned union has an unaligned object
9090 as a field, we might be extracting a BLKmode value from
9091 an integer-mode (e.g., SImode) object. Handle this case
9092 by doing the extract into an object as wide as the field
9093 (which we know to be the width of a basic mode), then
9094 storing into memory, and changing the mode to BLKmode.
9095 If we ultimately want the address (EXPAND_CONST_ADDRESS or
9096 EXPAND_INITIALIZER), then we must not copy to a temporary. */
9097 if (mode1 == VOIDmode
9098 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9099 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
9100 && (TYPE_ALIGN (type) > alignment
9101 || bitpos % TYPE_ALIGN (type) != 0)))
9103 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9105 if (ext_mode == BLKmode)
9107 /* In this case, BITPOS must start at a byte boundary. */
9108 if (GET_CODE (op0) != MEM
9109 || bitpos % BITS_PER_UNIT != 0)
9112 op0 = change_address (op0, VOIDmode,
9113 plus_constant (XEXP (op0, 0),
9114 bitpos / BITS_PER_UNIT));
9118 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
9120 rtx new = assign_temp (nt, 0, 1, 1);
9122 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
9123 unsignedp, NULL_RTX, ext_mode,
9124 ext_mode, alignment,
9125 int_size_in_bytes (TREE_TYPE (tem)));
9127 /* If the result is a record type and BITSIZE is narrower than
9128 the mode of OP0, an integral mode, and this is a big endian
9129 machine, we must put the field into the high-order bits. */
9130 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9131 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9132 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
9133 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9134 size_int (GET_MODE_BITSIZE
9139 emit_move_insn (new, op0);
9140 op0 = copy_rtx (new);
9141 PUT_MODE (op0, BLKmode);
9145 /* Get a reference to just this component. */
9146 op0 = change_address (op0, mode1,
9147 plus_constant (XEXP (op0, 0),
9148 (bitpos / BITS_PER_UNIT)));
9150 MEM_ALIAS_SET (op0) = get_alias_set (exp);
9152 /* Adjust the alignment in case the bit position is not
9153 a multiple of the alignment of the inner object. */
9154 while (bitpos % alignment != 0) alignment >>= 1;
9157 if (GET_CODE (XEXP (op0, 0)) == REG)
9158 mark_reg_pointer (XEXP (op0, 0), alignment);
9160 MEM_IN_STRUCT_P (op0) = 1;
9161 MEM_VOLATILE_P (op0) |= volatilep;
9163 *palign = alignment;
9172 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9175 /* Return the tree node if ARG corresponds to a string constant, or zero
9176 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9177 in bytes within the string that ARG is accessing. The type of the
9178 offset will be `sizetype'. */
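/* For example, for the argument "hello" + 2 (a PLUS_EXPR of the string's
   address and 2) we return the STRING_CST for "hello" and set *PTR_OFFSET
   to 2. */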
9181 string_constant (arg, ptr_offset)
9187 if (TREE_CODE (arg) == ADDR_EXPR
9188 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9190 *ptr_offset = size_zero_node;
9191 return TREE_OPERAND (arg, 0);
9193 else if (TREE_CODE (arg) == PLUS_EXPR)
9195 tree arg0 = TREE_OPERAND (arg, 0);
9196 tree arg1 = TREE_OPERAND (arg, 1);
9201 if (TREE_CODE (arg0) == ADDR_EXPR
9202 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9204 *ptr_offset = convert (sizetype, arg1);
9205 return TREE_OPERAND (arg0, 0);
9207 else if (TREE_CODE (arg1) == ADDR_EXPR
9208 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9210 *ptr_offset = convert (sizetype, arg0);
9211 return TREE_OPERAND (arg1, 0);
9218 /* Expand code for a post- or pre- increment or decrement
9219 and return the RTX for the result.
9220 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9223 expand_increment (exp, post, ignore)
9227 register rtx op0, op1;
9228 register rtx temp, value;
9229 register tree incremented = TREE_OPERAND (exp, 0);
9230 optab this_optab = add_optab;
9232 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9233 int op0_is_copy = 0;
9234 int single_insn = 0;
9235 /* 1 means we can't store into OP0 directly,
9236 because it is a subreg narrower than a word,
9237 and we don't dare clobber the rest of the word. */
9240 /* Stabilize any component ref that might need to be
9241 evaluated more than once below. */
9243 || TREE_CODE (incremented) == BIT_FIELD_REF
9244 || (TREE_CODE (incremented) == COMPONENT_REF
9245 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9246 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9247 incremented = stabilize_reference (incremented);
9248 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9249 ones into save exprs so that they don't accidentally get evaluated
9250 more than once by the code below. */
9251 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9252 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9253 incremented = save_expr (incremented);
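/* E.g. the C++ expression "++ ++x" reaches this point as a
   PREINCREMENT_EXPR whose operand is itself a PREINCREMENT_EXPR; without
   the save_expr the inner increment could be expanded twice. */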
9255 /* Compute the operands as RTX.
9256 Note whether OP0 is the actual lvalue or a copy of it:
9257 I believe it is a copy iff it is a register or subreg
9258 and insns were generated in computing it. */
9260 temp = get_last_insn ();
9261 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9263 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9264 in place but instead must do sign- or zero-extension during assignment,
9265 so we copy it into a new register and let the code below use it as a copy.
9268 Note that we can safely modify this SUBREG since it is known not to be
9269 shared (it was made by the expand_expr call above). */
9271 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9274 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9278 else if (GET_CODE (op0) == SUBREG
9279 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9281 /* We cannot increment this SUBREG in place. If we are
9282 post-incrementing, get a copy of the old value. Otherwise,
9283 just mark that we cannot increment in place. */
9285 op0 = copy_to_reg (op0);
9290 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9291 && temp != get_last_insn ());
9292 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9293 EXPAND_MEMORY_USE_BAD);
9295 /* Decide whether incrementing or decrementing. */
9296 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9297 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9298 this_optab = sub_optab;
9300 /* Convert decrement by a constant into a negative increment. */
9301 if (this_optab == sub_optab
9302 && GET_CODE (op1) == CONST_INT)
9304 op1 = GEN_INT (-INTVAL (op1));
9305 this_optab = add_optab;
9308 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9309 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9311 /* For a preincrement, see if we can do this with a single instruction. */
9314 icode = (int) this_optab->handlers[(int) mode].insn_code;
9315 if (icode != (int) CODE_FOR_nothing
9316 /* Make sure that OP0 is valid for operands 0 and 1
9317 of the insn we want to queue. */
9318 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9319 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9320 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9324 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9325 then we cannot just increment OP0. We must therefore contrive to
9326 increment the original value. Then, for postincrement, we can return
9327 OP0 since it is a copy of the old value. For preincrement, expand here
9328 unless we can do it with a single insn.
9330 Likewise if storing directly into OP0 would clobber high bits
9331 we need to preserve (bad_subreg). */
9332 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9334 /* This is the easiest way to increment the value wherever it is.
9335 Problems with multiple evaluation of INCREMENTED are prevented
9336 because either (1) it is a component_ref or preincrement,
9337 in which case it was stabilized above, or (2) it is an array_ref
9338 with constant index in an array in a register, which is
9339 safe to reevaluate. */
9340 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9341 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9342 ? MINUS_EXPR : PLUS_EXPR),
9345 TREE_OPERAND (exp, 1));
9347 while (TREE_CODE (incremented) == NOP_EXPR
9348 || TREE_CODE (incremented) == CONVERT_EXPR)
9350 newexp = convert (TREE_TYPE (incremented), newexp);
9351 incremented = TREE_OPERAND (incremented, 0);
9354 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9355 return post ? op0 : temp;
9360 /* We have a true reference to the value in OP0.
9361 If there is an insn to add or subtract in this mode, queue it.
9362 Queueing the increment insn avoids the register shuffling
9363 that often results if we must increment now and first save
9364 the old value for subsequent use. */
9366 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9367 op0 = stabilize (op0);
9370 icode = (int) this_optab->handlers[(int) mode].insn_code;
9371 if (icode != (int) CODE_FOR_nothing
9372 /* Make sure that OP0 is valid for operands 0 and 1
9373 of the insn we want to queue. */
9374 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9375 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9377 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9378 op1 = force_reg (mode, op1);
9380 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9382 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9384 rtx addr = (general_operand (XEXP (op0, 0), mode)
9385 ? force_reg (Pmode, XEXP (op0, 0))
9386 : copy_to_reg (XEXP (op0, 0)));
9389 op0 = change_address (op0, VOIDmode, addr);
9390 temp = force_reg (GET_MODE (op0), op0);
9391 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9392 op1 = force_reg (mode, op1);
9394 /* The increment queue is LIFO, thus we have to `queue'
9395 the instructions in reverse order. */
9396 enqueue_insn (op0, gen_move_insn (op0, temp));
9397 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9402 /* Preincrement, or we can't increment with one simple insn. */
9404 /* Save a copy of the value before inc or dec, to return it later. */
9405 temp = value = copy_to_reg (op0);
9407 /* Arrange to return the incremented value. */
9408 /* Copy the rtx because expand_binop will protect from the queue,
9409 and the results of that would be invalid for us to return
9410 if our caller does emit_queue before using our result. */
9411 temp = copy_rtx (value = op0);
9413 /* Increment however we can. */
9414 op1 = expand_binop (mode, this_optab, value, op1,
9415 current_function_check_memory_usage ? NULL_RTX : op0,
9416 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9417 /* Make sure the value is stored into OP0. */
9419 emit_move_insn (op0, op1);
9424 /* At the start of a function, record that we have no previously-pushed
9425 arguments waiting to be popped. */
9428 init_pending_stack_adjust ()
9430 pending_stack_adjust = 0;
9433 /* When exiting from function, if safe, clear out any pending stack adjust
9434 so the adjustment won't get done.
9436 Note, if the current function calls alloca, then it must have a
9437 frame pointer regardless of the value of flag_omit_frame_pointer. */
9440 clear_pending_stack_adjust ()
9442 #ifdef EXIT_IGNORE_STACK
9444 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9445 && EXIT_IGNORE_STACK
9446 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9447 && ! flag_inline_functions)
9449 stack_pointer_delta -= pending_stack_adjust,
9450 pending_stack_adjust = 0;
9455 /* Pop any previously-pushed arguments that have not been popped yet. */
9458 do_pending_stack_adjust ()
9460 if (inhibit_defer_pop == 0)
9462 if (pending_stack_adjust != 0)
9463 adjust_stack (GEN_INT (pending_stack_adjust));
9464 pending_stack_adjust = 0;
9468 /* Expand conditional expressions. */
9470 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9471 LABEL is an rtx of code CODE_LABEL, in this function and all the functions here. */
9475 jumpifnot (exp, label)
9479 do_jump (exp, label, NULL_RTX);
9482 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9489 do_jump (exp, NULL_RTX, label);
9492 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9493 the result is zero, or IF_TRUE_LABEL if the result is one.
9494 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9495 meaning fall through in that case.
9497 do_jump always does any pending stack adjust except when it does not
9498 actually perform a jump. An example where there is no jump
9499 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9501 This function is responsible for optimizing cases such as
9502 &&, || and comparison operators in EXP. */
9505 do_jump (exp, if_false_label, if_true_label)
9507 rtx if_false_label, if_true_label;
9509 register enum tree_code code = TREE_CODE (exp);
9510 /* Some cases need to create a label to jump to
9511 in order to properly fall through.
9512 These cases set DROP_THROUGH_LABEL nonzero. */
9513 rtx drop_through_label = 0;
9517 enum machine_mode mode;
9519 #ifdef MAX_INTEGER_COMPUTATION_MODE
9520 check_max_integer_computation_mode (exp);
9531 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9537 /* This is not true with #pragma weak */
9539 /* The address of something can never be zero. */
9541 emit_jump (if_true_label);
9546 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9547 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9548 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9549 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9552 /* If we are narrowing the operand, we have to do the compare in the narrower mode. */
9554 if ((TYPE_PRECISION (TREE_TYPE (exp))
9555 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9557 case NON_LVALUE_EXPR:
9558 case REFERENCE_EXPR:
9563 /* These cannot change zero->non-zero or vice versa. */
9564 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9567 case WITH_RECORD_EXPR:
9568 /* Put the object on the placeholder list, recurse through our first
9569 operand, and pop the list. */
9570 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9572 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9573 placeholder_list = TREE_CHAIN (placeholder_list);
9577 /* This is never less insns than evaluating the PLUS_EXPR followed by
9578 a test and can be longer if the test is eliminated. */
9580 /* Reduce to minus. */
9581 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9582 TREE_OPERAND (exp, 0),
9583 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9584 TREE_OPERAND (exp, 1))));
9585 /* Process as MINUS. */
9589 /* Non-zero iff operands of minus differ. */
9590 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9591 TREE_OPERAND (exp, 0),
9592 TREE_OPERAND (exp, 1)),
9593 NE, NE, if_false_label, if_true_label);
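/* I.e. "if (a - b)" is tested exactly as "if (a != b)" would be. */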
9597 /* If we are AND'ing with a small constant, do this comparison in the
9598 smallest type that fits. If the machine doesn't have comparisons
9599 that small, it will be converted back to the wider comparison.
9600 This helps if we are testing the sign bit of a narrower object.
9601 combine can't do this for us because it can't know whether a
9602 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9604 if (! SLOW_BYTE_ACCESS
9605 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9606 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9607 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9608 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9609 && (type = type_for_mode (mode, 1)) != 0
9610 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9611 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9612 != CODE_FOR_nothing))
9614 do_jump (convert (type, exp), if_false_label, if_true_label);
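/* For instance, with 32-bit ints on a target that has QImode compares,
   "if (x & 0x80)" can be tested as the byte-sized

       if ((unsigned char) (x & 0x80)) ...

   which is what the convert call above arranges. */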
9619 case TRUTH_NOT_EXPR:
9620 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9623 case TRUTH_ANDIF_EXPR:
9624 if (if_false_label == 0)
9625 if_false_label = drop_through_label = gen_label_rtx ();
9626 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9627 start_cleanup_deferral ();
9628 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9629 end_cleanup_deferral ();
9632 case TRUTH_ORIF_EXPR:
9633 if (if_true_label == 0)
9634 if_true_label = drop_through_label = gen_label_rtx ();
9635 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9636 start_cleanup_deferral ();
9637 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9638 end_cleanup_deferral ();
9643 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9644 preserve_temp_slots (NULL_RTX);
9648 do_pending_stack_adjust ();
9649 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9655 case ARRAY_RANGE_REF:
9657 HOST_WIDE_INT bitsize, bitpos;
9659 enum machine_mode mode;
9663 unsigned int alignment;
9665 /* Get description of this reference. We don't actually care
9666 about the underlying object here. */
9667 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9668 &unsignedp, &volatilep, &alignment);
9670 type = type_for_size (bitsize, unsignedp);
9671 if (! SLOW_BYTE_ACCESS
9672 && type != 0 && bitsize >= 0
9673 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9674 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9675 != CODE_FOR_nothing))
9677 do_jump (convert (type, exp), if_false_label, if_true_label);
9684 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9685 if (integer_onep (TREE_OPERAND (exp, 1))
9686 && integer_zerop (TREE_OPERAND (exp, 2)))
9687 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9689 else if (integer_zerop (TREE_OPERAND (exp, 1))
9690 && integer_onep (TREE_OPERAND (exp, 2)))
9691 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9695 register rtx label1 = gen_label_rtx ();
9696 drop_through_label = gen_label_rtx ();
9698 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9700 start_cleanup_deferral ();
9701 /* Now the THEN-expression. */
9702 do_jump (TREE_OPERAND (exp, 1),
9703 if_false_label ? if_false_label : drop_through_label,
9704 if_true_label ? if_true_label : drop_through_label);
9705 /* In case the do_jump just above never jumps. */
9706 do_pending_stack_adjust ();
9707 emit_label (label1);
9709 /* Now the ELSE-expression. */
9710 do_jump (TREE_OPERAND (exp, 2),
9711 if_false_label ? if_false_label : drop_through_label,
9712 if_true_label ? if_true_label : drop_through_label);
9713 end_cleanup_deferral ();
9719 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9721 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9722 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9724 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9725 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9728 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9729 fold (build (EQ_EXPR, TREE_TYPE (exp),
9730 fold (build1 (REALPART_EXPR,
9731 TREE_TYPE (inner_type),
9733 fold (build1 (REALPART_EXPR,
9734 TREE_TYPE (inner_type),
9736 fold (build (EQ_EXPR, TREE_TYPE (exp),
9737 fold (build1 (IMAGPART_EXPR,
9738 TREE_TYPE (inner_type),
9740 fold (build1 (IMAGPART_EXPR,
9741 TREE_TYPE (inner_type),
9743 if_false_label, if_true_label);
9746 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9747 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9749 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9750 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9751 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9753 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9759 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9761 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9762 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9764 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9765 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9768 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9769 fold (build (NE_EXPR, TREE_TYPE (exp),
9770 fold (build1 (REALPART_EXPR,
9771 TREE_TYPE (inner_type),
9773 fold (build1 (REALPART_EXPR,
9774 TREE_TYPE (inner_type),
9776 fold (build (NE_EXPR, TREE_TYPE (exp),
9777 fold (build1 (IMAGPART_EXPR,
9778 TREE_TYPE (inner_type),
9780 fold (build1 (IMAGPART_EXPR,
9781 TREE_TYPE (inner_type),
9783 if_false_label, if_true_label);
9786 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9787 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9789 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9790 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9791 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9793 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9798 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9799 if (GET_MODE_CLASS (mode) == MODE_INT
9800 && ! can_compare_p (LT, mode, ccp_jump))
9801 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9803 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9807 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9808 if (GET_MODE_CLASS (mode) == MODE_INT
9809 && ! can_compare_p (LE, mode, ccp_jump))
9810 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9812 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9816 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9817 if (GET_MODE_CLASS (mode) == MODE_INT
9818 && ! can_compare_p (GT, mode, ccp_jump))
9819 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9821 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9825 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9826 if (GET_MODE_CLASS (mode) == MODE_INT
9827 && ! can_compare_p (GE, mode, ccp_jump))
9828 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9830 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9833 case UNORDERED_EXPR:
9836 enum rtx_code cmp, rcmp;
9839 if (code == UNORDERED_EXPR)
9840 cmp = UNORDERED, rcmp = ORDERED;
9842 cmp = ORDERED, rcmp = UNORDERED;
9843 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9846 if (! can_compare_p (cmp, mode, ccp_jump)
9847 && (can_compare_p (rcmp, mode, ccp_jump)
9848 /* If the target doesn't provide either UNORDERED or ORDERED
9849 comparisons, canonicalize on UNORDERED for the library. */
9850 || rcmp == UNORDERED))
9854 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9856 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9861 enum rtx_code rcode1;
9862 enum tree_code tcode2;
9886 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9887 if (can_compare_p (rcode1, mode, ccp_jump))
9888 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9892 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9893 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9896 /* If the target doesn't support combined unordered
9897 compares, decompose into UNORDERED + comparison. */
9898 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9899 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9900 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9901 do_jump (exp, if_false_label, if_true_label);
9908 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9910 /* This is not needed any more and causes poor code since it causes
9911 comparisons and tests from non-SI objects to have different code sequences. */
9913 /* Copy to register to avoid generating bad insns by cse
9914 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9915 if (!cse_not_expected && GET_CODE (temp) == MEM)
9916 temp = copy_to_reg (temp);
9918 do_pending_stack_adjust ();
9919 /* Do any postincrements in the expression that was tested. */
9922 if (GET_CODE (temp) == CONST_INT
9923 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9924 || GET_CODE (temp) == LABEL_REF)
9926 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9930 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9931 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9932 /* Note swapping the labels gives us not-equal. */
9933 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9934 else if (GET_MODE (temp) != VOIDmode)
9935 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9936 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9937 GET_MODE (temp), NULL_RTX, 0,
9938 if_false_label, if_true_label);
9943 if (drop_through_label)
9945 /* If do_jump produces code that might be jumped around,
9946 do any stack adjusts from that code, before the place
9947 where control merges in. */
9948 do_pending_stack_adjust ();
9949 emit_label (drop_through_label);
9953 /* Given a comparison expression EXP for values too wide to be compared
9954 with one insn, test the comparison and jump to the appropriate label.
9955 The code of EXP is ignored; we always test GT if SWAP is 0,
9956 and LT if SWAP is 1. */
9959 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9962 rtx if_false_label, if_true_label;
9964 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9965 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9966 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9967 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9969 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9972 /* Compare OP0 with OP1, word at a time, in mode MODE.
9973 UNSIGNEDP says to do unsigned comparison.
9974 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9977 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9978 enum machine_mode mode;
9981 rtx if_false_label, if_true_label;
9983 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9984 rtx drop_through_label = 0;
9987 if (! if_true_label || ! if_false_label)
9988 drop_through_label = gen_label_rtx ();
9989 if (! if_true_label)
9990 if_true_label = drop_through_label;
9991 if (! if_false_label)
9992 if_false_label = drop_through_label;
9994 /* Compare a word at a time, high order first. */
9995 for (i = 0; i < nwords; i++)
9997 rtx op0_word, op1_word;
9999 if (WORDS_BIG_ENDIAN)
10001 op0_word = operand_subword_force (op0, i, mode);
10002 op1_word = operand_subword_force (op1, i, mode);
10006 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10007 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10010 /* All but high-order word must be compared as unsigned. */
10011 do_compare_rtx_and_jump (op0_word, op1_word, GT,
10012 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
10013 NULL_RTX, if_true_label);
10015 /* Consider lower words only if these are equal. */
10016 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
10017 NULL_RTX, 0, NULL_RTX, if_false_label);
10020 if (if_false_label)
10021 emit_jump (if_false_label);
10022 if (drop_through_label)
10023 emit_label (drop_through_label);
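/* A standalone sketch of the same word-at-a-time algorithm for a two-word
   unsigned value, using uint64_t halves; illustrative only, not compiler
   code:

       #include <stdint.h>

       static int gt_by_parts (uint64_t hi0, uint64_t lo0,
                               uint64_t hi1, uint64_t lo1)
       {
         if (hi0 > hi1)     // high-order words decide when they differ
           return 1;
         if (hi0 != hi1)
           return 0;
         return lo0 > lo1;  // equal high words: the low words decide
       }

   Equal values fall through to the low-word test and yield false. */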
10026 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10027 with one insn, test the comparison and jump to the appropriate label. */
10030 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10032 rtx if_false_label, if_true_label;
10034 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10035 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10036 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10037 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10039 rtx drop_through_label = 0;
10041 if (! if_false_label)
10042 drop_through_label = if_false_label = gen_label_rtx ();
10044 for (i = 0; i < nwords; i++)
10045 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
10046 operand_subword_force (op1, i, mode),
10047 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10048 word_mode, NULL_RTX, 0, if_false_label,
10052 emit_jump (if_true_label);
10053 if (drop_through_label)
10054 emit_label (drop_through_label);
10057 /* Jump according to whether OP0 is 0.
10058 We assume that OP0 has an integer mode that is too wide
10059 for the available compare insns. */
10062 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10064 rtx if_false_label, if_true_label;
10066 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10069 rtx drop_through_label = 0;
10071 /* The fastest way of doing this comparison on almost any machine is to
10072 "or" all the words and compare the result. If all have to be loaded
10073 from memory and this is a very wide item, it's possible this may
10074 be slower, but that's highly unlikely. */
10076 part = gen_reg_rtx (word_mode);
10077 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10078 for (i = 1; i < nwords && part != 0; i++)
10079 part = expand_binop (word_mode, ior_optab, part,
10080 operand_subword_force (op0, i, GET_MODE (op0)),
10081 part, 1, OPTAB_WIDEN);
  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
                               NULL_RTX, 0, if_false_label, if_true_label);

      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
                             const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
                             if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Generate code for a comparison of OP0 and OP1 with rtx code CODE,
   and set (CC0) according to the result.
   The decision as to signed or unsigned comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     unsigned int align;
{
  register rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();
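
  /* If both operands are constant integers, the comparison folds away
     completely; return the known result as an rtx.  */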
  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}

/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

void
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
                         if_false_label, if_true_label)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     unsigned int align;
     rtx if_false_label, if_true_label;
{
  register rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
        {
          if (if_true_label)
            emit_jump (if_true_label);
        }
      else
        {
          if (if_false_label)
            emit_jump (if_false_label);
        }
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif
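
  /* emit_cmp_and_jump_insns requires a label to branch to when the
     condition holds, so make a throwaway one if the caller supplied
     none.  */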
  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
                           if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}

/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
                     if_true_label)
     register tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  unsigned int align0, align1;
  register rtx op0, op1;
  register tree type;
  register enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
                                                                      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
         case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
          == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
          == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           MIN (align0, align1),
                           if_false_label, if_true_label);
}

/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
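
/* Schematically, that fallback sequence is

	TARGET = 1;			(0 when INVERT is set)
	if (OP0 <code> OP1) goto label;
	TARGET = 0;			(1 when INVERT is set)
     label:
   */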

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;

    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */
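
  /* For example, (x & 8) != 0 becomes (x >> 3) & 1, and with EQ the
     result is then xor'ed with 1: ((x >> 3) & 1) ^ 1.  */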

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (! get_subtarget (subtarget)
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner, 1))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}

/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
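
  /* For instance, for a case range of 10 ... 15, INDEX has already had
     10 subtracted and RANGE is 5; an original value of 7 becomes the huge
     unsigned number (unsigned) -3, so the single GTU test also rejects
     values below the minimum.  */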

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           0, default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);

  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
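
  /* Load the dispatch entry (an address, or an offset when the table is
     PC-relative) out of the jump table.  */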
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

#endif /* HAVE_tablejump */