1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 92-97, 1998 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
30 #include "hard-reg-set.h"
33 #include "insn-flags.h"
34 #include "insn-codes.h"
35 #include "insn-config.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
40 #include "typeclass.h"
44 #define CEIL(x,y) (((x) + (y) - 1) / (y))
46 /* Decide whether a function's arguments should be processed
47 from first to last or from last to first.
49 They should if the stack and args grow in opposite directions, but
50 only if we have push insns. */
54 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
55 #define PUSH_ARGS_REVERSED /* If it's last to first */
60 #ifndef STACK_PUSH_CODE
61 #ifdef STACK_GROWS_DOWNWARD
62 #define STACK_PUSH_CODE PRE_DEC
64 #define STACK_PUSH_CODE PRE_INC
68 /* Assume that case vectors are not pc-relative. */
69 #ifndef CASE_VECTOR_PC_RELATIVE
70 #define CASE_VECTOR_PC_RELATIVE 0
/* File-scope state for expression expansion.  NOTE(review): this chunk is a
   lossy extract — the declaration that the first comment below documents
   (presumably the cse/volatile flag) is not visible here; confirm against
   the full file.  */

73 /* If this is nonzero, we do not bother generating VOLATILE
74 around volatile memory references, and we are willing to
75 output indirect addresses. If cse is to follow, we reject
76 indirect addresses so a useful potential cse is generated;
77 if it is used only once, instruction combination will produce
78 the same indirect address eventually. */
81 /* Nonzero to generate code for all the subroutines within an
82 expression before generating the upper levels of the expression.
83 Nowadays this is never zero. */
84 int do_preexpand_calls = 1;
86 /* Number of units that we should eventually pop off the stack.
87 These are the arguments to function calls that have already returned. */
88 int pending_stack_adjust;
90 /* Nonzero means stack pops must not be deferred, and deferred stack
91 pops must not be output. It is nonzero inside a function call,
92 inside a conditional expression, inside a statement expression,
93 and in other cases as well. */
94 int inhibit_defer_pop;
96 /* Nonzero means __builtin_saveregs has already been done in this function.
97 The value is the pseudoreg containing the value __builtin_saveregs
   returned (comment completed by reviewer; the original continuation line
   is missing from this extract). */
99 static rtx saveregs_value;
101 /* Similarly for __builtin_apply_args. */
102 static rtx apply_args_value;
104 /* Nonzero if the machine description has been fixed to accept
105 CONSTANT_P_RTX patterns. We will emit a warning and continue
106 if we find we must actually use such a beast. */
107 static int can_handle_constant_p;
109 /* Don't check memory usage, since code is being emitted to check a memory
110 usage. Used when current_function_check_memory_usage is true, to avoid
111 infinite recursion. */
112 static int in_check_memory_usage;
114 /* Postincrements that still need to be expanded.  Head of the chain of
   QUEUED rtxs built by enqueue_insn and flushed by emit_queue below. */
115 static rtx pending_chain;
/* NOTE(review): the member lists of both structures below are truncated in
   this extract — only one field of move_by_pieces is visible.  Do not rely
   on this view for the structure layouts; consult the full file.  */

117 /* This structure is used by move_by_pieces to describe the move to
119 struct move_by_pieces
   /* Nonzero (+1/-1 presumably) if the source address is auto-incremented /
      auto-decremented explicitly — TODO confirm against the full struct.  */
129 int explicit_inc_from;
136 /* This structure is used by clear_by_pieces to describe the clear to
139 struct clear_by_pieces
151 extern struct obstack permanent_obstack;
152 extern rtx arg_pointer_save_area;
/* Forward declarations for the file-local helpers.  PROTO is the
   traditional-C/ANSI-C compatibility macro for prototypes.  NOTE(review):
   a few multi-line prototypes (store_constructor_field, store_field,
   expand_builtin) are missing continuation lines in this extract.  */

154 static rtx get_push_address PROTO ((int));
156 static rtx enqueue_insn PROTO((rtx, rtx));
157 static int queued_subexp_p PROTO((rtx));
158 static void init_queue PROTO((void));
159 static int move_by_pieces_ninsns PROTO((unsigned int, int));
160 static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
161 struct move_by_pieces *));
162 static void clear_by_pieces PROTO((rtx, int, int));
163 static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
164 struct clear_by_pieces *));
165 static int is_zeros_p PROTO((tree));
166 static int mostly_zeros_p PROTO((tree));
167 static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
169 static void store_constructor PROTO((tree, rtx, int));
170 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
171 enum machine_mode, int, int,
173 static enum memory_use_mode
174 get_memory_usage_from_modifier PROTO((enum expand_modifier));
175 static tree save_noncopied_parts PROTO((tree, tree));
176 static tree init_noncopied_parts PROTO((tree, tree));
177 static int safe_from_p PROTO((rtx, tree, int));
178 static int fixed_type_p PROTO((tree));
179 static rtx var_rtx PROTO((tree));
180 static int get_pointer_alignment PROTO((tree, unsigned));
181 static tree string_constant PROTO((tree, tree *));
182 static tree c_strlen PROTO((tree));
183 static rtx get_memory_rtx PROTO((tree));
184 static rtx expand_builtin PROTO((tree, rtx, rtx,
185 enum machine_mode, int));
186 static int apply_args_size PROTO((void));
187 static int apply_result_size PROTO((void));
188 static rtx result_vector PROTO((int, rtx));
189 static rtx expand_builtin_apply_args PROTO((void));
190 static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
191 static void expand_builtin_return PROTO((rtx));
192 static rtx expand_increment PROTO((tree, int, int));
193 static void preexpand_calls PROTO((tree));
194 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
195 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
196 static void do_jump_for_compare PROTO((rtx, rtx, rtx));
197 static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
198 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
200 /* Record for each mode whether we can move a register directly to or
201 from an object of that mode in memory. If we can't, we won't try
202 to use that mode directly when accessing a field of that mode.
   These tables are filled in by init_expr_once below.  */
204 static char direct_load[NUM_MACHINE_MODES];
205 static char direct_store[NUM_MACHINE_MODES];
207 /* If a memory-to-memory move would take MOVE_RATIO or more simple
208 move-instruction sequences, we will do a movstr or libcall instead. */
211 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
214 /* If we are optimizing for space (-Os), cut down the default move ratio */
215 #define MOVE_RATIO (optimize_size ? 3 : 15)
219 /* This array records the insn_code of insns to perform block moves. */
220 enum insn_code movstr_optab[NUM_MACHINE_MODES];
222 /* This array records the insn_code of insns to perform block clears. */
223 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
225 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
227 #ifndef SLOW_UNALIGNED_ACCESS
228 #define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
231 /* Register mappings for target machines without register windows.
   By default a register keeps the same number on both sides of a call.
   NOTE(review): the matching #endif lines are not visible in this extract. */
232 #ifndef INCOMING_REGNO
233 #define INCOMING_REGNO(OUT) (OUT)
235 #ifndef OUTGOING_REGNO
236 #define OUTGOING_REGNO(IN) (IN)
239 /* This is run once per compilation to set up which modes can be used
240 directly in memory and to initialize the block move optab.
   NOTE(review): the function header (presumably `void init_expr_once ()`)
   and several body lines are missing from this extract; comments below
   describe only what is visible.  */
246 enum machine_mode mode;
253 /* Since we are on the permanent obstack, we must be sure we save this
254 spot AFTER we call start_sequence, since it will reuse the rtl it
256 free_point = (char *) oballoc (0);
258 /* Try indexing by frame ptr and try by stack ptr.
259 It is known that on the Convex the stack ptr isn't a valid index.
260 With luck, one or the other is valid on any machine. */
261 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
262 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
   /* A scratch (set reg mem) insn whose operands are rewritten in place
      below and re-fed to recog to probe the machine description.  */
264 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
265 pat = PATTERN (insn);
267 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
268 mode = (enum machine_mode) ((int) mode + 1))
273 direct_load[(int) mode] = direct_store[(int) mode] = 0;
274 PUT_MODE (mem, mode);
275 PUT_MODE (mem1, mode);
277 /* See if there is some register that can be used in this mode and
278 directly loaded or stored from memory. */
280 if (mode != VOIDmode && mode != BLKmode)
281 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
282 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
285 if (! HARD_REGNO_MODE_OK (regno, mode))
288 reg = gen_rtx_REG (mode, regno);
   /* Probe loads from both candidate addresses (stack ptr and frame ptr):
      either one recognized marks the mode directly loadable.  */
291 SET_DEST (pat) = reg;
292 if (recog (pat, insn, &num_clobbers) >= 0)
293 direct_load[(int) mode] = 1;
295 SET_SRC (pat) = mem1;
296 SET_DEST (pat) = reg;
297 if (recog (pat, insn, &num_clobbers) >= 0)
298 direct_load[(int) mode] = 1;
   /* Likewise probe stores to both addresses.  */
301 SET_DEST (pat) = mem;
302 if (recog (pat, insn, &num_clobbers) >= 0)
303 direct_store[(int) mode] = 1;
306 SET_DEST (pat) = mem1;
307 if (recog (pat, insn, &num_clobbers) >= 0)
308 direct_store[(int) mode] = 1;
312 /* Find out if CONSTANT_P_RTX is accepted. */
313 SET_DEST (pat) = gen_rtx_REG (TYPE_MODE (integer_type_node),
314 FIRST_PSEUDO_REGISTER);
315 SET_SRC (pat) = gen_rtx_CONSTANT_P_RTX (TYPE_MODE (integer_type_node),
317 if (recog (pat, insn, &num_clobbers) >= 0)
318 can_handle_constant_p = 1;
324 /* This is run at the start of compiling a function.
   Resets the per-function expansion state declared at the top of the file.
   NOTE(review): function headers and some assignments (e.g. to
   saveregs_value) are missing from this extract.  */
331 pending_stack_adjust = 0;
332 inhibit_defer_pop = 0;
334 apply_args_value = 0;
338 /* Save all variables describing the current status into the structure *P.
339 This is used before starting a nested function.  After saving, each
   field is reset to its initial (empty) state for the nested function.  */
345 p->pending_chain = pending_chain;
346 p->pending_stack_adjust = pending_stack_adjust;
347 p->inhibit_defer_pop = inhibit_defer_pop;
348 p->saveregs_value = saveregs_value;
349 p->apply_args_value = apply_args_value;
350 p->forced_labels = forced_labels;
   /* Reset state for the nested function being compiled.  */
352 pending_chain = NULL_RTX;
353 pending_stack_adjust = 0;
354 inhibit_defer_pop = 0;
356 apply_args_value = 0;
360 /* Restore all variables describing the current status from the structure *P.
361 This is used after a nested function.  Exact mirror of the save above.  */
364 restore_expr_status (p)
367 pending_chain = p->pending_chain;
368 pending_stack_adjust = p->pending_stack_adjust;
369 inhibit_defer_pop = p->inhibit_defer_pop;
370 saveregs_value = p->saveregs_value;
371 apply_args_value = p->apply_args_value;
372 forced_labels = p->forced_labels;
375 /* Manage the queue of increment instructions to be output
376 for POSTINCREMENT_EXPR expressions, etc. */
378 /* Queue up to increment (or change) VAR later. BODY says how:
379 BODY should be the same thing you would pass to emit_insn
380 to increment right away. It will go to emit_insn later on.
382 The value is a QUEUED expression to be used in place of VAR
383 where you want to guarantee the pre-incrementation value of VAR. */
386 enqueue_insn (var, body)
   /* Push a new QUEUED rtx on the head of pending_chain; QUEUED_INSN and
      QUEUED_COPY slots start as NULL_RTX and are filled in lazily.  */
389 pending_chain = gen_rtx_QUEUED (GET_MODE (var),
390 var, NULL_RTX, NULL_RTX, body,
392 return pending_chain;
395 /* Use protect_from_queue to convert a QUEUED expression
396 into something that you can put immediately into an instruction.
397 If the queued incrementation has not happened yet,
398 protect_from_queue returns the variable itself.
399 If the incrementation has happened, protect_from_queue returns a temp
400 that contains a copy of the old value of the variable.
402 Any time an rtx which might possibly be a QUEUED is to be put
403 into an instruction, it must be passed through protect_from_queue first.
404 QUEUED expressions are not meaningful in instructions.
406 Do not pass a value through protect_from_queue and then hold
407 on to it for a while before putting it in an instruction!
408 If the queue is flushed in between, incorrect code will result. */
411 protect_from_queue (x, modify)
415 register RTX_CODE code = GET_CODE (x);
417 #if 0 /* A QUEUED can hang around after the queue is forced out. */
418 /* Shortcut for most common case. */
419 if (pending_chain == 0)
425 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
426 use of autoincrement. Make a copy of the contents of the memory
427 location rather than a copy of the address, but not if the value is
428 of mode BLKmode. Don't modify X in place since it might be
430 if (code == MEM && GET_MODE (x) != BLKmode
431 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
433 register rtx y = XEXP (x, 0);
434 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
   /* Preserve the memory attributes of the original MEM on the copy.  */
436 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
437 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
438 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
439 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
443 register rtx temp = gen_reg_rtx (GET_MODE (new));
444 emit_insn_before (gen_move_insn (temp, new),
450 /* Otherwise, recursively protect the subexpressions of all
451 the kinds of rtx's that can contain a QUEUED. */
454 rtx tem = protect_from_queue (XEXP (x, 0), 0);
455 if (tem != XEXP (x, 0))
461 else if (code == PLUS || code == MULT)
463 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
464 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
465 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
474 /* If the increment has not happened, use the variable itself. */
475 if (QUEUED_INSN (x) == 0)
476 return QUEUED_VAR (x);
477 /* If the increment has happened and a pre-increment copy exists,
479 if (QUEUED_COPY (x) != 0)
480 return QUEUED_COPY (x);
481 /* The increment has happened but we haven't set up a pre-increment copy.
482 Set one up now, and use it.  The copy is loaded from the variable just
   before the queued increment insn, so it captures the old value.  */
483 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
484 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
486 return QUEUED_COPY (x);
489 /* Return nonzero if X contains a QUEUED expression:
490 if it contains anything that will be altered by a queued increment.
491 We handle only combinations of MEM, PLUS, MINUS and MULT operators
492 since memory addresses generally contain only those. */
498 register enum rtx_code code = GET_CODE (x);
504 return queued_subexp_p (XEXP (x, 0));
508 return (queued_subexp_p (XEXP (x, 0))
509 || queued_subexp_p (XEXP (x, 1)));
515 /* Perform all the pending incrementations.
   Walks pending_chain, emitting each queued body and recording the emitted
   insn in QUEUED_INSN so protect_from_queue can find it later.  */
521 while ((p = pending_chain))
523 rtx body = QUEUED_BODY (p);
525 if (GET_CODE (body) == SEQUENCE)
   /* For a SEQUENCE, record its first insn as the queued insn.  */
527 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
528 emit_insn (QUEUED_BODY (p));
531 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
532 pending_chain = QUEUED_NEXT (p);
543 /* Copy data from FROM to TO, where the machine modes are not the same.
544 Both modes may be integer, or both may be floating.
545 UNSIGNEDP should be nonzero if FROM is an unsigned type.
546 This causes zero-extension instead of sign-extension.
   NOTE(review): this extract of convert_move is missing many interior
   lines (braces, else arms, abort calls); comments below annotate only
   the visible structure.  */
549 convert_move (to, from, unsignedp)
550 register rtx to, from;
553 enum machine_mode to_mode = GET_MODE (to);
554 enum machine_mode from_mode = GET_MODE (from);
555 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
556 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
560 /* rtx code for making an equivalent value. */
561 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
563 to = protect_from_queue (to, 1);
564 from = protect_from_queue (from, 0);
   /* Mixing float and integer here is invalid; the (elided) branch
      presumably aborts — TODO confirm against the full file.  */
566 if (to_real != from_real)
569 /* If FROM is a SUBREG that indicates that we have already done at least
570 the required extension, strip it. We don't handle such SUBREGs as
573 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
574 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
575 >= GET_MODE_SIZE (to_mode))
576 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
577 from = gen_lowpart (to_mode, from), from_mode = to_mode;
579 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
   /* Trivial case: same mode (or a mode-less constant) — plain move.  */
582 if (to_mode == from_mode
583 || (from_mode == VOIDmode && CONSTANT_P (from)))
585 emit_move_insn (to, from);
   /* Floating-point widening / narrowing.  First try a direct extend
      insn, then the per-mode special truncate insns, then libcalls.  */
593 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
595 /* Try converting directly if the insn is supported. */
596 if ((code = can_extend_p (to_mode, from_mode, 0))
599 emit_unop_insn (code, to, from, UNKNOWN);
604 #ifdef HAVE_trunchfqf2
605 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
607 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
611 #ifdef HAVE_trunctqfqf2
612 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
614 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
618 #ifdef HAVE_truncsfqf2
619 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
621 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
625 #ifdef HAVE_truncdfqf2
626 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
628 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
632 #ifdef HAVE_truncxfqf2
633 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
635 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
639 #ifdef HAVE_trunctfqf2
640 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
642 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
647 #ifdef HAVE_trunctqfhf2
648 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
650 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
654 #ifdef HAVE_truncsfhf2
655 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
657 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
661 #ifdef HAVE_truncdfhf2
662 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
664 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
668 #ifdef HAVE_truncxfhf2
669 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
671 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
675 #ifdef HAVE_trunctfhf2
676 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
678 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
683 #ifdef HAVE_truncsftqf2
684 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
686 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
690 #ifdef HAVE_truncdftqf2
691 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
693 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
697 #ifdef HAVE_truncxftqf2
698 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
700 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
704 #ifdef HAVE_trunctftqf2
705 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
707 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
712 #ifdef HAVE_truncdfsf2
713 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
715 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
719 #ifdef HAVE_truncxfsf2
720 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
722 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
726 #ifdef HAVE_trunctfsf2
727 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
729 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
733 #ifdef HAVE_truncxfdf2
734 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
736 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
740 #ifdef HAVE_trunctfdf2
741 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
743 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
   /* No machine insn worked: fall back to a library call chosen by the
      (from_mode, to_mode) pair.  The switch structure is elided here.  */
755 libcall = extendsfdf2_libfunc;
759 libcall = extendsfxf2_libfunc;
763 libcall = extendsftf2_libfunc;
775 libcall = truncdfsf2_libfunc;
779 libcall = extenddfxf2_libfunc;
783 libcall = extenddftf2_libfunc;
795 libcall = truncxfsf2_libfunc;
799 libcall = truncxfdf2_libfunc;
811 libcall = trunctfsf2_libfunc;
815 libcall = trunctfdf2_libfunc;
827 if (libcall == (rtx) 0)
828 /* This conversion is not implemented yet. */
831 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
833 emit_move_insn (to, value);
837 /* Now both modes are integers. */
839 /* Handle expanding beyond a word. */
840 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
841 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
848 enum machine_mode lowpart_mode;
849 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
851 /* Try converting directly if the insn is supported. */
852 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
855 /* If FROM is a SUBREG, put it into a register. Do this
856 so that we always generate the same set of insns for
857 better cse'ing; if an intermediate assignment occurred,
858 we won't be doing the operation directly on the SUBREG. */
859 if (optimize > 0 && GET_CODE (from) == SUBREG)
860 from = force_reg (from_mode, from);
861 emit_unop_insn (code, to, from, equiv_code);
864 /* Next, try converting via full word. */
865 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
866 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
867 != CODE_FOR_nothing))
869 if (GET_CODE (to) == REG)
870 emit_insn (gen_rtx_CLOBBER (VOIDmode, to))
871 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
872 emit_unop_insn (code, to,
873 gen_lowpart (word_mode, to), equiv_code);
877 /* No special multiword conversion insn; do it by hand. */
880 /* Since we will turn this into a no conflict block, we must ensure
881 that the source does not overlap the target. */
883 if (reg_overlap_mentioned_p (to, from))
884 from = force_reg (from_mode, from);
886 /* Get a copy of FROM widened to a word, if necessary. */
887 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
888 lowpart_mode = word_mode;
890 lowpart_mode = from_mode;
892 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
894 lowpart = gen_lowpart (lowpart_mode, to);
895 emit_move_insn (lowpart, lowfrom);
897 /* Compute the value to put in each remaining word.
   Zero for zero-extension; for sign-extension, replicate the sign bit
   either with a store-flag (slt) insn or an arithmetic right shift.  */
899 fill_value = const0_rtx;
904 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
905 && STORE_FLAG_VALUE == -1)
907 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
909 fill_value = gen_reg_rtx (word_mode);
910 emit_insn (gen_slt (fill_value));
916 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
917 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
919 fill_value = convert_to_mode (word_mode, fill_value, 1);
923 /* Fill the remaining words. */
924 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
926 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
927 rtx subword = operand_subword (to, index, 1, to_mode);
932 if (fill_value != subword)
933 emit_move_insn (subword, fill_value);
936 insns = get_insns ();
   /* Wrap the word-by-word sequence so later passes treat it as one
      extension of FROM into TO.  */
939 emit_no_conflict_block (insns, to, from, NULL_RTX,
940 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
944 /* Truncating multi-word to a word or less. */
945 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
946 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
948 if (!((GET_CODE (from) == MEM
949 && ! MEM_VOLATILE_P (from)
950 && direct_load[(int) to_mode]
951 && ! mode_dependent_address_p (XEXP (from, 0)))
952 || GET_CODE (from) == REG
953 || GET_CODE (from) == SUBREG))
954 from = force_reg (from_mode, from);
955 convert_move (to, gen_lowpart (word_mode, from), 0);
959 /* Handle pointer conversion */ /* SPEE 900220 */
960 if (to_mode == PQImode)
962 if (from_mode != QImode)
963 from = convert_to_mode (QImode, from, unsignedp);
965 #ifdef HAVE_truncqipqi2
966 if (HAVE_truncqipqi2)
968 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
971 #endif /* HAVE_truncqipqi2 */
975 if (from_mode == PQImode)
977 if (to_mode != QImode)
979 from = convert_to_mode (QImode, from, unsignedp);
984 #ifdef HAVE_extendpqiqi2
985 if (HAVE_extendpqiqi2)
987 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
990 #endif /* HAVE_extendpqiqi2 */
995 if (to_mode == PSImode)
997 if (from_mode != SImode)
998 from = convert_to_mode (SImode, from, unsignedp);
1000 #ifdef HAVE_truncsipsi2
1001 if (HAVE_truncsipsi2)
1003 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
1006 #endif /* HAVE_truncsipsi2 */
1010 if (from_mode == PSImode)
1012 if (to_mode != SImode)
1014 from = convert_to_mode (SImode, from, unsignedp);
1019 #ifdef HAVE_extendpsisi2
1020 if (HAVE_extendpsisi2)
1022 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1025 #endif /* HAVE_extendpsisi2 */
1030 if (to_mode == PDImode)
1032 if (from_mode != DImode)
1033 from = convert_to_mode (DImode, from, unsignedp);
1035 #ifdef HAVE_truncdipdi2
1036 if (HAVE_truncdipdi2)
1038 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1041 #endif /* HAVE_truncdipdi2 */
1045 if (from_mode == PDImode)
1047 if (to_mode != DImode)
1049 from = convert_to_mode (DImode, from, unsignedp);
1054 #ifdef HAVE_extendpdidi2
1055 if (HAVE_extendpdidi2)
1057 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1060 #endif /* HAVE_extendpdidi2 */
1065 /* Now follow all the conversions between integers
1066 no more than a word long. */
1068 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1069 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1070 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1071 GET_MODE_BITSIZE (from_mode)))
1073 if (!((GET_CODE (from) == MEM
1074 && ! MEM_VOLATILE_P (from)
1075 && direct_load[(int) to_mode]
1076 && ! mode_dependent_address_p (XEXP (from, 0)))
1077 || GET_CODE (from) == REG
1078 || GET_CODE (from) == SUBREG))
1079 from = force_reg (from_mode, from);
   /* A hard reg may not be valid in the narrower mode; copy to a pseudo.  */
1080 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1081 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1082 from = copy_to_reg (from);
1083 emit_move_insn (to, gen_lowpart (to_mode, from));
1087 /* Handle extension. */
1088 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1090 /* Convert directly if that works. */
1091 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1092 != CODE_FOR_nothing)
1094 emit_unop_insn (code, to, from, equiv_code);
1099 enum machine_mode intermediate;
1103 /* Search for a mode to convert via. */
1104 for (intermediate = from_mode; intermediate != VOIDmode;
1105 intermediate = GET_MODE_WIDER_MODE (intermediate))
1106 if (((can_extend_p (to_mode, intermediate, unsignedp)
1107 != CODE_FOR_nothing)
1108 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
   /* NOTE(review): here TRULY_NOOP_TRUNCATION is passed modes rather than
      bit sizes as at the call above — looks inconsistent; confirm against
      the target macro's expected arguments.  */
1109 && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
1110 && (can_extend_p (intermediate, from_mode, unsignedp)
1111 != CODE_FOR_nothing))
1113 convert_move (to, convert_to_mode (intermediate, from,
1114 unsignedp), unsignedp);
1118 /* No suitable intermediate mode.
1119 Generate what we need with shifts: left shift to put the value's
   bits at the top, then right shift (arithmetic or logical per
   unsignedp) to extend them back down.  */
1120 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1121 - GET_MODE_BITSIZE (from_mode), 0);
1122 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1123 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1125 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1128 emit_move_insn (to, tmp);
1133 /* Support special truncate insns for certain modes.  Each case falls
   back to forcing FROM into a register and recursing if the insn is
   not available.  */
1135 if (from_mode == DImode && to_mode == SImode)
1137 #ifdef HAVE_truncdisi2
1138 if (HAVE_truncdisi2)
1140 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1144 convert_move (to, force_reg (from_mode, from), unsignedp);
1148 if (from_mode == DImode && to_mode == HImode)
1150 #ifdef HAVE_truncdihi2
1151 if (HAVE_truncdihi2)
1153 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1157 convert_move (to, force_reg (from_mode, from), unsignedp);
1161 if (from_mode == DImode && to_mode == QImode)
1163 #ifdef HAVE_truncdiqi2
1164 if (HAVE_truncdiqi2)
1166 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1170 convert_move (to, force_reg (from_mode, from), unsignedp);
1174 if (from_mode == SImode && to_mode == HImode)
1176 #ifdef HAVE_truncsihi2
1177 if (HAVE_truncsihi2)
1179 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1183 convert_move (to, force_reg (from_mode, from), unsignedp);
1187 if (from_mode == SImode && to_mode == QImode)
1189 #ifdef HAVE_truncsiqi2
1190 if (HAVE_truncsiqi2)
1192 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1196 convert_move (to, force_reg (from_mode, from), unsignedp);
1200 if (from_mode == HImode && to_mode == QImode)
1202 #ifdef HAVE_trunchiqi2
1203 if (HAVE_trunchiqi2)
1205 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1209 convert_move (to, force_reg (from_mode, from), unsignedp);
1213 if (from_mode == TImode && to_mode == DImode)
1215 #ifdef HAVE_trunctidi2
1216 if (HAVE_trunctidi2)
1218 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1222 convert_move (to, force_reg (from_mode, from), unsignedp);
1226 if (from_mode == TImode && to_mode == SImode)
1228 #ifdef HAVE_trunctisi2
1229 if (HAVE_trunctisi2)
1231 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1235 convert_move (to, force_reg (from_mode, from), unsignedp);
1239 if (from_mode == TImode && to_mode == HImode)
1241 #ifdef HAVE_trunctihi2
1242 if (HAVE_trunctihi2)
1244 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1248 convert_move (to, force_reg (from_mode, from), unsignedp);
1252 if (from_mode == TImode && to_mode == QImode)
1254 #ifdef HAVE_trunctiqi2
1255 if (HAVE_trunctiqi2)
1257 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1261 convert_move (to, force_reg (from_mode, from), unsignedp);
1265 /* Handle truncation of volatile memrefs, and so on;
1266 the things that couldn't be truncated directly,
1267 and for which there was no special instruction. */
1268 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1270 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1271 emit_move_insn (to, temp);
1275 /* Mode combination is not recognized. */
1279 /* Return an rtx for a value that would result
1280 from converting X to mode MODE.
1281 Both X and MODE may be floating, or both integer.
1282 UNSIGNEDP is nonzero if X is an unsigned value.
1283 This can be done by referring to a part of X in place
1284 or by copying to a new temporary with conversion.
1286 This function *must not* call protect_from_queue
1287 except when putting X into an insn (in which case convert_move does it).
   Thin wrapper: delegates to convert_modes with OLDMODE = VOIDmode.  */
1290 convert_to_mode (mode, x, unsignedp)
1291 enum machine_mode mode;
1295 return convert_modes (mode, VOIDmode, x, unsignedp);
1298 /* Return an rtx for a value that would result
1299 from converting X from mode OLDMODE to mode MODE.
1300 Both modes may be floating, or both integer.
1301 UNSIGNEDP is nonzero if X is an unsigned value.
1303 This can be done by referring to a part of X in place
1304 or by copying to a new temporary with conversion.
1306 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1308 This function *must not* call protect_from_queue
1309 except when putting X into an insn (in which case convert_move does it). */
1312 convert_modes (mode, oldmode, x, unsignedp)
1313 enum machine_mode mode, oldmode;
1319 /* If FROM is a SUBREG that indicates that we have already done at least
1320 the required extension, strip it. */
1322 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1323 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1324 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1325 x = gen_lowpart (mode, x);
   /* Prefer X's actual mode over the caller's OLDMODE when known.  */
1327 if (GET_MODE (x) != VOIDmode)
1328 oldmode = GET_MODE (x);
1330 if (mode == oldmode)
1333 /* There is one case that we must handle specially: If we are converting
1334 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1335 we are to interpret the constant as unsigned, gen_lowpart will do
1336 the wrong if the constant appears negative. What we want to do is
1337 make the high-order word of the constant zero, not all ones. */
1339 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1340 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1341 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1343 HOST_WIDE_INT val = INTVAL (x);
1345 if (oldmode != VOIDmode
1346 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1348 int width = GET_MODE_BITSIZE (oldmode);
1350 /* We need to zero extend VAL. */
1351 val &= ((HOST_WIDE_INT) 1 << width) - 1;
   /* High-order word explicitly zero, per the comment above.  */
1354 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1357 /* We can do this with a gen_lowpart if both desired and current modes
1358 are integer, and this is either a constant integer, a register, or a
1359 non-volatile MEM. Except for the constant case where MODE is no
1360 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1362 if ((GET_CODE (x) == CONST_INT
1363 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1364 || (GET_MODE_CLASS (mode) == MODE_INT
1365 && GET_MODE_CLASS (oldmode) == MODE_INT
1366 && (GET_CODE (x) == CONST_DOUBLE
1367 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1368 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1369 && direct_load[(int) mode])
1370 || (GET_CODE (x) == REG
1371 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1372 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1374 /* ?? If we don't know OLDMODE, we have to assume here that
1375 X does not need sign- or zero-extension. This may not be
1376 the case, but it's the best we can do. */
1377 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1378 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1380 HOST_WIDE_INT val = INTVAL (x);
1381 int width = GET_MODE_BITSIZE (oldmode);
1383 /* We must sign or zero-extend in this case. Start by
1384 zero-extending, then sign extend if we need to. */
1385 val &= ((HOST_WIDE_INT) 1 << width) - 1;
   /* (Elided condition presumably tests !unsignedp — TODO confirm.)  */
1387 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1388 val |= (HOST_WIDE_INT) (-1) << width;
1390 return GEN_INT (val);
1393 return gen_lowpart (mode, x);
   /* General case: emit an actual conversion into a fresh pseudo.  */
1396 temp = gen_reg_rtx (mode);
1397 convert_move (temp, x, unsignedp);
1401 /* Generate several move instructions to copy LEN bytes
1402 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1403 The caller must pass FROM and TO
1404 through protect_from_queue before calling.
1405 ALIGN (in bytes) is maximum alignment we can assume. */
1408 move_by_pieces (to, from, len, align)
1412 struct move_by_pieces data;
1413 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1414 int max_size = MOVE_MAX + 1;
1417 data.to_addr = to_addr;
1418 data.from_addr = from_addr;
/* Record whether each address is already auto-incrementing.  */
1422 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1423 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1425 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1426 || GET_CODE (from_addr) == POST_INC
1427 || GET_CODE (from_addr) == POST_DEC);
1429 data.explicit_inc_from = 0;
1430 data.explicit_inc_to = 0;
/* Copy backwards (from high addresses down) when the destination address
   decrements.  */
1432 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1433 if (data.reverse) data.offset = len;
1436 data.to_struct = MEM_IN_STRUCT_P (to);
1437 data.from_struct = MEM_IN_STRUCT_P (from);
1439 /* If copying requires more than two move insns,
1440 copy addresses to registers (to make displacements shorter)
1441 and use post-increment if available. */
1442 if (!(data.autinc_from && data.autinc_to)
1443 && move_by_pieces_ninsns (len, align) > 2)
1445 if (HAVE_PRE_DECREMENT && data.reverse && ! data.autinc_from)
1447 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1448 data.autinc_from = 1;
1449 data.explicit_inc_from = -1;
1451 if (HAVE_POST_INCREMENT && ! data.autinc_from)
1453 data.from_addr = copy_addr_to_reg (from_addr);
1454 data.autinc_from = 1;
1455 data.explicit_inc_from = 1;
1457 if (!data.autinc_from && CONSTANT_P (from_addr))
1458 data.from_addr = copy_addr_to_reg (from_addr);
1459 if (HAVE_PRE_DECREMENT && data.reverse && ! data.autinc_to)
1461 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1463 data.explicit_inc_to = -1;
1465 if (HAVE_POST_INCREMENT && ! data.reverse && ! data.autinc_to)
1467 data.to_addr = copy_addr_to_reg (to_addr);
1469 data.explicit_inc_to = 1;
1471 if (!data.autinc_to && CONSTANT_P (to_addr))
1472 data.to_addr = copy_addr_to_reg (to_addr);
/* NOTE(review): the then-body of this condition is elided in this extract;
   presumably it raises ALIGN when unaligned access is cheap -- confirm.  */
1475 if (! SLOW_UNALIGNED_ACCESS
1476 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1479 /* First move what we can in the largest integer mode, then go to
1480 successively smaller modes. */
1482 while (max_size > 1)
1484 enum machine_mode mode = VOIDmode, tmode;
1485 enum insn_code icode;
/* Find the widest integer mode strictly narrower than MAX_SIZE.  */
1487 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1488 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1489 if (GET_MODE_SIZE (tmode) < max_size)
1492 if (mode == VOIDmode)
1495 icode = mov_optab->handlers[(int) mode].insn_code;
1496 if (icode != CODE_FOR_nothing
1497 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1498 GET_MODE_SIZE (mode)))
1499 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1501 max_size = GET_MODE_SIZE (mode);
1504 /* The code above should have handled everything. */
1509 /* Return number of insns required to move L bytes by pieces.
1510 ALIGN (in bytes) is maximum alignment we can assume. */
1513 move_by_pieces_ninsns (l, align)
1517 register int n_insns = 0;
1518 int max_size = MOVE_MAX + 1;
/* NOTE(review): the then-body of this condition is elided in this extract;
   presumably it raises ALIGN, mirroring move_by_pieces -- confirm.  */
1520 if (! SLOW_UNALIGNED_ACCESS
1521 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
/* Count insns mode by mode, widest usable mode first -- this mirrors the
   mode-selection loop in move_by_pieces so the estimate matches the insns
   that function would actually emit.  */
1524 while (max_size > 1)
1526 enum machine_mode mode = VOIDmode, tmode;
1527 enum insn_code icode;
1529 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1530 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1531 if (GET_MODE_SIZE (tmode) < max_size)
1534 if (mode == VOIDmode)
1537 icode = mov_optab->handlers[(int) mode].insn_code;
1538 if (icode != CODE_FOR_nothing
1539 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1540 GET_MODE_SIZE (mode)))
/* Whole moves of this mode, then carry the remainder to narrower modes.  */
1541 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1543 max_size = GET_MODE_SIZE (mode);
1549 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1550 with move instructions for mode MODE. GENFUN is the gen_... function
1551 to make a move insn for that mode. DATA has all the other info. */
1554 move_by_pieces_1 (genfun, mode, data)
1555 rtx (*genfun) PROTO ((rtx, ...));
1556 enum machine_mode mode;
1557 struct move_by_pieces *data;
1559 register int size = GET_MODE_SIZE (mode);
1560 register rtx to1, from1;
/* Emit one MODE-sized move per iteration until fewer than SIZE bytes
   remain; the leftover bytes are handled by a narrower mode later.  */
1562 while (data->len >= size)
1564 if (data->reverse) data->offset -= size;
/* With auto-increment, address the MEM through the incremented register;
   otherwise use an explicit base-plus-offset address.  */
1566 to1 = (data->autinc_to
1567 ? gen_rtx_MEM (mode, data->to_addr)
1568 : copy_rtx (change_address (data->to, mode,
1569 plus_constant (data->to_addr,
1571 MEM_IN_STRUCT_P (to1) = data->to_struct;
1574 = (data->autinc_from
1575 ? gen_rtx_MEM (mode, data->from_addr)
1576 : copy_rtx (change_address (data->from, mode,
1577 plus_constant (data->from_addr,
1579 MEM_IN_STRUCT_P (from1) = data->from_struct;
/* Explicit pre-decrement before the move, post-increment after it.  */
1581 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1582 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1583 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1584 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1586 emit_insn ((*genfun) (to1, from1));
1587 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1588 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1589 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1590 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1592 if (! data->reverse) data->offset += size;
1598 /* Emit code to move a block Y to a block X.
1599 This may be done with string-move instructions,
1600 with multiple scalar move instructions, or with a library call.
1602 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1604 SIZE is an rtx that says how long they are.
1605 ALIGN is the maximum alignment we can assume they have,
1608 Return the address of the new block, if memcpy is called and returns it,
1612 emit_block_move (x, y, size, align)
1618 #ifdef TARGET_MEM_FUNCTIONS
1620 tree call_expr, arg_list;
/* Sanity checks: both operands must be BLKmode MEMs.  (NOTE(review): the
   abort () bodies appear elided in this extract.)  */
1623 if (GET_MODE (x) != BLKmode)
1626 if (GET_MODE (y) != BLKmode)
1629 x = protect_from_queue (x, 1);
1630 y = protect_from_queue (y, 0);
1631 size = protect_from_queue (size, 0);
1633 if (GET_CODE (x) != MEM)
1635 if (GET_CODE (y) != MEM)
/* Strategy 1: small constant-size copies are expanded as individual
   scalar moves.  */
1640 if (GET_CODE (size) == CONST_INT
1641 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1642 move_by_pieces (x, y, INTVAL (size), align);
/* Strategy 2: a target movstr pattern, if one accepts these operands.  */
1645 /* Try the most limited insn first, because there's no point
1646 including more than one in the machine description unless
1647 the more limited one has some advantage. */
1649 rtx opalign = GEN_INT (align);
1650 enum machine_mode mode;
1652 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1653 mode = GET_MODE_WIDER_MODE (mode))
1655 enum insn_code code = movstr_optab[(int) mode];
1657 if (code != CODE_FOR_nothing
1658 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1659 here because if SIZE is less than the mode mask, as it is
1660 returned by the macro, it will definitely be less than the
1661 actual mode mask. */
1662 && ((GET_CODE (size) == CONST_INT
1663 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1664 <= (GET_MODE_MASK (mode) >> 1)))
1665 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1666 && (insn_operand_predicate[(int) code][0] == 0
1667 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1668 && (insn_operand_predicate[(int) code][1] == 0
1669 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1670 && (insn_operand_predicate[(int) code][3] == 0
1671 || (*insn_operand_predicate[(int) code][3]) (opalign,
1675 rtx last = get_last_insn ();
1678 op2 = convert_to_mode (mode, size, 1);
1679 if (insn_operand_predicate[(int) code][2] != 0
1680 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1681 op2 = copy_to_mode_reg (mode, op2);
1683 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
/* The pattern may still fail to expand; undo anything emitted and try
   the next wider mode.  */
1690 delete_insns_since (last);
/* Strategy 3: call memcpy (or bcopy on targets without the mem
   functions).  */
1694 #ifdef TARGET_MEM_FUNCTIONS
1695 /* It is incorrect to use the libcall calling conventions to call
1696 memcpy in this context.
1698 This could be a user call to memcpy and the user may wish to
1699 examine the return value from memcpy.
1701 For targets where libcalls and normal calls have different conventions
1702 for returning pointers, we could end up generating incorrect code.
1704 So instead of using a libcall sequence we build up a suitable
1705 CALL_EXPR and expand the call in the normal fashion. */
1706 if (fn == NULL_TREE)
1710 /* This was copied from except.c, I don't know if all this is
1711 necessary in this context or not. */
1712 fn = get_identifier ("memcpy");
1713 push_obstacks_nochange ();
1714 end_temporary_allocation ();
1715 fntype = build_pointer_type (void_type_node);
1716 fntype = build_function_type (fntype, NULL_TREE);
1717 fn = build_decl (FUNCTION_DECL, fn, fntype);
1718 DECL_EXTERNAL (fn) = 1;
1719 TREE_PUBLIC (fn) = 1;
1720 DECL_ARTIFICIAL (fn) = 1;
1721 make_decl_rtl (fn, NULL_PTR, 1);
1722 assemble_external (fn);
1726 /* We need to make an argument list for the function call.
1728 memcpy has three arguments, the first two are void * addresses and
1729 the last is a size_t byte count for the copy. */
1731 = build_tree_list (NULL_TREE,
1732 make_tree (build_pointer_type (void_type_node),
1734 TREE_CHAIN (arg_list)
1735 = build_tree_list (NULL_TREE,
1736 make_tree (build_pointer_type (void_type_node),
1738 TREE_CHAIN (TREE_CHAIN (arg_list))
1739 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1740 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1742 /* Now we have to build up the CALL_EXPR itself. */
1743 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1744 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1745 call_expr, arg_list, NULL_TREE);
1746 TREE_SIDE_EFFECTS (call_expr) = 1;
1748 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
/* Non-TARGET_MEM_FUNCTIONS path: bcopy takes (src, dst, len) and returns
   nothing, so a plain libcall is fine here.  */
1750 emit_library_call (bcopy_libfunc, 0,
1751 VOIDmode, 3, XEXP (y, 0), Pmode,
1753 convert_to_mode (TYPE_MODE (integer_type_node), size,
1754 TREE_UNSIGNED (integer_type_node)),
1755 TYPE_MODE (integer_type_node));
1762 /* Copy all or part of a value X into registers starting at REGNO.
1763 The number of registers to be filled is NREGS. */
1766 move_block_to_reg (regno, x, nregs, mode)
1770 enum machine_mode mode;
1773 #ifdef HAVE_load_multiple
/* Constants the target cannot use directly are forced into memory first.  */
1781 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1782 x = validize_mem (force_const_mem (mode, x));
1784 /* See if the machine can do this with a load multiple insn. */
1785 #ifdef HAVE_load_multiple
1786 if (HAVE_load_multiple)
1788 last = get_last_insn ();
1789 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
/* The pattern may fail for these operands; delete any partial output and
   fall through to the word-at-a-time loop.  */
1797 delete_insns_since (last);
/* Fallback: one word-mode move per register.  */
1801 for (i = 0; i < nregs; i++)
1802 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1803 operand_subword_force (x, i, mode));
1806 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1807 The number of registers to be filled is NREGS. SIZE indicates the number
1808 of bytes in the object X. */
1812 move_block_from_reg (regno, x, nregs, size)
1819 #ifdef HAVE_store_multiple
1823 enum machine_mode mode;
1825 /* If SIZE is that of a mode no bigger than a word, just use that
1826 mode's store operation. */
1827 if (size <= UNITS_PER_WORD
1828 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1830 emit_move_insn (change_address (x, mode, NULL),
1831 gen_rtx_REG (mode, regno));
1835 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1836 to the left before storing to memory. Note that the previous test
1837 doesn't handle all cases (e.g. SIZE == 3). */
1838 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1840 rtx tem = operand_subword (x, 0, 1, BLKmode);
/* Left-justify the register contents so the significant bytes land at the
   low memory addresses of the destination word.  */
1846 shift = expand_shift (LSHIFT_EXPR, word_mode,
1847 gen_rtx_REG (word_mode, regno),
1848 build_int_2 ((UNITS_PER_WORD - size)
1849 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1850 emit_move_insn (tem, shift);
1854 /* See if the machine can do this with a store multiple insn. */
1855 #ifdef HAVE_store_multiple
1856 if (HAVE_store_multiple)
1858 last = get_last_insn ();
1859 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
/* Pattern failed to expand for these operands; discard and fall through.  */
1867 delete_insns_since (last);
/* Fallback: store each register into its word of X.  */
1871 for (i = 0; i < nregs; i++)
1873 rtx tem = operand_subword (x, i, 1, BLKmode);
1878 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1882 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1883 registers represented by a PARALLEL. SSIZE represents the total size of
1884 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1886 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatent assumption that
1887 the balance will be in what would be the low-order memory addresses, i.e.
1888 left justified for big endian, right justified for little endian. This
1889 happens to be true for the targets currently using this support. If this
1890 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1894 emit_group_load (dst, orig_src, ssize, align)
1901 if (GET_CODE (dst) != PARALLEL)
1904 /* Check for a NULL entry, used to indicate that the parameter goes
1905 both on the stack and in registers. */
1906 if (XEXP (XVECEXP (dst, 0, 0), 0))
/* One temporary per destination element; filled in the first loop, stored
   into the hard regs in the second, to keep the hard-reg moves adjacent.  */
1911 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
1913 /* If we won't be loading directly from memory, protect the real source
1914 from strange tricks we might play. */
1916 if (GET_CODE (src) != MEM)
1918 src = gen_reg_rtx (GET_MODE (orig_src));
1919 emit_move_insn (src, orig_src);
1922 /* Process the pieces. */
1923 for (i = start; i < XVECLEN (dst, 0); i++)
1925 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1926 int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1927 int bytelen = GET_MODE_SIZE (mode);
1930 /* Handle trailing fragments that run over the size of the struct. */
1931 if (ssize >= 0 && bytepos + bytelen > ssize)
1933 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1934 bytelen = ssize - bytepos;
1939 /* Optimize the access just a bit. */
1940 if (GET_CODE (src) == MEM
1941 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
1942 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1943 && bytelen == GET_MODE_SIZE (mode))
1945 tmps[i] = gen_reg_rtx (mode);
1946 emit_move_insn (tmps[i],
1947 change_address (src, mode,
1948 plus_constant (XEXP (src, 0),
/* General case: unaligned or partial piece, extract as a bit field.  */
1953 tmps[i] = extract_bit_field (src, bytelen*BITS_PER_UNIT,
1954 bytepos*BITS_PER_UNIT, 1, NULL_RTX,
1955 mode, mode, align, ssize);
/* A short trailing fragment on a big-endian target must be shifted up
   into the high end of the register.  */
1958 if (BYTES_BIG_ENDIAN && shift)
1960 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1961 tmps[i], 0, OPTAB_WIDEN);
1966 /* Copy the extracted pieces into the proper (probable) hard regs. */
1967 for (i = start; i < XVECLEN (dst, 0); i++)
1968 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1971 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1972 registers represented by a PARALLEL. SSIZE represents the total size of
1973 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
1976 emit_group_store (orig_dst, src, ssize, align)
1983 if (GET_CODE (src) != PARALLEL)
1986 /* Check for a NULL entry, used to indicate that the parameter goes
1987 both on the stack and in registers. */
1988 if (XEXP (XVECEXP (src, 0, 0), 0))
1993 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));
1995 /* Copy the (probable) hard regs into pseudos. */
1996 for (i = start; i < XVECLEN (src, 0); i++)
1998 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1999 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2000 emit_move_insn (tmps[i], reg);
2004 /* If we won't be storing directly into memory, protect the real destination
2005 from strange tricks we might play. */
2007 if (GET_CODE (dst) == PARALLEL)
2011 /* We can get a PARALLEL dst if there is a conditional expression in
2012 a return statement. In that case, the dst and src are the same,
2013 so no action is necessary. */
2014 if (rtx_equal_p (dst, src))
2017 /* It is unclear if we can ever reach here, but we may as well handle
2018 it. Allocate a temporary, and split this into a store/load to/from
2021 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2022 emit_group_store (temp, src, ssize, align);
2023 emit_group_load (dst, temp, ssize, align);
2026 else if (GET_CODE (dst) != MEM)
2028 dst = gen_reg_rtx (GET_MODE (orig_dst));
2029 /* Make life a bit easier for combine. */
2030 emit_move_insn (dst, const0_rtx);
2032 else if (! MEM_IN_STRUCT_P (dst))
2034 /* store_bit_field requires that memory operations have
2035 mem_in_struct_p set; we might not. */
2037 dst = copy_rtx (orig_dst);
2038 MEM_IN_STRUCT_P (dst) = 1;
2041 /* Process the pieces. */
2042 for (i = start; i < XVECLEN (src, 0); i++)
2044 int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2045 enum machine_mode mode = GET_MODE (tmps[i]);
2046 int bytelen = GET_MODE_SIZE (mode);
2048 /* Handle trailing fragments that run over the size of the struct. */
2049 if (ssize >= 0 && bytepos + bytelen > ssize)
2051 if (BYTES_BIG_ENDIAN)
/* Shift the fragment back down from the high end of the register (the
   inverse of the shift emit_group_load performed).  */
2053 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2054 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2055 tmps[i], 0, OPTAB_WIDEN);
2057 bytelen = ssize - bytepos;
2060 /* Optimize the access just a bit. */
2061 if (GET_CODE (dst) == MEM
2062 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2063 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2064 && bytelen == GET_MODE_SIZE (mode))
2066 emit_move_insn (change_address (dst, mode,
2067 plus_constant (XEXP (dst, 0),
/* General case: store the piece as a bit field.  */
2073 store_bit_field (dst, bytelen*BITS_PER_UNIT, bytepos*BITS_PER_UNIT,
2074 mode, tmps[i], align, ssize);
2079 /* Copy from the pseudo into the (probable) hard reg. */
2080 if (GET_CODE (dst) == REG)
2081 emit_move_insn (orig_dst, dst);
2084 /* Generate code to copy a BLKmode object of TYPE out of a
2085 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2086 is null, a stack temporary is created. TGTBLK is returned.
2088 The primary purpose of this routine is to handle functions
2089 that return BLKmode structures in registers. Some machines
2090 (the PA for example) want to return all small structures
2091 in registers regardless of the structure's alignment.
2095 copy_blkmode_from_reg(tgtblk,srcreg,type)
2100 int bytes = int_size_in_bytes (type);
2101 rtx src = NULL, dst = NULL;
/* Copy granule: the type's alignment, capped at one word.  */
2102 int bitsize = MIN (TYPE_ALIGN (type), (unsigned int) BITS_PER_WORD);
2103 int bitpos, xbitpos, big_endian_correction = 0;
/* No destination supplied: make a preserved stack temporary.  */
2107 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2108 MEM_IN_STRUCT_P (tgtblk) = AGGREGATE_TYPE_P (type);
2109 preserve_temp_slots (tgtblk);
2112 /* This code assumes srcreg is at least a full word. If it isn't,
2113 copy it into a new pseudo which is a full word. */
2114 if (GET_MODE (srcreg) != BLKmode
2115 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2116 srcreg = convert_to_mode (word_mode, srcreg,
2117 TREE_UNSIGNED (type));
2119 /* Structures whose size is not a multiple of a word are aligned
2120 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2121 machine, this means we must skip the empty high order bytes when
2122 calculating the bit offset. */
2123 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2124 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2127 /* Copy the structure BITSIZE bites at a time.
2129 We could probably emit more efficient code for machines
2130 which do not use strict alignment, but it doesn't seem
2131 worth the effort at the current time. */
/* BITPOS tracks the destination offset, XBITPOS the (possibly
   big-endian-corrected) source offset; they advance in lock step.  */
2132 for (bitpos = 0, xbitpos = big_endian_correction;
2133 bitpos < bytes * BITS_PER_UNIT;
2134 bitpos += bitsize, xbitpos += bitsize)
2137 /* We need a new source operand each time xbitpos is on a
2138 word boundary and when xbitpos == big_endian_correction
2139 (the first time through). */
2140 if (xbitpos % BITS_PER_WORD == 0
2141 || xbitpos == big_endian_correction)
2142 src = operand_subword_force (srcreg,
2143 xbitpos / BITS_PER_WORD,
2146 /* We need a new destination operand each time bitpos is on
2148 if (bitpos % BITS_PER_WORD == 0)
2149 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2151 /* Use xbitpos for the source extraction (right justified) and
2152 xbitpos for the destination store (left justified). */
2153 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2154 extract_bit_field (src, bitsize,
2155 xbitpos % BITS_PER_WORD, 1,
2156 NULL_RTX, word_mode,
2158 bitsize / BITS_PER_UNIT,
2160 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2166 /* Add a USE expression for REG to the (possibly empty) list pointed
2167 to by CALL_FUSAGE. REG must denote a hard register. */
2170 use_reg (call_fusage, reg)
2171 rtx *call_fusage, reg;
/* Reject anything that is not a hard register.  (NOTE(review): the abort
   body appears elided in this extract.)  */
2173 if (GET_CODE (reg) != REG
2174 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
/* Prepend (use REG) onto the CALL_INSN_FUNCTION_USAGE chain.  */
2178 = gen_rtx_EXPR_LIST (VOIDmode,
2179 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2182 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2183 starting at REGNO. All of these registers must be hard registers. */
2186 use_regs (call_fusage, regno, nregs)
/* The whole range must lie within the hard registers.  */
2193 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2196 for (i = 0; i < nregs; i++)
2197 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2200 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2201 PARALLEL REGS. This is for calls that pass values in multiple
2202 non-contiguous locations. The Irix 6 ABI has examples of this. */
2205 use_group_regs (call_fusage, regs)
2211 for (i = 0; i < XVECLEN (regs, 0); i++)
2213 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2215 /* A NULL entry means the parameter goes both on the stack and in
2216 registers. This can also be a MEM for targets that pass values
2217 partially on the stack and partially in registers. */
2218 if (reg != 0 && GET_CODE (reg) == REG)
2219 use_reg (call_fusage, reg);
2223 /* Generate several move instructions to clear LEN bytes of block TO.
2224 (A MEM rtx with BLKmode). The caller must pass TO through
2225 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
2229 clear_by_pieces (to, len, align)
2233 struct clear_by_pieces data;
2234 rtx to_addr = XEXP (to, 0);
2235 int max_size = MOVE_MAX + 1;
2238 data.to_addr = to_addr;
/* Record whether the destination address is already auto-incrementing.
   This function is the store-only analogue of move_by_pieces.  */
2241 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2242 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2244 data.explicit_inc_to = 0;
2246 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2247 if (data.reverse) data.offset = len;
2250 data.to_struct = MEM_IN_STRUCT_P (to);
2252 /* If copying requires more than two move insns,
2253 copy addresses to registers (to make displacements shorter)
2254 and use post-increment if available. */
2256 && move_by_pieces_ninsns (len, align) > 2)
2258 if (HAVE_PRE_DECREMENT && data.reverse && ! data.autinc_to)
2260 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2262 data.explicit_inc_to = -1;
2264 if (HAVE_POST_INCREMENT && ! data.reverse && ! data.autinc_to)
2266 data.to_addr = copy_addr_to_reg (to_addr);
2268 data.explicit_inc_to = 1;
2270 if (!data.autinc_to && CONSTANT_P (to_addr))
2271 data.to_addr = copy_addr_to_reg (to_addr);
/* NOTE(review): the then-body of this condition is elided in this extract;
   presumably it raises ALIGN, mirroring move_by_pieces -- confirm.  */
2274 if (! SLOW_UNALIGNED_ACCESS
2275 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2278 /* First move what we can in the largest integer mode, then go to
2279 successively smaller modes. */
2281 while (max_size > 1)
2283 enum machine_mode mode = VOIDmode, tmode;
2284 enum insn_code icode;
/* Find the widest integer mode strictly narrower than MAX_SIZE.  */
2286 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2287 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2288 if (GET_MODE_SIZE (tmode) < max_size)
2291 if (mode == VOIDmode)
2294 icode = mov_optab->handlers[(int) mode].insn_code;
2295 if (icode != CODE_FOR_nothing
2296 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2297 GET_MODE_SIZE (mode)))
2298 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2300 max_size = GET_MODE_SIZE (mode);
2303 /* The code above should have handled everything. */
2308 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2309 with move instructions for mode MODE. GENFUN is the gen_... function
2310 to make a move insn for that mode. DATA has all the other info. */
2313 clear_by_pieces_1 (genfun, mode, data)
2314 rtx (*genfun) PROTO ((rtx, ...));
2315 enum machine_mode mode;
2316 struct clear_by_pieces *data;
2318 register int size = GET_MODE_SIZE (mode);
/* Store one MODE-sized zero per iteration until fewer than SIZE bytes
   remain; leftovers are handled by a narrower mode later.  */
2321 while (data->len >= size)
2323 if (data->reverse) data->offset -= size;
/* With auto-increment, address the MEM through the incremented register;
   otherwise use an explicit base-plus-offset address.  */
2325 to1 = (data->autinc_to
2326 ? gen_rtx_MEM (mode, data->to_addr)
2327 : copy_rtx (change_address (data->to, mode,
2328 plus_constant (data->to_addr,
2330 MEM_IN_STRUCT_P (to1) = data->to_struct;
/* Explicit pre-decrement before the store, post-increment after it.  */
2332 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2333 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2335 emit_insn ((*genfun) (to1, const0_rtx));
2336 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2337 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2339 if (! data->reverse) data->offset += size;
2345 /* Write zeros through the storage of OBJECT.
2346 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2347 the maximum alignment we can is has, measured in bytes.
2349 If we call a function that returns the length of the block, return it. */
2352 clear_storage (object, size, align)
2357 #ifdef TARGET_MEM_FUNCTIONS
2359 tree call_expr, arg_list;
/* BLKmode objects go through the block-clear strategies below; other
   modes are cleared with a single move of CONST0_RTX at the end.  */
2363 if (GET_MODE (object) == BLKmode)
2365 object = protect_from_queue (object, 1);
2366 size = protect_from_queue (size, 0);
/* Strategy 1: small constant sizes are cleared by individual stores.  */
2368 if (GET_CODE (size) == CONST_INT
2369 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
2370 clear_by_pieces (object, INTVAL (size), align);
/* Strategy 2: a target clrstr pattern, if one accepts these operands.  */
2374 /* Try the most limited insn first, because there's no point
2375 including more than one in the machine description unless
2376 the more limited one has some advantage. */
2378 rtx opalign = GEN_INT (align);
2379 enum machine_mode mode;
2381 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2382 mode = GET_MODE_WIDER_MODE (mode))
2384 enum insn_code code = clrstr_optab[(int) mode];
2386 if (code != CODE_FOR_nothing
2387 /* We don't need MODE to be narrower than
2388 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2389 the mode mask, as it is returned by the macro, it will
2390 definitely be less than the actual mode mask. */
2391 && ((GET_CODE (size) == CONST_INT
2392 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2393 <= (GET_MODE_MASK (mode) >> 1)))
2394 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2395 && (insn_operand_predicate[(int) code][0] == 0
2396 || (*insn_operand_predicate[(int) code][0]) (object,
2398 && (insn_operand_predicate[(int) code][2] == 0
2399 || (*insn_operand_predicate[(int) code][2]) (opalign,
2403 rtx last = get_last_insn ();
2406 op1 = convert_to_mode (mode, size, 1);
2407 if (insn_operand_predicate[(int) code][1] != 0
2408 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2410 op1 = copy_to_mode_reg (mode, op1);
2412 pat = GEN_FCN ((int) code) (object, op1, opalign);
/* The pattern may still fail to expand; undo anything emitted and try
   the next wider mode.  */
2419 delete_insns_since (last);
/* Strategy 3: call memset (or bzero on targets without the mem
   functions).  */
2424 #ifdef TARGET_MEM_FUNCTIONS
2425 /* It is incorrect to use the libcall calling conventions to call
2426 memset in this context.
2428 This could be a user call to memset and the user may wish to
2429 examine the return value from memset.
2431 For targets where libcalls and normal calls have different conventions
2432 for returning pointers, we could end up generating incorrect code.
2434 So instead of using a libcall sequence we build up a suitable
2435 CALL_EXPR and expand the call in the normal fashion. */
2436 if (fn == NULL_TREE)
2440 /* This was copied from except.c, I don't know if all this is
2441 necessary in this context or not. */
2442 fn = get_identifier ("memset");
2443 push_obstacks_nochange ();
2444 end_temporary_allocation ();
2445 fntype = build_pointer_type (void_type_node);
2446 fntype = build_function_type (fntype, NULL_TREE);
2447 fn = build_decl (FUNCTION_DECL, fn, fntype);
2448 DECL_EXTERNAL (fn) = 1;
2449 TREE_PUBLIC (fn) = 1;
2450 DECL_ARTIFICIAL (fn) = 1;
2451 make_decl_rtl (fn, NULL_PTR, 1);
2452 assemble_external (fn);
2456 /* We need to make an argument list for the function call.
2458 memset has three arguments, the first is a void * addresses, the
2459 second a integer with the initialization value, the last is a size_t
2460 byte count for the copy. */
2462 = build_tree_list (NULL_TREE,
2463 make_tree (build_pointer_type (void_type_node),
2465 TREE_CHAIN (arg_list)
2466 = build_tree_list (NULL_TREE,
2467 make_tree (integer_type_node, const0_rtx))
2468 TREE_CHAIN (TREE_CHAIN (arg_list))
2469 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2470 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2472 /* Now we have to build up the CALL_EXPR itself. */
2473 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2474 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2475 call_expr, arg_list, NULL_TREE);
2476 TREE_SIDE_EFFECTS (call_expr) = 1;
2478 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2480 emit_library_call (bzero_libfunc, 0,
2482 XEXP (object, 0), Pmode,
2484 (TYPE_MODE (integer_type_node), size,
2485 TREE_UNSIGNED (integer_type_node)),
2486 TYPE_MODE (integer_type_node));
/* Non-BLKmode objects: a single move of the mode's zero constant.  */
2491 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2496 /* Generate code to copy Y into X.
2497 Both Y and X must have the same mode, except that
2498 Y can be a constant with VOIDmode.
2499 This mode cannot be BLKmode; use emit_block_move for that.
2501 Return the last instruction emitted. */
2504 emit_move_insn (x, y)
2507 enum machine_mode mode = GET_MODE (x);
2509 x = protect_from_queue (x, 1);
2510 y = protect_from_queue (y, 0);
/* Mode mismatch or BLKmode is a caller error.  (NOTE(review): the abort
   body appears elided in this extract.)  */
2512 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
/* Constants the target cannot use directly are forced into memory.  */
2515 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2516 y = force_const_mem (mode, y);
2518 /* If X or Y are memory references, verify that their addresses are valid
2520 if (GET_CODE (x) == MEM
2521 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2522 && ! push_operand (x, GET_MODE (x)))
2524 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2525 x = change_address (x, VOIDmode, XEXP (x, 0));
2527 if (GET_CODE (y) == MEM
2528 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2530 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2531 y = change_address (y, VOIDmode, XEXP (y, 0));
2533 if (mode == BLKmode)
/* Delegate the actual emission to the low-level worker.  */
2536 return emit_move_insn_1 (x, y);
2539 /* Low level part of emit_move_insn.
2540 Called just like emit_move_insn, but assumes X and Y
2541 are basically valid. */
/* NOTE(review): gaps in the embedded line numbers mean lines were elided
   from this excerpt; the listing is not contiguous source.  */
2544 emit_move_insn_1 (x, y)
2547 enum machine_mode mode = GET_MODE (x);
2548 enum machine_mode submode;
2549 enum mode_class class = GET_MODE_CLASS (mode);
/* Case 1: the target provides a move pattern for this mode — use it.  */
2552 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2554 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2556 /* Expand complex moves by moving real part and imag part, if possible. */
/* Case 2: complex modes — split into two moves in SUBMODE, the scalar
   component mode, provided the target can move that mode.  */
2557 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2558 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2560 (class == MODE_COMPLEX_INT
2561 ? MODE_INT : MODE_FLOAT),
2563 && (mov_optab->handlers[(int) submode].insn_code
2564 != CODE_FOR_nothing))
2566 /* Don't split destination if it is a stack push. */
2567 int stack = push_operand (x, GET_MODE (x));
2569 /* If this is a stack, push the highpart first, so it
2570 will be in the argument order.
2572 In that case, change_address is used only to convert
2573 the mode, not to change the address. */
2576 /* Note that the real part always precedes the imag part in memory
2577 regardless of machine's endianness. */
2578 #ifdef STACK_GROWS_DOWNWARD
/* Downward-growing stack: push imagpart first so realpart ends up at
   the lower address.  */
2579 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2580 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2581 gen_imagpart (submode, y)));
2582 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2583 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2584 gen_realpart (submode, y)));
2586 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2587 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2588 gen_realpart (submode, y)));
2589 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2590 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2591 gen_imagpart (submode, y)));
2596 /* Show the output dies here. */
/* The CLOBBER tells flow analysis the old value of X is dead before
   the piecewise stores begin.  */
2598 emit_insn (gen_rtx_CLOBBER (VOIDmode, x))
2600 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2601 (gen_realpart (submode, x), gen_realpart (submode, y)));
2602 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2603 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2606 return get_last_insn ();
2609 /* This will handle any multi-word mode that lacks a move_insn pattern.
2610 However, you will get better code if you define such patterns,
2611 even if they must turn into multiple assembler instructions. */
/* Case 3: multi-word fallback — move word by word via operand_subword.  */
2612 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2616 #ifdef PUSH_ROUNDING
2618 /* If X is a push on the stack, do the push now and replace
2619 X with a reference to the stack pointer. */
2620 if (push_operand (x, GET_MODE (x)))
2622 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2623 x = change_address (x, VOIDmode, stack_pointer_rtx);
2627 /* Show the output dies here. */
2629 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
/* One word per iteration, rounding the byte size up to whole words.  */
2632 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2635 rtx xpart = operand_subword (x, i, 1, mode);
2636 rtx ypart = operand_subword (y, i, 1, mode);
2638 /* If we can't get a part of Y, put Y into memory if it is a
2639 constant. Otherwise, force it into a register. If we still
2640 can't get a part of Y, abort. */
2641 if (ypart == 0 && CONSTANT_P (y))
2643 y = force_const_mem (mode, y);
2644 ypart = operand_subword (y, i, 1, mode);
2646 else if (ypart == 0)
2647 ypart = operand_subword_force (y, i, mode);
2649 if (xpart == 0 || ypart == 0)
/* Recurse through emit_move_insn so each word move gets full
   legitimization.  */
2652 last_insn = emit_move_insn (xpart, ypart);
2661 /* Pushing data onto the stack. */
2663 /* Push a block of length SIZE (perhaps variable)
2664 and return an rtx to address the beginning of the block.
2665 Note that it is not possible for the value returned to be a QUEUED.
2666 The value may be virtual_outgoing_args_rtx.
2668 EXTRA is the number of bytes of padding to push in addition to SIZE.
2669 BELOW nonzero means this padding comes at low addresses;
2670 otherwise, the padding comes at high addresses. */
/* NOTE(review): gaps in the embedded line numbers mean lines were elided
   from this excerpt; the listing is not contiguous source.  */
2673 push_block (size, extra, below)
/* Widen/convert SIZE from ptr_mode to Pmode (unsigned) before doing
   stack arithmetic with it.  */
2679 size = convert_modes (Pmode, ptr_mode, size, 1);
/* Adjust the stack by SIZE+EXTRA, using the cheapest form available.  */
2680 if (CONSTANT_P (size))
2681 anti_adjust_stack (plus_constant (size, extra));
2682 else if (GET_CODE (size) == REG && extra == 0)
2683 anti_adjust_stack (size);
2686 rtx temp = copy_to_mode_reg (Pmode, size);
2688 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2689 temp, 0, OPTAB_LIB_WIDEN);
2690 anti_adjust_stack (temp);
2693 #if defined (STACK_GROWS_DOWNWARD) \
2694 || (defined (ARGS_GROW_DOWNWARD) \
2695 && !defined (ACCUMULATE_OUTGOING_ARGS))
2697 /* Return the lowest stack address when STACK or ARGS grow downward and
2698 we are not accumulating outgoing arguments (the c4x port uses such
2700 temp = virtual_outgoing_args_rtx;
2701 if (extra != 0 && below)
2702 temp = plus_constant (temp, extra);
/* Upward-growing case: the block begins SIZE (+ high padding) bytes
   before the current outgoing-args pointer.  */
2704 if (GET_CODE (size) == CONST_INT)
2705 temp = plus_constant (virtual_outgoing_args_rtx,
2706 - INTVAL (size) - (below ? 0 : extra));
2707 else if (extra != 0 && !below)
2708 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2709 negate_rtx (Pmode, plus_constant (size, extra)));
2711 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2712 negate_rtx (Pmode, size));
/* Legitimize the address in the narrowest integer mode before
   returning it.  */
2715 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2721 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2724 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2725 block of SIZE bytes. */
/* NOTE(review): gaps in the embedded line numbers mean lines were elided
   from this excerpt; the listing is not contiguous source.  */
2728 get_push_address (size)
/* For post-modify push codes the stack pointer has already moved past
   the block, so undo the displacement; otherwise (pre-modify) the stack
   pointer itself addresses the block's start.  */
2733 if (STACK_PUSH_CODE == POST_DEC)
2734 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2735 else if (STACK_PUSH_CODE == POST_INC)
2736 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2738 temp = stack_pointer_rtx;
/* Copy to a pseudo so the caller's value survives later sp changes.  */
2740 return copy_to_reg (temp);
2743 /* Generate code to push X onto the stack, assuming it has mode MODE and
2745 MODE is redundant except when X is a CONST_INT (since they don't
2747 SIZE is an rtx for the size of data to be copied (in bytes),
2748 needed only if X is BLKmode.
2750 ALIGN (in bytes) is maximum alignment we can assume.
2752 If PARTIAL and REG are both nonzero, then copy that many of the first
2753 words of X into registers starting with REG, and push the rest of X.
2754 The amount of space pushed is decreased by PARTIAL words,
2755 rounded *down* to a multiple of PARM_BOUNDARY.
2756 REG must be a hard register in this case.
2757 If REG is zero but PARTIAL is not, take all other actions for an
2758 argument partially in registers, but do not actually load any
2761 EXTRA is the amount in bytes of extra space to leave next to this arg.
2762 This is ignored if an argument block has already been allocated.
2764 On a machine that lacks real push insns, ARGS_ADDR is the address of
2765 the bottom of the argument block for this call. We use indexing off there
2766 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2767 argument block has not been preallocated.
2769 ARGS_SO_FAR is the size of args previously pushed for this call.
2771 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2772 for arguments passed in registers. If nonzero, it will be the number
2773 of bytes required. */
/* NOTE(review): gaps in the embedded line numbers mean lines were elided
   from this excerpt; the listing is not contiguous source.  */
2776 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2777 args_addr, args_so_far, reg_parm_stack_space)
2779 enum machine_mode mode;
2788 int reg_parm_stack_space;
2791 enum direction stack_direction
2792 #ifdef STACK_GROWS_DOWNWARD
2798 /* Decide where to pad the argument: `downward' for below,
2799 `upward' for above, or `none' for don't pad it.
2800 Default is below for small data on big-endian machines; else above. */
2801 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2803 /* Invert direction if stack is post-update. */
2804 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2805 if (where_pad != none)
2806 where_pad = (where_pad == downward ? upward : downward);
2808 xinner = x = protect_from_queue (x, 0);
/* --- BLKmode argument: copy a block of memory onto the stack. --- */
2810 if (mode == BLKmode)
2812 /* Copy a block into the stack, entirely or partially. */
2815 int used = partial * UNITS_PER_WORD;
2816 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2824 /* USED is now the # of bytes we need not copy to the stack
2825 because registers will take care of them. */
2828 xinner = change_address (xinner, BLKmode,
2829 plus_constant (XEXP (xinner, 0), used));
2831 /* If the partial register-part of the arg counts in its stack size,
2832 skip the part of stack space corresponding to the registers.
2833 Otherwise, start copying to the beginning of the stack space,
2834 by setting SKIP to 0. */
2835 skip = (reg_parm_stack_space == 0) ? 0 : used;
2837 #ifdef PUSH_ROUNDING
2838 /* Do it with several push insns if that doesn't take lots of insns
2839 and if there is no difficulty with push insns that skip bytes
2840 on the stack for alignment purposes. */
2842 && GET_CODE (size) == CONST_INT
2844 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2846 /* Here we avoid the case of a structure whose weak alignment
2847 forces many pushes of a small amount of data,
2848 and such small pushes do rounding that causes trouble. */
2849 && ((! SLOW_UNALIGNED_ACCESS)
2850 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2851 || PUSH_ROUNDING (align) == align)
2852 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2854 /* Push padding now if padding above and stack grows down,
2855 or if padding below and stack grows up.
2856 But if space already allocated, this has already been done. */
2857 if (extra && args_addr == 0
2858 && where_pad != none && where_pad != stack_direction)
2859 anti_adjust_stack (GEN_INT (extra));
2861 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
2862 INTVAL (size) - used, align);
/* -fcheck-memory-usage instrumentation: record rights for the pushed
   bytes.  in_check_memory_usage guards against recursive checking.  */
2864 if (current_function_check_memory_usage && ! in_check_memory_usage)
2868 in_check_memory_usage = 1;
2869 temp = get_push_address (INTVAL(size) - used);
2870 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2871 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2873 XEXP (xinner, 0), ptr_mode,
2874 GEN_INT (INTVAL(size) - used),
2875 TYPE_MODE (sizetype));
2877 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2879 GEN_INT (INTVAL(size) - used),
2880 TYPE_MODE (sizetype),
2881 GEN_INT (MEMORY_USE_RW),
2882 TYPE_MODE (integer_type_node));
2883 in_check_memory_usage = 0;
2887 #endif /* PUSH_ROUNDING */
2889 /* Otherwise make space on the stack and copy the data
2890 to the address of that space. */
2892 /* Deduct words put into registers from the size we must copy. */
2895 if (GET_CODE (size) == CONST_INT)
2896 size = GEN_INT (INTVAL (size) - used);
2898 size = expand_binop (GET_MODE (size), sub_optab, size,
2899 GEN_INT (used), NULL_RTX, 0,
2903 /* Get the address of the stack space.
2904 In this case, we do not deal with EXTRA separately.
2905 A single stack adjust will do. */
2908 temp = push_block (size, extra, where_pad == downward);
2911 else if (GET_CODE (args_so_far) == CONST_INT)
2912 temp = memory_address (BLKmode,
2913 plus_constant (args_addr,
2914 skip + INTVAL (args_so_far)));
2916 temp = memory_address (BLKmode,
2917 plus_constant (gen_rtx_PLUS (Pmode,
/* Same -fcheck-memory-usage bookkeeping as above, for the
   allocated-block path.  */
2921 if (current_function_check_memory_usage && ! in_check_memory_usage)
2925 in_check_memory_usage = 1;
2926 target = copy_to_reg (temp);
2927 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2928 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2930 XEXP (xinner, 0), ptr_mode,
2931 size, TYPE_MODE (sizetype));
2933 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2935 size, TYPE_MODE (sizetype),
2936 GEN_INT (MEMORY_USE_RW),
2937 TYPE_MODE (integer_type_node));
2938 in_check_memory_usage = 0;
2941 /* TEMP is the address of the block. Copy the data there. */
2942 if (GET_CODE (size) == CONST_INT
2943 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2946 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
2947 INTVAL (size), align);
/* Try the target's movstr patterns, narrowest count mode first.  */
2952 rtx opalign = GEN_INT (align);
2953 enum machine_mode mode;
2954 rtx target = gen_rtx_MEM (BLKmode, temp);
2956 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2958 mode = GET_MODE_WIDER_MODE (mode))
2960 enum insn_code code = movstr_optab[(int) mode];
/* The count must fit in MODE, and all operand predicates (dest,
   src, alignment) must accept our operands.  */
2962 if (code != CODE_FOR_nothing
2963 && ((GET_CODE (size) == CONST_INT
2964 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2965 <= (GET_MODE_MASK (mode) >> 1)))
2966 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2967 && (insn_operand_predicate[(int) code][0] == 0
2968 || ((*insn_operand_predicate[(int) code][0])
2970 && (insn_operand_predicate[(int) code][1] == 0
2971 || ((*insn_operand_predicate[(int) code][1])
2973 && (insn_operand_predicate[(int) code][3] == 0
2974 || ((*insn_operand_predicate[(int) code][3])
2975 (opalign, VOIDmode))))
2977 rtx op2 = convert_to_mode (mode, size, 1);
2978 rtx last = get_last_insn ();
2981 if (insn_operand_predicate[(int) code][2] != 0
2982 && ! ((*insn_operand_predicate[(int) code][2])
2984 op2 = copy_to_mode_reg (mode, op2);
2986 pat = GEN_FCN ((int) code) (target, xinner,
/* Pattern expansion failed: discard any insns it emitted and try
   the next wider mode.  */
2994 delete_insns_since (last);
2999 #ifndef ACCUMULATE_OUTGOING_ARGS
3000 /* If the source is referenced relative to the stack pointer,
3001 copy it to another register to stabilize it. We do not need
3002 to do this if we know that we won't be changing sp. */
3004 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3005 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3006 temp = copy_to_reg (temp);
3009 /* Make inhibit_defer_pop nonzero around the library call
3010 to force it to pop the bcopy-arguments right away. */
3012 #ifdef TARGET_MEM_FUNCTIONS
/* Last resort: call memcpy (dest, src order) or bcopy (src, dest
   order) to perform the block copy.  */
3013 emit_library_call (memcpy_libfunc, 0,
3014 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3015 convert_to_mode (TYPE_MODE (sizetype),
3016 size, TREE_UNSIGNED (sizetype)),
3017 TYPE_MODE (sizetype));
3019 emit_library_call (bcopy_libfunc, 0,
3020 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3021 convert_to_mode (TYPE_MODE (integer_type_node),
3023 TREE_UNSIGNED (integer_type_node)),
3024 TYPE_MODE (integer_type_node));
/* --- Scalar split between registers and stack. --- */
3029 else if (partial > 0)
3031 /* Scalar partly in registers. */
3033 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3036 /* # words of start of argument
3037 that we must make space for but need not store. */
3038 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3039 int args_offset = INTVAL (args_so_far);
3042 /* Push padding now if padding above and stack grows down,
3043 or if padding below and stack grows up.
3044 But if space already allocated, this has already been done. */
3045 if (extra && args_addr == 0
3046 && where_pad != none && where_pad != stack_direction)
3047 anti_adjust_stack (GEN_INT (extra));
3049 /* If we make space by pushing it, we might as well push
3050 the real data. Otherwise, we can leave OFFSET nonzero
3051 and leave the space uninitialized. */
3055 /* Now NOT_STACK gets the number of words that we don't need to
3056 allocate on the stack. */
3057 not_stack = partial - offset;
3059 /* If the partial register-part of the arg counts in its stack size,
3060 skip the part of stack space corresponding to the registers.
3061 Otherwise, start copying to the beginning of the stack space,
3062 by setting SKIP to 0. */
3063 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3065 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3066 x = validize_mem (force_const_mem (mode, x));
3068 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3069 SUBREGs of such registers are not allowed. */
3070 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3071 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3072 x = copy_to_reg (x);
3074 /* Loop over all the words allocated on the stack for this arg. */
3075 /* We can do it by words, because any scalar bigger than a word
3076 has a size a multiple of a word. */
3077 #ifndef PUSH_ARGS_REVERSED
3078 for (i = not_stack; i < size; i++)
3080 for (i = size - 1; i >= not_stack; i--)
3082 if (i >= not_stack + offset)
/* Each stack word of the argument is pushed via a recursive call
   in word_mode.  */
3083 emit_push_insn (operand_subword_force (x, i, mode),
3084 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3086 GEN_INT (args_offset + ((i - not_stack + skip)
3088 reg_parm_stack_space);
/* --- Simple scalar entirely on the stack. --- */
3093 rtx target = NULL_RTX;
3095 /* Push padding now if padding above and stack grows down,
3096 or if padding below and stack grows up.
3097 But if space already allocated, this has already been done. */
3098 if (extra && args_addr == 0
3099 && where_pad != none && where_pad != stack_direction)
3100 anti_adjust_stack (GEN_INT (extra));
3102 #ifdef PUSH_ROUNDING
3104 addr = gen_push_operand ();
3108 if (GET_CODE (args_so_far) == CONST_INT)
3110 = memory_address (mode,
3111 plus_constant (args_addr,
3112 INTVAL (args_so_far)));
3114 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3119 emit_move_insn (gen_rtx_MEM (mode, addr), x);
3121 if (current_function_check_memory_usage && ! in_check_memory_usage)
3123 in_check_memory_usage = 1;
3125 target = get_push_address (GET_MODE_SIZE (mode));
3127 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3128 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3130 XEXP (x, 0), ptr_mode,
3131 GEN_INT (GET_MODE_SIZE (mode)),
3132 TYPE_MODE (sizetype));
3134 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3136 GEN_INT (GET_MODE_SIZE (mode)),
3137 TYPE_MODE (sizetype),
3138 GEN_INT (MEMORY_USE_RW),
3139 TYPE_MODE (integer_type_node));
3140 in_check_memory_usage = 0;
3145 /* If part should go in registers, copy that part
3146 into the appropriate registers. Do this now, at the end,
3147 since mem-to-mem copies above may do function calls. */
3148 if (partial > 0 && reg != 0)
3150 /* Handle calls that pass values in multiple non-contiguous locations.
3151 The Irix 6 ABI has examples of this. */
3152 if (GET_CODE (reg) == PARALLEL)
3153 emit_group_load (reg, x, -1, align); /* ??? size? */
3155 move_block_to_reg (REGNO (reg), x, partial, mode);
/* Padding that sits on the same side as growth is pushed last.  */
3158 if (extra && args_addr == 0 && where_pad == stack_direction)
3159 anti_adjust_stack (GEN_INT (extra));
3162 /* Expand an assignment that stores the value of FROM into TO.
3163 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3164 (This may contain a QUEUED rtx;
3165 if the value is constant, this rtx is a constant.)
3166 Otherwise, the returned value is NULL_RTX.
3168 SUGGEST_REG is no longer actually used.
3169 It used to mean, copy the value through a register
3170 and return that register, if that is possible.
3171 We now use WANT_VALUE to decide whether to do this. */
/* NOTE(review): gaps in the embedded line numbers mean lines were elided
   from this excerpt; the listing is not contiguous source.  */
3174 expand_assignment (to, from, want_value, suggest_reg)
3179 register rtx to_rtx = 0;
3182 /* Don't crash if the lhs of the assignment was erroneous. */
3184 if (TREE_CODE (to) == ERROR_MARK)
/* Still expand FROM for its side effects before bailing out.  */
3186 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3187 return want_value ? result : NULL_RTX;
3190 /* Assignment of a structure component needs special treatment
3191 if the structure component's rtx is not simply a MEM.
3192 Assignment of an array element at a constant index, and assignment of
3193 an array element in an unaligned packed structure field, has the same
3196 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3197 || TREE_CODE (to) == ARRAY_REF)
3199 enum machine_mode mode1;
/* Decompose the reference into base object, bit position/size,
   variable offset, mode, signedness, volatility and alignment.  */
3209 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3210 &unsignedp, &volatilep, &alignment);
3212 /* If we are going to use store_bit_field and extract_bit_field,
3213 make sure to_rtx will be safe for multiple use. */
3215 if (mode1 == VOIDmode && want_value)
3216 tem = stabilize_reference (tem);
3218 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
/* Apply any variable offset to the base address, converting the
   offset to ptr_mode first if necessary.  */
3221 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3223 if (GET_CODE (to_rtx) != MEM)
3226 if (GET_MODE (offset_rtx) != ptr_mode)
3228 #ifdef POINTERS_EXTEND_UNSIGNED
3229 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3231 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
/* When alignment permits, fold a constant bit position into the
   address directly instead of leaving it for store_field.  */
3235 if (GET_CODE (to_rtx) == MEM
3236 && GET_MODE (to_rtx) == BLKmode
3238 && (bitpos % bitsize) == 0
3239 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3240 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3242 rtx temp = change_address (to_rtx, mode1,
3243 plus_constant (XEXP (to_rtx, 0),
3246 if (GET_CODE (XEXP (temp, 0)) == REG)
3249 to_rtx = change_address (to_rtx, mode1,
3250 force_reg (GET_MODE (XEXP (temp, 0)),
3255 to_rtx = change_address (to_rtx, VOIDmode,
3256 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3257 force_reg (ptr_mode, offset_rtx)));
3261 if (GET_CODE (to_rtx) == MEM)
3263 /* When the offset is zero, to_rtx is the address of the
3264 structure we are storing into, and hence may be shared.
3265 We must make a new MEM before setting the volatile bit. */
3267 to_rtx = copy_rtx (to_rtx);
3269 MEM_VOLATILE_P (to_rtx) = 1;
3271 #if 0 /* This was turned off because, when a field is volatile
3272 in an object which is not volatile, the object may be in a register,
3273 and then we would abort over here. */
3279 if (TREE_CODE (to) == COMPONENT_REF
3280 && TREE_READONLY (TREE_OPERAND (to, 1)))
3283 to_rtx = copy_rtx (to_rtx);
3285 RTX_UNCHANGING_P (to_rtx) = 1;
3288 /* Check the access. */
/* -fcheck-memory-usage: compute the smallest aligned byte span
   covering the bit-field and verify write access to it.  */
3289 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3294 enum machine_mode best_mode;
3296 best_mode = get_best_mode (bitsize, bitpos,
3297 TYPE_ALIGN (TREE_TYPE (tem)),
3299 if (best_mode == VOIDmode)
3302 best_mode_size = GET_MODE_BITSIZE (best_mode);
3303 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3304 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3305 size *= GET_MODE_SIZE (best_mode);
3307 /* Check the access right of the pointer. */
3309 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3311 GEN_INT (size), TYPE_MODE (sizetype),
3312 GEN_INT (MEMORY_USE_WO),
3313 TYPE_MODE (integer_type_node));
3316 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3318 /* Spurious cast makes HPUX compiler happy. */
3319 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3322 /* Required alignment of containing datum. */
3324 int_size_in_bytes (TREE_TYPE (tem)),
3325 get_alias_set (to));
3326 preserve_temp_slots (result);
3330 /* If the value is meaningful, convert RESULT to the proper mode.
3331 Otherwise, return nothing. */
3332 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3333 TYPE_MODE (TREE_TYPE (from)),
3335 TREE_UNSIGNED (TREE_TYPE (to)))
3339 /* If the rhs is a function call and its value is not an aggregate,
3340 call the function before we start to compute the lhs.
3341 This is needed for correct code for cases such as
3342 val = setjmp (buf) on machines where reference to val
3343 requires loading up part of an address in a separate insn.
3345 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3346 a promoted variable where the zero- or sign- extension needs to be done.
3347 Handling this in the normal way is safe because no computation is done
3349 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3350 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3351 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
3356 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3358 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3360 /* Handle calls that return values in multiple non-contiguous locations.
3361 The Irix 6 ABI has examples of this. */
3362 if (GET_CODE (to_rtx) == PARALLEL)
3363 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3364 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3365 else if (GET_MODE (to_rtx) == BLKmode)
3366 emit_block_move (to_rtx, value, expr_size (from),
3367 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3369 emit_move_insn (to_rtx, value);
3370 preserve_temp_slots (to_rtx);
3373 return want_value ? to_rtx : NULL_RTX;
3376 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3377 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3381 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3382 if (GET_CODE (to_rtx) == MEM)
3383 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3386 /* Don't move directly into a return register. */
3387 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
/* Compute into a temporary first, then copy into the return reg.  */
3392 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3393 emit_move_insn (to_rtx, temp);
3394 preserve_temp_slots (to_rtx);
3397 return want_value ? to_rtx : NULL_RTX;
3400 /* In case we are returning the contents of an object which overlaps
3401 the place the value is being stored, use a safe function when copying
3402 a value through a pointer into a structure value return block. */
3403 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3404 && current_function_returns_struct
3405 && !current_function_returns_pcc_struct)
3410 size = expr_size (from);
3411 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3412 EXPAND_MEMORY_USE_DONT);
3414 /* Copy the rights of the bitmap. */
3415 if (current_function_check_memory_usage)
3416 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3417 XEXP (to_rtx, 0), ptr_mode,
3418 XEXP (from_rtx, 0), ptr_mode,
3419 convert_to_mode (TYPE_MODE (sizetype),
3420 size, TREE_UNSIGNED (sizetype)),
3421 TYPE_MODE (sizetype));
3423 #ifdef TARGET_MEM_FUNCTIONS
/* memcpy takes (dest, src); bcopy takes (src, dest).  */
3424 emit_library_call (memcpy_libfunc, 0,
3425 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3426 XEXP (from_rtx, 0), Pmode,
3427 convert_to_mode (TYPE_MODE (sizetype),
3428 size, TREE_UNSIGNED (sizetype)),
3429 TYPE_MODE (sizetype));
3431 emit_library_call (bcopy_libfunc, 0,
3432 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3433 XEXP (to_rtx, 0), Pmode,
3434 convert_to_mode (TYPE_MODE (integer_type_node),
3435 size, TREE_UNSIGNED (integer_type_node)),
3436 TYPE_MODE (integer_type_node));
3439 preserve_temp_slots (to_rtx);
3442 return want_value ? to_rtx : NULL_RTX;
3445 /* Compute FROM and store the value in the rtx we got. */
3448 result = store_expr (from, to_rtx, want_value);
3449 preserve_temp_slots (result);
3452 return want_value ? result : NULL_RTX;
3455 /* Generate code for computing expression EXP,
3456 and storing the value into TARGET.
3457 TARGET may contain a QUEUED rtx.
3459 If WANT_VALUE is nonzero, return a copy of the value
3460 not in TARGET, so that we can be sure to use the proper
3461 value in a containing expression even if TARGET has something
3462 else stored in it. If possible, we copy the value through a pseudo
3463 and return that pseudo. Or, if the value is constant, we try to
3464 return the constant. In some cases, we return a pseudo
3465 copied *from* TARGET.
3467 If the mode is BLKmode then we may return TARGET itself.
3468 It turns out that in BLKmode it doesn't cause a problem.
3469 because C has no operators that could combine two different
3470 assignments into the same BLKmode object with different values
3471 with no sequence point. Will other languages need this to
3474 If WANT_VALUE is 0, we return NULL, to make sure
3475 to catch quickly any cases where the caller uses the value
3476 and fails to set WANT_VALUE. */
3479 store_expr (exp, target, want_value)
3481 register rtx target;
3485 int dont_return_target = 0;
3487 if (TREE_CODE (exp) == COMPOUND_EXPR)
3489 /* Perform first part of compound expression, then assign from second
3491 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3493 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3495 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3497 /* For conditional expression, get safe form of the target. Then
3498 test the condition, doing the appropriate assignment on either
3499 side. This avoids the creation of unnecessary temporaries.
3500 For non-BLKmode, it is more efficient not to do this. */
3502 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3505 target = protect_from_queue (target, 1);
3507 do_pending_stack_adjust ();
3509 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3510 start_cleanup_deferral ();
3511 store_expr (TREE_OPERAND (exp, 1), target, 0);
3512 end_cleanup_deferral ();
3514 emit_jump_insn (gen_jump (lab2));
3517 start_cleanup_deferral ();
3518 store_expr (TREE_OPERAND (exp, 2), target, 0);
3519 end_cleanup_deferral ();
3524 return want_value ? target : NULL_RTX;
3526 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3527 && GET_MODE (target) != BLKmode)
3528 /* If target is in memory and caller wants value in a register instead,
3529 arrange that. Pass TARGET as target for expand_expr so that,
3530 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3531 We know expand_expr will not use the target in that case.
3532 Don't do this if TARGET is volatile because we are supposed
3533 to write it and then read it. */
3535 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3536 GET_MODE (target), 0);
3537 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3538 temp = copy_to_reg (temp);
3539 dont_return_target = 1;
3541 else if (queued_subexp_p (target))
3542 /* If target contains a postincrement, let's not risk
3543 using it as the place to generate the rhs. */
3545 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3547 /* Expand EXP into a new pseudo. */
3548 temp = gen_reg_rtx (GET_MODE (target));
3549 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3552 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3554 /* If target is volatile, ANSI requires accessing the value
3555 *from* the target, if it is accessed. So make that happen.
3556 In no case return the target itself. */
3557 if (! MEM_VOLATILE_P (target) && want_value)
3558 dont_return_target = 1;
3560 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3561 /* If this is an scalar in a register that is stored in a wider mode
3562 than the declared mode, compute the result into its declared mode
3563 and then convert to the wider mode. Our value is the computed
3566 /* If we don't want a value, we can do the conversion inside EXP,
3567 which will often result in some optimizations. Do the conversion
3568 in two steps: first change the signedness, if needed, then
3569 the extend. But don't do this if the type of EXP is a subtype
3570 of something else since then the conversion might involve
3571 more than just converting modes. */
3572 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3573 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3575 if (TREE_UNSIGNED (TREE_TYPE (exp))
3576 != SUBREG_PROMOTED_UNSIGNED_P (target))
3579 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3583 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3584 SUBREG_PROMOTED_UNSIGNED_P (target)),
3588 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3590 /* If TEMP is a volatile MEM and we want a result value, make
3591 the access now so it gets done only once. Likewise if
3592 it contains TARGET. */
3593 if (GET_CODE (temp) == MEM && want_value
3594 && (MEM_VOLATILE_P (temp)
3595 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3596 temp = copy_to_reg (temp);
3598 /* If TEMP is a VOIDmode constant, use convert_modes to make
3599 sure that we properly convert it. */
3600 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3601 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3602 TYPE_MODE (TREE_TYPE (exp)), temp,
3603 SUBREG_PROMOTED_UNSIGNED_P (target));
3605 convert_move (SUBREG_REG (target), temp,
3606 SUBREG_PROMOTED_UNSIGNED_P (target));
3607 return want_value ? temp : NULL_RTX;
3611 temp = expand_expr (exp, target, GET_MODE (target), 0);
3612 /* Return TARGET if it's a specified hardware register.
3613 If TARGET is a volatile mem ref, either return TARGET
3614 or return a reg copied *from* TARGET; ANSI requires this.
3616 Otherwise, if TEMP is not TARGET, return TEMP
3617 if it is constant (for efficiency),
3618 or if we really want the correct value. */
3619 if (!(target && GET_CODE (target) == REG
3620 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3621 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3622 && ! rtx_equal_p (temp, target)
3623 && (CONSTANT_P (temp) || want_value))
3624 dont_return_target = 1;
3627 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3628 the same as that of TARGET, adjust the constant. This is needed, for
3629 example, in case it is a CONST_DOUBLE and we want only a word-sized
3631 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3632 && TREE_CODE (exp) != ERROR_MARK
3633 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3634 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3635 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3637 if (current_function_check_memory_usage
3638 && GET_CODE (target) == MEM
3639 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3641 if (GET_CODE (temp) == MEM)
3642 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3643 XEXP (target, 0), ptr_mode,
3644 XEXP (temp, 0), ptr_mode,
3645 expr_size (exp), TYPE_MODE (sizetype));
3647 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3648 XEXP (target, 0), ptr_mode,
3649 expr_size (exp), TYPE_MODE (sizetype),
3650 GEN_INT (MEMORY_USE_WO),
3651 TYPE_MODE (integer_type_node));
3654 /* If value was not generated in the target, store it there.
3655 Convert the value to TARGET's type first if nec. */
3656 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3657 one or both of them are volatile memory refs, we have to distinguish
3659 - expand_expr has used TARGET. In this case, we must not generate
3660 another copy. This can be detected by TARGET being equal according
3662 - expand_expr has not used TARGET - that means that the source just
3663 happens to have the same RTX form. Since temp will have been created
3664 by expand_expr, it will compare unequal according to == .
3665 We must generate a copy in this case, to reach the correct number
3666 of volatile memory references. */
3668 if ((! rtx_equal_p (temp, target)
3669 || (temp != target && (side_effects_p (temp)
3670 || side_effects_p (target))))
3671 && TREE_CODE (exp) != ERROR_MARK)
3673 target = protect_from_queue (target, 1);
3674 if (GET_MODE (temp) != GET_MODE (target)
3675 && GET_MODE (temp) != VOIDmode)
3677 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3678 if (dont_return_target)
3680 /* In this case, we will return TEMP,
3681 so make sure it has the proper mode.
3682 But don't forget to store the value into TARGET. */
3683 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3684 emit_move_insn (target, temp);
3687 convert_move (target, temp, unsignedp);
3690 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3692 /* Handle copying a string constant into an array.
3693 The string constant may be shorter than the array.
3694 So copy just the string's actual length, and clear the rest. */
3698 /* Get the size of the data type of the string,
3699 which is actually the size of the target. */
3700 size = expr_size (exp);
3701 if (GET_CODE (size) == CONST_INT
3702 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3703 emit_block_move (target, temp, size,
3704 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3707 /* Compute the size of the data to copy from the string. */
3709 = size_binop (MIN_EXPR,
3710 make_tree (sizetype, size),
3712 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3713 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3717 /* Copy that much. */
3718 emit_block_move (target, temp, copy_size_rtx,
3719 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3721 /* Figure out how much is left in TARGET that we have to clear.
3722 Do all calculations in ptr_mode. */
3724 addr = XEXP (target, 0);
3725 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3727 if (GET_CODE (copy_size_rtx) == CONST_INT)
3729 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3730 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3734 addr = force_reg (ptr_mode, addr);
3735 addr = expand_binop (ptr_mode, add_optab, addr,
3736 copy_size_rtx, NULL_RTX, 0,
3739 size = expand_binop (ptr_mode, sub_optab, size,
3740 copy_size_rtx, NULL_RTX, 0,
3743 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
3744 GET_MODE (size), 0, 0);
3745 label = gen_label_rtx ();
3746 emit_jump_insn (gen_blt (label));
3749 if (size != const0_rtx)
3751 /* Be sure we can write on ADDR. */
3752 if (current_function_check_memory_usage)
3753 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3755 size, TYPE_MODE (sizetype),
3756 GEN_INT (MEMORY_USE_WO),
3757 TYPE_MODE (integer_type_node));
3758 #ifdef TARGET_MEM_FUNCTIONS
3759 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3761 const0_rtx, TYPE_MODE (integer_type_node),
3762 convert_to_mode (TYPE_MODE (sizetype),
3764 TREE_UNSIGNED (sizetype)),
3765 TYPE_MODE (sizetype));
3767 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3769 convert_to_mode (TYPE_MODE (integer_type_node),
3771 TREE_UNSIGNED (integer_type_node)),
3772 TYPE_MODE (integer_type_node));
3780 /* Handle calls that return values in multiple non-contiguous locations.
3781 The Irix 6 ABI has examples of this. */
3782 else if (GET_CODE (target) == PARALLEL)
3783 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3784 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3785 else if (GET_MODE (temp) == BLKmode)
3786 emit_block_move (target, temp, expr_size (exp),
3787 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3789 emit_move_insn (target, temp);
3792 /* If we don't want a value, return NULL_RTX. */
3796 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3797 ??? The latter test doesn't seem to make sense. */
3798 else if (dont_return_target && GET_CODE (temp) != MEM)
3801 /* Return TARGET itself if it is a hard register. */
3802 else if (want_value && GET_MODE (target) != BLKmode
3803 && ! (GET_CODE (target) == REG
3804 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3805 return copy_to_reg (target);
3811 /* Return 1 if EXP just contains zeros. */
/* NOTE(review): this listing is sampled -- the function signature and
   several case labels (INTEGER_CST, COMPLEX_CST, REAL_CST, CONSTRUCTOR,
   plus the default case) are missing lines here; verify against the
   complete source before editing. */
3819 switch (TREE_CODE (exp))
3823 case NON_LVALUE_EXPR:
/* Strip the wrapper and test the underlying value. */
3824 return is_zeros_p (TREE_OPERAND (exp, 0));
/* Integer constant: zero iff both halves of the double-word value are 0. */
3827 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
/* Complex constant: zero iff both the real and imaginary parts are. */
3831 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
/* Real constant: bitwise-identical comparison against 0.0 (dconst0). */
3834 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
/* Constructor: a SET_TYPE constructor is all-zero iff it names no
   elements (no true bits); any other constructor is zero only if every
   element value is itself zero. */
3837 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3838 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3839 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3840 if (! is_zeros_p (TREE_VALUE (elt)))
3850 /* Return 1 if EXP contains mostly (3/4) zeros. */
/* NOTE(review): several lines of the declaration (static int, the
   old-style parameter declaration `tree exp;`) are missing from this
   sampled listing. */
3853 mostly_zeros_p (exp)
3856 if (TREE_CODE (exp) == CONSTRUCTOR)
3858 int elts = 0, zeros = 0;
3859 tree elt = CONSTRUCTOR_ELTS (exp);
/* A SET_TYPE constructor stores ranges of true bits, not element
   values, so the 3/4 heuristic below does not apply. */
3860 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3862 /* If there are no ranges of true bits, it is all zero. */
3863 return elt == NULL_TREE;
/* Count zero vs. total elements; the elts increment is on a line
   missing from this listing. */
3865 for (; elt; elt = TREE_CHAIN (elt))
3867 /* We do not handle the case where the index is a RANGE_EXPR,
3868 so the statistic will be somewhat inaccurate.
3869 We do make a more accurate count in store_constructor itself,
3870 so since this function is only used for nested array elements,
3871 this should be close enough. */
3872 if (mostly_zeros_p (TREE_VALUE (elt)))
/* "Mostly zeros" means at least 75% of the counted elements. */
3877 return 4 * zeros >= 3 * elts;
/* Non-constructor: fall back to the exact all-zeros test. */
3880 return is_zeros_p (exp);
3883 /* Helper function for store_constructor.
3884 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3885 TYPE is the type of the CONSTRUCTOR, not the element type.
3886 CLEARED is as for store_constructor.
3888 This provides a recursive shortcut back to store_constructor when it isn't
3889 necessary to go through store_field. This is so that we can pass through
3890 the cleared field to let store_constructor know that we may not have to
3891 clear a substructure if the outer structure has already been cleared. */
/* NOTE(review): parts of the old-style parameter list (target, exp,
   type, cleared declarations) are on lines missing from this listing. */
3894 store_constructor_field (target, bitsize, bitpos,
3895 mode, exp, type, cleared)
3897 int bitsize, bitpos;
3898 enum machine_mode mode;
/* Take the recursive shortcut only when EXP is itself a CONSTRUCTOR
   landing on a byte boundary of a MEM (or at offset 0 of a register);
   everything else goes through the general store_field path. */
3902 if (TREE_CODE (exp) == CONSTRUCTOR
3903 && bitpos % BITS_PER_UNIT == 0
3904 /* If we have a non-zero bitpos for a register target, then we just
3905 let store_field do the bitfield handling. This is unlikely to
3906 generate unnecessary clear instructions anyways. */
3907 && (bitpos == 0 || GET_CODE (target) == MEM))
/* Rebase the MEM so the sub-constructor is stored at offset 0,
   then recurse, forwarding CLEARED so the callee can skip clears. */
3910 target = change_address (target, VOIDmode,
3911 plus_constant (XEXP (target, 0),
3912 bitpos / BITS_PER_UNIT));
3913 store_constructor (exp, target, cleared);
/* General case: store as an ordinary (possibly bit-) field. */
3916 store_field (target, bitsize, bitpos, mode, exp,
3917 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3918 int_size_in_bytes (type), 0);
3921 /* Store the value of constructor EXP into the rtx TARGET.
3922 TARGET is either a REG or a MEM.
3923 CLEARED is true if TARGET is known to have been zero'd. */
/* NOTE(review): this listing is sampled; many interior lines (braces,
   else arms, some declarations such as `elt', `i', `bitpos', `position')
   are missing.  The function dispatches on TREE_CODE (TREE_TYPE (exp)):
   RECORD/UNION/QUAL_UNION types, ARRAY types, and SET types each get
   their own expansion strategy below. */
3926 store_constructor (exp, target, cleared)
3931 tree type = TREE_TYPE (exp);
3932 rtx exp_size = expr_size (exp);
3934 /* We know our target cannot conflict, since safe_from_p has been called. */
3936 /* Don't try copying piece by piece into a hard register
3937 since that is vulnerable to being clobbered by EXP.
3938 Instead, construct in a pseudo register and then copy it all. */
3939 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3941 rtx temp = gen_reg_rtx (GET_MODE (target));
3942 store_constructor (exp, temp, 0);
3943 emit_move_insn (target, temp);
/* --- Aggregate (record/union) constructors --- */
3948 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3949 || TREE_CODE (type) == QUAL_UNION_TYPE)
3953 /* Inform later passes that the whole union value is dead. */
3954 if (TREE_CODE (type) == UNION_TYPE
3955 || TREE_CODE (type) == QUAL_UNION_TYPE)
3956 emit_insn (gen_rtx_CLOBBER (VOIDmode, target))
3958 /* If we are building a static constructor into a register,
3959 set the initial value as zero so we can fold the value into
3960 a constant. But if more than one register is involved,
3961 this probably loses. */
3962 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3963 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
3966 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
3971 /* If the constructor has fewer fields than the structure
3972 or if we are initializing the structure to mostly zeros,
3973 clear the whole structure first. */
3974 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3975 != list_length (TYPE_FIELDS (type)))
3976 || mostly_zeros_p (exp))
3979 clear_storage (target, expr_size (exp),
3980 TYPE_ALIGN (type) / BITS_PER_UNIT);
3985 /* Inform later passes that the old value is dead. */
3986 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
3988 /* Store each element of the constructor into
3989 the corresponding field of TARGET. */
3991 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3993 register tree field = TREE_PURPOSE (elt);
3994 tree value = TREE_VALUE (elt);
3995 register enum machine_mode mode;
3999 tree pos, constant = 0, offset = 0;
4000 rtx to_rtx = target;
4002 /* Just ignore missing fields.
4003 We cleared the whole structure, above,
4004 if any fields are missing. */
/* Skip fields that are already zero when TARGET was pre-cleared. */
4008 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4011 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
4012 unsignedp = TREE_UNSIGNED (field);
4013 mode = DECL_MODE (field);
4014 if (DECL_BIT_FIELD (field))
/* Split the field's bit position into a compile-time constant part
   and a variable part (OFFSET), if any. */
4017 pos = DECL_FIELD_BITPOS (field);
4018 if (TREE_CODE (pos) == INTEGER_CST)
4020 else if (TREE_CODE (pos) == PLUS_EXPR
4021 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4022 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
4027 bitpos = TREE_INT_CST_LOW (constant);
/* Variable offset: expand it and fold it into the MEM address. */
4033 if (contains_placeholder_p (offset))
4034 offset = build (WITH_RECORD_EXPR, sizetype,
4035 offset, make_tree (TREE_TYPE (exp), target));
4037 offset = size_binop (FLOOR_DIV_EXPR, offset,
4038 size_int (BITS_PER_UNIT));
4040 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4041 if (GET_CODE (to_rtx) != MEM)
4044 if (GET_MODE (offset_rtx) != ptr_mode)
4046 #ifdef POINTERS_EXTEND_UNSIGNED
4047 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4049 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4054 = change_address (to_rtx, VOIDmode,
4055 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4056 force_reg (ptr_mode, offset_rtx)));
/* Mark stores into read-only fields as unchanging; copy the MEM
   first so shared rtl is not mutated. */
4058 if (TREE_READONLY (field))
4060 if (GET_CODE (to_rtx) == MEM)
4061 to_rtx = copy_rtx (to_rtx);
4063 RTX_UNCHANGING_P (to_rtx) = 1;
4066 #ifdef WORD_REGISTER_OPERATIONS
4067 /* If this initializes a field that is smaller than a word, at the
4068 start of a word, try to widen it to a full word.
4069 This special case allows us to output C++ member function
4070 initializations in a form that the optimizers can understand. */
4072 && GET_CODE (target) == REG
4073 && bitsize < BITS_PER_WORD
4074 && bitpos % BITS_PER_WORD == 0
4075 && GET_MODE_CLASS (mode) == MODE_INT
4076 && TREE_CODE (value) == INTEGER_CST
4077 && GET_CODE (exp_size) == CONST_INT
4078 && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT)
4080 tree type = TREE_TYPE (value);
4081 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4083 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4084 value = convert (type, value);
/* On big-endian targets the narrow value must be shifted into the
   most-significant bits of the word. */
4086 if (BYTES_BIG_ENDIAN)
4088 = fold (build (LSHIFT_EXPR, type, value,
4089 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4090 bitsize = BITS_PER_WORD;
4094 store_constructor_field (to_rtx, bitsize, bitpos,
4095 mode, value, type, cleared);
/* --- Array constructors --- */
4098 else if (TREE_CODE (type) == ARRAY_TYPE)
4103 tree domain = TYPE_DOMAIN (type);
4104 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4105 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4106 tree elttype = TREE_TYPE (type);
4108 /* If the constructor has fewer elements than the array,
4109 clear the whole array first. Similarly if this is
4110 static constructor of a non-BLKmode object. */
4111 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4115 HOST_WIDE_INT count = 0, zero_count = 0;
4117 /* This loop is a more accurate version of the loop in
4118 mostly_zeros_p (it handles RANGE_EXPR in an index).
4119 It is also needed to check for missing elements. */
4120 for (elt = CONSTRUCTOR_ELTS (exp);
4122 elt = TREE_CHAIN (elt))
4124 tree index = TREE_PURPOSE (elt);
4125 HOST_WIDE_INT this_node_count;
4126 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4128 tree lo_index = TREE_OPERAND (index, 0);
4129 tree hi_index = TREE_OPERAND (index, 1);
4130 if (TREE_CODE (lo_index) != INTEGER_CST
4131 || TREE_CODE (hi_index) != INTEGER_CST)
/* A constant range initializes hi - lo + 1 elements at once. */
4136 this_node_count = TREE_INT_CST_LOW (hi_index)
4137 - TREE_INT_CST_LOW (lo_index) + 1;
4140 this_node_count = 1;
4141 count += this_node_count;
4142 if (mostly_zeros_p (TREE_VALUE (elt)))
4143 zero_count += this_node_count;
4145 /* Clear the entire array first if there are any missing elements,
4146 or if the incidence of zero elements is >= 75%. */
4147 if (count < maxelt - minelt + 1
4148 || 4 * zero_count >= 3 * count)
4154 clear_storage (target, expr_size (exp),
4155 TYPE_ALIGN (type) / BITS_PER_UNIT);
4159 /* Inform later passes that the old value is dead. */
4160 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4162 /* Store each element of the constructor into
4163 the corresponding element of TARGET, determined
4164 by counting the elements. */
4165 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4167 elt = TREE_CHAIN (elt), i++)
4169 register enum machine_mode mode;
4173 tree value = TREE_VALUE (elt);
4174 tree index = TREE_PURPOSE (elt);
4175 rtx xtarget = target;
4177 if (cleared && is_zeros_p (value))
4180 mode = TYPE_MODE (elttype);
4181 bitsize = GET_MODE_BITSIZE (mode);
4182 unsignedp = TREE_UNSIGNED (elttype);
/* RANGE_EXPR index: either unroll (small constant range) or emit a
   run-time loop that stores the value into each element. */
4184 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4186 tree lo_index = TREE_OPERAND (index, 0);
4187 tree hi_index = TREE_OPERAND (index, 1);
4188 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4189 struct nesting *loop;
4190 HOST_WIDE_INT lo, hi, count;
4193 /* If the range is constant and "small", unroll the loop. */
4194 if (TREE_CODE (lo_index) == INTEGER_CST
4195 && TREE_CODE (hi_index) == INTEGER_CST
4196 && (lo = TREE_INT_CST_LOW (lo_index),
4197 hi = TREE_INT_CST_LOW (hi_index),
4198 count = hi - lo + 1,
4199 (GET_CODE (target) != MEM
4201 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4202 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
4205 lo -= minelt; hi -= minelt;
4206 for (; lo <= hi; lo++)
4208 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4209 store_constructor_field (target, bitsize, bitpos,
4210 mode, value, type, cleared);
/* Otherwise: generate an explicit loop over the index range. */
4215 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4216 loop_top = gen_label_rtx ();
4217 loop_end = gen_label_rtx ();
4219 unsignedp = TREE_UNSIGNED (domain);
4221 index = build_decl (VAR_DECL, NULL_TREE, domain);
4223 DECL_RTL (index) = index_r
4224 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4227 if (TREE_CODE (value) == SAVE_EXPR
4228 && SAVE_EXPR_RTL (value) == 0)
4230 /* Make sure value gets expanded once before the
4232 expand_expr (value, const0_rtx, VOIDmode, 0);
4235 store_expr (lo_index, index_r, 0);
4236 loop = expand_start_loop (0);
4238 /* Assign value to element index. */
4239 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4240 size_int (BITS_PER_UNIT));
4241 position = size_binop (MULT_EXPR,
4242 size_binop (MINUS_EXPR, index,
4243 TYPE_MIN_VALUE (domain)),
4245 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4246 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4247 xtarget = change_address (target, mode, addr);
4248 if (TREE_CODE (value) == CONSTRUCTOR)
4249 store_constructor (value, xtarget, cleared);
4251 store_expr (value, xtarget, 0);
4253 expand_exit_loop_if_false (loop,
4254 build (LT_EXPR, integer_type_node,
4257 expand_increment (build (PREINCREMENT_EXPR,
4259 index, integer_one_node), 0, 0);
4261 emit_label (loop_end);
4263 /* Needed by stupid register allocation. to extend the
4264 lifetime of pseudo-regs used by target past the end
4266 emit_insn (gen_rtx_USE (GET_MODE (target), target));
/* Variable index or variable element size: compute the element
   address at run time. */
4269 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
4270 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
4276 index = size_int (i);
4279 index = size_binop (MINUS_EXPR, index,
4280 TYPE_MIN_VALUE (domain));
4281 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4282 size_int (BITS_PER_UNIT));
4283 position = size_binop (MULT_EXPR, index, position);
4284 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4285 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4286 xtarget = change_address (target, mode, addr);
4287 store_expr (value, xtarget, 0);
/* Constant index / constant element size: constant bit position. */
4292 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
4293 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4295 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4296 store_constructor_field (target, bitsize, bitpos,
4297 mode, value, type, cleared);
4301 /* set constructor assignments */
4302 else if (TREE_CODE (type) == SET_TYPE)
4304 tree elt = CONSTRUCTOR_ELTS (exp);
4305 int nbytes = int_size_in_bytes (type), nbits;
4306 tree domain = TYPE_DOMAIN (type);
4307 tree domain_min, domain_max, bitlength;
4309 /* The default implementation strategy is to extract the constant
4310 parts of the constructor, use that to initialize the target,
4311 and then "or" in whatever non-constant ranges we need in addition.
4313 If a large set is all zero or all ones, it is
4314 probably better to set it using memset (if available) or bzero.
4315 Also, if a large set has just a single range, it may also be
4316 better to first clear the whole set (using
4317 bzero/memset), and set the bits we want. */
4319 /* Check for all zeros. */
4320 if (elt == NULL_TREE)
4323 clear_storage (target, expr_size (exp),
4324 TYPE_ALIGN (type) / BITS_PER_UNIT);
/* Bit length of the set = (max - min) + 1, computed in sizetype. */
4328 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4329 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4330 bitlength = size_binop (PLUS_EXPR,
4331 size_binop (MINUS_EXPR, domain_max, domain_min),
4334 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4336 nbits = TREE_INT_CST_LOW (bitlength);
4338 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4339 are "complicated" (more than one range), initialize (the
4340 constant parts) by copying from a constant. */
4341 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4342 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4344 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4345 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4346 char *bit_buffer = (char *) alloca (nbits);
4347 HOST_WIDE_INT word = 0;
4350 int offset = 0; /* In bytes from beginning of set. */
/* Expand the constant elements into a flat bit buffer, then emit
   one move per set word. */
4351 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4354 if (bit_buffer[ibit])
4356 if (BYTES_BIG_ENDIAN)
4357 word |= (1 << (set_word_size - 1 - bit_pos));
4359 word |= 1 << bit_pos;
4362 if (bit_pos >= set_word_size || ibit == nbits)
/* Skip the store when the word is zero and TARGET is pre-cleared. */
4364 if (word != 0 || ! cleared)
4366 rtx datum = GEN_INT (word);
4368 /* The assumption here is that it is safe to use
4369 XEXP if the set is multi-word, but not if
4370 it's single-word. */
4371 if (GET_CODE (target) == MEM)
4373 to_rtx = plus_constant (XEXP (target, 0), offset);
4374 to_rtx = change_address (target, mode, to_rtx);
4376 else if (offset == 0)
4380 emit_move_insn (to_rtx, datum);
4386 offset += set_word_size / BITS_PER_UNIT;
4392 /* Don't bother clearing storage if the set is all ones. */
4393 if (TREE_CHAIN (elt) != NULL_TREE
4394 || (TREE_PURPOSE (elt) == NULL_TREE
4396 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4397 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4398 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4399 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4401 clear_storage (target, expr_size (exp),
4402 TYPE_ALIGN (type) / BITS_PER_UNIT);
/* Set the remaining (non-constant) ranges one by one, via memset
   when the range is byte-aligned, else via the __setbits libcall. */
4405 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4407 /* start of range of element or NULL */
4408 tree startbit = TREE_PURPOSE (elt);
4409 /* end of range of element, or element value */
4410 tree endbit = TREE_VALUE (elt);
4411 #ifdef TARGET_MEM_FUNCTIONS
4412 HOST_WIDE_INT startb, endb;
4414 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4416 bitlength_rtx = expand_expr (bitlength,
4417 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4419 /* handle non-range tuple element like [ expr ] */
4420 if (startbit == NULL_TREE)
4422 startbit = save_expr (endbit);
4425 startbit = convert (sizetype, startbit);
4426 endbit = convert (sizetype, endbit);
/* Rebase bit numbers so the set starts at bit 0. */
4427 if (! integer_zerop (domain_min))
4429 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4430 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4432 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4433 EXPAND_CONST_ADDRESS);
4434 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4435 EXPAND_CONST_ADDRESS);
/* If TARGET is a register, the libcalls below need a memory copy
   (TARGETX) to operate on; copy back afterwards. */
4439 targetx = assign_stack_temp (GET_MODE (target),
4440 GET_MODE_SIZE (GET_MODE (target)),
4442 emit_move_insn (targetx, target);
4444 else if (GET_CODE (target) == MEM)
4449 #ifdef TARGET_MEM_FUNCTIONS
4450 /* Optimization: If startbit and endbit are
4451 constants divisible by BITS_PER_UNIT,
4452 call memset instead. */
4453 if (TREE_CODE (startbit) == INTEGER_CST
4454 && TREE_CODE (endbit) == INTEGER_CST
4455 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4456 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4458 emit_library_call (memset_libfunc, 0,
4460 plus_constant (XEXP (targetx, 0),
4461 startb / BITS_PER_UNIT),
4463 constm1_rtx, TYPE_MODE (integer_type_node),
4464 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4465 TYPE_MODE (sizetype));
4470 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4471 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4472 bitlength_rtx, TYPE_MODE (sizetype),
4473 startbit_rtx, TYPE_MODE (sizetype),
4474 endbit_rtx, TYPE_MODE (sizetype));
4477 emit_move_insn (target, targetx);
4485 /* Store the value of EXP (an expression tree)
4486 into a subfield of TARGET which has mode MODE and occupies
4487 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4488 If MODE is VOIDmode, it means that we are storing into a bit-field.
4490 If VALUE_MODE is VOIDmode, return nothing in particular.
4491 UNSIGNEDP is not used in this case.
4493 Otherwise, return an rtx for the value stored. This rtx
4494 has mode VALUE_MODE if that is convenient to do.
4495 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4497 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4498 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4500 ALIAS_SET is the alias set for the destination. This value will
4501 (in general) be different from that for TARGET, since TARGET is a
4502 reference to the containing structure. */
/* NOTE(review): several declaration lines (target/exp rtx and tree
   parameters, align/total_size/alias_set, local declarations) are
   missing from this sampled listing, as is the function's closing. */
4505 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4506 unsignedp, align, total_size, alias_set)
4508 int bitsize, bitpos;
4509 enum machine_mode mode;
4511 enum machine_mode value_mode;
4517 HOST_WIDE_INT width_mask = 0;
4519 if (TREE_CODE (exp) == ERROR_MARK)
/* Mask of BITSIZE low-order ones, used later to truncate the value. */
4522 if (bitsize < HOST_BITS_PER_WIDE_INT)
4523 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4525 /* If we are storing into an unaligned field of an aligned union that is
4526 in a register, we may have the mode of TARGET being an integer mode but
4527 MODE == BLKmode. In that case, get an aligned object whose size and
4528 alignment are the same as TARGET and store TARGET into it (we can avoid
4529 the store if the field being stored is the entire width of TARGET). Then
4530 call ourselves recursively to store the field into a BLKmode version of
4531 that object. Finally, load from the object into TARGET. This is not
4532 very efficient in general, but should only be slightly more expensive
4533 than the otherwise-required unaligned accesses. Perhaps this can be
4534 cleaned up later. */
4537 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4539 rtx object = assign_stack_temp (GET_MODE (target),
4540 GET_MODE_SIZE (GET_MODE (target)), 0);
/* BLK_OBJECT views the same stack slot in BLKmode so the recursive
   call can do a block-style store into it. */
4541 rtx blk_object = copy_rtx (object);
4543 MEM_IN_STRUCT_P (object) = 1;
4544 MEM_IN_STRUCT_P (blk_object) = 1;
4545 PUT_MODE (blk_object, BLKmode);
4547 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4548 emit_move_insn (object, target);
4550 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4551 align, total_size, alias_set);
4553 /* Even though we aren't returning target, we need to
4554 give it the updated value. */
4555 emit_move_insn (target, object);
4560 /* If the structure is in a register or if the component
4561 is a bit field, we cannot use addressing to access it.
4562 Use bit-field techniques or SUBREG to store in it. */
4564 if (mode == VOIDmode
4565 || (mode != BLKmode && ! direct_store[(int) mode])
4566 || GET_CODE (target) == REG
4567 || GET_CODE (target) == SUBREG
4568 /* If the field isn't aligned enough to store as an ordinary memref,
4569 store it as a bit field. */
4570 || (SLOW_UNALIGNED_ACCESS
4571 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4572 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4574 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4576 /* If BITSIZE is narrower than the size of the type of EXP
4577 we will be narrowing TEMP. Normally, what's wanted are the
4578 low-order bits. However, if EXP's type is a record and this is
4579 big-endian machine, we want the upper BITSIZE bits. */
4580 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4581 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4582 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4583 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4584 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4588 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4590 if (mode != VOIDmode && mode != BLKmode
4591 && mode != TYPE_MODE (TREE_TYPE (exp)))
4592 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4594 /* If the modes of TARGET and TEMP are both BLKmode, both
4595 must be in memory and BITPOS must be aligned on a byte
4596 boundary. If so, we simply do a block copy. */
4597 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4599 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4600 || bitpos % BITS_PER_UNIT != 0)
4603 target = change_address (target, VOIDmode,
4604 plus_constant (XEXP (target, 0),
4605 bitpos / BITS_PER_UNIT));
4607 emit_block_move (target, temp,
4608 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4612 return value_mode == VOIDmode ? const0_rtx : target;
4615 /* Store the value in the bitfield. */
4616 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4617 if (value_mode != VOIDmode)
4619 /* The caller wants an rtx for the value. */
4620 /* If possible, avoid refetching from the bitfield itself. */
4622 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4625 enum machine_mode tmode;
/* Unsigned: mask to BITSIZE bits.  Signed: sign-extend by a
   left/right shift pair. */
4628 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4629 tmode = GET_MODE (temp);
4630 if (tmode == VOIDmode)
4632 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4633 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4634 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
/* Otherwise re-read the value back out of the bitfield. */
4636 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4637 NULL_RTX, value_mode, 0, align,
/* Ordinary addressable memory field: build a MEM for just the
   component and store through it. */
4644 rtx addr = XEXP (target, 0);
4647 /* If a value is wanted, it must be the lhs;
4648 so make the address stable for multiple use. */
4650 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4651 && ! CONSTANT_ADDRESS_P (addr)
4652 /* A frame-pointer reference is already stable. */
4653 && ! (GET_CODE (addr) == PLUS
4654 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4655 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4656 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4657 addr = copy_to_reg (addr);
4659 /* Now build a reference to just the desired component. */
4661 to_rtx = copy_rtx (change_address (target, mode,
4662 plus_constant (addr,
4664 / BITS_PER_UNIT))));
4665 MEM_IN_STRUCT_P (to_rtx) = 1;
4666 MEM_ALIAS_SET (to_rtx) = alias_set;
4668 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4672 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4673 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4674 ARRAY_REFs and find the ultimate containing object, which we return.
4676 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4677 bit position, and *PUNSIGNEDP to the signedness of the field.
4678 If the position of the field is variable, we store a tree
4679 giving the variable offset (in units) in *POFFSET.
4680 This offset is in addition to the bit position.
4681 If the position is not variable, we store 0 in *POFFSET.
4682 We set *PALIGNMENT to the alignment in bytes of the address that will be
4683 computed. This is the alignment of the thing we return if *POFFSET
4684 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4686 If any of the extraction expressions is volatile,
4687 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4689 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4690 is a mode that can be used to access the field. In that case, *PBITSIZE
4693 If the field describes a variable-sized object, *PMODE is set to
4694 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4695 this case, but the address of the object can be found. */
4698 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4699 punsignedp, pvolatilep, palignment)
4704 enum machine_mode *pmode;
4709 tree orig_exp = exp;
4711 enum machine_mode mode = VOIDmode;
4712 tree offset = integer_zero_node;
4713 unsigned int alignment = BIGGEST_ALIGNMENT;
4715 if (TREE_CODE (exp) == COMPONENT_REF)
4717 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4718 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4719 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4720 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4722 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4724 size_tree = TREE_OPERAND (exp, 1);
4725 *punsignedp = TREE_UNSIGNED (exp);
4729 mode = TYPE_MODE (TREE_TYPE (exp));
4730 *pbitsize = GET_MODE_BITSIZE (mode);
4731 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4736 if (TREE_CODE (size_tree) != INTEGER_CST)
4737 mode = BLKmode, *pbitsize = -1;
4739 *pbitsize = TREE_INT_CST_LOW (size_tree);
4742 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4743 and find the ultimate containing object. */
4749 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4751 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4752 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4753 : TREE_OPERAND (exp, 2));
4754 tree constant = integer_zero_node, var = pos;
4756 /* If this field hasn't been filled in yet, don't go
4757 past it. This should only happen when folding expressions
4758 made during type construction. */
4762 /* Assume here that the offset is a multiple of a unit.
4763 If not, there should be an explicitly added constant. */
4764 if (TREE_CODE (pos) == PLUS_EXPR
4765 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4766 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4767 else if (TREE_CODE (pos) == INTEGER_CST)
4768 constant = pos, var = integer_zero_node;
4770 *pbitpos += TREE_INT_CST_LOW (constant);
4771 offset = size_binop (PLUS_EXPR, offset,
4772 size_binop (EXACT_DIV_EXPR, var,
4773 size_int (BITS_PER_UNIT)));
4776 else if (TREE_CODE (exp) == ARRAY_REF)
4778 /* This code is based on the code in case ARRAY_REF in expand_expr
4779 below. We assume here that the size of an array element is
4780 always an integral multiple of BITS_PER_UNIT. */
4782 tree index = TREE_OPERAND (exp, 1);
4783 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4785 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4786 tree index_type = TREE_TYPE (index);
4789 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4791 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4793 index_type = TREE_TYPE (index);
4796 /* Optimize the special-case of a zero lower bound.
4798 We convert the low_bound to sizetype to avoid some problems
4799 with constant folding. (E.g. suppose the lower bound is 1,
4800 and its mode is QI. Without the conversion, (ARRAY
4801 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4802 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4804 But sizetype isn't quite right either (especially if
4805 the lowbound is negative). FIXME */
4807 if (! integer_zerop (low_bound))
4808 index = fold (build (MINUS_EXPR, index_type, index,
4809 convert (sizetype, low_bound)));
4811 if (TREE_CODE (index) == INTEGER_CST)
4813 index = convert (sbitsizetype, index);
4814 index_type = TREE_TYPE (index);
4817 xindex = fold (build (MULT_EXPR, sbitsizetype, index,
4818 convert (sbitsizetype,
4819 TYPE_SIZE (TREE_TYPE (exp)))));
4821 if (TREE_CODE (xindex) == INTEGER_CST
4822 && TREE_INT_CST_HIGH (xindex) == 0)
4823 *pbitpos += TREE_INT_CST_LOW (xindex);
4826 /* Either the bit offset calculated above is not constant, or
4827 it overflowed. In either case, redo the multiplication
4828 against the size in units. This is especially important
4829 in the non-constant case to avoid a division at runtime. */
4830 xindex = fold (build (MULT_EXPR, ssizetype, index,
4832 TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
4834 if (contains_placeholder_p (xindex))
4835 xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);
4837 offset = size_binop (PLUS_EXPR, offset, xindex);
4840 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4841 && ! ((TREE_CODE (exp) == NOP_EXPR
4842 || TREE_CODE (exp) == CONVERT_EXPR)
4843 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4844 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4846 && (TYPE_MODE (TREE_TYPE (exp))
4847 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4850 /* If any reference in the chain is volatile, the effect is volatile. */
4851 if (TREE_THIS_VOLATILE (exp))
4854 /* If the offset is non-constant already, then we can't assume any
4855 alignment more than the alignment here. */
4856 if (! integer_zerop (offset))
4857 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4859 exp = TREE_OPERAND (exp, 0);
4862 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4863 alignment = MIN (alignment, DECL_ALIGN (exp));
4864 else if (TREE_TYPE (exp) != 0)
4865 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4867 if (integer_zerop (offset))
4870 if (offset != 0 && contains_placeholder_p (offset))
4871 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4875 *palignment = alignment / BITS_PER_UNIT;
4879 /* Subroutine of expand_expr: compute memory_usage from modifier. */
4880 static enum memory_use_mode
4881 get_memory_usage_from_modifier (modifier)
4882 enum expand_modifier modifier;
/* NOTE(review): the switch header and the EXPAND_MEMORY_USE_RO case
   label are elided in this listing; the visible returns map each
   expand_modifier onto the memory_use_mode recorded for the
   -fcheck-memory-usage instrumentation.  */
4888 return MEMORY_USE_RO;
4890 case EXPAND_MEMORY_USE_WO:
4891 return MEMORY_USE_WO;
4893 case EXPAND_MEMORY_USE_RW:
4894 return MEMORY_USE_RW;
4896 case EXPAND_MEMORY_USE_DONT:
4897 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
4898 MEMORY_USE_DONT, because they are modifiers to a call of
4899 expand_expr in the ADDR_EXPR case of expand_expr. */
4900 case EXPAND_CONST_ADDRESS:
4901 case EXPAND_INITIALIZER:
4902 return MEMORY_USE_DONT;
4903 case EXPAND_MEMORY_USE_BAD:
4909 /* Given an rtx VALUE that may contain additions and multiplications,
4910 return an equivalent value that just refers to a register or memory.
4911 This is done by generating instructions to perform the arithmetic
4912 and returning a pseudo-register containing the value.
4914 The returned value may be a REG, SUBREG, MEM or constant. */
/* NOTE(review): some declaration and brace lines are elided in this
   listing; annotations describe only what is visible.  */
4917 force_operand (value, target)
4920 register optab binoptab = 0;
4921 /* Use a temporary to force order of execution of calls to
4925 /* Use subtarget as the target for operand 0 of a binary operation. */
4926 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4928 /* Check for a PIC address load. */
4930 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
4931 && XEXP (value, 0) == pic_offset_table_rtx
4932 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
4933 || GET_CODE (XEXP (value, 1)) == LABEL_REF
4934 || GET_CODE (XEXP (value, 1)) == CONST))
4937 subtarget = gen_reg_rtx (GET_MODE (value));
4938 emit_move_insn (subtarget, value);
/* Dispatch on the outermost arithmetic code of VALUE.  */
4942 if (GET_CODE (value) == PLUS)
4943 binoptab = add_optab;
4944 else if (GET_CODE (value) == MINUS)
4945 binoptab = sub_optab;
4946 else if (GET_CODE (value) == MULT)
4948 op2 = XEXP (value, 1);
4949 if (!CONSTANT_P (op2)
4950 && !(GET_CODE (op2) == REG && op2 != subtarget))
4952 tmp = force_operand (XEXP (value, 0), subtarget);
4953 return expand_mult (GET_MODE (value), tmp,
4954 force_operand (op2, NULL_RTX),
4960 op2 = XEXP (value, 1);
4961 if (!CONSTANT_P (op2)
4962 && !(GET_CODE (op2) == REG && op2 != subtarget))
/* Rewrite VALUE - CONST as VALUE + (-CONST) so the addition paths
   below (and virtual-register instantiation) can handle it.  */
4964 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4966 binoptab = add_optab;
4967 op2 = negate_rtx (GET_MODE (value), op2);
4970 /* Check for an addition with OP2 a constant integer and our first
4971 operand a PLUS of a virtual register and something else. In that
4972 case, we want to emit the sum of the virtual register and the
4973 constant first and then add the other value. This allows virtual
4974 register instantiation to simply modify the constant rather than
4975 creating another one around this addition. */
4976 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4977 && GET_CODE (XEXP (value, 0)) == PLUS
4978 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4979 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4980 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4982 rtx temp = expand_binop (GET_MODE (value), binoptab,
4983 XEXP (XEXP (value, 0), 0), op2,
4984 subtarget, 0, OPTAB_LIB_WIDEN);
4985 return expand_binop (GET_MODE (value), binoptab, temp,
4986 force_operand (XEXP (XEXP (value, 0), 1), 0),
4987 target, 0, OPTAB_LIB_WIDEN);
/* General binary case: force operand 0, then combine with OP2.  */
4990 tmp = force_operand (XEXP (value, 0), subtarget);
4991 return expand_binop (GET_MODE (value), binoptab, tmp,
4992 force_operand (op2, NULL_RTX),
4993 target, 0, OPTAB_LIB_WIDEN);
4994 /* We give UNSIGNEDP = 0 to expand_binop
4995 because the only operations we are expanding here are signed ones. */
5000 /* Subroutine of expand_expr:
5001 save the non-copied parts (LIST) of an expr (LHS), and return a list
5002 which can restore these values to their previous values,
5003 should something modify their storage. */
/* NOTE(review): declaration and brace lines are elided in this listing.
   Each leaf of LIST yields a TREE_LIST node whose PURPOSE is the
   COMPONENT_REF to save and whose VALUE is an RTL_EXPR wrapping a
   temporary (from assign_temp) into which the current contents are
   stored.  */
5006 save_noncopied_parts (lhs, list)
5013 for (tail = list; tail; tail = TREE_CHAIN (tail))
5014 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
/* Nested TREE_LISTs are flattened by recursing.  */
5015 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5018 tree part = TREE_VALUE (tail);
5019 tree part_type = TREE_TYPE (part);
5020 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5021 rtx target = assign_temp (part_type, 0, 1, 1);
5022 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5023 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5024 parts = tree_cons (to_be_saved,
5025 build (RTL_EXPR, part_type, NULL_TREE,
5028 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5033 /* Subroutine of expand_expr:
5034 record the non-copied parts (LIST) of an expr (LHS), and return a list
5035 which specifies the initial values of these parts. */
/* NOTE(review): declaration and brace lines are elided in this listing.
   Unlike save_noncopied_parts above, nothing is evaluated here: each
   leaf of LIST just yields a TREE_LIST pairing the original PURPOSE
   with a COMPONENT_REF naming the part to be initialized.  */
5038 init_noncopied_parts (lhs, list)
5045 for (tail = list; tail; tail = TREE_CHAIN (tail))
5046 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5047 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5050 tree part = TREE_VALUE (tail);
5051 tree part_type = TREE_TYPE (part);
5052 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5053 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5058 /* Subroutine of expand_expr: return nonzero iff there is no way that
5059 EXP can reference X, which is being modified. TOP_P is nonzero if this
5060 call is going to be used to determine whether we need a temporary
5061 for EXP, as opposed to a recursive call to this function.
5063 It is always safe for this routine to return zero since it merely
5064 searches for optimization opportunities. */
/* NOTE(review): many lines are elided in this listing.  The cases below
   set EXP_RTL when a single rtx decides safety; the overlap test of
   EXP_RTL against X is at the end of the function.  */
5067 safe_from_p (x, exp, top_p)
/* Static state for the SAVE_EXPR -> ERROR_MARK rewriting trick used to
   avoid re-scanning shared SAVE_EXPRs (see the SAVE_EXPR case below).  */
5074 static int save_expr_count;
5075 static int save_expr_size = 0;
5076 static tree *save_expr_rewritten;
5077 static tree save_expr_trees[256];
5080 /* If EXP has varying size, we MUST use a target since we currently
5081 have no way of allocating temporaries of variable size
5082 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5083 So we assume here that something at a higher level has prevented a
5084 clash. This is somewhat bogus, but the best we can do. Only
5085 do this when X is BLKmode and when we are at the top level. */
5086 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5087 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5088 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5089 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5090 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5092 && GET_MODE (x) == BLKmode))
/* At top level, initialize the SAVE_EXPR rewrite table; after the
   recursive scan, turn every rewritten ERROR_MARK back into a
   SAVE_EXPR.  */
5095 if (top_p && save_expr_size == 0)
5099 save_expr_count = 0;
5100 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5101 save_expr_rewritten = &save_expr_trees[0];
5103 rtn = safe_from_p (x, exp, 1);
5105 for (i = 0; i < save_expr_count; ++i)
5107 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5109 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5117 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5118 find the underlying pseudo. */
5119 if (GET_CODE (x) == SUBREG)
5122 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5126 /* If X is a location in the outgoing argument area, it is always safe. */
5127 if (GET_CODE (x) == MEM
5128 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5129 || (GET_CODE (XEXP (x, 0)) == PLUS
5130 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
/* First classify EXP by tree code class.  */
5133 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5136 exp_rtl = DECL_RTL (exp);
5143 if (TREE_CODE (exp) == TREE_LIST)
5144 return ((TREE_VALUE (exp) == 0
5145 || safe_from_p (x, TREE_VALUE (exp), 0))
5146 && (TREE_CHAIN (exp) == 0
5147 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5148 else if (TREE_CODE (exp) == ERROR_MARK)
5149 return 1; /* An already-visited SAVE_EXPR? */
5154 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5158 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5159 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5163 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5164 the expression. If it is set, we conflict iff we are that rtx or
5165 both are in memory. Otherwise, we check all operands of the
5166 expression recursively. */
5168 switch (TREE_CODE (exp))
5171 return (staticp (TREE_OPERAND (exp, 0))
5172 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5173 || TREE_STATIC (exp))
5176 if (GET_CODE (x) == MEM)
5181 exp_rtl = CALL_EXPR_RTL (exp);
5184 /* Assume that the call will clobber all hard registers and
5186 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5187 || GET_CODE (x) == MEM)
5194 /* If a sequence exists, we would have to scan every instruction
5195 in the sequence to see if it was safe. This is probably not
5197 if (RTL_EXPR_SEQUENCE (exp))
5200 exp_rtl = RTL_EXPR_RTL (exp);
5203 case WITH_CLEANUP_EXPR:
5204 exp_rtl = RTL_EXPR_RTL (exp);
5207 case CLEANUP_POINT_EXPR:
5208 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5211 exp_rtl = SAVE_EXPR_RTL (exp);
5215 /* This SAVE_EXPR might appear many times in the top-level
5216 safe_from_p() expression, and if it has a complex
5217 subexpression, examining it multiple times could result
5218 in a combinatorial explosion. E.g. on an Alpha
5219 running at least 200MHz, a Fortran test case compiled with
5220 optimization took about 28 minutes to compile -- even though
5221 it was only a few lines long, and the complicated line causing
5222 so much time to be spent in the earlier version of safe_from_p()
5223 had only 293 or so unique nodes.
5225 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5226 where it is so we can turn it back in the top-level safe_from_p()
5229 /* For now, don't bother re-sizing the array. */
5230 if (save_expr_count >= save_expr_size)
5232 save_expr_rewritten[save_expr_count++] = exp;
5234 nops = tree_code_length[(int) SAVE_EXPR];
5235 for (i = 0; i < nops; i++)
5237 tree operand = TREE_OPERAND (exp, i);
5238 if (operand == NULL_TREE)
5240 TREE_SET_CODE (exp, ERROR_MARK);
5241 if (!safe_from_p (x, operand, 0))
5243 TREE_SET_CODE (exp, SAVE_EXPR);
5245 TREE_SET_CODE (exp, ERROR_MARK);
5249 /* The only operand we look at is operand 1. The rest aren't
5250 part of the expression. */
5251 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5253 case METHOD_CALL_EXPR:
5254 /* This takes a rtx argument, but shouldn't appear here. */
5261 /* If we have an rtx, we do not need to scan our operands. */
/* Otherwise recurse over every operand of EXP.  */
5265 nops = tree_code_length[(int) TREE_CODE (exp)];
5266 for (i = 0; i < nops; i++)
5267 if (TREE_OPERAND (exp, i) != 0
5268 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5272 /* If we have an rtl, find any enclosed object. Then see if we conflict
5276 if (GET_CODE (exp_rtl) == SUBREG)
5278 exp_rtl = SUBREG_REG (exp_rtl);
5279 if (GET_CODE (exp_rtl) == REG
5280 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5284 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5285 are memory and EXP is not readonly. */
5286 return ! (rtx_equal_p (x, exp_rtl)
5287 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5288 && ! TREE_READONLY (exp)));
5291 /* If we reach here, it is safe. */
5295 /* Subroutine of expand_expr: return nonzero iff EXP is an
5296 expression whose type is statically determinable. */
/* NOTE(review): the function header line is elided in this listing —
   presumably static int fixed_type_p (exp); confirm against the full
   source.  The visible codes are the "fixed type" cases.  */
5302 if (TREE_CODE (exp) == PARM_DECL
5303 || TREE_CODE (exp) == VAR_DECL
5304 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5305 || TREE_CODE (exp) == COMPONENT_REF
5306 || TREE_CODE (exp) == ARRAY_REF)
5311 /* Subroutine of expand_expr: return rtx if EXP is a
5312 variable or parameter; else return 0. */
/* NOTE(review): the function header and the remaining case labels of the
   switch are elided in this listing; only the DECL_RTL return for the
   variable/parameter cases is visible.  */
5319 switch (TREE_CODE (exp))
5323 return DECL_RTL (exp);
5329 #ifdef MAX_INTEGER_COMPUTATION_MODE
/* Report a fatal error if EXP involves an integer operation wider than
   MAX_INTEGER_COMPUTATION_MODE.  The result type is checked first; for
   unary ('1'), binary ('2') and comparison ('<') codes the operand
   types are checked as well.  */
5331 check_max_integer_computation_mode (exp)
5334 enum tree_code code = TREE_CODE (exp);
5335 enum machine_mode mode;
5337 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5338 if (code == NOP_EXPR
5339 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5342 /* First check the type of the overall operation. We need only look at
5343 unary, binary and relational operations. */
5344 if (TREE_CODE_CLASS (code) == '1'
5345 || TREE_CODE_CLASS (code) == '2'
5346 || TREE_CODE_CLASS (code) == '<')
5348 mode = TYPE_MODE (TREE_TYPE (exp));
5349 if (GET_MODE_CLASS (mode) == MODE_INT
5350 && mode > MAX_INTEGER_COMPUTATION_MODE)
5351 fatal ("unsupported wide integer operation");
5354 /* Check operand of a unary op. */
5355 if (TREE_CODE_CLASS (code) == '1')
5357 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5358 if (GET_MODE_CLASS (mode) == MODE_INT
5359 && mode > MAX_INTEGER_COMPUTATION_MODE)
5360 fatal ("unsupported wide integer operation");
5363 /* Check operands of a binary/comparison op. */
5364 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5366 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5367 if (GET_MODE_CLASS (mode) == MODE_INT
5368 && mode > MAX_INTEGER_COMPUTATION_MODE)
5369 fatal ("unsupported wide integer operation");
5371 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5372 if (GET_MODE_CLASS (mode) == MODE_INT
5373 && mode > MAX_INTEGER_COMPUTATION_MODE)
5374 fatal ("unsupported wide integer operation");
5380 /* expand_expr: generate code for computing expression EXP.
5381 An rtx for the computed value is returned. The value is never null.
5382 In the case of a void EXP, const0_rtx is returned.
5384 The value may be stored in TARGET if TARGET is nonzero.
5385 TARGET is just a suggestion; callers must assume that
5386 the rtx returned may not be the same as TARGET.
5388 If TARGET is CONST0_RTX, it means that the value will be ignored.
5390 If TMODE is not VOIDmode, it suggests generating the
5391 result in mode TMODE. But this is done only when convenient.
5392 Otherwise, TMODE is ignored and the value generated in its natural mode.
5393 TMODE is just a suggestion; callers must assume that
5394 the rtx returned may not have mode TMODE.
5396 Note that TARGET may have neither TMODE nor MODE. In that case, it
5397 probably will not be used.
5399 If MODIFIER is EXPAND_SUM then when EXP is an addition
5400 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5401 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5402 products as above, or REG or MEM, or constant.
5403 Ordinarily in such cases we would output mul or add instructions
5404 and then return a pseudo reg containing the sum.
5406 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5407 it also marks a label as absolutely required (it can't be dead).
5408 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5409 This is used for outputting expressions used in initializers.
5411 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5412 with a constant address even if that address is not normally legitimate.
5413 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
5416 expand_expr (exp, target, tmode, modifier)
5419 enum machine_mode tmode;
5420 enum expand_modifier modifier;
5422 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
5423 This is static so it will be accessible to our recursive callees. */
5424 static tree placeholder_list = 0;
5425 register rtx op0, op1, temp;
5426 tree type = TREE_TYPE (exp);
5427 int unsignedp = TREE_UNSIGNED (type);
5428 register enum machine_mode mode = TYPE_MODE (type);
5429 register enum tree_code code = TREE_CODE (exp);
5431 /* Use subtarget as the target for operand 0 of a binary operation. */
5432 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5433 rtx original_target = target;
5434 int ignore = (target == const0_rtx
5435 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5436 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5437 || code == COND_EXPR)
5438 && TREE_CODE (type) == VOID_TYPE));
5440 /* Used by check-memory-usage to make modifier read only. */
5441 enum expand_modifier ro_modifier;
5443 /* Make a read-only version of the modifier. */
5444 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5445 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5446 ro_modifier = modifier;
5448 ro_modifier = EXPAND_NORMAL;
5450 /* Don't use hard regs as subtargets, because the combiner
5451 can only handle pseudo regs. */
5452 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5454 /* Avoid subtargets inside loops,
5455 since they hide some invariant expressions. */
5456 if (preserve_subexpressions_p ())
5459 /* If we are going to ignore this result, we need only do something
5460 if there is a side-effect somewhere in the expression. If there
5461 is, short-circuit the most common cases here. Note that we must
5462 not call expand_expr with anything but const0_rtx in case this
5463 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5467 if (! TREE_SIDE_EFFECTS (exp))
5470 /* Ensure we reference a volatile object even if value is ignored. */
5471 if (TREE_THIS_VOLATILE (exp)
5472 && TREE_CODE (exp) != FUNCTION_DECL
5473 && mode != VOIDmode && mode != BLKmode)
5475 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5476 if (GET_CODE (temp) == MEM)
5477 temp = copy_to_reg (temp);
5481 if (TREE_CODE_CLASS (code) == '1')
5482 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5483 VOIDmode, ro_modifier);
5484 else if (TREE_CODE_CLASS (code) == '2'
5485 || TREE_CODE_CLASS (code) == '<')
5487 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5488 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5491 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5492 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5493 /* If the second operand has no side effects, just evaluate
5495 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5496 VOIDmode, ro_modifier);
5501 #ifdef MAX_INTEGER_COMPUTATION_MODE
5503 && TREE_CODE (exp) != INTEGER_CST
5504 && TREE_CODE (exp) != PARM_DECL
5505 && TREE_CODE (exp) != ARRAY_REF
5506 && TREE_CODE (exp) != COMPONENT_REF
5507 && TREE_CODE (exp) != BIT_FIELD_REF
5508 && TREE_CODE (exp) != INDIRECT_REF
5509 && TREE_CODE (exp) != VAR_DECL)
5511 enum machine_mode mode = GET_MODE (target);
5513 if (GET_MODE_CLASS (mode) == MODE_INT
5514 && mode > MAX_INTEGER_COMPUTATION_MODE)
5515 fatal ("unsupported wide integer operation");
5518 if (TREE_CODE (exp) != INTEGER_CST
5519 && TREE_CODE (exp) != PARM_DECL
5520 && TREE_CODE (exp) != ARRAY_REF
5521 && TREE_CODE (exp) != COMPONENT_REF
5522 && TREE_CODE (exp) != BIT_FIELD_REF
5523 && TREE_CODE (exp) != INDIRECT_REF
5524 && TREE_CODE (exp) != VAR_DECL
5525 && GET_MODE_CLASS (tmode) == MODE_INT
5526 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5527 fatal ("unsupported wide integer operation");
5529 check_max_integer_computation_mode (exp);
5532 /* If will do cse, generate all results into pseudo registers
5533 since 1) that allows cse to find more things
5534 and 2) otherwise cse could produce an insn the machine
5537 if (! cse_not_expected && mode != BLKmode && target
5538 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5545 tree function = decl_function_context (exp);
5546 /* Handle using a label in a containing function. */
5547 if (function != current_function_decl
5548 && function != inline_function_decl && function != 0)
5550 struct function *p = find_function_data (function);
5551 /* Allocate in the memory associated with the function
5552 that the label is in. */
5553 push_obstacks (p->function_obstack,
5554 p->function_maybepermanent_obstack);
5556 p->forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5561 else if (modifier == EXPAND_INITIALIZER)
5562 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5563 label_rtx (exp), forced_labels);
5564 temp = gen_rtx_MEM (FUNCTION_MODE,
5565 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5566 if (function != current_function_decl
5567 && function != inline_function_decl && function != 0)
5568 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5573 if (DECL_RTL (exp) == 0)
5575 error_with_decl (exp, "prior parameter's size depends on `%s'");
5576 return CONST0_RTX (mode);
5579 /* ... fall through ... */
5582 /* If a static var's type was incomplete when the decl was written,
5583 but the type is complete now, lay out the decl now. */
5584 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5585 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5587 push_obstacks_nochange ();
5588 end_temporary_allocation ();
5589 layout_decl (exp, 0);
5590 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5594 /* Although static-storage variables start off initialized, according to
5595 ANSI C, a memcpy could overwrite them with uninitialized values. So
5596 we check them too. This also lets us check for read-only variables
5597 accessed via a non-const declaration, in case it won't be detected
5598 any other way (e.g., in an embedded system or OS kernel without
5601 Aggregates are not checked here; they're handled elsewhere. */
5602 if (current_function_check_memory_usage && code == VAR_DECL
5603 && GET_CODE (DECL_RTL (exp)) == MEM
5604 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5606 enum memory_use_mode memory_usage;
5607 memory_usage = get_memory_usage_from_modifier (modifier);
5609 if (memory_usage != MEMORY_USE_DONT)
5610 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5611 XEXP (DECL_RTL (exp), 0), ptr_mode,
5612 GEN_INT (int_size_in_bytes (type)),
5613 TYPE_MODE (sizetype),
5614 GEN_INT (memory_usage),
5615 TYPE_MODE (integer_type_node));
5618 /* ... fall through ... */
5622 if (DECL_RTL (exp) == 0)
5625 /* Ensure variable marked as used even if it doesn't go through
5626 a parser. If it hasn't be used yet, write out an external
5628 if (! TREE_USED (exp))
5630 assemble_external (exp);
5631 TREE_USED (exp) = 1;
5634 /* Show we haven't gotten RTL for this yet. */
5637 /* Handle variables inherited from containing functions. */
5638 context = decl_function_context (exp);
5640 /* We treat inline_function_decl as an alias for the current function
5641 because that is the inline function whose vars, types, etc.
5642 are being merged into the current function.
5643 See expand_inline_function. */
5645 if (context != 0 && context != current_function_decl
5646 && context != inline_function_decl
5647 /* If var is static, we don't need a static chain to access it. */
5648 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5649 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5653 /* Mark as non-local and addressable. */
5654 DECL_NONLOCAL (exp) = 1;
5655 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5657 mark_addressable (exp);
5658 if (GET_CODE (DECL_RTL (exp)) != MEM)
5660 addr = XEXP (DECL_RTL (exp), 0);
5661 if (GET_CODE (addr) == MEM)
5662 addr = gen_rtx_MEM (Pmode,
5663 fix_lexical_addr (XEXP (addr, 0), exp));
5665 addr = fix_lexical_addr (addr, exp);
5666 temp = change_address (DECL_RTL (exp), mode, addr);
5669 /* This is the case of an array whose size is to be determined
5670 from its initializer, while the initializer is still being parsed.
5673 else if (GET_CODE (DECL_RTL (exp)) == MEM
5674 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5675 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5676 XEXP (DECL_RTL (exp), 0));
5678 /* If DECL_RTL is memory, we are in the normal case and either
5679 the address is not valid or it is not a register and -fforce-addr
5680 is specified, get the address into a register. */
5682 else if (GET_CODE (DECL_RTL (exp)) == MEM
5683 && modifier != EXPAND_CONST_ADDRESS
5684 && modifier != EXPAND_SUM
5685 && modifier != EXPAND_INITIALIZER
5686 && (! memory_address_p (DECL_MODE (exp),
5687 XEXP (DECL_RTL (exp), 0))
5689 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5690 temp = change_address (DECL_RTL (exp), VOIDmode,
5691 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5693 /* If we got something, return it. But first, set the alignment
5694 the address is a register. */
5697 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5698 mark_reg_pointer (XEXP (temp, 0),
5699 DECL_ALIGN (exp) / BITS_PER_UNIT);
5704 /* If the mode of DECL_RTL does not match that of the decl, it
5705 must be a promoted value. We return a SUBREG of the wanted mode,
5706 but mark it so that we know that it was already extended. */
5708 if (GET_CODE (DECL_RTL (exp)) == REG
5709 && GET_MODE (DECL_RTL (exp)) != mode)
5711 /* Get the signedness used for this variable. Ensure we get the
5712 same mode we got when the variable was declared. */
5713 if (GET_MODE (DECL_RTL (exp))
5714 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5717 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
5718 SUBREG_PROMOTED_VAR_P (temp) = 1;
5719 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5723 return DECL_RTL (exp);
5726 return immed_double_const (TREE_INT_CST_LOW (exp),
5727 TREE_INT_CST_HIGH (exp),
5731 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5732 EXPAND_MEMORY_USE_BAD);
5735 /* If optimized, generate immediate CONST_DOUBLE
5736 which will be turned into memory by reload if necessary.
5738 We used to force a register so that loop.c could see it. But
5739 this does not allow gen_* patterns to perform optimizations with
5740 the constants. It also produces two insns in cases like "x = 1.0;".
5741 On most machines, floating-point constants are not permitted in
5742 many insns, so we'd end up copying it to a register in any case.
5744 Now, we do the copying in expand_binop, if appropriate. */
5745 return immed_real_const (exp);
5749 if (! TREE_CST_RTL (exp))
5750 output_constant_def (exp);
5752 /* TREE_CST_RTL probably contains a constant address.
5753 On RISC machines where a constant address isn't valid,
5754 make some insns to get that address into a register. */
5755 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5756 && modifier != EXPAND_CONST_ADDRESS
5757 && modifier != EXPAND_INITIALIZER
5758 && modifier != EXPAND_SUM
5759 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5761 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5762 return change_address (TREE_CST_RTL (exp), VOIDmode,
5763 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5764 return TREE_CST_RTL (exp);
5766 case EXPR_WITH_FILE_LOCATION:
5769 char *saved_input_filename = input_filename;
5770 int saved_lineno = lineno;
5771 input_filename = EXPR_WFL_FILENAME (exp);
5772 lineno = EXPR_WFL_LINENO (exp);
5773 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
5774 emit_line_note (input_filename, lineno);
5775 /* Possibly avoid switching back and force here */
5776 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
5777 input_filename = saved_input_filename;
5778 lineno = saved_lineno;
5783 context = decl_function_context (exp);
5785 /* If this SAVE_EXPR was at global context, assume we are an
5786 initialization function and move it into our context. */
5788 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
5790 /* We treat inline_function_decl as an alias for the current function
5791 because that is the inline function whose vars, types, etc.
5792 are being merged into the current function.
5793 See expand_inline_function. */
5794 if (context == current_function_decl || context == inline_function_decl)
5797 /* If this is non-local, handle it. */
5800 /* The following call just exists to abort if the context is
5801 not of a containing function. */
5802 find_function_data (context);
5804 temp = SAVE_EXPR_RTL (exp);
5805 if (temp && GET_CODE (temp) == REG)
5807 put_var_into_stack (exp);
5808 temp = SAVE_EXPR_RTL (exp);
5810 if (temp == 0 || GET_CODE (temp) != MEM)
5812 return change_address (temp, mode,
5813 fix_lexical_addr (XEXP (temp, 0), exp));
5815 if (SAVE_EXPR_RTL (exp) == 0)
5817 if (mode == VOIDmode)
5820 temp = assign_temp (type, 3, 0, 0);
5822 SAVE_EXPR_RTL (exp) = temp;
5823 if (!optimize && GET_CODE (temp) == REG)
5824 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
5827 /* If the mode of TEMP does not match that of the expression, it
5828 must be a promoted value. We pass store_expr a SUBREG of the
5829 wanted mode but mark it so that we know that it was already
5830 extended. Note that `unsignedp' was modified above in
5833 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5835 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5836 SUBREG_PROMOTED_VAR_P (temp) = 1;
5837 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5840 if (temp == const0_rtx)
5841 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5842 EXPAND_MEMORY_USE_BAD);
5844 store_expr (TREE_OPERAND (exp, 0), temp, 0);
5846 TREE_USED (exp) = 1;
5849 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5850 must be a promoted value. We return a SUBREG of the wanted mode,
5851 but mark it so that we know that it was already extended. */
5853 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5854 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5856 /* Compute the signedness and make the proper SUBREG. */
5857 promote_mode (type, mode, &unsignedp, 0);
5858 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5859 SUBREG_PROMOTED_VAR_P (temp) = 1;
5860 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5864 return SAVE_EXPR_RTL (exp);
5869 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5870 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5874 case PLACEHOLDER_EXPR:
5876 tree placeholder_expr;
5878 /* If there is an object on the head of the placeholder list,
5879 see if some object in it is of type TYPE or a pointer to it. For
5880 further information, see tree.def. */
5881 for (placeholder_expr = placeholder_list;
5882 placeholder_expr != 0;
5883 placeholder_expr = TREE_CHAIN (placeholder_expr))
5885 tree need_type = TYPE_MAIN_VARIANT (type);
5887 tree old_list = placeholder_list;
5890 /* Find the outermost reference that is of the type we want.
5891 If none, see if any object has a type that is a pointer to
5892 the type we want. */
5893 for (elt = TREE_PURPOSE (placeholder_expr);
5894 elt != 0 && object == 0;
5896 = ((TREE_CODE (elt) == COMPOUND_EXPR
5897 || TREE_CODE (elt) == COND_EXPR)
5898 ? TREE_OPERAND (elt, 1)
5899 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5900 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5901 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5902 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5903 ? TREE_OPERAND (elt, 0) : 0))
5904 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5907 for (elt = TREE_PURPOSE (placeholder_expr);
5908 elt != 0 && object == 0;
5910 = ((TREE_CODE (elt) == COMPOUND_EXPR
5911 || TREE_CODE (elt) == COND_EXPR)
5912 ? TREE_OPERAND (elt, 1)
5913 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5914 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5915 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5916 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5917 ? TREE_OPERAND (elt, 0) : 0))
5918 if (POINTER_TYPE_P (TREE_TYPE (elt))
5919 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
5921 object = build1 (INDIRECT_REF, need_type, elt);
5925 /* Expand this object skipping the list entries before
5926 it was found in case it is also a PLACEHOLDER_EXPR.
5927 In that case, we want to translate it using subsequent
5929 placeholder_list = TREE_CHAIN (placeholder_expr);
5930 temp = expand_expr (object, original_target, tmode,
5932 placeholder_list = old_list;
5938 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5941 case WITH_RECORD_EXPR:
5942 /* Put the object on the placeholder list, expand our first operand,
5943 and pop the list. */
5944 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5946 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
5947 tmode, ro_modifier);
5948 placeholder_list = TREE_CHAIN (placeholder_list);
5952 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
5953 expand_goto (TREE_OPERAND (exp, 0));
5955 expand_computed_goto (TREE_OPERAND (exp, 0));
5959 expand_exit_loop_if_false (NULL_PTR,
5960 invert_truthvalue (TREE_OPERAND (exp, 0)));
5963 case LABELED_BLOCK_EXPR:
5964 if (LABELED_BLOCK_BODY (exp))
5965 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
5966 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
5969 case EXIT_BLOCK_EXPR:
5970 if (EXIT_BLOCK_RETURN (exp))
5971 really_sorry ("returned value in block_exit_expr");
5972 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
5977 expand_start_loop (1);
5978 expand_expr_stmt (TREE_OPERAND (exp, 0));
5986 tree vars = TREE_OPERAND (exp, 0);
5987 int vars_need_expansion = 0;
5989 /* Need to open a binding contour here because
5990 if there are any cleanups they must be contained here. */
5991 expand_start_bindings (0);
5993 /* Mark the corresponding BLOCK for output in its proper place. */
5994 if (TREE_OPERAND (exp, 2) != 0
5995 && ! TREE_USED (TREE_OPERAND (exp, 2)))
5996 insert_block (TREE_OPERAND (exp, 2));
5998 /* If VARS have not yet been expanded, expand them now. */
6001 if (DECL_RTL (vars) == 0)
6003 vars_need_expansion = 1;
6006 expand_decl_init (vars);
6007 vars = TREE_CHAIN (vars);
6010 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6012 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6018 if (RTL_EXPR_SEQUENCE (exp))
6020 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6022 emit_insns (RTL_EXPR_SEQUENCE (exp));
6023 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6025 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6026 free_temps_for_rtl_expr (exp);
6027 return RTL_EXPR_RTL (exp);
6030 /* If we don't need the result, just ensure we evaluate any
6035 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6036 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6037 EXPAND_MEMORY_USE_BAD);
6041 /* All elts simple constants => refer to a constant in memory. But
6042 if this is a non-BLKmode mode, let it store a field at a time
6043 since that should make a CONST_INT or CONST_DOUBLE when we
6044 fold. Likewise, if we have a target we can use, it is best to
6045 store directly into the target unless the type is large enough
6046 that memcpy will be used. If we are making an initializer and
6047 all operands are constant, put it in memory as well. */
6048 else if ((TREE_STATIC (exp)
6049 && ((mode == BLKmode
6050 && ! (target != 0 && safe_from_p (target, exp, 1)))
6051 || TREE_ADDRESSABLE (exp)
6052 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
6053 && (move_by_pieces_ninsns
6054 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
6055 TYPE_ALIGN (type) / BITS_PER_UNIT)
6057 && ! mostly_zeros_p (exp))))
6058 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6060 rtx constructor = output_constant_def (exp);
6061 if (modifier != EXPAND_CONST_ADDRESS
6062 && modifier != EXPAND_INITIALIZER
6063 && modifier != EXPAND_SUM
6064 && (! memory_address_p (GET_MODE (constructor),
6065 XEXP (constructor, 0))
6067 && GET_CODE (XEXP (constructor, 0)) != REG)))
6068 constructor = change_address (constructor, VOIDmode,
6069 XEXP (constructor, 0));
6075 /* Handle calls that pass values in multiple non-contiguous
6076 locations. The Irix 6 ABI has examples of this. */
6077 if (target == 0 || ! safe_from_p (target, exp, 1)
6078 || GET_CODE (target) == PARALLEL)
6080 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6081 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6083 target = assign_temp (type, 0, 1, 1);
6086 if (TREE_READONLY (exp))
6088 if (GET_CODE (target) == MEM)
6089 target = copy_rtx (target);
6091 RTX_UNCHANGING_P (target) = 1;
6094 store_constructor (exp, target, 0);
6100 tree exp1 = TREE_OPERAND (exp, 0);
6103 tree string = string_constant (exp1, &index);
6106 /* Try to optimize reads from const strings. */
6108 && TREE_CODE (string) == STRING_CST
6109 && TREE_CODE (index) == INTEGER_CST
6110 && !TREE_INT_CST_HIGH (index)
6111 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
6112 && GET_MODE_CLASS (mode) == MODE_INT
6113 && GET_MODE_SIZE (mode) == 1
6114 && modifier != EXPAND_MEMORY_USE_WO)
6115 return GEN_INT (TREE_STRING_POINTER (string)[i]);
6117 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6118 op0 = memory_address (mode, op0);
6120 if (current_function_check_memory_usage && !AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6122 enum memory_use_mode memory_usage;
6123 memory_usage = get_memory_usage_from_modifier (modifier);
6125 if (memory_usage != MEMORY_USE_DONT)
6127 in_check_memory_usage = 1;
6128 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6130 GEN_INT (int_size_in_bytes (type)),
6131 TYPE_MODE (sizetype),
6132 GEN_INT (memory_usage),
6133 TYPE_MODE (integer_type_node));
6134 in_check_memory_usage = 0;
6138 temp = gen_rtx_MEM (mode, op0);
6139 /* If address was computed by addition,
6140 mark this as an element of an aggregate. */
6141 if (TREE_CODE (exp1) == PLUS_EXPR
6142 || (TREE_CODE (exp1) == SAVE_EXPR
6143 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
6144 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
6145 || (TREE_CODE (exp1) == ADDR_EXPR
6146 && (exp2 = TREE_OPERAND (exp1, 0))
6147 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
6148 MEM_IN_STRUCT_P (temp) = 1;
6150 /* If the pointer is actually a REFERENCE_TYPE, this could be pointing
6151 into some aggregate too. In theory we could fold this into the
6152 previous check and use rtx_addr_varies_p there too.
6154 However, this seems safer. */
6155 if (!MEM_IN_STRUCT_P (temp)
6156 && (TREE_CODE (TREE_TYPE (exp1)) == REFERENCE_TYPE
6157 /* This may have been an array reference to the first element
6158 that was optimized away from being an addition. */
6159 || (TREE_CODE (exp1) == NOP_EXPR
6160 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp1, 0)))
6162 || ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp1, 0)))
6164 && (AGGREGATE_TYPE_P
6165 (TREE_TYPE (TREE_TYPE
6166 (TREE_OPERAND (exp1, 0))))))))))
6167 MEM_IN_STRUCT_P (temp) = ! rtx_addr_varies_p (temp);
6169 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6170 MEM_ALIAS_SET (temp) = get_alias_set (exp);
6172 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6173 here, because, in C and C++, the fact that a location is accessed
6174 through a pointer to const does not mean that the value there can
6175 never change. Languages where it can never change should
6176 also set TREE_STATIC. */
6177 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6182 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6186 tree array = TREE_OPERAND (exp, 0);
6187 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6188 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6189 tree index = TREE_OPERAND (exp, 1);
6190 tree index_type = TREE_TYPE (index);
6193 /* Optimize the special-case of a zero lower bound.
6195 We convert the low_bound to sizetype to avoid some problems
6196 with constant folding. (E.g. suppose the lower bound is 1,
6197 and its mode is QI. Without the conversion, (ARRAY
6198 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6199 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
6201 But sizetype isn't quite right either (especially if
6202 the lowbound is negative). FIXME */
6204 if (! integer_zerop (low_bound))
6205 index = fold (build (MINUS_EXPR, index_type, index,
6206 convert (sizetype, low_bound)));
6208 /* Fold an expression like: "foo"[2].
6209 This is not done in fold so it won't happen inside &.
6210 Don't fold if this is for wide characters since it's too
6211 difficult to do correctly and this is a very rare case. */
6213 if (TREE_CODE (array) == STRING_CST
6214 && TREE_CODE (index) == INTEGER_CST
6215 && !TREE_INT_CST_HIGH (index)
6216 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
6217 && GET_MODE_CLASS (mode) == MODE_INT
6218 && GET_MODE_SIZE (mode) == 1)
6219 return GEN_INT (TREE_STRING_POINTER (array)[i]);
6221 /* If this is a constant index into a constant array,
6222 just get the value from the array. Handle both the cases when
6223 we have an explicit constructor and when our operand is a variable
6224 that was declared const. */
6226 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
6228 if (TREE_CODE (index) == INTEGER_CST
6229 && TREE_INT_CST_HIGH (index) == 0)
6231 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
6233 i = TREE_INT_CST_LOW (index);
6235 elem = TREE_CHAIN (elem);
6237 return expand_expr (fold (TREE_VALUE (elem)), target,
6238 tmode, ro_modifier);
6242 else if (optimize >= 1
6243 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6244 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6245 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6247 if (TREE_CODE (index) == INTEGER_CST)
6249 tree init = DECL_INITIAL (array);
6251 i = TREE_INT_CST_LOW (index);
6252 if (TREE_CODE (init) == CONSTRUCTOR)
6254 tree elem = CONSTRUCTOR_ELTS (init);
6257 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
6258 elem = TREE_CHAIN (elem);
6260 return expand_expr (fold (TREE_VALUE (elem)), target,
6261 tmode, ro_modifier);
6263 else if (TREE_CODE (init) == STRING_CST
6264 && TREE_INT_CST_HIGH (index) == 0
6265 && (TREE_INT_CST_LOW (index)
6266 < TREE_STRING_LENGTH (init)))
6268 (TREE_STRING_POINTER
6269 (init)[TREE_INT_CST_LOW (index)]));
6274 /* ... fall through ... */
6278 /* If the operand is a CONSTRUCTOR, we can just extract the
6279 appropriate field if it is present. Don't do this if we have
6280 already written the data since we want to refer to that copy
6281 and varasm.c assumes that's what we'll do. */
6282 if (code != ARRAY_REF
6283 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6284 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6288 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6289 elt = TREE_CHAIN (elt))
6290 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6291 /* We can normally use the value of the field in the
6292 CONSTRUCTOR. However, if this is a bitfield in
6293 an integral mode that we can fit in a HOST_WIDE_INT,
6294 we must mask only the number of bits in the bitfield,
6295 since this is done implicitly by the constructor. If
6296 the bitfield does not meet either of those conditions,
6297 we can't do this optimization. */
6298 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6299 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6301 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6302 <= HOST_BITS_PER_WIDE_INT))))
6304 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6305 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6307 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
6309 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6311 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6312 op0 = expand_and (op0, op1, target);
6316 enum machine_mode imode
6317 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6319 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6322 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6324 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6334 enum machine_mode mode1;
6340 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6341 &mode1, &unsignedp, &volatilep,
6344 /* If we got back the original object, something is wrong. Perhaps
6345 we are evaluating an expression too early. In any event, don't
6346 infinitely recurse. */
6350 /* If TEM's type is a union of variable size, pass TARGET to the inner
6351 computation, since it will need a temporary and TARGET is known
6352 to have to do. This occurs in unchecked conversion in Ada. */
6354 op0 = expand_expr (tem,
6355 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6356 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6358 ? target : NULL_RTX),
6360 modifier == EXPAND_INITIALIZER
6361 ? modifier : EXPAND_NORMAL);
6363 /* If this is a constant, put it into a register if it is a
6364 legitimate constant and memory if it isn't. */
6365 if (CONSTANT_P (op0))
6367 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6368 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
6369 op0 = force_reg (mode, op0);
6371 op0 = validize_mem (force_const_mem (mode, op0));
6376 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6378 if (GET_CODE (op0) != MEM)
6381 if (GET_MODE (offset_rtx) != ptr_mode)
6383 #ifdef POINTERS_EXTEND_UNSIGNED
6384 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6386 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6390 if (GET_CODE (op0) == MEM
6391 && GET_MODE (op0) == BLKmode
6393 && (bitpos % bitsize) == 0
6394 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6395 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6397 rtx temp = change_address (op0, mode1,
6398 plus_constant (XEXP (op0, 0),
6401 if (GET_CODE (XEXP (temp, 0)) == REG)
6404 op0 = change_address (op0, mode1,
6405 force_reg (GET_MODE (XEXP (temp, 0)),
6411 op0 = change_address (op0, VOIDmode,
6412 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6413 force_reg (ptr_mode, offset_rtx)));
6416 /* Don't forget about volatility even if this is a bitfield. */
6417 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6419 op0 = copy_rtx (op0);
6420 MEM_VOLATILE_P (op0) = 1;
6423 /* Check the access. */
6424 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
6426 enum memory_use_mode memory_usage;
6427 memory_usage = get_memory_usage_from_modifier (modifier);
6429 if (memory_usage != MEMORY_USE_DONT)
6434 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6435 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6437 /* Check the access right of the pointer. */
6438 if (size > BITS_PER_UNIT)
6439 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6441 GEN_INT (size / BITS_PER_UNIT),
6442 TYPE_MODE (sizetype),
6443 GEN_INT (memory_usage),
6444 TYPE_MODE (integer_type_node));
6448 /* In cases where an aligned union has an unaligned object
6449 as a field, we might be extracting a BLKmode value from
6450 an integer-mode (e.g., SImode) object. Handle this case
6451 by doing the extract into an object as wide as the field
6452 (which we know to be the width of a basic mode), then
6453 storing into memory, and changing the mode to BLKmode.
6454 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6455 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6456 if (mode1 == VOIDmode
6457 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6458 || (modifier != EXPAND_CONST_ADDRESS
6459 && modifier != EXPAND_INITIALIZER
6460 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6461 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6462 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6463 /* If the field isn't aligned enough to fetch as a memref,
6464 fetch it as a bit field. */
6465 || (SLOW_UNALIGNED_ACCESS
6466 && ((TYPE_ALIGN (TREE_TYPE (tem)) < (unsigned int) GET_MODE_ALIGNMENT (mode))
6467 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
6469 enum machine_mode ext_mode = mode;
6471 if (ext_mode == BLKmode)
6472 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6474 if (ext_mode == BLKmode)
6476 /* In this case, BITPOS must start at a byte boundary and
6477 TARGET, if specified, must be a MEM. */
6478 if (GET_CODE (op0) != MEM
6479 || (target != 0 && GET_CODE (target) != MEM)
6480 || bitpos % BITS_PER_UNIT != 0)
6483 op0 = change_address (op0, VOIDmode,
6484 plus_constant (XEXP (op0, 0),
6485 bitpos / BITS_PER_UNIT));
6487 target = assign_temp (type, 0, 1, 1);
6489 emit_block_move (target, op0,
6490 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6497 op0 = validize_mem (op0);
6499 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6500 mark_reg_pointer (XEXP (op0, 0), alignment);
6502 op0 = extract_bit_field (op0, bitsize, bitpos,
6503 unsignedp, target, ext_mode, ext_mode,
6505 int_size_in_bytes (TREE_TYPE (tem)));
6507 /* If the result is a record type and BITSIZE is narrower than
6508 the mode of OP0, an integral mode, and this is a big endian
6509 machine, we must put the field into the high-order bits. */
6510 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6511 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6512 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6513 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6514 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6518 if (mode == BLKmode)
6520 rtx new = assign_stack_temp (ext_mode,
6521 bitsize / BITS_PER_UNIT, 0);
6523 emit_move_insn (new, op0);
6524 op0 = copy_rtx (new);
6525 PUT_MODE (op0, BLKmode);
6526 MEM_IN_STRUCT_P (op0) = 1;
6532 /* If the result is BLKmode, use that to access the object
6534 if (mode == BLKmode)
6537 /* Get a reference to just this component. */
6538 if (modifier == EXPAND_CONST_ADDRESS
6539 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6540 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6541 (bitpos / BITS_PER_UNIT)));
6543 op0 = change_address (op0, mode1,
6544 plus_constant (XEXP (op0, 0),
6545 (bitpos / BITS_PER_UNIT)));
6547 if (GET_CODE (op0) == MEM)
6548 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6550 if (GET_CODE (XEXP (op0, 0)) == REG)
6551 mark_reg_pointer (XEXP (op0, 0), alignment);
6553 MEM_IN_STRUCT_P (op0) = 1;
6554 MEM_VOLATILE_P (op0) |= volatilep;
6555 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6556 || modifier == EXPAND_CONST_ADDRESS
6557 || modifier == EXPAND_INITIALIZER)
6559 else if (target == 0)
6560 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6562 convert_move (target, op0, unsignedp);
6566 /* Intended for a reference to a buffer of a file-object in Pascal.
6567 But it's not certain that a special tree code will really be
6568 necessary for these. INDIRECT_REF might work for them. */
6574 /* Pascal set IN expression.
6577 rlo = set_low - (set_low%bits_per_word);
6578 the_word = set [ (index - rlo)/bits_per_word ];
6579 bit_index = index % bits_per_word;
6580 bitmask = 1 << bit_index;
6581 return !!(the_word & bitmask); */
6583 tree set = TREE_OPERAND (exp, 0);
6584 tree index = TREE_OPERAND (exp, 1);
6585 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6586 tree set_type = TREE_TYPE (set);
6587 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6588 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6589 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6590 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6591 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6592 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6593 rtx setaddr = XEXP (setval, 0);
6594 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6596 rtx diff, quo, rem, addr, bit, result;
6598 preexpand_calls (exp);
6600 /* If domain is empty, answer is no. Likewise if index is constant
6601 and out of bounds. */
6602 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6603 && TREE_CODE (set_low_bound) == INTEGER_CST
6604 && tree_int_cst_lt (set_high_bound, set_low_bound))
6605 || (TREE_CODE (index) == INTEGER_CST
6606 && TREE_CODE (set_low_bound) == INTEGER_CST
6607 && tree_int_cst_lt (index, set_low_bound))
6608 || (TREE_CODE (set_high_bound) == INTEGER_CST
6609 && TREE_CODE (index) == INTEGER_CST
6610 && tree_int_cst_lt (set_high_bound, index))))
6614 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6616 /* If we get here, we have to generate the code for both cases
6617 (in range and out of range). */
6619 op0 = gen_label_rtx ();
6620 op1 = gen_label_rtx ();
6622 if (! (GET_CODE (index_val) == CONST_INT
6623 && GET_CODE (lo_r) == CONST_INT))
6625 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
6626 GET_MODE (index_val), iunsignedp, 0);
6627 emit_jump_insn (gen_blt (op1));
6630 if (! (GET_CODE (index_val) == CONST_INT
6631 && GET_CODE (hi_r) == CONST_INT))
6633 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
6634 GET_MODE (index_val), iunsignedp, 0);
6635 emit_jump_insn (gen_bgt (op1));
6638 /* Calculate the element number of bit zero in the first word
6640 if (GET_CODE (lo_r) == CONST_INT)
6641 rlow = GEN_INT (INTVAL (lo_r)
6642 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
6644 rlow = expand_binop (index_mode, and_optab, lo_r,
6645 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
6646 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6648 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6649 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6651 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6652 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6653 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6654 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6656 addr = memory_address (byte_mode,
6657 expand_binop (index_mode, add_optab, diff,
6658 setaddr, NULL_RTX, iunsignedp,
6661 /* Extract the bit we want to examine */
6662 bit = expand_shift (RSHIFT_EXPR, byte_mode,
6663 gen_rtx_MEM (byte_mode, addr),
6664 make_tree (TREE_TYPE (index), rem),
6666 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6667 GET_MODE (target) == byte_mode ? target : 0,
6668 1, OPTAB_LIB_WIDEN);
6670 if (result != target)
6671 convert_move (target, result, 1);
6673 /* Output the code to handle the out-of-range case. */
6676 emit_move_insn (target, const0_rtx);
6681 case WITH_CLEANUP_EXPR:
6682 if (RTL_EXPR_RTL (exp) == 0)
6685 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6686 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6688 /* That's it for this cleanup. */
6689 TREE_OPERAND (exp, 2) = 0;
6691 return RTL_EXPR_RTL (exp);
6693 case CLEANUP_POINT_EXPR:
6695 extern int temp_slot_level;
6696 /* Start a new binding layer that will keep track of all cleanup
6697 actions to be performed. */
6698 expand_start_bindings (0);
6700 target_temp_slot_level = temp_slot_level;
6702 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6703 /* If we're going to use this value, load it up now. */
6705 op0 = force_not_mem (op0);
6706 preserve_temp_slots (op0);
6707 expand_end_bindings (NULL_TREE, 0, 0);
6712 /* Check for a built-in function. */
6713 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6714 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6716 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6717 return expand_builtin (exp, target, subtarget, tmode, ignore);
6719 /* If this call was expanded already by preexpand_calls,
6720 just return the result we got. */
6721 if (CALL_EXPR_RTL (exp) != 0)
6722 return CALL_EXPR_RTL (exp);
6724 return expand_call (exp, target, ignore);
6726 case NON_LVALUE_EXPR:
6729 case REFERENCE_EXPR:
6730 if (TREE_CODE (type) == UNION_TYPE)
6732 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6735 if (mode != BLKmode)
6736 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6738 target = assign_temp (type, 0, 1, 1);
6741 if (GET_CODE (target) == MEM)
6742 /* Store data into beginning of memory target. */
6743 store_expr (TREE_OPERAND (exp, 0),
6744 change_address (target, TYPE_MODE (valtype), 0), 0);
6746 else if (GET_CODE (target) == REG)
6747 /* Store this field into a union of the proper type. */
6748 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6749 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6751 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))),
6756 /* Return the entire union. */
6760 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6762 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
6765 /* If the signedness of the conversion differs and OP0 is
6766 a promoted SUBREG, clear that indication since we now
6767 have to do the proper extension. */
6768 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6769 && GET_CODE (op0) == SUBREG)
6770 SUBREG_PROMOTED_VAR_P (op0) = 0;
6775 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
6776 if (GET_MODE (op0) == mode)
6779 /* If OP0 is a constant, just convert it into the proper mode. */
6780 if (CONSTANT_P (op0))
6782 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6783 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6785 if (modifier == EXPAND_INITIALIZER)
6786 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
6790 convert_to_mode (mode, op0,
6791 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6793 convert_move (target, op0,
6794 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6798 /* We come here from MINUS_EXPR when the second operand is a
6801 this_optab = add_optab;
6803 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6804 something else, make sure we add the register to the constant and
6805 then to the other thing. This case can occur during strength
6806 reduction and doing it this way will produce better code if the
6807 frame pointer or argument pointer is eliminated.
6809 fold-const.c will ensure that the constant is always in the inner
6810 PLUS_EXPR, so the only case we need to do anything about is if
6811 sp, ap, or fp is our second argument, in which case we must swap
6812 the innermost first argument and our second argument. */
6814 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6815 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6816 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6817 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6818 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6819 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6821 tree t = TREE_OPERAND (exp, 1);
6823 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6824 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
6827 /* If the result is to be ptr_mode and we are adding an integer to
6828 something, we might be forming a constant. So try to use
6829 plus_constant. If it produces a sum and we can't accept it,
6830 use force_operand. This allows P = &ARR[const] to generate
6831 efficient code on machines where a SYMBOL_REF is not a valid
6834 If this is an EXPAND_SUM call, always return the sum. */
6835 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
6836 || mode == ptr_mode)
6838 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
6839 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6840 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
6842 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
6844 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
6845 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6846 op1 = force_operand (op1, target);
6850 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6851 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
6852 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
6854 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6856 if (! CONSTANT_P (op0))
6858 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6859 VOIDmode, modifier);
6860 /* Don't go to both_summands if modifier
6861 says it's not right to return a PLUS. */
6862 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6866 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
6867 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6868 op0 = force_operand (op0, target);
6873 /* No sense saving up arithmetic to be done
6874 if it's all in the wrong mode to form part of an address.
6875 And force_operand won't know whether to sign-extend or
6877 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6878 || mode != ptr_mode)
6881 preexpand_calls (exp);
6882 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6885 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
6886 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
6889 /* Make sure any term that's a sum with a constant comes last. */
6890 if (GET_CODE (op0) == PLUS
6891 && CONSTANT_P (XEXP (op0, 1)))
6897 /* If adding to a sum including a constant,
6898 associate it to put the constant outside. */
6899 if (GET_CODE (op1) == PLUS
6900 && CONSTANT_P (XEXP (op1, 1)))
6902 rtx constant_term = const0_rtx;
6904 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
6907 /* Ensure that MULT comes first if there is one. */
6908 else if (GET_CODE (op0) == MULT)
6909 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
6911 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
6913 /* Let's also eliminate constants from op0 if possible. */
6914 op0 = eliminate_constant_term (op0, &constant_term);
6916 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
6917 their sum should be a constant. Form it into OP1, since the
6918 result we want will then be OP0 + OP1. */
6920 temp = simplify_binary_operation (PLUS, mode, constant_term,
6925 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
6928 /* Put a constant term last and put a multiplication first. */
6929 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
6930 temp = op1, op1 = op0, op0 = temp;
6932 temp = simplify_binary_operation (PLUS, mode, op0, op1);
6933 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
6936 /* For initializers, we are allowed to return a MINUS of two
6937 symbolic constants. Here we handle all cases when both operands
6939 /* Handle difference of two symbolic constants,
6940 for the sake of an initializer. */
6941 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6942 && really_constant_p (TREE_OPERAND (exp, 0))
6943 && really_constant_p (TREE_OPERAND (exp, 1)))
6945 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
6946 VOIDmode, ro_modifier);
6947 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6948 VOIDmode, ro_modifier);
6950 /* If the last operand is a CONST_INT, use plus_constant of
6951 the negated constant. Else make the MINUS. */
6952 if (GET_CODE (op1) == CONST_INT)
6953 return plus_constant (op0, - INTVAL (op1));
6955 return gen_rtx_MINUS (mode, op0, op1);
6957 /* Convert A - const to A + (-const). */
6958 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6960 tree negated = fold (build1 (NEGATE_EXPR, type,
6961 TREE_OPERAND (exp, 1)));
6963 /* Deal with the case where we can't negate the constant
6965 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6967 tree newtype = signed_type (type);
6968 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
6969 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
6970 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
6972 if (! TREE_OVERFLOW (newneg))
6973 return expand_expr (convert (type,
6974 build (PLUS_EXPR, newtype,
6976 target, tmode, ro_modifier);
6980 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
6984 this_optab = sub_optab;
6988 preexpand_calls (exp);
6989 /* If first operand is constant, swap them.
6990 Thus the following special case checks need only
6991 check the second operand. */
6992 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6994 register tree t1 = TREE_OPERAND (exp, 0);
6995 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
6996 TREE_OPERAND (exp, 1) = t1;
6999 /* Attempt to return something suitable for generating an
7000 indexed address, for machines that support that. */
7002 if (modifier == EXPAND_SUM && mode == ptr_mode
7003 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7004 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7006 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7009 /* Apply distributive law if OP0 is x+c. */
7010 if (GET_CODE (op0) == PLUS
7011 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7012 return gen_rtx_PLUS (mode,
7013 gen_rtx_MULT (mode, XEXP (op0, 0),
7014 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7015 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7016 * INTVAL (XEXP (op0, 1))));
7018 if (GET_CODE (op0) != REG)
7019 op0 = force_operand (op0, NULL_RTX);
7020 if (GET_CODE (op0) != REG)
7021 op0 = copy_to_mode_reg (mode, op0);
7023 return gen_rtx_MULT (mode, op0,
7024 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7027 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7030 /* Check for multiplying things that have been extended
7031 from a narrower type. If this machine supports multiplying
7032 in that narrower type with a result in the desired type,
7033 do it that way, and avoid the explicit type-conversion. */
7034 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7035 && TREE_CODE (type) == INTEGER_TYPE
7036 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7037 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7038 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7039 && int_fits_type_p (TREE_OPERAND (exp, 1),
7040 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7041 /* Don't use a widening multiply if a shift will do. */
7042 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7043 > HOST_BITS_PER_WIDE_INT)
7044 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7046 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7047 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7049 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7050 /* If both operands are extended, they must either both
7051 be zero-extended or both be sign-extended. */
7052 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7054 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7056 enum machine_mode innermode
7057 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7058 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7059 ? smul_widen_optab : umul_widen_optab);
7060 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7061 ? umul_widen_optab : smul_widen_optab);
7062 if (mode == GET_MODE_WIDER_MODE (innermode))
7064 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7066 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7067 NULL_RTX, VOIDmode, 0);
7068 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7069 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7072 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7073 NULL_RTX, VOIDmode, 0);
7076 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7077 && innermode == word_mode)
7080 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7081 NULL_RTX, VOIDmode, 0);
7082 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7083 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7086 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7087 NULL_RTX, VOIDmode, 0);
7088 temp = expand_binop (mode, other_optab, op0, op1, target,
7089 unsignedp, OPTAB_LIB_WIDEN);
7090 htem = expand_mult_highpart_adjust (innermode,
7091 gen_highpart (innermode, temp),
7093 gen_highpart (innermode, temp),
7095 emit_move_insn (gen_highpart (innermode, temp), htem);
7100 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7101 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7102 return expand_mult (mode, op0, op1, target, unsignedp);
7104 case TRUNC_DIV_EXPR:
7105 case FLOOR_DIV_EXPR:
7107 case ROUND_DIV_EXPR:
7108 case EXACT_DIV_EXPR:
7109 preexpand_calls (exp);
7110 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7112 /* Possible optimization: compute the dividend with EXPAND_SUM
7113 then if the divisor is constant can optimize the case
7114 where some terms of the dividend have coeffs divisible by it. */
7115 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7116 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7117 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7120 this_optab = flodiv_optab;
7123 case TRUNC_MOD_EXPR:
7124 case FLOOR_MOD_EXPR:
7126 case ROUND_MOD_EXPR:
7127 preexpand_calls (exp);
7128 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7130 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7131 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7132 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7134 case FIX_ROUND_EXPR:
7135 case FIX_FLOOR_EXPR:
7137 abort (); /* Not used for C. */
7139 case FIX_TRUNC_EXPR:
7140 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7142 target = gen_reg_rtx (mode);
7143 expand_fix (target, op0, unsignedp);
7147 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7149 target = gen_reg_rtx (mode);
7150 /* expand_float can't figure out what to do if FROM has VOIDmode.
7151 So give it the correct mode. With -O, cse will optimize this. */
7152 if (GET_MODE (op0) == VOIDmode)
7153 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7155 expand_float (target, op0,
7156 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7160 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7161 temp = expand_unop (mode, neg_optab, op0, target, 0);
7167 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7169 /* Handle complex values specially. */
7170 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7171 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7172 return expand_complex_abs (mode, op0, target, unsignedp);
7174 /* Unsigned abs is simply the operand. Testing here means we don't
7175 risk generating incorrect code below. */
7176 if (TREE_UNSIGNED (type))
7179 return expand_abs (mode, op0, target, unsignedp,
7180 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7184 target = original_target;
7185 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7186 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7187 || GET_MODE (target) != mode
7188 || (GET_CODE (target) == REG
7189 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7190 target = gen_reg_rtx (mode);
7191 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7192 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7194 /* First try to do it with a special MIN or MAX instruction.
7195 If that does not win, use a conditional jump to select the proper
7197 this_optab = (TREE_UNSIGNED (type)
7198 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7199 : (code == MIN_EXPR ? smin_optab : smax_optab));
7201 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7206 /* At this point, a MEM target is no longer useful; we will get better
7209 if (GET_CODE (target) == MEM)
7210 target = gen_reg_rtx (mode);
7213 emit_move_insn (target, op0);
7215 op0 = gen_label_rtx ();
7217 /* If this mode is an integer too wide to compare properly,
7218 compare word by word. Rely on cse to optimize constant cases. */
7219 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
7221 if (code == MAX_EXPR)
7222 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7223 target, op1, NULL_RTX, op0);
7225 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7226 op1, target, NULL_RTX, op0);
7227 emit_move_insn (target, op1);
7231 if (code == MAX_EXPR)
7232 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
7233 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
7234 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
7236 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
7237 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
7238 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
7239 if (temp == const0_rtx)
7240 emit_move_insn (target, op1);
7241 else if (temp != const_true_rtx)
7243 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
7244 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
7247 emit_move_insn (target, op1);
7254 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7255 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7261 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7262 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7267 /* ??? Can optimize bitwise operations with one arg constant.
7268 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7269 and (a bitwise1 b) bitwise2 b (etc)
7270 but that is probably not worth while. */
7272 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7273 boolean values when we want in all cases to compute both of them. In
7274 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7275 as actual zero-or-1 values and then bitwise anding. In cases where
7276 there cannot be any side effects, better code would be made by
7277 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7278 how to recognize those cases. */
7280 case TRUTH_AND_EXPR:
7282 this_optab = and_optab;
7287 this_optab = ior_optab;
7290 case TRUTH_XOR_EXPR:
7292 this_optab = xor_optab;
7299 preexpand_calls (exp);
7300 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7302 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7303 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7306 /* Could determine the answer when only additive constants differ. Also,
7307 the addition of one can be handled by changing the condition. */
7314 preexpand_calls (exp);
7315 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7319 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7320 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7322 && GET_CODE (original_target) == REG
7323 && (GET_MODE (original_target)
7324 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7326 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7329 if (temp != original_target)
7330 temp = copy_to_reg (temp);
7332 op1 = gen_label_rtx ();
7333 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
7334 GET_MODE (temp), unsignedp, 0);
7335 emit_jump_insn (gen_beq (op1));
7336 emit_move_insn (temp, const1_rtx);
7341 /* If no set-flag instruction, must generate a conditional
7342 store into a temporary variable. Drop through
7343 and handle this like && and ||. */
7345 case TRUTH_ANDIF_EXPR:
7346 case TRUTH_ORIF_EXPR:
7348 && (target == 0 || ! safe_from_p (target, exp, 1)
7349 /* Make sure we don't have a hard reg (such as function's return
7350 value) live across basic blocks, if not optimizing. */
7351 || (!optimize && GET_CODE (target) == REG
7352 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7353 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7356 emit_clr_insn (target);
7358 op1 = gen_label_rtx ();
7359 jumpifnot (exp, op1);
7362 emit_0_to_1_insn (target);
7365 return ignore ? const0_rtx : target;
7367 case TRUTH_NOT_EXPR:
7368 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7369 /* The parser is careful to generate TRUTH_NOT_EXPR
7370 only with operands that are always zero or one. */
7371 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7372 target, 1, OPTAB_LIB_WIDEN);
7378 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7380 return expand_expr (TREE_OPERAND (exp, 1),
7381 (ignore ? const0_rtx : target),
7385 /* If we would have a "singleton" (see below) were it not for a
7386 conversion in each arm, bring that conversion back out. */
7387 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7388 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7389 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7390 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7392 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7393 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7395 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7396 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7397 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7398 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7399 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7400 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7401 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7402 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7403 return expand_expr (build1 (NOP_EXPR, type,
7404 build (COND_EXPR, TREE_TYPE (true),
7405 TREE_OPERAND (exp, 0),
7407 target, tmode, modifier);
7411 /* Note that COND_EXPRs whose type is a structure or union
7412 are required to be constructed to contain assignments of
7413 a temporary variable, so that we can evaluate them here
7414 for side effect only. If type is void, we must do likewise. */
7416 /* If an arm of the branch requires a cleanup,
7417 only that cleanup is performed. */
7420 tree binary_op = 0, unary_op = 0;
7422 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7423 convert it to our mode, if necessary. */
7424 if (integer_onep (TREE_OPERAND (exp, 1))
7425 && integer_zerop (TREE_OPERAND (exp, 2))
7426 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7430 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7435 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7436 if (GET_MODE (op0) == mode)
7440 target = gen_reg_rtx (mode);
7441 convert_move (target, op0, unsignedp);
7445 /* Check for X ? A + B : A. If we have this, we can copy A to the
7446 output and conditionally add B. Similarly for unary operations.
7447 Don't do this if X has side-effects because those side effects
7448 might affect A or B and the "?" operation is a sequence point in
7449 ANSI. (operand_equal_p tests for side effects.) */
7451 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7452 && operand_equal_p (TREE_OPERAND (exp, 2),
7453 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7454 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7455 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7456 && operand_equal_p (TREE_OPERAND (exp, 1),
7457 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7458 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7459 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7460 && operand_equal_p (TREE_OPERAND (exp, 2),
7461 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7462 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7463 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7464 && operand_equal_p (TREE_OPERAND (exp, 1),
7465 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7466 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7468 /* If we are not to produce a result, we have no target. Otherwise,
7469 if a target was specified use it; it will not be used as an
7470 intermediate target unless it is safe. If no target, use a
7475 else if (original_target
7476 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7477 || (singleton && GET_CODE (original_target) == REG
7478 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7479 && original_target == var_rtx (singleton)))
7480 && GET_MODE (original_target) == mode
7481 #ifdef HAVE_conditional_move
7482 && (! can_conditionally_move_p (mode)
7483 || GET_CODE (original_target) == REG
7484 || TREE_ADDRESSABLE (type))
7486 && ! (GET_CODE (original_target) == MEM
7487 && MEM_VOLATILE_P (original_target)))
7488 temp = original_target;
7489 else if (TREE_ADDRESSABLE (type))
7492 temp = assign_temp (type, 0, 0, 1);
7494 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7495 do the test of X as a store-flag operation, do this as
7496 A + ((X != 0) << log C). Similarly for other simple binary
7497 operators. Only do for C == 1 if BRANCH_COST is low. */
7498 if (temp && singleton && binary_op
7499 && (TREE_CODE (binary_op) == PLUS_EXPR
7500 || TREE_CODE (binary_op) == MINUS_EXPR
7501 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7502 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7503 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7504 : integer_onep (TREE_OPERAND (binary_op, 1)))
7505 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7508 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7509 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7510 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7513 /* If we had X ? A : A + 1, do this as A + (X == 0).
7515 We have to invert the truth value here and then put it
7516 back later if do_store_flag fails. We cannot simply copy
7517 TREE_OPERAND (exp, 0) to another variable and modify that
7518 because invert_truthvalue can modify the tree pointed to
7520 if (singleton == TREE_OPERAND (exp, 1))
7521 TREE_OPERAND (exp, 0)
7522 = invert_truthvalue (TREE_OPERAND (exp, 0));
7524 result = do_store_flag (TREE_OPERAND (exp, 0),
7525 (safe_from_p (temp, singleton, 1)
7527 mode, BRANCH_COST <= 1);
7529 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7530 result = expand_shift (LSHIFT_EXPR, mode, result,
7531 build_int_2 (tree_log2
7535 (safe_from_p (temp, singleton, 1)
7536 ? temp : NULL_RTX), 0);
7540 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7541 return expand_binop (mode, boptab, op1, result, temp,
7542 unsignedp, OPTAB_LIB_WIDEN);
7544 else if (singleton == TREE_OPERAND (exp, 1))
7545 TREE_OPERAND (exp, 0)
7546 = invert_truthvalue (TREE_OPERAND (exp, 0));
7549 do_pending_stack_adjust ();
7551 op0 = gen_label_rtx ();
7553 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7557 /* If the target conflicts with the other operand of the
7558 binary op, we can't use it. Also, we can't use the target
7559 if it is a hard register, because evaluating the condition
7560 might clobber it. */
7562 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7563 || (GET_CODE (temp) == REG
7564 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7565 temp = gen_reg_rtx (mode);
7566 store_expr (singleton, temp, 0);
7569 expand_expr (singleton,
7570 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7571 if (singleton == TREE_OPERAND (exp, 1))
7572 jumpif (TREE_OPERAND (exp, 0), op0);
7574 jumpifnot (TREE_OPERAND (exp, 0), op0);
7576 start_cleanup_deferral ();
7577 if (binary_op && temp == 0)
7578 /* Just touch the other operand. */
7579 expand_expr (TREE_OPERAND (binary_op, 1),
7580 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7582 store_expr (build (TREE_CODE (binary_op), type,
7583 make_tree (type, temp),
7584 TREE_OPERAND (binary_op, 1)),
7587 store_expr (build1 (TREE_CODE (unary_op), type,
7588 make_tree (type, temp)),
7592 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7593 comparison operator. If we have one of these cases, set the
7594 output to A, branch on A (cse will merge these two references),
7595 then set the output to FOO. */
7597 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7598 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7599 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7600 TREE_OPERAND (exp, 1), 0)
7601 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7602 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7603 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7605 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7606 temp = gen_reg_rtx (mode);
7607 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7608 jumpif (TREE_OPERAND (exp, 0), op0);
7610 start_cleanup_deferral ();
7611 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7615 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7616 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7617 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7618 TREE_OPERAND (exp, 2), 0)
7619 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7620 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7621 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7623 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7624 temp = gen_reg_rtx (mode);
7625 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7626 jumpifnot (TREE_OPERAND (exp, 0), op0);
7628 start_cleanup_deferral ();
7629 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7634 op1 = gen_label_rtx ();
7635 jumpifnot (TREE_OPERAND (exp, 0), op0);
7637 start_cleanup_deferral ();
7639 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7641 expand_expr (TREE_OPERAND (exp, 1),
7642 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7643 end_cleanup_deferral ();
7645 emit_jump_insn (gen_jump (op1));
7648 start_cleanup_deferral ();
7650 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7652 expand_expr (TREE_OPERAND (exp, 2),
7653 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7656 end_cleanup_deferral ();
7667 /* Something needs to be initialized, but we didn't know
7668 where that thing was when building the tree. For example,
7669 it could be the return value of a function, or a parameter
7670 to a function which lays down in the stack, or a temporary
7671 variable which must be passed by reference.
7673 We guarantee that the expression will either be constructed
7674 or copied into our original target. */
7676 tree slot = TREE_OPERAND (exp, 0);
7677 tree cleanups = NULL_TREE;
7680 if (TREE_CODE (slot) != VAR_DECL)
7684 target = original_target;
7688 if (DECL_RTL (slot) != 0)
7690 target = DECL_RTL (slot);
7691 /* If we have already expanded the slot, so don't do
7693 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7698 target = assign_temp (type, 2, 0, 1);
7699 /* All temp slots at this level must not conflict. */
7700 preserve_temp_slots (target);
7701 DECL_RTL (slot) = target;
7702 if (TREE_ADDRESSABLE (slot))
7704 TREE_ADDRESSABLE (slot) = 0;
7705 mark_addressable (slot);
7708 /* Since SLOT is not known to the called function
7709 to belong to its stack frame, we must build an explicit
7710 cleanup. This case occurs when we must build up a reference
7711 to pass the reference as an argument. In this case,
7712 it is very likely that such a reference need not be
7715 if (TREE_OPERAND (exp, 2) == 0)
7716 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
7717 cleanups = TREE_OPERAND (exp, 2);
7722 /* This case does occur, when expanding a parameter which
7723 needs to be constructed on the stack. The target
7724 is the actual stack address that we want to initialize.
7725 The function we call will perform the cleanup in this case. */
7727 /* If we have already assigned it space, use that space,
7728 not target that we were passed in, as our target
7729 parameter is only a hint. */
7730 if (DECL_RTL (slot) != 0)
7732 target = DECL_RTL (slot);
7733 /* If we have already expanded the slot, so don't do
7735 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7740 DECL_RTL (slot) = target;
7741 /* If we must have an addressable slot, then make sure that
7742 the RTL that we just stored in slot is OK. */
7743 if (TREE_ADDRESSABLE (slot))
7745 TREE_ADDRESSABLE (slot) = 0;
7746 mark_addressable (slot);
7751 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
7752 /* Mark it as expanded. */
7753 TREE_OPERAND (exp, 1) = NULL_TREE;
7755 TREE_USED (slot) = 1;
7756 store_expr (exp1, target, 0);
7758 expand_decl_cleanup (NULL_TREE, cleanups);
7765 tree lhs = TREE_OPERAND (exp, 0);
7766 tree rhs = TREE_OPERAND (exp, 1);
7767 tree noncopied_parts = 0;
7768 tree lhs_type = TREE_TYPE (lhs);
7770 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7771 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7772 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7773 TYPE_NONCOPIED_PARTS (lhs_type));
7774 while (noncopied_parts != 0)
7776 expand_assignment (TREE_VALUE (noncopied_parts),
7777 TREE_PURPOSE (noncopied_parts), 0, 0);
7778 noncopied_parts = TREE_CHAIN (noncopied_parts);
7785 /* If lhs is complex, expand calls in rhs before computing it.
7786 That's so we don't compute a pointer and save it over a call.
7787 If lhs is simple, compute it first so we can give it as a
7788 target if the rhs is just a call. This avoids an extra temp and copy
7789 and that prevents a partial-subsumption which makes bad code.
7790 Actually we could treat component_ref's of vars like vars. */
7792 tree lhs = TREE_OPERAND (exp, 0);
7793 tree rhs = TREE_OPERAND (exp, 1);
7794 tree noncopied_parts = 0;
7795 tree lhs_type = TREE_TYPE (lhs);
7799 if (TREE_CODE (lhs) != VAR_DECL
7800 && TREE_CODE (lhs) != RESULT_DECL
7801 && TREE_CODE (lhs) != PARM_DECL
7802 && ! (TREE_CODE (lhs) == INDIRECT_REF
7803 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
7804 preexpand_calls (exp);
7806 /* Check for |= or &= of a bitfield of size one into another bitfield
7807 of size 1. In this case, (unless we need the result of the
7808 assignment) we can do this more efficiently with a
7809 test followed by an assignment, if necessary.
7811 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7812 things change so we do, this code should be enhanced to
7815 && TREE_CODE (lhs) == COMPONENT_REF
7816 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7817 || TREE_CODE (rhs) == BIT_AND_EXPR)
7818 && TREE_OPERAND (rhs, 0) == lhs
7819 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7820 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7821 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7823 rtx label = gen_label_rtx ();
7825 do_jump (TREE_OPERAND (rhs, 1),
7826 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7827 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7828 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7829 (TREE_CODE (rhs) == BIT_IOR_EXPR
7831 : integer_zero_node)),
7833 do_pending_stack_adjust ();
7838 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7839 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7840 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7841 TYPE_NONCOPIED_PARTS (lhs_type));
7843 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7844 while (noncopied_parts != 0)
7846 expand_assignment (TREE_PURPOSE (noncopied_parts),
7847 TREE_VALUE (noncopied_parts), 0, 0);
7848 noncopied_parts = TREE_CHAIN (noncopied_parts);
7854 if (!TREE_OPERAND (exp, 0))
7855 expand_null_return ();
7857 expand_return (TREE_OPERAND (exp, 0));
7860 case PREINCREMENT_EXPR:
7861 case PREDECREMENT_EXPR:
7862 return expand_increment (exp, 0, ignore);
7864 case POSTINCREMENT_EXPR:
7865 case POSTDECREMENT_EXPR:
7866 /* Faster to treat as pre-increment if result is not used. */
7867 return expand_increment (exp, ! ignore, ignore);
7870 /* If nonzero, TEMP will be set to the address of something that might
7871 be a MEM corresponding to a stack slot. */
7874 /* Are we taking the address of a nested function? */
7875 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
7876 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
7877 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
7878 && ! TREE_STATIC (exp))
7880 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7881 op0 = force_operand (op0, target);
7883 /* If we are taking the address of something erroneous, just
7885 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7889 /* We make sure to pass const0_rtx down if we came in with
7890 ignore set, to avoid doing the cleanups twice for something. */
7891 op0 = expand_expr (TREE_OPERAND (exp, 0),
7892 ignore ? const0_rtx : NULL_RTX, VOIDmode,
7893 (modifier == EXPAND_INITIALIZER
7894 ? modifier : EXPAND_CONST_ADDRESS));
7896 /* If we are going to ignore the result, OP0 will have been set
7897 to const0_rtx, so just return it. Don't get confused and
7898 think we are taking the address of the constant. */
7902 op0 = protect_from_queue (op0, 0);
7904 /* We would like the object in memory. If it is a constant,
7905 we can have it be statically allocated into memory. For
7906 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
7907 memory and store the value into it. */
7909 if (CONSTANT_P (op0))
7910 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7912 else if (GET_CODE (op0) == MEM)
7914 mark_temp_addr_taken (op0);
7915 temp = XEXP (op0, 0);
7918 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7919 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7921 /* If this object is in a register, it must be not
7923 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7924 rtx memloc = assign_temp (inner_type, 1, 1, 1);
7926 mark_temp_addr_taken (memloc);
7927 emit_move_insn (memloc, op0);
7931 if (GET_CODE (op0) != MEM)
7934 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7936 temp = XEXP (op0, 0);
7937 #ifdef POINTERS_EXTEND_UNSIGNED
7938 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7939 && mode == ptr_mode)
7940 temp = convert_memory_address (ptr_mode, temp);
7945 op0 = force_operand (XEXP (op0, 0), target);
7948 if (flag_force_addr && GET_CODE (op0) != REG)
7949 op0 = force_reg (Pmode, op0);
7951 if (GET_CODE (op0) == REG
7952 && ! REG_USERVAR_P (op0))
7953 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
7955 /* If we might have had a temp slot, add an equivalent address
7958 update_temp_slot_address (temp, op0);
7960 #ifdef POINTERS_EXTEND_UNSIGNED
7961 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7962 && mode == ptr_mode)
7963 op0 = convert_memory_address (ptr_mode, op0);
7968 case ENTRY_VALUE_EXPR:
7971 /* COMPLEX type for Extended Pascal & Fortran */
7974 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7977 /* Get the rtx code of the operands. */
7978 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7979 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7982 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7986 /* Move the real (op0) and imaginary (op1) parts to their location. */
7987 emit_move_insn (gen_realpart (mode, target), op0);
7988 emit_move_insn (gen_imagpart (mode, target), op1);
7990 insns = get_insns ();
7993 /* Complex construction should appear as a single unit. */
7994 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7995 each with a separate pseudo as destination.
7996 It's not correct for flow to treat them as a unit. */
7997 if (GET_CODE (target) != CONCAT)
7998 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8006 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8007 return gen_realpart (mode, op0);
8010 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8011 return gen_imagpart (mode, op0);
8015 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8019 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8022 target = gen_reg_rtx (mode);
8026 /* Store the realpart and the negated imagpart to target. */
8027 emit_move_insn (gen_realpart (partmode, target),
8028 gen_realpart (partmode, op0));
8030 imag_t = gen_imagpart (partmode, target);
8031 temp = expand_unop (partmode, neg_optab,
8032 gen_imagpart (partmode, op0), imag_t, 0);
8034 emit_move_insn (imag_t, temp);
8036 insns = get_insns ();
8039 /* Conjugate should appear as a single unit
8040 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8041 each with a separate pseudo as destination.
8042 It's not correct for flow to treat them as a unit. */
8043 if (GET_CODE (target) != CONCAT)
8044 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8051 case TRY_CATCH_EXPR:
8053 tree handler = TREE_OPERAND (exp, 1);
8055 expand_eh_region_start ();
8057 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8059 expand_eh_region_end (handler);
8066 rtx dcc = get_dynamic_cleanup_chain ();
8067 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8073 rtx dhc = get_dynamic_handler_chain ();
8074 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8079 op0 = CONST0_RTX (tmode);
8085 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8088 /* Here to do an ordinary binary operator, generating an instruction
8089 from the optab already placed in `this_optab'. */
8091 preexpand_calls (exp);
8092 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8094 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8095 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8097 temp = expand_binop (mode, this_optab, op0, op1, target,
8098 unsignedp, OPTAB_LIB_WIDEN);
/* NOTE(review): this extract is incomplete -- the embedded original line
   numbers (8106 ... 8167) skip values, so the function's return type,
   parameter declarations, several braces, `break' statements and at least
   two case labels are missing here.  Do not edit this fragment without the
   full original text.  Comments below describe only what is visible.  */
8106 /* Return the alignment in bits of EXP, a pointer valued expression.
8107 But don't return more than MAX_ALIGN no matter what.
8108 The alignment returned is, by default, the alignment of the thing that
8109 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
8111 Otherwise, look at the expression to see if we can do better, i.e., if the
8112 expression is actually pointing at an object whose alignment is tighter. */
8115 get_pointer_alignment (exp, max_align)
8119 unsigned align, inner;
8121 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
/* Start from the declared alignment of the pointed-to type, capped.  */
8124 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
8125 align = MIN (align, max_align);
/* Walk down EXP looking for a tighter alignment bound; the enclosing
   loop construct is elided from this extract.  */
8129 switch (TREE_CODE (exp))
/* Earlier case labels (presumably NOP_EXPR/CONVERT_EXPR, sharing this
   arm with NON_LVALUE_EXPR) appear to be elided -- TODO confirm.  */
8133 case NON_LVALUE_EXPR:
8134 exp = TREE_OPERAND (exp, 0);
8135 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
8137 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
8138 align = MIN (inner, max_align);
/* Presumably the PLUS_EXPR case -- the case label itself is elided.  */
8142 /* If sum of pointer + int, restrict our maximum alignment to that
8143 imposed by the integer. If not, we can't do any better than
8145 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
/* Shrink ALIGN while the constant offset is not a multiple of it;
   the loop body and condition tail are elided here.  */
8148 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
8153 exp = TREE_OPERAND (exp, 0);
/* Presumably the ADDR_EXPR case -- the case label itself is elided.  */
8157 /* See what we are pointing at and look at its alignment. */
8158 exp = TREE_OPERAND (exp, 0);
8159 if (TREE_CODE (exp) == FUNCTION_DECL)
8160 align = FUNCTION_BOUNDARY;
8161 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
8162 align = DECL_ALIGN (exp);
8163 #ifdef CONSTANT_ALIGNMENT
8164 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
8165 align = CONSTANT_ALIGNMENT (exp, align);
8167 return MIN (align, max_align);
/* NOTE(review): incomplete extract -- the original line numbers (8175 ...
   8209) skip values: the return type, parameter declarations, braces, and
   notably the statements that set *PTR_OFFSET in the PLUS_EXPR arms
   (between 8200/8203 and 8206/8209) are elided.  */
8175 /* Return the tree node and offset if a given argument corresponds to
8176 a string constant. */
8179 string_constant (arg, ptr_offset)
/* Direct &"..." reference: the string itself, offset zero.  */
8185 if (TREE_CODE (arg) == ADDR_EXPR
8186 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8188 *ptr_offset = integer_zero_node;
8189 return TREE_OPERAND (arg, 0);
/* &"..." plus an offset, with the string on either side of the `+'.
   Presumably *PTR_OFFSET is set to the other operand in each arm --
   those lines are missing from this extract; confirm against the
   original before relying on it.  */
8191 else if (TREE_CODE (arg) == PLUS_EXPR)
8193 tree arg0 = TREE_OPERAND (arg, 0);
8194 tree arg1 = TREE_OPERAND (arg, 1);
8199 if (TREE_CODE (arg0) == ADDR_EXPR
8200 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8203 return TREE_OPERAND (arg0, 0);
8205 else if (TREE_CODE (arg1) == ADDR_EXPR
8206 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8209 return TREE_OPERAND (arg1, 0);
/* NOTE(review): this is a body fragment only -- the function's signature
   and local declarations (original lines ~8222-8230, i.e. `c_strlen')
   are entirely missing from this extract, and further interior lines
   (failure returns, loop bodies) are elided.  */
8216 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
8217 way, because it could contain a zero byte in the middle.
8218 TREE_STRING_LENGTH is the size of the character array, not the string.
8220 Unfortunately, string_constant can't access the values of const char
8221 arrays with initializers, so neither can we do so here. */
/* Resolve SRC to a STRING_CST plus offset; the failure check after this
   call is elided here.  */
8231 src = string_constant (src, &offset_node);
8234 max = TREE_STRING_LENGTH (src);
8235 ptr = TREE_STRING_POINTER (src);
8236 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
8238 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
8239 compute the offset to the following null if we don't know where to
8240 start searching for it. */
/* Scan the array for an embedded NUL; the loop body (the check and the
   bail-out return) is elided from this extract.  */
8242 for (i = 0; i < max; i++)
8245 /* We don't know the starting offset, but we do know that the string
8246 has no internal zero bytes. We can assume that the offset falls
8247 within the bounds of the string; otherwise, the programmer deserves
8248 what he gets. Subtract the offset from the length of the string,
8250 /* This would perhaps not be valid if we were dealing with named
8251 arrays in addition to literal string constants. */
8252 return size_binop (MINUS_EXPR, size_int (max), offset_node);
8255 /* We have a known offset into the string. Start searching there for
8256 a null character. */
8257 if (offset_node == 0)
8261 /* Did we get a long long offset? If so, punt. */
8262 if (TREE_INT_CST_HIGH (offset_node) != 0)
8264 offset = TREE_INT_CST_LOW (offset_node);
8266 /* If the offset is known to be out of bounds, warn, and call strlen at
8268 if (offset < 0 || offset > max)
8270 warning ("offset outside bounds of constant string");
8273 /* Use strlen to search for the first zero byte. Since any strings
8274 constructed with build_string will have nulls appended, we win even
8275 if we get handed something like (char[4])"abcd".
8277 Since OFFSET is our starting index into the string, no further
8278 calculation is needed. */
8279 return size_int (strlen (ptr + offset));
/* NOTE(review): incomplete extract -- the return type, the COUNT/TEM
   parameter declarations, #endif lines, braces, and return statements
   are elided (original line numbers 8283 ... 8330 skip values).
   Implements the frame walk shared by __builtin_return_address and
   __builtin_frame_address; TEM is presumably the starting frame rtx
   (callers pass hard_frame_pointer_rtx) -- TODO confirm against the
   full original.  */
8283 expand_builtin_return_addr (fndecl_code, count, tem)
8284 enum built_in_function fndecl_code;
8290 /* Some machines need special handling before we can access
8291 arbitrary frames. For example, on the sparc, we must first flush
8292 all register windows to the stack. */
8293 #ifdef SETUP_FRAME_ADDRESSES
8295 SETUP_FRAME_ADDRESSES ();
8298 /* On the sparc, the return address is not in the frame, it is in a
8299 register. There is no way to access it off of the current frame
8300 pointer, but it can be accessed off the previous frame pointer by
8301 reading the value from the register window save area. */
8302 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
8303 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
8307 /* Scan back COUNT frames to the specified frame. */
8308 for (i = 0; i < count; i++)
8310 /* Assume the dynamic chain pointer is in the word that the
8311 frame address points to, unless otherwise specified. */
8312 #ifdef DYNAMIC_CHAIN_ADDRESS
8313 tem = DYNAMIC_CHAIN_ADDRESS (tem);
8315 tem = memory_address (Pmode, tem);
8316 tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
8319 /* For __builtin_frame_address, return what we've got. */
8320 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
8323 /* For __builtin_return_address, Get the return address from that
8325 #ifdef RETURN_ADDR_RTX
8326 tem = RETURN_ADDR_RTX (count, tem);
/* Default (no RETURN_ADDR_RTX): the return address is assumed to live
   one word past the frame address.  */
8328 tem = memory_address (Pmode,
8329 plus_constant (tem, GET_MODE_SIZE (Pmode)));
8330 tem = gen_rtx_MEM (Pmode, tem);
/* NOTE(review): incomplete extract -- original line numbers (8335 ...
   8466) skip values, so the return type, local declarations, braces,
   #else/#endif lines, the emission of LAB1 itself, and the final return
   are elided.  Edit only with the full original text in hand.  */
8335 /* __builtin_setjmp is passed a pointer to an array of five words (not
8336 all will be used on all machines). It operates similarly to the C
8337 library function of the same name, but is more efficient. Much of
8338 the code below (and for longjmp) is copied from the handling of
8341 NOTE: This is intended for use by GNAT and the exception handling
8342 scheme in the compiler and will only work in the method used by
8346 expand_builtin_setjmp (buf_addr, target, first_label, next_label)
8349 rtx first_label, next_label;
8351 rtx lab1 = gen_label_rtx ();
8352 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
8353 enum machine_mode value_mode;
/* The builtin's result is an int (0 on direct return, 1 via longjmp).  */
8356 value_mode = TYPE_MODE (integer_type_node);
8358 #ifdef POINTERS_EXTEND_UNSIGNED
8359 buf_addr = convert_memory_address (Pmode, buf_addr);
8362 buf_addr = force_reg (Pmode, buf_addr);
/* TARGET must be a pseudo register we can store into repeatedly.  */
8364 if (target == 0 || GET_CODE (target) != REG
8365 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8366 target = gen_reg_rtx (value_mode);
8370 /* We store the frame pointer and the address of lab1 in the buffer
8371 and use the rest of it for the stack save area, which is
8372 machine-dependent. */
8374 #ifndef BUILTIN_SETJMP_FRAME_VALUE
8375 #define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
/* Buffer word 0: frame value; word 1: receiver label; remaining words:
   machine-dependent stack save area.  */
8378 emit_move_insn (gen_rtx_MEM (Pmode, buf_addr),
8379 BUILTIN_SETJMP_FRAME_VALUE);
8380 emit_move_insn (validize_mem
8381 (gen_rtx_MEM (Pmode,
8382 plus_constant (buf_addr,
8383 GET_MODE_SIZE (Pmode)))),
8384 gen_rtx_LABEL_REF (Pmode, lab1));
8386 stack_save = gen_rtx_MEM (sa_mode,
8387 plus_constant (buf_addr,
8388 2 * GET_MODE_SIZE (Pmode)));
8389 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
8391 /* If there is further processing to do, do it. */
8392 #ifdef HAVE_builtin_setjmp_setup
8393 if (HAVE_builtin_setjmp_setup)
8394 emit_insn (gen_builtin_setjmp_setup (buf_addr));
8397 /* Set TARGET to zero and branch to the first-time-through label. */
8398 emit_move_insn (target, const0_rtx)
8399 emit_jump_insn (gen_jump (first_label));
/* Everything below is the receiver path, reached by longjmp through
   LAB1; the emission of LAB1 itself is elided from this extract.  */
8403 /* Tell flow about the strange goings on. */
8404 current_function_has_nonlocal_label = 1;
8406 /* Clobber the FP when we get here, so we have to make sure it's
8407 marked as used by this function. */
8408 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
8410 /* Mark the static chain as clobbered here so life information
8411 doesn't get messed up for it. */
8412 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
8414 /* Now put in the code to restore the frame pointer, and argument
8415 pointer, if needed. The code below is from expand_end_bindings
8416 in stmt.c; see detailed documentation there. */
8417 #ifdef HAVE_nonlocal_goto
8418 if (! HAVE_nonlocal_goto)
8420 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8422 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8423 if (fixed_regs[ARG_POINTER_REGNUM])
8425 #ifdef ELIMINABLE_REGS
/* Restore the arg pointer only if it is not eliminated in favor of
   the (hard) frame pointer on this target.  */
8427 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
8429 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8430 if (elim_regs[i].from == ARG_POINTER_REGNUM
8431 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8434 if (i == sizeof elim_regs / sizeof elim_regs [0])
8437 /* Now restore our arg pointer from the address at which it
8438 was saved in our stack frame.
8439 If there hasn't be space allocated for it yet, make
8441 if (arg_pointer_save_area == 0)
8442 arg_pointer_save_area
8443 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8444 emit_move_insn (virtual_incoming_args_rtx,
8445 copy_to_reg (arg_pointer_save_area));
8450 #ifdef HAVE_builtin_setjmp_receiver
8451 if (HAVE_builtin_setjmp_receiver)
8452 emit_insn (gen_builtin_setjmp_receiver (lab1));
8455 #ifdef HAVE_nonlocal_goto_receiver
8456 if (HAVE_nonlocal_goto_receiver)
8457 emit_insn (gen_nonlocal_goto_receiver ());
8464 /* Set TARGET, and branch to the next-time-through label. */
8465 emit_move_insn (target, const1_rtx);
8466 emit_jump_insn (gen_jump (next_label));
/* NOTE(review): incomplete extract -- the return type, local rtx
   declarations (FP, LAB, STACK are used below but not declared here),
   #endif/#else lines, and the error path for VALUE != 1 are elided
   (original numbers 8473 ... 8523 skip values).  */
8473 expand_builtin_longjmp (buf_addr, value)
8474 rtx buf_addr, value;
8477 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
8479 #ifdef POINTERS_EXTEND_UNSIGNED
8480 buf_addr = convert_memory_address (Pmode, buf_addr);
8482 buf_addr = force_reg (Pmode, buf_addr);
8484 /* We used to store value in static_chain_rtx, but that fails if pointers
8485 are smaller than integers. We instead require that the user must pass
8486 a second argument of 1, because that is what builtin_setjmp will
8487 return. This also makes EH slightly more efficient, since we are no
8488 longer copying around a value that we don't care about. */
/* The diagnostic/handling for a non-1 VALUE is elided here.  */
8489 if (value != const1_rtx)
8492 #ifdef HAVE_builtin_longjmp
8493 if (HAVE_builtin_longjmp)
8494 emit_insn (gen_builtin_longjmp (buf_addr));
/* Generic path: pick apart the setjmp buffer laid out by
   expand_builtin_setjmp -- word 0 FP, word 1 label, then save area.  */
8498 fp = gen_rtx_MEM (Pmode, buf_addr);
8499 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
8500 GET_MODE_SIZE (Pmode)));
8502 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
8503 2 * GET_MODE_SIZE (Pmode)));
8505 /* Pick up FP, label, and SP from the block and jump. This code is
8506 from expand_goto in stmt.c; see there for detailed comments. */
8507 #if HAVE_nonlocal_goto
8508 if (HAVE_nonlocal_goto)
8509 /* We have to pass a value to the nonlocal_goto pattern that will
8510 get copied into the static_chain pointer, but it does not matter
8511 what that value is, because builtin_setjmp does not use it. */
8512 emit_insn (gen_nonlocal_goto (value, fp, stack, lab));
/* Fallback: restore FP and SP by hand, then jump indirect.  LAB must
   be copied to a reg before FP is clobbered.  */
8516 lab = copy_to_reg (lab);
8518 emit_move_insn (hard_frame_pointer_rtx, fp);
8519 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
8521 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
8522 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
8523 emit_indirect_jump (lab);
/* NOTE(review): incomplete extract -- the return type, the EXP parameter
   declaration, locals (MEM, TYPE, IS_AGGREGATE), braces, and the final
   `return mem;' are elided (original numbers 8529 ... 8572 skip values).
   Builds a BLKmode MEM for the object EXP points to, setting
   RTX_UNCHANGING_P and MEM_IN_STRUCT_P from the tree.  */
8529 get_memory_rtx (exp)
8535 mem = gen_rtx_MEM (BLKmode,
8536 memory_address (BLKmode,
8537 expand_expr (exp, NULL_RTX,
8538 ptr_mode, EXPAND_SUM)));
8540 RTX_UNCHANGING_P (mem) = TREE_READONLY (exp);
8542 /* Figure out the type of the object pointed to. Set MEM_IN_STRUCT_P
8543 if the value is the address of a structure or if the expression is
8544 cast to a pointer to structure type. */
/* Strip casts, stopping early (the break is elided) if a cast to
   pointer-to-aggregate is seen.  */
8547 while (TREE_CODE (exp) == NOP_EXPR)
8549 tree cast_type = TREE_TYPE (exp);
8550 if (TREE_CODE (cast_type) == POINTER_TYPE
8551 && AGGREGATE_TYPE_P (TREE_TYPE (cast_type)))
8556 exp = TREE_OPERAND (exp, 0);
/* No aggregate cast found: inspect what EXP actually points at.  */
8559 if (is_aggregate == 0)
8563 if (TREE_CODE (exp) == ADDR_EXPR)
8564 /* If this is the address of an object, check whether the
8565 object is an array. */
8566 type = TREE_TYPE (TREE_OPERAND (exp, 0));
8568 type = TREE_TYPE (TREE_TYPE (exp));
8569 is_aggregate = AGGREGATE_TYPE_P (type);
8572 MEM_IN_STRUCT_P (mem) = is_aggregate;
8577 /* Expand an expression EXP that calls a built-in function,
8578 with result going to TARGET if that's convenient
8579 (and in mode MODE if that's convenient).
8580 SUBTARGET may be used as the target for computing one of EXP's operands.
8581 IGNORE is nonzero if the value is to be ignored. */
8583 #define CALLED_AS_BUILT_IN(NODE) \
8584 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
8587 expand_builtin (exp, target, subtarget, mode, ignore)
8591 enum machine_mode mode;
8594 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8595 tree arglist = TREE_OPERAND (exp, 1);
8598 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
8599 optab builtin_optab;
8601 switch (DECL_FUNCTION_CODE (fndecl))
8606 /* build_function_call changes these into ABS_EXPR. */
8611 /* Treat these like sqrt, but only if the user asks for them. */
8612 if (! flag_fast_math)
8614 case BUILT_IN_FSQRT:
8615 /* If not optimizing, call the library function. */
8620 /* Arg could be wrong type if user redeclared this fcn wrong. */
8621 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
8624 /* Stabilize and compute the argument. */
8625 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
8626 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
8628 exp = copy_node (exp);
8629 arglist = copy_node (arglist);
8630 TREE_OPERAND (exp, 1) = arglist;
8631 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
8633 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8635 /* Make a suitable register to place result in. */
8636 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8641 switch (DECL_FUNCTION_CODE (fndecl))
8644 builtin_optab = sin_optab; break;
8646 builtin_optab = cos_optab; break;
8647 case BUILT_IN_FSQRT:
8648 builtin_optab = sqrt_optab; break;
8653 /* Compute into TARGET.
8654 Set TARGET to wherever the result comes back. */
8655 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8656 builtin_optab, op0, target, 0);
8658 /* If we were unable to expand via the builtin, stop the
8659 sequence (without outputting the insns) and break, causing
8660 a call to the library function. */
8667 /* Check the results by default. But if flag_fast_math is turned on,
8668 then assume sqrt will always be called with valid arguments. */
8670 if (! flag_fast_math)
8672 /* Don't define the builtin FP instructions
8673 if your machine is not IEEE. */
8674 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8677 lab1 = gen_label_rtx ();
8679 /* Test the result; if it is NaN, set errno=EDOM because
8680 the argument was not in the domain. */
8681 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
8682 emit_jump_insn (gen_beq (lab1));
8686 #ifdef GEN_ERRNO_RTX
8687 rtx errno_rtx = GEN_ERRNO_RTX;
8690 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
8693 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8696 /* We can't set errno=EDOM directly; let the library call do it.
8697 Pop the arguments right away in case the call gets deleted. */
8699 expand_call (exp, target, 0);
8706 /* Output the entire sequence. */
8707 insns = get_insns ();
8716 /* __builtin_apply_args returns block of memory allocated on
8717 the stack into which is stored the arg pointer, structure
8718 value address, static chain, and all the registers that might
8719 possibly be used in performing a function call. The code is
8720 moved to the start of the function so the incoming values are
8722 case BUILT_IN_APPLY_ARGS:
8723 /* Don't do __builtin_apply_args more than once in a function.
8724 Save the result of the first call and reuse it. */
8725 if (apply_args_value != 0)
8726 return apply_args_value;
8728 /* When this function is called, it means that registers must be
8729 saved on entry to this function. So we migrate the
8730 call to the first insn of this function. */
8735 temp = expand_builtin_apply_args ();
8739 apply_args_value = temp;
8741 /* Put the sequence after the NOTE that starts the function.
8742 If this is inside a SEQUENCE, make the outer-level insn
8743 chain current, so the code is placed at the start of the
8745 push_topmost_sequence ();
8746 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8747 pop_topmost_sequence ();
8751 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8752 FUNCTION with a copy of the parameters described by
8753 ARGUMENTS, and ARGSIZE. It returns a block of memory
8754 allocated on the stack into which is stored all the registers
8755 that might possibly be used for returning the result of a
8756 function. ARGUMENTS is the value returned by
8757 __builtin_apply_args. ARGSIZE is the number of bytes of
8758 arguments that must be copied. ??? How should this value be
8759 computed? We'll also need a safe worst case value for varargs
8761 case BUILT_IN_APPLY:
8763 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8764 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
8765 || TREE_CHAIN (arglist) == 0
8766 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8767 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8768 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8776 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8777 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
8779 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8782 /* __builtin_return (RESULT) causes the function to return the
8783 value described by RESULT. RESULT is address of the block of
8784 memory returned by __builtin_apply. */
8785 case BUILT_IN_RETURN:
8787 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8788 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8789 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8790 NULL_RTX, VOIDmode, 0));
8793 case BUILT_IN_SAVEREGS:
8794 /* Don't do __builtin_saveregs more than once in a function.
8795 Save the result of the first call and reuse it. */
8796 if (saveregs_value != 0)
8797 return saveregs_value;
8799 /* When this function is called, it means that registers must be
8800 saved on entry to this function. So we migrate the
8801 call to the first insn of this function. */
8805 /* Now really call the function. `expand_call' does not call
8806 expand_builtin, so there is no danger of infinite recursion here. */
8809 #ifdef EXPAND_BUILTIN_SAVEREGS
8810 /* Do whatever the machine needs done in this case. */
8811 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8813 /* The register where the function returns its value
8814 is likely to have something else in it, such as an argument.
8815 So preserve that register around the call. */
8817 if (value_mode != VOIDmode)
8819 rtx valreg = hard_libcall_value (value_mode);
8820 rtx saved_valreg = gen_reg_rtx (value_mode);
8822 emit_move_insn (saved_valreg, valreg);
8823 temp = expand_call (exp, target, ignore);
8824 emit_move_insn (valreg, saved_valreg);
8827 /* Generate the call, putting the value in a pseudo. */
8828 temp = expand_call (exp, target, ignore);
8834 saveregs_value = temp;
8836 /* Put the sequence after the NOTE that starts the function.
8837 If this is inside a SEQUENCE, make the outer-level insn
8838 chain current, so the code is placed at the start of the
8840 push_topmost_sequence ();
8841 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8842 pop_topmost_sequence ();
8846 /* __builtin_args_info (N) returns word N of the arg space info
8847 for the current function. The number and meanings of words
8848 is controlled by the definition of CUMULATIVE_ARGS. */
8849 case BUILT_IN_ARGS_INFO:
8851 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
8852 int *word_ptr = (int *) ¤t_function_args_info;
8854 /* These are used by the code below that is if 0'ed away */
8856 tree type, elts, result;
8859 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8860 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8861 __FILE__, __LINE__);
8865 tree arg = TREE_VALUE (arglist);
8866 if (TREE_CODE (arg) != INTEGER_CST)
8867 error ("argument of `__builtin_args_info' must be constant");
8870 int wordnum = TREE_INT_CST_LOW (arg);
8872 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8873 error ("argument of `__builtin_args_info' out of range");
8875 return GEN_INT (word_ptr[wordnum]);
8879 error ("missing argument in `__builtin_args_info'");
8884 for (i = 0; i < nwords; i++)
8885 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
8887 type = build_array_type (integer_type_node,
8888 build_index_type (build_int_2 (nwords, 0)));
8889 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8890 TREE_CONSTANT (result) = 1;
8891 TREE_STATIC (result) = 1;
8892 result = build (INDIRECT_REF, build_pointer_type (type), result);
8893 TREE_CONSTANT (result) = 1;
8894 return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
8898 /* Return the address of the first anonymous stack arg. */
8899 case BUILT_IN_NEXT_ARG:
8901 tree fntype = TREE_TYPE (current_function_decl);
8903 if ((TYPE_ARG_TYPES (fntype) == 0
8904 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8906 && ! current_function_varargs)
8908 error ("`va_start' used in function with fixed args");
8914 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8915 tree arg = TREE_VALUE (arglist);
8917 /* Strip off all nops for the sake of the comparison. This
8918 is not quite the same as STRIP_NOPS. It does more.
8919 We must also strip off INDIRECT_EXPR for C++ reference
8921 while (TREE_CODE (arg) == NOP_EXPR
8922 || TREE_CODE (arg) == CONVERT_EXPR
8923 || TREE_CODE (arg) == NON_LVALUE_EXPR
8924 || TREE_CODE (arg) == INDIRECT_REF)
8925 arg = TREE_OPERAND (arg, 0);
8926 if (arg != last_parm)
8927 warning ("second parameter of `va_start' not last named argument");
8929 else if (! current_function_varargs)
8930 /* Evidently an out of date version of <stdarg.h>; can't validate
8931 va_start's second argument, but can still work as intended. */
8932 warning ("`__builtin_next_arg' called without an argument");
8935 return expand_binop (Pmode, add_optab,
8936 current_function_internal_arg_pointer,
8937 current_function_arg_offset_rtx,
8938 NULL_RTX, 0, OPTAB_LIB_WIDEN);
8940 case BUILT_IN_CLASSIFY_TYPE:
8943 tree type = TREE_TYPE (TREE_VALUE (arglist));
8944 enum tree_code code = TREE_CODE (type);
8945 if (code == VOID_TYPE)
8946 return GEN_INT (void_type_class);
8947 if (code == INTEGER_TYPE)
8948 return GEN_INT (integer_type_class);
8949 if (code == CHAR_TYPE)
8950 return GEN_INT (char_type_class);
8951 if (code == ENUMERAL_TYPE)
8952 return GEN_INT (enumeral_type_class);
8953 if (code == BOOLEAN_TYPE)
8954 return GEN_INT (boolean_type_class);
8955 if (code == POINTER_TYPE)
8956 return GEN_INT (pointer_type_class);
8957 if (code == REFERENCE_TYPE)
8958 return GEN_INT (reference_type_class);
8959 if (code == OFFSET_TYPE)
8960 return GEN_INT (offset_type_class);
8961 if (code == REAL_TYPE)
8962 return GEN_INT (real_type_class);
8963 if (code == COMPLEX_TYPE)
8964 return GEN_INT (complex_type_class);
8965 if (code == FUNCTION_TYPE)
8966 return GEN_INT (function_type_class);
8967 if (code == METHOD_TYPE)
8968 return GEN_INT (method_type_class);
8969 if (code == RECORD_TYPE)
8970 return GEN_INT (record_type_class);
8971 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8972 return GEN_INT (union_type_class);
8973 if (code == ARRAY_TYPE)
8975 if (TYPE_STRING_FLAG (type))
8976 return GEN_INT (string_type_class);
8978 return GEN_INT (array_type_class);
8980 if (code == SET_TYPE)
8981 return GEN_INT (set_type_class);
8982 if (code == FILE_TYPE)
8983 return GEN_INT (file_type_class);
8984 if (code == LANG_TYPE)
8985 return GEN_INT (lang_type_class);
8987 return GEN_INT (no_type_class);
8989 case BUILT_IN_CONSTANT_P:
8994 tree arg = TREE_VALUE (arglist);
8997 if (really_constant_p (arg)
8998 || (TREE_CODE (arg) == ADDR_EXPR
8999 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST))
9002 /* Only emit CONSTANT_P_RTX if CSE will be run.
9003 Moreover, we don't want to expand trees that have side effects,
9004 as the original __builtin_constant_p did not evaluate its
9005 argument at all, and we would break existing usage by changing
9006 this. This quirk was generally useful, eliminating a bit of hair
9007 in the writing of the macros that use this function. Now the
9008 same thing can be better accomplished in an inline function. */
9010 if (! cse_not_expected && ! TREE_SIDE_EFFECTS (arg))
9012 /* Lazy fixup of old code: issue a warning and fail the test. */
9013 if (! can_handle_constant_p)
9015 warning ("Delayed evaluation of __builtin_constant_p not supported on this target.");
9016 warning ("Please report this as a bug to egcs-bugs@cygnus.com.");
9019 return gen_rtx_CONSTANT_P_RTX (TYPE_MODE (integer_type_node),
9020 expand_expr (arg, NULL_RTX,
9027 case BUILT_IN_FRAME_ADDRESS:
9028 /* The argument must be a nonnegative integer constant.
9029 It counts the number of frames to scan up the stack.
9030 The value is the address of that frame. */
9031 case BUILT_IN_RETURN_ADDRESS:
9032 /* The argument must be a nonnegative integer constant.
9033 It counts the number of frames to scan up the stack.
9034 The value is the return address saved in that frame. */
9036 /* Warning about missing arg was already issued. */
9038 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
9039 || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
9041 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9042 error ("invalid arg to `__builtin_frame_address'");
9044 error ("invalid arg to `__builtin_return_address'");
9049 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
9050 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
9051 hard_frame_pointer_rtx);
9053 /* Some ports cannot access arbitrary stack frames. */
9056 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9057 warning ("unsupported arg to `__builtin_frame_address'");
9059 warning ("unsupported arg to `__builtin_return_address'");
9063 /* For __builtin_frame_address, return what we've got. */
9064 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9067 if (GET_CODE (tem) != REG)
9068 tem = copy_to_reg (tem);
9072 /* Returns the address of the area where the structure is returned.
9074 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
9076 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
9077 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
9080 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
9082 case BUILT_IN_ALLOCA:
9084 /* Arg could be non-integer if user redeclared this fcn wrong. */
9085 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
9088 /* Compute the argument. */
9089 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
9091 /* Allocate the desired space. */
9092 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
9095 /* If not optimizing, call the library function. */
9096 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9100 /* Arg could be non-integer if user redeclared this fcn wrong. */
9101 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
9104 /* Compute the argument. */
9105 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
9106 /* Compute ffs, into TARGET if possible.
9107 Set TARGET to wherever the result comes back. */
9108 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
9109 ffs_optab, op0, target, 1);
9114 case BUILT_IN_STRLEN:
9115 /* If not optimizing, call the library function. */
9116 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9120 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9121 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9125 tree src = TREE_VALUE (arglist);
9126 tree len = c_strlen (src);
9129 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9131 rtx result, src_rtx, char_rtx;
9132 enum machine_mode insn_mode = value_mode, char_mode;
9133 enum insn_code icode;
9135 /* If the length is known, just return it. */
9137 return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);
9139 /* If SRC is not a pointer type, don't do this operation inline. */
9143 /* Call a function if we can't compute strlen in the right mode. */
9145 while (insn_mode != VOIDmode)
9147 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
9148 if (icode != CODE_FOR_nothing)
9151 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
9153 if (insn_mode == VOIDmode)
9156 /* Make a place to write the result of the instruction. */
9159 && GET_CODE (result) == REG
9160 && GET_MODE (result) == insn_mode
9161 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9162 result = gen_reg_rtx (insn_mode);
9164 /* Make sure the operands are acceptable to the predicates. */
9166 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
9167 result = gen_reg_rtx (insn_mode);
9168 src_rtx = memory_address (BLKmode,
9169 expand_expr (src, NULL_RTX, ptr_mode,
9172 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
9173 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
9175 /* Check the string is readable and has an end. */
9176 if (current_function_check_memory_usage)
9177 emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
9179 GEN_INT (MEMORY_USE_RO),
9180 TYPE_MODE (integer_type_node));
9182 char_rtx = const0_rtx;
9183 char_mode = insn_operand_mode[(int)icode][2];
9184 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
9185 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
9187 emit_insn (GEN_FCN (icode) (result,
9188 gen_rtx_MEM (BLKmode, src_rtx),
9189 char_rtx, GEN_INT (align)));
9191 /* Return the value in the proper mode for this function. */
9192 if (GET_MODE (result) == value_mode)
9194 else if (target != 0)
9196 convert_move (target, result, 0);
9200 return convert_to_mode (value_mode, result, 0);
9203 case BUILT_IN_STRCPY:
9204 /* If not optimizing, call the library function. */
9205 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9209 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9210 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9211 || TREE_CHAIN (arglist) == 0
9212 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
9216 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
9221 len = size_binop (PLUS_EXPR, len, integer_one_node);
9223 chainon (arglist, build_tree_list (NULL_TREE, len));
9227 case BUILT_IN_MEMCPY:
9228 /* If not optimizing, call the library function. */
9229 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9233 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9234 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9235 || TREE_CHAIN (arglist) == 0
9236 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9238 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9239 || (TREE_CODE (TREE_TYPE (TREE_VALUE
9240 (TREE_CHAIN (TREE_CHAIN (arglist)))))
9245 tree dest = TREE_VALUE (arglist);
9246 tree src = TREE_VALUE (TREE_CHAIN (arglist));
9247 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9250 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9252 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9253 rtx dest_mem, src_mem, dest_addr, len_rtx;
9255 /* If either SRC or DEST is not a pointer type, don't do
9256 this operation in-line. */
9257 if (src_align == 0 || dest_align == 0)
9259 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
9260 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9264 dest_mem = get_memory_rtx (dest);
9265 src_mem = get_memory_rtx (src);
9266 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
9268 /* Just copy the rights of SRC to the rights of DEST. */
9269 if (current_function_check_memory_usage)
9270 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
9271 XEXP (dest_mem, 0), ptr_mode,
9272 XEXP (src_mem, 0), ptr_mode,
9273 len_rtx, TYPE_MODE (sizetype));
9275 /* Copy word part most expediently. */
9277 = emit_block_move (dest_mem, src_mem, len_rtx,
9278 MIN (src_align, dest_align));
9281 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
9286 case BUILT_IN_MEMSET:
9287 /* If not optimizing, call the library function. */
9288 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9292 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9293 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9294 || TREE_CHAIN (arglist) == 0
9295 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9297 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9299 != (TREE_CODE (TREE_TYPE
9301 (TREE_CHAIN (TREE_CHAIN (arglist))))))))
9305 tree dest = TREE_VALUE (arglist);
9306 tree val = TREE_VALUE (TREE_CHAIN (arglist));
9307 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9310 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9311 rtx dest_mem, dest_addr, len_rtx;
9313 /* If DEST is not a pointer type, don't do this
9314 operation in-line. */
9315 if (dest_align == 0)
9318 /* If the arguments have side-effects, then we can only evaluate
9319 them at most once. The following code evaluates them twice if
9320 they are not constants because we break out to expand_call
9321 in that case. They can't be constants if they have side-effects
9322 so we can check for that first. Alternatively, we could call
9323 save_expr to make multiple evaluation safe. */
9324 if (TREE_SIDE_EFFECTS (val) || TREE_SIDE_EFFECTS (len))
9327 /* If VAL is not 0, don't do this operation in-line. */
9328 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
9331 /* If LEN does not expand to a constant, don't do this
9332 operation in-line. */
9333 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
9334 if (GET_CODE (len_rtx) != CONST_INT)
9337 dest_mem = get_memory_rtx (dest);
9339 /* Just check DST is writable and mark it as readable. */
9340 if (current_function_check_memory_usage)
9341 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
9342 XEXP (dest_mem, 0), ptr_mode,
9343 len_rtx, TYPE_MODE (sizetype),
9344 GEN_INT (MEMORY_USE_WO),
9345 TYPE_MODE (integer_type_node));
9348 dest_addr = clear_storage (dest_mem, len_rtx, dest_align);
9351 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
9356 /* These comparison functions need an instruction that returns an actual
9357 index. An ordinary compare that just sets the condition codes
9359 #ifdef HAVE_cmpstrsi
9360 case BUILT_IN_STRCMP:
9361 /* If not optimizing, call the library function. */
9362 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9365 /* If we need to check memory accesses, call the library function. */
9366 if (current_function_check_memory_usage)
9370 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9371 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9372 || TREE_CHAIN (arglist) == 0
9373 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
9375 else if (!HAVE_cmpstrsi)
9378 tree arg1 = TREE_VALUE (arglist);
9379 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9382 len = c_strlen (arg1);
9384 len = size_binop (PLUS_EXPR, integer_one_node, len);
9385 len2 = c_strlen (arg2);
9387 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
9389 /* If we don't have a constant length for the first, use the length
9390 of the second, if we know it. We don't require a constant for
9391 this case; some cost analysis could be done if both are available
9392 but neither is constant. For now, assume they're equally cheap.
9394 If both strings have constant lengths, use the smaller. This
9395 could arise if optimization results in strcpy being called with
9396 two fixed strings, or if the code was machine-generated. We should
9397 add some code to the `memcmp' handler below to deal with such
9398 situations, someday. */
9399 if (!len || TREE_CODE (len) != INTEGER_CST)
9406 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
9408 if (tree_int_cst_lt (len2, len))
9412 chainon (arglist, build_tree_list (NULL_TREE, len));
9416 case BUILT_IN_MEMCMP:
9417 /* If not optimizing, call the library function. */
9418 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9421 /* If we need to check memory accesses, call the library function. */
9422 if (current_function_check_memory_usage)
9426 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9427 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9428 || TREE_CHAIN (arglist) == 0
9429 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
9430 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9431 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
9433 else if (!HAVE_cmpstrsi)
9436 tree arg1 = TREE_VALUE (arglist);
9437 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9438 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9442 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9444 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9445 enum machine_mode insn_mode
9446 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
9448 /* If we don't have POINTER_TYPE, call the function. */
9449 if (arg1_align == 0 || arg2_align == 0)
9451 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
9452 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9456 /* Make a place to write the result of the instruction. */
9459 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
9460 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9461 result = gen_reg_rtx (insn_mode);
9463 emit_insn (gen_cmpstrsi (result, get_memory_rtx (arg1),
9464 get_memory_rtx (arg2),
9465 expand_expr (len, NULL_RTX, VOIDmode, 0),
9466 GEN_INT (MIN (arg1_align, arg2_align))));
9468 /* Return the value in the proper mode for this function. */
9469 mode = TYPE_MODE (TREE_TYPE (exp));
9470 if (GET_MODE (result) == mode)
9472 else if (target != 0)
9474 convert_move (target, result, 0);
9478 return convert_to_mode (mode, result, 0);
9481 case BUILT_IN_STRCMP:
9482 case BUILT_IN_MEMCMP:
9486 case BUILT_IN_SETJMP:
9488 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9492 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9494 rtx lab = gen_label_rtx ();
9495 rtx ret = expand_builtin_setjmp (buf_addr, target, lab, lab);
9500 /* __builtin_longjmp is passed a pointer to an array of five words.
9501 It's similar to the C library longjmp function but works with
9502 __builtin_setjmp above. */
9503 case BUILT_IN_LONGJMP:
9504 if (arglist == 0 || TREE_CHAIN (arglist) == 0
9505 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9509 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9511 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
9512 NULL_RTX, VOIDmode, 0);
9514 if (value != const1_rtx)
9516 error ("__builtin_longjmp second argument must be 1");
9520 expand_builtin_longjmp (buf_addr, value);
9527 emit_insn (gen_trap ());
9530 error ("__builtin_trap not supported by this target");
9534 /* Various hooks for the DWARF 2 __throw routine. */
9535 case BUILT_IN_UNWIND_INIT:
9536 expand_builtin_unwind_init ();
9538 case BUILT_IN_DWARF_CFA:
9539 return virtual_cfa_rtx;
9540 #ifdef DWARF2_UNWIND_INFO
9541 case BUILT_IN_DWARF_FP_REGNUM:
9542 return expand_builtin_dwarf_fp_regnum ();
9543 case BUILT_IN_DWARF_REG_SIZE:
9544 return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
9546 case BUILT_IN_FROB_RETURN_ADDR:
9547 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
9548 case BUILT_IN_EXTRACT_RETURN_ADDR:
9549 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
9550 case BUILT_IN_EH_RETURN:
9551 expand_builtin_eh_return (TREE_VALUE (arglist),
9552 TREE_VALUE (TREE_CHAIN (arglist)),
9553 TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))));
9556 default: /* just do library call, if unknown builtin */
9557 error ("built-in function `%s' not currently supported",
9558 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9561 /* The switch statement above can drop through to cause the function
9562 to be called normally. */
9564 return expand_call (exp, target, ignore);
/* Built-in functions to perform an untyped call and return.  */

/* For each register that may be used for calling a function, this
   gives a mode used to copy the register's value.  VOIDmode indicates
   the register is not used for calling a function.  If the machine
   has register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.
   Filled in lazily by apply_args_size below.  */
static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for returning values, this gives
   a mode used to copy the register's value.  VOIDmode indicates the
   register is not used for returning values.  If the machine has
   register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.
   Filled in lazily by apply_result_size below.  */
static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for calling a function, this
   gives the offset of that register into the block returned by
   __builtin_apply_args.  0 indicates that the register is not
   used for calling a function.
   Filled in by apply_args_size; read by apply_args_register_offset.  */
static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
/* Return the offset of register REGNO into the block returned by
   __builtin_apply_args.  This is not declared static, since it is
   needed in objc-act.c.  */

apply_args_register_offset (regno)
  /* Arguments are always put in outgoing registers (in the argument
     block) if such make sense.  */
#ifdef OUTGOING_REGNO
  /* On register-window targets, translate REGNO to the outbound
     register number before indexing the offset table.  */
  regno = OUTGOING_REGNO(regno);
  /* NOTE(review): assumes apply_args_reg_offset has been initialized
     by apply_args_size -- confirm a call precedes this lookup.  */
  return apply_args_reg_offset[regno];
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

  /* Cached result: -1 means "not yet computed"; once computed it is
     reused on every later call.  */
  static int size = -1;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
         passed as an "invisible" first argument.  */
      if (struct_value_rtx)
        size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_ARG_REGNO_P (regno))
            /* Search for the proper mode for copying this register's
               value.  I'm not sure this is right, but it works so far.  */
            enum machine_mode best_mode = VOIDmode;

            /* First try integer modes that occupy exactly one hard
               register.  */
            for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
                 mode = GET_MODE_WIDER_MODE (mode))
              if (HARD_REGNO_MODE_OK (regno, mode)
                  && HARD_REGNO_NREGS (regno, mode) == 1)

            /* Fall back to float modes for which a move pattern
               exists.  */
            if (best_mode == VOIDmode)
              for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
                   mode = GET_MODE_WIDER_MODE (mode))
                if (HARD_REGNO_MODE_OK (regno, mode)
                    && (mov_optab->handlers[(int) mode].insn_code
                        != CODE_FOR_nothing))

            if (mode == VOIDmode)

            /* Round the running size up to this mode's alignment,
               record the register's offset and mode, then advance
               past its slot.  */
            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            apply_args_reg_offset[regno] = size;
            size += GET_MODE_SIZE (mode);
            apply_args_mode[regno] = mode;
            /* Register is not used for passing arguments.  */
            apply_args_mode[regno] = VOIDmode;
            apply_args_reg_offset[regno] = 0;
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

apply_result_size ()
  /* Cached result: -1 means "not yet computed".  */
  static int size = -1;
  enum machine_mode mode;

  /* The values computed by this function never change.  */

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_VALUE_REGNO_P (regno))
            /* Search for the proper mode for copying this register's
               value.  I'm not sure this is right, but it works so far.  */
            enum machine_mode best_mode = VOIDmode;

            /* First try integer modes.  */
            for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
                 mode = GET_MODE_WIDER_MODE (mode))
              if (HARD_REGNO_MODE_OK (regno, mode))

            /* Fall back to float modes that have a move pattern.  */
            if (best_mode == VOIDmode)
              for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
                   mode = GET_MODE_WIDER_MODE (mode))
                if (HARD_REGNO_MODE_OK (regno, mode)
                    && (mov_optab->handlers[(int) mode].insn_code
                        != CODE_FOR_nothing))

            if (mode == VOIDmode)

            /* Round up to this mode's alignment and advance past the
               register's slot.  */
            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_result_mode[regno] = mode;
          apply_result_mode[regno] = VOIDmode;

  /* Allow targets that use untyped_call and untyped_return to override
     the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
  size = APPLY_RESULT_SIZE;
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

result_vector (savep, result)
  int regno, size, align, nelts;
  enum machine_mode mode;
  /* Worst case: one SET per hard register.  */
  rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
        /* Keep SIZE aligned for this mode; it is the running offset of
           the register's slot within RESULT.  */
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
        mem = change_address (result, mode,
                              plus_constant (XEXP (result, 0), size));
        /* Saving stores register into memory; restoring loads it back.  */
        savevec[nelts++] = (savep
                            ? gen_rtx_SET (VOIDmode, mem, reg)
                            : gen_rtx_SET (VOIDmode, reg, mem));
        size += GET_MODE_SIZE (mode);
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
#endif /* HAVE_untyped_call or HAVE_untyped_return */
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

expand_builtin_apply_args ()
  int size, align, regno;
  enum machine_mode mode;

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.
     SIZE is the running offset into the block.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;

        /* Use the inbound register number for the current function.  */
        tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

          /* For reg-stack.c's stack register household.
             Compare with a similar piece of code in function.c.  */

          emit_insn (gen_rtx_USE (mode, tem));

        emit_move_insn (change_address (registers, mode,
                                        plus_constant (XEXP (registers, 0),
        size += GET_MODE_SIZE (mode);

  /* Save the arg pointer to the block (at offset 0).  */
  emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
                  copy_to_reg (virtual_incoming_args_rtx));
  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_value_incoming_rtx)
      emit_move_insn (change_address (registers, Pmode,
                                      plus_constant (XEXP (registers, 0),
                      copy_to_reg (struct_value_incoming_rtx));
      size += GET_MODE_SIZE (Pmode);

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

expand_builtin_apply (function, arguments, argsize)
     rtx function, arguments, argsize;
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, call_insn;
  rtx old_stack_level = 0;
  /* Chain of USE expressions recording register usage for the call.  */
  rtx call_fusage = 0;

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* ??? The argsize value should be adjusted here.  */

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args,
                  gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  /* On upward-growing stacks the saved arg pointer is past the
     arguments; step back over them.  */
  incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
                                incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Perform postincrements before actually calling the function.  */

  /* Push a new argument block and copy the arguments.  */
  do_pending_stack_adjust ();

  /* Save the stack with nonlocal if available */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Push a block of memory onto the stack to store the memory arguments.
     Save the address in a register, and copy the memory arguments.  ??? I
     haven't figured out how the calling convention macros affect this,
     but it's likely that the source and/or destination addresses in
     the block copy will need updating in machine specific ways.  */
  dest = allocate_dynamic_stack_space (argsize, 0, 0);
  emit_block_move (gen_rtx_MEM (BLKmode, dest),
                   gen_rtx_MEM (BLKmode, incoming_args),
                   PARM_BOUNDARY / BITS_PER_UNIT);

  /* Refer to the argument block.  */
  arguments = gen_rtx_MEM (BLKmode, arguments);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, regno);
        emit_move_insn (reg,
                        change_address (arguments, mode,
                                        plus_constant (XEXP (arguments, 0),
        use_reg (&call_fusage, reg);
        size += GET_MODE_SIZE (mode);

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value,
                      change_address (arguments, Pmode,
                                      plus_constant (XEXP (arguments, 0),
      emit_move_insn (struct_value_rtx, value);
      if (GET_CODE (struct_value_rtx) == REG)
        use_reg (&call_fusage, struct_value_rtx);
      size += GET_MODE_SIZE (Pmode);

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
                                      result, result_vector (1, result)));
#ifdef HAVE_call_value
  if (HAVE_call_value)
      /* Locate the unique return register.  It is not possible to
         express a call that sets more than one return register using
         call_value; use untyped_call for that.  In fact, untyped_call
         only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if ((mode = apply_result_mode[regno]) != VOIDmode)
              abort (); /* HAVE_untyped_call required.  */
            valreg = gen_rtx_REG (mode, regno);

      emit_call_insn (gen_call_value (valreg,
                                      gen_rtx_MEM (FUNCTION_MODE, function),
                                      const0_rtx, NULL_RTX, const0_rtx));
      emit_move_insn (change_address (result, GET_MODE (valreg),

  /* Find the CALL insn we just emitted.  */
  for (call_insn = get_last_insn ();
       call_insn && GET_CODE (call_insn) != CALL_INSN;
       call_insn = PREV_INSN (call_insn))

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
           link = XEXP (link, 1))

      XEXP (link, 1) = call_fusage;
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  /* Return the address of the result block.  */
  return copy_addr_to_reg (XEXP (result, 0));
/* Perform an untyped return.  */

expand_builtin_return (result)
  int size, align, regno;
  enum machine_mode mode;
  rtx call_fusage = 0;

  /* Ensure apply_result_mode is initialized before we index it.  */
  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));

  /* Restore the return value and note that each value is used.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
        emit_move_insn (reg,
                        change_address (result, mode,
                                        plus_constant (XEXP (result, 0),
        /* Accumulate a USE of each restored register so the return
           sees them as live.  */
        push_to_sequence (call_fusage);
        emit_insn (gen_rtx_USE (VOIDmode, reg));
        call_fusage = get_insns ();
        size += GET_MODE_SIZE (mode);

  /* Put the USE insns before the return.  */
  emit_insns (call_fusage);

  /* Return whatever value was restored by jumping directly to the end
     of the function.  */
  expand_null_return ();
/* Expand code for a post- or pre- increment or decrement
   and return the RTX for the result.
   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */

expand_increment (exp, post, ignore)
  register rtx op0, op1;
  register rtx temp, value;
  register tree incremented = TREE_OPERAND (exp, 0);
  /* Assume increment; switched to sub_optab for decrements below.  */
  optab this_optab = add_optab;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  int op0_is_copy = 0;
  int single_insn = 0;
  /* 1 means we can't store into OP0 directly,
     because it is a subreg narrower than a word,
     and we don't dare clobber the rest of the word.  */
  int bad_subreg = 0;

  /* Stabilize any component ref that might need to be
     evaluated more than once below.  */
      || TREE_CODE (incremented) == BIT_FIELD_REF
      || (TREE_CODE (incremented) == COMPONENT_REF
          && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
              || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
    incremented = stabilize_reference (incremented);
  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
     ones into save exprs so that they don't accidentally get evaluated
     more than once by the code below.  */
  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
    incremented = save_expr (incremented);

  /* Compute the operands as RTX.
     Note whether OP0 is the actual lvalue or a copy of it:
     I believe it is a copy iff it is a register or subreg
     and insns were generated in computing it.  */
  temp = get_last_insn ();
  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);

  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
     in place but instead must do sign- or zero-extension during assignment,
     so we copy it into a new register and let the code below use it as

     Note that we can safely modify this SUBREG since it is known not to be
     shared (it was made by the expand_expr call above).  */

  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
      SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
  else if (GET_CODE (op0) == SUBREG
           && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
      /* We cannot increment this SUBREG in place.  If we are
         post-incrementing, get a copy of the old value.  Otherwise,
         just mark that we cannot increment in place.  */
        op0 = copy_to_reg (op0);

  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
                 && temp != get_last_insn ());
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
                     EXPAND_MEMORY_USE_BAD);

  /* Decide whether incrementing or decrementing.  */
  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
      || TREE_CODE (exp) == PREDECREMENT_EXPR)
    this_optab = sub_optab;

  /* Convert decrement by a constant into a negative increment.  */
  if (this_optab == sub_optab
      && GET_CODE (op1) == CONST_INT)
      op1 = GEN_INT (- INTVAL (op1));
      this_optab = add_optab;

  /* For a preincrement, see if we can do this with a single instruction.  */
  icode = (int) this_optab->handlers[(int) mode].insn_code;
  if (icode != (int) CODE_FOR_nothing
      /* Make sure that OP0 is valid for operands 0 and 1
         of the insn we want to queue.  */
      && (*insn_operand_predicate[icode][0]) (op0, mode)
      && (*insn_operand_predicate[icode][1]) (op0, mode)
      && (*insn_operand_predicate[icode][2]) (op1, mode))

  /* If OP0 is not the actual lvalue, but rather a copy in a register,
     then we cannot just increment OP0.  We must therefore contrive to
     increment the original value.  Then, for postincrement, we can return
     OP0 since it is a copy of the old value.  For preincrement, expand here
     unless we can do it with a single insn.

     Likewise if storing directly into OP0 would clobber high bits
     we need to preserve (bad_subreg).  */
  if (op0_is_copy || (!post && !single_insn) || bad_subreg)
      /* This is the easiest way to increment the value wherever it is.
         Problems with multiple evaluation of INCREMENTED are prevented
         because either (1) it is a component_ref or preincrement,
         in which case it was stabilized above, or (2) it is an array_ref
         with constant index in an array in a register, which is
         safe to reevaluate.  */
      tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
                             || TREE_CODE (exp) == PREDECREMENT_EXPR)
                            ? MINUS_EXPR : PLUS_EXPR),
                           TREE_OPERAND (exp, 1));

      /* Drop conversions so the assignment is done in the innermost
         type, converting the sum back at each level.  */
      while (TREE_CODE (incremented) == NOP_EXPR
             || TREE_CODE (incremented) == CONVERT_EXPR)
          newexp = convert (TREE_TYPE (incremented), newexp);
          incremented = TREE_OPERAND (incremented, 0);

      temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
      return post ? op0 : temp;

  /* We have a true reference to the value in OP0.
     If there is an insn to add or subtract in this mode, queue it.
     Queueing the increment insn avoids the register shuffling
     that often results if we must increment now and first save
     the old value for subsequent use.  */

#if 0  /* Turned off to avoid making extra insn for indexed memref.  */
  op0 = stabilize (op0);

  icode = (int) this_optab->handlers[(int) mode].insn_code;
  if (icode != (int) CODE_FOR_nothing
      /* Make sure that OP0 is valid for operands 0 and 1
         of the insn we want to queue.  */
      && (*insn_operand_predicate[icode][0]) (op0, mode)
      && (*insn_operand_predicate[icode][1]) (op0, mode))
      if (! (*insn_operand_predicate[icode][2]) (op1, mode))
        op1 = force_reg (mode, op1);

      return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
  if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
      /* Get a stable address for the memory operand so it can be
         reused by the queued insns.  */
      rtx addr = (general_operand (XEXP (op0, 0), mode)
                  ? force_reg (Pmode, XEXP (op0, 0))
                  : copy_to_reg (XEXP (op0, 0)));

      op0 = change_address (op0, VOIDmode, addr);
      temp = force_reg (GET_MODE (op0), op0);
      if (! (*insn_operand_predicate[icode][2]) (op1, mode))
        op1 = force_reg (mode, op1);

      /* The increment queue is LIFO, thus we have to `queue'
         the instructions in reverse order.  */
      enqueue_insn (op0, gen_move_insn (op0, temp));
      result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));

  /* Preincrement, or we can't increment with one simple insn.  */
    /* Save a copy of the value before inc or dec, to return it later.  */
    temp = value = copy_to_reg (op0);
    /* Arrange to return the incremented value.  */
    /* Copy the rtx because expand_binop will protect from the queue,
       and the results of that would be invalid for us to return
       if our caller does emit_queue before using our result.  */
    temp = copy_rtx (value = op0);

  /* Increment however we can.  */
  op1 = expand_binop (mode, this_optab, value, op1,
                      current_function_check_memory_usage ? NULL_RTX : op0,
                      TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
  /* Make sure the value is stored into OP0.  */
    emit_move_insn (op0, op1);
10260 /* Expand all function calls contained within EXP, innermost ones first.
10261 But don't look within expressions that have sequence points.
10262 For each CALL_EXPR, record the rtx for its value
10263 in the CALL_EXPR_RTL field. */
/* Walk EXP recursively, expanding each CALL_EXPR found and recording its
   value rtx in CALL_EXPR_RTL (see the header comment above).  The walk is
   globally gated by do_preexpand_calls.  NOTE(review): several lines of
   this function are elided in this excerpt; the case bodies below are
   partial.  */
10266 preexpand_calls (exp)
10269 register int nops, i;
10270 int type = TREE_CODE_CLASS (TREE_CODE (exp));
10272 if (! do_preexpand_calls)
10275 /* Only expressions and references can contain calls. */
10277 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
10280 switch (TREE_CODE (exp))
10283 /* Do nothing if already expanded. */
10284 if (CALL_EXPR_RTL (exp) != 0
10285 /* Do nothing if the call returns a variable-sized object. */
10286 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
10287 /* Do nothing to built-in functions. */
10288 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
10289 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
10291 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
/* Expand the call now, with no preferred target, and remember the rtx.  */
10294 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
10297 case COMPOUND_EXPR:
10299 case TRUTH_ANDIF_EXPR:
10300 case TRUTH_ORIF_EXPR:
10301 /* If we find one of these, then we can be sure
10302 the adjust will be done for it (since it makes jumps).
10303 Do it now, so that if this is inside an argument
10304 of a function, we don't get the stack adjustment
10305 after some other args have already been pushed. */
10306 do_pending_stack_adjust ();
/* NOTE(review): bodies of the cases below are elided here; per the header
   comment, expressions with sequence points are presumably not walked
   into -- confirm against the full source.  */
10311 case WITH_CLEANUP_EXPR:
10312 case CLEANUP_POINT_EXPR:
10313 case TRY_CATCH_EXPR:
10317 if (SAVE_EXPR_RTL (exp) != 0)
/* Default: recurse into every operand whose tree-code class can itself
   contain calls (same class filter as the early-exit test above).  */
10324 nops = tree_code_length[(int) TREE_CODE (exp)];
10325 for (i = 0; i < nops; i++)
10326 if (TREE_OPERAND (exp, i) != 0)
10328 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
10329 if (type == 'e' || type == '<' || type == '1' || type == '2'
10331 preexpand_calls (TREE_OPERAND (exp, i));
10335 /* At the start of a function, record that we have no previously-pushed
10336 arguments waiting to be popped. */
/* Reset the running count of pushed-but-unpopped argument bytes; called
   at the start of a function (see comment above).  */
10339 init_pending_stack_adjust ()
10341 pending_stack_adjust = 0;
10344 /* When exiting from function, if safe, clear out any pending stack adjust
10345 so the adjustment won't get done.
10347 Note, if the current function calls alloca, then it must have a
10348 frame pointer regardless of the value of flag_omit_frame_pointer. */
/* Discard any pending stack adjustment at function exit when it is safe
   to let the epilogue ignore the stack pointer.  */
10351 clear_pending_stack_adjust ()
10353 #ifdef EXIT_IGNORE_STACK
/* NOTE(review): the first clause of this condition is elided in this
   excerpt.  The visible clauses require a frame pointer (or a call to
   alloca), EXIT_IGNORE_STACK, and that this function will not be inlined
   elsewhere -- inlined copies would still need the adjustment.  */
10355 && (! flag_omit_frame_pointer || current_function_calls_alloca)
10356 && EXIT_IGNORE_STACK
10357 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
10358 && ! flag_inline_functions)
10359 pending_stack_adjust = 0;
10363 /* Pop any previously-pushed arguments that have not been popped yet. */
/* Emit the deferred stack-pointer adjustment, unless popping is currently
   inhibited (inhibit_defer_pop nonzero), in which case the adjustment is
   left pending.  */
10366 do_pending_stack_adjust ()
10368 if (inhibit_defer_pop == 0)
10370 if (pending_stack_adjust != 0)
10371 adjust_stack (GEN_INT (pending_stack_adjust));
10372 pending_stack_adjust = 0;
10376 /* Expand conditional expressions. */
10378 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
10379 LABEL is an rtx of code CODE_LABEL, in this function and all the
/* Jump to LABEL when EXP evaluates to zero: pass LABEL as the
   false-label of do_jump and fall through otherwise.  */
10383 jumpifnot (exp, label)
10387 do_jump (exp, label, NULL_RTX);
10390 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
/* Jump to LABEL when EXP evaluates to nonzero: pass LABEL as the
   true-label of do_jump and fall through otherwise.  */
10393 jumpif (exp, label)
10397 do_jump (exp, NULL_RTX, label);
10400 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
10401 the result is zero, or IF_TRUE_LABEL if the result is one.
10402 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
10403 meaning fall through in that case.
10405 do_jump always does any pending stack adjust except when it does not
10406 actually perform a jump. An example where there is no jump
10407 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
10409 This function is responsible for optimizing cases such as
10410 &&, || and comparison operators in EXP. */
/* Central dispatcher for conditional jumps (contract in the comment
   above): branch to IF_FALSE_LABEL when EXP is zero, IF_TRUE_LABEL when
   nonzero; a null label means fall through.  NOTE(review): many case
   labels and statements are elided in this excerpt; comments below are
   hedged where the governing case label is not visible.  */
10413 do_jump (exp, if_false_label, if_true_label)
10415 rtx if_false_label, if_true_label;
10417 register enum tree_code code = TREE_CODE (exp);
10418 /* Some cases need to create a label to jump to
10419 in order to properly fall through.
10420 These cases set DROP_THROUGH_LABEL nonzero. */
10421 rtx drop_through_label = 0;
10423 rtx comparison = 0;
10426 enum machine_mode mode;
10428 #ifdef MAX_INTEGER_COMPUTATION_MODE
10429 check_max_integer_computation_mode (exp);
/* Constant expression: select the destination label directly from its
   zero/nonzero value; no runtime test needed.  */
10440 temp = integer_zerop (exp) ? if_false_label : if_true_label;
10446 /* This is not true with #pragma weak */
10448 /* The address of something can never be zero. */
10450 emit_jump (if_true_label);
10455 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
10456 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
10457 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
10460 /* If we are narrowing the operand, we have to do the compare in the
10462 if ((TYPE_PRECISION (TREE_TYPE (exp))
10463 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10465 case NON_LVALUE_EXPR:
10466 case REFERENCE_EXPR:
10471 /* These cannot change zero->non-zero or vice versa. */
10472 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10476 /* This is never less insns than evaluating the PLUS_EXPR followed by
10477 a test and can be longer if the test is eliminated. */
10479 /* Reduce to minus. */
10480 exp = build (MINUS_EXPR, TREE_TYPE (exp),
10481 TREE_OPERAND (exp, 0),
10482 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
10483 TREE_OPERAND (exp, 1))));
10484 /* Process as MINUS. */
10488 /* Non-zero iff operands of minus differ. */
10489 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
10490 TREE_OPERAND (exp, 0),
10491 TREE_OPERAND (exp, 1)),
10496 /* If we are AND'ing with a small constant, do this comparison in the
10497 smallest type that fits. If the machine doesn't have comparisons
10498 that small, it will be converted back to the wider comparison.
10499 This helps if we are testing the sign bit of a narrower object.
10500 combine can't do this for us because it can't know whether a
10501 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
10503 if (! SLOW_BYTE_ACCESS
10504 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
10505 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
10506 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
10507 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
10508 && (type = type_for_mode (mode, 1)) != 0
10509 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10510 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10511 != CODE_FOR_nothing))
10513 do_jump (convert (type, exp), if_false_label, if_true_label);
/* Logical NOT: just swap the two target labels on the operand.  */
10518 case TRUTH_NOT_EXPR:
10519 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
/* Short-circuit AND: operand 0 false ends the whole test.  */
10522 case TRUTH_ANDIF_EXPR:
10523 if (if_false_label == 0)
10524 if_false_label = drop_through_label = gen_label_rtx ();
10525 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
10526 start_cleanup_deferral ();
10527 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10528 end_cleanup_deferral ();
/* Short-circuit OR: operand 0 true ends the whole test.  */
10531 case TRUTH_ORIF_EXPR:
10532 if (if_true_label == 0)
10533 if_true_label = drop_through_label = gen_label_rtx ();
10534 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
10535 start_cleanup_deferral ();
10536 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10537 end_cleanup_deferral ();
/* Comma expression: evaluate operand 0 for side effects only, then jump
   on operand 1.  */
10540 case COMPOUND_EXPR:
10541 push_temp_slots ();
10542 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
10543 preserve_temp_slots (NULL_RTX);
10544 free_temp_slots ();
10547 do_pending_stack_adjust ();
10548 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
/* Bit-field references: try to test the field in the narrowest type
   that holds it, as with the BIT_AND_EXPR case above.  */
10551 case COMPONENT_REF:
10552 case BIT_FIELD_REF:
10555 int bitsize, bitpos, unsignedp;
10556 enum machine_mode mode;
10562 /* Get description of this reference. We don't actually care
10563 about the underlying object here. */
10564 get_inner_reference (exp, &bitsize, &bitpos, &offset,
10565 &mode, &unsignedp, &volatilep,
10568 type = type_for_size (bitsize, unsignedp);
10569 if (! SLOW_BYTE_ACCESS
10570 && type != 0 && bitsize >= 0
10571 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10572 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10573 != CODE_FOR_nothing))
10575 do_jump (convert (type, exp), if_false_label, if_true_label);
10582 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10583 if (integer_onep (TREE_OPERAND (exp, 1))
10584 && integer_zerop (TREE_OPERAND (exp, 2)))
10585 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label)
10587 else if (integer_zerop (TREE_OPERAND (exp, 1))
10588 && integer_onep (TREE_OPERAND (exp, 2)))
10589 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
/* General conditional: branch on the condition, then emit jumps for the
   THEN and ELSE arms, each falling through to DROP_THROUGH_LABEL when
   the corresponding caller label is null.  */
10593 register rtx label1 = gen_label_rtx ();
10594 drop_through_label = gen_label_rtx ();
10596 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10598 start_cleanup_deferral ();
10599 /* Now the THEN-expression. */
10600 do_jump (TREE_OPERAND (exp, 1),
10601 if_false_label ? if_false_label : drop_through_label,
10602 if_true_label ? if_true_label : drop_through_label);
10603 /* In case the do_jump just above never jumps. */
10604 do_pending_stack_adjust ();
10605 emit_label (label1);
10607 /* Now the ELSE-expression. */
10608 do_jump (TREE_OPERAND (exp, 2),
10609 if_false_label ? if_false_label : drop_through_label,
10610 if_true_label ? if_true_label : drop_through_label);
10611 end_cleanup_deferral ();
/* Equality test (case label elided -- presumably EQ_EXPR): complex
   values are equal iff both real and imaginary parts are equal, built
   here as an ANDIF of two part-wise EQ tests on save_expr'd operands.  */
10617 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10619 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10620 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10622 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10623 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10626 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10627 fold (build (EQ_EXPR, TREE_TYPE (exp),
10628 fold (build1 (REALPART_EXPR,
10629 TREE_TYPE (inner_type),
10631 fold (build1 (REALPART_EXPR,
10632 TREE_TYPE (inner_type),
10634 fold (build (EQ_EXPR, TREE_TYPE (exp),
10635 fold (build1 (IMAGPART_EXPR,
10636 TREE_TYPE (inner_type),
10638 fold (build1 (IMAGPART_EXPR,
10639 TREE_TYPE (inner_type),
10641 if_false_label, if_true_label);
10644 else if (integer_zerop (TREE_OPERAND (exp, 1)))
10645 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
/* Wide integers the machine cannot compare directly are handled a word
   at a time.  */
10647 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10648 && !can_compare_p (TYPE_MODE (inner_type)))
10649 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10651 comparison = compare (exp, EQ, EQ);
/* Inequality test (case label elided -- presumably NE_EXPR): mirror of
   the EQ case, using an ORIF of two part-wise NE tests for complex.  */
10657 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10659 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10660 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10662 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10663 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10666 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10667 fold (build (NE_EXPR, TREE_TYPE (exp),
10668 fold (build1 (REALPART_EXPR,
10669 TREE_TYPE (inner_type),
10671 fold (build1 (REALPART_EXPR,
10672 TREE_TYPE (inner_type),
10674 fold (build (NE_EXPR, TREE_TYPE (exp),
10675 fold (build1 (IMAGPART_EXPR,
10676 TREE_TYPE (inner_type),
10678 fold (build1 (IMAGPART_EXPR,
10679 TREE_TYPE (inner_type),
10681 if_false_label, if_true_label);
10684 else if (integer_zerop (TREE_OPERAND (exp, 1)))
10685 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10687 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10688 && !can_compare_p (TYPE_MODE (inner_type)))
10689 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10691 comparison = compare (exp, NE, NE);
/* Ordering comparisons (case labels elided): each falls back to a
   word-at-a-time compare when the operand mode is too wide, otherwise
   produces a COMPARISON rtx handled at the bottom of the function.  */
10696 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10698 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10699 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10701 comparison = compare (exp, LT, LTU);
10705 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10707 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10708 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10710 comparison = compare (exp, LE, LEU);
10714 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10716 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10717 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10719 comparison = compare (exp, GT, GTU);
10723 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10725 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10726 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10728 comparison = compare (exp, GE, GEU);
/* Fallback (label elided -- presumably the default case): evaluate EXP
   and compare the result against zero.  */
10733 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10735 /* This is not needed any more and causes poor code since it causes
10736 comparisons and tests from non-SI objects to have different code
10738 /* Copy to register to avoid generating bad insns by cse
10739 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10740 if (!cse_not_expected && GET_CODE (temp) == MEM)
10741 temp = copy_to_reg (temp);
10743 do_pending_stack_adjust ();
10744 if (GET_CODE (temp) == CONST_INT)
10745 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10746 else if (GET_CODE (temp) == LABEL_REF)
10747 comparison = const_true_rtx;
10748 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10749 && !can_compare_p (GET_MODE (temp)))
10750 /* Note swapping the labels gives us not-equal. */
10751 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10752 else if (GET_MODE (temp) != VOIDmode)
10753 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
10754 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10755 GET_MODE (temp), NULL_RTX, 0);
10760 /* Do any postincrements in the expression that was tested. */
10763 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10764 straight into a conditional jump instruction as the jump condition.
10765 Otherwise, all the work has been done already. */
/* A constant-true/false COMPARISON becomes an unconditional jump (or a
   fall-through when the matching label is null).  */
10767 if (comparison == const_true_rtx)
10770 emit_jump (if_true_label);
10772 else if (comparison == const0_rtx)
10774 if (if_false_label)
10775 emit_jump (if_false_label);
10777 else if (comparison)
10778 do_jump_for_compare (comparison, if_false_label, if_true_label);
10780 if (drop_through_label)
10782 /* If do_jump produces code that might be jumped around,
10783 do any stack adjusts from that code, before the place
10784 where control merges in. */
10785 do_pending_stack_adjust ();
10786 emit_label (drop_through_label);
10790 /* Given a comparison expression EXP for values too wide to be compared
10791 with one insn, test the comparison and jump to the appropriate label.
10792 The code of EXP is ignored; we always test GT if SWAP is 0,
10793 and LT if SWAP is 1. */
/* Word-at-a-time "greater than" jump for operands too wide for one
   compare insn (see comment above).  SWAP selects which tree operand is
   expanded as OP0, so only a GT-style test is ever needed: with SWAP==1
   the test effectively becomes LT.  */
10796 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10799 rtx if_false_label, if_true_label;
10801 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10802 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10803 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10804 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10805 rtx drop_through_label = 0;
10806 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
/* Any null caller label becomes a local fall-through label.  */
10809 if (! if_true_label || ! if_false_label)
10810 drop_through_label = gen_label_rtx ();
10811 if (! if_true_label)
10812 if_true_label = drop_through_label;
10813 if (! if_false_label)
10814 if_false_label = drop_through_label;
10816 /* Compare a word at a time, high order first. */
10817 for (i = 0; i < nwords; i++)
10820 rtx op0_word, op1_word;
/* Word index 0 is the high-order word on big-endian targets; index from
   the other end otherwise.  */
10822 if (WORDS_BIG_ENDIAN)
10824 op0_word = operand_subword_force (op0, i, mode);
10825 op1_word = operand_subword_force (op1, i, mode);
10829 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10830 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10833 /* All but high-order word must be compared as unsigned. */
10834 comp = compare_from_rtx (op0_word, op1_word,
10835 (unsignedp || i > 0) ? GTU : GT,
10836 unsignedp, word_mode, NULL_RTX, 0);
/* compare_from_rtx may fold to a constant; branch or fall out then.  */
10837 if (comp == const_true_rtx)
10838 emit_jump (if_true_label);
10839 else if (comp != const0_rtx)
10840 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10842 /* Consider lower words only if these are equal. */
10843 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10845 if (comp == const_true_rtx)
10846 emit_jump (if_false_label);
10847 else if (comp != const0_rtx)
10848 do_jump_for_compare (comp, NULL_RTX, if_false_label);
/* All words equal: the strict "greater" test is false.  */
10851 if (if_false_label)
10852 emit_jump (if_false_label);
10853 if (drop_through_label)
10854 emit_label (drop_through_label);
10857 /* Compare OP0 with OP1, word at a time, in mode MODE.
10858 UNSIGNEDP says to do unsigned comparison.
10859 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
/* rtx-level twin of do_jump_by_parts_greater (contract in the comment
   above): OP0 and OP1 are already rtx, UNSIGNEDP selects GTU vs GT for
   the high-order word.  The loop body is intentionally identical to the
   tree-level version.  */
10862 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10863 enum machine_mode mode;
10866 rtx if_false_label, if_true_label;
10868 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10869 rtx drop_through_label = 0;
/* Any null caller label becomes a local fall-through label.  */
10872 if (! if_true_label || ! if_false_label)
10873 drop_through_label = gen_label_rtx ();
10874 if (! if_true_label)
10875 if_true_label = drop_through_label;
10876 if (! if_false_label)
10877 if_false_label = drop_through_label;
10879 /* Compare a word at a time, high order first. */
10880 for (i = 0; i < nwords; i++)
10883 rtx op0_word, op1_word;
/* Pick subwords so iteration always proceeds high-order word first.  */
10885 if (WORDS_BIG_ENDIAN)
10887 op0_word = operand_subword_force (op0, i, mode);
10888 op1_word = operand_subword_force (op1, i, mode);
10892 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10893 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10896 /* All but high-order word must be compared as unsigned. */
10897 comp = compare_from_rtx (op0_word, op1_word,
10898 (unsignedp || i > 0) ? GTU : GT,
10899 unsignedp, word_mode, NULL_RTX, 0);
10900 if (comp == const_true_rtx)
10901 emit_jump (if_true_label);
10902 else if (comp != const0_rtx)
10903 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10905 /* Consider lower words only if these are equal. */
10906 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10908 if (comp == const_true_rtx)
10909 emit_jump (if_false_label);
10910 else if (comp != const0_rtx)
10911 do_jump_for_compare (comp, NULL_RTX, if_false_label);
/* All words equal: OP0 is not greater than OP1.  */
10914 if (if_false_label)
10915 emit_jump (if_false_label);
10916 if (drop_through_label)
10917 emit_label (drop_through_label);
10920 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10921 with one insn, test the comparison and jump to the appropriate label. */
/* Word-at-a-time equality jump for an EQ_EXPR whose operands are too
   wide for one compare insn (see comment above): any unequal word sends
   control to IF_FALSE_LABEL; only if every word compares equal do we
   reach IF_TRUE_LABEL.  */
10924 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10926 rtx if_false_label, if_true_label;
10928 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10929 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10930 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10931 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10933 rtx drop_through_label = 0;
/* With no false-label, make one locally so unequal words can skip the
   final jump to IF_TRUE_LABEL.  */
10935 if (! if_false_label)
10936 drop_through_label = if_false_label = gen_label_rtx ();
10938 for (i = 0; i < nwords; i++)
10940 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
10941 operand_subword_force (op1, i, mode),
10942 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10943 word_mode, NULL_RTX, 0);
/* NOTE(review): a line appears elided near here; as written the folded
   constant-true compare jumps to the FALSE label -- confirm the
   surrounding (elided) logic against the full source.  */
10944 if (comp == const_true_rtx)
10945 emit_jump (if_false_label);
10946 else if (comp != const0_rtx)
10947 do_jump_for_compare (comp, if_false_label, NULL_RTX);
/* Every word matched: the values are equal.  */
10951 emit_jump (if_true_label);
10952 if (drop_through_label)
10953 emit_label (drop_through_label);
10956 /* Jump according to whether OP0 is 0.
10957 We assume that OP0 has an integer mode that is too wide
10958 for the available compare insns. */
/* Jump on whether wide integer OP0 is zero (contract in the comment
   above).  Fast path: OR all words together and test the single result;
   fall back to word-by-word compares if the OR cannot be formed.  */
10961 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10963 rtx if_false_label, if_true_label;
10965 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10968 rtx drop_through_label = 0;
10970 /* The fastest way of doing this comparison on almost any machine is to
10971 "or" all the words and compare the result. If all have to be loaded
10972 from memory and this is a very wide item, it's possible this may
10973 be slower, but that's highly unlikely. */
10975 part = gen_reg_rtx (word_mode);
10976 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
/* expand_binop may return 0; PART becoming 0 aborts the fast path.  */
10977 for (i = 1; i < nwords && part != 0; i++)
10978 part = expand_binop (word_mode, ior_optab, part,
10979 operand_subword_force (op0, i, GET_MODE (op0)),
10980 part, 1, OPTAB_WIDEN);
/* Fast path succeeded: one compare of the OR'd word against zero.  */
10984 rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
10987 if (comp == const_true_rtx)
10988 emit_jump (if_false_label);
10989 else if (comp == const0_rtx)
10990 emit_jump (if_true_label);
10992 do_jump_for_compare (comp, if_false_label, if_true_label);
10997 /* If we couldn't do the "or" simply, do this with a series of compares. */
10998 if (! if_false_label)
10999 drop_through_label = if_false_label = gen_label_rtx ();
11001 for (i = 0; i < nwords; i++)
11003 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
11005 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
11006 if (comp == const_true_rtx)
11007 emit_jump (if_false_label);
11008 else if (comp != const0_rtx)
11009 do_jump_for_compare (comp, if_false_label, NULL_RTX);
/* No word was nonzero: OP0 is zero.  */
11013 emit_jump (if_true_label);
11015 if (drop_through_label)
11016 emit_label (drop_through_label);
11019 /* Given a comparison expression in rtl form, output conditional branches to
11020 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
/* Emit the actual branch insn(s) for COMPARISON (an rtx comparison
   against cc0/zero), targeting IF_TRUE_LABEL and/or IF_FALSE_LABEL.
   bcc_gen_fctn indexes branch generators by rtx comparison code.
   NOTE(review): several lines are elided in this excerpt; the two
   top-level arms below correspond to "true label given" vs "only false
   label given".  */
11023 do_jump_for_compare (comparison, if_false_label, if_true_label)
11024 rtx comparison, if_false_label, if_true_label;
/* True label present: branch to it on COMPARISON ...  */
11028 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
11029 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
/* ... then an unconditional jump covers the false side.  */
11033 if (if_false_label)
11034 emit_jump (if_false_label);
11036 else if (if_false_label)
11039 rtx prev = get_last_insn ();
11042 /* Output the branch with the opposite condition. Then try to invert
11043 what is generated. If more than one insn is a branch, or if the
11044 branch is not the last insn written, abort. If we can't invert
11045 the branch, emit make a true label, redirect this jump to that,
11046 emit a jump to the false label and define the true label. */
11048 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
11049 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
11053 /* Here we get the first insn that was just emitted. It used to be the
11054 case that, on some machines, emitting the branch would discard
11055 the previous compare insn and emit a replacement. This isn't
11056 done anymore, but abort if we see that PREV is deleted. */
11059 insn = get_insns ();
11060 else if (INSN_DELETED_P (prev))
11063 insn = NEXT_INSN (prev);
/* Locate the (single) JUMP_INSN just emitted.  */
11065 for (; insn; insn = NEXT_INSN (insn))
11066 if (GET_CODE (insn) == JUMP_INSN)
11073 if (branch != get_last_insn ())
11076 JUMP_LABEL (branch) = if_false_label;
/* If inversion fails, fall back to the redirect-and-jump sequence
   described in the comment above.  */
11077 if (! invert_jump (branch, if_false_label))
11079 if_true_label = gen_label_rtx ();
11080 redirect_jump (branch, if_true_label);
11081 emit_jump (if_false_label);
11082 emit_label (if_true_label);
11087 /* Generate code for a comparison expression EXP
11088 (including code to compute the values to be compared)
11089 and set (CC0) according to the result.
11090 SIGNED_CODE should be the rtx operation for this comparison for
11091 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
11093 We force a stack adjustment unless there are currently
11094 things pushed on the stack that aren't yet used. */
/* Expand both operands of comparison EXP and emit the compare (contract
   in the comment above): the rtx code used is SIGNED_CODE or
   UNSIGNED_CODE depending on the signedness of operand 0's type.  */
11097 compare (exp, signed_code, unsigned_code)
11099 enum rtx_code signed_code, unsigned_code;
11102 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
11104 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
11105 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
11106 register enum machine_mode mode = TYPE_MODE (type);
11107 int unsignedp = TREE_UNSIGNED (type);
11108 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
11110 #ifdef HAVE_canonicalize_funcptr_for_compare
11111 /* If function pointers need to be "canonicalized" before they can
11112 be reliably compared, then canonicalize them. */
/* Each operand that is a pointer-to-function is copied through the
   target's canonicalization insn into a fresh register.  */
11113 if (HAVE_canonicalize_funcptr_for_compare
11114 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
11115 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
11118 rtx new_op0 = gen_reg_rtx (mode);
11120 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
11124 if (HAVE_canonicalize_funcptr_for_compare
11125 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
11126 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
11129 rtx new_op1 = gen_reg_rtx (mode);
11131 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
/* Delegate to the rtx-level comparison; the size argument (partially
   elided here) is supplied for BLKmode operands, and alignment comes
   from the expression's type.  */
11136 return compare_from_rtx (op0, op1, code, unsignedp, mode,
11138 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
11139 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
11142 /* Like compare but expects the values to compare as two rtx's.
11143 The decision as to signed or unsigned comparison must be made by the caller.
11145 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
11148 If ALIGN is non-zero, it is the alignment of this type; if zero, the
11149 size of MODE should be used. */
/* Emit a compare of OP0 against OP1 with rtx code CODE (contract in the
   comment above) and return a comparison rtx against cc0.  May fold to
   const_true_rtx/const0_rtx when both operands are constant.  */
11152 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
11153 register rtx op0, op1;
11154 enum rtx_code code;
11156 enum machine_mode mode;
11162 /* If one operand is constant, make it the second one. Only do this
11163 if the other operand is not constant as well. */
11165 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
11166 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
/* Swapping the operands requires swapping the comparison sense too.  */
11171 code = swap_condition (code);
11174 if (flag_force_mem)
11176 op0 = force_not_mem (op0);
11177 op1 = force_not_mem (op1);
11180 do_pending_stack_adjust ();
/* Two constants: try to fold the whole comparison at compile time.  */
11182 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
11183 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
11187 /* There's no need to do this now that combine.c can eliminate lots of
11188 sign extensions. This can be less efficient in certain cases on other
11191 /* If this is a signed equality comparison, we can do it as an
11192 unsigned comparison since zero-extension is cheaper than sign
11193 extension and comparisons with zero are done as unsigned. This is
11194 the case even on machines that can do fast sign extension, since
11195 zero-extension is easier to combine with other operations than
11196 sign-extension is. If we are comparing against a constant, we must
11197 convert it to what it would look like unsigned. */
11198 if ((code == EQ || code == NE) && ! unsignedp
11199 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
/* Mask the constant down to the operand's mode so the unsigned
   comparison sees the same bit pattern.  */
11201 if (GET_CODE (op1) == CONST_INT
11202 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
11203 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
/* Emit the compare insn and hand back a CODE comparison against cc0
   for the caller to place in a conditional branch.  */
11208 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
11210 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
11213 /* Generate code to calculate EXP using a store-flag instruction
11214 and return an rtx for the result. EXP is either a comparison
11215 or a TRUTH_NOT_EXPR whose operand is a comparison.
11217 If TARGET is nonzero, store the result there if convenient.
11219 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
11222 Return zero if there is no suitable set-flag instruction
11223 available on this machine.
11225 Once expand_expr has been called on the arguments of the comparison,
11226 we are committed to doing the store flag, since it is not safe to
11227 re-evaluate the expression. We emit the store-flag insn by calling
11228 emit_store_flag, but only expand the arguments if we have a reason
11229 to believe that emit_store_flag will be successful. If we think that
11230 it will, but it isn't, we have to simulate the store-flag with a
11231 set/jump/set sequence. */
/* Try to expand the comparison expression EXP as a "store flag"
   operation: compute a 0/1 truth value into a register instead of
   branching.  TARGET is a suggested place for the result (may be 0);
   MODE is the mode the caller wants the result in; ONLY_CHEAP nonzero
   means give up unless the operation looks inexpensive.
   NOTE(review): this listing is a sampled excerpt of the original
   function -- switch case labels, early returns and some braces fall
   on lines that are not shown here.  */
11234 do_store_flag (exp, target, mode, only_cheap)
11237 enum machine_mode mode;
11240 enum rtx_code code;
11241 tree arg0, arg1, type;
11243 enum machine_mode operand_mode;
11247 enum insn_code icode;
11248 rtx subtarget = target;
11251 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
11252 result at the end. We can't simply invert the test since it would
11253 have already been inverted if it were valid. This case occurs for
11254 some floating-point comparisons. */
11256 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
11257 invert = 1, exp = TREE_OPERAND (exp, 0);
11259 arg0 = TREE_OPERAND (exp, 0);
11260 arg1 = TREE_OPERAND (exp, 1);
11261 type = TREE_TYPE (arg0);
11262 operand_mode = TYPE_MODE (type);
11263 unsignedp = TREE_UNSIGNED (type);
11265 /* We won't bother with BLKmode store-flag operations because it would mean
11266 passing a lot of information to emit_store_flag. */
11267 if (operand_mode == BLKmode)
11270 /* We won't bother with store-flag operations involving function pointers
11271 when function pointers must be canonicalized before comparisons. */
11272 #ifdef HAVE_canonicalize_funcptr_for_compare
11273 if (HAVE_canonicalize_funcptr_for_compare
11274 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
11275 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
11277 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
11278 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
11279 == FUNCTION_TYPE))))
11286 /* Get the rtx comparison code to use. We know that EXP is a comparison
11287 operation of some type. Some comparisons against 1 and -1 can be
11288 converted to comparisons with zero. Do so here so that the tests
11289 below will be aware that we have a comparison with zero. These
11290 tests will not catch constants in the first operand, but constants
11291 are rarely passed as the first operand. */
11293 switch (TREE_CODE (exp))
/* "x < 1" is rewritten as "x <= 0".  */
11302 if (integer_onep (arg1))
11303 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
11305 code = unsignedp ? LTU : LT;
/* Signed "x <= -1" is rewritten as "x < 0".  */
11308 if (! unsignedp && integer_all_onesp (arg1))
11309 arg1 = integer_zero_node, code = LT;
11311 code = unsignedp ? LEU : LE;
/* Signed "x > -1" is rewritten as "x >= 0".  */
11314 if (! unsignedp && integer_all_onesp (arg1))
11315 arg1 = integer_zero_node, code = GE;
11317 code = unsignedp ? GTU : GT;
/* "x >= 1" is rewritten as "x > 0".  */
11320 if (integer_onep (arg1))
11321 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
11323 code = unsignedp ? GEU : GE;
11329 /* Put a constant second. */
11330 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
11332 tem = arg0; arg0 = arg1; arg1 = tem;
11333 code = swap_condition (code);
11336 /* If this is an equality or inequality test of a single bit, we can
11337 do this by shifting the bit being tested to the low-order bit and
11338 masking the result with the constant 1. If the condition was EQ,
11339 we xor it with 1. This does not require an scc insn and is faster
11340 than an scc insn even if we have it. */
11342 if ((code == NE || code == EQ)
11343 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
11344 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11346 tree inner = TREE_OPERAND (arg0, 0);
11347 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
11350 /* If INNER is a right shift of a constant and it plus BITNUM does
11351 not overflow, adjust BITNUM and INNER. */
11353 if (TREE_CODE (inner) == RSHIFT_EXPR
11354 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
11355 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
11356 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
11357 < TYPE_PRECISION (type)))
11359 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
11360 inner = TREE_OPERAND (inner, 0);
11363 /* If we are going to be able to omit the AND below, we must do our
11364 operations as unsigned. If we must use the AND, we have a choice.
11365 Normally unsigned is faster, but for some machines signed is. */
11366 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
11367 #ifdef LOAD_EXTEND_OP
11368 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
/* Reuse SUBTARGET only if it is a REG of the right mode that INNER
   does not reference; the replacement chosen when it is unsafe falls
   on a line not shown in this listing.  */
11374 if (subtarget == 0 || GET_CODE (subtarget) != REG
11375 || GET_MODE (subtarget) != operand_mode
11376 || ! safe_from_p (subtarget, inner, 1))
11379 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
/* Shift the tested bit down into bit 0.  */
11382 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
11383 size_int (bitnum), subtarget, ops_unsignedp);
11385 if (GET_MODE (op0) != mode)
11386 op0 = convert_to_mode (mode, op0, ops_unsignedp);
/* For EQ (or an inverted NE) the sense of the bit is flipped by
   XORing with 1.  */
11388 if ((code == EQ && ! invert) || (code == NE && invert))
11389 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
11390 ops_unsignedp, OPTAB_LIB_WIDEN);
11392 /* Put the AND last so it can combine with more things. */
11393 if (bitnum != TYPE_PRECISION (type) - 1)
11394 op0 = expand_and (op0, const1_rtx, subtarget);
11399 /* Now see if we are likely to be able to do this. Return if not. */
11400 if (! can_compare_p (operand_mode))
/* Look up the scc (set-condition-code) pattern for this comparison.  */
11402 icode = setcc_gen_code[(int) code];
11403 if (icode == CODE_FOR_nothing
11404 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
11406 /* We can only do this if it is one of the special cases that
11407 can be handled without an scc insn. */
11408 if ((code == LT && integer_zerop (arg1))
11409 || (! only_cheap && code == GE && integer_zerop (arg1)))
11411 else if (BRANCH_COST >= 0
11412 && ! only_cheap && (code == NE || code == EQ)
11413 && TREE_CODE (type) != REAL_TYPE
11414 && ((abs_optab->handlers[(int) operand_mode].insn_code
11415 != CODE_FOR_nothing)
11416 || (ffs_optab->handlers[(int) operand_mode].insn_code
11417 != CODE_FOR_nothing)))
11423 preexpand_calls (exp);
11424 if (subtarget == 0 || GET_CODE (subtarget) != REG
11425 || GET_MODE (subtarget) != operand_mode
11426 || ! safe_from_p (subtarget, arg1, 1))
11429 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
11430 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
/* NOTE(review): a guard such as "if (target == 0)" presumably precedes
   this on a line not shown, so a caller-supplied TARGET is kept.  */
11433 target = gen_reg_rtx (mode);
11435 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
11436 because, if the emit_store_flag does anything it will succeed and
11437 OP0 and OP1 will not be used subsequently. */
11439 result = emit_store_flag (target, code,
11440 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
11441 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
11442 operand_mode, unsignedp, 1);
/* On success, the 0/1 result is inverted by XOR with 1 when a
   TRUTH_NOT_EXPR asked for the opposite sense (the guarding test is
   on lines not shown here).  */
11447 result = expand_binop (mode, xor_optab, result, const1_rtx,
11448 result, 0, OPTAB_LIB_WIDEN);
11452 /* If this failed, we have to do this with set/compare/jump/set code. */
11453 if (GET_CODE (target) != REG
11454 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
11455 target = gen_reg_rtx (GET_MODE (target));
/* Assume the comparison holds, then branch around the correction.  */
11457 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
11458 result = compare_from_rtx (op0, op1, code, unsignedp,
11459 operand_mode, NULL_RTX, 0);
/* compare_from_rtx folded the comparison to a constant, so the answer
   is known at compile time.  */
11460 if (GET_CODE (result) == CONST_INT)
11461 return (((result == const0_rtx && ! invert)
11462 || (result != const0_rtx && invert))
11463 ? const0_rtx : const1_rtx);
11465 label = gen_label_rtx ();
11466 if (bcc_gen_fctn[(int) code] == 0)
/* Conditional branch around the move that stores the "false" value.  */
11469 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
11470 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
11471 emit_label (label);
11476 /* Generate a tablejump instruction (used for switch statements). */
11478 #ifdef HAVE_tablejump
11480 /* INDEX is the value being switched on, with the lowest value
11481 in the table already subtracted.
11482 MODE is its expected mode (needed if INDEX is constant).
11483 RANGE is the length of the jump table.
11484 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11486 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11487 index value is out of range. */
/* Emit RTL for a tablejump: bounds-check INDEX (already rebased to the
   table's lower bound) against RANGE, jumping to DEFAULT_LABEL when out
   of range, then jump indirectly through the dispatch table at
   TABLE_LABEL.
   NOTE(review): this listing is a sampled excerpt -- a few original
   lines (closing braces, #else/#endif of the PIC conditionals, the
   final barrier emission) are not visible here.  */
11490 do_tablejump (index, mode, range, table_label, default_label)
11491 rtx index, range, table_label, default_label;
11492 enum machine_mode mode;
11494 register rtx temp, vector;
11496 /* Do an unsigned comparison (in the proper mode) between the index
11497 expression and the value which represents the length of the range.
11498 Since we just finished subtracting the lower bound of the range
11499 from the index expression, this comparison allows us to simultaneously
11500 check that the original index expression value is both greater than
11501 or equal to the minimum value of the range and less than or equal to
11502 the maximum value of the range. */
11504 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
11505 emit_jump_insn (gen_bgtu (default_label));
11507 /* If index is in range, it must fit in Pmode.
11508 Convert to Pmode so we can index with it. */
11510 index = convert_to_mode (Pmode, index, 1);
11512 /* Don't let a MEM slip thru, because then INDEX that comes
11513 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11514 and break_out_memory_refs will go to work on it and mess it up. */
11515 #ifdef PIC_CASE_VECTOR_ADDRESS
11516 if (flag_pic && GET_CODE (index) != REG)
11517 index = copy_to_mode_reg (Pmode, index);
11520 /* If flag_force_addr were to affect this address
11521 it could interfere with the tricky assumptions made
11522 about addresses that contain label-refs,
11523 which may be valid only very near the tablejump itself. */
11524 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11525 GET_MODE_SIZE, because this indicates how large insns are. The other
11526 uses should all be Pmode, because they are addresses. This code
11527 could fail if addresses and insns are not the same size. */
/* Address of the selected entry: TABLE_LABEL + INDEX * entry_size.  */
11528 index = gen_rtx_PLUS (Pmode,
11529 gen_rtx_MULT (Pmode, index,
11530 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
11531 gen_rtx_LABEL_REF (Pmode, table_label));
11532 #ifdef PIC_CASE_VECTOR_ADDRESS
11534 index = PIC_CASE_VECTOR_ADDRESS (index);
11537 index = memory_address_noforce (CASE_VECTOR_MODE, index);
11538 temp = gen_reg_rtx (CASE_VECTOR_MODE);
/* The dispatch table is constant, so the load from it is marked
   unchanging for the optimizers.  */
11539 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
11540 RTX_UNCHANGING_P (vector) = 1;
11541 convert_move (temp, vector, 0);
11543 emit_jump_insn (gen_tablejump (temp, table_label));
11545 /* If we are generating PIC code or if the table is PC-relative, the
11546 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11547 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11551 #endif /* HAVE_tablejump */