1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 92-97, 1998 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
30 #include "hard-reg-set.h"
33 #include "insn-flags.h"
34 #include "insn-codes.h"
35 #include "insn-config.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
40 #include "typeclass.h"
44 #define CEIL(x,y) (((x) + (y) - 1) / (y))
46 /* Decide whether a function's arguments should be processed
47 from first to last or from last to first.
49 They should if the stack and args grow in opposite directions, but
50 only if we have push insns. */
54 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
55 #define PUSH_ARGS_REVERSED /* If it's last to first */
60 #ifndef STACK_PUSH_CODE
61 #ifdef STACK_GROWS_DOWNWARD
62 #define STACK_PUSH_CODE PRE_DEC
64 #define STACK_PUSH_CODE PRE_INC
68 /* Assume that case vectors are not pc-relative. */
69 #ifndef CASE_VECTOR_PC_RELATIVE
70 #define CASE_VECTOR_PC_RELATIVE 0
73 /* If this is nonzero, we do not bother generating VOLATILE
74 around volatile memory references, and we are willing to
75 output indirect addresses. If cse is to follow, we reject
76 indirect addresses so a useful potential cse is generated;
77 if it is used only once, instruction combination will produce
78 the same indirect address eventually. */
81 /* Nonzero to generate code for all the subroutines within an
82 expression before generating the upper levels of the expression.
83 Nowadays this is never zero. */
84 int do_preexpand_calls = 1;
86 /* Number of units that we should eventually pop off the stack.
87 These are the arguments to function calls that have already returned. */
88 int pending_stack_adjust;
90 /* Nonzero means stack pops must not be deferred, and deferred stack
91 pops must not be output. It is nonzero inside a function call,
92 inside a conditional expression, inside a statement expression,
93 and in other cases as well. */
94 int inhibit_defer_pop;
96 /* Nonzero means __builtin_saveregs has already been done in this function.
97 The value is the pseudoreg containing the value __builtin_saveregs
99 static rtx saveregs_value;
101 /* Similarly for __builtin_apply_args. */
102 static rtx apply_args_value;
104 /* Nonzero if the machine description has been fixed to accept
105 CONSTANT_P_RTX patterns. We will emit a warning and continue
106 if we find we must actually use such a beast. */
107 static int can_handle_constant_p;
109 /* Don't check memory usage, since code is being emitted to check a memory
110 usage. Used when current_function_check_memory_usage is true, to avoid
111 infinite recursion. */
112 static int in_check_memory_usage;
114 /* Postincrements that still need to be expanded. */
115 static rtx pending_chain;
117 /* This structure is used by move_by_pieces to describe the move to
119 struct move_by_pieces
129 int explicit_inc_from;
136 /* This structure is used by clear_by_pieces to describe the clear to
139 struct clear_by_pieces
151 extern struct obstack permanent_obstack;
152 extern rtx arg_pointer_save_area;
154 static rtx get_push_address PROTO ((int));
156 static rtx enqueue_insn PROTO((rtx, rtx));
157 static int queued_subexp_p PROTO((rtx));
158 static void init_queue PROTO((void));
159 static int move_by_pieces_ninsns PROTO((unsigned int, int));
160 static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
161 struct move_by_pieces *));
162 static void clear_by_pieces PROTO((rtx, int, int));
163 static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
164 struct clear_by_pieces *));
165 static int is_zeros_p PROTO((tree));
166 static int mostly_zeros_p PROTO((tree));
167 static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
169 static void store_constructor PROTO((tree, rtx, int));
170 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
171 enum machine_mode, int, int,
173 static enum memory_use_mode
174 get_memory_usage_from_modifier PROTO((enum expand_modifier));
175 static tree save_noncopied_parts PROTO((tree, tree));
176 static tree init_noncopied_parts PROTO((tree, tree));
177 static int safe_from_p PROTO((rtx, tree, int));
178 static int fixed_type_p PROTO((tree));
179 static rtx var_rtx PROTO((tree));
180 static int get_pointer_alignment PROTO((tree, unsigned));
181 static tree string_constant PROTO((tree, tree *));
182 static tree c_strlen PROTO((tree));
183 static rtx get_memory_rtx PROTO((tree));
184 static rtx expand_builtin PROTO((tree, rtx, rtx,
185 enum machine_mode, int));
186 static int apply_args_size PROTO((void));
187 static int apply_result_size PROTO((void));
188 static rtx result_vector PROTO((int, rtx));
189 static rtx expand_builtin_apply_args PROTO((void));
190 static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
191 static void expand_builtin_return PROTO((rtx));
192 static rtx expand_increment PROTO((tree, int, int));
193 static void preexpand_calls PROTO((tree));
194 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
195 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
196 static void do_jump_for_compare PROTO((rtx, rtx, rtx));
197 static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
198 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
200 /* Record for each mode whether we can move a register directly to or
201 from an object of that mode in memory. If we can't, we won't try
202 to use that mode directly when accessing a field of that mode. */
204 static char direct_load[NUM_MACHINE_MODES];
205 static char direct_store[NUM_MACHINE_MODES];
207 /* If a memory-to-memory move would take MOVE_RATIO or more simple
208 move-instruction sequences, we will do a movstr or libcall instead. */
211 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
214 /* If we are optimizing for space (-Os), cut down the default move ratio */
215 #define MOVE_RATIO (optimize_size ? 3 : 15)
219 /* This array records the insn_code of insns to perform block moves. */
220 enum insn_code movstr_optab[NUM_MACHINE_MODES];
222 /* This array records the insn_code of insns to perform block clears. */
223 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
225 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
227 #ifndef SLOW_UNALIGNED_ACCESS
228 #define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
231 /* Register mappings for target machines without register windows. */
232 #ifndef INCOMING_REGNO
233 #define INCOMING_REGNO(OUT) (OUT)
235 #ifndef OUTGOING_REGNO
236 #define OUTGOING_REGNO(IN) (IN)
239 /* This is run once per compilation to set up which modes can be used
240 directly in memory and to initialize the block move optab. */
246 enum machine_mode mode;
253 /* Since we are on the permanent obstack, we must be sure we save this
254 spot AFTER we call start_sequence, since it will reuse the rtl it
256 free_point = (char *) oballoc (0);
258 /* Try indexing by frame ptr and try by stack ptr.
259 It is known that on the Convex the stack ptr isn't a valid index.
260 With luck, one or the other is valid on any machine. */
261 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
262 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
264 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
265 pat = PATTERN (insn);
267 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
268 mode = (enum machine_mode) ((int) mode + 1))
273 direct_load[(int) mode] = direct_store[(int) mode] = 0;
274 PUT_MODE (mem, mode);
275 PUT_MODE (mem1, mode);
277 /* See if there is some register that can be used in this mode and
278 directly loaded or stored from memory. */
280 if (mode != VOIDmode && mode != BLKmode)
281 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
282 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
285 if (! HARD_REGNO_MODE_OK (regno, mode))
288 reg = gen_rtx_REG (mode, regno);
291 SET_DEST (pat) = reg;
292 if (recog (pat, insn, &num_clobbers) >= 0)
293 direct_load[(int) mode] = 1;
295 SET_SRC (pat) = mem1;
296 SET_DEST (pat) = reg;
297 if (recog (pat, insn, &num_clobbers) >= 0)
298 direct_load[(int) mode] = 1;
301 SET_DEST (pat) = mem;
302 if (recog (pat, insn, &num_clobbers) >= 0)
303 direct_store[(int) mode] = 1;
306 SET_DEST (pat) = mem1;
307 if (recog (pat, insn, &num_clobbers) >= 0)
308 direct_store[(int) mode] = 1;
312 /* Find out if CONSTANT_P_RTX is accepted. */
313 SET_DEST (pat) = gen_rtx_REG (TYPE_MODE (integer_type_node),
314 FIRST_PSEUDO_REGISTER);
315 SET_SRC (pat) = gen_rtx_CONSTANT_P_RTX (TYPE_MODE (integer_type_node),
317 if (recog (pat, insn, &num_clobbers) >= 0)
318 can_handle_constant_p = 1;
324 /* This is run at the start of compiling a function. */
331 pending_stack_adjust = 0;
332 inhibit_defer_pop = 0;
334 apply_args_value = 0;
338 /* Save all variables describing the current status into the structure *P.
339 This is used before starting a nested function. */
345 p->pending_chain = pending_chain;
346 p->pending_stack_adjust = pending_stack_adjust;
347 p->inhibit_defer_pop = inhibit_defer_pop;
348 p->saveregs_value = saveregs_value;
349 p->apply_args_value = apply_args_value;
350 p->forced_labels = forced_labels;
352 pending_chain = NULL_RTX;
353 pending_stack_adjust = 0;
354 inhibit_defer_pop = 0;
356 apply_args_value = 0;
360 /* Restore all variables describing the current status from the structure *P.
361 This is used after a nested function. */
364 restore_expr_status (p)
367 pending_chain = p->pending_chain;
368 pending_stack_adjust = p->pending_stack_adjust;
369 inhibit_defer_pop = p->inhibit_defer_pop;
370 saveregs_value = p->saveregs_value;
371 apply_args_value = p->apply_args_value;
372 forced_labels = p->forced_labels;
375 /* Manage the queue of increment instructions to be output
376 for POSTINCREMENT_EXPR expressions, etc. */
378 /* Queue up to increment (or change) VAR later. BODY says how:
379 BODY should be the same thing you would pass to emit_insn
380 to increment right away. It will go to emit_insn later on.
382 The value is a QUEUED expression to be used in place of VAR
383 where you want to guarantee the pre-incrementation value of VAR. */
386 enqueue_insn (var, body)
389 pending_chain = gen_rtx_QUEUED (GET_MODE (var),
390 var, NULL_RTX, NULL_RTX, body,
392 return pending_chain;
395 /* Use protect_from_queue to convert a QUEUED expression
396 into something that you can put immediately into an instruction.
397 If the queued incrementation has not happened yet,
398 protect_from_queue returns the variable itself.
399 If the incrementation has happened, protect_from_queue returns a temp
400 that contains a copy of the old value of the variable.
402 Any time an rtx which might possibly be a QUEUED is to be put
403 into an instruction, it must be passed through protect_from_queue first.
404 QUEUED expressions are not meaningful in instructions.
406 Do not pass a value through protect_from_queue and then hold
407 on to it for a while before putting it in an instruction!
408 If the queue is flushed in between, incorrect code will result. */
411 protect_from_queue (x, modify)
415 register RTX_CODE code = GET_CODE (x);
417 #if 0 /* A QUEUED can hang around after the queue is forced out. */
418 /* Shortcut for most common case. */
419 if (pending_chain == 0)
425 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
426 use of autoincrement. Make a copy of the contents of the memory
427 location rather than a copy of the address, but not if the value is
428 of mode BLKmode. Don't modify X in place since it might be
430 if (code == MEM && GET_MODE (x) != BLKmode
431 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
433 register rtx y = XEXP (x, 0);
434 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
436 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
437 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
438 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
439 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
443 register rtx temp = gen_reg_rtx (GET_MODE (new));
444 emit_insn_before (gen_move_insn (temp, new),
450 /* Otherwise, recursively protect the subexpressions of all
451 the kinds of rtx's that can contain a QUEUED. */
454 rtx tem = protect_from_queue (XEXP (x, 0), 0);
455 if (tem != XEXP (x, 0))
461 else if (code == PLUS || code == MULT)
463 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
464 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
465 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
474 /* If the increment has not happened, use the variable itself. */
475 if (QUEUED_INSN (x) == 0)
476 return QUEUED_VAR (x);
477 /* If the increment has happened and a pre-increment copy exists,
479 if (QUEUED_COPY (x) != 0)
480 return QUEUED_COPY (x);
481 /* The increment has happened but we haven't set up a pre-increment copy.
482 Set one up now, and use it. */
483 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
484 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
486 return QUEUED_COPY (x);
489 /* Return nonzero if X contains a QUEUED expression:
490 if it contains anything that will be altered by a queued increment.
491 We handle only combinations of MEM, PLUS, MINUS and MULT operators
492 since memory addresses generally contain only those. */
498 register enum rtx_code code = GET_CODE (x);
504 return queued_subexp_p (XEXP (x, 0));
508 return (queued_subexp_p (XEXP (x, 0))
509 || queued_subexp_p (XEXP (x, 1)));
515 /* Perform all the pending incrementations. */
521 while ((p = pending_chain))
523 rtx body = QUEUED_BODY (p);
525 if (GET_CODE (body) == SEQUENCE)
527 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
528 emit_insn (QUEUED_BODY (p));
531 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
532 pending_chain = QUEUED_NEXT (p);
543 /* Copy data from FROM to TO, where the machine modes are not the same.
544 Both modes may be integer, or both may be floating.
545 UNSIGNEDP should be nonzero if FROM is an unsigned type.
546 This causes zero-extension instead of sign-extension. */
549 convert_move (to, from, unsignedp)
550 register rtx to, from;
553 enum machine_mode to_mode = GET_MODE (to);
554 enum machine_mode from_mode = GET_MODE (from);
555 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
556 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
560 /* rtx code for making an equivalent value. */
561 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
563 to = protect_from_queue (to, 1);
564 from = protect_from_queue (from, 0);
566 if (to_real != from_real)
569 /* If FROM is a SUBREG that indicates that we have already done at least
570 the required extension, strip it. We don't handle such SUBREGs as
573 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
574 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
575 >= GET_MODE_SIZE (to_mode))
576 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
577 from = gen_lowpart (to_mode, from), from_mode = to_mode;
579 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
582 if (to_mode == from_mode
583 || (from_mode == VOIDmode && CONSTANT_P (from)))
585 emit_move_insn (to, from);
593 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
595 /* Try converting directly if the insn is supported. */
596 if ((code = can_extend_p (to_mode, from_mode, 0))
599 emit_unop_insn (code, to, from, UNKNOWN);
604 #ifdef HAVE_trunchfqf2
605 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
607 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
611 #ifdef HAVE_trunctqfqf2
612 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
614 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
618 #ifdef HAVE_truncsfqf2
619 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
621 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
625 #ifdef HAVE_truncdfqf2
626 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
628 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
632 #ifdef HAVE_truncxfqf2
633 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
635 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
639 #ifdef HAVE_trunctfqf2
640 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
642 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
647 #ifdef HAVE_trunctqfhf2
648 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
650 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
654 #ifdef HAVE_truncsfhf2
655 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
657 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
661 #ifdef HAVE_truncdfhf2
662 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
664 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
668 #ifdef HAVE_truncxfhf2
669 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
671 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
675 #ifdef HAVE_trunctfhf2
676 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
678 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
683 #ifdef HAVE_truncsftqf2
684 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
686 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
690 #ifdef HAVE_truncdftqf2
691 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
693 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
697 #ifdef HAVE_truncxftqf2
698 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
700 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
704 #ifdef HAVE_trunctftqf2
705 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
707 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
712 #ifdef HAVE_truncdfsf2
713 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
715 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
719 #ifdef HAVE_truncxfsf2
720 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
722 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
726 #ifdef HAVE_trunctfsf2
727 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
729 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
733 #ifdef HAVE_truncxfdf2
734 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
736 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
740 #ifdef HAVE_trunctfdf2
741 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
743 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
755 libcall = extendsfdf2_libfunc;
759 libcall = extendsfxf2_libfunc;
763 libcall = extendsftf2_libfunc;
775 libcall = truncdfsf2_libfunc;
779 libcall = extenddfxf2_libfunc;
783 libcall = extenddftf2_libfunc;
795 libcall = truncxfsf2_libfunc;
799 libcall = truncxfdf2_libfunc;
811 libcall = trunctfsf2_libfunc;
815 libcall = trunctfdf2_libfunc;
827 if (libcall == (rtx) 0)
828 /* This conversion is not implemented yet. */
831 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
833 emit_move_insn (to, value);
837 /* Now both modes are integers. */
839 /* Handle expanding beyond a word. */
840 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
841 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
848 enum machine_mode lowpart_mode;
849 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
851 /* Try converting directly if the insn is supported. */
852 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
855 /* If FROM is a SUBREG, put it into a register. Do this
856 so that we always generate the same set of insns for
857 better cse'ing; if an intermediate assignment occurred,
858 we won't be doing the operation directly on the SUBREG. */
859 if (optimize > 0 && GET_CODE (from) == SUBREG)
860 from = force_reg (from_mode, from);
861 emit_unop_insn (code, to, from, equiv_code);
864 /* Next, try converting via full word. */
865 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
866 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
867 != CODE_FOR_nothing))
869 if (GET_CODE (to) == REG)
870 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
871 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
872 emit_unop_insn (code, to,
873 gen_lowpart (word_mode, to), equiv_code);
877 /* No special multiword conversion insn; do it by hand. */
880 /* Since we will turn this into a no conflict block, we must ensure
881 that the source does not overlap the target. */
883 if (reg_overlap_mentioned_p (to, from))
884 from = force_reg (from_mode, from);
886 /* Get a copy of FROM widened to a word, if necessary. */
887 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
888 lowpart_mode = word_mode;
890 lowpart_mode = from_mode;
892 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
894 lowpart = gen_lowpart (lowpart_mode, to);
895 emit_move_insn (lowpart, lowfrom);
897 /* Compute the value to put in each remaining word. */
899 fill_value = const0_rtx;
904 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
905 && STORE_FLAG_VALUE == -1)
907 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
909 fill_value = gen_reg_rtx (word_mode);
910 emit_insn (gen_slt (fill_value));
916 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
917 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
919 fill_value = convert_to_mode (word_mode, fill_value, 1);
923 /* Fill the remaining words. */
924 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
926 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
927 rtx subword = operand_subword (to, index, 1, to_mode);
932 if (fill_value != subword)
933 emit_move_insn (subword, fill_value);
936 insns = get_insns ();
939 emit_no_conflict_block (insns, to, from, NULL_RTX,
940 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
944 /* Truncating multi-word to a word or less. */
945 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
946 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
948 if (!((GET_CODE (from) == MEM
949 && ! MEM_VOLATILE_P (from)
950 && direct_load[(int) to_mode]
951 && ! mode_dependent_address_p (XEXP (from, 0)))
952 || GET_CODE (from) == REG
953 || GET_CODE (from) == SUBREG))
954 from = force_reg (from_mode, from);
955 convert_move (to, gen_lowpart (word_mode, from), 0);
959 /* Handle pointer conversion */ /* SPEE 900220 */
960 if (to_mode == PQImode)
962 if (from_mode != QImode)
963 from = convert_to_mode (QImode, from, unsignedp);
965 #ifdef HAVE_truncqipqi2
966 if (HAVE_truncqipqi2)
968 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
971 #endif /* HAVE_truncqipqi2 */
975 if (from_mode == PQImode)
977 if (to_mode != QImode)
979 from = convert_to_mode (QImode, from, unsignedp);
984 #ifdef HAVE_extendpqiqi2
985 if (HAVE_extendpqiqi2)
987 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
990 #endif /* HAVE_extendpqiqi2 */
995 if (to_mode == PSImode)
997 if (from_mode != SImode)
998 from = convert_to_mode (SImode, from, unsignedp);
1000 #ifdef HAVE_truncsipsi2
1001 if (HAVE_truncsipsi2)
1003 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
1006 #endif /* HAVE_truncsipsi2 */
1010 if (from_mode == PSImode)
1012 if (to_mode != SImode)
1014 from = convert_to_mode (SImode, from, unsignedp);
1019 #ifdef HAVE_extendpsisi2
1020 if (HAVE_extendpsisi2)
1022 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1025 #endif /* HAVE_extendpsisi2 */
1030 if (to_mode == PDImode)
1032 if (from_mode != DImode)
1033 from = convert_to_mode (DImode, from, unsignedp);
1035 #ifdef HAVE_truncdipdi2
1036 if (HAVE_truncdipdi2)
1038 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1041 #endif /* HAVE_truncdipdi2 */
1045 if (from_mode == PDImode)
1047 if (to_mode != DImode)
1049 from = convert_to_mode (DImode, from, unsignedp);
1054 #ifdef HAVE_extendpdidi2
1055 if (HAVE_extendpdidi2)
1057 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1060 #endif /* HAVE_extendpdidi2 */
1065 /* Now follow all the conversions between integers
1066 no more than a word long. */
1068 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1069 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1070 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1071 GET_MODE_BITSIZE (from_mode)))
1073 if (!((GET_CODE (from) == MEM
1074 && ! MEM_VOLATILE_P (from)
1075 && direct_load[(int) to_mode]
1076 && ! mode_dependent_address_p (XEXP (from, 0)))
1077 || GET_CODE (from) == REG
1078 || GET_CODE (from) == SUBREG))
1079 from = force_reg (from_mode, from);
1080 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1081 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1082 from = copy_to_reg (from);
1083 emit_move_insn (to, gen_lowpart (to_mode, from));
1087 /* Handle extension. */
1088 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1090 /* Convert directly if that works. */
1091 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1092 != CODE_FOR_nothing)
1094 emit_unop_insn (code, to, from, equiv_code);
1099 enum machine_mode intermediate;
1103 /* Search for a mode to convert via. */
1104 for (intermediate = from_mode; intermediate != VOIDmode;
1105 intermediate = GET_MODE_WIDER_MODE (intermediate))
1106 if (((can_extend_p (to_mode, intermediate, unsignedp)
1107 != CODE_FOR_nothing)
1108 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1109 && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
1110 && (can_extend_p (intermediate, from_mode, unsignedp)
1111 != CODE_FOR_nothing))
1113 convert_move (to, convert_to_mode (intermediate, from,
1114 unsignedp), unsignedp);
1118 /* No suitable intermediate mode.
1119 Generate what we need with shifts. */
1120 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1121 - GET_MODE_BITSIZE (from_mode), 0);
1122 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1123 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1125 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1128 emit_move_insn (to, tmp);
1133 /* Support special truncate insns for certain modes. */
1135 if (from_mode == DImode && to_mode == SImode)
1137 #ifdef HAVE_truncdisi2
1138 if (HAVE_truncdisi2)
1140 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1144 convert_move (to, force_reg (from_mode, from), unsignedp);
1148 if (from_mode == DImode && to_mode == HImode)
1150 #ifdef HAVE_truncdihi2
1151 if (HAVE_truncdihi2)
1153 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1157 convert_move (to, force_reg (from_mode, from), unsignedp);
1161 if (from_mode == DImode && to_mode == QImode)
1163 #ifdef HAVE_truncdiqi2
1164 if (HAVE_truncdiqi2)
1166 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1170 convert_move (to, force_reg (from_mode, from), unsignedp);
1174 if (from_mode == SImode && to_mode == HImode)
1176 #ifdef HAVE_truncsihi2
1177 if (HAVE_truncsihi2)
1179 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1183 convert_move (to, force_reg (from_mode, from), unsignedp);
1187 if (from_mode == SImode && to_mode == QImode)
1189 #ifdef HAVE_truncsiqi2
1190 if (HAVE_truncsiqi2)
1192 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1196 convert_move (to, force_reg (from_mode, from), unsignedp);
1200 if (from_mode == HImode && to_mode == QImode)
1202 #ifdef HAVE_trunchiqi2
1203 if (HAVE_trunchiqi2)
1205 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1209 convert_move (to, force_reg (from_mode, from), unsignedp);
1213 if (from_mode == TImode && to_mode == DImode)
1215 #ifdef HAVE_trunctidi2
1216 if (HAVE_trunctidi2)
1218 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1222 convert_move (to, force_reg (from_mode, from), unsignedp);
1226 if (from_mode == TImode && to_mode == SImode)
1228 #ifdef HAVE_trunctisi2
1229 if (HAVE_trunctisi2)
1231 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1235 convert_move (to, force_reg (from_mode, from), unsignedp);
1239 if (from_mode == TImode && to_mode == HImode)
1241 #ifdef HAVE_trunctihi2
1242 if (HAVE_trunctihi2)
1244 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1248 convert_move (to, force_reg (from_mode, from), unsignedp);
1252 if (from_mode == TImode && to_mode == QImode)
1254 #ifdef HAVE_trunctiqi2
1255 if (HAVE_trunctiqi2)
1257 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1261 convert_move (to, force_reg (from_mode, from), unsignedp);
1265 /* Handle truncation of volatile memrefs, and so on;
1266 the things that couldn't be truncated directly,
1267 and for which there was no special instruction. */
1268 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1270 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1271 emit_move_insn (to, temp);
1275 /* Mode combination is not recognized. */
1279 /* Return an rtx for a value that would result
1280 from converting X to mode MODE.
1281 Both X and MODE may be floating, or both integer.
1282 UNSIGNEDP is nonzero if X is an unsigned value.
1283 This can be done by referring to a part of X in place
1284 or by copying to a new temporary with conversion.
1286 This function *must not* call protect_from_queue
1287 except when putting X into an insn (in which case convert_move does it). */
1290 convert_to_mode (mode, x, unsignedp)
1291 enum machine_mode mode;
1295 return convert_modes (mode, VOIDmode, x, unsignedp);
1298 /* Return an rtx for a value that would result
1299 from converting X from mode OLDMODE to mode MODE.
1300 Both modes may be floating, or both integer.
1301 UNSIGNEDP is nonzero if X is an unsigned value.
1303 This can be done by referring to a part of X in place
1304 or by copying to a new temporary with conversion.
1306 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1308 This function *must not* call protect_from_queue
1309 except when putting X into an insn (in which case convert_move does it). */
1312 convert_modes (mode, oldmode, x, unsignedp)
1313 enum machine_mode mode, oldmode;
1319 /* If FROM is a SUBREG that indicates that we have already done at least
1320 the required extension, strip it. */
1322 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1323 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1324 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1325 x = gen_lowpart (mode, x);
1327 if (GET_MODE (x) != VOIDmode)
1328 oldmode = GET_MODE (x);
1330 if (mode == oldmode)
1333 /* There is one case that we must handle specially: If we are converting
1334 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1335 we are to interpret the constant as unsigned, gen_lowpart will do
1336 the wrong if the constant appears negative. What we want to do is
1337 make the high-order word of the constant zero, not all ones. */
1339 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1340 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1341 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1343 HOST_WIDE_INT val = INTVAL (x);
1345 if (oldmode != VOIDmode
1346 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1348 int width = GET_MODE_BITSIZE (oldmode);
1350 /* We need to zero extend VAL. */
1351 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1354 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1357 /* We can do this with a gen_lowpart if both desired and current modes
1358 are integer, and this is either a constant integer, a register, or a
1359 non-volatile MEM. Except for the constant case where MODE is no
1360 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1362 if ((GET_CODE (x) == CONST_INT
1363 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1364 || (GET_MODE_CLASS (mode) == MODE_INT
1365 && GET_MODE_CLASS (oldmode) == MODE_INT
1366 && (GET_CODE (x) == CONST_DOUBLE
1367 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1368 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1369 && direct_load[(int) mode])
1370 || (GET_CODE (x) == REG
1371 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1372 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1374 /* ?? If we don't know OLDMODE, we have to assume here that
1375 X does not need sign- or zero-extension. This may not be
1376 the case, but it's the best we can do. */
1377 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1378 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1380 HOST_WIDE_INT val = INTVAL (x);
1381 int width = GET_MODE_BITSIZE (oldmode);
1383 /* We must sign or zero-extend in this case. Start by
1384 zero-extending, then sign extend if we need to. */
1385 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1387 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1388 val |= (HOST_WIDE_INT) (-1) << width;
1390 return GEN_INT (val);
1393 return gen_lowpart (mode, x);
1396 temp = gen_reg_rtx (mode);
1397 convert_move (temp, x, unsignedp);
1401 /* Generate several move instructions to copy LEN bytes
1402 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1403 The caller must pass FROM and TO
1404 through protect_from_queue before calling.
1405 ALIGN (in bytes) is maximum alignment we can assume. */
1408 move_by_pieces (to, from, len, align)
1412 struct move_by_pieces data;
1413 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1414 int max_size = MOVE_MAX + 1;
1417 data.to_addr = to_addr;
1418 data.from_addr = from_addr;
1422 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1423 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1425 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1426 || GET_CODE (from_addr) == POST_INC
1427 || GET_CODE (from_addr) == POST_DEC);
1429 data.explicit_inc_from = 0;
1430 data.explicit_inc_to = 0;
1432 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1433 if (data.reverse) data.offset = len;
1436 data.to_struct = MEM_IN_STRUCT_P (to);
1437 data.from_struct = MEM_IN_STRUCT_P (from);
1439 /* If copying requires more than two move insns,
1440 copy addresses to registers (to make displacements shorter)
1441 and use post-increment if available. */
1442 if (!(data.autinc_from && data.autinc_to)
1443 && move_by_pieces_ninsns (len, align) > 2)
1445 #ifdef HAVE_PRE_DECREMENT
1446 if (data.reverse && ! data.autinc_from)
1448 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1449 data.autinc_from = 1;
1450 data.explicit_inc_from = -1;
1453 #ifdef HAVE_POST_INCREMENT
1454 if (! data.autinc_from)
1456 data.from_addr = copy_addr_to_reg (from_addr);
1457 data.autinc_from = 1;
1458 data.explicit_inc_from = 1;
1461 if (!data.autinc_from && CONSTANT_P (from_addr))
1462 data.from_addr = copy_addr_to_reg (from_addr);
1463 #ifdef HAVE_PRE_DECREMENT
1464 if (data.reverse && ! data.autinc_to)
1466 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1468 data.explicit_inc_to = -1;
1471 #ifdef HAVE_POST_INCREMENT
1472 if (! data.reverse && ! data.autinc_to)
1474 data.to_addr = copy_addr_to_reg (to_addr);
1476 data.explicit_inc_to = 1;
1479 if (!data.autinc_to && CONSTANT_P (to_addr))
1480 data.to_addr = copy_addr_to_reg (to_addr);
1483 if (! SLOW_UNALIGNED_ACCESS
1484 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1487 /* First move what we can in the largest integer mode, then go to
1488 successively smaller modes. */
1490 while (max_size > 1)
1492 enum machine_mode mode = VOIDmode, tmode;
1493 enum insn_code icode;
1495 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1496 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1497 if (GET_MODE_SIZE (tmode) < max_size)
1500 if (mode == VOIDmode)
1503 icode = mov_optab->handlers[(int) mode].insn_code;
1504 if (icode != CODE_FOR_nothing
1505 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1506 GET_MODE_SIZE (mode)))
1507 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1509 max_size = GET_MODE_SIZE (mode);
1512 /* The code above should have handled everything. */
1517 /* Return number of insns required to move L bytes by pieces.
1518 ALIGN (in bytes) is maximum alignment we can assume. */
1521 move_by_pieces_ninsns (l, align)
1525 register int n_insns = 0;
1526 int max_size = MOVE_MAX + 1;
1528 if (! SLOW_UNALIGNED_ACCESS
1529 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1532 while (max_size > 1)
1534 enum machine_mode mode = VOIDmode, tmode;
1535 enum insn_code icode;
1537 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1538 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1539 if (GET_MODE_SIZE (tmode) < max_size)
1542 if (mode == VOIDmode)
1545 icode = mov_optab->handlers[(int) mode].insn_code;
1546 if (icode != CODE_FOR_nothing
1547 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1548 GET_MODE_SIZE (mode)))
1549 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1551 max_size = GET_MODE_SIZE (mode);
1557 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1558 with move instructions for mode MODE. GENFUN is the gen_... function
1559 to make a move insn for that mode. DATA has all the other info. */
1562 move_by_pieces_1 (genfun, mode, data)
1563 rtx (*genfun) PROTO ((rtx, ...));
1564 enum machine_mode mode;
1565 struct move_by_pieces *data;
1567 register int size = GET_MODE_SIZE (mode);
1568 register rtx to1, from1;
1570 while (data->len >= size)
1572 if (data->reverse) data->offset -= size;
1574 to1 = (data->autinc_to
1575 ? gen_rtx_MEM (mode, data->to_addr)
1576 : copy_rtx (change_address (data->to, mode,
1577 plus_constant (data->to_addr,
1579 MEM_IN_STRUCT_P (to1) = data->to_struct;
1582 = (data->autinc_from
1583 ? gen_rtx_MEM (mode, data->from_addr)
1584 : copy_rtx (change_address (data->from, mode,
1585 plus_constant (data->from_addr,
1587 MEM_IN_STRUCT_P (from1) = data->from_struct;
1589 #ifdef HAVE_PRE_DECREMENT
1590 if (data->explicit_inc_to < 0)
1591 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1592 if (data->explicit_inc_from < 0)
1593 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1596 emit_insn ((*genfun) (to1, from1));
1597 #ifdef HAVE_POST_INCREMENT
1598 if (data->explicit_inc_to > 0)
1599 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1600 if (data->explicit_inc_from > 0)
1601 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1604 if (! data->reverse) data->offset += size;
1610 /* Emit code to move a block Y to a block X.
1611 This may be done with string-move instructions,
1612 with multiple scalar move instructions, or with a library call.
1614 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1616 SIZE is an rtx that says how long they are.
1617 ALIGN is the maximum alignment we can assume they have,
1620 Return the address of the new block, if memcpy is called and returns it,
1624 emit_block_move (x, y, size, align)
1630 #ifdef TARGET_MEM_FUNCTIONS
1632 tree call_expr, arg_list;
1635 if (GET_MODE (x) != BLKmode)
1638 if (GET_MODE (y) != BLKmode)
1641 x = protect_from_queue (x, 1);
1642 y = protect_from_queue (y, 0);
1643 size = protect_from_queue (size, 0);
1645 if (GET_CODE (x) != MEM)
1647 if (GET_CODE (y) != MEM)
1652 if (GET_CODE (size) == CONST_INT
1653 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1654 move_by_pieces (x, y, INTVAL (size), align);
1657 /* Try the most limited insn first, because there's no point
1658 including more than one in the machine description unless
1659 the more limited one has some advantage. */
1661 rtx opalign = GEN_INT (align);
1662 enum machine_mode mode;
1664 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1665 mode = GET_MODE_WIDER_MODE (mode))
1667 enum insn_code code = movstr_optab[(int) mode];
1669 if (code != CODE_FOR_nothing
1670 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1671 here because if SIZE is less than the mode mask, as it is
1672 returned by the macro, it will definitely be less than the
1673 actual mode mask. */
1674 && ((GET_CODE (size) == CONST_INT
1675 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1676 <= (GET_MODE_MASK (mode) >> 1)))
1677 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1678 && (insn_operand_predicate[(int) code][0] == 0
1679 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1680 && (insn_operand_predicate[(int) code][1] == 0
1681 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1682 && (insn_operand_predicate[(int) code][3] == 0
1683 || (*insn_operand_predicate[(int) code][3]) (opalign,
1687 rtx last = get_last_insn ();
1690 op2 = convert_to_mode (mode, size, 1);
1691 if (insn_operand_predicate[(int) code][2] != 0
1692 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1693 op2 = copy_to_mode_reg (mode, op2);
1695 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1702 delete_insns_since (last);
1706 #ifdef TARGET_MEM_FUNCTIONS
1707 /* It is incorrect to use the libcall calling conventions to call
1708 memcpy in this context.
1710 This could be a user call to memcpy and the user may wish to
1711 examine the return value from memcpy.
1713 For targets where libcalls and normal calls have different conventions
1714 for returning pointers, we could end up generating incorrect code.
1716 So instead of using a libcall sequence we build up a suitable
1717 CALL_EXPR and expand the call in the normal fashion. */
1718 if (fn == NULL_TREE)
1722 /* This was copied from except.c, I don't know if all this is
1723 necessary in this context or not. */
1724 fn = get_identifier ("memcpy");
1725 push_obstacks_nochange ();
1726 end_temporary_allocation ();
1727 fntype = build_pointer_type (void_type_node);
1728 fntype = build_function_type (fntype, NULL_TREE);
1729 fn = build_decl (FUNCTION_DECL, fn, fntype);
1730 DECL_EXTERNAL (fn) = 1;
1731 TREE_PUBLIC (fn) = 1;
1732 DECL_ARTIFICIAL (fn) = 1;
1733 make_decl_rtl (fn, NULL_PTR, 1);
1734 assemble_external (fn);
1738 /* We need to make an argument list for the function call.
1740 memcpy has three arguments, the first two are void * addresses and
1741 the last is a size_t byte count for the copy. */
1743 = build_tree_list (NULL_TREE,
1744 make_tree (build_pointer_type (void_type_node),
1746 TREE_CHAIN (arg_list)
1747 = build_tree_list (NULL_TREE,
1748 make_tree (build_pointer_type (void_type_node),
1750 TREE_CHAIN (TREE_CHAIN (arg_list))
1751 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1752 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1754 /* Now we have to build up the CALL_EXPR itself. */
1755 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1756 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1757 call_expr, arg_list, NULL_TREE);
1758 TREE_SIDE_EFFECTS (call_expr) = 1;
1760 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1762 emit_library_call (bcopy_libfunc, 0,
1763 VOIDmode, 3, XEXP (y, 0), Pmode,
1765 convert_to_mode (TYPE_MODE (integer_type_node), size,
1766 TREE_UNSIGNED (integer_type_node)),
1767 TYPE_MODE (integer_type_node));
1774 /* Copy all or part of a value X into registers starting at REGNO.
1775 The number of registers to be filled is NREGS. */
1778 move_block_to_reg (regno, x, nregs, mode)
1782 enum machine_mode mode;
1785 #ifdef HAVE_load_multiple
1793 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1794 x = validize_mem (force_const_mem (mode, x));
1796 /* See if the machine can do this with a load multiple insn. */
1797 #ifdef HAVE_load_multiple
1798 if (HAVE_load_multiple)
1800 last = get_last_insn ();
1801 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1809 delete_insns_since (last);
1813 for (i = 0; i < nregs; i++)
1814 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1815 operand_subword_force (x, i, mode));
1818 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1819 The number of registers to be filled is NREGS. SIZE indicates the number
1820 of bytes in the object X. */
1824 move_block_from_reg (regno, x, nregs, size)
1831 #ifdef HAVE_store_multiple
1835 enum machine_mode mode;
1837 /* If SIZE is that of a mode no bigger than a word, just use that
1838 mode's store operation. */
1839 if (size <= UNITS_PER_WORD
1840 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1842 emit_move_insn (change_address (x, mode, NULL),
1843 gen_rtx_REG (mode, regno));
1847 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1848 to the left before storing to memory. Note that the previous test
1849 doesn't handle all cases (e.g. SIZE == 3). */
1850 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1852 rtx tem = operand_subword (x, 0, 1, BLKmode);
1858 shift = expand_shift (LSHIFT_EXPR, word_mode,
1859 gen_rtx_REG (word_mode, regno),
1860 build_int_2 ((UNITS_PER_WORD - size)
1861 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1862 emit_move_insn (tem, shift);
1866 /* See if the machine can do this with a store multiple insn. */
1867 #ifdef HAVE_store_multiple
1868 if (HAVE_store_multiple)
1870 last = get_last_insn ();
1871 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1879 delete_insns_since (last);
1883 for (i = 0; i < nregs; i++)
1885 rtx tem = operand_subword (x, i, 1, BLKmode);
1890 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1894 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1895 registers represented by a PARALLEL. SSIZE represents the total size of
1896 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1898 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatent assumption that
1899 the balance will be in what would be the low-order memory addresses, i.e.
1900 left justified for big endian, right justified for little endian. This
1901 happens to be true for the targets currently using this support. If this
1902 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1906 emit_group_load (dst, orig_src, ssize, align)
1913 if (GET_CODE (dst) != PARALLEL)
1916 /* Check for a NULL entry, used to indicate that the parameter goes
1917 both on the stack and in registers. */
1918 if (XEXP (XVECEXP (dst, 0, 0), 0))
1923 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
1925 /* If we won't be loading directly from memory, protect the real source
1926 from strange tricks we might play. */
1928 if (GET_CODE (src) != MEM)
1930 src = gen_reg_rtx (GET_MODE (orig_src));
1931 emit_move_insn (src, orig_src);
1934 /* Process the pieces. */
1935 for (i = start; i < XVECLEN (dst, 0); i++)
1937 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1938 int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1939 int bytelen = GET_MODE_SIZE (mode);
1942 /* Handle trailing fragments that run over the size of the struct. */
1943 if (ssize >= 0 && bytepos + bytelen > ssize)
1945 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1946 bytelen = ssize - bytepos;
1951 /* Optimize the access just a bit. */
1952 if (GET_CODE (src) == MEM
1953 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
1954 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1955 && bytelen == GET_MODE_SIZE (mode))
1957 tmps[i] = gen_reg_rtx (mode);
1958 emit_move_insn (tmps[i],
1959 change_address (src, mode,
1960 plus_constant (XEXP (src, 0),
1965 tmps[i] = extract_bit_field (src, bytelen*BITS_PER_UNIT,
1966 bytepos*BITS_PER_UNIT, 1, NULL_RTX,
1967 mode, mode, align, ssize);
1970 if (BYTES_BIG_ENDIAN && shift)
1972 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1973 tmps[i], 0, OPTAB_WIDEN);
1978 /* Copy the extracted pieces into the proper (probable) hard regs. */
1979 for (i = start; i < XVECLEN (dst, 0); i++)
1980 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1983 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1984 registers represented by a PARALLEL. SSIZE represents the total size of
1985 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
1988 emit_group_store (orig_dst, src, ssize, align)
1995 if (GET_CODE (src) != PARALLEL)
1998 /* Check for a NULL entry, used to indicate that the parameter goes
1999 both on the stack and in registers. */
2000 if (XEXP (XVECEXP (src, 0, 0), 0))
2005 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));
2007 /* Copy the (probable) hard regs into pseudos. */
2008 for (i = start; i < XVECLEN (src, 0); i++)
2010 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2011 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2012 emit_move_insn (tmps[i], reg);
2016 /* If we won't be storing directly into memory, protect the real destination
2017 from strange tricks we might play. */
2019 if (GET_CODE (dst) == PARALLEL)
2023 /* We can get a PARALLEL dst if there is a conditional expression in
2024 a return statement. In that case, the dst and src are the same,
2025 so no action is necessary. */
2026 if (rtx_equal_p (dst, src))
2029 /* It is unclear if we can ever reach here, but we may as well handle
2030 it. Allocate a temporary, and split this into a store/load to/from
2033 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2034 emit_group_store (temp, src, ssize, align);
2035 emit_group_load (dst, temp, ssize, align);
2038 else if (GET_CODE (dst) != MEM)
2040 dst = gen_reg_rtx (GET_MODE (orig_dst));
2041 /* Make life a bit easier for combine. */
2042 emit_move_insn (dst, const0_rtx);
2044 else if (! MEM_IN_STRUCT_P (dst))
2046 /* store_bit_field requires that memory operations have
2047 mem_in_struct_p set; we might not. */
2049 dst = copy_rtx (orig_dst);
2050 MEM_IN_STRUCT_P (dst) = 1;
2053 /* Process the pieces. */
2054 for (i = start; i < XVECLEN (src, 0); i++)
2056 int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2057 enum machine_mode mode = GET_MODE (tmps[i]);
2058 int bytelen = GET_MODE_SIZE (mode);
2060 /* Handle trailing fragments that run over the size of the struct. */
2061 if (ssize >= 0 && bytepos + bytelen > ssize)
2063 if (BYTES_BIG_ENDIAN)
2065 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2066 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2067 tmps[i], 0, OPTAB_WIDEN);
2069 bytelen = ssize - bytepos;
2072 /* Optimize the access just a bit. */
2073 if (GET_CODE (dst) == MEM
2074 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2075 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2076 && bytelen == GET_MODE_SIZE (mode))
2078 emit_move_insn (change_address (dst, mode,
2079 plus_constant (XEXP (dst, 0),
2085 store_bit_field (dst, bytelen*BITS_PER_UNIT, bytepos*BITS_PER_UNIT,
2086 mode, tmps[i], align, ssize);
2091 /* Copy from the pseudo into the (probable) hard reg. */
2092 if (GET_CODE (dst) == REG)
2093 emit_move_insn (orig_dst, dst);
2096 /* Generate code to copy a BLKmode object of TYPE out of a
2097 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2098 is null, a stack temporary is created. TGTBLK is returned.
2100 The primary purpose of this routine is to handle functions
2101 that return BLKmode structures in registers. Some machines
2102 (the PA for example) want to return all small structures
2103 in registers regardless of the structure's alignment.
2107 copy_blkmode_from_reg(tgtblk,srcreg,type)
2112 int bytes = int_size_in_bytes (type);
2113 rtx src = NULL, dst = NULL;
2114 int bitsize = MIN (TYPE_ALIGN (type), (unsigned int) BITS_PER_WORD);
2115 int bitpos, xbitpos, big_endian_correction = 0;
2119 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2120 MEM_IN_STRUCT_P (tgtblk) = AGGREGATE_TYPE_P (type);
2121 preserve_temp_slots (tgtblk);
2124 /* This code assumes srcreg is at least a full word. If it isn't,
2125 copy it into a new pseudo which is a full word. */
2126 if (GET_MODE (srcreg) != BLKmode
2127 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2128 srcreg = convert_to_mode (word_mode, srcreg,
2129 TREE_UNSIGNED (type));
2131 /* Structures whose size is not a multiple of a word are aligned
2132 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2133 machine, this means we must skip the empty high order bytes when
2134 calculating the bit offset. */
2135 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2136 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2139 /* Copy the structure BITSIZE bites at a time.
2141 We could probably emit more efficient code for machines
2142 which do not use strict alignment, but it doesn't seem
2143 worth the effort at the current time. */
2144 for (bitpos = 0, xbitpos = big_endian_correction;
2145 bitpos < bytes * BITS_PER_UNIT;
2146 bitpos += bitsize, xbitpos += bitsize)
2149 /* We need a new source operand each time xbitpos is on a
2150 word boundary and when xbitpos == big_endian_correction
2151 (the first time through). */
2152 if (xbitpos % BITS_PER_WORD == 0
2153 || xbitpos == big_endian_correction)
2154 src = operand_subword_force (srcreg,
2155 xbitpos / BITS_PER_WORD,
2158 /* We need a new destination operand each time bitpos is on
2160 if (bitpos % BITS_PER_WORD == 0)
2161 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2163 /* Use xbitpos for the source extraction (right justified) and
2164 xbitpos for the destination store (left justified). */
2165 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2166 extract_bit_field (src, bitsize,
2167 xbitpos % BITS_PER_WORD, 1,
2168 NULL_RTX, word_mode,
2170 bitsize / BITS_PER_UNIT,
2172 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2178 /* Add a USE expression for REG to the (possibly empty) list pointed
2179 to by CALL_FUSAGE. REG must denote a hard register. */
2182 use_reg (call_fusage, reg)
2183 rtx *call_fusage, reg;
2185 if (GET_CODE (reg) != REG
2186 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2190 = gen_rtx_EXPR_LIST (VOIDmode,
2191 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2194 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2195 starting at REGNO. All of these registers must be hard registers. */
2198 use_regs (call_fusage, regno, nregs)
2205 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2208 for (i = 0; i < nregs; i++)
2209 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2212 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2213 PARALLEL REGS. This is for calls that pass values in multiple
2214 non-contiguous locations. The Irix 6 ABI has examples of this. */
2217 use_group_regs (call_fusage, regs)
2223 for (i = 0; i < XVECLEN (regs, 0); i++)
2225 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2227 /* A NULL entry means the parameter goes both on the stack and in
2228 registers. This can also be a MEM for targets that pass values
2229 partially on the stack and partially in registers. */
2230 if (reg != 0 && GET_CODE (reg) == REG)
2231 use_reg (call_fusage, reg);
2235 /* Generate several move instructions to clear LEN bytes of block TO.
2236 (A MEM rtx with BLKmode). The caller must pass TO through
2237 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
2241 clear_by_pieces (to, len, align)
2245 struct clear_by_pieces data;
2246 rtx to_addr = XEXP (to, 0);
2247 int max_size = MOVE_MAX + 1;
2250 data.to_addr = to_addr;
2253 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2254 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2256 data.explicit_inc_to = 0;
2258 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2259 if (data.reverse) data.offset = len;
2262 data.to_struct = MEM_IN_STRUCT_P (to);
2264 /* If copying requires more than two move insns,
2265 copy addresses to registers (to make displacements shorter)
2266 and use post-increment if available. */
2268 && move_by_pieces_ninsns (len, align) > 2)
2270 #ifdef HAVE_PRE_DECREMENT
2271 if (data.reverse && ! data.autinc_to)
2273 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2275 data.explicit_inc_to = -1;
2278 #ifdef HAVE_POST_INCREMENT
2279 if (! data.reverse && ! data.autinc_to)
2281 data.to_addr = copy_addr_to_reg (to_addr);
2283 data.explicit_inc_to = 1;
2286 if (!data.autinc_to && CONSTANT_P (to_addr))
2287 data.to_addr = copy_addr_to_reg (to_addr);
2290 if (! SLOW_UNALIGNED_ACCESS
2291 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2294 /* First move what we can in the largest integer mode, then go to
2295 successively smaller modes. */
2297 while (max_size > 1)
2299 enum machine_mode mode = VOIDmode, tmode;
2300 enum insn_code icode;
2302 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2303 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2304 if (GET_MODE_SIZE (tmode) < max_size)
2307 if (mode == VOIDmode)
2310 icode = mov_optab->handlers[(int) mode].insn_code;
2311 if (icode != CODE_FOR_nothing
2312 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2313 GET_MODE_SIZE (mode)))
2314 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2316 max_size = GET_MODE_SIZE (mode);
2319 /* The code above should have handled everything. */
2324 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2325 with move instructions for mode MODE. GENFUN is the gen_... function
2326 to make a move insn for that mode. DATA has all the other info. */
2329 clear_by_pieces_1 (genfun, mode, data)
2330 rtx (*genfun) PROTO ((rtx, ...));
2331 enum machine_mode mode;
2332 struct clear_by_pieces *data;
2334 register int size = GET_MODE_SIZE (mode);
2337 while (data->len >= size)
2339 if (data->reverse) data->offset -= size;
2341 to1 = (data->autinc_to
2342 ? gen_rtx_MEM (mode, data->to_addr)
2343 : copy_rtx (change_address (data->to, mode,
2344 plus_constant (data->to_addr,
2346 MEM_IN_STRUCT_P (to1) = data->to_struct;
2348 #ifdef HAVE_PRE_DECREMENT
2349 if (data->explicit_inc_to < 0)
2350 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2353 emit_insn ((*genfun) (to1, const0_rtx));
2354 #ifdef HAVE_POST_INCREMENT
2355 if (data->explicit_inc_to > 0)
2356 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2359 if (! data->reverse) data->offset += size;
2365 /* Write zeros through the storage of OBJECT.
2366 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2367 the maximum alignment we can is has, measured in bytes.
2369 If we call a function that returns the length of the block, return it. */
2372 clear_storage (object, size, align)
2377 #ifdef TARGET_MEM_FUNCTIONS
2379 tree call_expr, arg_list;
2383 if (GET_MODE (object) == BLKmode)
2385 object = protect_from_queue (object, 1);
2386 size = protect_from_queue (size, 0);
2388 if (GET_CODE (size) == CONST_INT
2389 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
2390 clear_by_pieces (object, INTVAL (size), align);
2394 /* Try the most limited insn first, because there's no point
2395 including more than one in the machine description unless
2396 the more limited one has some advantage. */
2398 rtx opalign = GEN_INT (align);
2399 enum machine_mode mode;
2401 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2402 mode = GET_MODE_WIDER_MODE (mode))
2404 enum insn_code code = clrstr_optab[(int) mode];
2406 if (code != CODE_FOR_nothing
2407 /* We don't need MODE to be narrower than
2408 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2409 the mode mask, as it is returned by the macro, it will
2410 definitely be less than the actual mode mask. */
2411 && ((GET_CODE (size) == CONST_INT
2412 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2413 <= (GET_MODE_MASK (mode) >> 1)))
2414 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2415 && (insn_operand_predicate[(int) code][0] == 0
2416 || (*insn_operand_predicate[(int) code][0]) (object,
2418 && (insn_operand_predicate[(int) code][2] == 0
2419 || (*insn_operand_predicate[(int) code][2]) (opalign,
2423 rtx last = get_last_insn ();
2426 op1 = convert_to_mode (mode, size, 1);
2427 if (insn_operand_predicate[(int) code][1] != 0
2428 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2430 op1 = copy_to_mode_reg (mode, op1);
2432 pat = GEN_FCN ((int) code) (object, op1, opalign);
2439 delete_insns_since (last);
2444 #ifdef TARGET_MEM_FUNCTIONS
2445 /* It is incorrect to use the libcall calling conventions to call
2446 memset in this context.
2448 This could be a user call to memset and the user may wish to
2449 examine the return value from memset.
2451 For targets where libcalls and normal calls have different conventions
2452 for returning pointers, we could end up generating incorrect code.
2454 So instead of using a libcall sequence we build up a suitable
2455 CALL_EXPR and expand the call in the normal fashion. */
2456 if (fn == NULL_TREE)
2460 /* This was copied from except.c, I don't know if all this is
2461 necessary in this context or not. */
2462 fn = get_identifier ("memset");
2463 push_obstacks_nochange ();
2464 end_temporary_allocation ();
2465 fntype = build_pointer_type (void_type_node);
2466 fntype = build_function_type (fntype, NULL_TREE);
2467 fn = build_decl (FUNCTION_DECL, fn, fntype);
2468 DECL_EXTERNAL (fn) = 1;
2469 TREE_PUBLIC (fn) = 1;
2470 DECL_ARTIFICIAL (fn) = 1;
2471 make_decl_rtl (fn, NULL_PTR, 1);
2472 assemble_external (fn);
2476 /* We need to make an argument list for the function call.
2478 memset has three arguments, the first is a void * addresses, the
2479 second a integer with the initialization value, the last is a size_t
2480 byte count for the copy. */
2482 = build_tree_list (NULL_TREE,
2483 make_tree (build_pointer_type (void_type_node),
2485 TREE_CHAIN (arg_list)
2486 = build_tree_list (NULL_TREE,
2487 make_tree (integer_type_node, const0_rtx));
2488 TREE_CHAIN (TREE_CHAIN (arg_list))
2489 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2490 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2492 /* Now we have to build up the CALL_EXPR itself. */
2493 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2494 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2495 call_expr, arg_list, NULL_TREE);
2496 TREE_SIDE_EFFECTS (call_expr) = 1;
2498 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2500 emit_library_call (bzero_libfunc, 0,
2502 XEXP (object, 0), Pmode,
2504 (TYPE_MODE (integer_type_node), size,
2505 TREE_UNSIGNED (integer_type_node)),
2506 TYPE_MODE (integer_type_node));
2511 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2516 /* Generate code to copy Y into X.
2517 Both Y and X must have the same mode, except that
2518 Y can be a constant with VOIDmode.
2519 This mode cannot be BLKmode; use emit_block_move for that.
2521 Return the last instruction emitted. */
2524 emit_move_insn (x, y)
2527 enum machine_mode mode = GET_MODE (x);
2529 x = protect_from_queue (x, 1);
2530 y = protect_from_queue (y, 0);
2532 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2535 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2536 y = force_const_mem (mode, y);
2538 /* If X or Y are memory references, verify that their addresses are valid
2540 if (GET_CODE (x) == MEM
2541 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2542 && ! push_operand (x, GET_MODE (x)))
2544 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2545 x = change_address (x, VOIDmode, XEXP (x, 0));
2547 if (GET_CODE (y) == MEM
2548 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2550 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2551 y = change_address (y, VOIDmode, XEXP (y, 0));
2553 if (mode == BLKmode)
2556 return emit_move_insn_1 (x, y);
2559 /* Low level part of emit_move_insn.
2560 Called just like emit_move_insn, but assumes X and Y
2561 are basically valid. */
2564 emit_move_insn_1 (x, y)
2567 enum machine_mode mode = GET_MODE (x);
2568 enum machine_mode submode;
2569 enum mode_class class = GET_MODE_CLASS (mode);
2572 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2574 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2576 /* Expand complex moves by moving real part and imag part, if possible. */
2577 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2578 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2580 (class == MODE_COMPLEX_INT
2581 ? MODE_INT : MODE_FLOAT),
2583 && (mov_optab->handlers[(int) submode].insn_code
2584 != CODE_FOR_nothing))
2586 /* Don't split destination if it is a stack push. */
2587 int stack = push_operand (x, GET_MODE (x));
2589 /* If this is a stack, push the highpart first, so it
2590 will be in the argument order.
2592 In that case, change_address is used only to convert
2593 the mode, not to change the address. */
2596 /* Note that the real part always precedes the imag part in memory
2597 regardless of machine's endianness. */
2598 #ifdef STACK_GROWS_DOWNWARD
2599 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2600 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2601 gen_imagpart (submode, y)));
2602 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2603 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2604 gen_realpart (submode, y)));
2606 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2607 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2608 gen_realpart (submode, y)));
2609 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2610 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2611 gen_imagpart (submode, y)));
2616 /* Show the output dies here. */
2618 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2620 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2621 (gen_realpart (submode, x), gen_realpart (submode, y)));
2622 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2623 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2626 return get_last_insn ();
2629 /* This will handle any multi-word mode that lacks a move_insn pattern.
2630 However, you will get better code if you define such patterns,
2631 even if they must turn into multiple assembler instructions. */
2632 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2636 #ifdef PUSH_ROUNDING
2638 /* If X is a push on the stack, do the push now and replace
2639 X with a reference to the stack pointer. */
2640 if (push_operand (x, GET_MODE (x)))
2642 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2643 x = change_address (x, VOIDmode, stack_pointer_rtx);
2647 /* Show the output dies here. */
2649 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2652 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2655 rtx xpart = operand_subword (x, i, 1, mode);
2656 rtx ypart = operand_subword (y, i, 1, mode);
2658 /* If we can't get a part of Y, put Y into memory if it is a
2659 constant. Otherwise, force it into a register. If we still
2660 can't get a part of Y, abort. */
2661 if (ypart == 0 && CONSTANT_P (y))
2663 y = force_const_mem (mode, y);
2664 ypart = operand_subword (y, i, 1, mode);
2666 else if (ypart == 0)
2667 ypart = operand_subword_force (y, i, mode);
2669 if (xpart == 0 || ypart == 0)
2672 last_insn = emit_move_insn (xpart, ypart);
2681 /* Pushing data onto the stack. */
2683 /* Push a block of length SIZE (perhaps variable)
2684 and return an rtx to address the beginning of the block.
2685 Note that it is not possible for the value returned to be a QUEUED.
2686 The value may be virtual_outgoing_args_rtx.
2688 EXTRA is the number of bytes of padding to push in addition to SIZE.
2689 BELOW nonzero means this padding comes at low addresses;
2690 otherwise, the padding comes at high addresses. */
2693 push_block (size, extra, below)
2699 size = convert_modes (Pmode, ptr_mode, size, 1);
2700 if (CONSTANT_P (size))
2701 anti_adjust_stack (plus_constant (size, extra));
2702 else if (GET_CODE (size) == REG && extra == 0)
2703 anti_adjust_stack (size);
2706 rtx temp = copy_to_mode_reg (Pmode, size);
2708 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2709 temp, 0, OPTAB_LIB_WIDEN);
2710 anti_adjust_stack (temp);
2713 #if defined (STACK_GROWS_DOWNWARD) \
2714 || (defined (ARGS_GROW_DOWNWARD) \
2715 && !defined (ACCUMULATE_OUTGOING_ARGS))
2717 /* Return the lowest stack address when STACK or ARGS grow downward and
2718 we are not aaccumulating outgoing arguments (the c4x port uses such
2720 temp = virtual_outgoing_args_rtx;
2721 if (extra != 0 && below)
2722 temp = plus_constant (temp, extra);
2724 if (GET_CODE (size) == CONST_INT)
2725 temp = plus_constant (virtual_outgoing_args_rtx,
2726 - INTVAL (size) - (below ? 0 : extra));
2727 else if (extra != 0 && !below)
2728 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2729 negate_rtx (Pmode, plus_constant (size, extra)));
2731 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2732 negate_rtx (Pmode, size));
2735 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2741 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2744 /* Return an rtx for the address of the beginning of a as-if-it-was-pushed
2745 block of SIZE bytes. */
2748 get_push_address (size)
2753 if (STACK_PUSH_CODE == POST_DEC)
2754 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2755 else if (STACK_PUSH_CODE == POST_INC)
2756 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2758 temp = stack_pointer_rtx;
2760 return copy_to_reg (temp);
2763 /* Generate code to push X onto the stack, assuming it has mode MODE and
2765 MODE is redundant except when X is a CONST_INT (since they don't
2767 SIZE is an rtx for the size of data to be copied (in bytes),
2768 needed only if X is BLKmode.
2770 ALIGN (in bytes) is maximum alignment we can assume.
2772 If PARTIAL and REG are both nonzero, then copy that many of the first
2773 words of X into registers starting with REG, and push the rest of X.
2774 The amount of space pushed is decreased by PARTIAL words,
2775 rounded *down* to a multiple of PARM_BOUNDARY.
2776 REG must be a hard register in this case.
2777 If REG is zero but PARTIAL is not, take any all others actions for an
2778 argument partially in registers, but do not actually load any
2781 EXTRA is the amount in bytes of extra space to leave next to this arg.
2782 This is ignored if an argument block has already been allocated.
2784 On a machine that lacks real push insns, ARGS_ADDR is the address of
2785 the bottom of the argument block for this call. We use indexing off there
2786 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
2787 argument block has not been preallocated.
2789 ARGS_SO_FAR is the size of args previously pushed for this call.
2791 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2792 for arguments passed in registers. If nonzero, it will be the number
2793 of bytes required. */
2796 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2797 args_addr, args_so_far, reg_parm_stack_space)
2799 enum machine_mode mode;
2808 int reg_parm_stack_space;
2811 enum direction stack_direction
2812 #ifdef STACK_GROWS_DOWNWARD
2818 /* Decide where to pad the argument: `downward' for below,
2819 `upward' for above, or `none' for don't pad it.
2820 Default is below for small data on big-endian machines; else above. */
2821 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2823 /* Invert direction if stack is post-update. */
2824 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2825 if (where_pad != none)
2826 where_pad = (where_pad == downward ? upward : downward);
2828 xinner = x = protect_from_queue (x, 0);
2830 if (mode == BLKmode)
2832 /* Copy a block into the stack, entirely or partially. */
2835 int used = partial * UNITS_PER_WORD;
2836 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2844 /* USED is now the # of bytes we need not copy to the stack
2845 because registers will take care of them. */
2848 xinner = change_address (xinner, BLKmode,
2849 plus_constant (XEXP (xinner, 0), used));
2851 /* If the partial register-part of the arg counts in its stack size,
2852 skip the part of stack space corresponding to the registers.
2853 Otherwise, start copying to the beginning of the stack space,
2854 by setting SKIP to 0. */
2855 skip = (reg_parm_stack_space == 0) ? 0 : used;
2857 #ifdef PUSH_ROUNDING
2858 /* Do it with several push insns if that doesn't take lots of insns
2859 and if there is no difficulty with push insns that skip bytes
2860 on the stack for alignment purposes. */
2862 && GET_CODE (size) == CONST_INT
2864 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2866 /* Here we avoid the case of a structure whose weak alignment
2867 forces many pushes of a small amount of data,
2868 and such small pushes do rounding that causes trouble. */
2869 && ((! SLOW_UNALIGNED_ACCESS)
2870 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2871 || PUSH_ROUNDING (align) == align)
2872 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2874 /* Push padding now if padding above and stack grows down,
2875 or if padding below and stack grows up.
2876 But if space already allocated, this has already been done. */
2877 if (extra && args_addr == 0
2878 && where_pad != none && where_pad != stack_direction)
2879 anti_adjust_stack (GEN_INT (extra));
2881 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
2882 INTVAL (size) - used, align);
2884 if (current_function_check_memory_usage && ! in_check_memory_usage)
2888 in_check_memory_usage = 1;
2889 temp = get_push_address (INTVAL(size) - used);
2890 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2891 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2893 XEXP (xinner, 0), ptr_mode,
2894 GEN_INT (INTVAL(size) - used),
2895 TYPE_MODE (sizetype));
2897 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2899 GEN_INT (INTVAL(size) - used),
2900 TYPE_MODE (sizetype),
2901 GEN_INT (MEMORY_USE_RW),
2902 TYPE_MODE (integer_type_node));
2903 in_check_memory_usage = 0;
2907 #endif /* PUSH_ROUNDING */
2909 /* Otherwise make space on the stack and copy the data
2910 to the address of that space. */
2912 /* Deduct words put into registers from the size we must copy. */
2915 if (GET_CODE (size) == CONST_INT)
2916 size = GEN_INT (INTVAL (size) - used);
2918 size = expand_binop (GET_MODE (size), sub_optab, size,
2919 GEN_INT (used), NULL_RTX, 0,
2923 /* Get the address of the stack space.
2924 In this case, we do not deal with EXTRA separately.
2925 A single stack adjust will do. */
2928 temp = push_block (size, extra, where_pad == downward);
2931 else if (GET_CODE (args_so_far) == CONST_INT)
2932 temp = memory_address (BLKmode,
2933 plus_constant (args_addr,
2934 skip + INTVAL (args_so_far)));
2936 temp = memory_address (BLKmode,
2937 plus_constant (gen_rtx_PLUS (Pmode,
2941 if (current_function_check_memory_usage && ! in_check_memory_usage)
2945 in_check_memory_usage = 1;
2946 target = copy_to_reg (temp);
2947 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2948 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2950 XEXP (xinner, 0), ptr_mode,
2951 size, TYPE_MODE (sizetype));
2953 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2955 size, TYPE_MODE (sizetype),
2956 GEN_INT (MEMORY_USE_RW),
2957 TYPE_MODE (integer_type_node));
2958 in_check_memory_usage = 0;
2961 /* TEMP is the address of the block. Copy the data there. */
2962 if (GET_CODE (size) == CONST_INT
2963 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2966 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
2967 INTVAL (size), align);
2972 rtx opalign = GEN_INT (align);
2973 enum machine_mode mode;
2974 rtx target = gen_rtx_MEM (BLKmode, temp);
2976 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2978 mode = GET_MODE_WIDER_MODE (mode))
2980 enum insn_code code = movstr_optab[(int) mode];
2982 if (code != CODE_FOR_nothing
2983 && ((GET_CODE (size) == CONST_INT
2984 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2985 <= (GET_MODE_MASK (mode) >> 1)))
2986 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2987 && (insn_operand_predicate[(int) code][0] == 0
2988 || ((*insn_operand_predicate[(int) code][0])
2990 && (insn_operand_predicate[(int) code][1] == 0
2991 || ((*insn_operand_predicate[(int) code][1])
2993 && (insn_operand_predicate[(int) code][3] == 0
2994 || ((*insn_operand_predicate[(int) code][3])
2995 (opalign, VOIDmode))))
2997 rtx op2 = convert_to_mode (mode, size, 1);
2998 rtx last = get_last_insn ();
3001 if (insn_operand_predicate[(int) code][2] != 0
3002 && ! ((*insn_operand_predicate[(int) code][2])
3004 op2 = copy_to_mode_reg (mode, op2);
3006 pat = GEN_FCN ((int) code) (target, xinner,
3014 delete_insns_since (last);
3019 #ifndef ACCUMULATE_OUTGOING_ARGS
3020 /* If the source is referenced relative to the stack pointer,
3021 copy it to another register to stabilize it. We do not need
3022 to do this if we know that we won't be changing sp. */
3024 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3025 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3026 temp = copy_to_reg (temp);
3029 /* Make inhibit_defer_pop nonzero around the library call
3030 to force it to pop the bcopy-arguments right away. */
3032 #ifdef TARGET_MEM_FUNCTIONS
3033 emit_library_call (memcpy_libfunc, 0,
3034 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3035 convert_to_mode (TYPE_MODE (sizetype),
3036 size, TREE_UNSIGNED (sizetype)),
3037 TYPE_MODE (sizetype));
3039 emit_library_call (bcopy_libfunc, 0,
3040 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3041 convert_to_mode (TYPE_MODE (integer_type_node),
3043 TREE_UNSIGNED (integer_type_node)),
3044 TYPE_MODE (integer_type_node));
3049 else if (partial > 0)
3051 /* Scalar partly in registers. */
3053 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3056 /* # words of start of argument
3057 that we must make space for but need not store. */
3058 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3059 int args_offset = INTVAL (args_so_far);
3062 /* Push padding now if padding above and stack grows down,
3063 or if padding below and stack grows up.
3064 But if space already allocated, this has already been done. */
3065 if (extra && args_addr == 0
3066 && where_pad != none && where_pad != stack_direction)
3067 anti_adjust_stack (GEN_INT (extra));
3069 /* If we make space by pushing it, we might as well push
3070 the real data. Otherwise, we can leave OFFSET nonzero
3071 and leave the space uninitialized. */
3075 /* Now NOT_STACK gets the number of words that we don't need to
3076 allocate on the stack. */
3077 not_stack = partial - offset;
3079 /* If the partial register-part of the arg counts in its stack size,
3080 skip the part of stack space corresponding to the registers.
3081 Otherwise, start copying to the beginning of the stack space,
3082 by setting SKIP to 0. */
3083 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3085 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3086 x = validize_mem (force_const_mem (mode, x));
3088 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3089 SUBREGs of such registers are not allowed. */
3090 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3091 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3092 x = copy_to_reg (x);
3094 /* Loop over all the words allocated on the stack for this arg. */
3095 /* We can do it by words, because any scalar bigger than a word
3096 has a size a multiple of a word. */
3097 #ifndef PUSH_ARGS_REVERSED
3098 for (i = not_stack; i < size; i++)
3100 for (i = size - 1; i >= not_stack; i--)
3102 if (i >= not_stack + offset)
3103 emit_push_insn (operand_subword_force (x, i, mode),
3104 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3106 GEN_INT (args_offset + ((i - not_stack + skip)
3108 reg_parm_stack_space);
3113 rtx target = NULL_RTX;
3115 /* Push padding now if padding above and stack grows down,
3116 or if padding below and stack grows up.
3117 But if space already allocated, this has already been done. */
3118 if (extra && args_addr == 0
3119 && where_pad != none && where_pad != stack_direction)
3120 anti_adjust_stack (GEN_INT (extra));
3122 #ifdef PUSH_ROUNDING
3124 addr = gen_push_operand ();
3128 if (GET_CODE (args_so_far) == CONST_INT)
3130 = memory_address (mode,
3131 plus_constant (args_addr,
3132 INTVAL (args_so_far)));
3134 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3139 emit_move_insn (gen_rtx_MEM (mode, addr), x);
3141 if (current_function_check_memory_usage && ! in_check_memory_usage)
3143 in_check_memory_usage = 1;
3145 target = get_push_address (GET_MODE_SIZE (mode));
3147 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3148 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3150 XEXP (x, 0), ptr_mode,
3151 GEN_INT (GET_MODE_SIZE (mode)),
3152 TYPE_MODE (sizetype));
3154 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3156 GEN_INT (GET_MODE_SIZE (mode)),
3157 TYPE_MODE (sizetype),
3158 GEN_INT (MEMORY_USE_RW),
3159 TYPE_MODE (integer_type_node));
3160 in_check_memory_usage = 0;
3165 /* If part should go in registers, copy that part
3166 into the appropriate registers. Do this now, at the end,
3167 since mem-to-mem copies above may do function calls. */
3168 if (partial > 0 && reg != 0)
3170 /* Handle calls that pass values in multiple non-contiguous locations.
3171 The Irix 6 ABI has examples of this. */
3172 if (GET_CODE (reg) == PARALLEL)
3173 emit_group_load (reg, x, -1, align); /* ??? size? */
3175 move_block_to_reg (REGNO (reg), x, partial, mode);
3178 if (extra && args_addr == 0 && where_pad == stack_direction)
3179 anti_adjust_stack (GEN_INT (extra));
3182 /* Expand an assignment that stores the value of FROM into TO.
3183 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3184 (This may contain a QUEUED rtx;
3185 if the value is constant, this rtx is a constant.)
3186 Otherwise, the returned value is NULL_RTX.
3188 SUGGEST_REG is no longer actually used.
3189 It used to mean, copy the value through a register
3190 and return that register, if that is possible.
3191 We now use WANT_VALUE to decide whether to do this. */
3194 expand_assignment (to, from, want_value, suggest_reg)
3199 register rtx to_rtx = 0;
3202 /* Don't crash if the lhs of the assignment was erroneous. */
3204 if (TREE_CODE (to) == ERROR_MARK)
3206 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3207 return want_value ? result : NULL_RTX;
3210 /* Assignment of a structure component needs special treatment
3211 if the structure component's rtx is not simply a MEM.
3212 Assignment of an array element at a constant index, and assignment of
3213 an array element in an unaligned packed structure field, has the same
3216 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3217 || TREE_CODE (to) == ARRAY_REF)
3219 enum machine_mode mode1;
3229 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3230 &unsignedp, &volatilep, &alignment);
3232 /* If we are going to use store_bit_field and extract_bit_field,
3233 make sure to_rtx will be safe for multiple use. */
3235 if (mode1 == VOIDmode && want_value)
3236 tem = stabilize_reference (tem);
3238 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3241 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3243 if (GET_CODE (to_rtx) != MEM)
3246 if (GET_MODE (offset_rtx) != ptr_mode)
3248 #ifdef POINTERS_EXTEND_UNSIGNED
3249 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3251 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3255 if (GET_CODE (to_rtx) == MEM
3256 && GET_MODE (to_rtx) == BLKmode
3258 && (bitpos % bitsize) == 0
3259 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3260 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3262 rtx temp = change_address (to_rtx, mode1,
3263 plus_constant (XEXP (to_rtx, 0),
3266 if (GET_CODE (XEXP (temp, 0)) == REG)
3269 to_rtx = change_address (to_rtx, mode1,
3270 force_reg (GET_MODE (XEXP (temp, 0)),
3275 to_rtx = change_address (to_rtx, VOIDmode,
3276 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3277 force_reg (ptr_mode, offset_rtx)));
3281 if (GET_CODE (to_rtx) == MEM)
3283 /* When the offset is zero, to_rtx is the address of the
3284 structure we are storing into, and hence may be shared.
3285 We must make a new MEM before setting the volatile bit. */
3287 to_rtx = copy_rtx (to_rtx);
3289 MEM_VOLATILE_P (to_rtx) = 1;
3291 #if 0 /* This was turned off because, when a field is volatile
3292 in an object which is not volatile, the object may be in a register,
3293 and then we would abort over here. */
3299 if (TREE_CODE (to) == COMPONENT_REF
3300 && TREE_READONLY (TREE_OPERAND (to, 1)))
3303 to_rtx = copy_rtx (to_rtx);
3305 RTX_UNCHANGING_P (to_rtx) = 1;
3308 /* Check the access. */
3309 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3314 enum machine_mode best_mode;
3316 best_mode = get_best_mode (bitsize, bitpos,
3317 TYPE_ALIGN (TREE_TYPE (tem)),
3319 if (best_mode == VOIDmode)
3322 best_mode_size = GET_MODE_BITSIZE (best_mode);
3323 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3324 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3325 size *= GET_MODE_SIZE (best_mode);
3327 /* Check the access right of the pointer. */
3329 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3331 GEN_INT (size), TYPE_MODE (sizetype),
3332 GEN_INT (MEMORY_USE_WO),
3333 TYPE_MODE (integer_type_node));
3336 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3338 /* Spurious cast makes HPUX compiler happy. */
3339 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3342 /* Required alignment of containing datum. */
3344 int_size_in_bytes (TREE_TYPE (tem)),
3345 get_alias_set (to));
3346 preserve_temp_slots (result);
3350 /* If the value is meaningful, convert RESULT to the proper mode.
3351 Otherwise, return nothing. */
3352 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3353 TYPE_MODE (TREE_TYPE (from)),
3355 TREE_UNSIGNED (TREE_TYPE (to)))
3359 /* If the rhs is a function call and its value is not an aggregate,
3360 call the function before we start to compute the lhs.
3361 This is needed for correct code for cases such as
3362 val = setjmp (buf) on machines where reference to val
3363 requires loading up part of an address in a separate insn.
3365 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3366 a promoted variable where the zero- or sign- extension needs to be done.
3367 Handling this in the normal way is safe because no computation is done
3369 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3370 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3371 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
3376 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3378 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3380 /* Handle calls that return values in multiple non-contiguous locations.
3381 The Irix 6 ABI has examples of this. */
3382 if (GET_CODE (to_rtx) == PARALLEL)
3383 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3384 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3385 else if (GET_MODE (to_rtx) == BLKmode)
3386 emit_block_move (to_rtx, value, expr_size (from),
3387 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3389 emit_move_insn (to_rtx, value);
3390 preserve_temp_slots (to_rtx);
3393 return want_value ? to_rtx : NULL_RTX;
3396 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3397 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3401 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3402 if (GET_CODE (to_rtx) == MEM)
3403 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3406 /* Don't move directly into a return register. */
3407 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3412 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3413 emit_move_insn (to_rtx, temp);
3414 preserve_temp_slots (to_rtx);
3417 return want_value ? to_rtx : NULL_RTX;
3420 /* In case we are returning the contents of an object which overlaps
3421 the place the value is being stored, use a safe function when copying
3422 a value through a pointer into a structure value return block. */
3423 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3424 && current_function_returns_struct
3425 && !current_function_returns_pcc_struct)
3430 size = expr_size (from);
3431 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3432 EXPAND_MEMORY_USE_DONT);
3434 /* Copy the rights of the bitmap. */
3435 if (current_function_check_memory_usage)
3436 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3437 XEXP (to_rtx, 0), ptr_mode,
3438 XEXP (from_rtx, 0), ptr_mode,
3439 convert_to_mode (TYPE_MODE (sizetype),
3440 size, TREE_UNSIGNED (sizetype)),
3441 TYPE_MODE (sizetype));
3443 #ifdef TARGET_MEM_FUNCTIONS
3444 emit_library_call (memcpy_libfunc, 0,
3445 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3446 XEXP (from_rtx, 0), Pmode,
3447 convert_to_mode (TYPE_MODE (sizetype),
3448 size, TREE_UNSIGNED (sizetype)),
3449 TYPE_MODE (sizetype));
3451 emit_library_call (bcopy_libfunc, 0,
3452 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3453 XEXP (to_rtx, 0), Pmode,
3454 convert_to_mode (TYPE_MODE (integer_type_node),
3455 size, TREE_UNSIGNED (integer_type_node)),
3456 TYPE_MODE (integer_type_node));
3459 preserve_temp_slots (to_rtx);
3462 return want_value ? to_rtx : NULL_RTX;
3465 /* Compute FROM and store the value in the rtx we got. */
3468 result = store_expr (from, to_rtx, want_value);
3469 preserve_temp_slots (result);
3472 return want_value ? result : NULL_RTX;
3475 /* Generate code for computing expression EXP,
3476 and storing the value into TARGET.
3477 TARGET may contain a QUEUED rtx.
3479 If WANT_VALUE is nonzero, return a copy of the value
3480 not in TARGET, so that we can be sure to use the proper
3481 value in a containing expression even if TARGET has something
3482 else stored in it. If possible, we copy the value through a pseudo
3483 and return that pseudo. Or, if the value is constant, we try to
3484 return the constant. In some cases, we return a pseudo
3485 copied *from* TARGET.
3487 If the mode is BLKmode then we may return TARGET itself.
3488 It turns out that in BLKmode it doesn't cause a problem.
3489 because C has no operators that could combine two different
3490 assignments into the same BLKmode object with different values
3491 with no sequence point. Will other languages need this to
3494 If WANT_VALUE is 0, we return NULL, to make sure
3495 to catch quickly any cases where the caller uses the value
3496 and fails to set WANT_VALUE. */
3499 store_expr (exp, target, want_value)
3501 register rtx target;
3505 int dont_return_target = 0;
3507 if (TREE_CODE (exp) == COMPOUND_EXPR)
3509 /* Perform first part of compound expression, then assign from second
3511 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3513 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3515 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3517 /* For conditional expression, get safe form of the target. Then
3518 test the condition, doing the appropriate assignment on either
3519 side. This avoids the creation of unnecessary temporaries.
3520 For non-BLKmode, it is more efficient not to do this. */
3522 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3525 target = protect_from_queue (target, 1);
3527 do_pending_stack_adjust ();
3529 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3530 start_cleanup_deferral ();
3531 store_expr (TREE_OPERAND (exp, 1), target, 0);
3532 end_cleanup_deferral ();
3534 emit_jump_insn (gen_jump (lab2));
3537 start_cleanup_deferral ();
3538 store_expr (TREE_OPERAND (exp, 2), target, 0);
3539 end_cleanup_deferral ();
3544 return want_value ? target : NULL_RTX;
3546 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3547 && GET_MODE (target) != BLKmode)
3548 /* If target is in memory and caller wants value in a register instead,
3549 arrange that. Pass TARGET as target for expand_expr so that,
3550 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3551 We know expand_expr will not use the target in that case.
3552 Don't do this if TARGET is volatile because we are supposed
3553 to write it and then read it. */
3555 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3556 GET_MODE (target), 0);
3557 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3558 temp = copy_to_reg (temp);
3559 dont_return_target = 1;
3561 else if (queued_subexp_p (target))
3562 /* If target contains a postincrement, let's not risk
3563 using it as the place to generate the rhs. */
3565 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3567 /* Expand EXP into a new pseudo. */
3568 temp = gen_reg_rtx (GET_MODE (target));
3569 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3572 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3574 /* If target is volatile, ANSI requires accessing the value
3575 *from* the target, if it is accessed. So make that happen.
3576 In no case return the target itself. */
3577 if (! MEM_VOLATILE_P (target) && want_value)
3578 dont_return_target = 1;
3580 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3581 /* If this is an scalar in a register that is stored in a wider mode
3582 than the declared mode, compute the result into its declared mode
3583 and then convert to the wider mode. Our value is the computed
3586 /* If we don't want a value, we can do the conversion inside EXP,
3587 which will often result in some optimizations. Do the conversion
3588 in two steps: first change the signedness, if needed, then
3589 the extend. But don't do this if the type of EXP is a subtype
3590 of something else since then the conversion might involve
3591 more than just converting modes. */
3592 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3593 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3595 if (TREE_UNSIGNED (TREE_TYPE (exp))
3596 != SUBREG_PROMOTED_UNSIGNED_P (target))
3599 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3603 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3604 SUBREG_PROMOTED_UNSIGNED_P (target)),
3608 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3610 /* If TEMP is a volatile MEM and we want a result value, make
3611 the access now so it gets done only once. Likewise if
3612 it contains TARGET. */
3613 if (GET_CODE (temp) == MEM && want_value
3614 && (MEM_VOLATILE_P (temp)
3615 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3616 temp = copy_to_reg (temp);
3618 /* If TEMP is a VOIDmode constant, use convert_modes to make
3619 sure that we properly convert it. */
3620 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3621 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3622 TYPE_MODE (TREE_TYPE (exp)), temp,
3623 SUBREG_PROMOTED_UNSIGNED_P (target));
3625 convert_move (SUBREG_REG (target), temp,
3626 SUBREG_PROMOTED_UNSIGNED_P (target));
3627 return want_value ? temp : NULL_RTX;
3631 temp = expand_expr (exp, target, GET_MODE (target), 0);
3632 /* Return TARGET if it's a specified hardware register.
3633 If TARGET is a volatile mem ref, either return TARGET
3634 or return a reg copied *from* TARGET; ANSI requires this.
3636 Otherwise, if TEMP is not TARGET, return TEMP
3637 if it is constant (for efficiency),
3638 or if we really want the correct value. */
3639 if (!(target && GET_CODE (target) == REG
3640 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3641 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3642 && ! rtx_equal_p (temp, target)
3643 && (CONSTANT_P (temp) || want_value))
3644 dont_return_target = 1;
3647 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3648 the same as that of TARGET, adjust the constant. This is needed, for
3649 example, in case it is a CONST_DOUBLE and we want only a word-sized
3651 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3652 && TREE_CODE (exp) != ERROR_MARK
3653 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3654 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3655 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3657 if (current_function_check_memory_usage
3658 && GET_CODE (target) == MEM
3659 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3661 if (GET_CODE (temp) == MEM)
3662 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3663 XEXP (target, 0), ptr_mode,
3664 XEXP (temp, 0), ptr_mode,
3665 expr_size (exp), TYPE_MODE (sizetype));
3667 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3668 XEXP (target, 0), ptr_mode,
3669 expr_size (exp), TYPE_MODE (sizetype),
3670 GEN_INT (MEMORY_USE_WO),
3671 TYPE_MODE (integer_type_node));
3674 /* If value was not generated in the target, store it there.
3675 Convert the value to TARGET's type first if nec. */
3676 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3677 one or both of them are volatile memory refs, we have to distinguish
3679 - expand_expr has used TARGET. In this case, we must not generate
3680 another copy. This can be detected by TARGET being equal according
3682 - expand_expr has not used TARGET - that means that the source just
3683 happens to have the same RTX form. Since temp will have been created
3684 by expand_expr, it will compare unequal according to == .
3685 We must generate a copy in this case, to reach the correct number
3686 of volatile memory references. */
3688 if ((! rtx_equal_p (temp, target)
3689 || (temp != target && (side_effects_p (temp)
3690 || side_effects_p (target))))
3691 && TREE_CODE (exp) != ERROR_MARK)
3693 target = protect_from_queue (target, 1);
3694 if (GET_MODE (temp) != GET_MODE (target)
3695 && GET_MODE (temp) != VOIDmode)
3697 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3698 if (dont_return_target)
3700 /* In this case, we will return TEMP,
3701 so make sure it has the proper mode.
3702 But don't forget to store the value into TARGET. */
3703 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3704 emit_move_insn (target, temp);
3707 convert_move (target, temp, unsignedp);
3710 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3712 /* Handle copying a string constant into an array.
3713 The string constant may be shorter than the array.
3714 So copy just the string's actual length, and clear the rest. */
3718 /* Get the size of the data type of the string,
3719 which is actually the size of the target. */
3720 size = expr_size (exp);
3721 if (GET_CODE (size) == CONST_INT
3722 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3723 emit_block_move (target, temp, size,
3724 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3727 /* Compute the size of the data to copy from the string. */
3729 = size_binop (MIN_EXPR,
3730 make_tree (sizetype, size),
3732 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3733 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3737 /* Copy that much. */
3738 emit_block_move (target, temp, copy_size_rtx,
3739 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3741 /* Figure out how much is left in TARGET that we have to clear.
3742 Do all calculations in ptr_mode. */
3744 addr = XEXP (target, 0);
3745 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3747 if (GET_CODE (copy_size_rtx) == CONST_INT)
3749 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3750 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3754 addr = force_reg (ptr_mode, addr);
3755 addr = expand_binop (ptr_mode, add_optab, addr,
3756 copy_size_rtx, NULL_RTX, 0,
3759 size = expand_binop (ptr_mode, sub_optab, size,
3760 copy_size_rtx, NULL_RTX, 0,
3763 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
3764 GET_MODE (size), 0, 0);
3765 label = gen_label_rtx ();
3766 emit_jump_insn (gen_blt (label));
3769 if (size != const0_rtx)
3771 /* Be sure we can write on ADDR. */
3772 if (current_function_check_memory_usage)
3773 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3775 size, TYPE_MODE (sizetype),
3776 GEN_INT (MEMORY_USE_WO),
3777 TYPE_MODE (integer_type_node));
3778 #ifdef TARGET_MEM_FUNCTIONS
3779 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3781 const0_rtx, TYPE_MODE (integer_type_node),
3782 convert_to_mode (TYPE_MODE (sizetype),
3784 TREE_UNSIGNED (sizetype)),
3785 TYPE_MODE (sizetype));
3787 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3789 convert_to_mode (TYPE_MODE (integer_type_node),
3791 TREE_UNSIGNED (integer_type_node)),
3792 TYPE_MODE (integer_type_node));
3800 /* Handle calls that return values in multiple non-contiguous locations.
3801 The Irix 6 ABI has examples of this. */
3802 else if (GET_CODE (target) == PARALLEL)
3803 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3804 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3805 else if (GET_MODE (temp) == BLKmode)
3806 emit_block_move (target, temp, expr_size (exp),
3807 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3809 emit_move_insn (target, temp);
3812 /* If we don't want a value, return NULL_RTX. */
3816 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3817 ??? The latter test doesn't seem to make sense. */
3818 else if (dont_return_target && GET_CODE (temp) != MEM)
3821 /* Return TARGET itself if it is a hard register. */
3822 else if (want_value && GET_MODE (target) != BLKmode
3823 && ! (GET_CODE (target) == REG
3824 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3825 return copy_to_reg (target);
3831 /* Return 1 if EXP just contains zeros. */
3839 switch (TREE_CODE (exp))
3843 case NON_LVALUE_EXPR:
3844 return is_zeros_p (TREE_OPERAND (exp, 0));
3847 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3851 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3854 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
3857 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3858 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3859 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3860 if (! is_zeros_p (TREE_VALUE (elt)))
3870 /* Return 1 if EXP contains mostly (3/4) zeros. */
3873 mostly_zeros_p (exp)
3876 if (TREE_CODE (exp) == CONSTRUCTOR)
3878 int elts = 0, zeros = 0;
3879 tree elt = CONSTRUCTOR_ELTS (exp);
3880 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3882 /* If there are no ranges of true bits, it is all zero. */
3883 return elt == NULL_TREE;
3885 for (; elt; elt = TREE_CHAIN (elt))
3887 /* We do not handle the case where the index is a RANGE_EXPR,
3888 so the statistic will be somewhat inaccurate.
3889 We do make a more accurate count in store_constructor itself,
3890 so since this function is only used for nested array elements,
3891 this should be close enough. */
3892 if (mostly_zeros_p (TREE_VALUE (elt)))
3897 return 4 * zeros >= 3 * elts;
3900 return is_zeros_p (exp);
3903 /* Helper function for store_constructor.
3904 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3905 TYPE is the type of the CONSTRUCTOR, not the element type.
3906 CLEARED is as for store_constructor.
3908 This provides a recursive shortcut back to store_constructor when it isn't
3909 necessary to go through store_field. This is so that we can pass through
3910 the cleared field to let store_constructor know that we may not have to
3911 clear a substructure if the outer structure has already been cleared. */
3914 store_constructor_field (target, bitsize, bitpos,
3915 mode, exp, type, cleared)
3917 int bitsize, bitpos;
3918 enum machine_mode mode;
3922 if (TREE_CODE (exp) == CONSTRUCTOR
3923 && bitpos % BITS_PER_UNIT == 0
3924 /* If we have a non-zero bitpos for a register target, then we just
3925 let store_field do the bitfield handling. This is unlikely to
3926 generate unnecessary clear instructions anyways. */
3927 && (bitpos == 0 || GET_CODE (target) == MEM))
3930 target = change_address (target, VOIDmode,
3931 plus_constant (XEXP (target, 0),
3932 bitpos / BITS_PER_UNIT));
3933 store_constructor (exp, target, cleared);
3936 store_field (target, bitsize, bitpos, mode, exp,
3937 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3938 int_size_in_bytes (type), 0);
3941 /* Store the value of constructor EXP into the rtx TARGET.
3942 TARGET is either a REG or a MEM.
3943 CLEARED is true if TARGET is known to have been zero'd. */
3946 store_constructor (exp, target, cleared)
3951 tree type = TREE_TYPE (exp);
3952 rtx exp_size = expr_size (exp);
3954 /* We know our target cannot conflict, since safe_from_p has been called. */
3956 /* Don't try copying piece by piece into a hard register
3957 since that is vulnerable to being clobbered by EXP.
3958 Instead, construct in a pseudo register and then copy it all. */
3959 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3961 rtx temp = gen_reg_rtx (GET_MODE (target));
3962 store_constructor (exp, temp, 0);
3963 emit_move_insn (target, temp);
3968 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3969 || TREE_CODE (type) == QUAL_UNION_TYPE)
3973 /* Inform later passes that the whole union value is dead. */
3974 if (TREE_CODE (type) == UNION_TYPE
3975 || TREE_CODE (type) == QUAL_UNION_TYPE)
3976 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
3978 /* If we are building a static constructor into a register,
3979 set the initial value as zero so we can fold the value into
3980 a constant. But if more than one register is involved,
3981 this probably loses. */
3982 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3983 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
3986 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
3991 /* If the constructor has fewer fields than the structure
3992 or if we are initializing the structure to mostly zeros,
3993 clear the whole structure first. */
3994 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3995 != list_length (TYPE_FIELDS (type)))
3996 || mostly_zeros_p (exp))
3999 clear_storage (target, expr_size (exp),
4000 TYPE_ALIGN (type) / BITS_PER_UNIT);
4005 /* Inform later passes that the old value is dead. */
4006 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4008 /* Store each element of the constructor into
4009 the corresponding field of TARGET. */
4011 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4013 register tree field = TREE_PURPOSE (elt);
4014 tree value = TREE_VALUE (elt);
4015 register enum machine_mode mode;
4019 tree pos, constant = 0, offset = 0;
4020 rtx to_rtx = target;
4022 /* Just ignore missing fields.
4023 We cleared the whole structure, above,
4024 if any fields are missing. */
4028 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4031 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
4032 unsignedp = TREE_UNSIGNED (field);
4033 mode = DECL_MODE (field);
4034 if (DECL_BIT_FIELD (field))
4037 pos = DECL_FIELD_BITPOS (field);
4038 if (TREE_CODE (pos) == INTEGER_CST)
4040 else if (TREE_CODE (pos) == PLUS_EXPR
4041 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4042 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
4047 bitpos = TREE_INT_CST_LOW (constant);
4053 if (contains_placeholder_p (offset))
4054 offset = build (WITH_RECORD_EXPR, sizetype,
4055 offset, make_tree (TREE_TYPE (exp), target));
4057 offset = size_binop (FLOOR_DIV_EXPR, offset,
4058 size_int (BITS_PER_UNIT));
4060 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4061 if (GET_CODE (to_rtx) != MEM)
4064 if (GET_MODE (offset_rtx) != ptr_mode)
4066 #ifdef POINTERS_EXTEND_UNSIGNED
4067 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4069 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4074 = change_address (to_rtx, VOIDmode,
4075 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4076 force_reg (ptr_mode, offset_rtx)));
4078 if (TREE_READONLY (field))
4080 if (GET_CODE (to_rtx) == MEM)
4081 to_rtx = copy_rtx (to_rtx);
4083 RTX_UNCHANGING_P (to_rtx) = 1;
4086 #ifdef WORD_REGISTER_OPERATIONS
4087 /* If this initializes a field that is smaller than a word, at the
4088 start of a word, try to widen it to a full word.
4089 This special case allows us to output C++ member function
4090 initializations in a form that the optimizers can understand. */
4092 && GET_CODE (target) == REG
4093 && bitsize < BITS_PER_WORD
4094 && bitpos % BITS_PER_WORD == 0
4095 && GET_MODE_CLASS (mode) == MODE_INT
4096 && TREE_CODE (value) == INTEGER_CST
4097 && GET_CODE (exp_size) == CONST_INT
4098 && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT)
4100 tree type = TREE_TYPE (value);
4101 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4103 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4104 value = convert (type, value);
4106 if (BYTES_BIG_ENDIAN)
4108 = fold (build (LSHIFT_EXPR, type, value,
4109 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4110 bitsize = BITS_PER_WORD;
4114 store_constructor_field (to_rtx, bitsize, bitpos,
4115 mode, value, type, cleared);
4118 else if (TREE_CODE (type) == ARRAY_TYPE)
4123 tree domain = TYPE_DOMAIN (type);
4124 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4125 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4126 tree elttype = TREE_TYPE (type);
4128 /* If the constructor has fewer elements than the array,
4129 clear the whole array first. Similarly if this is
4130 static constructor of a non-BLKmode object. */
4131 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4135 HOST_WIDE_INT count = 0, zero_count = 0;
4137 /* This loop is a more accurate version of the loop in
4138 mostly_zeros_p (it handles RANGE_EXPR in an index).
4139 It is also needed to check for missing elements. */
4140 for (elt = CONSTRUCTOR_ELTS (exp);
4142 elt = TREE_CHAIN (elt))
4144 tree index = TREE_PURPOSE (elt);
4145 HOST_WIDE_INT this_node_count;
4146 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4148 tree lo_index = TREE_OPERAND (index, 0);
4149 tree hi_index = TREE_OPERAND (index, 1);
4150 if (TREE_CODE (lo_index) != INTEGER_CST
4151 || TREE_CODE (hi_index) != INTEGER_CST)
4156 this_node_count = TREE_INT_CST_LOW (hi_index)
4157 - TREE_INT_CST_LOW (lo_index) + 1;
4160 this_node_count = 1;
4161 count += this_node_count;
4162 if (mostly_zeros_p (TREE_VALUE (elt)))
4163 zero_count += this_node_count;
4165 /* Clear the entire array first if there are any missing elements,
4166 or if the incidence of zero elements is >= 75%. */
4167 if (count < maxelt - minelt + 1
4168 || 4 * zero_count >= 3 * count)
4174 clear_storage (target, expr_size (exp),
4175 TYPE_ALIGN (type) / BITS_PER_UNIT);
4179 /* Inform later passes that the old value is dead. */
4180 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4182 /* Store each element of the constructor into
4183 the corresponding element of TARGET, determined
4184 by counting the elements. */
4185 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4187 elt = TREE_CHAIN (elt), i++)
4189 register enum machine_mode mode;
4193 tree value = TREE_VALUE (elt);
4194 tree index = TREE_PURPOSE (elt);
4195 rtx xtarget = target;
4197 if (cleared && is_zeros_p (value))
4200 mode = TYPE_MODE (elttype);
4201 bitsize = GET_MODE_BITSIZE (mode);
4202 unsignedp = TREE_UNSIGNED (elttype);
4204 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4206 tree lo_index = TREE_OPERAND (index, 0);
4207 tree hi_index = TREE_OPERAND (index, 1);
4208 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4209 struct nesting *loop;
4210 HOST_WIDE_INT lo, hi, count;
4213 /* If the range is constant and "small", unroll the loop. */
4214 if (TREE_CODE (lo_index) == INTEGER_CST
4215 && TREE_CODE (hi_index) == INTEGER_CST
4216 && (lo = TREE_INT_CST_LOW (lo_index),
4217 hi = TREE_INT_CST_LOW (hi_index),
4218 count = hi - lo + 1,
4219 (GET_CODE (target) != MEM
4221 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4222 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
4225 lo -= minelt; hi -= minelt;
4226 for (; lo <= hi; lo++)
4228 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4229 store_constructor_field (target, bitsize, bitpos,
4230 mode, value, type, cleared);
4235 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4236 loop_top = gen_label_rtx ();
4237 loop_end = gen_label_rtx ();
4239 unsignedp = TREE_UNSIGNED (domain);
4241 index = build_decl (VAR_DECL, NULL_TREE, domain);
4243 DECL_RTL (index) = index_r
4244 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4247 if (TREE_CODE (value) == SAVE_EXPR
4248 && SAVE_EXPR_RTL (value) == 0)
4250 /* Make sure value gets expanded once before the
4252 expand_expr (value, const0_rtx, VOIDmode, 0);
4255 store_expr (lo_index, index_r, 0);
4256 loop = expand_start_loop (0);
4258 /* Assign value to element index. */
4259 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4260 size_int (BITS_PER_UNIT));
4261 position = size_binop (MULT_EXPR,
4262 size_binop (MINUS_EXPR, index,
4263 TYPE_MIN_VALUE (domain)),
4265 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4266 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4267 xtarget = change_address (target, mode, addr);
4268 if (TREE_CODE (value) == CONSTRUCTOR)
4269 store_constructor (value, xtarget, cleared);
4271 store_expr (value, xtarget, 0);
4273 expand_exit_loop_if_false (loop,
4274 build (LT_EXPR, integer_type_node,
4277 expand_increment (build (PREINCREMENT_EXPR,
4279 index, integer_one_node), 0, 0);
4281 emit_label (loop_end);
4283 /* Needed by stupid register allocation. to extend the
4284 lifetime of pseudo-regs used by target past the end
4286 emit_insn (gen_rtx_USE (GET_MODE (target), target));
4289 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
4290 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
4296 index = size_int (i);
4299 index = size_binop (MINUS_EXPR, index,
4300 TYPE_MIN_VALUE (domain));
4301 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4302 size_int (BITS_PER_UNIT));
4303 position = size_binop (MULT_EXPR, index, position);
4304 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4305 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4306 xtarget = change_address (target, mode, addr);
4307 store_expr (value, xtarget, 0);
4312 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
4313 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4315 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4316 store_constructor_field (target, bitsize, bitpos,
4317 mode, value, type, cleared);
4321 /* set constructor assignments */
4322 else if (TREE_CODE (type) == SET_TYPE)
4324 tree elt = CONSTRUCTOR_ELTS (exp);
4325 int nbytes = int_size_in_bytes (type), nbits;
4326 tree domain = TYPE_DOMAIN (type);
4327 tree domain_min, domain_max, bitlength;
4329 /* The default implementation strategy is to extract the constant
4330 parts of the constructor, use that to initialize the target,
4331 and then "or" in whatever non-constant ranges we need in addition.
4333 If a large set is all zero or all ones, it is
4334 probably better to set it using memset (if available) or bzero.
4335 Also, if a large set has just a single range, it may also be
4336 better to first clear all the first clear the set (using
4337 bzero/memset), and set the bits we want. */
4339 /* Check for all zeros. */
4340 if (elt == NULL_TREE)
4343 clear_storage (target, expr_size (exp),
4344 TYPE_ALIGN (type) / BITS_PER_UNIT);
4348 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4349 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4350 bitlength = size_binop (PLUS_EXPR,
4351 size_binop (MINUS_EXPR, domain_max, domain_min),
4354 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4356 nbits = TREE_INT_CST_LOW (bitlength);
4358 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4359 are "complicated" (more than one range), initialize (the
4360 constant parts) by copying from a constant. */
4361 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4362 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4364 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4365 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4366 char *bit_buffer = (char *) alloca (nbits);
4367 HOST_WIDE_INT word = 0;
4370 int offset = 0; /* In bytes from beginning of set. */
4371 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4374 if (bit_buffer[ibit])
4376 if (BYTES_BIG_ENDIAN)
4377 word |= (1 << (set_word_size - 1 - bit_pos));
4379 word |= 1 << bit_pos;
4382 if (bit_pos >= set_word_size || ibit == nbits)
4384 if (word != 0 || ! cleared)
4386 rtx datum = GEN_INT (word);
4388 /* The assumption here is that it is safe to use
4389 XEXP if the set is multi-word, but not if
4390 it's single-word. */
4391 if (GET_CODE (target) == MEM)
4393 to_rtx = plus_constant (XEXP (target, 0), offset);
4394 to_rtx = change_address (target, mode, to_rtx);
4396 else if (offset == 0)
4400 emit_move_insn (to_rtx, datum);
4406 offset += set_word_size / BITS_PER_UNIT;
4412 /* Don't bother clearing storage if the set is all ones. */
4413 if (TREE_CHAIN (elt) != NULL_TREE
4414 || (TREE_PURPOSE (elt) == NULL_TREE
4416 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4417 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4418 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4419 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4421 clear_storage (target, expr_size (exp),
4422 TYPE_ALIGN (type) / BITS_PER_UNIT);
4425 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4427 /* start of range of element or NULL */
4428 tree startbit = TREE_PURPOSE (elt);
4429 /* end of range of element, or element value */
4430 tree endbit = TREE_VALUE (elt);
4431 #ifdef TARGET_MEM_FUNCTIONS
4432 HOST_WIDE_INT startb, endb;
4434 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4436 bitlength_rtx = expand_expr (bitlength,
4437 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4439 /* handle non-range tuple element like [ expr ] */
4440 if (startbit == NULL_TREE)
4442 startbit = save_expr (endbit);
4445 startbit = convert (sizetype, startbit);
4446 endbit = convert (sizetype, endbit);
4447 if (! integer_zerop (domain_min))
4449 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4450 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4452 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4453 EXPAND_CONST_ADDRESS);
4454 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4455 EXPAND_CONST_ADDRESS);
4459 targetx = assign_stack_temp (GET_MODE (target),
4460 GET_MODE_SIZE (GET_MODE (target)),
4462 emit_move_insn (targetx, target);
4464 else if (GET_CODE (target) == MEM)
4469 #ifdef TARGET_MEM_FUNCTIONS
4470 /* Optimization: If startbit and endbit are
4471 constants divisible by BITS_PER_UNIT,
4472 call memset instead. */
4473 if (TREE_CODE (startbit) == INTEGER_CST
4474 && TREE_CODE (endbit) == INTEGER_CST
4475 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4476 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4478 emit_library_call (memset_libfunc, 0,
4480 plus_constant (XEXP (targetx, 0),
4481 startb / BITS_PER_UNIT),
4483 constm1_rtx, TYPE_MODE (integer_type_node),
4484 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4485 TYPE_MODE (sizetype));
4490 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4491 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4492 bitlength_rtx, TYPE_MODE (sizetype),
4493 startbit_rtx, TYPE_MODE (sizetype),
4494 endbit_rtx, TYPE_MODE (sizetype));
4497 emit_move_insn (target, targetx);
4505 /* Store the value of EXP (an expression tree)
4506 into a subfield of TARGET which has mode MODE and occupies
4507 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4508 If MODE is VOIDmode, it means that we are storing into a bit-field.
4510 If VALUE_MODE is VOIDmode, return nothing in particular.
4511 UNSIGNEDP is not used in this case.
4513 Otherwise, return an rtx for the value stored. This rtx
4514 has mode VALUE_MODE if that is convenient to do.
4515 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4517 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4518 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4520 ALIAS_SET is the alias set for the destination. This value will
4521 (in general) be different from that for TARGET, since TARGET is a
4522 reference to the containing structure. */
4525 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4526 unsignedp, align, total_size, alias_set)
4528 int bitsize, bitpos;
4529 enum machine_mode mode;
4531 enum machine_mode value_mode;
4537 HOST_WIDE_INT width_mask = 0;
4539 if (TREE_CODE (exp) == ERROR_MARK)
4542 if (bitsize < HOST_BITS_PER_WIDE_INT)
4543 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4545 /* If we are storing into an unaligned field of an aligned union that is
4546 in a register, we may have the mode of TARGET being an integer mode but
4547 MODE == BLKmode. In that case, get an aligned object whose size and
4548 alignment are the same as TARGET and store TARGET into it (we can avoid
4549 the store if the field being stored is the entire width of TARGET). Then
4550 call ourselves recursively to store the field into a BLKmode version of
4551 that object. Finally, load from the object into TARGET. This is not
4552 very efficient in general, but should only be slightly more expensive
4553 than the otherwise-required unaligned accesses. Perhaps this can be
4554 cleaned up later. */
4557 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4559 rtx object = assign_stack_temp (GET_MODE (target),
4560 GET_MODE_SIZE (GET_MODE (target)), 0);
4561 rtx blk_object = copy_rtx (object);
4563 MEM_IN_STRUCT_P (object) = 1;
4564 MEM_IN_STRUCT_P (blk_object) = 1;
4565 PUT_MODE (blk_object, BLKmode);
4567 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4568 emit_move_insn (object, target);
4570 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4571 align, total_size, alias_set);
4573 /* Even though we aren't returning target, we need to
4574 give it the updated value. */
4575 emit_move_insn (target, object);
4580 /* If the structure is in a register or if the component
4581 is a bit field, we cannot use addressing to access it.
4582 Use bit-field techniques or SUBREG to store in it. */
4584 if (mode == VOIDmode
4585 || (mode != BLKmode && ! direct_store[(int) mode])
4586 || GET_CODE (target) == REG
4587 || GET_CODE (target) == SUBREG
4588 /* If the field isn't aligned enough to store as an ordinary memref,
4589 store it as a bit field. */
4590 || (SLOW_UNALIGNED_ACCESS
4591 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4592 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4594 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4596 /* If BITSIZE is narrower than the size of the type of EXP
4597 we will be narrowing TEMP. Normally, what's wanted are the
4598 low-order bits. However, if EXP's type is a record and this is
4599 big-endian machine, we want the upper BITSIZE bits. */
4600 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4601 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4602 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4603 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4604 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4608 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4610 if (mode != VOIDmode && mode != BLKmode
4611 && mode != TYPE_MODE (TREE_TYPE (exp)))
4612 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4614 /* If the modes of TARGET and TEMP are both BLKmode, both
4615 must be in memory and BITPOS must be aligned on a byte
4616 boundary. If so, we simply do a block copy. */
4617 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4619 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4620 || bitpos % BITS_PER_UNIT != 0)
4623 target = change_address (target, VOIDmode,
4624 plus_constant (XEXP (target, 0),
4625 bitpos / BITS_PER_UNIT));
4627 emit_block_move (target, temp,
4628 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4632 return value_mode == VOIDmode ? const0_rtx : target;
4635 /* Store the value in the bitfield. */
4636 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4637 if (value_mode != VOIDmode)
4639 /* The caller wants an rtx for the value. */
4640 /* If possible, avoid refetching from the bitfield itself. */
4642 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4645 enum machine_mode tmode;
4648 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4649 tmode = GET_MODE (temp);
4650 if (tmode == VOIDmode)
4652 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4653 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4654 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4656 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4657 NULL_RTX, value_mode, 0, align,
4664 rtx addr = XEXP (target, 0);
4667 /* If a value is wanted, it must be the lhs;
4668 so make the address stable for multiple use. */
4670 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4671 && ! CONSTANT_ADDRESS_P (addr)
4672 /* A frame-pointer reference is already stable. */
4673 && ! (GET_CODE (addr) == PLUS
4674 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4675 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4676 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4677 addr = copy_to_reg (addr);
4679 /* Now build a reference to just the desired component. */
4681 to_rtx = copy_rtx (change_address (target, mode,
4682 plus_constant (addr,
4684 / BITS_PER_UNIT))));
4685 MEM_IN_STRUCT_P (to_rtx) = 1;
4686 MEM_ALIAS_SET (to_rtx) = alias_set;
4688 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4692 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4693 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4694 ARRAY_REFs and find the ultimate containing object, which we return.
4696 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4697 bit position, and *PUNSIGNEDP to the signedness of the field.
4698 If the position of the field is variable, we store a tree
4699 giving the variable offset (in units) in *POFFSET.
4700 This offset is in addition to the bit position.
4701 If the position is not variable, we store 0 in *POFFSET.
4702 We set *PALIGNMENT to the alignment in bytes of the address that will be
4703 computed. This is the alignment of the thing we return if *POFFSET
4704 is zero, but can be more less strictly aligned if *POFFSET is nonzero.
4706 If any of the extraction expressions is volatile,
4707 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4709 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4710 is a mode that can be used to access the field. In that case, *PBITSIZE
4713 If the field describes a variable-sized object, *PMODE is set to
4714 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4715 this case, but the address of the object can be found. */
4718 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4719 punsignedp, pvolatilep, palignment)
4724 enum machine_mode *pmode;
4729 tree orig_exp = exp;
4731 enum machine_mode mode = VOIDmode;
4732 tree offset = integer_zero_node;
4733 unsigned int alignment = BIGGEST_ALIGNMENT;
4735 if (TREE_CODE (exp) == COMPONENT_REF)
4737 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4738 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4739 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4740 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4742 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4744 size_tree = TREE_OPERAND (exp, 1);
4745 *punsignedp = TREE_UNSIGNED (exp);
4749 mode = TYPE_MODE (TREE_TYPE (exp));
4750 *pbitsize = GET_MODE_BITSIZE (mode);
4751 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4756 if (TREE_CODE (size_tree) != INTEGER_CST)
4757 mode = BLKmode, *pbitsize = -1;
4759 *pbitsize = TREE_INT_CST_LOW (size_tree);
4762 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4763 and find the ultimate containing object. */
4769 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4771 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4772 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4773 : TREE_OPERAND (exp, 2));
4774 tree constant = integer_zero_node, var = pos;
4776 /* If this field hasn't been filled in yet, don't go
4777 past it. This should only happen when folding expressions
4778 made during type construction. */
4782 /* Assume here that the offset is a multiple of a unit.
4783 If not, there should be an explicitly added constant. */
4784 if (TREE_CODE (pos) == PLUS_EXPR
4785 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4786 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4787 else if (TREE_CODE (pos) == INTEGER_CST)
4788 constant = pos, var = integer_zero_node;
4790 *pbitpos += TREE_INT_CST_LOW (constant);
4791 offset = size_binop (PLUS_EXPR, offset,
4792 size_binop (EXACT_DIV_EXPR, var,
4793 size_int (BITS_PER_UNIT)));
4796 else if (TREE_CODE (exp) == ARRAY_REF)
4798 /* This code is based on the code in case ARRAY_REF in expand_expr
4799 below. We assume here that the size of an array element is
4800 always an integral multiple of BITS_PER_UNIT. */
4802 tree index = TREE_OPERAND (exp, 1);
4803 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4805 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4806 tree index_type = TREE_TYPE (index);
4809 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4811 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4813 index_type = TREE_TYPE (index);
4816 /* Optimize the special-case of a zero lower bound.
4818 We convert the low_bound to sizetype to avoid some problems
4819 with constant folding. (E.g. suppose the lower bound is 1,
4820 and its mode is QI. Without the conversion, (ARRAY
4821 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4822 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4824 But sizetype isn't quite right either (especially if
4825 the lowbound is negative). FIXME */
4827 if (! integer_zerop (low_bound))
4828 index = fold (build (MINUS_EXPR, index_type, index,
4829 convert (sizetype, low_bound)));
4831 if (TREE_CODE (index) == INTEGER_CST)
4833 index = convert (sbitsizetype, index);
4834 index_type = TREE_TYPE (index);
4837 xindex = fold (build (MULT_EXPR, sbitsizetype, index,
4838 convert (sbitsizetype,
4839 TYPE_SIZE (TREE_TYPE (exp)))));
4841 if (TREE_CODE (xindex) == INTEGER_CST
4842 && TREE_INT_CST_HIGH (xindex) == 0)
4843 *pbitpos += TREE_INT_CST_LOW (xindex);
4846 /* Either the bit offset calculated above is not constant, or
4847 it overflowed. In either case, redo the multiplication
4848 against the size in units. This is especially important
4849 in the non-constant case to avoid a division at runtime. */
4850 xindex = fold (build (MULT_EXPR, ssizetype, index,
4852 TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
4854 if (contains_placeholder_p (xindex))
4855 xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);
4857 offset = size_binop (PLUS_EXPR, offset, xindex);
4860 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4861 && ! ((TREE_CODE (exp) == NOP_EXPR
4862 || TREE_CODE (exp) == CONVERT_EXPR)
4863 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4864 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4866 && (TYPE_MODE (TREE_TYPE (exp))
4867 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4870 /* If any reference in the chain is volatile, the effect is volatile. */
4871 if (TREE_THIS_VOLATILE (exp))
4874 /* If the offset is non-constant already, then we can't assume any
4875 alignment more than the alignment here. */
4876 if (! integer_zerop (offset))
4877 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4879 exp = TREE_OPERAND (exp, 0);
4882 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4883 alignment = MIN (alignment, DECL_ALIGN (exp));
4884 else if (TREE_TYPE (exp) != 0)
4885 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4887 if (integer_zerop (offset))
4890 if (offset != 0 && contains_placeholder_p (offset))
4891 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4895 *palignment = alignment / BITS_PER_UNIT;
4899 /* Subroutine of expand_exp: compute memory_usage from modifier. */
4900 static enum memory_use_mode
4901 get_memory_usage_from_modifier (modifier)
4902 enum expand_modifier modifier;
4908 return MEMORY_USE_RO;
4910 case EXPAND_MEMORY_USE_WO:
4911 return MEMORY_USE_WO;
4913 case EXPAND_MEMORY_USE_RW:
4914 return MEMORY_USE_RW;
4916 case EXPAND_MEMORY_USE_DONT:
4917 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
4918 MEMORY_USE_DONT, because they are modifiers to a call of
4919 expand_expr in the ADDR_EXPR case of expand_expr. */
4920 case EXPAND_CONST_ADDRESS:
4921 case EXPAND_INITIALIZER:
4922 return MEMORY_USE_DONT;
4923 case EXPAND_MEMORY_USE_BAD:
4929 /* Given an rtx VALUE that may contain additions and multiplications,
4930 return an equivalent value that just refers to a register or memory.
4931 This is done by generating instructions to perform the arithmetic
4932 and returning a pseudo-register containing the value.
4934 The returned value may be a REG, SUBREG, MEM or constant. */
4937 force_operand (value, target)
4940 register optab binoptab = 0;
4941 /* Use a temporary to force order of execution of calls to
4945 /* Use subtarget as the target for operand 0 of a binary operation. */
4946 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4948 /* Check for a PIC address load. */
4950 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
4951 && XEXP (value, 0) == pic_offset_table_rtx
4952 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
4953 || GET_CODE (XEXP (value, 1)) == LABEL_REF
4954 || GET_CODE (XEXP (value, 1)) == CONST))
4957 subtarget = gen_reg_rtx (GET_MODE (value));
4958 emit_move_insn (subtarget, value);
4962 if (GET_CODE (value) == PLUS)
4963 binoptab = add_optab;
4964 else if (GET_CODE (value) == MINUS)
4965 binoptab = sub_optab;
4966 else if (GET_CODE (value) == MULT)
4968 op2 = XEXP (value, 1);
4969 if (!CONSTANT_P (op2)
4970 && !(GET_CODE (op2) == REG && op2 != subtarget))
4972 tmp = force_operand (XEXP (value, 0), subtarget);
4973 return expand_mult (GET_MODE (value), tmp,
4974 force_operand (op2, NULL_RTX),
4980 op2 = XEXP (value, 1);
4981 if (!CONSTANT_P (op2)
4982 && !(GET_CODE (op2) == REG && op2 != subtarget))
4984 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4986 binoptab = add_optab;
4987 op2 = negate_rtx (GET_MODE (value), op2);
4990 /* Check for an addition with OP2 a constant integer and our first
4991 operand a PLUS of a virtual register and something else. In that
4992 case, we want to emit the sum of the virtual register and the
4993 constant first and then add the other value. This allows virtual
4994 register instantiation to simply modify the constant rather than
4995 creating another one around this addition. */
4996 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4997 && GET_CODE (XEXP (value, 0)) == PLUS
4998 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4999 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5000 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5002 rtx temp = expand_binop (GET_MODE (value), binoptab,
5003 XEXP (XEXP (value, 0), 0), op2,
5004 subtarget, 0, OPTAB_LIB_WIDEN);
5005 return expand_binop (GET_MODE (value), binoptab, temp,
5006 force_operand (XEXP (XEXP (value, 0), 1), 0),
5007 target, 0, OPTAB_LIB_WIDEN);
5010 tmp = force_operand (XEXP (value, 0), subtarget);
5011 return expand_binop (GET_MODE (value), binoptab, tmp,
5012 force_operand (op2, NULL_RTX),
5013 target, 0, OPTAB_LIB_WIDEN);
5014 /* We give UNSIGNEDP = 0 to expand_binop
5015 because the only operations we are expanding here are signed ones. */
5020 /* Subroutine of expand_expr:
5021 save the non-copied parts (LIST) of an expr (LHS), and return a list
5022 which can restore these values to their previous values,
5023 should something modify their storage. */
5026 save_noncopied_parts (lhs, list)
5033 for (tail = list; tail; tail = TREE_CHAIN (tail))
5034 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5035 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5038 tree part = TREE_VALUE (tail);
5039 tree part_type = TREE_TYPE (part);
5040 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5041 rtx target = assign_temp (part_type, 0, 1, 1);
5042 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5043 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5044 parts = tree_cons (to_be_saved,
5045 build (RTL_EXPR, part_type, NULL_TREE,
5048 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5053 /* Subroutine of expand_expr:
5054 record the non-copied parts (LIST) of an expr (LHS), and return a list
5055 which specifies the initial values of these parts. */
5058 init_noncopied_parts (lhs, list)
5065 for (tail = list; tail; tail = TREE_CHAIN (tail))
5066 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5067 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5070 tree part = TREE_VALUE (tail);
5071 tree part_type = TREE_TYPE (part);
5072 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5073 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5078 /* Subroutine of expand_expr: return nonzero iff there is no way that
5079 EXP can reference X, which is being modified. TOP_P is nonzero if this
5080 call is going to be used to determine whether we need a temporary
5081 for EXP, as opposed to a recursive call to this function.
5083 It is always safe for this routine to return zero since it merely
5084 searches for optimization opportunities. */
5087 safe_from_p (x, exp, top_p)
5094 static int save_expr_count;
5095 static int save_expr_size = 0;
5096 static tree *save_expr_rewritten;
5097 static tree save_expr_trees[256];
5100 /* If EXP has varying size, we MUST use a target since we currently
5101 have no way of allocating temporaries of variable size
5102 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5103 So we assume here that something at a higher level has prevented a
5104 clash. This is somewhat bogus, but the best we can do. Only
5105 do this when X is BLKmode and when we are at the top level. */
5106 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5107 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5108 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5109 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5110 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5112 && GET_MODE (x) == BLKmode))
5115 if (top_p && save_expr_size == 0)
5119 save_expr_count = 0;
5120 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5121 save_expr_rewritten = &save_expr_trees[0];
5123 rtn = safe_from_p (x, exp, 1);
5125 for (i = 0; i < save_expr_count; ++i)
5127 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5129 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5137 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5138 find the underlying pseudo. */
5139 if (GET_CODE (x) == SUBREG)
5142 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5146 /* If X is a location in the outgoing argument area, it is always safe. */
5147 if (GET_CODE (x) == MEM
5148 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5149 || (GET_CODE (XEXP (x, 0)) == PLUS
5150 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5153 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5156 exp_rtl = DECL_RTL (exp);
5163 if (TREE_CODE (exp) == TREE_LIST)
5164 return ((TREE_VALUE (exp) == 0
5165 || safe_from_p (x, TREE_VALUE (exp), 0))
5166 && (TREE_CHAIN (exp) == 0
5167 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5168 else if (TREE_CODE (exp) == ERROR_MARK)
5169 return 1; /* An already-visited SAVE_EXPR? */
5174 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5178 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5179 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5183 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5184 the expression. If it is set, we conflict iff we are that rtx or
5185 both are in memory. Otherwise, we check all operands of the
5186 expression recursively. */
5188 switch (TREE_CODE (exp))
5191 return (staticp (TREE_OPERAND (exp, 0))
5192 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5193 || TREE_STATIC (exp));
5196 if (GET_CODE (x) == MEM)
5201 exp_rtl = CALL_EXPR_RTL (exp);
5204 /* Assume that the call will clobber all hard registers and
5206 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5207 || GET_CODE (x) == MEM)
5214 /* If a sequence exists, we would have to scan every instruction
5215 in the sequence to see if it was safe. This is probably not
5217 if (RTL_EXPR_SEQUENCE (exp))
5220 exp_rtl = RTL_EXPR_RTL (exp);
5223 case WITH_CLEANUP_EXPR:
5224 exp_rtl = RTL_EXPR_RTL (exp);
5227 case CLEANUP_POINT_EXPR:
5228 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5231 exp_rtl = SAVE_EXPR_RTL (exp);
5235 /* This SAVE_EXPR might appear many times in the top-level
5236 safe_from_p() expression, and if it has a complex
5237 subexpression, examining it multiple times could result
5238 in a combinatorial explosion. E.g. on an Alpha
5239 running at least 200MHz, a Fortran test case compiled with
5240 optimization took about 28 minutes to compile -- even though
5241 it was only a few lines long, and the complicated line causing
5242 so much time to be spent in the earlier version of safe_from_p()
5243 had only 293 or so unique nodes.
5245 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5246 where it is so we can turn it back in the top-level safe_from_p()
5249 /* For now, don't bother re-sizing the array. */
5250 if (save_expr_count >= save_expr_size)
5252 save_expr_rewritten[save_expr_count++] = exp;
5254 nops = tree_code_length[(int) SAVE_EXPR];
5255 for (i = 0; i < nops; i++)
5257 tree operand = TREE_OPERAND (exp, i);
5258 if (operand == NULL_TREE)
5260 TREE_SET_CODE (exp, ERROR_MARK);
5261 if (!safe_from_p (x, operand, 0))
5263 TREE_SET_CODE (exp, SAVE_EXPR);
5265 TREE_SET_CODE (exp, ERROR_MARK);
5269 /* The only operand we look at is operand 1. The rest aren't
5270 part of the expression. */
5271 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5273 case METHOD_CALL_EXPR:
5274 /* This takes a rtx argument, but shouldn't appear here. */
5281 /* If we have an rtx, we do not need to scan our operands. */
5285 nops = tree_code_length[(int) TREE_CODE (exp)];
5286 for (i = 0; i < nops; i++)
5287 if (TREE_OPERAND (exp, i) != 0
5288 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5292 /* If we have an rtl, find any enclosed object. Then see if we conflict
5296 if (GET_CODE (exp_rtl) == SUBREG)
5298 exp_rtl = SUBREG_REG (exp_rtl);
5299 if (GET_CODE (exp_rtl) == REG
5300 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5304 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5305 are memory and EXP is not readonly. */
5306 return ! (rtx_equal_p (x, exp_rtl)
5307 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5308 && ! TREE_READONLY (exp)));
5311 /* If we reach here, it is safe. */
5315 /* Subroutine of expand_expr: return nonzero iff EXP is an
5316 expression whose type is statically determinable. */
5322 if (TREE_CODE (exp) == PARM_DECL
5323 || TREE_CODE (exp) == VAR_DECL
5324 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5325 || TREE_CODE (exp) == COMPONENT_REF
5326 || TREE_CODE (exp) == ARRAY_REF)
5331 /* Subroutine of expand_expr: return rtx if EXP is a
5332 variable or parameter; else return 0. */
5339 switch (TREE_CODE (exp))
5343 return DECL_RTL (exp);
5349 #ifdef MAX_INTEGER_COMPUTATION_MODE
5351 check_max_integer_computation_mode (exp)
5354 enum tree_code code = TREE_CODE (exp);
5355 enum machine_mode mode;
5357 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5358 if (code == NOP_EXPR
5359 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5362 /* First check the type of the overall operation. We need only look at
5363 unary, binary and relational operations. */
5364 if (TREE_CODE_CLASS (code) == '1'
5365 || TREE_CODE_CLASS (code) == '2'
5366 || TREE_CODE_CLASS (code) == '<')
5368 mode = TYPE_MODE (TREE_TYPE (exp));
5369 if (GET_MODE_CLASS (mode) == MODE_INT
5370 && mode > MAX_INTEGER_COMPUTATION_MODE)
5371 fatal ("unsupported wide integer operation");
5374 /* Check operand of a unary op. */
5375 if (TREE_CODE_CLASS (code) == '1')
5377 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5378 if (GET_MODE_CLASS (mode) == MODE_INT
5379 && mode > MAX_INTEGER_COMPUTATION_MODE)
5380 fatal ("unsupported wide integer operation");
5383 /* Check operands of a binary/comparison op. */
5384 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5386 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5387 if (GET_MODE_CLASS (mode) == MODE_INT
5388 && mode > MAX_INTEGER_COMPUTATION_MODE)
5389 fatal ("unsupported wide integer operation");
5391 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5392 if (GET_MODE_CLASS (mode) == MODE_INT
5393 && mode > MAX_INTEGER_COMPUTATION_MODE)
5394 fatal ("unsupported wide integer operation");
5400 /* expand_expr: generate code for computing expression EXP.
5401 An rtx for the computed value is returned. The value is never null.
5402 In the case of a void EXP, const0_rtx is returned.
5404 The value may be stored in TARGET if TARGET is nonzero.
5405 TARGET is just a suggestion; callers must assume that
5406 the rtx returned may not be the same as TARGET.
5408 If TARGET is CONST0_RTX, it means that the value will be ignored.
5410 If TMODE is not VOIDmode, it suggests generating the
5411 result in mode TMODE. But this is done only when convenient.
5412 Otherwise, TMODE is ignored and the value generated in its natural mode.
5413 TMODE is just a suggestion; callers must assume that
5414 the rtx returned may not have mode TMODE.
5416 Note that TARGET may have neither TMODE nor MODE. In that case, it
5417 probably will not be used.
5419 If MODIFIER is EXPAND_SUM then when EXP is an addition
5420 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5421 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5422 products as above, or REG or MEM, or constant.
5423 Ordinarily in such cases we would output mul or add instructions
5424 and then return a pseudo reg containing the sum.
5426 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5427 it also marks a label as absolutely required (it can't be dead).
5428 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5429 This is used for outputting expressions used in initializers.
5431 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5432 with a constant address even if that address is not normally legitimate.
5433 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
5436 expand_expr (exp, target, tmode, modifier)
5439 enum machine_mode tmode;
5440 enum expand_modifier modifier;
5442 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
5443 This is static so it will be accessible to our recursive callees. */
5444 static tree placeholder_list = 0;
5445 register rtx op0, op1, temp;
5446 tree type = TREE_TYPE (exp);
5447 int unsignedp = TREE_UNSIGNED (type);
5448 register enum machine_mode mode = TYPE_MODE (type);
5449 register enum tree_code code = TREE_CODE (exp);
5451 /* Use subtarget as the target for operand 0 of a binary operation. */
5452 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5453 rtx original_target = target;
5454 int ignore = (target == const0_rtx
5455 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5456 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5457 || code == COND_EXPR)
5458 && TREE_CODE (type) == VOID_TYPE));
5460 /* Used by check-memory-usage to make modifier read only. */
5461 enum expand_modifier ro_modifier;
5463 /* Make a read-only version of the modifier. */
5464 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5465 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5466 ro_modifier = modifier;
5468 ro_modifier = EXPAND_NORMAL;
5470 /* Don't use hard regs as subtargets, because the combiner
5471 can only handle pseudo regs. */
5472 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5474 /* Avoid subtargets inside loops,
5475 since they hide some invariant expressions. */
5476 if (preserve_subexpressions_p ())
5479 /* If we are going to ignore this result, we need only do something
5480 if there is a side-effect somewhere in the expression. If there
5481 is, short-circuit the most common cases here. Note that we must
5482 not call expand_expr with anything but const0_rtx in case this
5483 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5487 if (! TREE_SIDE_EFFECTS (exp))
5490 /* Ensure we reference a volatile object even if value is ignored. */
5491 if (TREE_THIS_VOLATILE (exp)
5492 && TREE_CODE (exp) != FUNCTION_DECL
5493 && mode != VOIDmode && mode != BLKmode)
5495 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5496 if (GET_CODE (temp) == MEM)
5497 temp = copy_to_reg (temp);
5501 if (TREE_CODE_CLASS (code) == '1')
5502 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5503 VOIDmode, ro_modifier);
5504 else if (TREE_CODE_CLASS (code) == '2'
5505 || TREE_CODE_CLASS (code) == '<')
5507 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5508 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5511 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5512 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5513 /* If the second operand has no side effects, just evaluate
5515 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5516 VOIDmode, ro_modifier);
5521 #ifdef MAX_INTEGER_COMPUTATION_MODE
5523 && TREE_CODE (exp) != INTEGER_CST
5524 && TREE_CODE (exp) != PARM_DECL
5525 && TREE_CODE (exp) != ARRAY_REF
5526 && TREE_CODE (exp) != COMPONENT_REF
5527 && TREE_CODE (exp) != BIT_FIELD_REF
5528 && TREE_CODE (exp) != INDIRECT_REF
5529 && TREE_CODE (exp) != VAR_DECL)
5531 enum machine_mode mode = GET_MODE (target);
5533 if (GET_MODE_CLASS (mode) == MODE_INT
5534 && mode > MAX_INTEGER_COMPUTATION_MODE)
5535 fatal ("unsupported wide integer operation");
5538 if (TREE_CODE (exp) != INTEGER_CST
5539 && TREE_CODE (exp) != PARM_DECL
5540 && TREE_CODE (exp) != ARRAY_REF
5541 && TREE_CODE (exp) != COMPONENT_REF
5542 && TREE_CODE (exp) != BIT_FIELD_REF
5543 && TREE_CODE (exp) != INDIRECT_REF
5544 && TREE_CODE (exp) != VAR_DECL
5545 && GET_MODE_CLASS (tmode) == MODE_INT
5546 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5547 fatal ("unsupported wide integer operation");
5549 check_max_integer_computation_mode (exp);
5552 /* If will do cse, generate all results into pseudo registers
5553 since 1) that allows cse to find more things
5554 and 2) otherwise cse could produce an insn the machine
5557 if (! cse_not_expected && mode != BLKmode && target
5558 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5565 tree function = decl_function_context (exp);
5566 /* Handle using a label in a containing function. */
5567 if (function != current_function_decl
5568 && function != inline_function_decl && function != 0)
5570 struct function *p = find_function_data (function);
5571 /* Allocate in the memory associated with the function
5572 that the label is in. */
5573 push_obstacks (p->function_obstack,
5574 p->function_maybepermanent_obstack);
5576 p->forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5581 else if (modifier == EXPAND_INITIALIZER)
5582 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5583 label_rtx (exp), forced_labels);
5584 temp = gen_rtx_MEM (FUNCTION_MODE,
5585 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5586 if (function != current_function_decl
5587 && function != inline_function_decl && function != 0)
5588 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5593 if (DECL_RTL (exp) == 0)
5595 error_with_decl (exp, "prior parameter's size depends on `%s'");
5596 return CONST0_RTX (mode);
5599 /* ... fall through ... */
5602 /* If a static var's type was incomplete when the decl was written,
5603 but the type is complete now, lay out the decl now. */
5604 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5605 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5607 push_obstacks_nochange ();
5608 end_temporary_allocation ();
5609 layout_decl (exp, 0);
5610 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5614 /* Although static-storage variables start off initialized, according to
5615 ANSI C, a memcpy could overwrite them with uninitialized values. So
5616 we check them too. This also lets us check for read-only variables
5617 accessed via a non-const declaration, in case it won't be detected
5618 any other way (e.g., in an embedded system or OS kernel without
5621 Aggregates are not checked here; they're handled elsewhere. */
5622 if (current_function_check_memory_usage && code == VAR_DECL
5623 && GET_CODE (DECL_RTL (exp)) == MEM
5624 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5626 enum memory_use_mode memory_usage;
5627 memory_usage = get_memory_usage_from_modifier (modifier);
5629 if (memory_usage != MEMORY_USE_DONT)
5630 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5631 XEXP (DECL_RTL (exp), 0), ptr_mode,
5632 GEN_INT (int_size_in_bytes (type)),
5633 TYPE_MODE (sizetype),
5634 GEN_INT (memory_usage),
5635 TYPE_MODE (integer_type_node));
5638 /* ... fall through ... */
5642 if (DECL_RTL (exp) == 0)
5645 /* Ensure variable marked as used even if it doesn't go through
5646 a parser. If it hasn't be used yet, write out an external
5648 if (! TREE_USED (exp))
5650 assemble_external (exp);
5651 TREE_USED (exp) = 1;
5654 /* Show we haven't gotten RTL for this yet. */
5657 /* Handle variables inherited from containing functions. */
5658 context = decl_function_context (exp);
5660 /* We treat inline_function_decl as an alias for the current function
5661 because that is the inline function whose vars, types, etc.
5662 are being merged into the current function.
5663 See expand_inline_function. */
5665 if (context != 0 && context != current_function_decl
5666 && context != inline_function_decl
5667 /* If var is static, we don't need a static chain to access it. */
5668 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5669 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5673 /* Mark as non-local and addressable. */
5674 DECL_NONLOCAL (exp) = 1;
5675 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5677 mark_addressable (exp);
5678 if (GET_CODE (DECL_RTL (exp)) != MEM)
5680 addr = XEXP (DECL_RTL (exp), 0);
5681 if (GET_CODE (addr) == MEM)
5682 addr = gen_rtx_MEM (Pmode,
5683 fix_lexical_addr (XEXP (addr, 0), exp));
5685 addr = fix_lexical_addr (addr, exp);
5686 temp = change_address (DECL_RTL (exp), mode, addr);
5689 /* This is the case of an array whose size is to be determined
5690 from its initializer, while the initializer is still being parsed.
5693 else if (GET_CODE (DECL_RTL (exp)) == MEM
5694 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5695 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5696 XEXP (DECL_RTL (exp), 0));
5698 /* If DECL_RTL is memory, we are in the normal case and either
5699 the address is not valid or it is not a register and -fforce-addr
5700 is specified, get the address into a register. */
5702 else if (GET_CODE (DECL_RTL (exp)) == MEM
5703 && modifier != EXPAND_CONST_ADDRESS
5704 && modifier != EXPAND_SUM
5705 && modifier != EXPAND_INITIALIZER
5706 && (! memory_address_p (DECL_MODE (exp),
5707 XEXP (DECL_RTL (exp), 0))
5709 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5710 temp = change_address (DECL_RTL (exp), VOIDmode,
5711 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5713 /* If we got something, return it. But first, set the alignment
5714 the address is a register. */
5717 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5718 mark_reg_pointer (XEXP (temp, 0),
5719 DECL_ALIGN (exp) / BITS_PER_UNIT);
5724 /* If the mode of DECL_RTL does not match that of the decl, it
5725 must be a promoted value. We return a SUBREG of the wanted mode,
5726 but mark it so that we know that it was already extended. */
5728 if (GET_CODE (DECL_RTL (exp)) == REG
5729 && GET_MODE (DECL_RTL (exp)) != mode)
5731 /* Get the signedness used for this variable. Ensure we get the
5732 same mode we got when the variable was declared. */
5733 if (GET_MODE (DECL_RTL (exp))
5734 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5737 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
5738 SUBREG_PROMOTED_VAR_P (temp) = 1;
5739 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5743 return DECL_RTL (exp);
5746 return immed_double_const (TREE_INT_CST_LOW (exp),
5747 TREE_INT_CST_HIGH (exp),
5751 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5752 EXPAND_MEMORY_USE_BAD);
5755 /* If optimized, generate immediate CONST_DOUBLE
5756 which will be turned into memory by reload if necessary.
5758 We used to force a register so that loop.c could see it. But
5759 this does not allow gen_* patterns to perform optimizations with
5760 the constants. It also produces two insns in cases like "x = 1.0;".
5761 On most machines, floating-point constants are not permitted in
5762 many insns, so we'd end up copying it to a register in any case.
5764 Now, we do the copying in expand_binop, if appropriate. */
5765 return immed_real_const (exp);
5769 if (! TREE_CST_RTL (exp))
5770 output_constant_def (exp);
5772 /* TREE_CST_RTL probably contains a constant address.
5773 On RISC machines where a constant address isn't valid,
5774 make some insns to get that address into a register. */
5775 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5776 && modifier != EXPAND_CONST_ADDRESS
5777 && modifier != EXPAND_INITIALIZER
5778 && modifier != EXPAND_SUM
5779 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5781 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5782 return change_address (TREE_CST_RTL (exp), VOIDmode,
5783 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5784 return TREE_CST_RTL (exp);
5786 case EXPR_WITH_FILE_LOCATION:
5789 char *saved_input_filename = input_filename;
5790 int saved_lineno = lineno;
5791 input_filename = EXPR_WFL_FILENAME (exp);
5792 lineno = EXPR_WFL_LINENO (exp);
5793 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
5794 emit_line_note (input_filename, lineno);
5795 /* Possibly avoid switching back and force here */
5796 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
5797 input_filename = saved_input_filename;
5798 lineno = saved_lineno;
5803 context = decl_function_context (exp);
5805 /* If this SAVE_EXPR was at global context, assume we are an
5806 initialization function and move it into our context. */
5808 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
5810 /* We treat inline_function_decl as an alias for the current function
5811 because that is the inline function whose vars, types, etc.
5812 are being merged into the current function.
5813 See expand_inline_function. */
5814 if (context == current_function_decl || context == inline_function_decl)
5817 /* If this is non-local, handle it. */
5820 /* The following call just exists to abort if the context is
5821 not of a containing function. */
5822 find_function_data (context);
5824 temp = SAVE_EXPR_RTL (exp);
5825 if (temp && GET_CODE (temp) == REG)
5827 put_var_into_stack (exp);
5828 temp = SAVE_EXPR_RTL (exp);
5830 if (temp == 0 || GET_CODE (temp) != MEM)
5832 return change_address (temp, mode,
5833 fix_lexical_addr (XEXP (temp, 0), exp));
5835 if (SAVE_EXPR_RTL (exp) == 0)
5837 if (mode == VOIDmode)
5840 temp = assign_temp (type, 3, 0, 0);
5842 SAVE_EXPR_RTL (exp) = temp;
5843 if (!optimize && GET_CODE (temp) == REG)
5844 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
5847 /* If the mode of TEMP does not match that of the expression, it
5848 must be a promoted value. We pass store_expr a SUBREG of the
5849 wanted mode but mark it so that we know that it was already
5850 extended. Note that `unsignedp' was modified above in
5853 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5855 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5856 SUBREG_PROMOTED_VAR_P (temp) = 1;
5857 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5860 if (temp == const0_rtx)
5861 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5862 EXPAND_MEMORY_USE_BAD);
5864 store_expr (TREE_OPERAND (exp, 0), temp, 0);
5866 TREE_USED (exp) = 1;
5869 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5870 must be a promoted value. We return a SUBREG of the wanted mode,
5871 but mark it so that we know that it was already extended. */
5873 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5874 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5876 /* Compute the signedness and make the proper SUBREG. */
5877 promote_mode (type, mode, &unsignedp, 0);
5878 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5879 SUBREG_PROMOTED_VAR_P (temp) = 1;
5880 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5884 return SAVE_EXPR_RTL (exp);
5889 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5890 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5894 case PLACEHOLDER_EXPR:
5896 tree placeholder_expr;
5898 /* If there is an object on the head of the placeholder list,
5899 see if some object in it of type TYPE or a pointer to it. For
5900 further information, see tree.def. */
5901 for (placeholder_expr = placeholder_list;
5902 placeholder_expr != 0;
5903 placeholder_expr = TREE_CHAIN (placeholder_expr))
5905 tree need_type = TYPE_MAIN_VARIANT (type);
5907 tree old_list = placeholder_list;
5910 /* Find the outermost reference that is of the type we want.
5911 If none, see if any object has a type that is a pointer to
5912 the type we want. */
5913 for (elt = TREE_PURPOSE (placeholder_expr);
5914 elt != 0 && object == 0;
5916 = ((TREE_CODE (elt) == COMPOUND_EXPR
5917 || TREE_CODE (elt) == COND_EXPR)
5918 ? TREE_OPERAND (elt, 1)
5919 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5920 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5921 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5922 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5923 ? TREE_OPERAND (elt, 0) : 0))
5924 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5927 for (elt = TREE_PURPOSE (placeholder_expr);
5928 elt != 0 && object == 0;
5930 = ((TREE_CODE (elt) == COMPOUND_EXPR
5931 || TREE_CODE (elt) == COND_EXPR)
5932 ? TREE_OPERAND (elt, 1)
5933 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5934 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5935 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5936 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5937 ? TREE_OPERAND (elt, 0) : 0))
5938 if (POINTER_TYPE_P (TREE_TYPE (elt))
5939 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
5941 object = build1 (INDIRECT_REF, need_type, elt);
5945 /* Expand this object skipping the list entries before
5946 it was found in case it is also a PLACEHOLDER_EXPR.
5947 In that case, we want to translate it using subsequent
5949 placeholder_list = TREE_CHAIN (placeholder_expr);
5950 temp = expand_expr (object, original_target, tmode,
5952 placeholder_list = old_list;
5958 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5961 case WITH_RECORD_EXPR:
5962 /* Put the object on the placeholder list, expand our first operand,
5963 and pop the list. */
5964 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5966 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
5967 tmode, ro_modifier);
5968 placeholder_list = TREE_CHAIN (placeholder_list);
5972 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
5973 expand_goto (TREE_OPERAND (exp, 0));
5975 expand_computed_goto (TREE_OPERAND (exp, 0));
5979 expand_exit_loop_if_false (NULL_PTR,
5980 invert_truthvalue (TREE_OPERAND (exp, 0)));
5983 case LABELED_BLOCK_EXPR:
5984 if (LABELED_BLOCK_BODY (exp))
5985 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
5986 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
5989 case EXIT_BLOCK_EXPR:
5990 if (EXIT_BLOCK_RETURN (exp))
5991 really_sorry ("returned value in block_exit_expr");
5992 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
5997 expand_start_loop (1);
5998 expand_expr_stmt (TREE_OPERAND (exp, 0));
6006 tree vars = TREE_OPERAND (exp, 0);
6007 int vars_need_expansion = 0;
6009 /* Need to open a binding contour here because
6010 if there are any cleanups they must be contained here. */
6011 expand_start_bindings (0);
6013 /* Mark the corresponding BLOCK for output in its proper place. */
6014 if (TREE_OPERAND (exp, 2) != 0
6015 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6016 insert_block (TREE_OPERAND (exp, 2));
6018 /* If VARS have not yet been expanded, expand them now. */
6021 if (DECL_RTL (vars) == 0)
6023 vars_need_expansion = 1;
6026 expand_decl_init (vars);
6027 vars = TREE_CHAIN (vars);
6030 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6032 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6038 if (RTL_EXPR_SEQUENCE (exp))
6040 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6042 emit_insns (RTL_EXPR_SEQUENCE (exp));
6043 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6045 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6046 free_temps_for_rtl_expr (exp);
6047 return RTL_EXPR_RTL (exp);
6050 /* If we don't need the result, just ensure we evaluate any
6055 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6056 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6057 EXPAND_MEMORY_USE_BAD);
6061 /* All elts simple constants => refer to a constant in memory. But
6062 if this is a non-BLKmode mode, let it store a field at a time
6063 since that should make a CONST_INT or CONST_DOUBLE when we
6064 fold. Likewise, if we have a target we can use, it is best to
6065 store directly into the target unless the type is large enough
6066 that memcpy will be used. If we are making an initializer and
6067 all operands are constant, put it in memory as well. */
6068 else if ((TREE_STATIC (exp)
6069 && ((mode == BLKmode
6070 && ! (target != 0 && safe_from_p (target, exp, 1)))
6071 || TREE_ADDRESSABLE (exp)
6072 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
6073 && (move_by_pieces_ninsns
6074 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
6075 TYPE_ALIGN (type) / BITS_PER_UNIT)
6077 && ! mostly_zeros_p (exp))))
6078 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6080 rtx constructor = output_constant_def (exp);
6081 if (modifier != EXPAND_CONST_ADDRESS
6082 && modifier != EXPAND_INITIALIZER
6083 && modifier != EXPAND_SUM
6084 && (! memory_address_p (GET_MODE (constructor),
6085 XEXP (constructor, 0))
6087 && GET_CODE (XEXP (constructor, 0)) != REG)))
6088 constructor = change_address (constructor, VOIDmode,
6089 XEXP (constructor, 0));
6095 /* Handle calls that pass values in multiple non-contiguous
6096 locations. The Irix 6 ABI has examples of this. */
6097 if (target == 0 || ! safe_from_p (target, exp, 1)
6098 || GET_CODE (target) == PARALLEL)
6100 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6101 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6103 target = assign_temp (type, 0, 1, 1);
6106 if (TREE_READONLY (exp))
6108 if (GET_CODE (target) == MEM)
6109 target = copy_rtx (target);
6111 RTX_UNCHANGING_P (target) = 1;
6114 store_constructor (exp, target, 0);
6120 tree exp1 = TREE_OPERAND (exp, 0);
6123 tree string = string_constant (exp1, &index);
6126 /* Try to optimize reads from const strings. */
6128 && TREE_CODE (string) == STRING_CST
6129 && TREE_CODE (index) == INTEGER_CST
6130 && !TREE_INT_CST_HIGH (index)
6131 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
6132 && GET_MODE_CLASS (mode) == MODE_INT
6133 && GET_MODE_SIZE (mode) == 1
6134 && modifier != EXPAND_MEMORY_USE_WO)
6135 return GEN_INT (TREE_STRING_POINTER (string)[i]);
6137 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6138 op0 = memory_address (mode, op0);
6140 if (current_function_check_memory_usage && !AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6142 enum memory_use_mode memory_usage;
6143 memory_usage = get_memory_usage_from_modifier (modifier);
6145 if (memory_usage != MEMORY_USE_DONT)
6147 in_check_memory_usage = 1;
6148 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6150 GEN_INT (int_size_in_bytes (type)),
6151 TYPE_MODE (sizetype),
6152 GEN_INT (memory_usage),
6153 TYPE_MODE (integer_type_node));
6154 in_check_memory_usage = 0;
6158 temp = gen_rtx_MEM (mode, op0);
6159 /* If address was computed by addition,
6160 mark this as an element of an aggregate. */
6161 if (TREE_CODE (exp1) == PLUS_EXPR
6162 || (TREE_CODE (exp1) == SAVE_EXPR
6163 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
6164 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
6165 || (TREE_CODE (exp1) == ADDR_EXPR
6166 && (exp2 = TREE_OPERAND (exp1, 0))
6167 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
6168 MEM_IN_STRUCT_P (temp) = 1;
6170 /* If the pointer is actually a REFERENCE_TYPE, this could be pointing
6171 into some aggregate too. In theory we could fold this into the
6172 previous check and use rtx_addr_varies_p there too.
6174 However, this seems safer. */
6175 if (!MEM_IN_STRUCT_P (temp)
6176 && (TREE_CODE (TREE_TYPE (exp1)) == REFERENCE_TYPE
6177 /* This may have been an array reference to the first element
6178 that was optimized away from being an addition. */
6179 || (TREE_CODE (exp1) == NOP_EXPR
6180 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp1, 0)))
6182 || ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp1, 0)))
6184 && (AGGREGATE_TYPE_P
6185 (TREE_TYPE (TREE_TYPE
6186 (TREE_OPERAND (exp1, 0))))))))))
6187 MEM_IN_STRUCT_P (temp) = ! rtx_addr_varies_p (temp);
6189 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6190 MEM_ALIAS_SET (temp) = get_alias_set (exp);
6192 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6193 here, because, in C and C++, the fact that a location is accessed
6194 through a pointer to const does not mean that the value there can
6195 never change. Languages where it can never change should
6196 also set TREE_STATIC. */
6197 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6202 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6206 tree array = TREE_OPERAND (exp, 0);
6207 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6208 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6209 tree index = TREE_OPERAND (exp, 1);
6210 tree index_type = TREE_TYPE (index);
6213 /* Optimize the special-case of a zero lower bound.
6215 We convert the low_bound to sizetype to avoid some problems
6216 with constant folding. (E.g. suppose the lower bound is 1,
6217 and its mode is QI. Without the conversion, (ARRAY
6218 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6219 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
6221 But sizetype isn't quite right either (especially if
6222 the lowbound is negative). FIXME */
6224 if (! integer_zerop (low_bound))
6225 index = fold (build (MINUS_EXPR, index_type, index,
6226 convert (sizetype, low_bound)));
6228 /* Fold an expression like: "foo"[2].
6229 This is not done in fold so it won't happen inside &.
6230 Don't fold if this is for wide characters since it's too
6231 difficult to do correctly and this is a very rare case. */
6233 if (TREE_CODE (array) == STRING_CST
6234 && TREE_CODE (index) == INTEGER_CST
6235 && !TREE_INT_CST_HIGH (index)
6236 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
6237 && GET_MODE_CLASS (mode) == MODE_INT
6238 && GET_MODE_SIZE (mode) == 1)
6239 return GEN_INT (TREE_STRING_POINTER (array)[i]);
6241 /* If this is a constant index into a constant array,
6242 just get the value from the array. Handle both the cases when
6243 we have an explicit constructor and when our operand is a variable
6244 that was declared const. */
6246 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
6248 if (TREE_CODE (index) == INTEGER_CST
6249 && TREE_INT_CST_HIGH (index) == 0)
6251 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
6253 i = TREE_INT_CST_LOW (index);
6255 elem = TREE_CHAIN (elem);
6257 return expand_expr (fold (TREE_VALUE (elem)), target,
6258 tmode, ro_modifier);
6262 else if (optimize >= 1
6263 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6264 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6265 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6267 if (TREE_CODE (index) == INTEGER_CST)
6269 tree init = DECL_INITIAL (array);
6271 i = TREE_INT_CST_LOW (index);
6272 if (TREE_CODE (init) == CONSTRUCTOR)
6274 tree elem = CONSTRUCTOR_ELTS (init);
6277 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
6278 elem = TREE_CHAIN (elem);
6280 return expand_expr (fold (TREE_VALUE (elem)), target,
6281 tmode, ro_modifier);
6283 else if (TREE_CODE (init) == STRING_CST
6284 && TREE_INT_CST_HIGH (index) == 0
6285 && (TREE_INT_CST_LOW (index)
6286 < TREE_STRING_LENGTH (init)))
6288 (TREE_STRING_POINTER
6289 (init)[TREE_INT_CST_LOW (index)]));
6294 /* ... fall through ... */
6298 /* If the operand is a CONSTRUCTOR, we can just extract the
6299 appropriate field if it is present. Don't do this if we have
6300 already written the data since we want to refer to that copy
6301 and varasm.c assumes that's what we'll do. */
6302 if (code != ARRAY_REF
6303 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6304 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6308 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6309 elt = TREE_CHAIN (elt))
6310 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6311 /* We can normally use the value of the field in the
6312 CONSTRUCTOR. However, if this is a bitfield in
6313 an integral mode that we can fit in a HOST_WIDE_INT,
6314 we must mask only the number of bits in the bitfield,
6315 since this is done implicitly by the constructor. If
6316 the bitfield does not meet either of those conditions,
6317 we can't do this optimization. */
6318 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6319 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6321 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6322 <= HOST_BITS_PER_WIDE_INT))))
6324 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6325 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6327 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
6329 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6331 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6332 op0 = expand_and (op0, op1, target);
6336 enum machine_mode imode
6337 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6339 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6342 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6344 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6354 enum machine_mode mode1;
6360 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6361 &mode1, &unsignedp, &volatilep,
6364 /* If we got back the original object, something is wrong. Perhaps
6365 we are evaluating an expression too early. In any event, don't
6366 infinitely recurse. */
6370 /* If TEM's type is a union of variable size, pass TARGET to the inner
6371 computation, since it will need a temporary and TARGET is known
6372 to have to do. This occurs in unchecked conversion in Ada. */
6374 op0 = expand_expr (tem,
6375 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6376 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6378 ? target : NULL_RTX),
6380 modifier == EXPAND_INITIALIZER
6381 ? modifier : EXPAND_NORMAL);
6383 /* If this is a constant, put it into a register if it is a
6384 legitimate constant and memory if it isn't. */
6385 if (CONSTANT_P (op0))
6387 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6388 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
6389 op0 = force_reg (mode, op0);
6391 op0 = validize_mem (force_const_mem (mode, op0));
6396 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6398 if (GET_CODE (op0) != MEM)
6401 if (GET_MODE (offset_rtx) != ptr_mode)
6403 #ifdef POINTERS_EXTEND_UNSIGNED
6404 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6406 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6410 if (GET_CODE (op0) == MEM
6411 && GET_MODE (op0) == BLKmode
6413 && (bitpos % bitsize) == 0
6414 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6415 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6417 rtx temp = change_address (op0, mode1,
6418 plus_constant (XEXP (op0, 0),
6421 if (GET_CODE (XEXP (temp, 0)) == REG)
6424 op0 = change_address (op0, mode1,
6425 force_reg (GET_MODE (XEXP (temp, 0)),
6431 op0 = change_address (op0, VOIDmode,
6432 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6433 force_reg (ptr_mode, offset_rtx)));
6436 /* Don't forget about volatility even if this is a bitfield. */
6437 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6439 op0 = copy_rtx (op0);
6440 MEM_VOLATILE_P (op0) = 1;
6443 /* Check the access. */
6444 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
6446 enum memory_use_mode memory_usage;
6447 memory_usage = get_memory_usage_from_modifier (modifier);
6449 if (memory_usage != MEMORY_USE_DONT)
6454 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6455 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6457 /* Check the access right of the pointer. */
6458 if (size > BITS_PER_UNIT)
6459 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6461 GEN_INT (size / BITS_PER_UNIT),
6462 TYPE_MODE (sizetype),
6463 GEN_INT (memory_usage),
6464 TYPE_MODE (integer_type_node));
6468 /* In cases where an aligned union has an unaligned object
6469 as a field, we might be extracting a BLKmode value from
6470 an integer-mode (e.g., SImode) object. Handle this case
6471 by doing the extract into an object as wide as the field
6472 (which we know to be the width of a basic mode), then
6473 storing into memory, and changing the mode to BLKmode.
6474 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6475 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6476 if (mode1 == VOIDmode
6477 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6478 || (modifier != EXPAND_CONST_ADDRESS
6479 && modifier != EXPAND_INITIALIZER
6480 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6481 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6482 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6483 /* If the field isn't aligned enough to fetch as a memref,
6484 fetch it as a bit field. */
6485 || (SLOW_UNALIGNED_ACCESS
6486 && ((TYPE_ALIGN (TREE_TYPE (tem)) < (unsigned int) GET_MODE_ALIGNMENT (mode))
6487 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
6489 enum machine_mode ext_mode = mode;
6491 if (ext_mode == BLKmode)
6492 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6494 if (ext_mode == BLKmode)
6496 /* In this case, BITPOS must start at a byte boundary and
6497 TARGET, if specified, must be a MEM. */
6498 if (GET_CODE (op0) != MEM
6499 || (target != 0 && GET_CODE (target) != MEM)
6500 || bitpos % BITS_PER_UNIT != 0)
6503 op0 = change_address (op0, VOIDmode,
6504 plus_constant (XEXP (op0, 0),
6505 bitpos / BITS_PER_UNIT));
6507 target = assign_temp (type, 0, 1, 1);
6509 emit_block_move (target, op0,
6510 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6517 op0 = validize_mem (op0);
6519 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6520 mark_reg_pointer (XEXP (op0, 0), alignment);
6522 op0 = extract_bit_field (op0, bitsize, bitpos,
6523 unsignedp, target, ext_mode, ext_mode,
6525 int_size_in_bytes (TREE_TYPE (tem)));
6527 /* If the result is a record type and BITSIZE is narrower than
6528 the mode of OP0, an integral mode, and this is a big endian
6529 machine, we must put the field into the high-order bits. */
6530 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6531 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6532 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6533 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6534 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6538 if (mode == BLKmode)
6540 rtx new = assign_stack_temp (ext_mode,
6541 bitsize / BITS_PER_UNIT, 0);
6543 emit_move_insn (new, op0);
6544 op0 = copy_rtx (new);
6545 PUT_MODE (op0, BLKmode);
6546 MEM_IN_STRUCT_P (op0) = 1;
6552 /* If the result is BLKmode, use that to access the object
6554 if (mode == BLKmode)
6557 /* Get a reference to just this component. */
6558 if (modifier == EXPAND_CONST_ADDRESS
6559 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6560 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6561 (bitpos / BITS_PER_UNIT)));
6563 op0 = change_address (op0, mode1,
6564 plus_constant (XEXP (op0, 0),
6565 (bitpos / BITS_PER_UNIT)));
6567 if (GET_CODE (op0) == MEM)
6568 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6570 if (GET_CODE (XEXP (op0, 0)) == REG)
6571 mark_reg_pointer (XEXP (op0, 0), alignment);
6573 MEM_IN_STRUCT_P (op0) = 1;
6574 MEM_VOLATILE_P (op0) |= volatilep;
6575 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6576 || modifier == EXPAND_CONST_ADDRESS
6577 || modifier == EXPAND_INITIALIZER)
6579 else if (target == 0)
6580 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6582 convert_move (target, op0, unsignedp);
6586 /* Intended for a reference to a buffer of a file-object in Pascal.
6587 But it's not certain that a special tree code will really be
6588 necessary for these. INDIRECT_REF might work for them. */
6594 /* Pascal set IN expression.
6597 rlo = set_low - (set_low%bits_per_word);
6598 the_word = set [ (index - rlo)/bits_per_word ];
6599 bit_index = index % bits_per_word;
6600 bitmask = 1 << bit_index;
6601 return !!(the_word & bitmask); */
6603 tree set = TREE_OPERAND (exp, 0);
6604 tree index = TREE_OPERAND (exp, 1);
6605 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6606 tree set_type = TREE_TYPE (set);
6607 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6608 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6609 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6610 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6611 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6612 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6613 rtx setaddr = XEXP (setval, 0);
6614 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6616 rtx diff, quo, rem, addr, bit, result;
6618 preexpand_calls (exp);
6620 /* If domain is empty, answer is no. Likewise if index is constant
6621 and out of bounds. */
6622 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6623 && TREE_CODE (set_low_bound) == INTEGER_CST
6624 && tree_int_cst_lt (set_high_bound, set_low_bound))
6625 || (TREE_CODE (index) == INTEGER_CST
6626 && TREE_CODE (set_low_bound) == INTEGER_CST
6627 && tree_int_cst_lt (index, set_low_bound))
6628 || (TREE_CODE (set_high_bound) == INTEGER_CST
6629 && TREE_CODE (index) == INTEGER_CST
6630 && tree_int_cst_lt (set_high_bound, index))))
6634 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6636 /* If we get here, we have to generate the code for both cases
6637 (in range and out of range). */
6639 op0 = gen_label_rtx ();
6640 op1 = gen_label_rtx ();
6642 if (! (GET_CODE (index_val) == CONST_INT
6643 && GET_CODE (lo_r) == CONST_INT))
6645 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
6646 GET_MODE (index_val), iunsignedp, 0);
6647 emit_jump_insn (gen_blt (op1));
6650 if (! (GET_CODE (index_val) == CONST_INT
6651 && GET_CODE (hi_r) == CONST_INT))
6653 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
6654 GET_MODE (index_val), iunsignedp, 0);
6655 emit_jump_insn (gen_bgt (op1));
6658 /* Calculate the element number of bit zero in the first word
6660 if (GET_CODE (lo_r) == CONST_INT)
6661 rlow = GEN_INT (INTVAL (lo_r)
6662 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
6664 rlow = expand_binop (index_mode, and_optab, lo_r,
6665 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
6666 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6668 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6669 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6671 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6672 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6673 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6674 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6676 addr = memory_address (byte_mode,
6677 expand_binop (index_mode, add_optab, diff,
6678 setaddr, NULL_RTX, iunsignedp,
6681 /* Extract the bit we want to examine */
6682 bit = expand_shift (RSHIFT_EXPR, byte_mode,
6683 gen_rtx_MEM (byte_mode, addr),
6684 make_tree (TREE_TYPE (index), rem),
6686 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6687 GET_MODE (target) == byte_mode ? target : 0,
6688 1, OPTAB_LIB_WIDEN);
6690 if (result != target)
6691 convert_move (target, result, 1);
6693 /* Output the code to handle the out-of-range case. */
6696 emit_move_insn (target, const0_rtx);
6701 case WITH_CLEANUP_EXPR:
6702 if (RTL_EXPR_RTL (exp) == 0)
6705 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6706 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6708 /* That's it for this cleanup. */
6709 TREE_OPERAND (exp, 2) = 0;
6711 return RTL_EXPR_RTL (exp);
6713 case CLEANUP_POINT_EXPR:
6715 extern int temp_slot_level;
6716 /* Start a new binding layer that will keep track of all cleanup
6717 actions to be performed. */
6718 expand_start_bindings (0);
6720 target_temp_slot_level = temp_slot_level;
6722 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6723 /* If we're going to use this value, load it up now. */
6725 op0 = force_not_mem (op0);
6726 preserve_temp_slots (op0);
6727 expand_end_bindings (NULL_TREE, 0, 0);
6732 /* Check for a built-in function. */
6733 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6734 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6736 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6737 return expand_builtin (exp, target, subtarget, tmode, ignore);
6739 /* If this call was expanded already by preexpand_calls,
6740 just return the result we got. */
6741 if (CALL_EXPR_RTL (exp) != 0)
6742 return CALL_EXPR_RTL (exp);
6744 return expand_call (exp, target, ignore);
6746 case NON_LVALUE_EXPR:
6749 case REFERENCE_EXPR:
6750 if (TREE_CODE (type) == UNION_TYPE)
6752 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6755 if (mode != BLKmode)
6756 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6758 target = assign_temp (type, 0, 1, 1);
6761 if (GET_CODE (target) == MEM)
6762 /* Store data into beginning of memory target. */
6763 store_expr (TREE_OPERAND (exp, 0),
6764 change_address (target, TYPE_MODE (valtype), 0), 0);
6766 else if (GET_CODE (target) == REG)
6767 /* Store this field into a union of the proper type. */
6768 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6769 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6771 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))),
6776 /* Return the entire union. */
6780 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6782 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
6785 /* If the signedness of the conversion differs and OP0 is
6786 a promoted SUBREG, clear that indication since we now
6787 have to do the proper extension. */
6788 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6789 && GET_CODE (op0) == SUBREG)
6790 SUBREG_PROMOTED_VAR_P (op0) = 0;
6795 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
6796 if (GET_MODE (op0) == mode)
6799 /* If OP0 is a constant, just convert it into the proper mode. */
6800 if (CONSTANT_P (op0))
6802 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6803 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6805 if (modifier == EXPAND_INITIALIZER)
6806 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
6810 convert_to_mode (mode, op0,
6811 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6813 convert_move (target, op0,
6814 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6818 /* We come here from MINUS_EXPR when the second operand is a
6821 this_optab = add_optab;
6823 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6824 something else, make sure we add the register to the constant and
6825 then to the other thing. This case can occur during strength
6826 reduction and doing it this way will produce better code if the
6827 frame pointer or argument pointer is eliminated.
6829 fold-const.c will ensure that the constant is always in the inner
6830 PLUS_EXPR, so the only case we need to do anything about is if
6831 sp, ap, or fp is our second argument, in which case we must swap
6832 the innermost first argument and our second argument. */
6834 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6835 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6836 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6837 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6838 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6839 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6841 tree t = TREE_OPERAND (exp, 1);
6843 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6844 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
6847 /* If the result is to be ptr_mode and we are adding an integer to
6848 something, we might be forming a constant. So try to use
6849 plus_constant. If it produces a sum and we can't accept it,
6850 use force_operand. This allows P = &ARR[const] to generate
6851 efficient code on machines where a SYMBOL_REF is not a valid
6854 If this is an EXPAND_SUM call, always return the sum. */
6855 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
6856 || mode == ptr_mode)
6858 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
6859 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6860 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
6862 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
6864 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
6865 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6866 op1 = force_operand (op1, target);
6870 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6871 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
6872 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
6874 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6876 if (! CONSTANT_P (op0))
6878 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6879 VOIDmode, modifier);
6880 /* Don't go to both_summands if modifier
6881 says it's not right to return a PLUS. */
6882 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6886 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
6887 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6888 op0 = force_operand (op0, target);
6893 /* No sense saving up arithmetic to be done
6894 if it's all in the wrong mode to form part of an address.
6895 And force_operand won't know whether to sign-extend or
6897 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6898 || mode != ptr_mode)
6901 preexpand_calls (exp);
6902 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6905 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
6906 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
6909 /* Make sure any term that's a sum with a constant comes last. */
6910 if (GET_CODE (op0) == PLUS
6911 && CONSTANT_P (XEXP (op0, 1)))
6917 /* If adding to a sum including a constant,
6918 associate it to put the constant outside. */
6919 if (GET_CODE (op1) == PLUS
6920 && CONSTANT_P (XEXP (op1, 1)))
6922 rtx constant_term = const0_rtx;
6924 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
6927 /* Ensure that MULT comes first if there is one. */
6928 else if (GET_CODE (op0) == MULT)
6929 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
6931 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
6933 /* Let's also eliminate constants from op0 if possible. */
6934 op0 = eliminate_constant_term (op0, &constant_term);
6936 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
6937 their sum should be a constant. Form it into OP1, since the
6938 result we want will then be OP0 + OP1. */
6940 temp = simplify_binary_operation (PLUS, mode, constant_term,
6945 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
6948 /* Put a constant term last and put a multiplication first. */
6949 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
6950 temp = op1, op1 = op0, op0 = temp;
6952 temp = simplify_binary_operation (PLUS, mode, op0, op1);
6953 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
6956 /* For initializers, we are allowed to return a MINUS of two
6957 symbolic constants. Here we handle all cases when both operands
6959 /* Handle difference of two symbolic constants,
6960 for the sake of an initializer. */
6961 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6962 && really_constant_p (TREE_OPERAND (exp, 0))
6963 && really_constant_p (TREE_OPERAND (exp, 1)))
6965 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
6966 VOIDmode, ro_modifier);
6967 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6968 VOIDmode, ro_modifier);
6970 /* If the last operand is a CONST_INT, use plus_constant of
6971 the negated constant. Else make the MINUS. */
6972 if (GET_CODE (op1) == CONST_INT)
6973 return plus_constant (op0, - INTVAL (op1));
6975 return gen_rtx_MINUS (mode, op0, op1);
6977 /* Convert A - const to A + (-const). */
6978 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6980 tree negated = fold (build1 (NEGATE_EXPR, type,
6981 TREE_OPERAND (exp, 1)));
6983 /* Deal with the case where we can't negate the constant
6985 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6987 tree newtype = signed_type (type);
6988 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
6989 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
6990 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
6992 if (! TREE_OVERFLOW (newneg))
6993 return expand_expr (convert (type,
6994 build (PLUS_EXPR, newtype,
6996 target, tmode, ro_modifier);
7000 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7004 this_optab = sub_optab;
7008 preexpand_calls (exp);
7009 /* If first operand is constant, swap them.
7010 Thus the following special case checks need only
7011 check the second operand. */
7012 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7014 register tree t1 = TREE_OPERAND (exp, 0);
7015 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7016 TREE_OPERAND (exp, 1) = t1;
7019 /* Attempt to return something suitable for generating an
7020 indexed address, for machines that support that. */
7022 if (modifier == EXPAND_SUM && mode == ptr_mode
7023 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7024 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7026 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7029 /* Apply distributive law if OP0 is x+c. */
7030 if (GET_CODE (op0) == PLUS
7031 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7032 return gen_rtx_PLUS (mode,
7033 gen_rtx_MULT (mode, XEXP (op0, 0),
7034 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7035 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7036 * INTVAL (XEXP (op0, 1))));
7038 if (GET_CODE (op0) != REG)
7039 op0 = force_operand (op0, NULL_RTX);
7040 if (GET_CODE (op0) != REG)
7041 op0 = copy_to_mode_reg (mode, op0);
7043 return gen_rtx_MULT (mode, op0,
7044 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7047 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7050 /* Check for multiplying things that have been extended
7051 from a narrower type. If this machine supports multiplying
7052 in that narrower type with a result in the desired type,
7053 do it that way, and avoid the explicit type-conversion. */
7054 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7055 && TREE_CODE (type) == INTEGER_TYPE
7056 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7057 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7058 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7059 && int_fits_type_p (TREE_OPERAND (exp, 1),
7060 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7061 /* Don't use a widening multiply if a shift will do. */
7062 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7063 > HOST_BITS_PER_WIDE_INT)
7064 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7066 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7067 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7069 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7070 /* If both operands are extended, they must either both
7071 be zero-extended or both be sign-extended. */
7072 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7074 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7076 enum machine_mode innermode
7077 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7078 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7079 ? smul_widen_optab : umul_widen_optab);
7080 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7081 ? umul_widen_optab : smul_widen_optab);
7082 if (mode == GET_MODE_WIDER_MODE (innermode))
7084 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7086 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7087 NULL_RTX, VOIDmode, 0);
7088 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7089 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7092 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7093 NULL_RTX, VOIDmode, 0);
7096 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7097 && innermode == word_mode)
7100 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7101 NULL_RTX, VOIDmode, 0);
7102 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7103 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7106 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7107 NULL_RTX, VOIDmode, 0);
7108 temp = expand_binop (mode, other_optab, op0, op1, target,
7109 unsignedp, OPTAB_LIB_WIDEN);
7110 htem = expand_mult_highpart_adjust (innermode,
7111 gen_highpart (innermode, temp),
7113 gen_highpart (innermode, temp),
7115 emit_move_insn (gen_highpart (innermode, temp), htem);
7120 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7121 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7122 return expand_mult (mode, op0, op1, target, unsignedp);
7124 case TRUNC_DIV_EXPR:
7125 case FLOOR_DIV_EXPR:
7127 case ROUND_DIV_EXPR:
7128 case EXACT_DIV_EXPR:
7129 preexpand_calls (exp);
7130 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7132 /* Possible optimization: compute the dividend with EXPAND_SUM
7133 then if the divisor is constant can optimize the case
7134 where some terms of the dividend have coeffs divisible by it. */
7135 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7136 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7137 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7140 this_optab = flodiv_optab;
7143 case TRUNC_MOD_EXPR:
7144 case FLOOR_MOD_EXPR:
7146 case ROUND_MOD_EXPR:
7147 preexpand_calls (exp);
7148 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7150 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7151 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7152 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7154 case FIX_ROUND_EXPR:
7155 case FIX_FLOOR_EXPR:
7157 abort (); /* Not used for C. */
7159 case FIX_TRUNC_EXPR:
7160 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7162 target = gen_reg_rtx (mode);
7163 expand_fix (target, op0, unsignedp);
7167 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7169 target = gen_reg_rtx (mode);
7170 /* expand_float can't figure out what to do if FROM has VOIDmode.
7171 So give it the correct mode. With -O, cse will optimize this. */
7172 if (GET_MODE (op0) == VOIDmode)
7173 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7175 expand_float (target, op0,
7176 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7180 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7181 temp = expand_unop (mode, neg_optab, op0, target, 0);
7187 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7189 /* Handle complex values specially. */
7190 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7191 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7192 return expand_complex_abs (mode, op0, target, unsignedp);
7194 /* Unsigned abs is simply the operand. Testing here means we don't
7195 risk generating incorrect code below. */
7196 if (TREE_UNSIGNED (type))
7199 return expand_abs (mode, op0, target, unsignedp,
7200 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7204 target = original_target;
7205 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7206 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7207 || GET_MODE (target) != mode
7208 || (GET_CODE (target) == REG
7209 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7210 target = gen_reg_rtx (mode);
7211 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7212 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7214 /* First try to do it with a special MIN or MAX instruction.
7215 If that does not win, use a conditional jump to select the proper
7217 this_optab = (TREE_UNSIGNED (type)
7218 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7219 : (code == MIN_EXPR ? smin_optab : smax_optab));
7221 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7226 /* At this point, a MEM target is no longer useful; we will get better
7229 if (GET_CODE (target) == MEM)
7230 target = gen_reg_rtx (mode);
7233 emit_move_insn (target, op0);
7235 op0 = gen_label_rtx ();
7237 /* If this mode is an integer too wide to compare properly,
7238 compare word by word. Rely on cse to optimize constant cases. */
7239 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
7241 if (code == MAX_EXPR)
7242 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7243 target, op1, NULL_RTX, op0);
7245 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7246 op1, target, NULL_RTX, op0);
7247 emit_move_insn (target, op1);
7251 if (code == MAX_EXPR)
7252 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
7253 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
7254 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
7256 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
7257 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
7258 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
7259 if (temp == const0_rtx)
7260 emit_move_insn (target, op1);
7261 else if (temp != const_true_rtx)
7263 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
7264 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
7267 emit_move_insn (target, op1);
7274 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7275 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7281 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7282 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7287 /* ??? Can optimize bitwise operations with one arg constant.
7288 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7289 and (a bitwise1 b) bitwise2 b (etc)
7290 but that is probably not worth while. */
7292 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7293 boolean values when we want in all cases to compute both of them. In
7294 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7295 as actual zero-or-1 values and then bitwise anding. In cases where
7296 there cannot be any side effects, better code would be made by
7297 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7298 how to recognize those cases. */
7300 case TRUTH_AND_EXPR:
7302 this_optab = and_optab;
7307 this_optab = ior_optab;
7310 case TRUTH_XOR_EXPR:
7312 this_optab = xor_optab;
7319 preexpand_calls (exp);
7320 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7322 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7323 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7326 /* Could determine the answer when only additive constants differ. Also,
7327 the addition of one can be handled by changing the condition. */
7334 preexpand_calls (exp);
7335 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7339 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7340 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7342 && GET_CODE (original_target) == REG
7343 && (GET_MODE (original_target)
7344 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7346 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7349 if (temp != original_target)
7350 temp = copy_to_reg (temp);
7352 op1 = gen_label_rtx ();
7353 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
7354 GET_MODE (temp), unsignedp, 0);
7355 emit_jump_insn (gen_beq (op1));
7356 emit_move_insn (temp, const1_rtx);
7361 /* If no set-flag instruction, must generate a conditional
7362 store into a temporary variable. Drop through
7363 and handle this like && and ||. */
7365 case TRUTH_ANDIF_EXPR:
7366 case TRUTH_ORIF_EXPR:
7368 && (target == 0 || ! safe_from_p (target, exp, 1)
7369 /* Make sure we don't have a hard reg (such as function's return
7370 value) live across basic blocks, if not optimizing. */
7371 || (!optimize && GET_CODE (target) == REG
7372 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7373 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7376 emit_clr_insn (target);
7378 op1 = gen_label_rtx ();
7379 jumpifnot (exp, op1);
7382 emit_0_to_1_insn (target);
7385 return ignore ? const0_rtx : target;
7387 case TRUTH_NOT_EXPR:
7388 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7389 /* The parser is careful to generate TRUTH_NOT_EXPR
7390 only with operands that are always zero or one. */
7391 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7392 target, 1, OPTAB_LIB_WIDEN);
7398 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7400 return expand_expr (TREE_OPERAND (exp, 1),
7401 (ignore ? const0_rtx : target),
7405 /* If we would have a "singleton" (see below) were it not for a
7406 conversion in each arm, bring that conversion back out. */
7407 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7408 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7409 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7410 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7412 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7413 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7415 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7416 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7417 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7418 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7419 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7420 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7421 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7422 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7423 return expand_expr (build1 (NOP_EXPR, type,
7424 build (COND_EXPR, TREE_TYPE (true),
7425 TREE_OPERAND (exp, 0),
7427 target, tmode, modifier);
7431 /* Note that COND_EXPRs whose type is a structure or union
7432 are required to be constructed to contain assignments of
7433 a temporary variable, so that we can evaluate them here
7434 for side effect only. If type is void, we must do likewise. */
7436 /* If an arm of the branch requires a cleanup,
7437 only that cleanup is performed. */
7440 tree binary_op = 0, unary_op = 0;
7442 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7443 convert it to our mode, if necessary. */
7444 if (integer_onep (TREE_OPERAND (exp, 1))
7445 && integer_zerop (TREE_OPERAND (exp, 2))
7446 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7450 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7455 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7456 if (GET_MODE (op0) == mode)
7460 target = gen_reg_rtx (mode);
7461 convert_move (target, op0, unsignedp);
7465 /* Check for X ? A + B : A. If we have this, we can copy A to the
7466 output and conditionally add B. Similarly for unary operations.
7467 Don't do this if X has side-effects because those side effects
7468 might affect A or B and the "?" operation is a sequence point in
7469 ANSI. (operand_equal_p tests for side effects.) */
7471 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7472 && operand_equal_p (TREE_OPERAND (exp, 2),
7473 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7474 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7475 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7476 && operand_equal_p (TREE_OPERAND (exp, 1),
7477 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7478 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7479 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7480 && operand_equal_p (TREE_OPERAND (exp, 2),
7481 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7482 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7483 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7484 && operand_equal_p (TREE_OPERAND (exp, 1),
7485 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7486 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7488 /* If we are not to produce a result, we have no target. Otherwise,
7489 if a target was specified use it; it will not be used as an
7490 intermediate target unless it is safe. If no target, use a
7495 else if (original_target
7496 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7497 || (singleton && GET_CODE (original_target) == REG
7498 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7499 && original_target == var_rtx (singleton)))
7500 && GET_MODE (original_target) == mode
7501 #ifdef HAVE_conditional_move
7502 && (! can_conditionally_move_p (mode)
7503 || GET_CODE (original_target) == REG
7504 || TREE_ADDRESSABLE (type))
7506 && ! (GET_CODE (original_target) == MEM
7507 && MEM_VOLATILE_P (original_target)))
7508 temp = original_target;
7509 else if (TREE_ADDRESSABLE (type))
7512 temp = assign_temp (type, 0, 0, 1);
7514 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7515 do the test of X as a store-flag operation, do this as
7516 A + ((X != 0) << log C). Similarly for other simple binary
7517 operators. Only do for C == 1 if BRANCH_COST is low. */
7518 if (temp && singleton && binary_op
7519 && (TREE_CODE (binary_op) == PLUS_EXPR
7520 || TREE_CODE (binary_op) == MINUS_EXPR
7521 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7522 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7523 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7524 : integer_onep (TREE_OPERAND (binary_op, 1)))
7525 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7528 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7529 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7530 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7533 /* If we had X ? A : A + 1, do this as A + (X == 0).
7535 We have to invert the truth value here and then put it
7536 back later if do_store_flag fails. We cannot simply copy
7537 TREE_OPERAND (exp, 0) to another variable and modify that
7538 because invert_truthvalue can modify the tree pointed to
7540 if (singleton == TREE_OPERAND (exp, 1))
7541 TREE_OPERAND (exp, 0)
7542 = invert_truthvalue (TREE_OPERAND (exp, 0));
7544 result = do_store_flag (TREE_OPERAND (exp, 0),
7545 (safe_from_p (temp, singleton, 1)
7547 mode, BRANCH_COST <= 1);
7549 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7550 result = expand_shift (LSHIFT_EXPR, mode, result,
7551 build_int_2 (tree_log2
7555 (safe_from_p (temp, singleton, 1)
7556 ? temp : NULL_RTX), 0);
7560 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7561 return expand_binop (mode, boptab, op1, result, temp,
7562 unsignedp, OPTAB_LIB_WIDEN);
7564 else if (singleton == TREE_OPERAND (exp, 1))
7565 TREE_OPERAND (exp, 0)
7566 = invert_truthvalue (TREE_OPERAND (exp, 0));
7569 do_pending_stack_adjust ();
7571 op0 = gen_label_rtx ();
7573 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7577 /* If the target conflicts with the other operand of the
7578 binary op, we can't use it. Also, we can't use the target
7579 if it is a hard register, because evaluating the condition
7580 might clobber it. */
7582 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7583 || (GET_CODE (temp) == REG
7584 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7585 temp = gen_reg_rtx (mode);
7586 store_expr (singleton, temp, 0);
7589 expand_expr (singleton,
7590 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7591 if (singleton == TREE_OPERAND (exp, 1))
7592 jumpif (TREE_OPERAND (exp, 0), op0);
7594 jumpifnot (TREE_OPERAND (exp, 0), op0);
7596 start_cleanup_deferral ();
7597 if (binary_op && temp == 0)
7598 /* Just touch the other operand. */
7599 expand_expr (TREE_OPERAND (binary_op, 1),
7600 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7602 store_expr (build (TREE_CODE (binary_op), type,
7603 make_tree (type, temp),
7604 TREE_OPERAND (binary_op, 1)),
7607 store_expr (build1 (TREE_CODE (unary_op), type,
7608 make_tree (type, temp)),
7612 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7613 comparison operator. If we have one of these cases, set the
7614 output to A, branch on A (cse will merge these two references),
7615 then set the output to FOO. */
7617 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7618 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7619 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7620 TREE_OPERAND (exp, 1), 0)
7621 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7622 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7623 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7625 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7626 temp = gen_reg_rtx (mode);
7627 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7628 jumpif (TREE_OPERAND (exp, 0), op0);
7630 start_cleanup_deferral ();
7631 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7635 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7636 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7637 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7638 TREE_OPERAND (exp, 2), 0)
7639 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7640 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7641 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7643 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7644 temp = gen_reg_rtx (mode);
7645 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7646 jumpifnot (TREE_OPERAND (exp, 0), op0);
7648 start_cleanup_deferral ();
7649 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7654 op1 = gen_label_rtx ();
7655 jumpifnot (TREE_OPERAND (exp, 0), op0);
7657 start_cleanup_deferral ();
7659 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7661 expand_expr (TREE_OPERAND (exp, 1),
7662 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7663 end_cleanup_deferral ();
7665 emit_jump_insn (gen_jump (op1));
7668 start_cleanup_deferral ();
7670 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7672 expand_expr (TREE_OPERAND (exp, 2),
7673 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7676 end_cleanup_deferral ();
7687 /* Something needs to be initialized, but we didn't know
7688 where that thing was when building the tree. For example,
7689 it could be the return value of a function, or a parameter
7690 to a function which lays down in the stack, or a temporary
7691 variable which must be passed by reference.
7693 We guarantee that the expression will either be constructed
7694 or copied into our original target. */
7696 tree slot = TREE_OPERAND (exp, 0);
7697 tree cleanups = NULL_TREE;
7700 if (TREE_CODE (slot) != VAR_DECL)
7704 target = original_target;
7708 if (DECL_RTL (slot) != 0)
7710 target = DECL_RTL (slot);
7711 /* If we have already expanded the slot, so don't do
7713 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7718 target = assign_temp (type, 2, 0, 1);
7719 /* All temp slots at this level must not conflict. */
7720 preserve_temp_slots (target);
7721 DECL_RTL (slot) = target;
7722 if (TREE_ADDRESSABLE (slot))
7724 TREE_ADDRESSABLE (slot) = 0;
7725 mark_addressable (slot);
7728 /* Since SLOT is not known to the called function
7729 to belong to its stack frame, we must build an explicit
7730 cleanup. This case occurs when we must build up a reference
7731 to pass the reference as an argument. In this case,
7732 it is very likely that such a reference need not be
7735 if (TREE_OPERAND (exp, 2) == 0)
7736 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
7737 cleanups = TREE_OPERAND (exp, 2);
7742 /* This case does occur, when expanding a parameter which
7743 needs to be constructed on the stack. The target
7744 is the actual stack address that we want to initialize.
7745 The function we call will perform the cleanup in this case. */
7747 /* If we have already assigned it space, use that space,
7748 not target that we were passed in, as our target
7749 parameter is only a hint. */
7750 if (DECL_RTL (slot) != 0)
7752 target = DECL_RTL (slot);
7753 /* If we have already expanded the slot, so don't do
7755 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7760 DECL_RTL (slot) = target;
7761 /* If we must have an addressable slot, then make sure that
7762 the RTL that we just stored in slot is OK. */
7763 if (TREE_ADDRESSABLE (slot))
7765 TREE_ADDRESSABLE (slot) = 0;
7766 mark_addressable (slot);
7771 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
7772 /* Mark it as expanded. */
7773 TREE_OPERAND (exp, 1) = NULL_TREE;
7775 TREE_USED (slot) = 1;
7776 store_expr (exp1, target, 0);
7778 expand_decl_cleanup (NULL_TREE, cleanups);
7785 tree lhs = TREE_OPERAND (exp, 0);
7786 tree rhs = TREE_OPERAND (exp, 1);
7787 tree noncopied_parts = 0;
7788 tree lhs_type = TREE_TYPE (lhs);
7790 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7791 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7792 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7793 TYPE_NONCOPIED_PARTS (lhs_type));
7794 while (noncopied_parts != 0)
7796 expand_assignment (TREE_VALUE (noncopied_parts),
7797 TREE_PURPOSE (noncopied_parts), 0, 0);
7798 noncopied_parts = TREE_CHAIN (noncopied_parts);
7805 /* If lhs is complex, expand calls in rhs before computing it.
7806 That's so we don't compute a pointer and save it over a call.
7807 If lhs is simple, compute it first so we can give it as a
7808 target if the rhs is just a call. This avoids an extra temp and copy
7809 and that prevents a partial-subsumption which makes bad code.
7810 Actually we could treat component_ref's of vars like vars. */
7812 tree lhs = TREE_OPERAND (exp, 0);
7813 tree rhs = TREE_OPERAND (exp, 1);
7814 tree noncopied_parts = 0;
7815 tree lhs_type = TREE_TYPE (lhs);
7819 if (TREE_CODE (lhs) != VAR_DECL
7820 && TREE_CODE (lhs) != RESULT_DECL
7821 && TREE_CODE (lhs) != PARM_DECL
7822 && ! (TREE_CODE (lhs) == INDIRECT_REF
7823 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
7824 preexpand_calls (exp);
7826 /* Check for |= or &= of a bitfield of size one into another bitfield
7827 of size 1. In this case, (unless we need the result of the
7828 assignment) we can do this more efficiently with a
7829 test followed by an assignment, if necessary.
7831 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7832 things change so we do, this code should be enhanced to
7835 && TREE_CODE (lhs) == COMPONENT_REF
7836 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7837 || TREE_CODE (rhs) == BIT_AND_EXPR)
7838 && TREE_OPERAND (rhs, 0) == lhs
7839 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7840 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7841 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7843 rtx label = gen_label_rtx ();
7845 do_jump (TREE_OPERAND (rhs, 1),
7846 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7847 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7848 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7849 (TREE_CODE (rhs) == BIT_IOR_EXPR
7851 : integer_zero_node)),
7853 do_pending_stack_adjust ();
7858 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7859 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7860 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7861 TYPE_NONCOPIED_PARTS (lhs_type));
7863 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7864 while (noncopied_parts != 0)
7866 expand_assignment (TREE_PURPOSE (noncopied_parts),
7867 TREE_VALUE (noncopied_parts), 0, 0);
7868 noncopied_parts = TREE_CHAIN (noncopied_parts);
7874 if (!TREE_OPERAND (exp, 0))
7875 expand_null_return ();
7877 expand_return (TREE_OPERAND (exp, 0));
7880 case PREINCREMENT_EXPR:
7881 case PREDECREMENT_EXPR:
7882 return expand_increment (exp, 0, ignore);
7884 case POSTINCREMENT_EXPR:
7885 case POSTDECREMENT_EXPR:
7886 /* Faster to treat as pre-increment if result is not used. */
7887 return expand_increment (exp, ! ignore, ignore);
7890 /* If nonzero, TEMP will be set to the address of something that might
7891 be a MEM corresponding to a stack slot. */
7894 /* Are we taking the address of a nested function? */
7895 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
7896 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
7897 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
7898 && ! TREE_STATIC (exp))
7900 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7901 op0 = force_operand (op0, target);
7903 /* If we are taking the address of something erroneous, just
7905 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7909 /* We make sure to pass const0_rtx down if we came in with
7910 ignore set, to avoid doing the cleanups twice for something. */
7911 op0 = expand_expr (TREE_OPERAND (exp, 0),
7912 ignore ? const0_rtx : NULL_RTX, VOIDmode,
7913 (modifier == EXPAND_INITIALIZER
7914 ? modifier : EXPAND_CONST_ADDRESS));
7916 /* If we are going to ignore the result, OP0 will have been set
7917 to const0_rtx, so just return it. Don't get confused and
7918 think we are taking the address of the constant. */
7922 op0 = protect_from_queue (op0, 0);
7924 /* We would like the object in memory. If it is a constant,
7925 we can have it be statically allocated into memory. For
7926 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
7927 memory and store the value into it. */
7929 if (CONSTANT_P (op0))
7930 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7932 else if (GET_CODE (op0) == MEM)
7934 mark_temp_addr_taken (op0);
7935 temp = XEXP (op0, 0);
7938 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7939 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7941 /* If this object is in a register, it must be not
7943 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7944 rtx memloc = assign_temp (inner_type, 1, 1, 1);
7946 mark_temp_addr_taken (memloc);
7947 emit_move_insn (memloc, op0);
7951 if (GET_CODE (op0) != MEM)
7954 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7956 temp = XEXP (op0, 0);
7957 #ifdef POINTERS_EXTEND_UNSIGNED
7958 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7959 && mode == ptr_mode)
7960 temp = convert_memory_address (ptr_mode, temp);
7965 op0 = force_operand (XEXP (op0, 0), target);
7968 if (flag_force_addr && GET_CODE (op0) != REG)
7969 op0 = force_reg (Pmode, op0);
7971 if (GET_CODE (op0) == REG
7972 && ! REG_USERVAR_P (op0))
7973 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
7975 /* If we might have had a temp slot, add an equivalent address
7978 update_temp_slot_address (temp, op0);
7980 #ifdef POINTERS_EXTEND_UNSIGNED
7981 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7982 && mode == ptr_mode)
7983 op0 = convert_memory_address (ptr_mode, op0);
7988 case ENTRY_VALUE_EXPR:
7991 /* COMPLEX type for Extended Pascal & Fortran */
7994 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7997 /* Get the rtx code of the operands. */
7998 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7999 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8002 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8006 /* Move the real (op0) and imaginary (op1) parts to their location. */
8007 emit_move_insn (gen_realpart (mode, target), op0);
8008 emit_move_insn (gen_imagpart (mode, target), op1);
8010 insns = get_insns ();
8013 /* Complex construction should appear as a single unit. */
8014 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8015 each with a separate pseudo as destination.
8016 It's not correct for flow to treat them as a unit. */
8017 if (GET_CODE (target) != CONCAT)
8018 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8026 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8027 return gen_realpart (mode, op0);
8030 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8031 return gen_imagpart (mode, op0);
8035 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8039 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8042 target = gen_reg_rtx (mode);
8046 /* Store the realpart and the negated imagpart to target. */
8047 emit_move_insn (gen_realpart (partmode, target),
8048 gen_realpart (partmode, op0));
8050 imag_t = gen_imagpart (partmode, target);
8051 temp = expand_unop (partmode, neg_optab,
8052 gen_imagpart (partmode, op0), imag_t, 0);
8054 emit_move_insn (imag_t, temp);
8056 insns = get_insns ();
8059 /* Conjugate should appear as a single unit
8060 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8061 each with a separate pseudo as destination.
8062 It's not correct for flow to treat them as a unit. */
8063 if (GET_CODE (target) != CONCAT)
8064 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8071 case TRY_CATCH_EXPR:
8073 tree handler = TREE_OPERAND (exp, 1);
8075 expand_eh_region_start ();
8077 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8079 expand_eh_region_end (handler);
8086 rtx dcc = get_dynamic_cleanup_chain ();
8087 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8093 rtx dhc = get_dynamic_handler_chain ();
8094 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8099 op0 = CONST0_RTX (tmode);
8105 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8108 /* Here to do an ordinary binary operator, generating an instruction
8109 from the optab already placed in `this_optab'. */
8111 preexpand_calls (exp);
8112 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8114 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8115 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8117 temp = expand_binop (mode, this_optab, op0, op1, target,
8118 unsignedp, OPTAB_LIB_WIDEN);
8126 /* Return the alignment in bits of EXP, a pointer valued expression.
8127 But don't return more than MAX_ALIGN no matter what.
8128 The alignment returned is, by default, the alignment of the thing that
8129 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
8131 Otherwise, look at the expression to see if we can do better, i.e., if the
8132 expression is actually pointing at an object whose alignment is tighter. */
8135 get_pointer_alignment (exp, max_align)
8139 unsigned align, inner;
8141 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
8144 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
8145 align = MIN (align, max_align);
8149 switch (TREE_CODE (exp))
8153 case NON_LVALUE_EXPR:
8154 exp = TREE_OPERAND (exp, 0);
8155 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
8157 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
8158 align = MIN (inner, max_align);
8162 /* If sum of pointer + int, restrict our maximum alignment to that
8163 imposed by the integer. If not, we can't do any better than
8165 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
8168 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
8173 exp = TREE_OPERAND (exp, 0);
8177 /* See what we are pointing at and look at its alignment. */
8178 exp = TREE_OPERAND (exp, 0);
8179 if (TREE_CODE (exp) == FUNCTION_DECL)
8180 align = FUNCTION_BOUNDARY;
8181 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
8182 align = DECL_ALIGN (exp);
8183 #ifdef CONSTANT_ALIGNMENT
8184 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
8185 align = CONSTANT_ALIGNMENT (exp, align);
8187 return MIN (align, max_align);
8195 /* Return the tree node and offset if a given argument corresponds to
8196 a string constant. */
8199 string_constant (arg, ptr_offset)
8205 if (TREE_CODE (arg) == ADDR_EXPR
8206 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8208 *ptr_offset = integer_zero_node;
8209 return TREE_OPERAND (arg, 0);
8211 else if (TREE_CODE (arg) == PLUS_EXPR)
8213 tree arg0 = TREE_OPERAND (arg, 0);
8214 tree arg1 = TREE_OPERAND (arg, 1);
8219 if (TREE_CODE (arg0) == ADDR_EXPR
8220 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8223 return TREE_OPERAND (arg0, 0);
8225 else if (TREE_CODE (arg1) == ADDR_EXPR
8226 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8229 return TREE_OPERAND (arg1, 0);
8236 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
8237 way, because it could contain a zero byte in the middle.
8238 TREE_STRING_LENGTH is the size of the character array, not the string.
8240 Unfortunately, string_constant can't access the values of const char
8241 arrays with initializers, so neither can we do so here. */
8251 src = string_constant (src, &offset_node);
8254 max = TREE_STRING_LENGTH (src);
8255 ptr = TREE_STRING_POINTER (src);
8256 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
8258 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
8259 compute the offset to the following null if we don't know where to
8260 start searching for it. */
8262 for (i = 0; i < max; i++)
8265 /* We don't know the starting offset, but we do know that the string
8266 has no internal zero bytes. We can assume that the offset falls
8267 within the bounds of the string; otherwise, the programmer deserves
8268 what he gets. Subtract the offset from the length of the string,
8270 /* This would perhaps not be valid if we were dealing with named
8271 arrays in addition to literal string constants. */
8272 return size_binop (MINUS_EXPR, size_int (max), offset_node);
8275 /* We have a known offset into the string. Start searching there for
8276 a null character. */
8277 if (offset_node == 0)
8281 /* Did we get a long long offset? If so, punt. */
8282 if (TREE_INT_CST_HIGH (offset_node) != 0)
8284 offset = TREE_INT_CST_LOW (offset_node);
8286 /* If the offset is known to be out of bounds, warn, and call strlen at
8288 if (offset < 0 || offset > max)
8290 warning ("offset outside bounds of constant string");
8293 /* Use strlen to search for the first zero byte. Since any strings
8294 constructed with build_string will have nulls appended, we win even
8295 if we get handed something like (char[4])"abcd".
8297 Since OFFSET is our starting index into the string, no further
8298 calculation is needed. */
8299 return size_int (strlen (ptr + offset));
8303 expand_builtin_return_addr (fndecl_code, count, tem)
8304 enum built_in_function fndecl_code;
8310 /* Some machines need special handling before we can access
8311 arbitrary frames. For example, on the sparc, we must first flush
8312 all register windows to the stack. */
8313 #ifdef SETUP_FRAME_ADDRESSES
8315 SETUP_FRAME_ADDRESSES ();
8318 /* On the sparc, the return address is not in the frame, it is in a
8319 register. There is no way to access it off of the current frame
8320 pointer, but it can be accessed off the previous frame pointer by
8321 reading the value from the register window save area. */
8322 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
8323 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
8327 /* Scan back COUNT frames to the specified frame. */
8328 for (i = 0; i < count; i++)
8330 /* Assume the dynamic chain pointer is in the word that the
8331 frame address points to, unless otherwise specified. */
8332 #ifdef DYNAMIC_CHAIN_ADDRESS
8333 tem = DYNAMIC_CHAIN_ADDRESS (tem);
8335 tem = memory_address (Pmode, tem);
8336 tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
8339 /* For __builtin_frame_address, return what we've got. */
8340 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
8343 /* For __builtin_return_address, Get the return address from that
8345 #ifdef RETURN_ADDR_RTX
8346 tem = RETURN_ADDR_RTX (count, tem);
8348 tem = memory_address (Pmode,
8349 plus_constant (tem, GET_MODE_SIZE (Pmode)));
8350 tem = gen_rtx_MEM (Pmode, tem);
8355 /* __builtin_setjmp is passed a pointer to an array of five words (not
8356 all will be used on all machines). It operates similarly to the C
8357 library function of the same name, but is more efficient. Much of
8358 the code below (and for longjmp) is copied from the handling of
8361 NOTE: This is intended for use by GNAT and the exception handling
8362 scheme in the compiler and will only work in the method used by
8366 expand_builtin_setjmp (buf_addr, target, first_label, next_label)
8369 rtx first_label, next_label;
8371 rtx lab1 = gen_label_rtx ();
8372 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
8373 enum machine_mode value_mode;
8376 value_mode = TYPE_MODE (integer_type_node);
8378 #ifdef POINTERS_EXTEND_UNSIGNED
8379 buf_addr = convert_memory_address (Pmode, buf_addr);
8382 buf_addr = force_reg (Pmode, buf_addr);
8384 if (target == 0 || GET_CODE (target) != REG
8385 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8386 target = gen_reg_rtx (value_mode);
8390 /* We store the frame pointer and the address of lab1 in the buffer
8391 and use the rest of it for the stack save area, which is
8392 machine-dependent. */
8394 #ifndef BUILTIN_SETJMP_FRAME_VALUE
8395 #define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
8398 emit_move_insn (gen_rtx_MEM (Pmode, buf_addr),
8399 BUILTIN_SETJMP_FRAME_VALUE);
8400 emit_move_insn (validize_mem
8401 (gen_rtx_MEM (Pmode,
8402 plus_constant (buf_addr,
8403 GET_MODE_SIZE (Pmode)))),
8404 gen_rtx_LABEL_REF (Pmode, lab1));
8406 stack_save = gen_rtx_MEM (sa_mode,
8407 plus_constant (buf_addr,
8408 2 * GET_MODE_SIZE (Pmode)));
8409 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
8411 /* If there is further processing to do, do it. */
8412 #ifdef HAVE_builtin_setjmp_setup
8413 if (HAVE_builtin_setjmp_setup)
8414 emit_insn (gen_builtin_setjmp_setup (buf_addr));
8417 /* Set TARGET to zero and branch to the first-time-through label. */
8418 emit_move_insn (target, const0_rtx);
8419 emit_jump_insn (gen_jump (first_label));
8423 /* Tell flow about the strange goings on. */
8424 current_function_has_nonlocal_label = 1;
8426 /* Clobber the FP when we get here, so we have to make sure it's
8427 marked as used by this function. */
8428 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
8430 /* Mark the static chain as clobbered here so life information
8431 doesn't get messed up for it. */
8432 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
8434 /* Now put in the code to restore the frame pointer, and argument
8435 pointer, if needed. The code below is from expand_end_bindings
8436 in stmt.c; see detailed documentation there. */
8437 #ifdef HAVE_nonlocal_goto
8438 if (! HAVE_nonlocal_goto)
8440 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8442 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8443 if (fixed_regs[ARG_POINTER_REGNUM])
8445 #ifdef ELIMINABLE_REGS
8447 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
8449 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8450 if (elim_regs[i].from == ARG_POINTER_REGNUM
8451 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8454 if (i == sizeof elim_regs / sizeof elim_regs [0])
8457 /* Now restore our arg pointer from the address at which it
8458 was saved in our stack frame.
8459 If there hasn't be space allocated for it yet, make
8461 if (arg_pointer_save_area == 0)
8462 arg_pointer_save_area
8463 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8464 emit_move_insn (virtual_incoming_args_rtx,
8465 copy_to_reg (arg_pointer_save_area));
8470 #ifdef HAVE_builtin_setjmp_receiver
8471 if (HAVE_builtin_setjmp_receiver)
8472 emit_insn (gen_builtin_setjmp_receiver (lab1));
8475 #ifdef HAVE_nonlocal_goto_receiver
8476 if (HAVE_nonlocal_goto_receiver)
8477 emit_insn (gen_nonlocal_goto_receiver ());
8484 /* Set TARGET, and branch to the next-time-through label. */
8485 emit_move_insn (target, const1_rtx);
8486 emit_jump_insn (gen_jump (next_label));
8493 expand_builtin_longjmp (buf_addr, value)
8494 rtx buf_addr, value;
8497 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
8499 #ifdef POINTERS_EXTEND_UNSIGNED
8500 buf_addr = convert_memory_address (Pmode, buf_addr);
8502 buf_addr = force_reg (Pmode, buf_addr);
8504 /* We used to store value in static_chain_rtx, but that fails if pointers
8505 are smaller than integers. We instead require that the user must pass
8506 a second argument of 1, because that is what builtin_setjmp will
8507 return. This also makes EH slightly more efficient, since we are no
8508 longer copying around a value that we don't care about. */
8509 if (value != const1_rtx)
8512 #ifdef HAVE_builtin_longjmp
8513 if (HAVE_builtin_longjmp)
8514 emit_insn (gen_builtin_longjmp (buf_addr));
8518 fp = gen_rtx_MEM (Pmode, buf_addr);
8519 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
8520 GET_MODE_SIZE (Pmode)));
8522 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
8523 2 * GET_MODE_SIZE (Pmode)));
8525 /* Pick up FP, label, and SP from the block and jump. This code is
8526 from expand_goto in stmt.c; see there for detailed comments. */
8527 #if HAVE_nonlocal_goto
8528 if (HAVE_nonlocal_goto)
8529 /* We have to pass a value to the nonlocal_goto pattern that will
8530 get copied into the static_chain pointer, but it does not matter
8531 what that value is, because builtin_setjmp does not use it. */
8532 emit_insn (gen_nonlocal_goto (value, fp, stack, lab));
8536 lab = copy_to_reg (lab);
8538 emit_move_insn (hard_frame_pointer_rtx, fp);
8539 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
8541 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
8542 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
8543 emit_indirect_jump (lab);
8549 get_memory_rtx (exp)
8555 mem = gen_rtx_MEM (BLKmode,
8556 memory_address (BLKmode,
8557 expand_expr (exp, NULL_RTX,
8558 ptr_mode, EXPAND_SUM)));
8560 RTX_UNCHANGING_P (mem) = TREE_READONLY (exp);
8562 /* Figure out the type of the object pointed to. Set MEM_IN_STRUCT_P
8563 if the value is the address of a structure or if the expression is
8564 cast to a pointer to structure type. */
8567 while (TREE_CODE (exp) == NOP_EXPR)
8569 tree cast_type = TREE_TYPE (exp);
8570 if (TREE_CODE (cast_type) == POINTER_TYPE
8571 && AGGREGATE_TYPE_P (TREE_TYPE (cast_type)))
8576 exp = TREE_OPERAND (exp, 0);
8579 if (is_aggregate == 0)
8583 if (TREE_CODE (exp) == ADDR_EXPR)
8584 /* If this is the address of an object, check whether the
8585 object is an array. */
8586 type = TREE_TYPE (TREE_OPERAND (exp, 0));
8588 type = TREE_TYPE (TREE_TYPE (exp));
8589 is_aggregate = AGGREGATE_TYPE_P (type);
8592 MEM_IN_STRUCT_P (mem) = is_aggregate;
8597 /* Expand an expression EXP that calls a built-in function,
8598 with result going to TARGET if that's convenient
8599 (and in mode MODE if that's convenient).
8600 SUBTARGET may be used as the target for computing one of EXP's operands.
8601 IGNORE is nonzero if the value is to be ignored. */
8603 #define CALLED_AS_BUILT_IN(NODE) \
8604 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
8607 expand_builtin (exp, target, subtarget, mode, ignore)
8611 enum machine_mode mode;
8614 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8615 tree arglist = TREE_OPERAND (exp, 1);
8618 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
8619 optab builtin_optab;
8621 switch (DECL_FUNCTION_CODE (fndecl))
8626 /* build_function_call changes these into ABS_EXPR. */
8631 /* Treat these like sqrt, but only if the user asks for them. */
8632 if (! flag_fast_math)
8634 case BUILT_IN_FSQRT:
8635 /* If not optimizing, call the library function. */
8640 /* Arg could be wrong type if user redeclared this fcn wrong. */
8641 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
8644 /* Stabilize and compute the argument. */
8645 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
8646 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
8648 exp = copy_node (exp);
8649 arglist = copy_node (arglist);
8650 TREE_OPERAND (exp, 1) = arglist;
8651 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
8653 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8655 /* Make a suitable register to place result in. */
8656 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8661 switch (DECL_FUNCTION_CODE (fndecl))
8664 builtin_optab = sin_optab; break;
8666 builtin_optab = cos_optab; break;
8667 case BUILT_IN_FSQRT:
8668 builtin_optab = sqrt_optab; break;
8673 /* Compute into TARGET.
8674 Set TARGET to wherever the result comes back. */
8675 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8676 builtin_optab, op0, target, 0);
8678 /* If we were unable to expand via the builtin, stop the
8679 sequence (without outputting the insns) and break, causing
8680 a call to the library function. */
8687 /* Check the results by default. But if flag_fast_math is turned on,
8688 then assume sqrt will always be called with valid arguments. */
8690 if (! flag_fast_math)
8692 /* Don't define the builtin FP instructions
8693 if your machine is not IEEE. */
8694 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8697 lab1 = gen_label_rtx ();
8699 /* Test the result; if it is NaN, set errno=EDOM because
8700 the argument was not in the domain. */
8701 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
8702 emit_jump_insn (gen_beq (lab1));
8706 #ifdef GEN_ERRNO_RTX
8707 rtx errno_rtx = GEN_ERRNO_RTX;
8710 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
8713 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8716 /* We can't set errno=EDOM directly; let the library call do it.
8717 Pop the arguments right away in case the call gets deleted. */
8719 expand_call (exp, target, 0);
8726 /* Output the entire sequence. */
8727 insns = get_insns ();
8736 /* __builtin_apply_args returns block of memory allocated on
8737 the stack into which is stored the arg pointer, structure
8738 value address, static chain, and all the registers that might
8739 possibly be used in performing a function call. The code is
8740 moved to the start of the function so the incoming values are
8742 case BUILT_IN_APPLY_ARGS:
8743 /* Don't do __builtin_apply_args more than once in a function.
8744 Save the result of the first call and reuse it. */
8745 if (apply_args_value != 0)
8746 return apply_args_value;
8748 /* When this function is called, it means that registers must be
8749 saved on entry to this function. So we migrate the
8750 call to the first insn of this function. */
8755 temp = expand_builtin_apply_args ();
8759 apply_args_value = temp;
8761 /* Put the sequence after the NOTE that starts the function.
8762 If this is inside a SEQUENCE, make the outer-level insn
8763 chain current, so the code is placed at the start of the
8765 push_topmost_sequence ();
8766 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8767 pop_topmost_sequence ();
8771 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8772 FUNCTION with a copy of the parameters described by
8773 ARGUMENTS, and ARGSIZE. It returns a block of memory
8774 allocated on the stack into which is stored all the registers
8775 that might possibly be used for returning the result of a
8776 function. ARGUMENTS is the value returned by
8777 __builtin_apply_args. ARGSIZE is the number of bytes of
8778 arguments that must be copied. ??? How should this value be
8779 computed? We'll also need a safe worst case value for varargs
8781 case BUILT_IN_APPLY:
8783 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8784 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
8785 || TREE_CHAIN (arglist) == 0
8786 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8787 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8788 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8796 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8797 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
8799 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8802 /* __builtin_return (RESULT) causes the function to return the
8803 value described by RESULT. RESULT is address of the block of
8804 memory returned by __builtin_apply. */
8805 case BUILT_IN_RETURN:
8807 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8808 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8809 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8810 NULL_RTX, VOIDmode, 0));
8813 case BUILT_IN_SAVEREGS:
8814 /* Don't do __builtin_saveregs more than once in a function.
8815 Save the result of the first call and reuse it. */
8816 if (saveregs_value != 0)
8817 return saveregs_value;
8819 /* When this function is called, it means that registers must be
8820 saved on entry to this function. So we migrate the
8821 call to the first insn of this function. */
8825 /* Now really call the function. `expand_call' does not call
8826 expand_builtin, so there is no danger of infinite recursion here. */
8829 #ifdef EXPAND_BUILTIN_SAVEREGS
8830 /* Do whatever the machine needs done in this case. */
8831 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8833 /* The register where the function returns its value
8834 is likely to have something else in it, such as an argument.
8835 So preserve that register around the call. */
8837 if (value_mode != VOIDmode)
8839 rtx valreg = hard_libcall_value (value_mode);
8840 rtx saved_valreg = gen_reg_rtx (value_mode);
8842 emit_move_insn (saved_valreg, valreg);
8843 temp = expand_call (exp, target, ignore);
8844 emit_move_insn (valreg, saved_valreg);
8847 /* Generate the call, putting the value in a pseudo. */
8848 temp = expand_call (exp, target, ignore);
8854 saveregs_value = temp;
8856 /* Put the sequence after the NOTE that starts the function.
8857 If this is inside a SEQUENCE, make the outer-level insn
8858 chain current, so the code is placed at the start of the
8860 push_topmost_sequence ();
8861 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8862 pop_topmost_sequence ();
8866 /* __builtin_args_info (N) returns word N of the arg space info
8867 for the current function. The number and meanings of words
8868 is controlled by the definition of CUMULATIVE_ARGS. */
8869 case BUILT_IN_ARGS_INFO:
8871 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
8872 int *word_ptr = (int *) ¤t_function_args_info;
8874 /* These are used by the code below that is if 0'ed away */
8876 tree type, elts, result;
8879 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8880 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8881 __FILE__, __LINE__);
8885 tree arg = TREE_VALUE (arglist);
8886 if (TREE_CODE (arg) != INTEGER_CST)
8887 error ("argument of `__builtin_args_info' must be constant");
8890 int wordnum = TREE_INT_CST_LOW (arg);
8892 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8893 error ("argument of `__builtin_args_info' out of range");
8895 return GEN_INT (word_ptr[wordnum]);
8899 error ("missing argument in `__builtin_args_info'");
8904 for (i = 0; i < nwords; i++)
8905 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
8907 type = build_array_type (integer_type_node,
8908 build_index_type (build_int_2 (nwords, 0)));
8909 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8910 TREE_CONSTANT (result) = 1;
8911 TREE_STATIC (result) = 1;
8912 result = build (INDIRECT_REF, build_pointer_type (type), result);
8913 TREE_CONSTANT (result) = 1;
8914 return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
8918 /* Return the address of the first anonymous stack arg. */
8919 case BUILT_IN_NEXT_ARG:
8921 tree fntype = TREE_TYPE (current_function_decl);
8923 if ((TYPE_ARG_TYPES (fntype) == 0
8924 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8926 && ! current_function_varargs)
8928 error ("`va_start' used in function with fixed args");
8934 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8935 tree arg = TREE_VALUE (arglist);
8937 /* Strip off all nops for the sake of the comparison. This
8938 is not quite the same as STRIP_NOPS. It does more.
8939 We must also strip off INDIRECT_EXPR for C++ reference
8941 while (TREE_CODE (arg) == NOP_EXPR
8942 || TREE_CODE (arg) == CONVERT_EXPR
8943 || TREE_CODE (arg) == NON_LVALUE_EXPR
8944 || TREE_CODE (arg) == INDIRECT_REF)
8945 arg = TREE_OPERAND (arg, 0);
8946 if (arg != last_parm)
8947 warning ("second parameter of `va_start' not last named argument");
8949 else if (! current_function_varargs)
8950 /* Evidently an out of date version of <stdarg.h>; can't validate
8951 va_start's second argument, but can still work as intended. */
8952 warning ("`__builtin_next_arg' called without an argument");
8955 return expand_binop (Pmode, add_optab,
8956 current_function_internal_arg_pointer,
8957 current_function_arg_offset_rtx,
8958 NULL_RTX, 0, OPTAB_LIB_WIDEN);
8960 case BUILT_IN_CLASSIFY_TYPE:
8963 tree type = TREE_TYPE (TREE_VALUE (arglist));
8964 enum tree_code code = TREE_CODE (type);
8965 if (code == VOID_TYPE)
8966 return GEN_INT (void_type_class);
8967 if (code == INTEGER_TYPE)
8968 return GEN_INT (integer_type_class);
8969 if (code == CHAR_TYPE)
8970 return GEN_INT (char_type_class);
8971 if (code == ENUMERAL_TYPE)
8972 return GEN_INT (enumeral_type_class);
8973 if (code == BOOLEAN_TYPE)
8974 return GEN_INT (boolean_type_class);
8975 if (code == POINTER_TYPE)
8976 return GEN_INT (pointer_type_class);
8977 if (code == REFERENCE_TYPE)
8978 return GEN_INT (reference_type_class);
8979 if (code == OFFSET_TYPE)
8980 return GEN_INT (offset_type_class);
8981 if (code == REAL_TYPE)
8982 return GEN_INT (real_type_class);
8983 if (code == COMPLEX_TYPE)
8984 return GEN_INT (complex_type_class);
8985 if (code == FUNCTION_TYPE)
8986 return GEN_INT (function_type_class);
8987 if (code == METHOD_TYPE)
8988 return GEN_INT (method_type_class);
8989 if (code == RECORD_TYPE)
8990 return GEN_INT (record_type_class);
8991 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8992 return GEN_INT (union_type_class);
8993 if (code == ARRAY_TYPE)
8995 if (TYPE_STRING_FLAG (type))
8996 return GEN_INT (string_type_class);
8998 return GEN_INT (array_type_class);
9000 if (code == SET_TYPE)
9001 return GEN_INT (set_type_class);
9002 if (code == FILE_TYPE)
9003 return GEN_INT (file_type_class);
9004 if (code == LANG_TYPE)
9005 return GEN_INT (lang_type_class);
9007 return GEN_INT (no_type_class);
9009 case BUILT_IN_CONSTANT_P:
9014 tree arg = TREE_VALUE (arglist);
9017 if (really_constant_p (arg)
9018 || (TREE_CODE (arg) == ADDR_EXPR
9019 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST))
9022 /* Only emit CONSTANT_P_RTX if CSE will be run.
9023 Moreover, we don't want to expand trees that have side effects,
9024 as the original __builtin_constant_p did not evaluate its
9025 argument at all, and we would break existing usage by changing
9026 this. This quirk was generally useful, eliminating a bit of hair
9027 in the writing of the macros that use this function. Now the
9028 same thing can be better accomplished in an inline function. */
9030 if (! cse_not_expected && ! TREE_SIDE_EFFECTS (arg))
9032 /* Lazy fixup of old code: issue a warning and fail the test. */
9033 if (! can_handle_constant_p)
9035 warning ("Delayed evaluation of __builtin_constant_p not supported on this target.");
9036 warning ("Please report this as a bug to egcs-bugs@cygnus.com.");
9039 return gen_rtx_CONSTANT_P_RTX (TYPE_MODE (integer_type_node),
9040 expand_expr (arg, NULL_RTX,
9047 case BUILT_IN_FRAME_ADDRESS:
9048 /* The argument must be a nonnegative integer constant.
9049 It counts the number of frames to scan up the stack.
9050 The value is the address of that frame. */
9051 case BUILT_IN_RETURN_ADDRESS:
9052 /* The argument must be a nonnegative integer constant.
9053 It counts the number of frames to scan up the stack.
9054 The value is the return address saved in that frame. */
9056 /* Warning about missing arg was already issued. */
9058 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
9059 || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
9061 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9062 error ("invalid arg to `__builtin_frame_address'");
9064 error ("invalid arg to `__builtin_return_address'");
9069 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
9070 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
9071 hard_frame_pointer_rtx);
9073 /* Some ports cannot access arbitrary stack frames. */
9076 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9077 warning ("unsupported arg to `__builtin_frame_address'");
9079 warning ("unsupported arg to `__builtin_return_address'");
9083 /* For __builtin_frame_address, return what we've got. */
9084 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9087 if (GET_CODE (tem) != REG)
9088 tem = copy_to_reg (tem);
9092 /* Returns the address of the area where the structure is returned.
9094 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
9096 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
9097 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
9100 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
9102 case BUILT_IN_ALLOCA:
9104 /* Arg could be non-integer if user redeclared this fcn wrong. */
9105 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
9108 /* Compute the argument. */
9109 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
9111 /* Allocate the desired space. */
9112 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
9115 /* If not optimizing, call the library function. */
9116 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9120 /* Arg could be non-integer if user redeclared this fcn wrong. */
9121 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
9124 /* Compute the argument. */
9125 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
9126 /* Compute ffs, into TARGET if possible.
9127 Set TARGET to wherever the result comes back. */
9128 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
9129 ffs_optab, op0, target, 1);
9134 case BUILT_IN_STRLEN:
9135 /* If not optimizing, call the library function. */
9136 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9140 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9141 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9145 tree src = TREE_VALUE (arglist);
9146 tree len = c_strlen (src);
9149 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9151 rtx result, src_rtx, char_rtx;
9152 enum machine_mode insn_mode = value_mode, char_mode;
9153 enum insn_code icode;
9155 /* If the length is known, just return it. */
9157 return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);
9159 /* If SRC is not a pointer type, don't do this operation inline. */
9163 /* Call a function if we can't compute strlen in the right mode. */
9165 while (insn_mode != VOIDmode)
9167 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
9168 if (icode != CODE_FOR_nothing)
9171 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
9173 if (insn_mode == VOIDmode)
9176 /* Make a place to write the result of the instruction. */
9179 && GET_CODE (result) == REG
9180 && GET_MODE (result) == insn_mode
9181 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9182 result = gen_reg_rtx (insn_mode);
9184 /* Make sure the operands are acceptable to the predicates. */
9186 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
9187 result = gen_reg_rtx (insn_mode);
9188 src_rtx = memory_address (BLKmode,
9189 expand_expr (src, NULL_RTX, ptr_mode,
9192 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
9193 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
9195 /* Check the string is readable and has an end. */
9196 if (current_function_check_memory_usage)
9197 emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
9199 GEN_INT (MEMORY_USE_RO),
9200 TYPE_MODE (integer_type_node));
9202 char_rtx = const0_rtx;
9203 char_mode = insn_operand_mode[(int)icode][2];
9204 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
9205 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
9207 emit_insn (GEN_FCN (icode) (result,
9208 gen_rtx_MEM (BLKmode, src_rtx),
9209 char_rtx, GEN_INT (align)));
9211 /* Return the value in the proper mode for this function. */
9212 if (GET_MODE (result) == value_mode)
9214 else if (target != 0)
9216 convert_move (target, result, 0);
9220 return convert_to_mode (value_mode, result, 0);
9223 case BUILT_IN_STRCPY:
9224 /* If not optimizing, call the library function. */
9225 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9229 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9230 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9231 || TREE_CHAIN (arglist) == 0
9232 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
9236 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
9241 len = size_binop (PLUS_EXPR, len, integer_one_node);
9243 chainon (arglist, build_tree_list (NULL_TREE, len));
9247 case BUILT_IN_MEMCPY:
9248 /* If not optimizing, call the library function. */
9249 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9253 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9254 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9255 || TREE_CHAIN (arglist) == 0
9256 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9258 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9259 || (TREE_CODE (TREE_TYPE (TREE_VALUE
9260 (TREE_CHAIN (TREE_CHAIN (arglist)))))
9265 tree dest = TREE_VALUE (arglist);
9266 tree src = TREE_VALUE (TREE_CHAIN (arglist));
9267 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9270 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9272 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9273 rtx dest_mem, src_mem, dest_addr, len_rtx;
9275 /* If either SRC or DEST is not a pointer type, don't do
9276 this operation in-line. */
9277 if (src_align == 0 || dest_align == 0)
9279 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
9280 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9284 dest_mem = get_memory_rtx (dest);
9285 src_mem = get_memory_rtx (src);
9286 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
9288 /* Just copy the rights of SRC to the rights of DEST. */
9289 if (current_function_check_memory_usage)
9290 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
9291 XEXP (dest_mem, 0), ptr_mode,
9292 XEXP (src_mem, 0), ptr_mode,
9293 len_rtx, TYPE_MODE (sizetype));
9295 /* Copy word part most expediently. */
9297 = emit_block_move (dest_mem, src_mem, len_rtx,
9298 MIN (src_align, dest_align));
9301 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
9306 case BUILT_IN_MEMSET:
9307 /* If not optimizing, call the library function. */
9308 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9312 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9313 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9314 || TREE_CHAIN (arglist) == 0
9315 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9317 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9319 != (TREE_CODE (TREE_TYPE
9321 (TREE_CHAIN (TREE_CHAIN (arglist))))))))
9325 tree dest = TREE_VALUE (arglist);
9326 tree val = TREE_VALUE (TREE_CHAIN (arglist));
9327 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9330 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9331 rtx dest_mem, dest_addr, len_rtx;
9333 /* If DEST is not a pointer type, don't do this
9334 operation in-line. */
9335 if (dest_align == 0)
9338 /* If the arguments have side-effects, then we can only evaluate
9339 them at most once. The following code evaluates them twice if
9340 they are not constants because we break out to expand_call
9341 in that case. They can't be constants if they have side-effects
9342 so we can check for that first. Alternatively, we could call
9343 save_expr to make multiple evaluation safe. */
9344 if (TREE_SIDE_EFFECTS (val) || TREE_SIDE_EFFECTS (len))
9347 /* If VAL is not 0, don't do this operation in-line. */
9348 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
9351 /* If LEN does not expand to a constant, don't do this
9352 operation in-line. */
9353 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
9354 if (GET_CODE (len_rtx) != CONST_INT)
9357 dest_mem = get_memory_rtx (dest);
9359 /* Just check DST is writable and mark it as readable. */
9360 if (current_function_check_memory_usage)
9361 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
9362 XEXP (dest_mem, 0), ptr_mode,
9363 len_rtx, TYPE_MODE (sizetype),
9364 GEN_INT (MEMORY_USE_WO),
9365 TYPE_MODE (integer_type_node));
9368 dest_addr = clear_storage (dest_mem, len_rtx, dest_align);
9371 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
9376 /* These comparison functions need an instruction that returns an actual
9377 index. An ordinary compare that just sets the condition codes
9379 #ifdef HAVE_cmpstrsi
9380 case BUILT_IN_STRCMP:
9381 /* If not optimizing, call the library function. */
9382 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9385 /* If we need to check memory accesses, call the library function. */
9386 if (current_function_check_memory_usage)
9390 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9391 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9392 || TREE_CHAIN (arglist) == 0
9393 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
9395 else if (!HAVE_cmpstrsi)
9398 tree arg1 = TREE_VALUE (arglist);
9399 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9402 len = c_strlen (arg1);
9404 len = size_binop (PLUS_EXPR, integer_one_node, len);
9405 len2 = c_strlen (arg2);
9407 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
9409 /* If we don't have a constant length for the first, use the length
9410 of the second, if we know it. We don't require a constant for
9411 this case; some cost analysis could be done if both are available
9412 but neither is constant. For now, assume they're equally cheap.
9414 If both strings have constant lengths, use the smaller. This
9415 could arise if optimization results in strcpy being called with
9416 two fixed strings, or if the code was machine-generated. We should
9417 add some code to the `memcmp' handler below to deal with such
9418 situations, someday. */
9419 if (!len || TREE_CODE (len) != INTEGER_CST)
9426 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
9428 if (tree_int_cst_lt (len2, len))
9432 chainon (arglist, build_tree_list (NULL_TREE, len));
9436 case BUILT_IN_MEMCMP:
9437 /* If not optimizing, call the library function. */
9438 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9441 /* If we need to check memory accesses, call the library function. */
9442 if (current_function_check_memory_usage)
9446 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9447 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9448 || TREE_CHAIN (arglist) == 0
9449 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
9450 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9451 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
9453 else if (!HAVE_cmpstrsi)
9456 tree arg1 = TREE_VALUE (arglist);
9457 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9458 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9462 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9464 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9465 enum machine_mode insn_mode
9466 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
9468 /* If we don't have POINTER_TYPE, call the function. */
9469 if (arg1_align == 0 || arg2_align == 0)
9471 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
9472 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9476 /* Make a place to write the result of the instruction. */
9479 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
9480 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9481 result = gen_reg_rtx (insn_mode);
9483 emit_insn (gen_cmpstrsi (result, get_memory_rtx (arg1),
9484 get_memory_rtx (arg2),
9485 expand_expr (len, NULL_RTX, VOIDmode, 0),
9486 GEN_INT (MIN (arg1_align, arg2_align))));
9488 /* Return the value in the proper mode for this function. */
9489 mode = TYPE_MODE (TREE_TYPE (exp));
9490 if (GET_MODE (result) == mode)
9492 else if (target != 0)
9494 convert_move (target, result, 0);
9498 return convert_to_mode (mode, result, 0);
9501 case BUILT_IN_STRCMP:
9502 case BUILT_IN_MEMCMP:
9506 case BUILT_IN_SETJMP:
9508 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9512 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9514 rtx lab = gen_label_rtx ();
9515 rtx ret = expand_builtin_setjmp (buf_addr, target, lab, lab);
9520 /* __builtin_longjmp is passed a pointer to an array of five words.
9521 It's similar to the C library longjmp function but works with
9522 __builtin_setjmp above. */
9523 case BUILT_IN_LONGJMP:
9524 if (arglist == 0 || TREE_CHAIN (arglist) == 0
9525 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9529 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9531 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
9532 NULL_RTX, VOIDmode, 0);
9534 if (value != const1_rtx)
9536 error ("__builtin_longjmp second argument must be 1");
9540 expand_builtin_longjmp (buf_addr, value);
9547 emit_insn (gen_trap ());
9550 error ("__builtin_trap not supported by this target");
9554 /* Various hooks for the DWARF 2 __throw routine. */
9555 case BUILT_IN_UNWIND_INIT:
9556 expand_builtin_unwind_init ();
9558 case BUILT_IN_DWARF_CFA:
9559 return virtual_cfa_rtx;
9560 #ifdef DWARF2_UNWIND_INFO
9561 case BUILT_IN_DWARF_FP_REGNUM:
9562 return expand_builtin_dwarf_fp_regnum ();
9563 case BUILT_IN_DWARF_REG_SIZE:
9564 return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
9566 case BUILT_IN_FROB_RETURN_ADDR:
9567 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
9568 case BUILT_IN_EXTRACT_RETURN_ADDR:
9569 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
9570 case BUILT_IN_EH_RETURN:
9571 expand_builtin_eh_return (TREE_VALUE (arglist),
9572 TREE_VALUE (TREE_CHAIN (arglist)),
9573 TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))));
9576 default: /* just do library call, if unknown builtin */
9577 error ("built-in function `%s' not currently supported",
9578 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9581 /* The switch statement above can drop through to cause the function
9582 to be called normally. */
9584 return expand_call (exp, target, ignore);
9587 /* Built-in functions to perform an untyped call and return. */
9589 /* For each register that may be used for calling a function, this
9590 gives a mode used to copy the register's value. VOIDmode indicates
9591 the register is not used for calling a function. If the machine
9592 has register windows, this gives only the outbound registers.
9593 INCOMING_REGNO gives the corresponding inbound register. */
9594 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
9596 /* For each register that may be used for returning values, this gives
9597 a mode used to copy the register's value. VOIDmode indicates the
9598 register is not used for returning values. If the machine has
9599 register windows, this gives only the outbound registers.
9600 INCOMING_REGNO gives the corresponding inbound register. */
9601 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
9603 /* For each register that may be used for calling a function, this
9604 gives the offset of that register into the block returned by
9605 __builtin_apply_args. 0 indicates that the register is not
9606 used for calling a function. */
9607 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
9609 /* Return the offset of register REGNO into the block returned by
9610 __builtin_apply_args. This is not declared static, since it is
9611 needed in objc-act.c. */
9614 apply_args_register_offset (regno)
9619 /* Arguments are always put in outgoing registers (in the argument
9620 block) if such make sense. */
9621 #ifdef OUTGOING_REGNO
9622 regno = OUTGOING_REGNO(regno);
9624 return apply_args_reg_offset[regno];
9627 /* Return the size required for the block returned by __builtin_apply_args,
9628 and initialize apply_args_mode. */
9633 static int size = -1;
9635 enum machine_mode mode;
9637 /* The values computed by this function never change. */
9640 /* The first value is the incoming arg-pointer. */
9641 size = GET_MODE_SIZE (Pmode);
9643 /* The second value is the structure value address unless this is
9644 passed as an "invisible" first argument. */
9645 if (struct_value_rtx)
9646 size += GET_MODE_SIZE (Pmode);
9648 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9649 if (FUNCTION_ARG_REGNO_P (regno))
9651 /* Search for the proper mode for copying this register's
9652 value. I'm not sure this is right, but it works so far. */
9653 enum machine_mode best_mode = VOIDmode;
9655 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9657 mode = GET_MODE_WIDER_MODE (mode))
9658 if (HARD_REGNO_MODE_OK (regno, mode)
9659 && HARD_REGNO_NREGS (regno, mode) == 1)
9662 if (best_mode == VOIDmode)
9663 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9665 mode = GET_MODE_WIDER_MODE (mode))
9666 if (HARD_REGNO_MODE_OK (regno, mode)
9667 && (mov_optab->handlers[(int) mode].insn_code
9668 != CODE_FOR_nothing))
9672 if (mode == VOIDmode)
9675 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9676 if (size % align != 0)
9677 size = CEIL (size, align) * align;
9678 apply_args_reg_offset[regno] = size;
9679 size += GET_MODE_SIZE (mode);
9680 apply_args_mode[regno] = mode;
9684 apply_args_mode[regno] = VOIDmode;
9685 apply_args_reg_offset[regno] = 0;
9691 /* Return the size required for the block returned by __builtin_apply,
9692 and initialize apply_result_mode. */
9695 apply_result_size ()
9697 static int size = -1;
9699 enum machine_mode mode;
9701 /* The values computed by this function never change. */
9706 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9707 if (FUNCTION_VALUE_REGNO_P (regno))
9709 /* Search for the proper mode for copying this register's
9710 value. I'm not sure this is right, but it works so far. */
9711 enum machine_mode best_mode = VOIDmode;
9713 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9715 mode = GET_MODE_WIDER_MODE (mode))
9716 if (HARD_REGNO_MODE_OK (regno, mode))
9719 if (best_mode == VOIDmode)
9720 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9722 mode = GET_MODE_WIDER_MODE (mode))
9723 if (HARD_REGNO_MODE_OK (regno, mode)
9724 && (mov_optab->handlers[(int) mode].insn_code
9725 != CODE_FOR_nothing))
9729 if (mode == VOIDmode)
9732 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9733 if (size % align != 0)
9734 size = CEIL (size, align) * align;
9735 size += GET_MODE_SIZE (mode);
9736 apply_result_mode[regno] = mode;
9739 apply_result_mode[regno] = VOIDmode;
9741 /* Allow targets that use untyped_call and untyped_return to override
9742 the size so that machine-specific information can be stored here. */
9743 #ifdef APPLY_RESULT_SIZE
9744 size = APPLY_RESULT_SIZE;
9750 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9751 /* Create a vector describing the result block RESULT. If SAVEP is true,
9752 the result block is used to save the values; otherwise it is used to
9753 restore the values. */
9756 result_vector (savep, result)
9760 int regno, size, align, nelts;
9761 enum machine_mode mode;
9763 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9766 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9767 if ((mode = apply_result_mode[regno]) != VOIDmode)
9769 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9770 if (size % align != 0)
9771 size = CEIL (size, align) * align;
9772 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
9773 mem = change_address (result, mode,
9774 plus_constant (XEXP (result, 0), size));
9775 savevec[nelts++] = (savep
9776 ? gen_rtx_SET (VOIDmode, mem, reg)
9777 : gen_rtx_SET (VOIDmode, reg, mem));
9778 size += GET_MODE_SIZE (mode);
9780 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
9782 #endif /* HAVE_untyped_call or HAVE_untyped_return */
9784 /* Save the state required to perform an untyped call with the same
9785 arguments as were passed to the current function. */
9788 expand_builtin_apply_args ()
9791 int size, align, regno;
9792 enum machine_mode mode;
9794 /* Create a block where the arg-pointer, structure value address,
9795 and argument registers can be saved. */
9796 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
9798 /* Walk past the arg-pointer and structure value address. */
9799 size = GET_MODE_SIZE (Pmode);
9800 if (struct_value_rtx)
9801 size += GET_MODE_SIZE (Pmode);
9803 /* Save each register used in calling a function to the block. */
9804 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9805 if ((mode = apply_args_mode[regno]) != VOIDmode)
9809 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9810 if (size % align != 0)
9811 size = CEIL (size, align) * align;
9813 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
9816 /* For reg-stack.c's stack register household.
9817 Compare with a similar piece of code in function.c. */
9819 emit_insn (gen_rtx_USE (mode, tem));
9822 emit_move_insn (change_address (registers, mode,
9823 plus_constant (XEXP (registers, 0),
9826 size += GET_MODE_SIZE (mode);
9829 /* Save the arg pointer to the block. */
9830 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9831 copy_to_reg (virtual_incoming_args_rtx));
9832 size = GET_MODE_SIZE (Pmode);
9834 /* Save the structure value address unless this is passed as an
9835 "invisible" first argument. */
9836 if (struct_value_incoming_rtx)
9838 emit_move_insn (change_address (registers, Pmode,
9839 plus_constant (XEXP (registers, 0),
9841 copy_to_reg (struct_value_incoming_rtx));
9842 size += GET_MODE_SIZE (Pmode);
9845 /* Return the address of the block. */
9846 return copy_addr_to_reg (XEXP (registers, 0));
9849 /* Perform an untyped call and save the state required to perform an
9850 untyped return of whatever value was returned by the given function. */
9853 expand_builtin_apply (function, arguments, argsize)
9854 rtx function, arguments, argsize;
9856 int size, align, regno;
9857 enum machine_mode mode;
9858 rtx incoming_args, result, reg, dest, call_insn;
9859 rtx old_stack_level = 0;
9860 rtx call_fusage = 0;
9862 /* Create a block where the return registers can be saved. */
9863 result = assign_stack_local (BLKmode, apply_result_size (), -1);
9865 /* ??? The argsize value should be adjusted here. */
9867 /* Fetch the arg pointer from the ARGUMENTS block. */
9868 incoming_args = gen_reg_rtx (Pmode);
9869 emit_move_insn (incoming_args,
9870 gen_rtx_MEM (Pmode, arguments));
9871 #ifndef STACK_GROWS_DOWNWARD
9872 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9873 incoming_args, 0, OPTAB_LIB_WIDEN);
9876 /* Perform postincrements before actually calling the function. */
9879 /* Push a new argument block and copy the arguments. */
9880 do_pending_stack_adjust ();
9882 /* Save the stack with nonlocal if available */
9883 #ifdef HAVE_save_stack_nonlocal
9884 if (HAVE_save_stack_nonlocal)
9885 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
9888 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
9890 /* Push a block of memory onto the stack to store the memory arguments.
9891 Save the address in a register, and copy the memory arguments. ??? I
9892 haven't figured out how the calling convention macros effect this,
9893 but it's likely that the source and/or destination addresses in
9894 the block copy will need updating in machine specific ways. */
9895 dest = allocate_dynamic_stack_space (argsize, 0, 0);
9896 emit_block_move (gen_rtx_MEM (BLKmode, dest),
9897 gen_rtx_MEM (BLKmode, incoming_args),
9899 PARM_BOUNDARY / BITS_PER_UNIT);
9901 /* Refer to the argument block. */
9903 arguments = gen_rtx_MEM (BLKmode, arguments);
9905 /* Walk past the arg-pointer and structure value address. */
9906 size = GET_MODE_SIZE (Pmode);
9907 if (struct_value_rtx)
9908 size += GET_MODE_SIZE (Pmode);
9910 /* Restore each of the registers previously saved. Make USE insns
9911 for each of these registers for use in making the call. */
9912 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9913 if ((mode = apply_args_mode[regno]) != VOIDmode)
9915 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9916 if (size % align != 0)
9917 size = CEIL (size, align) * align;
9918 reg = gen_rtx_REG (mode, regno);
9919 emit_move_insn (reg,
9920 change_address (arguments, mode,
9921 plus_constant (XEXP (arguments, 0),
9924 use_reg (&call_fusage, reg);
9925 size += GET_MODE_SIZE (mode);
9928 /* Restore the structure value address unless this is passed as an
9929 "invisible" first argument. */
9930 size = GET_MODE_SIZE (Pmode);
9931 if (struct_value_rtx)
9933 rtx value = gen_reg_rtx (Pmode);
9934 emit_move_insn (value,
9935 change_address (arguments, Pmode,
9936 plus_constant (XEXP (arguments, 0),
9938 emit_move_insn (struct_value_rtx, value);
9939 if (GET_CODE (struct_value_rtx) == REG)
9940 use_reg (&call_fusage, struct_value_rtx);
9941 size += GET_MODE_SIZE (Pmode);
9944 /* All arguments and registers used for the call are set up by now! */
9945 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
9947 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
9948 and we don't want to load it into a register as an optimization,
9949 because prepare_call_address already did it if it should be done. */
9950 if (GET_CODE (function) != SYMBOL_REF)
9951 function = memory_address (FUNCTION_MODE, function);
9953 /* Generate the actual call instruction and save the return value. */
9954 #ifdef HAVE_untyped_call
9955 if (HAVE_untyped_call)
9956 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
9957 result, result_vector (1, result)));
9960 #ifdef HAVE_call_value
9961 if (HAVE_call_value)
9965 /* Locate the unique return register. It is not possible to
9966 express a call that sets more than one return register using
9967 call_value; use untyped_call for that. In fact, untyped_call
9968 only needs to save the return registers in the given block. */
9969 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9970 if ((mode = apply_result_mode[regno]) != VOIDmode)
9973 abort (); /* HAVE_untyped_call required. */
9974 valreg = gen_rtx_REG (mode, regno);
9977 emit_call_insn (gen_call_value (valreg,
9978 gen_rtx_MEM (FUNCTION_MODE, function),
9979 const0_rtx, NULL_RTX, const0_rtx));
9981 emit_move_insn (change_address (result, GET_MODE (valreg),
9989 /* Find the CALL insn we just emitted. */
9990 for (call_insn = get_last_insn ();
9991 call_insn && GET_CODE (call_insn) != CALL_INSN;
9992 call_insn = PREV_INSN (call_insn))
9998 /* Put the register usage information on the CALL. If there is already
9999 some usage information, put ours at the end. */
10000 if (CALL_INSN_FUNCTION_USAGE (call_insn))
10004 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
10005 link = XEXP (link, 1))
10008 XEXP (link, 1) = call_fusage;
10011 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
10013 /* Restore the stack. */
10014 #ifdef HAVE_save_stack_nonlocal
10015 if (HAVE_save_stack_nonlocal)
10016 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
10019 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
10021 /* Return the address of the result block. */
10022 return copy_addr_to_reg (XEXP (result, 0));
10025 /* Perform an untyped return. */
10028 expand_builtin_return (result)
10031 int size, align, regno;
10032 enum machine_mode mode;
10034 rtx call_fusage = 0;
10036 apply_result_size ();
10037 result = gen_rtx_MEM (BLKmode, result);
10039 #ifdef HAVE_untyped_return
10040 if (HAVE_untyped_return)
10042 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
10048 /* Restore the return value and note that each value is used. */
10050 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
10051 if ((mode = apply_result_mode[regno]) != VOIDmode)
10053 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
10054 if (size % align != 0)
10055 size = CEIL (size, align) * align;
10056 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
10057 emit_move_insn (reg,
10058 change_address (result, mode,
10059 plus_constant (XEXP (result, 0),
10062 push_to_sequence (call_fusage);
10063 emit_insn (gen_rtx_USE (VOIDmode, reg));
10064 call_fusage = get_insns ();
10066 size += GET_MODE_SIZE (mode);
10069 /* Put the USE insns before the return. */
10070 emit_insns (call_fusage);
10072 /* Return whatever values was restored by jumping directly to the end
10073 of the function. */
10074 expand_null_return ();
10077 /* Expand code for a post- or pre- increment or decrement
10078 and return the RTX for the result.
10079 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
10082 expand_increment (exp, post, ignore)
10086 register rtx op0, op1;
10087 register rtx temp, value;
10088 register tree incremented = TREE_OPERAND (exp, 0);
10089 optab this_optab = add_optab;
10091 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
10092 int op0_is_copy = 0;
10093 int single_insn = 0;
10094 /* 1 means we can't store into OP0 directly,
10095 because it is a subreg narrower than a word,
10096 and we don't dare clobber the rest of the word. */
10097 int bad_subreg = 0;
10099 /* Stabilize any component ref that might need to be
10100 evaluated more than once below. */
10102 || TREE_CODE (incremented) == BIT_FIELD_REF
10103 || (TREE_CODE (incremented) == COMPONENT_REF
10104 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
10105 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
10106 incremented = stabilize_reference (incremented);
10107 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
10108 ones into save exprs so that they don't accidentally get evaluated
10109 more than once by the code below. */
10110 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
10111 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
10112 incremented = save_expr (incremented);
10114 /* Compute the operands as RTX.
10115 Note whether OP0 is the actual lvalue or a copy of it:
10116 I believe it is a copy iff it is a register or subreg
10117 and insns were generated in computing it. */
10119 temp = get_last_insn ();
10120 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
10122 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
10123 in place but instead must do sign- or zero-extension during assignment,
10124 so we copy it into a new register and let the code below use it as
10127 Note that we can safely modify this SUBREG since it is know not to be
10128 shared (it was made by the expand_expr call above). */
10130 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
10133 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
10137 else if (GET_CODE (op0) == SUBREG
10138 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
10140 /* We cannot increment this SUBREG in place. If we are
10141 post-incrementing, get a copy of the old value. Otherwise,
10142 just mark that we cannot increment in place. */
10144 op0 = copy_to_reg (op0);
10149 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
10150 && temp != get_last_insn ());
10151 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
10152 EXPAND_MEMORY_USE_BAD);
10154 /* Decide whether incrementing or decrementing. */
10155 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
10156 || TREE_CODE (exp) == PREDECREMENT_EXPR)
10157 this_optab = sub_optab;
10159 /* Convert decrement by a constant into a negative increment. */
10160 if (this_optab == sub_optab
10161 && GET_CODE (op1) == CONST_INT)
10163 op1 = GEN_INT (- INTVAL (op1));
10164 this_optab = add_optab;
10167 /* For a preincrement, see if we can do this with a single instruction. */
10170 icode = (int) this_optab->handlers[(int) mode].insn_code;
10171 if (icode != (int) CODE_FOR_nothing
10172 /* Make sure that OP0 is valid for operands 0 and 1
10173 of the insn we want to queue. */
10174 && (*insn_operand_predicate[icode][0]) (op0, mode)
10175 && (*insn_operand_predicate[icode][1]) (op0, mode)
10176 && (*insn_operand_predicate[icode][2]) (op1, mode))
10180 /* If OP0 is not the actual lvalue, but rather a copy in a register,
10181 then we cannot just increment OP0. We must therefore contrive to
10182 increment the original value. Then, for postincrement, we can return
10183 OP0 since it is a copy of the old value. For preincrement, expand here
10184 unless we can do it with a single insn.
10186 Likewise if storing directly into OP0 would clobber high bits
10187 we need to preserve (bad_subreg). */
10188 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
10190 /* This is the easiest way to increment the value wherever it is.
10191 Problems with multiple evaluation of INCREMENTED are prevented
10192 because either (1) it is a component_ref or preincrement,
10193 in which case it was stabilized above, or (2) it is an array_ref
10194 with constant index in an array in a register, which is
10195 safe to reevaluate. */
10196 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
10197 || TREE_CODE (exp) == PREDECREMENT_EXPR)
10198 ? MINUS_EXPR : PLUS_EXPR),
10201 TREE_OPERAND (exp, 1));
10203 while (TREE_CODE (incremented) == NOP_EXPR
10204 || TREE_CODE (incremented) == CONVERT_EXPR)
10206 newexp = convert (TREE_TYPE (incremented), newexp);
10207 incremented = TREE_OPERAND (incremented, 0);
10210 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
10211 return post ? op0 : temp;
10216 /* We have a true reference to the value in OP0.
10217 If there is an insn to add or subtract in this mode, queue it.
10218 Queueing the increment insn avoids the register shuffling
10219 that often results if we must increment now and first save
10220 the old value for subsequent use. */
10222 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
10223 op0 = stabilize (op0);
10226 icode = (int) this_optab->handlers[(int) mode].insn_code;
10227 if (icode != (int) CODE_FOR_nothing
10228 /* Make sure that OP0 is valid for operands 0 and 1
10229 of the insn we want to queue. */
10230 && (*insn_operand_predicate[icode][0]) (op0, mode)
10231 && (*insn_operand_predicate[icode][1]) (op0, mode))
10233 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
10234 op1 = force_reg (mode, op1);
10236 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
10238 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
10240 rtx addr = (general_operand (XEXP (op0, 0), mode)
10241 ? force_reg (Pmode, XEXP (op0, 0))
10242 : copy_to_reg (XEXP (op0, 0)));
10245 op0 = change_address (op0, VOIDmode, addr);
10246 temp = force_reg (GET_MODE (op0), op0);
10247 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
10248 op1 = force_reg (mode, op1);
10250 /* The increment queue is LIFO, thus we have to `queue'
10251 the instructions in reverse order. */
10252 enqueue_insn (op0, gen_move_insn (op0, temp));
10253 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
10258 /* Preincrement, or we can't increment with one simple insn. */
10260 /* Save a copy of the value before inc or dec, to return it later. */
10261 temp = value = copy_to_reg (op0);
10263 /* Arrange to return the incremented value. */
10264 /* Copy the rtx because expand_binop will protect from the queue,
10265 and the results of that would be invalid for us to return
10266 if our caller does emit_queue before using our result. */
10267 temp = copy_rtx (value = op0);
10269 /* Increment however we can. */
10270 op1 = expand_binop (mode, this_optab, value, op1,
10271 current_function_check_memory_usage ? NULL_RTX : op0,
10272 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
10273 /* Make sure the value is stored into OP0. */
10275 emit_move_insn (op0, op1);
10280 /* Expand all function calls contained within EXP, innermost ones first.
10281 But don't look within expressions that have sequence points.
10282 For each CALL_EXPR, record the rtx for its value
10283 in the CALL_EXPR_RTL field. */
10286 preexpand_calls (exp)
10289 register int nops, i;
10290 int type = TREE_CODE_CLASS (TREE_CODE (exp));
10292 if (! do_preexpand_calls)
10295 /* Only expressions and references can contain calls. */
10297 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
10300 switch (TREE_CODE (exp))
10303 /* Do nothing if already expanded. */
10304 if (CALL_EXPR_RTL (exp) != 0
10305 /* Do nothing if the call returns a variable-sized object. */
10306 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
10307 /* Do nothing to built-in functions. */
10308 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
10309 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
10311 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
10314 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
10317 case COMPOUND_EXPR:
10319 case TRUTH_ANDIF_EXPR:
10320 case TRUTH_ORIF_EXPR:
10321 /* If we find one of these, then we can be sure
10322 the adjust will be done for it (since it makes jumps).
10323 Do it now, so that if this is inside an argument
10324 of a function, we don't get the stack adjustment
10325 after some other args have already been pushed. */
10326 do_pending_stack_adjust ();
10331 case WITH_CLEANUP_EXPR:
10332 case CLEANUP_POINT_EXPR:
10333 case TRY_CATCH_EXPR:
10337 if (SAVE_EXPR_RTL (exp) != 0)
10344 nops = tree_code_length[(int) TREE_CODE (exp)];
10345 for (i = 0; i < nops; i++)
10346 if (TREE_OPERAND (exp, i) != 0)
10348 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
10349 if (type == 'e' || type == '<' || type == '1' || type == '2'
10351 preexpand_calls (TREE_OPERAND (exp, i));
10355 /* At the start of a function, record that we have no previously-pushed
10356 arguments waiting to be popped. */
10359 init_pending_stack_adjust ()
10361 pending_stack_adjust = 0;
10364 /* When exiting from function, if safe, clear out any pending stack adjust
10365 so the adjustment won't get done.
10367 Note, if the current function calls alloca, then it must have a
10368 frame pointer regardless of the value of flag_omit_frame_pointer. */
10371 clear_pending_stack_adjust ()
10373 #ifdef EXIT_IGNORE_STACK
10375 && (! flag_omit_frame_pointer || current_function_calls_alloca)
10376 && EXIT_IGNORE_STACK
10377 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
10378 && ! flag_inline_functions)
10379 pending_stack_adjust = 0;
10383 /* Pop any previously-pushed arguments that have not been popped yet. */
10386 do_pending_stack_adjust ()
10388 if (inhibit_defer_pop == 0)
10390 if (pending_stack_adjust != 0)
10391 adjust_stack (GEN_INT (pending_stack_adjust));
10392 pending_stack_adjust = 0;
10396 /* Expand conditional expressions. */
10398 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
10399 LABEL is an rtx of code CODE_LABEL, in this function and all the
10403 jumpifnot (exp, label)
10407 do_jump (exp, label, NULL_RTX);
10410 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
10413 jumpif (exp, label)
10417 do_jump (exp, NULL_RTX, label);
10420 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
10421 the result is zero, or IF_TRUE_LABEL if the result is one.
10422 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
10423 meaning fall through in that case.
10425 do_jump always does any pending stack adjust except when it does not
10426 actually perform a jump. An example where there is no jump
10427 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
10429 This function is responsible for optimizing cases such as
10430 &&, || and comparison operators in EXP. */
10433 do_jump (exp, if_false_label, if_true_label)
10435 rtx if_false_label, if_true_label;
10437 register enum tree_code code = TREE_CODE (exp);
10438 /* Some cases need to create a label to jump to
10439 in order to properly fall through.
10440 These cases set DROP_THROUGH_LABEL nonzero. */
10441 rtx drop_through_label = 0;
10443 rtx comparison = 0;
10446 enum machine_mode mode;
10448 #ifdef MAX_INTEGER_COMPUTATION_MODE
10449 check_max_integer_computation_mode (exp);
10460 temp = integer_zerop (exp) ? if_false_label : if_true_label;
10466 /* This is not true with #pragma weak */
10468 /* The address of something can never be zero. */
10470 emit_jump (if_true_label);
10475 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
10476 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
10477 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
10480 /* If we are narrowing the operand, we have to do the compare in the
10482 if ((TYPE_PRECISION (TREE_TYPE (exp))
10483 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10485 case NON_LVALUE_EXPR:
10486 case REFERENCE_EXPR:
10491 /* These cannot change zero->non-zero or vice versa. */
10492 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10496 /* This is never less insns than evaluating the PLUS_EXPR followed by
10497 a test and can be longer if the test is eliminated. */
10499 /* Reduce to minus. */
10500 exp = build (MINUS_EXPR, TREE_TYPE (exp),
10501 TREE_OPERAND (exp, 0),
10502 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
10503 TREE_OPERAND (exp, 1))));
10504 /* Process as MINUS. */
10508 /* Non-zero iff operands of minus differ. */
10509 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
10510 TREE_OPERAND (exp, 0),
10511 TREE_OPERAND (exp, 1)),
10516 /* If we are AND'ing with a small constant, do this comparison in the
10517 smallest type that fits. If the machine doesn't have comparisons
10518 that small, it will be converted back to the wider comparison.
10519 This helps if we are testing the sign bit of a narrower object.
10520 combine can't do this for us because it can't know whether a
10521 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
10523 if (! SLOW_BYTE_ACCESS
10524 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
10525 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
10526 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
10527 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
10528 && (type = type_for_mode (mode, 1)) != 0
10529 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10530 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10531 != CODE_FOR_nothing))
10533 do_jump (convert (type, exp), if_false_label, if_true_label);
10538 case TRUTH_NOT_EXPR:
10539 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10542 case TRUTH_ANDIF_EXPR:
10543 if (if_false_label == 0)
10544 if_false_label = drop_through_label = gen_label_rtx ();
10545 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
10546 start_cleanup_deferral ();
10547 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10548 end_cleanup_deferral ();
10551 case TRUTH_ORIF_EXPR:
10552 if (if_true_label == 0)
10553 if_true_label = drop_through_label = gen_label_rtx ();
10554 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
10555 start_cleanup_deferral ();
10556 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10557 end_cleanup_deferral ();
10560 case COMPOUND_EXPR:
10561 push_temp_slots ();
10562 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
10563 preserve_temp_slots (NULL_RTX);
10564 free_temp_slots ();
10567 do_pending_stack_adjust ();
10568 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10571 case COMPONENT_REF:
10572 case BIT_FIELD_REF:
10575 int bitsize, bitpos, unsignedp;
10576 enum machine_mode mode;
10582 /* Get description of this reference. We don't actually care
10583 about the underlying object here. */
10584 get_inner_reference (exp, &bitsize, &bitpos, &offset,
10585 &mode, &unsignedp, &volatilep,
10588 type = type_for_size (bitsize, unsignedp);
10589 if (! SLOW_BYTE_ACCESS
10590 && type != 0 && bitsize >= 0
10591 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10592 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10593 != CODE_FOR_nothing))
10595 do_jump (convert (type, exp), if_false_label, if_true_label);
10602 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10603 if (integer_onep (TREE_OPERAND (exp, 1))
10604 && integer_zerop (TREE_OPERAND (exp, 2)))
10605 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10607 else if (integer_zerop (TREE_OPERAND (exp, 1))
10608 && integer_onep (TREE_OPERAND (exp, 2)))
10609 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10613 register rtx label1 = gen_label_rtx ();
10614 drop_through_label = gen_label_rtx ();
10616 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10618 start_cleanup_deferral ();
10619 /* Now the THEN-expression. */
10620 do_jump (TREE_OPERAND (exp, 1),
10621 if_false_label ? if_false_label : drop_through_label,
10622 if_true_label ? if_true_label : drop_through_label);
10623 /* In case the do_jump just above never jumps. */
10624 do_pending_stack_adjust ();
10625 emit_label (label1);
10627 /* Now the ELSE-expression. */
10628 do_jump (TREE_OPERAND (exp, 2),
10629 if_false_label ? if_false_label : drop_through_label,
10630 if_true_label ? if_true_label : drop_through_label);
10631 end_cleanup_deferral ();
10637 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10639 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10640 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10642 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10643 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10646 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10647 fold (build (EQ_EXPR, TREE_TYPE (exp),
10648 fold (build1 (REALPART_EXPR,
10649 TREE_TYPE (inner_type),
10651 fold (build1 (REALPART_EXPR,
10652 TREE_TYPE (inner_type),
10654 fold (build (EQ_EXPR, TREE_TYPE (exp),
10655 fold (build1 (IMAGPART_EXPR,
10656 TREE_TYPE (inner_type),
10658 fold (build1 (IMAGPART_EXPR,
10659 TREE_TYPE (inner_type),
10661 if_false_label, if_true_label);
10664 else if (integer_zerop (TREE_OPERAND (exp, 1)))
10665 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10667 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10668 && !can_compare_p (TYPE_MODE (inner_type)))
10669 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10671 comparison = compare (exp, EQ, EQ);
10677 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10679 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10680 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10682 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10683 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10686 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10687 fold (build (NE_EXPR, TREE_TYPE (exp),
10688 fold (build1 (REALPART_EXPR,
10689 TREE_TYPE (inner_type),
10691 fold (build1 (REALPART_EXPR,
10692 TREE_TYPE (inner_type),
10694 fold (build (NE_EXPR, TREE_TYPE (exp),
10695 fold (build1 (IMAGPART_EXPR,
10696 TREE_TYPE (inner_type),
10698 fold (build1 (IMAGPART_EXPR,
10699 TREE_TYPE (inner_type),
10701 if_false_label, if_true_label);
10704 else if (integer_zerop (TREE_OPERAND (exp, 1)))
10705 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10707 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10708 && !can_compare_p (TYPE_MODE (inner_type)))
10709 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10711 comparison = compare (exp, NE, NE);
10716 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10718 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10719 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10721 comparison = compare (exp, LT, LTU);
10725 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10727 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10728 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10730 comparison = compare (exp, LE, LEU);
10734 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10736 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10737 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10739 comparison = compare (exp, GT, GTU);
10743 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10745 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10746 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10748 comparison = compare (exp, GE, GEU);
10753 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10755 /* This is not needed any more and causes poor code since it causes
10756 comparisons and tests from non-SI objects to have different code
10758 /* Copy to register to avoid generating bad insns by cse
10759 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10760 if (!cse_not_expected && GET_CODE (temp) == MEM)
10761 temp = copy_to_reg (temp);
10763 do_pending_stack_adjust ();
10764 if (GET_CODE (temp) == CONST_INT)
10765 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10766 else if (GET_CODE (temp) == LABEL_REF)
10767 comparison = const_true_rtx;
10768 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10769 && !can_compare_p (GET_MODE (temp)))
10770 /* Note swapping the labels gives us not-equal. */
10771 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10772 else if (GET_MODE (temp) != VOIDmode)
10773 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
10774 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10775 GET_MODE (temp), NULL_RTX, 0);
10780 /* Do any postincrements in the expression that was tested. */
10783 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10784 straight into a conditional jump instruction as the jump condition.
10785 Otherwise, all the work has been done already. */
10787 if (comparison == const_true_rtx)
10790 emit_jump (if_true_label);
10792 else if (comparison == const0_rtx)
10794 if (if_false_label)
10795 emit_jump (if_false_label);
10797 else if (comparison)
10798 do_jump_for_compare (comparison, if_false_label, if_true_label);
10800 if (drop_through_label)
10802 /* If do_jump produces code that might be jumped around,
10803 do any stack adjusts from that code, before the place
10804 where control merges in. */
10805 do_pending_stack_adjust ();
10806 emit_label (drop_through_label);
10810 /* Given a comparison expression EXP for values too wide to be compared
10811 with one insn, test the comparison and jump to the appropriate label.
10812 The code of EXP is ignored; we always test GT if SWAP is 0,
10813 and LT if SWAP is 1. */
10816 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10819 rtx if_false_label, if_true_label;
10821 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10822 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10823 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10824 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10825 rtx drop_through_label = 0;
10826 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10829 if (! if_true_label || ! if_false_label)
10830 drop_through_label = gen_label_rtx ();
10831 if (! if_true_label)
10832 if_true_label = drop_through_label;
10833 if (! if_false_label)
10834 if_false_label = drop_through_label;
10836 /* Compare a word at a time, high order first. */
10837 for (i = 0; i < nwords; i++)
10840 rtx op0_word, op1_word;
10842 if (WORDS_BIG_ENDIAN)
10844 op0_word = operand_subword_force (op0, i, mode);
10845 op1_word = operand_subword_force (op1, i, mode);
10849 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10850 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10853 /* All but high-order word must be compared as unsigned. */
10854 comp = compare_from_rtx (op0_word, op1_word,
10855 (unsignedp || i > 0) ? GTU : GT,
10856 unsignedp, word_mode, NULL_RTX, 0);
10857 if (comp == const_true_rtx)
10858 emit_jump (if_true_label);
10859 else if (comp != const0_rtx)
10860 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10862 /* Consider lower words only if these are equal. */
10863 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10865 if (comp == const_true_rtx)
10866 emit_jump (if_false_label);
10867 else if (comp != const0_rtx)
10868 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10871 if (if_false_label)
10872 emit_jump (if_false_label);
10873 if (drop_through_label)
10874 emit_label (drop_through_label);
10877 /* Compare OP0 with OP1, word at a time, in mode MODE.
10878 UNSIGNEDP says to do unsigned comparison.
10879 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10882 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10883 enum machine_mode mode;
10886 rtx if_false_label, if_true_label;
10888 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10889 rtx drop_through_label = 0;
10892 if (! if_true_label || ! if_false_label)
10893 drop_through_label = gen_label_rtx ();
10894 if (! if_true_label)
10895 if_true_label = drop_through_label;
10896 if (! if_false_label)
10897 if_false_label = drop_through_label;
10899 /* Compare a word at a time, high order first. */
10900 for (i = 0; i < nwords; i++)
10903 rtx op0_word, op1_word;
10905 if (WORDS_BIG_ENDIAN)
10907 op0_word = operand_subword_force (op0, i, mode);
10908 op1_word = operand_subword_force (op1, i, mode);
10912 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10913 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10916 /* All but high-order word must be compared as unsigned. */
10917 comp = compare_from_rtx (op0_word, op1_word,
10918 (unsignedp || i > 0) ? GTU : GT,
10919 unsignedp, word_mode, NULL_RTX, 0);
10920 if (comp == const_true_rtx)
10921 emit_jump (if_true_label);
10922 else if (comp != const0_rtx)
10923 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10925 /* Consider lower words only if these are equal. */
10926 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10928 if (comp == const_true_rtx)
10929 emit_jump (if_false_label);
10930 else if (comp != const0_rtx)
10931 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10934 if (if_false_label)
10935 emit_jump (if_false_label);
10936 if (drop_through_label)
10937 emit_label (drop_through_label);
10940 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10941 with one insn, test the comparison and jump to the appropriate label. */
10944 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10946 rtx if_false_label, if_true_label;
10948 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10949 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10950 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10951 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10953 rtx drop_through_label = 0;
10955 if (! if_false_label)
10956 drop_through_label = if_false_label = gen_label_rtx ();
10958 for (i = 0; i < nwords; i++)
10960 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
10961 operand_subword_force (op1, i, mode),
10962 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10963 word_mode, NULL_RTX, 0);
10964 if (comp == const_true_rtx)
10965 emit_jump (if_false_label);
10966 else if (comp != const0_rtx)
10967 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10971 emit_jump (if_true_label);
10972 if (drop_through_label)
10973 emit_label (drop_through_label);
10976 /* Jump according to whether OP0 is 0.
10977 We assume that OP0 has an integer mode that is too wide
10978 for the available compare insns. */
10981 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10983 rtx if_false_label, if_true_label;
10985 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10988 rtx drop_through_label = 0;
10990 /* The fastest way of doing this comparison on almost any machine is to
10991 "or" all the words and compare the result. If all have to be loaded
10992 from memory and this is a very wide item, it's possible this may
10993 be slower, but that's highly unlikely. */
10995 part = gen_reg_rtx (word_mode);
10996 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10997 for (i = 1; i < nwords && part != 0; i++)
10998 part = expand_binop (word_mode, ior_optab, part,
10999 operand_subword_force (op0, i, GET_MODE (op0)),
11000 part, 1, OPTAB_WIDEN);
11004 rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
11007 if (comp == const_true_rtx)
11008 emit_jump (if_false_label);
11009 else if (comp == const0_rtx)
11010 emit_jump (if_true_label);
11012 do_jump_for_compare (comp, if_false_label, if_true_label);
11017 /* If we couldn't do the "or" simply, do this with a series of compares. */
11018 if (! if_false_label)
11019 drop_through_label = if_false_label = gen_label_rtx ();
11021 for (i = 0; i < nwords; i++)
11023 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
11025 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
11026 if (comp == const_true_rtx)
11027 emit_jump (if_false_label);
11028 else if (comp != const0_rtx)
11029 do_jump_for_compare (comp, if_false_label, NULL_RTX);
11033 emit_jump (if_true_label);
11035 if (drop_through_label)
11036 emit_label (drop_through_label);
11039 /* Given a comparison expression in rtl form, output conditional branches to
11040 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
11043 do_jump_for_compare (comparison, if_false_label, if_true_label)
11044 rtx comparison, if_false_label, if_true_label;
11048 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
11049 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
11053 if (if_false_label)
11054 emit_jump (if_false_label);
11056 else if (if_false_label)
11059 rtx prev = get_last_insn ();
11062 /* Output the branch with the opposite condition. Then try to invert
11063 what is generated. If more than one insn is a branch, or if the
11064 branch is not the last insn written, abort. If we can't invert
11065 the branch, emit make a true label, redirect this jump to that,
11066 emit a jump to the false label and define the true label. */
11068 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
11069 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
11073 /* Here we get the first insn that was just emitted. It used to be the
11074 case that, on some machines, emitting the branch would discard
11075 the previous compare insn and emit a replacement. This isn't
11076 done anymore, but abort if we see that PREV is deleted. */
11079 insn = get_insns ();
11080 else if (INSN_DELETED_P (prev))
11083 insn = NEXT_INSN (prev);
11085 for (; insn; insn = NEXT_INSN (insn))
11086 if (GET_CODE (insn) == JUMP_INSN)
11093 if (branch != get_last_insn ())
11096 JUMP_LABEL (branch) = if_false_label;
11097 if (! invert_jump (branch, if_false_label))
11099 if_true_label = gen_label_rtx ();
11100 redirect_jump (branch, if_true_label);
11101 emit_jump (if_false_label);
11102 emit_label (if_true_label);
11107 /* Generate code for a comparison expression EXP
11108 (including code to compute the values to be compared)
11109 and set (CC0) according to the result.
11110 SIGNED_CODE should be the rtx operation for this comparison for
11111 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
11113 We force a stack adjustment unless there are currently
11114 things pushed on the stack that aren't yet used. */
11117 compare (exp, signed_code, unsigned_code)
11119 enum rtx_code signed_code, unsigned_code;
11122 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
11124 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
11125 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
11126 register enum machine_mode mode = TYPE_MODE (type);
11127 int unsignedp = TREE_UNSIGNED (type);
11128 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
11130 #ifdef HAVE_canonicalize_funcptr_for_compare
11131 /* If function pointers need to be "canonicalized" before they can
11132 be reliably compared, then canonicalize them. */
11133 if (HAVE_canonicalize_funcptr_for_compare
11134 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
11135 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
11138 rtx new_op0 = gen_reg_rtx (mode);
11140 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
11144 if (HAVE_canonicalize_funcptr_for_compare
11145 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
11146 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
11149 rtx new_op1 = gen_reg_rtx (mode);
11151 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
11156 return compare_from_rtx (op0, op1, code, unsignedp, mode,
11158 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
11159 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
11162 /* Like compare but expects the values to compare as two rtx's.
11163 The decision as to signed or unsigned comparison must be made by the caller.
11165 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
11168 If ALIGN is non-zero, it is the alignment of this type; if zero, the
11169 size of MODE should be used. */
11172 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
11173 register rtx op0, op1;
11174 enum rtx_code code;
11176 enum machine_mode mode;
11182 /* If one operand is constant, make it the second one. Only do this
11183 if the other operand is not constant as well. */
11185 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
11186 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
11191 code = swap_condition (code);
11194 if (flag_force_mem)
11196 op0 = force_not_mem (op0);
11197 op1 = force_not_mem (op1);
11200 do_pending_stack_adjust ();
11202 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
11203 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
11207 /* There's no need to do this now that combine.c can eliminate lots of
11208 sign extensions. This can be less efficient in certain cases on other
11211 /* If this is a signed equality comparison, we can do it as an
11212 unsigned comparison since zero-extension is cheaper than sign
11213 extension and comparisons with zero are done as unsigned. This is
11214 the case even on machines that can do fast sign extension, since
11215 zero-extension is easier to combine with other operations than
11216 sign-extension is. If we are comparing against a constant, we must
11217 convert it to what it would look like unsigned. */
11218 if ((code == EQ || code == NE) && ! unsignedp
11219 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
11221 if (GET_CODE (op1) == CONST_INT
11222 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
11223 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
11228 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
11230 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
11233 /* Generate code to calculate EXP using a store-flag instruction
11234 and return an rtx for the result. EXP is either a comparison
11235 or a TRUTH_NOT_EXPR whose operand is a comparison.
11237 If TARGET is nonzero, store the result there if convenient.
11239 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
11242 Return zero if there is no suitable set-flag instruction
11243 available on this machine.
11245 Once expand_expr has been called on the arguments of the comparison,
11246 we are committed to doing the store flag, since it is not safe to
11247 re-evaluate the expression. We emit the store-flag insn by calling
11248 emit_store_flag, but only expand the arguments if we have a reason
11249 to believe that emit_store_flag will be successful. If we think that
11250 it will, but it isn't, we have to simulate the store-flag with a
11251 set/jump/set sequence. */
11254 do_store_flag (exp, target, mode, only_cheap)
11257 enum machine_mode mode;
11260 enum rtx_code code;
11261 tree arg0, arg1, type;
11263 enum machine_mode operand_mode;
11267 enum insn_code icode;
11268 rtx subtarget = target;
11271 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
11272 result at the end. We can't simply invert the test since it would
11273 have already been inverted if it were valid. This case occurs for
11274 some floating-point comparisons. */
11276 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
11277 invert = 1, exp = TREE_OPERAND (exp, 0);
11279 arg0 = TREE_OPERAND (exp, 0);
11280 arg1 = TREE_OPERAND (exp, 1);
11281 type = TREE_TYPE (arg0);
11282 operand_mode = TYPE_MODE (type);
11283 unsignedp = TREE_UNSIGNED (type);
11285 /* We won't bother with BLKmode store-flag operations because it would mean
11286 passing a lot of information to emit_store_flag. */
11287 if (operand_mode == BLKmode)
11290 /* We won't bother with store-flag operations involving function pointers
11291 when function pointers must be canonicalized before comparisons. */
11292 #ifdef HAVE_canonicalize_funcptr_for_compare
11293 if (HAVE_canonicalize_funcptr_for_compare
11294 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
11295 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
11297 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
11298 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
11299 == FUNCTION_TYPE))))
11306 /* Get the rtx comparison code to use. We know that EXP is a comparison
11307 operation of some type. Some comparisons against 1 and -1 can be
11308 converted to comparisons with zero. Do so here so that the tests
11309 below will be aware that we have a comparison with zero. These
11310 tests will not catch constants in the first operand, but constants
11311 are rarely passed as the first operand. */
11313 switch (TREE_CODE (exp))
11322 if (integer_onep (arg1))
11323 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
11325 code = unsignedp ? LTU : LT;
11328 if (! unsignedp && integer_all_onesp (arg1))
11329 arg1 = integer_zero_node, code = LT;
11331 code = unsignedp ? LEU : LE;
11334 if (! unsignedp && integer_all_onesp (arg1))
11335 arg1 = integer_zero_node, code = GE;
11337 code = unsignedp ? GTU : GT;
11340 if (integer_onep (arg1))
11341 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
11343 code = unsignedp ? GEU : GE;
11349 /* Put a constant second. */
11350 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
11352 tem = arg0; arg0 = arg1; arg1 = tem;
11353 code = swap_condition (code);
11356 /* If this is an equality or inequality test of a single bit, we can
11357 do this by shifting the bit being tested to the low-order bit and
11358 masking the result with the constant 1. If the condition was EQ,
11359 we xor it with 1. This does not require an scc insn and is faster
11360 than an scc insn even if we have it. */
11362 if ((code == NE || code == EQ)
11363 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
11364 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11366 tree inner = TREE_OPERAND (arg0, 0);
11367 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
11370 /* If INNER is a right shift of a constant and it plus BITNUM does
11371 not overflow, adjust BITNUM and INNER. */
11373 if (TREE_CODE (inner) == RSHIFT_EXPR
11374 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
11375 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
11376 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
11377 < TYPE_PRECISION (type)))
11379 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
11380 inner = TREE_OPERAND (inner, 0);
11383 /* If we are going to be able to omit the AND below, we must do our
11384 operations as unsigned. If we must use the AND, we have a choice.
11385 Normally unsigned is faster, but for some machines signed is. */
11386 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
11387 #ifdef LOAD_EXTEND_OP
11388 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
11394 if (subtarget == 0 || GET_CODE (subtarget) != REG
11395 || GET_MODE (subtarget) != operand_mode
11396 || ! safe_from_p (subtarget, inner, 1))
11399 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
11402 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
11403 size_int (bitnum), subtarget, ops_unsignedp);
11405 if (GET_MODE (op0) != mode)
11406 op0 = convert_to_mode (mode, op0, ops_unsignedp);
11408 if ((code == EQ && ! invert) || (code == NE && invert))
11409 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
11410 ops_unsignedp, OPTAB_LIB_WIDEN);
11412 /* Put the AND last so it can combine with more things. */
11413 if (bitnum != TYPE_PRECISION (type) - 1)
11414 op0 = expand_and (op0, const1_rtx, subtarget);
11419 /* Now see if we are likely to be able to do this. Return if not. */
11420 if (! can_compare_p (operand_mode))
11422 icode = setcc_gen_code[(int) code];
11423 if (icode == CODE_FOR_nothing
11424 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
11426 /* We can only do this if it is one of the special cases that
11427 can be handled without an scc insn. */
11428 if ((code == LT && integer_zerop (arg1))
11429 || (! only_cheap && code == GE && integer_zerop (arg1)))
11431 else if (BRANCH_COST >= 0
11432 && ! only_cheap && (code == NE || code == EQ)
11433 && TREE_CODE (type) != REAL_TYPE
11434 && ((abs_optab->handlers[(int) operand_mode].insn_code
11435 != CODE_FOR_nothing)
11436 || (ffs_optab->handlers[(int) operand_mode].insn_code
11437 != CODE_FOR_nothing)))
11443 preexpand_calls (exp);
11444 if (subtarget == 0 || GET_CODE (subtarget) != REG
11445 || GET_MODE (subtarget) != operand_mode
11446 || ! safe_from_p (subtarget, arg1, 1))
11449 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
11450 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
11453 target = gen_reg_rtx (mode);
11455 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
11456 because, if the emit_store_flag does anything it will succeed and
11457 OP0 and OP1 will not be used subsequently. */
11459 result = emit_store_flag (target, code,
11460 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
11461 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
11462 operand_mode, unsignedp, 1);
11467 result = expand_binop (mode, xor_optab, result, const1_rtx,
11468 result, 0, OPTAB_LIB_WIDEN);
11472 /* If this failed, we have to do this with set/compare/jump/set code. */
11473 if (GET_CODE (target) != REG
11474 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
11475 target = gen_reg_rtx (GET_MODE (target));
11477 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
11478 result = compare_from_rtx (op0, op1, code, unsignedp,
11479 operand_mode, NULL_RTX, 0);
11480 if (GET_CODE (result) == CONST_INT)
11481 return (((result == const0_rtx && ! invert)
11482 || (result != const0_rtx && invert))
11483 ? const0_rtx : const1_rtx);
11485 label = gen_label_rtx ();
11486 if (bcc_gen_fctn[(int) code] == 0)
11489 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
11490 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
11491 emit_label (label);
11496 /* Generate a tablejump instruction (used for switch statements). */
11498 #ifdef HAVE_tablejump
11500 /* INDEX is the value being switched on, with the lowest value
11501 in the table already subtracted.
11502 MODE is its expected mode (needed if INDEX is constant).
11503 RANGE is the length of the jump table.
11504 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11506 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11507 index value is out of range. */
11510 do_tablejump (index, mode, range, table_label, default_label)
11511 rtx index, range, table_label, default_label;
11512 enum machine_mode mode;
11514 register rtx temp, vector;
11516 /* Do an unsigned comparison (in the proper mode) between the index
11517 expression and the value which represents the length of the range.
11518 Since we just finished subtracting the lower bound of the range
11519 from the index expression, this comparison allows us to simultaneously
11520 check that the original index expression value is both greater than
11521 or equal to the minimum value of the range and less than or equal to
11522 the maximum value of the range. */
11524 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
11525 emit_jump_insn (gen_bgtu (default_label));
11527 /* If index is in range, it must fit in Pmode.
11528 Convert to Pmode so we can index with it. */
11530 index = convert_to_mode (Pmode, index, 1);
11532 /* Don't let a MEM slip thru, because then INDEX that comes
11533 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11534 and break_out_memory_refs will go to work on it and mess it up. */
11535 #ifdef PIC_CASE_VECTOR_ADDRESS
11536 if (flag_pic && GET_CODE (index) != REG)
11537 index = copy_to_mode_reg (Pmode, index);
11540 /* If flag_force_addr were to affect this address
11541 it could interfere with the tricky assumptions made
11542 about addresses that contain label-refs,
11543 which may be valid only very near the tablejump itself. */
11544 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11545 GET_MODE_SIZE, because this indicates how large insns are. The other
11546 uses should all be Pmode, because they are addresses. This code
11547 could fail if addresses and insns are not the same size. */
11548 index = gen_rtx_PLUS (Pmode,
11549 gen_rtx_MULT (Pmode, index,
11550 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
11551 gen_rtx_LABEL_REF (Pmode, table_label));
11552 #ifdef PIC_CASE_VECTOR_ADDRESS
11554 index = PIC_CASE_VECTOR_ADDRESS (index);
11557 index = memory_address_noforce (CASE_VECTOR_MODE, index);
11558 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11559 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
11560 RTX_UNCHANGING_P (vector) = 1;
11561 convert_move (temp, vector, 0);
11563 emit_jump_insn (gen_tablejump (temp, table_label));
11565 /* If we are generating PIC code or if the table is PC-relative, the
11566 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11567 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11571 #endif /* HAVE_tablejump */