/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92, 93, 94, 95, 96, 1997 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
#include "typeclass.h"

#include "bc-opcode.h"
#include "bc-typecd.h"
#define CEIL(x,y) (((x) + (y) - 1) / (y))
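/* For example, CEIL (10, 4) == 3: ten bytes occupy three four-byte units,
   whereas plain integer division would give 2.  */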
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;
/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;
/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int from_struct;
  int len;
  int offset;
  int reverse;
};

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int len;
  int offset;
  int reverse;
};
/* Used to generate bytecodes: keep track of size of local variables,
   as well as depth of arithmetic stack.  (Notice that variables are
   stored on the machine's stack, not the arithmetic stack.)  */

static rtx get_push_address PROTO ((int));
extern int local_vars_size;
extern int stack_depth;
extern int max_stack_depth;
extern struct obstack permanent_obstack;
extern rtx arg_pointer_save_area;
static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue PROTO((void));
static void move_by_pieces PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
				    struct move_by_pieces *));
static void clear_by_pieces PROTO((rtx, int, int));
static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
				     struct clear_by_pieces *));
static int is_zeros_p PROTO((tree));
static int mostly_zeros_p PROTO((tree));
static void store_constructor PROTO((tree, rtx, int));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
			      enum machine_mode, int, int, int));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree));
static int fixed_type_p PROTO((tree));
static rtx var_rtx PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx,
				 enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int, int));
void bc_expand_increment PROTO((struct increment_operator *, tree));
rtx bc_allocate_local PROTO((int, int));
void bc_store_memory PROTO((tree, tree));
tree bc_expand_component_address PROTO((tree));
tree bc_expand_address PROTO((tree));
void bc_expand_constructor PROTO((tree));
void bc_adjust_stack PROTO((int));
tree bc_canonicalize_array_ref PROTO((tree));
void bc_load_memory PROTO((tree, tree));
void bc_load_externaddr PROTO((rtx));
void bc_load_externaddr_id PROTO((tree, int));
void bc_load_localaddr PROTO((rtx));
void bc_load_parmaddr PROTO((rtx));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
extern tree truthvalue_conversion PROTO((tree));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];
/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif
/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif

#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif

/* Maps used to convert modes to const, load, and store bytecodes.  */
enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
/* Initialize maps used to convert modes to const, load, and store
   bytecodes.  */

void
bc_init_mode_to_opcode_maps ()
{
  int mode;

  for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
    mode_to_const_map[mode]
      = mode_to_load_map[mode]
	= mode_to_store_map[mode] = neverneverland;

#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
  mode_to_const_map[(int) SYM] = CONST; \
  mode_to_load_map[(int) SYM] = LOAD; \
  mode_to_store_map[(int) SYM] = STORE;

#include "modemap.def"

#undef DEF_MODEMAP
}
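/* A hypothetical modemap.def entry (the names here are illustrative, not
   taken from the real modemap.def) such as

	DEF_MODEMAP (SImode, codeSI, ucodeSI, constSI, loadSI, storeSI)

   would expand through the DEF_MODEMAP macro above into the three
   assignments

	mode_to_const_map[(int) SImode] = constSI;
	mode_to_load_map[(int) SImode] = loadSI;
	mode_to_store_map[(int) SImode] = storeSI;

   giving the bytecode opcodes to use for SImode constants, loads and
   stores.  */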
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx (REG, mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}
/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}
/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
			   var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));

	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
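/* A typical caller pattern (a sketch, not code taken from this file):
   any rtx that might contain a QUEUED is laundered immediately before
   being put into an insn, e.g.

	op0 = protect_from_queue (op0, 0);
	target = protect_from_queue (target, 1);
	emit_move_insn (target, op0);

   Here op0 is only read (MODIFY of 0) while target will be stored into
   (MODIFY of 1).  As the comment above warns, the protected values must
   not be held across a flush of the queue.  */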
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    }
  return 0;
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
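/* For example, a caller widening a QImode pseudo into an SImode pseudo
   might write (a minimal sketch, assuming both registers already exist):

	rtx narrow = gen_reg_rtx (QImode);
	rtx wide = gen_reg_rtx (SImode);
	convert_move (wide, narrow, 1);

   Passing 1 for UNSIGNEDP requests zero-extension; passing 0 would
   request sign-extension instead.  */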
void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }
  if (to_real)
    {
      rtx value;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;
	    }
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion */			/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */
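/* For example (a sketch): converting the QImode constant 255 upward to
   SImode,

	rtx x = convert_modes (SImode, QImode, GEN_INT (255), 1);

   yields (const_int 255) when UNSIGNEDP is nonzero, but (const_int -1)
   when UNSIGNEDP is zero, since 255 has the QImode sign bit set and is
   then sign-extended by the CONST_INT handling below.  */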
rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */
static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */
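/* Worked example: on a target with 4-byte words and MOVE_MAX of 4,
   moving L == 11 bytes with ALIGN == 4 takes two SImode moves (8 bytes),
   one HImode move (2 bytes) and one QImode move (1 byte), so the result
   is 4.  (Illustrative numbers, not output for any particular target.)  */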
static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PROTO ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : copy_rtx (change_address (data->to, mode,
					 plus_constant (data->to_addr,
							data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

      from1
	= (data->autinc_from
	   ? gen_rtx (MEM, mode, data->from_addr)
	   : copy_rtx (change_address (data->from, mode,
				       plus_constant (data->from_addr,
						      data->offset))));
      MEM_IN_STRUCT_P (from1) = data->from_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */
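/* A typical call (a sketch): copying a 16-byte BLKmode object with
   word (4-byte) alignment might look like

	emit_block_move (dest_mem, src_mem, GEN_INT (16), 4);

   where dest_mem and src_mem are BLKmode MEM rtx's the caller has
   already built; the names are hypothetical.  */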
rtx
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  rtx retval = 0;

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= GET_MODE_MASK (mode)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return 0;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      retval
	= emit_library_call_value (memcpy_libfunc, NULL_RTX, 0,
				   ptr_mode, 3, XEXP (x, 0), Pmode,
				   XEXP (y, 0), Pmode,
				   convert_to_mode (TYPE_MODE (sizetype), size,
						    TREE_UNSIGNED (sizetype)),
				   TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
    }

  return retval;
}
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
  rtx pat, last;
  enum machine_mode mode;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
    {
      emit_move_insn (change_address (x, mode, NULL),
		      gen_rtx (REG, mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
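  /* For instance, with 4-byte words and SIZE == 3, the value occupies the
     low three bytes of the register and is shifted left by
     (UNITS_PER_WORD - SIZE) * BITS_PER_UNIT == 8 bits so that its bytes
     come first in memory on a big-endian machine.  */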
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx (REG, word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}
/* Emit code to move a block Y to a block X, where X is non-consecutive
   registers represented by a PARALLEL.  */

void
emit_group_load (x, y)
     rtx x, y;
{
  rtx target_reg, source;
  int i = 0;

  if (GET_CODE (x) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (x, 0, 0), 0))
    i = 0;
  else
    i = 1;

  for (; i < XVECLEN (x, 0); i++)
    {
      rtx element = XVECEXP (x, 0, i);

      target_reg = XEXP (element, 0);

      if (GET_CODE (y) == MEM)
	source = change_address (y, GET_MODE (target_reg),
				 plus_constant (XEXP (y, 0),
						INTVAL (XEXP (element, 1))));
      else if (XEXP (element, 1) == const0_rtx)
	{
	  if (GET_MODE (target_reg) == GET_MODE (y))
	    source = y;
	  /* Allow for the target_reg to be smaller than the input register
	     to allow for AIX with 4 DF arguments after a single SI arg.  The
	     last DF argument will only load 1 word into the integer registers,
	     but load a DF value into the float registers.  */
	  else if ((GET_MODE_SIZE (GET_MODE (target_reg))
		    <= GET_MODE_SIZE (GET_MODE (y)))
		   && GET_MODE (target_reg) == word_mode)
	    /* This might be a const_double, so we can't just use SUBREG.  */
	    source = operand_subword (y, 0, 0, VOIDmode);
	  else if (GET_MODE_SIZE (GET_MODE (target_reg))
		   == GET_MODE_SIZE (GET_MODE (y)))
	    source = gen_lowpart (GET_MODE (target_reg), y);
	  else
	    abort ();
	}
      else
	abort ();

      emit_move_insn (target_reg, source);
    }
}
/* Emit code to move a block Y to a block X, where Y is non-consecutive
   registers represented by a PARALLEL.  */

void
emit_group_store (x, y)
     rtx x, y;
{
  rtx source_reg, target;
  int i = 0;

  if (GET_CODE (y) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (y, 0, 0), 0))
    i = 0;
  else
    i = 1;

  for (; i < XVECLEN (y, 0); i++)
    {
      rtx element = XVECEXP (y, 0, i);

      source_reg = XEXP (element, 0);

      if (GET_CODE (x) == MEM)
	target = change_address (x, GET_MODE (source_reg),
				 plus_constant (XEXP (x, 0),
						INTVAL (XEXP (element, 1))));
      else if (XEXP (element, 1) == const0_rtx)
	{
	  target = x;
	  if (GET_MODE (target) != GET_MODE (source_reg))
	    target = gen_lowpart (GET_MODE (source_reg), target);
	}
      else
	abort ();

      emit_move_insn (target, source_reg);
    }
}
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx (EXPR_LIST, VOIDmode,
	       gen_rtx (USE, VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
	use_reg (call_fusage, reg);
    }
}
/* Generate several move instructions to clear LEN bytes of block TO.
   (A MEM rtx with BLKmode).  The caller must pass TO through
   protect_from_queue before calling.  ALIGN (in bytes) is maximum alignment
   we can assume.  */

static void
clear_by_pieces (to, len, align)
     rtx to;
     int len, align;
{
  struct clear_by_pieces data;
  rtx to_addr = XEXP (to, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.to = to;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data.autinc_to
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	clear_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}
/* Subroutine of clear_by_pieces.  Clear as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
clear_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PROTO ((rtx, ...));
     enum machine_mode mode;
     struct clear_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : copy_rtx (change_address (data->to, mode,
					 plus_constant (data->to_addr,
							data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, const0_rtx));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
   the maximum alignment we can assume it has, measured in bytes.

   If we call a function that returns the length of the block, return it.  */
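/* A typical call (a sketch): zeroing a 64-byte BLKmode buffer known to
   be 4-byte aligned:

	clear_storage (buf_mem, GEN_INT (64), 4);

   buf_mem is a hypothetical BLKmode MEM rtx.  For a non-BLKmode OBJECT
   the function simply stores CONST0_RTX of the appropriate mode.  */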
rtx
clear_storage (object, size, align)
     rtx object;
     rtx size;
     int align;
{
  rtx retval = 0;

  if (GET_MODE (object) == BLKmode)
    {
      object = protect_from_queue (object, 1);
      size = protect_from_queue (size, 0);

      if (GET_CODE (size) == CONST_INT
	  && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
	clear_by_pieces (object, INTVAL (size), align);
      else
	{
	  /* Try the most limited insn first, because there's no point
	     including more than one in the machine description unless
	     the more limited one has some advantage.  */

	  rtx opalign = GEN_INT (align);
	  enum machine_mode mode;

	  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	       mode = GET_MODE_WIDER_MODE (mode))
	    {
	      enum insn_code code = clrstr_optab[(int) mode];

	      if (code != CODE_FOR_nothing
		  /* We don't need MODE to be narrower than
		     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
		     the mode mask, as it is returned by the macro, it will
		     definitely be less than the actual mode mask.  */
		  && ((GET_CODE (size) == CONST_INT
		       && ((unsigned HOST_WIDE_INT) INTVAL (size)
			   <= GET_MODE_MASK (mode)))
		      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
		  && (insn_operand_predicate[(int) code][0] == 0
		      || (*insn_operand_predicate[(int) code][0]) (object,
								   BLKmode))
		  && (insn_operand_predicate[(int) code][2] == 0
		      || (*insn_operand_predicate[(int) code][2]) (opalign,
								   VOIDmode)))
		{
		  rtx op1;
		  rtx last = get_last_insn ();
		  rtx pat;

		  op1 = convert_to_mode (mode, size, 1);
		  if (insn_operand_predicate[(int) code][1] != 0
		      && ! (*insn_operand_predicate[(int) code][1]) (op1,
								     mode))
		    op1 = copy_to_mode_reg (mode, op1);

		  pat = GEN_FCN ((int) code) (object, op1, opalign);
		  if (pat)
		    {
		      emit_insn (pat);
		      return 0;
		    }
		  else
		    delete_insns_since (last);
		}
	    }

#ifdef TARGET_MEM_FUNCTIONS
	  retval
	    = emit_library_call_value (memset_libfunc, NULL_RTX, 0,
				       ptr_mode, 3,
				       XEXP (object, 0), Pmode,
				       const0_rtx,
				       TYPE_MODE (integer_type_node),
				       convert_to_mode
				       (TYPE_MODE (sizetype), size,
					TREE_UNSIGNED (sizetype)),
				       TYPE_MODE (sizetype));
#else
	  emit_library_call (bzero_libfunc, 0,
			     VOIDmode, 2,
			     XEXP (object, 0), Pmode,
			     convert_to_mode
			     (TYPE_MODE (integer_type_node), size,
			      TREE_UNSIGNED (integer_type_node)),
			     TYPE_MODE (integer_type_node));
#endif
	}
    }
  else
    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));

  return retval;
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */
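/* For example (a sketch), loading a constant into a fresh SImode pseudo:

	rtx reg = gen_reg_rtx (SImode);
	emit_move_insn (reg, GEN_INT (42));

   A constant that is not legitimate for the target is first forced into
   the constant pool, as the code below shows.  */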
rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  return emit_move_insn_1 (x, y);
}
2238 /* Low level part of emit_move_insn.
2239 Called just like emit_move_insn, but assumes X and Y
2240 are basically valid. */
2243 emit_move_insn_1 (x, y)
2246 enum machine_mode mode = GET_MODE (x);
2247 enum machine_mode submode;
2248 enum mode_class class = GET_MODE_CLASS (mode);
2251 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2253 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2255 /* Expand complex moves by moving real part and imag part, if possible. */
2256 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2257 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2259 (class == MODE_COMPLEX_INT
2260 ? MODE_INT : MODE_FLOAT),
2262 && (mov_optab->handlers[(int) submode].insn_code
2263 != CODE_FOR_nothing))
2265 /* Don't split destination if it is a stack push. */
2266 int stack = push_operand (x, GET_MODE (x));
2269 /* If this is a stack, push the highpart first, so it
2270 will be in the argument order.
2272 In that case, change_address is used only to convert
2273 the mode, not to change the address. */
2276 /* Note that the real part always precedes the imag part in memory
2277 regardless of machine's endianness. */
2278 #ifdef STACK_GROWS_DOWNWARD
2279 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2280 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2281 gen_imagpart (submode, y)));
2282 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2283 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2284 gen_realpart (submode, y)));
2286 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2287 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2288 gen_realpart (submode, y)));
2289 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2290 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2291 gen_imagpart (submode, y)));
2296 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2297 (gen_realpart (submode, x), gen_realpart (submode, y)));
2298 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2299 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2302 return get_last_insn ();
2305 /* This will handle any multi-word mode that lacks a move_insn pattern.
2306 However, you will get better code if you define such patterns,
2307 even if they must turn into multiple assembler instructions. */
2308 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2313 #ifdef PUSH_ROUNDING
2315 /* If X is a push on the stack, do the push now and replace
2316 X with a reference to the stack pointer. */
2317 if (push_operand (x, GET_MODE (x)))
2319 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2320 x = change_address (x, VOIDmode, stack_pointer_rtx);
2324 /* Show the output dies here. */
2326 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
2329 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2332 rtx xpart = operand_subword (x, i, 1, mode);
2333 rtx ypart = operand_subword (y, i, 1, mode);
2335 /* If we can't get a part of Y, put Y into memory if it is a
2336 constant. Otherwise, force it into a register. If we still
2337 can't get a part of Y, abort. */
2338 if (ypart == 0 && CONSTANT_P (y))
2340 y = force_const_mem (mode, y);
2341 ypart = operand_subword (y, i, 1, mode);
2343 else if (ypart == 0)
2344 ypart = operand_subword_force (y, i, mode);
2346 if (xpart == 0 || ypart == 0)
2349 last_insn = emit_move_insn (xpart, ypart);
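/* A minimal standalone sketch of the word-at-a-time fallback above,
using plain C rather than rtl; `copy_words' and WORD_BYTES are
illustrative names, not GNU CC interfaces, and both buffers are
assumed to be padded to a whole number of words. */
#if 0
#include <string.h>

#define WORD_BYTES sizeof (long)

static void
copy_words (void *dst, const void *src, unsigned int size)
{
  unsigned int i;
  unsigned int nwords = (size + WORD_BYTES - 1) / WORD_BYTES;

  /* One move per word, mirroring the operand_subword loop above;
     a trailing partial word is still moved as a full word.  */
  for (i = 0; i < nwords; i++)
    memcpy ((char *) dst + i * WORD_BYTES,
            (const char *) src + i * WORD_BYTES, WORD_BYTES);
}
#endif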
2358 /* Pushing data onto the stack. */
2360 /* Push a block of length SIZE (perhaps variable)
2361 and return an rtx to address the beginning of the block.
2362 Note that it is not possible for the value returned to be a QUEUED.
2363 The value may be virtual_outgoing_args_rtx.
2365 EXTRA is the number of bytes of padding to push in addition to SIZE.
2366 BELOW nonzero means this padding comes at low addresses;
2367 otherwise, the padding comes at high addresses. */
2370 push_block (size, extra, below)
2376 size = convert_modes (Pmode, ptr_mode, size, 1);
2377 if (CONSTANT_P (size))
2378 anti_adjust_stack (plus_constant (size, extra));
2379 else if (GET_CODE (size) == REG && extra == 0)
2380 anti_adjust_stack (size);
2383 rtx temp = copy_to_mode_reg (Pmode, size);
2385 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2386 temp, 0, OPTAB_LIB_WIDEN);
2387 anti_adjust_stack (temp);
2390 #ifdef STACK_GROWS_DOWNWARD
2391 temp = virtual_outgoing_args_rtx;
2392 if (extra != 0 && below)
2393 temp = plus_constant (temp, extra);
2395 if (GET_CODE (size) == CONST_INT)
2396 temp = plus_constant (virtual_outgoing_args_rtx,
2397 - INTVAL (size) - (below ? 0 : extra));
2398 else if (extra != 0 && !below)
2399 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2400 negate_rtx (Pmode, plus_constant (size, extra)));
2402 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2403 negate_rtx (Pmode, size));
2406 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2412 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2415 /* Return an rtx for the address of the beginning of an as-if-it-had-been-pushed
2416 block of SIZE bytes. */
2419 get_push_address (size)
2424 if (STACK_PUSH_CODE == POST_DEC)
2425 temp = gen_rtx (PLUS, Pmode, stack_pointer_rtx, GEN_INT (size));
2426 else if (STACK_PUSH_CODE == POST_INC)
2427 temp = gen_rtx (MINUS, Pmode, stack_pointer_rtx, GEN_INT (size));
2429 temp = stack_pointer_rtx;
2431 return force_operand (temp, NULL_RTX);
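/* Worked example (illustrative): after an 8-byte POST_DEC push the
stack pointer has already moved below the data, so the block begins at
sp + 8; with a pre-update push code (PRE_DEC/PRE_INC) the pointer was
adjusted before the store, so the block begins at sp itself. */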
2434 /* Generate code to push X onto the stack, assuming it has mode MODE and
2436 MODE is redundant except when X is a CONST_INT (since they don't
2437 carry mode info).
2438 SIZE is an rtx for the size of data to be copied (in bytes),
2439 needed only if X is BLKmode.
2441 ALIGN (in bytes) is the maximum alignment we can assume.
2443 If PARTIAL and REG are both nonzero, then copy that many of the first
2444 words of X into registers starting with REG, and push the rest of X.
2445 The amount of space pushed is decreased by PARTIAL words,
2446 rounded *down* to a multiple of PARM_BOUNDARY.
2447 REG must be a hard register in this case.
2448 If REG is zero but PARTIAL is not, take all other actions for an
2449 argument partially in registers, but do not actually load any
2450 registers.
2452 EXTRA is the amount in bytes of extra space to leave next to this arg.
2453 This is ignored if an argument block has already been allocated.
2455 On a machine that lacks real push insns, ARGS_ADDR is the address of
2456 the bottom of the argument block for this call. We use indexing off there
2457 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2458 argument block has not been preallocated.
2460 ARGS_SO_FAR is the size of args previously pushed for this call. */
2463 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2464 args_addr, args_so_far)
2466 enum machine_mode mode;
2477 enum direction stack_direction
2478 #ifdef STACK_GROWS_DOWNWARD
2484 /* Decide where to pad the argument: `downward' for below,
2485 `upward' for above, or `none' for don't pad it.
2486 Default is below for small data on big-endian machines; else above. */
2487 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2489 /* Invert direction if stack is post-update. */
2490 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2491 if (where_pad != none)
2492 where_pad = (where_pad == downward ? upward : downward);
2494 xinner = x = protect_from_queue (x, 0);
2496 if (mode == BLKmode)
2498 /* Copy a block into the stack, entirely or partially. */
2501 int used = partial * UNITS_PER_WORD;
2502 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2510 /* USED is now the # of bytes we need not copy to the stack
2511 because registers will take care of them. */
2514 xinner = change_address (xinner, BLKmode,
2515 plus_constant (XEXP (xinner, 0), used));
2517 /* If the partial register-part of the arg counts in its stack size,
2518 skip the part of stack space corresponding to the registers.
2519 Otherwise, start copying to the beginning of the stack space,
2520 by setting SKIP to 0. */
2521 #ifndef REG_PARM_STACK_SPACE
2527 #ifdef PUSH_ROUNDING
2528 /* Do it with several push insns if that doesn't take lots of insns
2529 and if there is no difficulty with push insns that skip bytes
2530 on the stack for alignment purposes. */
2532 && GET_CODE (size) == CONST_INT
2534 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2536 /* Here we avoid the case of a structure whose weak alignment
2537 forces many pushes of a small amount of data,
2538 and such small pushes do rounding that causes trouble. */
2539 && ((! SLOW_UNALIGNED_ACCESS)
2540 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2541 || PUSH_ROUNDING (align) == align)
2542 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2544 /* Push padding now if padding above and stack grows down,
2545 or if padding below and stack grows up.
2546 But if space already allocated, this has already been done. */
2547 if (extra && args_addr == 0
2548 && where_pad != none && where_pad != stack_direction)
2549 anti_adjust_stack (GEN_INT (extra));
2551 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2552 INTVAL (size) - used, align);
2554 if (flag_check_memory_usage)
2558 temp = get_push_address (INTVAL (size) - used);
2559 if (GET_CODE (x) == MEM && AGGREGATE_TYPE_P (type))
2560 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2562 XEXP (xinner, 0), ptr_mode,
2563 GEN_INT (INTVAL (size) - used),
2564 TYPE_MODE (sizetype));
2566 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2568 GEN_INT (INTVAL (size) - used),
2569 TYPE_MODE (sizetype),
2570 GEN_INT (MEMORY_USE_RW), QImode);
2574 #endif /* PUSH_ROUNDING */
2576 /* Otherwise make space on the stack and copy the data
2577 to the address of that space. */
2579 /* Deduct words put into registers from the size we must copy. */
2582 if (GET_CODE (size) == CONST_INT)
2583 size = GEN_INT (INTVAL (size) - used);
2585 size = expand_binop (GET_MODE (size), sub_optab, size,
2586 GEN_INT (used), NULL_RTX, 0,
2590 /* Get the address of the stack space.
2591 In this case, we do not deal with EXTRA separately.
2592 A single stack adjust will do. */
2595 temp = push_block (size, extra, where_pad == downward);
2598 else if (GET_CODE (args_so_far) == CONST_INT)
2599 temp = memory_address (BLKmode,
2600 plus_constant (args_addr,
2601 skip + INTVAL (args_so_far)));
2603 temp = memory_address (BLKmode,
2604 plus_constant (gen_rtx (PLUS, Pmode,
2605 args_addr, args_so_far),
2607 if (flag_check_memory_usage)
2611 target = copy_to_reg (temp);
2612 if (GET_CODE (x) == MEM && AGGREGATE_TYPE_P (type))
2613 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2615 XEXP (xinner, 0), ptr_mode,
2616 size, TYPE_MODE (sizetype));
2618 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2620 size, TYPE_MODE (sizetype),
2621 GEN_INT (MEMORY_USE_RW), QImode);
2624 /* TEMP is the address of the block. Copy the data there. */
2625 if (GET_CODE (size) == CONST_INT
2626 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2629 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2630 INTVAL (size), align);
2633 /* Try the most limited insn first, because there's no point
2634 including more than one in the machine description unless
2635 the more limited one has some advantage. */
2636 #ifdef HAVE_movstrqi
2638 && GET_CODE (size) == CONST_INT
2639 && ((unsigned) INTVAL (size)
2640 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2642 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2643 xinner, size, GEN_INT (align));
2651 #ifdef HAVE_movstrhi
2653 && GET_CODE (size) == CONST_INT
2654 && ((unsigned) INTVAL (size)
2655 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2657 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2658 xinner, size, GEN_INT (align));
2666 #ifdef HAVE_movstrsi
2669 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2670 xinner, size, GEN_INT (align));
2678 #ifdef HAVE_movstrdi
2681 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2682 xinner, size, GEN_INT (align));
2691 #ifndef ACCUMULATE_OUTGOING_ARGS
2692 /* If the source is referenced relative to the stack pointer,
2693 copy it to another register to stabilize it. We do not need
2694 to do this if we know that we won't be changing sp. */
2696 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2697 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2698 temp = copy_to_reg (temp);
2701 /* Make inhibit_defer_pop nonzero around the library call
2702 to force it to pop the bcopy-arguments right away. */
2704 #ifdef TARGET_MEM_FUNCTIONS
2705 emit_library_call (memcpy_libfunc, 0,
2706 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2707 convert_to_mode (TYPE_MODE (sizetype),
2708 size, TREE_UNSIGNED (sizetype)),
2709 TYPE_MODE (sizetype));
2711 emit_library_call (bcopy_libfunc, 0,
2712 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2713 convert_to_mode (TYPE_MODE (integer_type_node),
2715 TREE_UNSIGNED (integer_type_node)),
2716 TYPE_MODE (integer_type_node));
2721 else if (partial > 0)
2723 /* Scalar partly in registers. */
2725 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2728 /* # words of start of argument
2729 that we must make space for but need not store. */
2730 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2731 int args_offset = INTVAL (args_so_far);
2734 /* Push padding now if padding above and stack grows down,
2735 or if padding below and stack grows up.
2736 But if space already allocated, this has already been done. */
2737 if (extra && args_addr == 0
2738 && where_pad != none && where_pad != stack_direction)
2739 anti_adjust_stack (GEN_INT (extra));
2741 /* If we make space by pushing it, we might as well push
2742 the real data. Otherwise, we can leave OFFSET nonzero
2743 and leave the space uninitialized. */
2747 /* Now NOT_STACK gets the number of words that we don't need to
2748 allocate on the stack. */
2749 not_stack = partial - offset;
2751 /* If the partial register-part of the arg counts in its stack size,
2752 skip the part of stack space corresponding to the registers.
2753 Otherwise, start copying to the beginning of the stack space,
2754 by setting SKIP to 0. */
2755 #ifndef REG_PARM_STACK_SPACE
2761 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2762 x = validize_mem (force_const_mem (mode, x));
2764 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2765 SUBREGs of such registers are not allowed. */
2766 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2767 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2768 x = copy_to_reg (x);
2770 /* Loop over all the words allocated on the stack for this arg. */
2771 /* We can do it by words, because any scalar bigger than a word
2772 has a size that is a multiple of a word. */
2773 #ifndef PUSH_ARGS_REVERSED
2774 for (i = not_stack; i < size; i++)
2776 for (i = size - 1; i >= not_stack; i--)
2778 if (i >= not_stack + offset)
2779 emit_push_insn (operand_subword_force (x, i, mode),
2780 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2782 GEN_INT (args_offset + ((i - not_stack + skip)
2783 * UNITS_PER_WORD)));
2788 rtx target = NULL_RTX;
2790 /* Push padding now if padding above and stack grows down,
2791 or if padding below and stack grows up.
2792 But if space already allocated, this has already been done. */
2793 if (extra && args_addr == 0
2794 && where_pad != none && where_pad != stack_direction)
2795 anti_adjust_stack (GEN_INT (extra));
2797 #ifdef PUSH_ROUNDING
2799 addr = gen_push_operand ();
2803 if (GET_CODE (args_so_far) == CONST_INT)
2805 = memory_address (mode,
2806 plus_constant (args_addr,
2807 INTVAL (args_so_far)));
2809 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2814 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2816 if (flag_check_memory_usage)
2819 target = get_push_address (GET_MODE_SIZE (mode));
2821 if (GET_CODE (x) == MEM && AGGREGATE_TYPE_P (type))
2822 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2824 XEXP (x, 0), ptr_mode,
2825 GEN_INT (GET_MODE_SIZE (mode)),
2826 TYPE_MODE (sizetype));
2828 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2830 GEN_INT (GET_MODE_SIZE (mode)),
2831 TYPE_MODE (sizetype),
2832 GEN_INT (MEMORY_USE_RW), QImode);
2837 /* If part should go in registers, copy that part
2838 into the appropriate registers. Do this now, at the end,
2839 since mem-to-mem copies above may do function calls. */
2840 if (partial > 0 && reg != 0)
2842 /* Handle calls that pass values in multiple non-contiguous locations.
2843 The Irix 6 ABI has examples of this. */
2844 if (GET_CODE (reg) == PARALLEL)
2845 emit_group_load (reg, x);
2847 move_block_to_reg (REGNO (reg), x, partial, mode);
2850 if (extra && args_addr == 0 && where_pad == stack_direction)
2851 anti_adjust_stack (GEN_INT (extra));
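/* A minimal standalone sketch of the PARTIAL/REG convention handled
above, using plain arrays instead of rtl; `pass_arg', NREGS and `regs'
are illustrative names, not GNU CC interfaces. */
#if 0
#define NREGS 4

static unsigned long regs[NREGS];	/* stand-in for REG..REG+PARTIAL-1 */

static void
pass_arg (const unsigned long *words, int nwords, int partial,
          unsigned long *stack_slots)
{
  int i;

  /* Store the part of the argument that does not fit in registers.  */
  for (i = partial; i < nwords; i++)
    stack_slots[i - partial] = words[i];

  /* Copy the first PARTIAL words into registers last, as at the end
     of emit_push_insn, since the stores above may themselves have
     needed to make function calls.  */
  for (i = 0; i < partial && i < NREGS; i++)
    regs[i] = words[i];
}
#endif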
2854 /* Expand an assignment that stores the value of FROM into TO.
2855 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2856 (This may contain a QUEUED rtx;
2857 if the value is constant, this rtx is a constant.)
2858 Otherwise, the returned value is NULL_RTX.
2860 SUGGEST_REG is no longer actually used.
2861 It used to mean, copy the value through a register
2862 and return that register, if that is possible.
2863 We now use WANT_VALUE to decide whether to do this. */
2866 expand_assignment (to, from, want_value, suggest_reg)
2871 register rtx to_rtx = 0;
2874 /* Don't crash if the lhs of the assignment was erroneous. */
2876 if (TREE_CODE (to) == ERROR_MARK)
2878 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2879 return want_value ? result : NULL_RTX;
2882 if (output_bytecode)
2884 tree dest_innermost;
2886 bc_expand_expr (from);
2887 bc_emit_instruction (duplicate);
2889 dest_innermost = bc_expand_address (to);
2891 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2892 take care of it here. */
2894 bc_store_memory (TREE_TYPE (to), dest_innermost);
2898 /* Assignment of a structure component needs special treatment
2899 if the structure component's rtx is not simply a MEM.
2900 Assignment of an array element at a constant index, and assignment of
2901 an array element in an unaligned packed structure field, have the same
2902 problem. */
2904 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
2905 || TREE_CODE (to) == ARRAY_REF)
2907 enum machine_mode mode1;
2917 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
2918 &unsignedp, &volatilep, &alignment);
2920 /* If we are going to use store_bit_field and extract_bit_field,
2921 make sure to_rtx will be safe for multiple use. */
2923 if (mode1 == VOIDmode && want_value)
2924 tem = stabilize_reference (tem);
2926 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
2929 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2931 if (GET_CODE (to_rtx) != MEM)
2933 to_rtx = change_address (to_rtx, VOIDmode,
2934 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
2935 force_reg (ptr_mode, offset_rtx)));
2939 if (GET_CODE (to_rtx) == MEM)
2941 /* When the offset is zero, to_rtx is the address of the
2942 structure we are storing into, and hence may be shared.
2943 We must make a new MEM before setting the volatile bit. */
2945 to_rtx = copy_rtx (to_rtx);
2947 MEM_VOLATILE_P (to_rtx) = 1;
2949 #if 0 /* This was turned off because, when a field is volatile
2950 in an object which is not volatile, the object may be in a register,
2951 and then we would abort here. */
2957 /* Check the access. */
2958 if (flag_check_memory_usage && GET_CODE (to_rtx) == MEM)
2963 enum machine_mode best_mode;
2965 best_mode = get_best_mode (bitsize, bitpos,
2966 TYPE_ALIGN (TREE_TYPE (tem)),
2968 if (best_mode == VOIDmode)
2971 best_mode_size = GET_MODE_BITSIZE (best_mode);
2972 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
2973 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
2974 size *= GET_MODE_SIZE (best_mode);
2976 /* Check the access right of the pointer. */
2978 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
2980 GEN_INT (size), TYPE_MODE (sizetype),
2981 GEN_INT (MEMORY_USE_WO), QImode);
2984 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2986 /* Spurious cast makes HPUX compiler happy. */
2987 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2990 /* Required alignment of containing datum. */
2992 int_size_in_bytes (TREE_TYPE (tem)));
2993 preserve_temp_slots (result);
2997 /* If the value is meaningful, convert RESULT to the proper mode.
2998 Otherwise, return nothing. */
2999 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3000 TYPE_MODE (TREE_TYPE (from)),
3002 TREE_UNSIGNED (TREE_TYPE (to)))
3006 /* If the rhs is a function call and its value is not an aggregate,
3007 call the function before we start to compute the lhs.
3008 This is needed for correct code for cases such as
3009 val = setjmp (buf) on machines where reference to val
3010 requires loading up part of an address in a separate insn.
3012 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3013 a promoted variable where the zero- or sign- extension needs to be done.
3014 Handling this in the normal way is safe because no computation is done
3015 before the call. */
3016 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3017 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3018 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
3023 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3025 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3027 /* Handle calls that return values in multiple non-contiguous locations.
3028 The Irix 6 ABI has examples of this. */
3029 if (GET_CODE (to_rtx) == PARALLEL)
3030 emit_group_load (to_rtx, value);
3031 else if (GET_MODE (to_rtx) == BLKmode)
3032 emit_block_move (to_rtx, value, expr_size (from),
3033 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3035 emit_move_insn (to_rtx, value);
3036 preserve_temp_slots (to_rtx);
3039 return want_value ? to_rtx : NULL_RTX;
3042 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3043 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3046 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3048 /* Don't move directly into a return register. */
3049 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3054 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3055 emit_move_insn (to_rtx, temp);
3056 preserve_temp_slots (to_rtx);
3059 return want_value ? to_rtx : NULL_RTX;
3062 /* In case we are returning the contents of an object which overlaps
3063 the place the value is being stored, use a safe function when copying
3064 a value through a pointer into a structure value return block. */
3065 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3066 && current_function_returns_struct
3067 && !current_function_returns_pcc_struct)
3072 size = expr_size (from);
3073 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3074 EXPAND_MEMORY_USE_DONT);
3076 /* Copy the rights of the bitmap. */
3077 if (flag_check_memory_usage)
3078 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3079 XEXP (to_rtx, 0), ptr_mode,
3080 XEXP (from_rtx, 0), ptr_mode,
3081 convert_to_mode (TYPE_MODE (sizetype),
3082 size, TREE_UNSIGNED (sizetype)),
3083 TYPE_MODE (sizetype));
3085 #ifdef TARGET_MEM_FUNCTIONS
3086 emit_library_call (memcpy_libfunc, 0,
3087 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3088 XEXP (from_rtx, 0), Pmode,
3089 convert_to_mode (TYPE_MODE (sizetype),
3090 size, TREE_UNSIGNED (sizetype)),
3091 TYPE_MODE (sizetype));
3093 emit_library_call (bcopy_libfunc, 0,
3094 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3095 XEXP (to_rtx, 0), Pmode,
3096 convert_to_mode (TYPE_MODE (integer_type_node),
3097 size, TREE_UNSIGNED (integer_type_node)),
3098 TYPE_MODE (integer_type_node));
3101 preserve_temp_slots (to_rtx);
3104 return want_value ? to_rtx : NULL_RTX;
3107 /* Compute FROM and store the value in the rtx we got. */
3110 result = store_expr (from, to_rtx, want_value);
3111 preserve_temp_slots (result);
3114 return want_value ? result : NULL_RTX;
3117 /* Generate code for computing expression EXP,
3118 and storing the value into TARGET.
3119 TARGET may contain a QUEUED rtx.
3121 If WANT_VALUE is nonzero, return a copy of the value
3122 not in TARGET, so that we can be sure to use the proper
3123 value in a containing expression even if TARGET has something
3124 else stored in it. If possible, we copy the value through a pseudo
3125 and return that pseudo. Or, if the value is constant, we try to
3126 return the constant. In some cases, we return a pseudo
3127 copied *from* TARGET.
3129 If the mode is BLKmode then we may return TARGET itself.
3130 It turns out that in BLKmode it doesn't cause a problem,
3131 because C has no operators that could combine two different
3132 assignments into the same BLKmode object with different values
3133 with no sequence point. Will other languages need this to
3134 be more thorough?
3136 If WANT_VALUE is 0, we return NULL, to make sure
3137 to catch quickly any cases where the caller uses the value
3138 and fails to set WANT_VALUE. */
3141 store_expr (exp, target, want_value)
3143 register rtx target;
3147 int dont_return_target = 0;
3149 if (TREE_CODE (exp) == COMPOUND_EXPR)
3151 /* Perform first part of compound expression, then assign from second
3152 part. */
3153 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3155 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3157 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3159 /* For a conditional expression, get a safe form of the target. Then
3160 test the condition, doing the appropriate assignment on either
3161 side. This avoids the creation of unnecessary temporaries.
3162 For non-BLKmode, it is more efficient not to do this. */
3164 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3167 target = protect_from_queue (target, 1);
3169 do_pending_stack_adjust ();
3171 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3172 start_cleanup_deferal ();
3173 store_expr (TREE_OPERAND (exp, 1), target, 0);
3174 end_cleanup_deferal ();
3176 emit_jump_insn (gen_jump (lab2));
3179 start_cleanup_deferal ();
3180 store_expr (TREE_OPERAND (exp, 2), target, 0);
3181 end_cleanup_deferal ();
3186 return want_value ? target : NULL_RTX;
3188 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3189 && GET_MODE (target) != BLKmode)
3190 /* If target is in memory and caller wants value in a register instead,
3191 arrange that. Pass TARGET as target for expand_expr so that,
3192 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3193 We know expand_expr will not use the target in that case.
3194 Don't do this if TARGET is volatile because we are supposed
3195 to write it and then read it. */
3197 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3198 GET_MODE (target), 0);
3199 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3200 temp = copy_to_reg (temp);
3201 dont_return_target = 1;
3203 else if (queued_subexp_p (target))
3204 /* If target contains a postincrement, let's not risk
3205 using it as the place to generate the rhs. */
3207 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3209 /* Expand EXP into a new pseudo. */
3210 temp = gen_reg_rtx (GET_MODE (target));
3211 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3214 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3216 /* If target is volatile, ANSI requires accessing the value
3217 *from* the target, if it is accessed. So make that happen.
3218 In no case return the target itself. */
3219 if (! MEM_VOLATILE_P (target) && want_value)
3220 dont_return_target = 1;
3222 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3223 /* If this is a scalar in a register that is stored in a wider mode
3224 than the declared mode, compute the result into its declared mode
3225 and then convert to the wider mode. Our value is the computed
3228 /* If we don't want a value, we can do the conversion inside EXP,
3229 which will often result in some optimizations. Do the conversion
3230 in two steps: first change the signedness, if needed, then
3231 the extend. But don't do this if the type of EXP is a subtype
3232 of something else since then the conversion might involve
3233 more than just converting modes. */
3234 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3235 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3237 if (TREE_UNSIGNED (TREE_TYPE (exp))
3238 != SUBREG_PROMOTED_UNSIGNED_P (target))
3241 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3245 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3246 SUBREG_PROMOTED_UNSIGNED_P (target)),
3250 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3252 /* If TEMP is a volatile MEM and we want a result value, make
3253 the access now so it gets done only once. Likewise if
3254 it contains TARGET. */
3255 if (GET_CODE (temp) == MEM && want_value
3256 && (MEM_VOLATILE_P (temp)
3257 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3258 temp = copy_to_reg (temp);
3260 /* If TEMP is a VOIDmode constant, use convert_modes to make
3261 sure that we properly convert it. */
3262 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3263 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3264 TYPE_MODE (TREE_TYPE (exp)), temp,
3265 SUBREG_PROMOTED_UNSIGNED_P (target));
3267 convert_move (SUBREG_REG (target), temp,
3268 SUBREG_PROMOTED_UNSIGNED_P (target));
3269 return want_value ? temp : NULL_RTX;
3273 temp = expand_expr (exp, target, GET_MODE (target), 0);
3274 /* Return TARGET if it's a specified hardware register.
3275 If TARGET is a volatile mem ref, either return TARGET
3276 or return a reg copied *from* TARGET; ANSI requires this.
3278 Otherwise, if TEMP is not TARGET, return TEMP
3279 if it is constant (for efficiency),
3280 or if we really want the correct value. */
3281 if (!(target && GET_CODE (target) == REG
3282 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3283 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3284 && ! rtx_equal_p (temp, target)
3285 && (CONSTANT_P (temp) || want_value))
3286 dont_return_target = 1;
3289 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3290 the same as that of TARGET, adjust the constant. This is needed, for
3291 example, in case it is a CONST_DOUBLE and we want only a word-sized
3293 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3294 && TREE_CODE (exp) != ERROR_MARK
3295 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3296 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3297 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3299 if (flag_check_memory_usage
3300 && GET_CODE (target) == MEM
3301 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3303 if (GET_CODE (temp) == MEM)
3304 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3305 XEXP (target, 0), ptr_mode,
3306 XEXP (temp, 0), ptr_mode,
3307 expr_size (exp), TYPE_MODE (sizetype));
3309 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3310 XEXP (target, 0), ptr_mode,
3311 expr_size (exp), TYPE_MODE (sizetype),
3312 GEN_INT (MEMORY_USE_WO), QImode);
3315 /* If value was not generated in the target, store it there.
3316 Convert the value to TARGET's type first if necessary.
3318 if (! rtx_equal_p (temp, target) && TREE_CODE (exp) != ERROR_MARK)
3320 target = protect_from_queue (target, 1);
3321 if (GET_MODE (temp) != GET_MODE (target)
3322 && GET_MODE (temp) != VOIDmode)
3324 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3325 if (dont_return_target)
3327 /* In this case, we will return TEMP,
3328 so make sure it has the proper mode.
3329 But don't forget to store the value into TARGET. */
3330 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3331 emit_move_insn (target, temp);
3334 convert_move (target, temp, unsignedp);
3337 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3339 /* Handle copying a string constant into an array.
3340 The string constant may be shorter than the array.
3341 So copy just the string's actual length, and clear the rest. */
3345 /* Get the size of the data type of the string,
3346 which is actually the size of the target. */
3347 size = expr_size (exp);
3348 if (GET_CODE (size) == CONST_INT
3349 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3350 emit_block_move (target, temp, size,
3351 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3354 /* Compute the size of the data to copy from the string. */
3356 = size_binop (MIN_EXPR,
3357 make_tree (sizetype, size),
3359 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3360 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3364 /* Copy that much. */
3365 emit_block_move (target, temp, copy_size_rtx,
3366 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3368 /* Figure out how much is left in TARGET that we have to clear.
3369 Do all calculations in ptr_mode. */
3371 addr = XEXP (target, 0);
3372 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3374 if (GET_CODE (copy_size_rtx) == CONST_INT)
3376 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3377 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3381 addr = force_reg (ptr_mode, addr);
3382 addr = expand_binop (ptr_mode, add_optab, addr,
3383 copy_size_rtx, NULL_RTX, 0,
3386 size = expand_binop (ptr_mode, sub_optab, size,
3387 copy_size_rtx, NULL_RTX, 0,
3390 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
3391 GET_MODE (size), 0, 0);
3392 label = gen_label_rtx ();
3393 emit_jump_insn (gen_blt (label));
3396 if (size != const0_rtx)
3398 /* Be sure we can write on ADDR. */
3399 if (flag_check_memory_usage)
3400 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3402 size, TYPE_MODE (sizetype),
3403 GEN_INT (MEMORY_USE_WO), QImode);
3404 #ifdef TARGET_MEM_FUNCTIONS
3405 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3407 const0_rtx, TYPE_MODE (integer_type_node),
3408 convert_to_mode (TYPE_MODE (sizetype),
3410 TREE_UNSIGNED (sizetype)),
3411 TYPE_MODE (sizetype));
3413 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3415 convert_to_mode (TYPE_MODE (integer_type_node),
3417 TREE_UNSIGNED (integer_type_node)),
3418 TYPE_MODE (integer_type_node));
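/* A standalone sketch of the copy-then-clear technique above; the
function name and plain-pointer interface are illustrative, not
GNU CC interfaces. */
#if 0
#include <string.h>

static void
store_string_cst (char *target, unsigned int target_size,
                  const char *str, unsigned int str_len)
{
  unsigned int copy = str_len < target_size ? str_len : target_size;

  /* Copy just the string's actual length...  */
  memcpy (target, str, copy);

  /* ...then clear whatever is left of the array, as the code above
     does with memset/bzero when the array is longer.  */
  if (copy < target_size)
    memset (target + copy, 0, target_size - copy);
}
#endif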
3426 /* Handle calls that return values in multiple non-contiguous locations.
3427 The Irix 6 ABI has examples of this. */
3428 else if (GET_CODE (target) == PARALLEL)
3429 emit_group_load (target, temp);
3430 else if (GET_MODE (temp) == BLKmode)
3431 emit_block_move (target, temp, expr_size (exp),
3432 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3434 emit_move_insn (target, temp);
3437 /* If we don't want a value, return NULL_RTX. */
3441 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3442 ??? The latter test doesn't seem to make sense. */
3443 else if (dont_return_target && GET_CODE (temp) != MEM)
3446 /* Return TARGET itself if it is a hard register. */
3447 else if (want_value && GET_MODE (target) != BLKmode
3448 && ! (GET_CODE (target) == REG
3449 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3450 return copy_to_reg (target);
3456 /* Return 1 if EXP just contains zeros. */
3464 switch (TREE_CODE (exp))
3468 case NON_LVALUE_EXPR:
3469 return is_zeros_p (TREE_OPERAND (exp, 0));
3472 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3476 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3479 return REAL_VALUES_EQUAL (TREE_REAL_CST (exp), dconst0);
3482 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3483 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3484 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3485 if (! is_zeros_p (TREE_VALUE (elt)))
3495 /* Return 1 if EXP contains mostly (3/4) zeros. */
3498 mostly_zeros_p (exp)
3501 if (TREE_CODE (exp) == CONSTRUCTOR)
3503 int elts = 0, zeros = 0;
3504 tree elt = CONSTRUCTOR_ELTS (exp);
3505 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3507 /* If there are no ranges of true bits, it is all zero. */
3508 return elt == NULL_TREE;
3510 for (; elt; elt = TREE_CHAIN (elt))
3512 /* We do not handle the case where the index is a RANGE_EXPR,
3513 so the statistic will be somewhat inaccurate.
3514 We do make a more accurate count in store_constructor itself,
3515 and since this function is only used for nested array elements,
3516 this should be close enough. */
3517 if (mostly_zeros_p (TREE_VALUE (elt)))
3522 return 4 * zeros >= 3 * elts;
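/* Worked example (illustrative): a constructor with 8 elements of
which 6 are mostly zero gives 4*6 >= 3*8, i.e. 24 >= 24, so the 3/4
threshold is met and callers will clear the whole object first and
store only the nonzero elements. */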
3525 return is_zeros_p (exp);
3528 /* Helper function for store_constructor.
3529 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3530 TYPE is the type of the CONSTRUCTOR, not the element type.
3531 CLEARED is as for store_constructor.
3533 This provides a recursive shortcut back to store_constructor when it isn't
3534 necessary to go through store_field. This is so that we can pass through
3535 the cleared field to let store_constructor know that we may not have to
3536 clear a substructure if the outer structure has already been cleared. */
3539 store_constructor_field (target, bitsize, bitpos,
3540 mode, exp, type, cleared)
3542 int bitsize, bitpos;
3543 enum machine_mode mode;
3547 if (TREE_CODE (exp) == CONSTRUCTOR
3548 && bitpos % BITS_PER_UNIT == 0
3549 /* If we have a non-zero bitpos for a register target, then we just
3550 let store_field do the bitfield handling. This is unlikely to
3551 generate unnecessary clear instructions anyway. */
3552 && (bitpos == 0 || GET_CODE (target) == MEM))
3555 target = change_address (target, VOIDmode,
3556 plus_constant (XEXP (target, 0),
3557 bitpos / BITS_PER_UNIT));
3558 store_constructor (exp, target, cleared);
3561 store_field (target, bitsize, bitpos, mode, exp,
3562 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3563 int_size_in_bytes (type));
3566 /* Store the value of constructor EXP into the rtx TARGET.
3567 TARGET is either a REG or a MEM.
3568 CLEARED is true if TARGET is known to have been zero'd. */
3571 store_constructor (exp, target, cleared)
3576 tree type = TREE_TYPE (exp);
3578 /* We know our target cannot conflict, since safe_from_p has been called. */
3580 /* Don't try copying piece by piece into a hard register
3581 since that is vulnerable to being clobbered by EXP.
3582 Instead, construct in a pseudo register and then copy it all. */
3583 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3585 rtx temp = gen_reg_rtx (GET_MODE (target));
3586 store_constructor (exp, temp, 0);
3587 emit_move_insn (target, temp);
3592 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3593 || TREE_CODE (type) == QUAL_UNION_TYPE)
3597 /* Inform later passes that the whole union value is dead. */
3598 if (TREE_CODE (type) == UNION_TYPE
3599 || TREE_CODE (type) == QUAL_UNION_TYPE)
3600 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3602 /* If we are building a static constructor into a register,
3603 set the initial value as zero so we can fold the value into
3604 a constant. But if more than one register is involved,
3605 this probably loses. */
3606 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3607 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
3610 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
3615 /* If the constructor has fewer fields than the structure
3616 or if we are initializing the structure to mostly zeros,
3617 clear the whole structure first. */
3618 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3619 != list_length (TYPE_FIELDS (type)))
3620 || mostly_zeros_p (exp))
3623 clear_storage (target, expr_size (exp),
3624 TYPE_ALIGN (type) / BITS_PER_UNIT);
3629 /* Inform later passes that the old value is dead. */
3630 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3632 /* Store each element of the constructor into
3633 the corresponding field of TARGET. */
3635 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3637 register tree field = TREE_PURPOSE (elt);
3638 register enum machine_mode mode;
3642 tree pos, constant = 0, offset = 0;
3643 rtx to_rtx = target;
3645 /* Just ignore missing fields.
3646 We cleared the whole structure, above,
3647 if any fields are missing. */
3651 if (cleared && is_zeros_p (TREE_VALUE (elt)))
3654 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3655 unsignedp = TREE_UNSIGNED (field);
3656 mode = DECL_MODE (field);
3657 if (DECL_BIT_FIELD (field))
3660 pos = DECL_FIELD_BITPOS (field);
3661 if (TREE_CODE (pos) == INTEGER_CST)
3663 else if (TREE_CODE (pos) == PLUS_EXPR
3664 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3665 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3670 bitpos = TREE_INT_CST_LOW (constant);
3676 if (contains_placeholder_p (offset))
3677 offset = build (WITH_RECORD_EXPR, sizetype,
3680 offset = size_binop (FLOOR_DIV_EXPR, offset,
3681 size_int (BITS_PER_UNIT));
3683 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3684 if (GET_CODE (to_rtx) != MEM)
3688 = change_address (to_rtx, VOIDmode,
3689 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
3690 force_reg (ptr_mode, offset_rtx)));
3692 if (TREE_READONLY (field))
3694 if (GET_CODE (to_rtx) == MEM)
3695 to_rtx = copy_rtx (to_rtx);
3697 RTX_UNCHANGING_P (to_rtx) = 1;
3700 store_constructor_field (to_rtx, bitsize, bitpos,
3701 mode, TREE_VALUE (elt), type, cleared);
3704 else if (TREE_CODE (type) == ARRAY_TYPE)
3709 tree domain = TYPE_DOMAIN (type);
3710 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3711 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3712 tree elttype = TREE_TYPE (type);
3714 /* If the constructor has fewer elements than the array,
3715 clear the whole array first. Similarly if this is a
3716 static constructor of a non-BLKmode object. */
3717 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3721 HOST_WIDE_INT count = 0, zero_count = 0;
3723 /* This loop is a more accurate version of the loop in
3724 mostly_zeros_p (it handles RANGE_EXPR in an index).
3725 It is also needed to check for missing elements. */
3726 for (elt = CONSTRUCTOR_ELTS (exp);
3728 elt = TREE_CHAIN (elt))
3730 tree index = TREE_PURPOSE (elt);
3731 HOST_WIDE_INT this_node_count;
3732 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3734 tree lo_index = TREE_OPERAND (index, 0);
3735 tree hi_index = TREE_OPERAND (index, 1);
3736 if (TREE_CODE (lo_index) != INTEGER_CST
3737 || TREE_CODE (hi_index) != INTEGER_CST)
3742 this_node_count = TREE_INT_CST_LOW (hi_index)
3743 - TREE_INT_CST_LOW (lo_index) + 1;
3746 this_node_count = 1;
3747 count += this_node_count;
3748 if (mostly_zeros_p (TREE_VALUE (elt)))
3749 zero_count += this_node_count;
3751 /* Clear the entire array first if there are any missing elements,
3752 or if the incidence of zero elements is >= 75%. */
3753 if (count < maxelt - minelt + 1
3754 || 4 * zero_count >= 3 * count)
3760 clear_storage (target, expr_size (exp),
3761 TYPE_ALIGN (type) / BITS_PER_UNIT);
3765 /* Inform later passes that the old value is dead. */
3766 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3768 /* Store each element of the constructor into
3769 the corresponding element of TARGET, determined
3770 by counting the elements. */
3771 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3773 elt = TREE_CHAIN (elt), i++)
3775 register enum machine_mode mode;
3779 tree value = TREE_VALUE (elt);
3780 tree index = TREE_PURPOSE (elt);
3781 rtx xtarget = target;
3783 if (cleared && is_zeros_p (value))
3786 mode = TYPE_MODE (elttype);
3787 bitsize = GET_MODE_BITSIZE (mode);
3788 unsignedp = TREE_UNSIGNED (elttype);
3790 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3792 tree lo_index = TREE_OPERAND (index, 0);
3793 tree hi_index = TREE_OPERAND (index, 1);
3794 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
3795 struct nesting *loop;
3796 HOST_WIDE_INT lo, hi, count;
3799 /* If the range is constant and "small", unroll the loop. */
3800 if (TREE_CODE (lo_index) == INTEGER_CST
3801 && TREE_CODE (hi_index) == INTEGER_CST
3802 && (lo = TREE_INT_CST_LOW (lo_index),
3803 hi = TREE_INT_CST_LOW (hi_index),
3804 count = hi - lo + 1,
3805 (GET_CODE (target) != MEM
3807 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
3808 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
3811 lo -= minelt; hi -= minelt;
3812 for (; lo <= hi; lo++)
3814 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
3815 store_constructor_field (target, bitsize, bitpos,
3816 mode, value, type, cleared);
3821 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
3822 loop_top = gen_label_rtx ();
3823 loop_end = gen_label_rtx ();
3825 unsignedp = TREE_UNSIGNED (domain);
3827 index = build_decl (VAR_DECL, NULL_TREE, domain);
3829 DECL_RTL (index) = index_r
3830 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
3833 if (TREE_CODE (value) == SAVE_EXPR
3834 && SAVE_EXPR_RTL (value) == 0)
3836 /* Make sure value gets expanded once before the
3837 loop. */
3838 expand_expr (value, const0_rtx, VOIDmode, 0);
3841 store_expr (lo_index, index_r, 0);
3842 loop = expand_start_loop (0);
3844 /* Assign value to element index. */
3845 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3846 size_int (BITS_PER_UNIT));
3847 position = size_binop (MULT_EXPR,
3848 size_binop (MINUS_EXPR, index,
3849 TYPE_MIN_VALUE (domain)),
3851 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3852 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3853 xtarget = change_address (target, mode, addr);
3854 if (TREE_CODE (value) == CONSTRUCTOR)
3855 store_constructor (value, xtarget, cleared);
3857 store_expr (value, xtarget, 0);
3859 expand_exit_loop_if_false (loop,
3860 build (LT_EXPR, integer_type_node,
3863 expand_increment (build (PREINCREMENT_EXPR,
3865 index, integer_one_node), 0, 0);
3867 emit_label (loop_end);
3869 /* Needed by stupid register allocation, to extend the
3870 lifetime of pseudo-regs used by target past the end
3871 of the loop. */
3872 emit_insn (gen_rtx (USE, GET_MODE (target), target));
3875 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
3876 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
3882 index = size_int (i);
3885 index = size_binop (MINUS_EXPR, index,
3886 TYPE_MIN_VALUE (domain));
3887 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3888 size_int (BITS_PER_UNIT));
3889 position = size_binop (MULT_EXPR, index, position);
3890 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3891 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3892 xtarget = change_address (target, mode, addr);
3893 store_expr (value, xtarget, 0);
3898 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
3899 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3901 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3902 store_constructor_field (target, bitsize, bitpos,
3903 mode, value, type, cleared);
3907 /* set constructor assignments */
3908 else if (TREE_CODE (type) == SET_TYPE)
3910 tree elt = CONSTRUCTOR_ELTS (exp);
3911 rtx xtarget = XEXP (target, 0);
3912 int set_word_size = TYPE_ALIGN (type);
3913 int nbytes = int_size_in_bytes (type), nbits;
3914 tree domain = TYPE_DOMAIN (type);
3915 tree domain_min, domain_max, bitlength;
3917 /* The default implementation strategy is to extract the constant
3918 parts of the constructor, use that to initialize the target,
3919 and then "or" in whatever non-constant ranges we need in addition.
3921 If a large set is all zero or all ones, it is
3922 probably better to set it using memset (if available) or bzero.
3923 Also, if a large set has just a single range, it may also be
3924 better to first clear the whole set (using
3925 bzero/memset), and then set the bits we want. */
3927 /* Check for all zeros. */
3928 if (elt == NULL_TREE)
3931 clear_storage (target, expr_size (exp),
3932 TYPE_ALIGN (type) / BITS_PER_UNIT);
3936 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3937 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3938 bitlength = size_binop (PLUS_EXPR,
3939 size_binop (MINUS_EXPR, domain_max, domain_min),
3942 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
3944 nbits = TREE_INT_CST_LOW (bitlength);
3946 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
3947 are "complicated" (more than one range), initialize (the
3948 constant parts) by copying from a constant. */
3949 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
3950 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
3952 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3953 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
3954 char *bit_buffer = (char *) alloca (nbits);
3955 HOST_WIDE_INT word = 0;
3958 int offset = 0; /* In bytes from beginning of set. */
3959 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
3962 if (bit_buffer[ibit])
3964 if (BYTES_BIG_ENDIAN)
3965 word |= (1 << (set_word_size - 1 - bit_pos));
3967 word |= 1 << bit_pos;
3970 if (bit_pos >= set_word_size || ibit == nbits)
3972 if (word != 0 || ! cleared)
3974 rtx datum = GEN_INT (word);
3976 /* The assumption here is that it is safe to use
3977 XEXP if the set is multi-word, but not if
3978 it's single-word. */
3979 if (GET_CODE (target) == MEM)
3981 to_rtx = plus_constant (XEXP (target, 0), offset);
3982 to_rtx = change_address (target, mode, to_rtx);
3984 else if (offset == 0)
3988 emit_move_insn (to_rtx, datum);
3994 offset += set_word_size / BITS_PER_UNIT;
4000 /* Don't bother clearing storage if the set is all ones. */
4001 if (TREE_CHAIN (elt) != NULL_TREE
4002 || (TREE_PURPOSE (elt) == NULL_TREE
4004 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4005 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4006 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4007 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4009 clear_storage (target, expr_size (exp),
4010 TYPE_ALIGN (type) / BITS_PER_UNIT);
4013 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4015 /* start of range of element or NULL */
4016 tree startbit = TREE_PURPOSE (elt);
4017 /* end of range of element, or element value */
4018 tree endbit = TREE_VALUE (elt);
4019 HOST_WIDE_INT startb, endb;
4020 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4022 bitlength_rtx = expand_expr (bitlength,
4023 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4025 /* handle non-range tuple element like [ expr ] */
4026 if (startbit == NULL_TREE)
4028 startbit = save_expr (endbit);
4031 startbit = convert (sizetype, startbit);
4032 endbit = convert (sizetype, endbit);
4033 if (! integer_zerop (domain_min))
4035 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4036 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4038 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4039 EXPAND_CONST_ADDRESS);
4040 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4041 EXPAND_CONST_ADDRESS);
4045 targetx = assign_stack_temp (GET_MODE (target),
4046 GET_MODE_SIZE (GET_MODE (target)),
4048 emit_move_insn (targetx, target);
4050 else if (GET_CODE (target) == MEM)
4055 #ifdef TARGET_MEM_FUNCTIONS
4056 /* Optimization: If startbit and endbit are
4057 constants divisible by BITS_PER_UNIT,
4058 call memset instead. */
4059 if (TREE_CODE (startbit) == INTEGER_CST
4060 && TREE_CODE (endbit) == INTEGER_CST
4061 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4062 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4064 emit_library_call (memset_libfunc, 0,
4066 plus_constant (XEXP (targetx, 0),
4067 startb / BITS_PER_UNIT),
4069 constm1_rtx, TYPE_MODE (integer_type_node),
4070 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4071 TYPE_MODE (sizetype));
4076 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
4077 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4078 bitlength_rtx, TYPE_MODE (sizetype),
4079 startbit_rtx, TYPE_MODE (sizetype),
4080 endbit_rtx, TYPE_MODE (sizetype));
4083 emit_move_insn (target, targetx);
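/* A standalone sketch of the byte-aligned special case above: when a
range of set bits starts and ends on byte boundaries, whole bytes can
be filled with memset instead of calling __setbits. The function name
and plain-pointer interface are illustrative. */
#if 0
#include <string.h>

static int
set_bit_range (unsigned char *set, unsigned int startbit,
               unsigned int endbit)
{
  /* ENDBIT is exclusive here, like ENDB = TREE_INT_CST_LOW (endbit) + 1
     in the code above.  */
  if (startbit % 8 == 0 && endbit % 8 == 0)
    {
      memset (set + startbit / 8, ~0, (endbit - startbit) / 8);
      return 1;
    }
  return 0;			/* caller falls back to __setbits */
}
#endif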
4091 /* Store the value of EXP (an expression tree)
4092 into a subfield of TARGET which has mode MODE and occupies
4093 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4094 If MODE is VOIDmode, it means that we are storing into a bit-field.
4096 If VALUE_MODE is VOIDmode, return nothing in particular.
4097 UNSIGNEDP is not used in this case.
4099 Otherwise, return an rtx for the value stored. This rtx
4100 has mode VALUE_MODE if that is convenient to do.
4101 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4103 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4104 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
4107 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4108 unsignedp, align, total_size)
4110 int bitsize, bitpos;
4111 enum machine_mode mode;
4113 enum machine_mode value_mode;
4118 HOST_WIDE_INT width_mask = 0;
4120 if (TREE_CODE (exp) == ERROR_MARK)
4123 if (bitsize < HOST_BITS_PER_WIDE_INT)
4124 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
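/* Worked example (illustrative): for a 5-bit field, WIDTH_MASK is
(1 << 5) - 1 = 0x1f, the mask used further down to strip the bits
above the field when the caller wants the stored value back. */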
4126 /* If we are storing into an unaligned field of an aligned union that is
4127 in a register, we may have the mode of TARGET being an integer mode but
4128 MODE == BLKmode. In that case, get an aligned object whose size and
4129 alignment are the same as TARGET and store TARGET into it (we can avoid
4130 the store if the field being stored is the entire width of TARGET). Then
4131 call ourselves recursively to store the field into a BLKmode version of
4132 that object. Finally, load from the object into TARGET. This is not
4133 very efficient in general, but should only be slightly more expensive
4134 than the otherwise-required unaligned accesses. Perhaps this can be
4135 cleaned up later. */
4138 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4140 rtx object = assign_stack_temp (GET_MODE (target),
4141 GET_MODE_SIZE (GET_MODE (target)), 0);
4142 rtx blk_object = copy_rtx (object);
4144 MEM_IN_STRUCT_P (object) = 1;
4145 MEM_IN_STRUCT_P (blk_object) = 1;
4146 PUT_MODE (blk_object, BLKmode);
4148 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4149 emit_move_insn (object, target);
4151 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4154 /* Even though we aren't returning target, we need to
4155 give it the updated value. */
4156 emit_move_insn (target, object);
4161 /* If the structure is in a register or if the component
4162 is a bit field, we cannot use addressing to access it.
4163 Use bit-field techniques or SUBREG to store in it. */
4165 if (mode == VOIDmode
4166 || (mode != BLKmode && ! direct_store[(int) mode])
4167 || GET_CODE (target) == REG
4168 || GET_CODE (target) == SUBREG
4169 /* If the field isn't aligned enough to store as an ordinary memref,
4170 store it as a bit field. */
4171 || (SLOW_UNALIGNED_ACCESS
4172 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4173 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4175 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4177 /* If BITSIZE is narrower than the size of the type of EXP
4178 we will be narrowing TEMP. Normally, what's wanted are the
4179 low-order bits. However, if EXP's type is a record and this is
4180 a big-endian machine, we want the upper BITSIZE bits.
4181 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4182 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4183 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4184 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4185 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4189 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4190 MODE. */
4191 if (mode != VOIDmode && mode != BLKmode
4192 && mode != TYPE_MODE (TREE_TYPE (exp)))
4193 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4195 /* If the modes of TARGET and TEMP are both BLKmode, both
4196 must be in memory and BITPOS must be aligned on a byte
4197 boundary. If so, we simply do a block copy. */
4198 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4200 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4201 || bitpos % BITS_PER_UNIT != 0)
4204 target = change_address (target, VOIDmode,
4205 plus_constant (XEXP (target, 0),
4206 bitpos / BITS_PER_UNIT));
4208 emit_block_move (target, temp,
4209 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4213 return value_mode == VOIDmode ? const0_rtx : target;
4216 /* Store the value in the bitfield. */
4217 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4218 if (value_mode != VOIDmode)
4220 /* The caller wants an rtx for the value. */
4221 /* If possible, avoid refetching from the bitfield itself. */
4223 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4226 enum machine_mode tmode;
4229 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4230 tmode = GET_MODE (temp);
4231 if (tmode == VOIDmode)
4233 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4234 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4235 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
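/* Worked example (illustrative): with a 32-bit TMODE and an 8-bit
field, COUNT is 24; shifting left by 24 and then right by 24 leaves
just the field's bits, extended to the full word. */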
4237 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4238 NULL_RTX, value_mode, 0, align,
4245 rtx addr = XEXP (target, 0);
4248 /* If a value is wanted, it must be the lhs;
4249 so make the address stable for multiple use. */
4251 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4252 && ! CONSTANT_ADDRESS_P (addr)
4253 /* A frame-pointer reference is already stable. */
4254 && ! (GET_CODE (addr) == PLUS
4255 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4256 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4257 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4258 addr = copy_to_reg (addr);
4260 /* Now build a reference to just the desired component. */
4262 to_rtx = copy_rtx (change_address (target, mode,
4263 plus_constant (addr,
4265 / BITS_PER_UNIT))));
4266 MEM_IN_STRUCT_P (to_rtx) = 1;
4268 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4272 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4273 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4274 ARRAY_REFs and find the ultimate containing object, which we return.
4276 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4277 bit position, and *PUNSIGNEDP to the signedness of the field.
4278 If the position of the field is variable, we store a tree
4279 giving the variable offset (in units) in *POFFSET.
4280 This offset is in addition to the bit position.
4281 If the position is not variable, we store 0 in *POFFSET.
4282 We set *PALIGNMENT to the alignment in bytes of the address that will be
4283 computed. This is the alignment of the thing we return if *POFFSET
4284 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4286 If any of the extraction expressions is volatile,
4287 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4289 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4290 is a mode that can be used to access the field. In that case, *PBITSIZE
4293 If the field describes a variable-sized object, *PMODE is set to
4294 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4295 this case, but the address of the object can be found. */
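/* Worked example (illustrative): for `s.f' where F is a 3-bit
bit-field placed 17 bits into S, get_inner_reference returns S and
sets *PBITSIZE = 3, *PBITPOS = 17, *POFFSET = 0 and *PMODE = VOIDmode,
since a bit-field has no directly addressable mode. */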
4298 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4299 punsignedp, pvolatilep, palignment)
4304 enum machine_mode *pmode;
4309 tree orig_exp = exp;
4311 enum machine_mode mode = VOIDmode;
4312 tree offset = integer_zero_node;
4313 int alignment = BIGGEST_ALIGNMENT;
4315 if (TREE_CODE (exp) == COMPONENT_REF)
4317 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4318 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4319 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4320 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4322 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4324 size_tree = TREE_OPERAND (exp, 1);
4325 *punsignedp = TREE_UNSIGNED (exp);
4329 mode = TYPE_MODE (TREE_TYPE (exp));
4330 *pbitsize = GET_MODE_BITSIZE (mode);
4331 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4336 if (TREE_CODE (size_tree) != INTEGER_CST)
4337 mode = BLKmode, *pbitsize = -1;
4339 *pbitsize = TREE_INT_CST_LOW (size_tree);
4342 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4343 and find the ultimate containing object. */
4349 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4351 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4352 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4353 : TREE_OPERAND (exp, 2));
4354 tree constant = integer_zero_node, var = pos;
4356 /* If this field hasn't been filled in yet, don't go
4357 past it. This should only happen when folding expressions
4358 made during type construction. */
4362 /* Assume here that the offset is a multiple of a unit.
4363 If not, there should be an explicitly added constant. */
4364 if (TREE_CODE (pos) == PLUS_EXPR
4365 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4366 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4367 else if (TREE_CODE (pos) == INTEGER_CST)
4368 constant = pos, var = integer_zero_node;
4370 *pbitpos += TREE_INT_CST_LOW (constant);
4371 offset = size_binop (PLUS_EXPR, offset,
4372 size_binop (EXACT_DIV_EXPR, var,
4373 size_int (BITS_PER_UNIT)));
4376 else if (TREE_CODE (exp) == ARRAY_REF)
4378 /* This code is based on the code in case ARRAY_REF in expand_expr
4379 below. We assume here that the size of an array element is
4380 always an integral multiple of BITS_PER_UNIT. */
4382 tree index = TREE_OPERAND (exp, 1);
4383 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4385 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4386 tree index_type = TREE_TYPE (index);
4388 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4390 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0), index);
4392 index_type = TREE_TYPE (index);
4395 if (! integer_zerop (low_bound))
4396 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
4398 index = fold (build (MULT_EXPR, index_type, index,
4399 convert (index_type,
4400 TYPE_SIZE (TREE_TYPE (exp)))));
4402 if (TREE_CODE (index) == INTEGER_CST
4403 && TREE_INT_CST_HIGH (index) == 0)
4404 *pbitpos += TREE_INT_CST_LOW (index);
4406 offset = size_binop (PLUS_EXPR, offset,
4407 size_binop (FLOOR_DIV_EXPR, index,
4408 size_int (BITS_PER_UNIT)));
4410 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4411 && ! ((TREE_CODE (exp) == NOP_EXPR
4412 || TREE_CODE (exp) == CONVERT_EXPR)
4413 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4414 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4416 && (TYPE_MODE (TREE_TYPE (exp))
4417 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4420 /* If any reference in the chain is volatile, the effect is volatile. */
4421 if (TREE_THIS_VOLATILE (exp))
4424 /* If the offset is non-constant already, then we can't assume any
4425 alignment more than the alignment here. */
4426 if (! integer_zerop (offset))
4427 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4429 exp = TREE_OPERAND (exp, 0);
4432 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4433 alignment = MIN (alignment, DECL_ALIGN (exp));
4434 else if (TREE_TYPE (exp) != 0)
4435 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4437 if (integer_zerop (offset))
4440 if (offset != 0 && contains_placeholder_p (offset))
4441 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4445 *palignment = alignment / BITS_PER_UNIT;
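/* Illustrative sketch (not from the original sources): the shape of a
   typical call to get_inner_reference and what the outputs mean.  */
#if 0
{
  int bitsize, bitpos, unsignedp, volatilep, alignment;
  tree offset;
  enum machine_mode mode1;
  tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				  &mode1, &unsignedp, &volatilep,
				  &alignment);

  /* The field is BITSIZE bits wide and lives BITPOS bits past the
     address of TEM, plus OFFSET units when the position is variable;
     MODE1 is VOIDmode exactly when it must be fetched as a bit-field.  */
}
#endif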
4449 /* Subroutine of expand_expr: compute memory_usage from modifier. */
4450 static enum memory_use_mode
4451 get_memory_usage_from_modifier (modifier)
4452 enum expand_modifier modifier;
4457 return MEMORY_USE_RO;
4459 case EXPAND_MEMORY_USE_WO:
4460 return MEMORY_USE_WO;
4462 case EXPAND_MEMORY_USE_RW:
4463 return MEMORY_USE_RW;
4465 case EXPAND_INITIALIZER:
4466 case EXPAND_MEMORY_USE_DONT:
4468 case EXPAND_CONST_ADDRESS:
4469 return MEMORY_USE_DONT;
4470 case EXPAND_MEMORY_USE_BAD:
4476 /* Given an rtx VALUE that may contain additions and multiplications,
4477 return an equivalent value that just refers to a register or memory.
4478 This is done by generating instructions to perform the arithmetic
4479 and returning a pseudo-register containing the value.
4481 The returned value may be a REG, SUBREG, MEM or constant. */
4484 force_operand (value, target)
4487 register optab binoptab = 0;
4488 /* Use a temporary to force order of execution of calls to `force_operand'. */
4492 /* Use subtarget as the target for operand 0 of a binary operation. */
4493 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4495 if (GET_CODE (value) == PLUS)
4496 binoptab = add_optab;
4497 else if (GET_CODE (value) == MINUS)
4498 binoptab = sub_optab;
4499 else if (GET_CODE (value) == MULT)
4501 op2 = XEXP (value, 1);
4502 if (!CONSTANT_P (op2)
4503 && !(GET_CODE (op2) == REG && op2 != subtarget))
4505 tmp = force_operand (XEXP (value, 0), subtarget);
4506 return expand_mult (GET_MODE (value), tmp,
4507 force_operand (op2, NULL_RTX),
4513 op2 = XEXP (value, 1);
4514 if (!CONSTANT_P (op2)
4515 && !(GET_CODE (op2) == REG && op2 != subtarget))
4517 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4519 binoptab = add_optab;
4520 op2 = negate_rtx (GET_MODE (value), op2);
4523 /* Check for an addition with OP2 a constant integer and our first
4524 operand a PLUS of a virtual register and something else. In that
4525 case, we want to emit the sum of the virtual register and the
4526 constant first and then add the other value. This allows virtual
4527 register instantiation to simply modify the constant rather than
4528 creating another one around this addition. */
4529 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4530 && GET_CODE (XEXP (value, 0)) == PLUS
4531 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4532 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4533 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4535 rtx temp = expand_binop (GET_MODE (value), binoptab,
4536 XEXP (XEXP (value, 0), 0), op2,
4537 subtarget, 0, OPTAB_LIB_WIDEN);
4538 return expand_binop (GET_MODE (value), binoptab, temp,
4539 force_operand (XEXP (XEXP (value, 0), 1), 0),
4540 target, 0, OPTAB_LIB_WIDEN);
4543 tmp = force_operand (XEXP (value, 0), subtarget);
4544 return expand_binop (GET_MODE (value), binoptab, tmp,
4545 force_operand (op2, NULL_RTX),
4546 target, 0, OPTAB_LIB_WIDEN);
4547 /* We give UNSIGNEDP = 0 to expand_binop
4548 because the only operations we are expanding here are signed ones. */
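/* Illustrative note (not from the original sources): for
   VALUE = (plus (plus VIRT X) (const_int C)), the special case above
   emits VIRT+C first, so register instantiation can fold C into the
   virtual register's offset, and only then adds X; for a plain
   (plus (reg) (const_int C)), force_operand just emits one add and
   hands back the resulting pseudo register.  */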
4553 /* Subroutine of expand_expr:
4554 save the non-copied parts (LIST) of an expr (LHS), and return a list
4555 which can restore these values to their previous values,
4556 should something modify their storage. */
4559 save_noncopied_parts (lhs, list)
4566 for (tail = list; tail; tail = TREE_CHAIN (tail))
4567 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4568 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
4571 tree part = TREE_VALUE (tail);
4572 tree part_type = TREE_TYPE (part);
4573 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
4574 rtx target = assign_temp (part_type, 0, 1, 1);
4575 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
4576 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
4577 parts = tree_cons (to_be_saved,
4578 build (RTL_EXPR, part_type, NULL_TREE,
4581 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
4586 /* Subroutine of expand_expr:
4587 record the non-copied parts (LIST) of an expr (LHS), and return a list
4588 which specifies the initial values of these parts. */
4591 init_noncopied_parts (lhs, list)
4598 for (tail = list; tail; tail = TREE_CHAIN (tail))
4599 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4600 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
4603 tree part = TREE_VALUE (tail);
4604 tree part_type = TREE_TYPE (part);
4605 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
4606 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
4611 /* Subroutine of expand_expr: return nonzero iff there is no way that
4612 EXP can reference X, which is being modified. */
4615 safe_from_p (x, exp)
4623 /* If EXP has varying size, we MUST use a target since we currently
4624 have no way of allocating temporaries of variable size
4625 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
4626 So we assume here that something at a higher level has prevented a
4627 clash. This is somewhat bogus, but the best we can do. Only
4628 do this when X is BLKmode. */
4629 || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4630 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
4631 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
4632 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
4633 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
4635 && GET_MODE (x) == BLKmode))
4638 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
4639 find the underlying pseudo. */
4640 if (GET_CODE (x) == SUBREG)
4643 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4647 /* If X is a location in the outgoing argument area, it is always safe. */
4648 if (GET_CODE (x) == MEM
4649 && (XEXP (x, 0) == virtual_outgoing_args_rtx
4650 || (GET_CODE (XEXP (x, 0)) == PLUS
4651 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
4654 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4657 exp_rtl = DECL_RTL (exp);
4664 if (TREE_CODE (exp) == TREE_LIST)
4665 return ((TREE_VALUE (exp) == 0
4666 || safe_from_p (x, TREE_VALUE (exp)))
4667 && (TREE_CHAIN (exp) == 0
4668 || safe_from_p (x, TREE_CHAIN (exp))));
4673 return safe_from_p (x, TREE_OPERAND (exp, 0));
4677 return (safe_from_p (x, TREE_OPERAND (exp, 0))
4678 && safe_from_p (x, TREE_OPERAND (exp, 1)));
4682 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
4683 the expression. If it is set, we conflict iff we are that rtx or
4684 both are in memory. Otherwise, we check all operands of the
4685 expression recursively. */
4687 switch (TREE_CODE (exp))
4690 return (staticp (TREE_OPERAND (exp, 0))
4691 || safe_from_p (x, TREE_OPERAND (exp, 0)));
4694 if (GET_CODE (x) == MEM)
4699 exp_rtl = CALL_EXPR_RTL (exp);
4702 /* Assume that the call will clobber all hard registers and all of memory. */
4704 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4705 || GET_CODE (x) == MEM)
4712 /* If a sequence exists, we would have to scan every instruction
4713 in the sequence to see if it was safe. This is probably not worthwhile. */
4715 if (RTL_EXPR_SEQUENCE (exp))
4718 exp_rtl = RTL_EXPR_RTL (exp);
4721 case WITH_CLEANUP_EXPR:
4722 exp_rtl = RTL_EXPR_RTL (exp);
4725 case CLEANUP_POINT_EXPR:
4726 return safe_from_p (x, TREE_OPERAND (exp, 0));
4729 exp_rtl = SAVE_EXPR_RTL (exp);
4733 /* The only operand we look at is operand 1. The rest aren't
4734 part of the expression. */
4735 return safe_from_p (x, TREE_OPERAND (exp, 1));
4737 case METHOD_CALL_EXPR:
4738 /* This takes an rtx argument, but shouldn't appear here. */
4745 /* If we have an rtx, we do not need to scan our operands. */
4749 nops = tree_code_length[(int) TREE_CODE (exp)];
4750 for (i = 0; i < nops; i++)
4751 if (TREE_OPERAND (exp, i) != 0
4752 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
4756 /* If we have an rtl, find any enclosed object. Then see if we conflict with it. */
4760 if (GET_CODE (exp_rtl) == SUBREG)
4762 exp_rtl = SUBREG_REG (exp_rtl);
4763 if (GET_CODE (exp_rtl) == REG
4764 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
4768 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
4769 are memory and EXP is not readonly. */
4770 return ! (rtx_equal_p (x, exp_rtl)
4771 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
4772 && ! TREE_READONLY (exp)));
4775 /* If we reach here, it is safe. */
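/* Illustrative restatement (not from the original sources) of the
   final conflict test in safe_from_p:  */
#if 0
static int
conflicts_p (rtx x, rtx exp_rtl, int exp_readonly)
{
  /* EXP conflicts with X iff its rtl is X itself, or both are memory
     references and EXP is not readonly.  */
  return (rtx_equal_p (x, exp_rtl)
	  || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
	      && ! exp_readonly));
}
#endif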
4779 /* Subroutine of expand_expr: return nonzero iff EXP is an
4780 expression whose type is statically determinable. */
4786 if (TREE_CODE (exp) == PARM_DECL
4787 || TREE_CODE (exp) == VAR_DECL
4788 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
4789 || TREE_CODE (exp) == COMPONENT_REF
4790 || TREE_CODE (exp) == ARRAY_REF)
4795 /* Subroutine of expand_expr: return rtx if EXP is a
4796 variable or parameter; else return 0. */
4803 switch (TREE_CODE (exp))
4807 return DECL_RTL (exp);
4813 /* expand_expr: generate code for computing expression EXP.
4814 An rtx for the computed value is returned. The value is never null.
4815 In the case of a void EXP, const0_rtx is returned.
4817 The value may be stored in TARGET if TARGET is nonzero.
4818 TARGET is just a suggestion; callers must assume that
4819 the rtx returned may not be the same as TARGET.
4821 If TARGET is CONST0_RTX, it means that the value will be ignored.
4823 If TMODE is not VOIDmode, it suggests generating the
4824 result in mode TMODE. But this is done only when convenient.
4825 Otherwise, TMODE is ignored and the value is generated in its natural mode.
4826 TMODE is just a suggestion; callers must assume that
4827 the rtx returned may not have mode TMODE.
4829 Note that TARGET may have neither TMODE nor MODE. In that case, it
4830 probably will not be used.
4832 If MODIFIER is EXPAND_SUM then when EXP is an addition
4833 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4834 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4835 products as above, or REG or MEM, or constant.
4836 Ordinarily in such cases we would output mul or add instructions
4837 and then return a pseudo reg containing the sum.
4839 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4840 it also marks a label as absolutely required (it can't be dead).
4841 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
4842 This is used for outputting expressions used in initializers.
4844 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4845 with a constant address even if that address is not normally legitimate.
4846 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
4849 expand_expr (exp, target, tmode, modifier)
4852 enum machine_mode tmode;
4853 enum expand_modifier modifier;
4855 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4856 This is static so it will be accessible to our recursive callees. */
4857 static tree placeholder_list = 0;
4858 register rtx op0, op1, temp;
4859 tree type = TREE_TYPE (exp);
4860 int unsignedp = TREE_UNSIGNED (type);
4861 register enum machine_mode mode = TYPE_MODE (type);
4862 register enum tree_code code = TREE_CODE (exp);
4864 /* Use subtarget as the target for operand 0 of a binary operation. */
4865 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4866 rtx original_target = target;
4867 /* Maybe defer this until sure not doing bytecode? */
4868 int ignore = (target == const0_rtx
4869 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4870 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4871 || code == COND_EXPR)
4872 && TREE_CODE (type) == VOID_TYPE));
4874 /* Used by check-memory-usage to make modifier read only. */
4875 enum expand_modifier ro_modifier;
4877 /* Make a read-only version of the modifier. */
4878 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
4879 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
4880 ro_modifier = modifier;
4882 ro_modifier = EXPAND_NORMAL;
4884 if (output_bytecode && modifier != EXPAND_INITIALIZER)
4886 bc_expand_expr (exp);
4890 /* Don't use hard regs as subtargets, because the combiner
4891 can only handle pseudo regs. */
4892 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4894 /* Avoid subtargets inside loops,
4895 since they hide some invariant expressions. */
4896 if (preserve_subexpressions_p ())
4899 /* If we are going to ignore this result, we need only do something
4900 if there is a side-effect somewhere in the expression. If there
4901 is, short-circuit the most common cases here. Note that we must
4902 not call expand_expr with anything but const0_rtx in case this
4903 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
4907 if (! TREE_SIDE_EFFECTS (exp))
4910 /* Ensure we reference a volatile object even if value is ignored. */
4911 if (TREE_THIS_VOLATILE (exp)
4912 && TREE_CODE (exp) != FUNCTION_DECL
4913 && mode != VOIDmode && mode != BLKmode)
4915 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
4916 if (GET_CODE (temp) == MEM)
4917 temp = copy_to_reg (temp);
4921 if (TREE_CODE_CLASS (code) == '1')
4922 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4923 VOIDmode, ro_modifier);
4924 else if (TREE_CODE_CLASS (code) == '2'
4925 || TREE_CODE_CLASS (code) == '<')
4927 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
4928 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
4931 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4932 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4933 /* If the second operand has no side effects, just evaluate the first. */
4935 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4936 VOIDmode, ro_modifier);
4941 /* If we will do cse, generate all results into pseudo registers
4942 since 1) that allows cse to find more things
4943 and 2) otherwise cse could produce an insn the machine cannot support. */
4946 if (! cse_not_expected && mode != BLKmode && target
4947 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4954 tree function = decl_function_context (exp);
4955 /* Handle using a label in a containing function. */
4956 if (function != current_function_decl
4957 && function != inline_function_decl && function != 0)
4959 struct function *p = find_function_data (function);
4960 /* Allocate in the memory associated with the function
4961 that the label is in. */
4962 push_obstacks (p->function_obstack,
4963 p->function_maybepermanent_obstack);
4965 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4966 label_rtx (exp), p->forced_labels);
4969 else if (modifier == EXPAND_INITIALIZER)
4970 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4971 label_rtx (exp), forced_labels);
4972 temp = gen_rtx (MEM, FUNCTION_MODE,
4973 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
4974 if (function != current_function_decl
4975 && function != inline_function_decl && function != 0)
4976 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
4981 if (DECL_RTL (exp) == 0)
4983 error_with_decl (exp, "prior parameter's size depends on `%s'");
4984 return CONST0_RTX (mode);
4987 /* ... fall through ... */
4990 /* If a static var's type was incomplete when the decl was written,
4991 but the type is complete now, lay out the decl now. */
4992 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4993 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
4995 push_obstacks_nochange ();
4996 end_temporary_allocation ();
4997 layout_decl (exp, 0);
4998 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5002 /* Only check automatic variables. Currently, function arguments are
5003 not checked (this can be done at compile-time with prototypes).
5004 Aggregates are not checked. */
5005 if (flag_check_memory_usage && code == VAR_DECL
5006 && GET_CODE (DECL_RTL (exp)) == MEM
5007 && DECL_CONTEXT (exp) != NULL_TREE
5008 && ! TREE_STATIC (exp)
5009 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5011 enum memory_use_mode memory_usage;
5012 memory_usage = get_memory_usage_from_modifier (modifier);
5014 if (memory_usage != MEMORY_USE_DONT)
5015 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5016 XEXP (DECL_RTL (exp), 0), ptr_mode,
5017 GEN_INT (int_size_in_bytes (type)),
5018 TYPE_MODE (sizetype),
5019 GEN_INT (memory_usage), QImode);
5022 /* ... fall through ... */
5026 if (DECL_RTL (exp) == 0)
5029 /* Ensure the variable is marked as used even if it doesn't go through
5030 a parser. If it hasn't been used yet, write out an external definition. */
5032 if (! TREE_USED (exp))
5034 assemble_external (exp);
5035 TREE_USED (exp) = 1;
5038 /* Show we haven't gotten RTL for this yet. */
5041 /* Handle variables inherited from containing functions. */
5042 context = decl_function_context (exp);
5044 /* We treat inline_function_decl as an alias for the current function
5045 because that is the inline function whose vars, types, etc.
5046 are being merged into the current function.
5047 See expand_inline_function. */
5049 if (context != 0 && context != current_function_decl
5050 && context != inline_function_decl
5051 /* If var is static, we don't need a static chain to access it. */
5052 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5053 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5057 /* Mark as non-local and addressable. */
5058 DECL_NONLOCAL (exp) = 1;
5059 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5061 mark_addressable (exp);
5062 if (GET_CODE (DECL_RTL (exp)) != MEM)
5064 addr = XEXP (DECL_RTL (exp), 0);
5065 if (GET_CODE (addr) == MEM)
5066 addr = gen_rtx (MEM, Pmode,
5067 fix_lexical_addr (XEXP (addr, 0), exp));
5069 addr = fix_lexical_addr (addr, exp);
5070 temp = change_address (DECL_RTL (exp), mode, addr);
5073 /* This is the case of an array whose size is to be determined
5074 from its initializer, while the initializer is still being parsed. See expand_decl. */
5077 else if (GET_CODE (DECL_RTL (exp)) == MEM
5078 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5079 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5080 XEXP (DECL_RTL (exp), 0));
5082 /* If DECL_RTL is memory, we are in the normal case and either
5083 the address is not valid or it is not a register and -fforce-addr
5084 is specified, get the address into a register. */
5086 else if (GET_CODE (DECL_RTL (exp)) == MEM
5087 && modifier != EXPAND_CONST_ADDRESS
5088 && modifier != EXPAND_SUM
5089 && modifier != EXPAND_INITIALIZER
5090 && (! memory_address_p (DECL_MODE (exp),
5091 XEXP (DECL_RTL (exp), 0))
5093 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5094 temp = change_address (DECL_RTL (exp), VOIDmode,
5095 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5097 /* If we got something, return it. But first, set the alignment
5098 if the address is a register. */
5101 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5102 mark_reg_pointer (XEXP (temp, 0),
5103 DECL_ALIGN (exp) / BITS_PER_UNIT);
5108 /* If the mode of DECL_RTL does not match that of the decl, it
5109 must be a promoted value. We return a SUBREG of the wanted mode,
5110 but mark it so that we know that it was already extended. */
5112 if (GET_CODE (DECL_RTL (exp)) == REG
5113 && GET_MODE (DECL_RTL (exp)) != mode)
5115 /* Get the signedness used for this variable. Ensure we get the
5116 same mode we got when the variable was declared. */
5117 if (GET_MODE (DECL_RTL (exp))
5118 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5121 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
5122 SUBREG_PROMOTED_VAR_P (temp) = 1;
5123 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5127 return DECL_RTL (exp);
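/* Illustrative note (not from the original sources): on a target that
   promotes, say, HImode locals to SImode registers, DECL_RTL is
   (reg:SI n) while the expression's mode is HImode; the code above
   then returns (subreg:HI (reg:SI n) 0) with SUBREG_PROMOTED_VAR_P
   set, so later code knows the value is already extended.  */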
5130 return immed_double_const (TREE_INT_CST_LOW (exp),
5131 TREE_INT_CST_HIGH (exp),
5135 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5136 EXPAND_MEMORY_USE_BAD);
5139 /* If optimized, generate immediate CONST_DOUBLE
5140 which will be turned into memory by reload if necessary.
5142 We used to force a register so that loop.c could see it. But
5143 this does not allow gen_* patterns to perform optimizations with
5144 the constants. It also produces two insns in cases like "x = 1.0;".
5145 On most machines, floating-point constants are not permitted in
5146 many insns, so we'd end up copying it to a register in any case.
5148 Now, we do the copying in expand_binop, if appropriate. */
5149 return immed_real_const (exp);
5153 if (! TREE_CST_RTL (exp))
5154 output_constant_def (exp);
5156 /* TREE_CST_RTL probably contains a constant address.
5157 On RISC machines where a constant address isn't valid,
5158 make some insns to get that address into a register. */
5159 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5160 && modifier != EXPAND_CONST_ADDRESS
5161 && modifier != EXPAND_INITIALIZER
5162 && modifier != EXPAND_SUM
5163 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5165 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5166 return change_address (TREE_CST_RTL (exp), VOIDmode,
5167 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5168 return TREE_CST_RTL (exp);
5171 context = decl_function_context (exp);
5173 /* If this SAVE_EXPR was at global context, assume we are an
5174 initialization function and move it into our context. */
5176 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
5178 /* We treat inline_function_decl as an alias for the current function
5179 because that is the inline function whose vars, types, etc.
5180 are being merged into the current function.
5181 See expand_inline_function. */
5182 if (context == current_function_decl || context == inline_function_decl)
5185 /* If this is non-local, handle it. */
5188 /* The following call just exists to abort if the context is
5189 not of a containing function. */
5190 find_function_data (context);
5192 temp = SAVE_EXPR_RTL (exp);
5193 if (temp && GET_CODE (temp) == REG)
5195 put_var_into_stack (exp);
5196 temp = SAVE_EXPR_RTL (exp);
5198 if (temp == 0 || GET_CODE (temp) != MEM)
5200 return change_address (temp, mode,
5201 fix_lexical_addr (XEXP (temp, 0), exp));
5203 if (SAVE_EXPR_RTL (exp) == 0)
5205 if (mode == VOIDmode)
5208 temp = assign_temp (type, 0, 0, 0);
5210 SAVE_EXPR_RTL (exp) = temp;
5211 if (!optimize && GET_CODE (temp) == REG)
5212 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
5215 /* If the mode of TEMP does not match that of the expression, it
5216 must be a promoted value. We pass store_expr a SUBREG of the
5217 wanted mode but mark it so that we know that it was already
5218 extended. Note that `unsignedp' was modified above in this case. */
5221 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5223 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
5224 SUBREG_PROMOTED_VAR_P (temp) = 1;
5225 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5228 if (temp == const0_rtx)
5229 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5230 EXPAND_MEMORY_USE_BAD);
5232 store_expr (TREE_OPERAND (exp, 0), temp, 0);
5235 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5236 must be a promoted value. We return a SUBREG of the wanted mode,
5237 but mark it so that we know that it was already extended. */
5239 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5240 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5242 /* Compute the signedness and make the proper SUBREG. */
5243 promote_mode (type, mode, &unsignedp, 0);
5244 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
5245 SUBREG_PROMOTED_VAR_P (temp) = 1;
5246 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5250 return SAVE_EXPR_RTL (exp);
5255 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5256 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5260 case PLACEHOLDER_EXPR:
5262 tree placeholder_expr;
5264 /* If there is an object on the head of the placeholder list,
5265 see if some object in its references is of type TYPE. For
5266 further information, see tree.def. */
5267 for (placeholder_expr = placeholder_list;
5268 placeholder_expr != 0;
5269 placeholder_expr = TREE_CHAIN (placeholder_expr))
5271 tree need_type = TYPE_MAIN_VARIANT (type);
5273 tree old_list = placeholder_list;
5276 /* See if the object is the type that we want. */
5277 if ((TYPE_MAIN_VARIANT (TREE_TYPE
5278 (TREE_PURPOSE (placeholder_expr)))
5280 object = TREE_PURPOSE (placeholder_expr);
5282 /* Find the innermost reference that is of the type we want. */
5283 for (elt = TREE_PURPOSE (placeholder_expr);
5285 && (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5286 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5287 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5288 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e');
5289 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
5290 || TREE_CODE (elt) == COND_EXPR)
5291 ? TREE_OPERAND (elt, 1) : TREE_OPERAND (elt, 0)))
5292 if (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5293 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (elt, 0)))
5296 object = TREE_OPERAND (elt, 0);
5302 /* Expand this object skipping the list entries before
5303 it was found in case it is also a PLACEHOLDER_EXPR.
5304 In that case, we want to translate it using subsequent entries. */
5306 placeholder_list = TREE_CHAIN (placeholder_expr);
5307 temp = expand_expr (object, original_target, tmode,
5309 placeholder_list = old_list;
5315 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5318 case WITH_RECORD_EXPR:
5319 /* Put the object on the placeholder list, expand our first operand,
5320 and pop the list. */
5321 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5323 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
5324 tmode, ro_modifier);
5325 placeholder_list = TREE_CHAIN (placeholder_list);
5329 expand_exit_loop_if_false (NULL_PTR,
5330 invert_truthvalue (TREE_OPERAND (exp, 0)));
5335 expand_start_loop (1);
5336 expand_expr_stmt (TREE_OPERAND (exp, 0));
5344 tree vars = TREE_OPERAND (exp, 0);
5345 int vars_need_expansion = 0;
5347 /* Need to open a binding contour here because
5348 if there are any cleanups they must be contained here. */
5349 expand_start_bindings (0);
5351 /* Mark the corresponding BLOCK for output in its proper place. */
5352 if (TREE_OPERAND (exp, 2) != 0
5353 && ! TREE_USED (TREE_OPERAND (exp, 2)))
5354 insert_block (TREE_OPERAND (exp, 2));
5356 /* If VARS have not yet been expanded, expand them now. */
5359 if (DECL_RTL (vars) == 0)
5361 vars_need_expansion = 1;
5364 expand_decl_init (vars);
5365 vars = TREE_CHAIN (vars);
5368 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
5370 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
5376 if (RTL_EXPR_SEQUENCE (exp))
5378 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
5380 emit_insns (RTL_EXPR_SEQUENCE (exp));
5381 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
5383 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
5384 free_temps_for_rtl_expr (exp);
5385 return RTL_EXPR_RTL (exp);
5388 /* If we don't need the result, just ensure we evaluate any subexpressions. */
5393 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
5394 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
5395 EXPAND_MEMORY_USE_BAD);
5399 /* All elts simple constants => refer to a constant in memory. But
5400 if this is a non-BLKmode mode, let it store a field at a time
5401 since that should make a CONST_INT or CONST_DOUBLE when we
5402 fold. Likewise, if we have a target we can use, it is best to
5403 store directly into the target unless the type is large enough
5404 that memcpy will be used. If we are making an initializer and
5405 all operands are constant, put it in memory as well. */
5406 else if ((TREE_STATIC (exp)
5407 && ((mode == BLKmode
5408 && ! (target != 0 && safe_from_p (target, exp)))
5409 || TREE_ADDRESSABLE (exp)
5410 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5411 && (move_by_pieces_ninsns
5412 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
5413 TYPE_ALIGN (type) / BITS_PER_UNIT)
5415 && ! mostly_zeros_p (exp))))
5416 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
5418 rtx constructor = output_constant_def (exp);
5419 if (modifier != EXPAND_CONST_ADDRESS
5420 && modifier != EXPAND_INITIALIZER
5421 && modifier != EXPAND_SUM
5422 && (! memory_address_p (GET_MODE (constructor),
5423 XEXP (constructor, 0))
5425 && GET_CODE (XEXP (constructor, 0)) != REG)))
5426 constructor = change_address (constructor, VOIDmode,
5427 XEXP (constructor, 0));
5433 /* Handle calls that pass values in multiple non-contiguous
5434 locations. The Irix 6 ABI has examples of this. */
5435 if (target == 0 || ! safe_from_p (target, exp)
5436 || GET_CODE (target) == PARALLEL)
5438 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
5439 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5441 target = assign_temp (type, 0, 1, 1);
5444 if (TREE_READONLY (exp))
5446 if (GET_CODE (target) == MEM)
5447 target = copy_rtx (target);
5449 RTX_UNCHANGING_P (target) = 1;
5452 store_constructor (exp, target, 0);
5458 tree exp1 = TREE_OPERAND (exp, 0);
5461 tree string = string_constant (exp1, &index);
5465 && TREE_CODE (string) == STRING_CST
5466 && TREE_CODE (index) == INTEGER_CST
5467 && !TREE_INT_CST_HIGH (index)
5468 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
5469 && GET_MODE_CLASS (mode) == MODE_INT
5470 && GET_MODE_SIZE (mode) == 1)
5471 return GEN_INT (TREE_STRING_POINTER (string)[i]);
5473 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
5474 op0 = memory_address (mode, op0);
5476 if (flag_check_memory_usage && !AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5478 enum memory_use_mode memory_usage;
5479 memory_usage = get_memory_usage_from_modifier (modifier);
5481 if (memory_usage != MEMORY_USE_DONT)
5482 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5484 GEN_INT (int_size_in_bytes (type)),
5485 TYPE_MODE (sizetype),
5486 GEN_INT (memory_usage), QImode);
5489 temp = gen_rtx (MEM, mode, op0);
5490 /* If address was computed by addition,
5491 mark this as an element of an aggregate. */
5492 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5493 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
5494 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
5495 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
5496 || (TREE_CODE (exp1) == ADDR_EXPR
5497 && (exp2 = TREE_OPERAND (exp1, 0))
5498 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
5499 MEM_IN_STRUCT_P (temp) = 1;
5500 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
5502 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5503 here, because, in C and C++, the fact that a location is accessed
5504 through a pointer to const does not mean that the value there can
5505 never change. Languages where it can never change should
5506 also set TREE_STATIC. */
5507 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
5512 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
5516 tree array = TREE_OPERAND (exp, 0);
5517 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5518 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5519 tree index = TREE_OPERAND (exp, 1);
5520 tree index_type = TREE_TYPE (index);
5523 /* Optimize the special-case of a zero lower bound.
5525 We convert the low_bound to sizetype to avoid some problems
5526 with constant folding. (E.g. suppose the lower bound is 1,
5527 and its mode is QI. Without the conversion, (ARRAY
5528 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5529 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5531 But sizetype isn't quite right either (especially if
5532 the low bound is negative). FIXME */
5534 if (! integer_zerop (low_bound))
5535 index = fold (build (MINUS_EXPR, index_type, index,
5536 convert (sizetype, low_bound)));
5538 /* Fold an expression like: "foo"[2].
5539 This is not done in fold so it won't happen inside &.
5540 Don't fold if this is for wide characters since it's too
5541 difficult to do correctly and this is a very rare case. */
5543 if (TREE_CODE (array) == STRING_CST
5544 && TREE_CODE (index) == INTEGER_CST
5545 && !TREE_INT_CST_HIGH (index)
5546 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
5547 && GET_MODE_CLASS (mode) == MODE_INT
5548 && GET_MODE_SIZE (mode) == 1)
5549 return GEN_INT (TREE_STRING_POINTER (array)[i]);
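/* Illustrative note (not from the original sources): this folds an
   access like "foo"[2] straight to GEN_INT ('o'), but only for
   single-byte integer modes.  */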
5551 /* If this is a constant index into a constant array,
5552 just get the value from the array. Handle both the cases when
5553 we have an explicit constructor and when our operand is a variable
5554 that was declared const. */
5556 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
5558 if (TREE_CODE (index) == INTEGER_CST
5559 && TREE_INT_CST_HIGH (index) == 0)
5561 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
5563 i = TREE_INT_CST_LOW (index);
5565 elem = TREE_CHAIN (elem);
5567 return expand_expr (fold (TREE_VALUE (elem)), target,
5568 tmode, ro_modifier);
5572 else if (optimize >= 1
5573 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
5574 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
5575 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
5577 if (TREE_CODE (index) == INTEGER_CST)
5579 tree init = DECL_INITIAL (array);
5581 i = TREE_INT_CST_LOW (index);
5582 if (TREE_CODE (init) == CONSTRUCTOR)
5584 tree elem = CONSTRUCTOR_ELTS (init);
5587 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
5588 elem = TREE_CHAIN (elem);
5590 return expand_expr (fold (TREE_VALUE (elem)), target,
5591 tmode, ro_modifier);
5593 else if (TREE_CODE (init) == STRING_CST
5594 && TREE_INT_CST_HIGH (index) == 0
5595 && (TREE_INT_CST_LOW (index)
5596 < TREE_STRING_LENGTH (init)))
5598 (TREE_STRING_POINTER
5599 (init)[TREE_INT_CST_LOW (index)]));
5604 /* ... fall through ... */
5608 /* If the operand is a CONSTRUCTOR, we can just extract the
5609 appropriate field if it is present. Don't do this if we have
5610 already written the data since we want to refer to that copy
5611 and varasm.c assumes that's what we'll do. */
5612 if (code != ARRAY_REF
5613 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
5614 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
5618 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
5619 elt = TREE_CHAIN (elt))
5620 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
5621 /* We can normally use the value of the field in the
5622 CONSTRUCTOR. However, if this is a bitfield in
5623 an integral mode that we can fit in a HOST_WIDE_INT,
5624 we must mask only the number of bits in the bitfield,
5625 since this is done implicitly by the constructor. If
5626 the bitfield does not meet either of those conditions,
5627 we can't do this optimization. */
5628 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
5629 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
5631 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
5632 <= HOST_BITS_PER_WIDE_INT))))
5634 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
5635 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
5637 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
5638 enum machine_mode imode
5639 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
5641 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
5643 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
5644 op0 = expand_and (op0, op1, target);
5649 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize, 0);
5651 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
5653 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
5663 enum machine_mode mode1;
5669 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
5670 &mode1, &unsignedp, &volatilep,
5673 /* If we got back the original object, something is wrong. Perhaps
5674 we are evaluating an expression too early. In any event, don't
5675 infinitely recurse. */
5679 /* If TEM's type is a union of variable size, pass TARGET to the inner
5680 computation, since it will need a temporary and TARGET is known
5681 to suffice. This occurs in unchecked conversion in Ada. */
5683 op0 = expand_expr (tem,
5684 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
5685 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
5687 ? target : NULL_RTX),
5689 modifier == EXPAND_INITIALIZER ? modifier : 0);
5691 /* If this is a constant, put it into a register if it is a
5692 legitimate constant and memory if it isn't. */
5693 if (CONSTANT_P (op0))
5695 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
5696 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
5697 op0 = force_reg (mode, op0);
5699 op0 = validize_mem (force_const_mem (mode, op0));
5704 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5706 if (GET_CODE (op0) != MEM)
5708 op0 = change_address (op0, VOIDmode,
5709 gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
5710 force_reg (ptr_mode, offset_rtx)));
5713 /* Don't forget about volatility even if this is a bitfield. */
5714 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
5716 op0 = copy_rtx (op0);
5717 MEM_VOLATILE_P (op0) = 1;
5720 /* Check the access. */
5721 if (flag_check_memory_usage && GET_CODE (op0) == MEM)
5723 enum memory_use_mode memory_usage;
5724 memory_usage = get_memory_usage_from_modifier (modifier);
5726 if (memory_usage != MEMORY_USE_DONT)
5731 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
5732 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
5734 /* Check the access right of the pointer. */
5735 if (size > BITS_PER_UNIT)
5736 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5738 GEN_INT (size / BITS_PER_UNIT),
5739 TYPE_MODE (sizetype),
5740 GEN_INT (memory_usage), QImode);
5744 /* In cases where an aligned union has an unaligned object
5745 as a field, we might be extracting a BLKmode value from
5746 an integer-mode (e.g., SImode) object. Handle this case
5747 by doing the extract into an object as wide as the field
5748 (which we know to be the width of a basic mode), then
5749 storing into memory, and changing the mode to BLKmode.
5750 If we ultimately want the address (EXPAND_CONST_ADDRESS or
5751 EXPAND_INITIALIZER), then we must not copy to a temporary. */
5752 if (mode1 == VOIDmode
5753 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5754 || (modifier != EXPAND_CONST_ADDRESS
5755 && modifier != EXPAND_INITIALIZER
5756 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
5757 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5758 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5759 /* If the field isn't aligned enough to fetch as a memref,
5760 fetch it as a bit field. */
5761 || (SLOW_UNALIGNED_ACCESS
5762 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
5763 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
5765 enum machine_mode ext_mode = mode;
5767 if (ext_mode == BLKmode)
5768 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
5770 if (ext_mode == BLKmode)
5772 /* In this case, BITPOS must start at a byte boundary and
5773 TARGET, if specified, must be a MEM. */
5774 if (GET_CODE (op0) != MEM
5775 || (target != 0 && GET_CODE (target) != MEM)
5776 || bitpos % BITS_PER_UNIT != 0)
5779 op0 = change_address (op0, VOIDmode,
5780 plus_constant (XEXP (op0, 0),
5781 bitpos / BITS_PER_UNIT));
5783 target = assign_temp (type, 0, 1, 1);
5785 emit_block_move (target, op0,
5786 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5793 op0 = validize_mem (op0);
5795 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
5796 mark_reg_pointer (XEXP (op0, 0), alignment);
5798 op0 = extract_bit_field (op0, bitsize, bitpos,
5799 unsignedp, target, ext_mode, ext_mode,
5801 int_size_in_bytes (TREE_TYPE (tem)));
5803 /* If the result is a record type and BITSIZE is narrower than
5804 the mode of OP0, an integral mode, and this is a big endian
5805 machine, we must put the field into the high-order bits. */
5806 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
5807 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
5808 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
5809 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
5810 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
5814 if (mode == BLKmode)
5816 rtx new = assign_stack_temp (ext_mode,
5817 bitsize / BITS_PER_UNIT, 0);
5819 emit_move_insn (new, op0);
5820 op0 = copy_rtx (new);
5821 PUT_MODE (op0, BLKmode);
5822 MEM_IN_STRUCT_P (op0) = 1;
5828 /* If the result is BLKmode, use that to access the object now as well. */
5830 if (mode == BLKmode)
5833 /* Get a reference to just this component. */
5834 if (modifier == EXPAND_CONST_ADDRESS
5835 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5836 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
5837 (bitpos / BITS_PER_UNIT)));
5839 op0 = change_address (op0, mode1,
5840 plus_constant (XEXP (op0, 0),
5841 (bitpos / BITS_PER_UNIT)));
5842 if (GET_CODE (XEXP (op0, 0)) == REG)
5843 mark_reg_pointer (XEXP (op0, 0), alignment);
5845 MEM_IN_STRUCT_P (op0) = 1;
5846 MEM_VOLATILE_P (op0) |= volatilep;
5847 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
5848 || modifier == EXPAND_CONST_ADDRESS
5849 || modifier == EXPAND_INITIALIZER)
5851 else if (target == 0)
5852 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5854 convert_move (target, op0, unsignedp);
5858 /* Intended for a reference to a buffer of a file-object in Pascal.
5859 But it's not certain that a special tree code will really be
5860 necessary for these. INDIRECT_REF might work for them. */
5866 /* Pascal set IN expression.
5869 rlo = set_low - (set_low%bits_per_word);
5870 the_word = set [ (index - rlo)/bits_per_word ];
5871 bit_index = index % bits_per_word;
5872 bitmask = 1 << bit_index;
5873 return !!(the_word & bitmask); */
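	  /* Illustrative sketch (not from the original sources): the formula
	     above as plain C, for a set stored as an array of bytes with
	     BITS_PER_UNIT == 8; note the code below divides by BITS_PER_UNIT,
	     i.e. it indexes bytes rather than words.  */
#if 0
static int
set_in (unsigned char *set, int set_low, int index)
{
  int rlo = set_low - (set_low % 8);
  unsigned char the_word = set[(index - rlo) / 8];

  return (the_word >> (index % 8)) & 1;
}
#endif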
5875 tree set = TREE_OPERAND (exp, 0);
5876 tree index = TREE_OPERAND (exp, 1);
5877 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
5878 tree set_type = TREE_TYPE (set);
5879 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
5880 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
5881 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
5882 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
5883 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
5884 rtx setval = expand_expr (set, 0, VOIDmode, 0);
5885 rtx setaddr = XEXP (setval, 0);
5886 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
5888 rtx diff, quo, rem, addr, bit, result;
5890 preexpand_calls (exp);
5892 /* If domain is empty, answer is no. Likewise if index is constant
5893 and out of bounds. */
5894 if ((TREE_CODE (set_high_bound) == INTEGER_CST
5895 && TREE_CODE (set_low_bound) == INTEGER_CST
5896 && tree_int_cst_lt (set_high_bound, set_low_bound)
5897 || (TREE_CODE (index) == INTEGER_CST
5898 && TREE_CODE (set_low_bound) == INTEGER_CST
5899 && tree_int_cst_lt (index, set_low_bound))
5900 || (TREE_CODE (set_high_bound) == INTEGER_CST
5901 && TREE_CODE (index) == INTEGER_CST
5902 && tree_int_cst_lt (set_high_bound, index))))
5906 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5908 /* If we get here, we have to generate the code for both cases
5909 (in range and out of range). */
5911 op0 = gen_label_rtx ();
5912 op1 = gen_label_rtx ();
5914 if (! (GET_CODE (index_val) == CONST_INT
5915 && GET_CODE (lo_r) == CONST_INT))
5917 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
5918 GET_MODE (index_val), iunsignedp, 0);
5919 emit_jump_insn (gen_blt (op1));
5922 if (! (GET_CODE (index_val) == CONST_INT
5923 && GET_CODE (hi_r) == CONST_INT))
5925 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
5926 GET_MODE (index_val), iunsignedp, 0);
5927 emit_jump_insn (gen_bgt (op1));
5930 /* Calculate the element number of bit zero in the first word of the set. */
5932 if (GET_CODE (lo_r) == CONST_INT)
5933 rlow = GEN_INT (INTVAL (lo_r)
5934 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
5936 rlow = expand_binop (index_mode, and_optab, lo_r,
5937 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
5938 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5940 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
5941 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5943 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
5944 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5945 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
5946 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5948 addr = memory_address (byte_mode,
5949 expand_binop (index_mode, add_optab, diff,
5950 setaddr, NULL_RTX, iunsignedp,
5953 /* Extract the bit we want to examine */
5954 bit = expand_shift (RSHIFT_EXPR, byte_mode,
5955 gen_rtx (MEM, byte_mode, addr),
5956 make_tree (TREE_TYPE (index), rem),
5958 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
5959 GET_MODE (target) == byte_mode ? target : 0,
5960 1, OPTAB_LIB_WIDEN);
5962 if (result != target)
5963 convert_move (target, result, 1);
5965 /* Output the code to handle the out-of-range case. */
5968 emit_move_insn (target, const0_rtx);
5973 case WITH_CLEANUP_EXPR:
5974 if (RTL_EXPR_RTL (exp) == 0)
5977 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
5978 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
5980 /* That's it for this cleanup. */
5981 TREE_OPERAND (exp, 2) = 0;
5983 return RTL_EXPR_RTL (exp);
5985 case CLEANUP_POINT_EXPR:
5987 extern int temp_slot_level;
5988 /* Start a new binding layer that will keep track of all cleanup
5989 actions to be performed. */
5990 expand_start_bindings (0);
5992 target_temp_slot_level = temp_slot_level;
5994 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
5995 /* If we're going to use this value, load it up now. */
5997 op0 = force_not_mem (op0);
5998 preserve_temp_slots (op0);
5999 expand_end_bindings (NULL_TREE, 0, 0);
6004 /* Check for a built-in function. */
6005 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6006 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6008 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6009 return expand_builtin (exp, target, subtarget, tmode, ignore);
6011 /* If this call was expanded already by preexpand_calls,
6012 just return the result we got. */
6013 if (CALL_EXPR_RTL (exp) != 0)
6014 return CALL_EXPR_RTL (exp);
6016 return expand_call (exp, target, ignore);
6018 case NON_LVALUE_EXPR:
6021 case REFERENCE_EXPR:
6022 if (TREE_CODE (type) == UNION_TYPE)
6024 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6027 if (mode != BLKmode)
6028 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6030 target = assign_temp (type, 0, 1, 1);
6033 if (GET_CODE (target) == MEM)
6034 /* Store data into beginning of memory target. */
6035 store_expr (TREE_OPERAND (exp, 0),
6036 change_address (target, TYPE_MODE (valtype), 0), 0);
6038 else if (GET_CODE (target) == REG)
6039 /* Store this field into a union of the proper type. */
6040 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6041 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6043 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
6047 /* Return the entire union. */
6051 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6053 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
6056 /* If the signedness of the conversion differs and OP0 is
6057 a promoted SUBREG, clear that indication since we now
6058 have to do the proper extension. */
6059 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6060 && GET_CODE (op0) == SUBREG)
6061 SUBREG_PROMOTED_VAR_P (op0) = 0;
6066 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
6067 if (GET_MODE (op0) == mode)
6070 /* If OP0 is a constant, just convert it into the proper mode. */
6071 if (CONSTANT_P (op0))
6073 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6074 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6076 if (modifier == EXPAND_INITIALIZER)
6077 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
6081 convert_to_mode (mode, op0,
6082 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6084 convert_move (target, op0,
6085 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6089 /* We come here from MINUS_EXPR when the second operand is a constant. */
6092 this_optab = add_optab;
6094 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6095 something else, make sure we add the register to the constant and
6096 then to the other thing. This case can occur during strength
6097 reduction and doing it this way will produce better code if the
6098 frame pointer or argument pointer is eliminated.
6100 fold-const.c will ensure that the constant is always in the inner
6101 PLUS_EXPR, so the only case we need to do anything about is if
6102 sp, ap, or fp is our second argument, in which case we must swap
6103 the innermost first argument and our second argument. */
6105 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6106 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6107 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6108 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6109 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6110 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6112 tree t = TREE_OPERAND (exp, 1);
6114 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6115 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
6118 /* If the result is to be ptr_mode and we are adding an integer to
6119 something, we might be forming a constant. So try to use
6120 plus_constant. If it produces a sum and we can't accept it,
6121 use force_operand. This allows P = &ARR[const] to generate
6122 efficient code on machines where a SYMBOL_REF is not a valid address.
6125 If this is an EXPAND_SUM call, always return the sum. */
6126 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
6127 || mode == ptr_mode)
6129 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
6130 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6131 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
6133 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
6135 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
6136 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6137 op1 = force_operand (op1, target);
6141 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6142 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
6143 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
6145 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6147 if (! CONSTANT_P (op0))
6149 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6150 VOIDmode, modifier);
6151 /* Don't go to both_summands if modifier
6152 says it's not right to return a PLUS. */
6153 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6157 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
6158 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6159 op0 = force_operand (op0, target);
6164 /* No sense saving up arithmetic to be done
6165 if it's all in the wrong mode to form part of an address.
6166 And force_operand won't know whether to sign-extend or zero-extend. */
6168 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6169 || mode != ptr_mode)
6172 preexpand_calls (exp);
6173 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6176 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
6177 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
6180 /* Make sure any term that's a sum with a constant comes last. */
6181 if (GET_CODE (op0) == PLUS
6182 && CONSTANT_P (XEXP (op0, 1)))
6188 /* If adding to a sum including a constant,
6189 associate it to put the constant outside. */
6190 if (GET_CODE (op1) == PLUS
6191 && CONSTANT_P (XEXP (op1, 1)))
6193 rtx constant_term = const0_rtx;
6195 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
6198 /* Ensure that MULT comes first if there is one. */
6199 else if (GET_CODE (op0) == MULT)
6200 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
6202 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
6204 /* Let's also eliminate constants from op0 if possible. */
6205 op0 = eliminate_constant_term (op0, &constant_term);
6207 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
6208 their sum should be a constant. Form it into OP1, since the
6209 result we want will then be OP0 + OP1. */
6211 temp = simplify_binary_operation (PLUS, mode, constant_term,
6216 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
6219 /* Put a constant term last and put a multiplication first. */
6220 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
6221 temp = op1, op1 = op0, op0 = temp;
6223 temp = simplify_binary_operation (PLUS, mode, op0, op1);
6224 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
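/* Illustrative note (not from the original sources): under EXPAND_SUM,
   an address like &arr[i] can leave here as
   (plus (mult (reg i) (const_int 4)) (symbol_ref arr)) rather than as
   a pseudo register holding the sum, so the caller can use it
   directly inside a memory reference.  */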
6227 /* For initializers, we are allowed to return a MINUS of two
6228 symbolic constants. Here we handle all cases when both operands are constant. */
6230 /* Handle difference of two symbolic constants,
6231 for the sake of an initializer. */
6232 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6233 && really_constant_p (TREE_OPERAND (exp, 0))
6234 && really_constant_p (TREE_OPERAND (exp, 1)))
6236 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
6237 VOIDmode, ro_modifier);
6238 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6239 VOIDmode, ro_modifier);
6241 /* If the last operand is a CONST_INT, use plus_constant of
6242 the negated constant. Else make the MINUS. */
6243 if (GET_CODE (op1) == CONST_INT)
6244 return plus_constant (op0, - INTVAL (op1));
6246 return gen_rtx (MINUS, mode, op0, op1);
6248 /* Convert A - const to A + (-const). */
6249 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6251 tree negated = fold (build1 (NEGATE_EXPR, type,
6252 TREE_OPERAND (exp, 1)));
6254 /* Deal with the case where we can't negate the constant in TYPE. */
6256 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6258 tree newtype = signed_type (type);
6259 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
6260 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
6261 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
6263 if (! TREE_OVERFLOW (newneg))
6264 return expand_expr (convert (type,
6265 build (PLUS_EXPR, newtype,
6267 target, tmode, ro_modifier);
6271 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
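/* For example (hypothetical): `x - 7' has just been rewritten as
`x + (-7)', letting the PLUS_EXPR code above fold the constant into
address arithmetic; for unsigned X the negation was first done in the
corresponding signed type to avoid spurious overflow. */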
6275 this_optab = sub_optab;
6279 preexpand_calls (exp);
6280 /* If first operand is constant, swap them.
6281 Thus the following special case checks need only
6282 check the second operand. */
6283 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6285 register tree t1 = TREE_OPERAND (exp, 0);
6286 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
6287 TREE_OPERAND (exp, 1) = t1;
6290 /* Attempt to return something suitable for generating an
6291 indexed address, for machines that support that. */
6293 if (modifier == EXPAND_SUM && mode == ptr_mode
6294 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6295 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
6297 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6300 /* Apply distributive law if OP0 is x+c. */
6301 if (GET_CODE (op0) == PLUS
6302 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
6303 return gen_rtx (PLUS, mode,
6304 gen_rtx (MULT, mode, XEXP (op0, 0),
6305 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
6306 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
6307 * INTVAL (XEXP (op0, 1))));
6309 if (GET_CODE (op0) != REG)
6310 op0 = force_operand (op0, NULL_RTX);
6311 if (GET_CODE (op0) != REG)
6312 op0 = copy_to_mode_reg (mode, op0);
6314 return gen_rtx (MULT, mode, op0,
6315 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
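/* E.g. (hypothetical): when expanding `(i + 4) * 8' as part of an
address, the distributive law above returns (plus (mult i 8) 32), a
shape that matches indexed addressing modes directly. */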
6318 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6321 /* Check for multiplying things that have been extended
6322 from a narrower type. If this machine supports multiplying
6323 in that narrower type with a result in the desired type,
6324 do it that way, and avoid the explicit type-conversion. */
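/* E.g. (hypothetical user code): for `short a, b; ... (int) a * (int) b',
both operands are NOP_EXPR extensions from HImode, so on a machine with
a widening-multiply pattern (smul_widen_optab here) we can multiply the
HImode values directly and never emit the explicit extensions. */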
6325 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
6326 && TREE_CODE (type) == INTEGER_TYPE
6327 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6328 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
6329 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6330 && int_fits_type_p (TREE_OPERAND (exp, 1),
6331 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6332 /* Don't use a widening multiply if a shift will do. */
6333 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
6334 > HOST_BITS_PER_WIDE_INT)
6335 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
6337 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6338 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6340 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
6341 /* If both operands are extended, they must either both
6342 be zero-extended or both be sign-extended. */
6343 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6345 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
6347 enum machine_mode innermode
6348 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
6349 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6350 ? smul_widen_optab : umul_widen_optab);
6351 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6352 ? umul_widen_optab : smul_widen_optab);
6353 if (mode == GET_MODE_WIDER_MODE (innermode))
6355 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
6357 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6358 NULL_RTX, VOIDmode, 0);
6359 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6360 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6363 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6364 NULL_RTX, VOIDmode, 0);
6367 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6368 && innermode == word_mode)
6371 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6372 NULL_RTX, VOIDmode, 0);
6373 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6374 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6377 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6378 NULL_RTX, VOIDmode, 0);
6379 temp = expand_binop (mode, other_optab, op0, op1, target,
6380 unsignedp, OPTAB_LIB_WIDEN);
6381 htem = expand_mult_highpart_adjust (innermode,
6382 gen_highpart (innermode, temp),
6384 gen_highpart (innermode, temp),
6386 emit_move_insn (gen_highpart (innermode, temp), htem);
6391 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6392 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6393 return expand_mult (mode, op0, op1, target, unsignedp);
6395 case TRUNC_DIV_EXPR:
6396 case FLOOR_DIV_EXPR:
6398 case ROUND_DIV_EXPR:
6399 case EXACT_DIV_EXPR:
6400 preexpand_calls (exp);
6401 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6403 /* Possible optimization: compute the dividend with EXPAND_SUM;
6404 then, if the divisor is constant, we can optimize the case
6405 where some terms of the dividend have coefficients divisible by it. */
6406 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6407 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6408 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
6411 this_optab = flodiv_optab;
6414 case TRUNC_MOD_EXPR:
6415 case FLOOR_MOD_EXPR:
6417 case ROUND_MOD_EXPR:
6418 preexpand_calls (exp);
6419 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6421 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6422 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6423 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
6425 case FIX_ROUND_EXPR:
6426 case FIX_FLOOR_EXPR:
6428 abort (); /* Not used for C. */
6430 case FIX_TRUNC_EXPR:
6431 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6433 target = gen_reg_rtx (mode);
6434 expand_fix (target, op0, unsignedp);
6438 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6440 target = gen_reg_rtx (mode);
6441 /* expand_float can't figure out what to do if FROM has VOIDmode.
6442 So give it the correct mode. With -O, cse will optimize this. */
6443 if (GET_MODE (op0) == VOIDmode)
6444 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6446 expand_float (target, op0,
6447 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6451 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6452 temp = expand_unop (mode, neg_optab, op0, target, 0);
6458 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6460 /* Handle complex values specially. */
6461 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
6462 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6463 return expand_complex_abs (mode, op0, target, unsignedp);
6465 /* Unsigned abs is simply the operand. Testing here means we don't
6466 risk generating incorrect code below. */
6467 if (TREE_UNSIGNED (type))
6470 return expand_abs (mode, op0, target, unsignedp,
6471 safe_from_p (target, TREE_OPERAND (exp, 0)));
6475 target = original_target;
6476 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
6477 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
6478 || GET_MODE (target) != mode
6479 || (GET_CODE (target) == REG
6480 && REGNO (target) < FIRST_PSEUDO_REGISTER))
6481 target = gen_reg_rtx (mode);
6482 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6483 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6485 /* First try to do it with a special MIN or MAX instruction.
6486 If that does not win, use a conditional jump to select the proper value. */
6488 this_optab = (TREE_UNSIGNED (type)
6489 ? (code == MIN_EXPR ? umin_optab : umax_optab)
6490 : (code == MIN_EXPR ? smin_optab : smax_optab));
6492 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
6497 /* At this point, a MEM target is no longer useful; we will get better code without it. */
6500 if (GET_CODE (target) == MEM)
6501 target = gen_reg_rtx (mode);
6504 emit_move_insn (target, op0);
6506 op0 = gen_label_rtx ();
6508 /* If this mode is an integer too wide to compare properly,
6509 compare word by word. Rely on cse to optimize constant cases. */
6510 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
6512 if (code == MAX_EXPR)
6513 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6514 target, op1, NULL_RTX, op0);
6516 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6517 op1, target, NULL_RTX, op0);
6518 emit_move_insn (target, op1);
6522 if (code == MAX_EXPR)
6523 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6524 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
6525 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
6527 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6528 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
6529 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
6530 if (temp == const0_rtx)
6531 emit_move_insn (target, op1);
6532 else if (temp != const_true_rtx)
6534 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
6535 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
6538 emit_move_insn (target, op1);
6545 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6546 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
6552 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6553 temp = expand_unop (mode, ffs_optab, op0, target, 1);
6558 /* ??? Can optimize bitwise operations with one arg constant.
6559 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6560 and (a bitwise1 b) bitwise2 b (etc)
6561 but that is probably not worthwhile. */
6563 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6564 boolean values when we want in all cases to compute both of them. In
6565 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6566 as actual zero-or-1 values and then bitwise anding. In cases where
6567 there cannot be any side effects, better code would be made by
6568 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6569 how to recognize those cases. */
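/* E.g. (hypothetical): with zero-or-one operands, `a && b' expanded as
TRUTH_AND_EXPR evaluates both A and B and emits one AND instruction;
as TRUTH_ANDIF_EXPR it would instead emit a branch that skips B
whenever A is zero. */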
6571 case TRUTH_AND_EXPR:
6573 this_optab = and_optab;
6578 this_optab = ior_optab;
6581 case TRUTH_XOR_EXPR:
6583 this_optab = xor_optab;
6590 preexpand_calls (exp);
6591 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6593 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6594 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
6597 /* Could determine the answer when only additive constants differ. Also,
6598 the addition of one can be handled by changing the condition. */
6605 preexpand_calls (exp);
6606 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
6610 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
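/* Roughly (a sketch of the insns emitted below):
temp = foo; if (temp == 0) goto L; temp = 1; L:
reusing the loaded value itself as the flag when no store-flag
instruction is available. */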
6611 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
6613 && GET_CODE (original_target) == REG
6614 && (GET_MODE (original_target)
6615 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6617 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
6620 if (temp != original_target)
6621 temp = copy_to_reg (temp);
6623 op1 = gen_label_rtx ();
6624 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
6625 GET_MODE (temp), unsignedp, 0);
6626 emit_jump_insn (gen_beq (op1));
6627 emit_move_insn (temp, const1_rtx);
6632 /* If no set-flag instruction, must generate a conditional
6633 store into a temporary variable. Drop through
6634 and handle this like && and ||. */
6636 case TRUTH_ANDIF_EXPR:
6637 case TRUTH_ORIF_EXPR:
6639 && (target == 0 || ! safe_from_p (target, exp)
6640 /* Make sure we don't have a hard reg (such as function's return
6641 value) live across basic blocks, if not optimizing. */
6642 || (!optimize && GET_CODE (target) == REG
6643 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
6644 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6647 emit_clr_insn (target);
6649 op1 = gen_label_rtx ();
6650 jumpifnot (exp, op1);
6653 emit_0_to_1_insn (target);
6656 return ignore ? const0_rtx : target;
6658 case TRUTH_NOT_EXPR:
6659 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6660 /* The parser is careful to generate TRUTH_NOT_EXPR
6661 only with operands that are always zero or one. */
6662 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
6663 target, 1, OPTAB_LIB_WIDEN);
6669 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6671 return expand_expr (TREE_OPERAND (exp, 1),
6672 (ignore ? const0_rtx : target),
6676 /* If we would have a "singleton" (see below) were it not for a
6677 conversion in each arm, bring that conversion back out. */
6678 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6679 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
6680 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
6681 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
6683 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
6684 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
6686 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
6687 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6688 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
6689 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
6690 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
6691 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6692 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
6693 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
6694 return expand_expr (build1 (NOP_EXPR, type,
6695 build (COND_EXPR, TREE_TYPE (true),
6696 TREE_OPERAND (exp, 0),
6698 target, tmode, modifier);
6702 /* Note that COND_EXPRs whose type is a structure or union
6703 are required to be constructed to contain assignments of
6704 a temporary variable, so that we can evaluate them here
6705 for side effect only. If type is void, we must do likewise. */
6707 /* If an arm of the branch requires a cleanup,
6708 only that cleanup is performed. */
6711 tree binary_op = 0, unary_op = 0;
6713 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
6714 convert it to our mode, if necessary. */
6715 if (integer_onep (TREE_OPERAND (exp, 1))
6716 && integer_zerop (TREE_OPERAND (exp, 2))
6717 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6721 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6726 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
6727 if (GET_MODE (op0) == mode)
6731 target = gen_reg_rtx (mode);
6732 convert_move (target, op0, unsignedp);
6736 /* Check for X ? A + B : A. If we have this, we can copy A to the
6737 output and conditionally add B. Similarly for unary operations.
6738 Don't do this if X has side-effects because those side effects
6739 might affect A or B and the "?" operation is a sequence point in
6740 ANSI. (operand_equal_p tests for side effects.) */
6742 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
6743 && operand_equal_p (TREE_OPERAND (exp, 2),
6744 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6745 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
6746 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
6747 && operand_equal_p (TREE_OPERAND (exp, 1),
6748 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6749 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
6750 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
6751 && operand_equal_p (TREE_OPERAND (exp, 2),
6752 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6753 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
6754 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
6755 && operand_equal_p (TREE_OPERAND (exp, 1),
6756 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6757 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
6759 /* If we are not to produce a result, we have no target. Otherwise,
6760 if a target was specified use it; it will not be used as an
6761 intermediate target unless it is safe. If no target, use a temporary. */
6766 else if (original_target
6767 && (safe_from_p (original_target, TREE_OPERAND (exp, 0))
6768 || (singleton && GET_CODE (original_target) == REG
6769 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
6770 && original_target == var_rtx (singleton)))
6771 && GET_MODE (original_target) == mode
6772 && ! (GET_CODE (original_target) == MEM
6773 && MEM_VOLATILE_P (original_target)))
6774 temp = original_target;
6775 else if (TREE_ADDRESSABLE (type))
6778 temp = assign_temp (type, 0, 0, 1);
6780 /* If we had X ? A + C : A, with C a constant power of 2, and we can
6781 do the test of X as a store-flag operation, do this as
6782 A + ((X != 0) << log C). Similarly for other simple binary
6783 operators. Only do for C == 1 if BRANCH_COST is low. */
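/* E.g. (hypothetical): `x ? a + 4 : a' becomes `a + ((x != 0) << 2)',
trading the conditional branch for a store-flag and a shift. */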
6784 if (temp && singleton && binary_op
6785 && (TREE_CODE (binary_op) == PLUS_EXPR
6786 || TREE_CODE (binary_op) == MINUS_EXPR
6787 || TREE_CODE (binary_op) == BIT_IOR_EXPR
6788 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
6789 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
6790 : integer_onep (TREE_OPERAND (binary_op, 1)))
6791 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6794 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
6795 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
6796 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
6799 /* If we had X ? A : A + 1, do this as A + (X == 0).
6801 We have to invert the truth value here and then put it
6802 back later if do_store_flag fails. We cannot simply copy
6803 TREE_OPERAND (exp, 0) to another variable and modify that
6804 because invert_truthvalue can modify the tree pointed to by its argument. */
6806 if (singleton == TREE_OPERAND (exp, 1))
6807 TREE_OPERAND (exp, 0)
6808 = invert_truthvalue (TREE_OPERAND (exp, 0));
6810 result = do_store_flag (TREE_OPERAND (exp, 0),
6811 (safe_from_p (temp, singleton)
6813 mode, BRANCH_COST <= 1);
6815 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
6816 result = expand_shift (LSHIFT_EXPR, mode, result,
6817 build_int_2 (tree_log2
6821 (safe_from_p (temp, singleton)
6822 ? temp : NULL_RTX), 0);
6826 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
6827 return expand_binop (mode, boptab, op1, result, temp,
6828 unsignedp, OPTAB_LIB_WIDEN);
6830 else if (singleton == TREE_OPERAND (exp, 1))
6831 TREE_OPERAND (exp, 0)
6832 = invert_truthvalue (TREE_OPERAND (exp, 0));
6835 do_pending_stack_adjust ();
6837 op0 = gen_label_rtx ();
6839 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
6843 /* If the target conflicts with the other operand of the
6844 binary op, we can't use it. Also, we can't use the target
6845 if it is a hard register, because evaluating the condition
6846 might clobber it. */
6848 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
6849 || (GET_CODE (temp) == REG
6850 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
6851 temp = gen_reg_rtx (mode);
6852 store_expr (singleton, temp, 0);
6855 expand_expr (singleton,
6856 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6857 if (singleton == TREE_OPERAND (exp, 1))
6858 jumpif (TREE_OPERAND (exp, 0), op0);
6860 jumpifnot (TREE_OPERAND (exp, 0), op0);
6862 start_cleanup_deferal ();
6863 if (binary_op && temp == 0)
6864 /* Just touch the other operand. */
6865 expand_expr (TREE_OPERAND (binary_op, 1),
6866 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6868 store_expr (build (TREE_CODE (binary_op), type,
6869 make_tree (type, temp),
6870 TREE_OPERAND (binary_op, 1)),
6873 store_expr (build1 (TREE_CODE (unary_op), type,
6874 make_tree (type, temp)),
6878 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
6879 comparison operator. If we have one of these cases, set the
6880 output to A, branch on A (cse will merge these two references),
6881 then set the output to FOO. */
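/* E.g. (hypothetical): for `x > 0 ? x : y' we store X into the target,
branch on the same comparison of X (cse merges the two references),
and store Y on the fall-through path. */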
6883 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6884 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6885 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6886 TREE_OPERAND (exp, 1), 0)
6887 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6888 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
6889 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
6891 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6892 temp = gen_reg_rtx (mode);
6893 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6894 jumpif (TREE_OPERAND (exp, 0), op0);
6896 start_cleanup_deferal ();
6897 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6901 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6902 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6903 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6904 TREE_OPERAND (exp, 2), 0)
6905 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6906 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
6907 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
6909 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6910 temp = gen_reg_rtx (mode);
6911 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6912 jumpifnot (TREE_OPERAND (exp, 0), op0);
6914 start_cleanup_deferal ();
6915 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6920 op1 = gen_label_rtx ();
6921 jumpifnot (TREE_OPERAND (exp, 0), op0);
6923 start_cleanup_deferal ();
6925 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6927 expand_expr (TREE_OPERAND (exp, 1),
6928 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6929 end_cleanup_deferal ();
6931 emit_jump_insn (gen_jump (op1));
6934 start_cleanup_deferal ();
6936 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6938 expand_expr (TREE_OPERAND (exp, 2),
6939 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6942 end_cleanup_deferal ();
6953 /* Something needs to be initialized, but we didn't know
6954 where that thing was when building the tree. For example,
6955 it could be the return value of a function, or a parameter
6956 to a function which is laid down in the stack, or a temporary
6957 variable which must be passed by reference.
6959 We guarantee that the expression will either be constructed
6960 or copied into our original target. */
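/* A typical source of this case (hypothetical user code):

struct S f (void);
struct S s = f ();

The call either constructs its value directly in the slot for S, or
the result is copied there afterwards, as guaranteed above. */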
6962 tree slot = TREE_OPERAND (exp, 0);
6963 tree cleanups = NULL_TREE;
6967 if (TREE_CODE (slot) != VAR_DECL)
6971 target = original_target;
6975 if (DECL_RTL (slot) != 0)
6977 target = DECL_RTL (slot);
6978 /* We have already expanded the slot, so don't do it again. */
6980 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6985 target = assign_temp (type, 2, 0, 1);
6986 /* All temp slots at this level must not conflict. */
6987 preserve_temp_slots (target);
6988 DECL_RTL (slot) = target;
6989 if (TREE_ADDRESSABLE (slot))
6991 TREE_ADDRESSABLE (slot) = 0;
6992 mark_addressable (slot);
6995 /* Since SLOT is not known to the called function
6996 to belong to its stack frame, we must build an explicit
6997 cleanup. This case occurs when we must build up a reference
6998 to pass the reference as an argument. In this case,
6999 it is very likely that such a reference need not be built here. */
7002 if (TREE_OPERAND (exp, 2) == 0)
7003 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
7004 cleanups = TREE_OPERAND (exp, 2);
7009 /* This case does occur when expanding a parameter which
7010 needs to be constructed on the stack. The target
7011 is the actual stack address that we want to initialize.
7012 The function we call will perform the cleanup in this case. */
7014 /* If we have already assigned it space, use that space,
7015 not the target that we were passed in, as our target
7016 parameter is only a hint. */
7017 if (DECL_RTL (slot) != 0)
7019 target = DECL_RTL (slot);
7020 /* We have already expanded the slot, so don't do it again. */
7022 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7027 DECL_RTL (slot) = target;
7028 /* If we must have an addressable slot, then make sure that
7029 the RTL that we just stored in slot is OK. */
7030 if (TREE_ADDRESSABLE (slot))
7032 TREE_ADDRESSABLE (slot) = 0;
7033 mark_addressable (slot);
7038 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
7039 /* Mark it as expanded. */
7040 TREE_OPERAND (exp, 1) = NULL_TREE;
7042 store_expr (exp1, target, 0);
7044 expand_decl_cleanup (NULL_TREE, cleanups);
7051 tree lhs = TREE_OPERAND (exp, 0);
7052 tree rhs = TREE_OPERAND (exp, 1);
7053 tree noncopied_parts = 0;
7054 tree lhs_type = TREE_TYPE (lhs);
7056 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7057 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7058 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7059 TYPE_NONCOPIED_PARTS (lhs_type));
7060 while (noncopied_parts != 0)
7062 expand_assignment (TREE_VALUE (noncopied_parts),
7063 TREE_PURPOSE (noncopied_parts), 0, 0);
7064 noncopied_parts = TREE_CHAIN (noncopied_parts);
7071 /* If lhs is complex, expand calls in rhs before computing it.
7072 That's so we don't compute a pointer and save it over a call.
7073 If lhs is simple, compute it first so we can give it as a
7074 target if the rhs is just a call. This avoids an extra temp and copy
7075 and that prevents a partial-subsumption which makes bad code.
7076 Actually we could treat component_ref's of vars like vars. */
7078 tree lhs = TREE_OPERAND (exp, 0);
7079 tree rhs = TREE_OPERAND (exp, 1);
7080 tree noncopied_parts = 0;
7081 tree lhs_type = TREE_TYPE (lhs);
7085 if (TREE_CODE (lhs) != VAR_DECL
7086 && TREE_CODE (lhs) != RESULT_DECL
7087 && TREE_CODE (lhs) != PARM_DECL
7088 && ! (TREE_CODE (lhs) == INDIRECT_REF
7089 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
7090 preexpand_calls (exp);
7092 /* Check for |= or &= of a bitfield of size 1 into another bitfield
7093 of size 1. In this case, (unless we need the result of the
7094 assignment) we can do this more efficiently with a
7095 test followed by an assignment, if necessary.
7097 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7098 things change so we do, this code should be enhanced to
7101 && TREE_CODE (lhs) == COMPONENT_REF
7102 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7103 || TREE_CODE (rhs) == BIT_AND_EXPR)
7104 && TREE_OPERAND (rhs, 0) == lhs
7105 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7106 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7107 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7109 rtx label = gen_label_rtx ();
7111 do_jump (TREE_OPERAND (rhs, 1),
7112 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7113 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7114 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7115 (TREE_CODE (rhs) == BIT_IOR_EXPR
7117 : integer_zero_node)),
7119 do_pending_stack_adjust ();
7124 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7125 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7126 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7127 TYPE_NONCOPIED_PARTS (lhs_type));
7129 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7130 while (noncopied_parts != 0)
7132 expand_assignment (TREE_PURPOSE (noncopied_parts),
7133 TREE_VALUE (noncopied_parts), 0, 0);
7134 noncopied_parts = TREE_CHAIN (noncopied_parts);
7139 case PREINCREMENT_EXPR:
7140 case PREDECREMENT_EXPR:
7141 return expand_increment (exp, 0, ignore);
7143 case POSTINCREMENT_EXPR:
7144 case POSTDECREMENT_EXPR:
7145 /* Faster to treat as pre-increment if result is not used. */
7146 return expand_increment (exp, ! ignore, ignore);
7149 /* If nonzero, TEMP will be set to the address of something that might
7150 be a MEM corresponding to a stack slot. */
7153 /* Are we taking the address of a nested function? */
7154 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
7155 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
7156 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0)))
7158 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7159 op0 = force_operand (op0, target);
7161 /* If we are taking the address of something erroneous, just return a zero. */
7163 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7167 /* We make sure to pass const0_rtx down if we came in with
7168 ignore set, to avoid doing the cleanups twice for something. */
7169 op0 = expand_expr (TREE_OPERAND (exp, 0),
7170 ignore ? const0_rtx : NULL_RTX, VOIDmode,
7171 (modifier == EXPAND_INITIALIZER
7172 ? modifier : EXPAND_CONST_ADDRESS));
7174 /* If we are going to ignore the result, OP0 will have been set
7175 to const0_rtx, so just return it. Don't get confused and
7176 think we are taking the address of the constant. */
7180 op0 = protect_from_queue (op0, 0);
7182 /* We would like the object in memory. If it is a constant,
7183 we can have it be statically allocated into memory. For
7184 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
7185 memory and store the value into it. */
7187 if (CONSTANT_P (op0))
7188 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7190 else if (GET_CODE (op0) == MEM)
7192 mark_temp_addr_taken (op0);
7193 temp = XEXP (op0, 0);
7196 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7197 || GET_CODE (op0) == CONCAT)
7199 /* If this object is in a register, we copy it into a memory temporary below so that its address can be taken. */
7201 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7202 rtx memloc = assign_temp (inner_type, 1, 1, 1);
7204 mark_temp_addr_taken (memloc);
7205 emit_move_insn (memloc, op0);
7209 if (GET_CODE (op0) != MEM)
7212 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7214 temp = XEXP (op0, 0);
7215 #ifdef POINTERS_EXTEND_UNSIGNED
7216 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7217 && mode == ptr_mode)
7218 temp = convert_memory_address (ptr_mode, temp);
7223 op0 = force_operand (XEXP (op0, 0), target);
7226 if (flag_force_addr && GET_CODE (op0) != REG)
7227 op0 = force_reg (Pmode, op0);
7229 if (GET_CODE (op0) == REG
7230 && ! REG_USERVAR_P (op0))
7231 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
7233 /* If we might have had a temp slot, add an equivalent address for it. */
7236 update_temp_slot_address (temp, op0);
7238 #ifdef POINTERS_EXTEND_UNSIGNED
7239 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7240 && mode == ptr_mode)
7241 op0 = convert_memory_address (ptr_mode, op0);
7246 case ENTRY_VALUE_EXPR:
7249 /* COMPLEX type for Extended Pascal & Fortran */
7252 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7255 /* Get the rtx code of the operands. */
7256 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7257 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7260 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7264 /* Move the real (op0) and imaginary (op1) parts to their location. */
7265 emit_move_insn (gen_realpart (mode, target), op0);
7266 emit_move_insn (gen_imagpart (mode, target), op1);
7268 insns = get_insns ();
7271 /* Complex construction should appear as a single unit. */
7272 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7273 each with a separate pseudo as destination.
7274 It's not correct for flow to treat them as a unit. */
7275 if (GET_CODE (target) != CONCAT)
7276 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
7284 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7285 return gen_realpart (mode, op0);
7288 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7289 return gen_imagpart (mode, op0);
7293 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7297 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7300 target = gen_reg_rtx (mode);
7304 /* Store the realpart and the negated imagpart to target. */
7305 emit_move_insn (gen_realpart (partmode, target),
7306 gen_realpart (partmode, op0));
7308 imag_t = gen_imagpart (partmode, target);
7309 temp = expand_unop (partmode, neg_optab,
7310 gen_imagpart (partmode, op0), imag_t, 0);
7312 emit_move_insn (imag_t, temp);
7314 insns = get_insns ();
7317 /* Conjugate should appear as a single unit.
7318 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
7319 each with a separate pseudo as destination.
7320 It's not correct for flow to treat them as a unit. */
7321 if (GET_CODE (target) != CONCAT)
7322 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
7329 case TRY_CATCH_EXPR:
7331 tree handler = TREE_OPERAND (exp, 1);
7333 expand_eh_region_start ();
7335 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7337 expand_eh_region_end (handler);
7344 rtx dcc = get_dynamic_cleanup_chain ();
7345 emit_move_insn (dcc, validize_mem (gen_rtx (MEM, Pmode, dcc)));
7351 rtx dhc = get_dynamic_handler_chain ();
7352 emit_move_insn (dhc, validize_mem (gen_rtx (MEM, Pmode, dhc)));
7357 op0 = CONST0_RTX (tmode);
7363 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7366 /* Here to do an ordinary binary operator, generating an instruction
7367 from the optab already placed in `this_optab'. */
7369 preexpand_calls (exp);
7370 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
7372 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7373 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7375 temp = expand_binop (mode, this_optab, op0, op1, target,
7376 unsignedp, OPTAB_LIB_WIDEN);
7383 /* Emit bytecode to evaluate the given expression EXP to the stack. */
7386 bc_expand_expr (exp)
7389 enum tree_code code;
7392 struct binary_operator *binoptab;
7393 struct unary_operator *unoptab;
7394 struct increment_operator *incroptab;
7395 struct bc_label *lab, *lab1;
7396 enum bytecode_opcode opcode;
7399 code = TREE_CODE (exp);
7405 if (DECL_RTL (exp) == 0)
7407 error_with_decl (exp, "prior parameter's size depends on `%s'");
7411 bc_load_parmaddr (DECL_RTL (exp));
7412 bc_load_memory (TREE_TYPE (exp), exp);
7418 if (DECL_RTL (exp) == 0)
7422 if (BYTECODE_LABEL (DECL_RTL (exp)))
7423 bc_load_externaddr (DECL_RTL (exp));
7425 bc_load_localaddr (DECL_RTL (exp));
7427 if (TREE_PUBLIC (exp))
7428 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
7429 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
7431 bc_load_localaddr (DECL_RTL (exp));
7433 bc_load_memory (TREE_TYPE (exp), exp);
7438 #ifdef DEBUG_PRINT_CODE
7439 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
7441 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
7443 : TYPE_MODE (TREE_TYPE (exp)))],
7444 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
7450 #ifdef DEBUG_PRINT_CODE
7451 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
7453 /* FIX THIS: find a better way to pass real_cst's. -bson */
7454 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
7455 (double) TREE_REAL_CST (exp));
7464 /* We build a call description vector describing the type of
7465 the return value and of the arguments; this call vector,
7466 together with a pointer to a location for the return value
7467 and the base of the argument list, is passed to the low-level
7468 machine-dependent call subroutine, which is responsible
7469 for putting the arguments wherever real functions expect
7470 them, as well as getting the return value back. */
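/* A sketch of the vector built below (the exact layout is what the
code constructs; sizes are machine-dependent):
{ nargs, return-type code, return size,
arg-type code, arg size, ... }
It is emitted as a static array of integers and its address is pushed
before the call. */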
7472 tree calldesc = 0, arg;
7476 /* Push the evaluated args on the evaluation stack in reverse
7477 order. Also make an entry for each arg in the calldesc
7478 vector while we're at it. */
7480 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7482 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
7485 bc_expand_expr (TREE_VALUE (arg));
7487 calldesc = tree_cons ((tree) 0,
7488 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
7490 calldesc = tree_cons ((tree) 0,
7491 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
7495 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7497 /* Allocate a location for the return value and push its
7498 address on the evaluation stack. Also make an entry
7499 at the front of the calldesc for the return value type. */
7501 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7502 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
7503 bc_load_localaddr (retval);
7505 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
7506 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
7508 /* Prepend the argument count. */
7509 calldesc = tree_cons ((tree) 0,
7510 build_int_2 (nargs, 0),
7513 /* Push the address of the call description vector on the stack. */
7514 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
7515 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
7516 build_index_type (build_int_2 (nargs * 2, 0)));
7517 r = output_constant_def (calldesc);
7518 bc_load_externaddr (r);
7520 /* Push the address of the function to be called. */
7521 bc_expand_expr (TREE_OPERAND (exp, 0));
7523 /* Call the function, popping its address and the calldesc vector
7524 address off the evaluation stack in the process. */
7525 bc_emit_instruction (call);
7527 /* Pop the arguments off the stack. */
7528 bc_adjust_stack (nargs);
7530 /* Load the return value onto the stack. */
7531 bc_load_localaddr (retval);
7532 bc_load_memory (type, TREE_OPERAND (exp, 0));
7538 if (!SAVE_EXPR_RTL (exp))
7540 /* First time around: copy to local variable */
7541 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
7542 TYPE_ALIGN (TREE_TYPE (exp)));
7543 bc_expand_expr (TREE_OPERAND (exp, 0));
7544 bc_emit_instruction (duplicate);
7546 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7547 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7551 /* Consecutive reference: use saved copy */
7552 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7553 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7558 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
7559 how are they handled instead? */
7562 TREE_USED (exp) = 1;
7563 bc_expand_expr (STMT_BODY (exp));
7570 bc_expand_expr (TREE_OPERAND (exp, 0));
7571 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
7576 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
7581 bc_expand_address (TREE_OPERAND (exp, 0));
7586 bc_expand_expr (TREE_OPERAND (exp, 0));
7587 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7592 bc_expand_expr (bc_canonicalize_array_ref (exp));
7597 bc_expand_component_address (exp);
7599 /* If we have a bitfield, generate a proper load. */
7600 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
7605 bc_expand_expr (TREE_OPERAND (exp, 0));
7606 bc_emit_instruction (drop);
7607 bc_expand_expr (TREE_OPERAND (exp, 1));
7612 bc_expand_expr (TREE_OPERAND (exp, 0));
7613 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7614 lab = bc_get_bytecode_label ();
7615 bc_emit_bytecode (xjumpifnot);
7616 bc_emit_bytecode_labelref (lab);
7618 #ifdef DEBUG_PRINT_CODE
7619 fputc ('\n', stderr);
7621 bc_expand_expr (TREE_OPERAND (exp, 1));
7622 lab1 = bc_get_bytecode_label ();
7623 bc_emit_bytecode (jump);
7624 bc_emit_bytecode_labelref (lab1);
7626 #ifdef DEBUG_PRINT_CODE
7627 fputc ('\n', stderr);
7630 bc_emit_bytecode_labeldef (lab);
7631 bc_expand_expr (TREE_OPERAND (exp, 2));
7632 bc_emit_bytecode_labeldef (lab1);
7635 case TRUTH_ANDIF_EXPR:
7637 opcode = xjumpifnot;
7640 case TRUTH_ORIF_EXPR:
7647 binoptab = optab_plus_expr;
7652 binoptab = optab_minus_expr;
7657 binoptab = optab_mult_expr;
7660 case TRUNC_DIV_EXPR:
7661 case FLOOR_DIV_EXPR:
7663 case ROUND_DIV_EXPR:
7664 case EXACT_DIV_EXPR:
7666 binoptab = optab_trunc_div_expr;
7669 case TRUNC_MOD_EXPR:
7670 case FLOOR_MOD_EXPR:
7672 case ROUND_MOD_EXPR:
7674 binoptab = optab_trunc_mod_expr;
7677 case FIX_ROUND_EXPR:
7678 case FIX_FLOOR_EXPR:
7680 abort (); /* Not used for C. */
7682 case FIX_TRUNC_EXPR:
7689 abort (); /* FIXME */
7693 binoptab = optab_rdiv_expr;
7698 binoptab = optab_bit_and_expr;
7703 binoptab = optab_bit_ior_expr;
7708 binoptab = optab_bit_xor_expr;
7713 binoptab = optab_lshift_expr;
7718 binoptab = optab_rshift_expr;
7721 case TRUTH_AND_EXPR:
7723 binoptab = optab_truth_and_expr;
7728 binoptab = optab_truth_or_expr;
7733 binoptab = optab_lt_expr;
7738 binoptab = optab_le_expr;
7743 binoptab = optab_ge_expr;
7748 binoptab = optab_gt_expr;
7753 binoptab = optab_eq_expr;
7758 binoptab = optab_ne_expr;
7763 unoptab = optab_negate_expr;
7768 unoptab = optab_bit_not_expr;
7771 case TRUTH_NOT_EXPR:
7773 unoptab = optab_truth_not_expr;
7776 case PREDECREMENT_EXPR:
7778 incroptab = optab_predecrement_expr;
7781 case PREINCREMENT_EXPR:
7783 incroptab = optab_preincrement_expr;
7786 case POSTDECREMENT_EXPR:
7788 incroptab = optab_postdecrement_expr;
7791 case POSTINCREMENT_EXPR:
7793 incroptab = optab_postincrement_expr;
7798 bc_expand_constructor (exp);
7808 tree vars = TREE_OPERAND (exp, 0);
7809 int vars_need_expansion = 0;
7811 /* Need to open a binding contour here because
7812 if there are any cleanups they must be contained here. */
7813 expand_start_bindings (0);
7815 /* Mark the corresponding BLOCK for output. */
7816 if (TREE_OPERAND (exp, 2) != 0)
7817 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
7819 /* If VARS have not yet been expanded, expand them now. */
7822 if (DECL_RTL (vars) == 0)
7824 vars_need_expansion = 1;
7827 expand_decl_init (vars);
7828 vars = TREE_CHAIN (vars);
7831 bc_expand_expr (TREE_OPERAND (exp, 1));
7833 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7846 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
7847 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
7853 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7859 bc_expand_expr (TREE_OPERAND (exp, 0));
7860 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7861 lab = bc_get_bytecode_label ();
7863 bc_emit_instruction (duplicate);
7864 bc_emit_bytecode (opcode);
7865 bc_emit_bytecode_labelref (lab);
7867 #ifdef DEBUG_PRINT_CODE
7868 fputc ('\n', stderr);
7871 bc_emit_instruction (drop);
7873 bc_expand_expr (TREE_OPERAND (exp, 1));
7874 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
7875 bc_emit_bytecode_labeldef (lab);
7881 type = TREE_TYPE (TREE_OPERAND (exp, 0));
7883 /* Push the quantum. */
7884 bc_expand_expr (TREE_OPERAND (exp, 1));
7886 /* Convert it to the lvalue's type. */
7887 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
7889 /* Push the address of the lvalue. */
7890 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
7892 /* Perform the actual increment. */
7893 bc_expand_increment (incroptab, type);
7897 /* Return the alignment in bits of EXP, a pointer valued expression.
7898 But don't return more than MAX_ALIGN no matter what.
7899 The alignment returned is, by default, the alignment of the thing that
7900 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7902 Otherwise, look at the expression to see if we can do better, i.e., if the
7903 expression is actually pointing at an object whose alignment is tighter. */
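/* For example (hypothetical): for `(char *) &i' with `int i', the
pointee type promises only 8-bit alignment, but looking through the
conversion to the ADDR_EXPR below finds DECL_ALIGN (i), typically
32 bits, which is what gets returned when MAX_ALIGN permits. */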
7906 get_pointer_alignment (exp, max_align)
7910 unsigned align, inner;
7912 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7915 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7916 align = MIN (align, max_align);
7920 switch (TREE_CODE (exp))
7924 case NON_LVALUE_EXPR:
7925 exp = TREE_OPERAND (exp, 0);
7926 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7928 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7929 align = MIN (inner, max_align);
7933 /* If sum of pointer + int, restrict our maximum alignment to that
7934 imposed by the integer. If not, we can't do any better than ALIGN. */
7936 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
7939 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
7944 exp = TREE_OPERAND (exp, 0);
7948 /* See what we are pointing at and look at its alignment. */
7949 exp = TREE_OPERAND (exp, 0);
7950 if (TREE_CODE (exp) == FUNCTION_DECL)
7951 align = FUNCTION_BOUNDARY;
7952 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
7953 align = DECL_ALIGN (exp);
7954 #ifdef CONSTANT_ALIGNMENT
7955 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7956 align = CONSTANT_ALIGNMENT (exp, align);
7958 return MIN (align, max_align);
7966 /* Return the tree node and offset if a given argument corresponds to
7967 a string constant. */
7970 string_constant (arg, ptr_offset)
7976 if (TREE_CODE (arg) == ADDR_EXPR
7977 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7979 *ptr_offset = integer_zero_node;
7980 return TREE_OPERAND (arg, 0);
7982 else if (TREE_CODE (arg) == PLUS_EXPR)
7984 tree arg0 = TREE_OPERAND (arg, 0);
7985 tree arg1 = TREE_OPERAND (arg, 1);
7990 if (TREE_CODE (arg0) == ADDR_EXPR
7991 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
7994 return TREE_OPERAND (arg0, 0);
7996 else if (TREE_CODE (arg1) == ADDR_EXPR
7997 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8000 return TREE_OPERAND (arg1, 0);
8007 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
8008 way, because it could contain a zero byte in the middle.
8009 TREE_STRING_LENGTH is the size of the character array, not the string.
8011 Unfortunately, string_constant can't access the values of const char
8012 arrays with initializers, so neither can we do so here. */
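/* For example (hypothetical): for `"hello" + 1' this computes 4 at
compile time; for `"foo\0bar"' plus a non-constant offset the internal
zero byte defeats the computation and strlen must be called at run
time instead. */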
8022 src = string_constant (src, &offset_node);
8025 max = TREE_STRING_LENGTH (src);
8026 ptr = TREE_STRING_POINTER (src);
8027 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
8029 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
8030 compute the offset to the following null if we don't know where to
8031 start searching for it. */
8033 for (i = 0; i < max; i++)
8036 /* We don't know the starting offset, but we do know that the string
8037 has no internal zero bytes. We can assume that the offset falls
8038 within the bounds of the string; otherwise, the programmer deserves
8039 what he gets. Subtract the offset from the length of the string,
8041 and return that. This would perhaps not be valid if we were dealing
8042 with named arrays in addition to literal string constants. */
8043 return size_binop (MINUS_EXPR, size_int (max), offset_node);
8046 /* We have a known offset into the string. Start searching there for
8047 a null character. */
8048 if (offset_node == 0)
8052 /* Did we get a long long offset? If so, punt. */
8053 if (TREE_INT_CST_HIGH (offset_node) != 0)
8055 offset = TREE_INT_CST_LOW (offset_node);
8057 /* If the offset is known to be out of bounds, warn, and call strlen at runtime. */
8059 if (offset < 0 || offset > max)
8061 warning ("offset outside bounds of constant string");
8064 /* Use strlen to search for the first zero byte. Since any strings
8065 constructed with build_string will have nulls appended, we win even
8066 if we get handed something like (char[4])"abcd".
8068 Since OFFSET is our starting index into the string, no further
8069 calculation is needed. */
8070 return size_int (strlen (ptr + offset));
8074 expand_builtin_return_addr (fndecl_code, count, tem)
8075 enum built_in_function fndecl_code;
8081 /* Some machines need special handling before we can access
8082 arbitrary frames. For example, on the sparc, we must first flush
8083 all register windows to the stack. */
8084 #ifdef SETUP_FRAME_ADDRESSES
8086 SETUP_FRAME_ADDRESSES ();
8089 /* On the sparc, the return address is not in the frame, it is in a
8090 register. There is no way to access it off of the current frame
8091 pointer, but it can be accessed off the previous frame pointer by
8092 reading the value from the register window save area. */
8093 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
8094 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
8098 /* Scan back COUNT frames to the specified frame. */
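/* E.g. (hypothetical): __builtin_frame_address (2) follows the
dynamic chain twice from the current frame before the result is
returned. */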
8099 for (i = 0; i < count; i++)
8101 /* Assume the dynamic chain pointer is in the word that the
8102 frame address points to, unless otherwise specified. */
8103 #ifdef DYNAMIC_CHAIN_ADDRESS
8104 tem = DYNAMIC_CHAIN_ADDRESS (tem);
8106 tem = memory_address (Pmode, tem);
8107 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
8110 /* For __builtin_frame_address, return what we've got. */
8111 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
8114 /* For __builtin_return_address, get the return address from that frame. */
8116 #ifdef RETURN_ADDR_RTX
8117 tem = RETURN_ADDR_RTX (count, tem);
8119 tem = memory_address (Pmode,
8120 plus_constant (tem, GET_MODE_SIZE (Pmode)));
8121 tem = gen_rtx (MEM, Pmode, tem);
8126 /* __builtin_setjmp is passed a pointer to an array of five words (not
8127 all will be used on all machines). It operates similarly to the C
8128 library function of the same name, but is more efficient. Much of
8129 the code below (and for longjmp) is copied from the handling of non-local gotos.
8132 NOTE: This is intended for use by GNAT and the exception handling
8133 scheme in the compiler and will only work in the method used by them. */
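/* A sketch of the buffer layout set up below (word size is
GET_MODE_SIZE (Pmode), so machine-dependent): word 0 holds the frame
pointer, word 1 the address of the receiver label, and the remaining
words the nonlocal stack save area. */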
8137 expand_builtin_setjmp (buf_addr, target)
8141 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
8142 enum machine_mode sa_mode = Pmode, value_mode;
8144 int old_inhibit_defer_pop = inhibit_defer_pop;
8146 = RETURN_POPS_ARGS (get_identifier ("__dummy"),
8147 build_function_type (void_type_node, NULL_TREE),
8150 CUMULATIVE_ARGS args_so_far;
8154 value_mode = TYPE_MODE (integer_type_node);
8156 #ifdef POINTERS_EXTEND_UNSIGNED
8157 buf_addr = convert_memory_address (Pmode, buf_addr);
8160 buf_addr = force_reg (Pmode, buf_addr);
8162 if (target == 0 || GET_CODE (target) != REG
8163 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8164 target = gen_reg_rtx (value_mode);
8168 /* We store the frame pointer and the address of lab1 in the buffer
8169 and use the rest of it for the stack save area, which is
8170 machine-dependent. */
8171 emit_move_insn (gen_rtx (MEM, Pmode, buf_addr),
8172 virtual_stack_vars_rtx);
8174 (validize_mem (gen_rtx (MEM, Pmode,
8175 plus_constant (buf_addr,
8176 GET_MODE_SIZE (Pmode)))),
8177 gen_rtx (LABEL_REF, Pmode, lab1));
8179 #ifdef HAVE_save_stack_nonlocal
8180 if (HAVE_save_stack_nonlocal)
8181 sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
8184 stack_save = gen_rtx (MEM, sa_mode,
8185 plus_constant (buf_addr,
8186 2 * GET_MODE_SIZE (Pmode)));
8187 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
8191 emit_insn (gen_setjmp ());
8194 /* Set TARGET to zero and branch around the other case. */
8195 emit_move_insn (target, const0_rtx);
8196 emit_jump_insn (gen_jump (lab2));
8200 /* Note that setjmp clobbers FP when we get here, so we have to make
8201 sure it's marked as used by this function. */
8202 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
8204 /* Mark the static chain as clobbered here so life information
8205 doesn't get messed up for it. */
8206 emit_insn (gen_rtx (CLOBBER, VOIDmode, static_chain_rtx));
8208 /* Now put in the code to restore the frame pointer, and argument
8209 pointer, if needed. The code below is from expand_end_bindings
8210 in stmt.c; see detailed documentation there. */
8211 #ifdef HAVE_nonlocal_goto
8212 if (! HAVE_nonlocal_goto)
8214 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8216 /* Do we need to do something like:
8218 current_function_has_nonlocal_label = 1;
8220 here? It seems like we might have to, or some subset of that
8221 functionality, but I am unsure. (mrs) */
8223 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8224 if (fixed_regs[ARG_POINTER_REGNUM])
8226 #ifdef ELIMINABLE_REGS
8227 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
8229 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8230 if (elim_regs[i].from == ARG_POINTER_REGNUM
8231 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8234 if (i == sizeof elim_regs / sizeof elim_regs [0])
8237 /* Now restore our arg pointer from the address at which it
8238 was saved in our stack frame.
8239 If there hasn't been space allocated for it yet, make some now. */
8241 if (arg_pointer_save_area == 0)
8242 arg_pointer_save_area
8243 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8244 emit_move_insn (virtual_incoming_args_rtx,
8245 copy_to_reg (arg_pointer_save_area));
8250 #ifdef HAVE_nonlocal_goto_receiver
8251 if (HAVE_nonlocal_goto_receiver)
8252 emit_insn (gen_nonlocal_goto_receiver ());
8254 /* The static chain pointer contains the address of the dummy function.
8255 We need to call it here to handle some PIC cases of restoring a
8256 global pointer. Then return 1. */
8257 op0 = copy_to_mode_reg (Pmode, static_chain_rtx);
8259 /* We can't actually call emit_library_call here, so do everything
8260 it does, which isn't much for a libfunc with no args. */
8261 op0 = memory_address (FUNCTION_MODE, op0);
8263 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE,
8264 gen_rtx (SYMBOL_REF, Pmode, "__dummy"), 1);
8265 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1);
8267 #ifndef ACCUMULATE_OUTGOING_ARGS
8268 #ifdef HAVE_call_pop
8270 emit_call_insn (gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, op0),
8271 const0_rtx, next_arg_reg,
8272 GEN_INT (return_pops)));
8279 emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, op0),
8280 const0_rtx, next_arg_reg, const0_rtx));
8285 emit_move_insn (target, const1_rtx);
8291 /* Expand an expression EXP that calls a built-in function,
8292 with result going to TARGET if that's convenient
8293 (and in mode MODE if that's convenient).
8294 SUBTARGET may be used as the target for computing one of EXP's operands.
8295 IGNORE is nonzero if the value is to be ignored. */
8297 #define CALLED_AS_BUILT_IN(NODE) \
8298 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
8301 expand_builtin (exp, target, subtarget, mode, ignore)
8305 enum machine_mode mode;
8308 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8309 tree arglist = TREE_OPERAND (exp, 1);
8312 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
8313 optab builtin_optab;
8315 switch (DECL_FUNCTION_CODE (fndecl))
8320 /* build_function_call changes these into ABS_EXPR. */
8325 /* Treat these like sqrt, but only if the user asks for them. */
8326 if (! flag_fast_math)
8328 case BUILT_IN_FSQRT:
8329 /* If not optimizing, call the library function. */
8334 /* Arg could be wrong type if user redeclared this fcn wrong. */
8335 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
8338 /* Stabilize and compute the argument. */
8339 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
8340 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
8342 exp = copy_node (exp);
8343 arglist = copy_node (arglist);
8344 TREE_OPERAND (exp, 1) = arglist;
8345 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
8347 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8349 /* Make a suitable register to place result in. */
8350 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8355 switch (DECL_FUNCTION_CODE (fndecl))
8358 builtin_optab = sin_optab; break;
8360 builtin_optab = cos_optab; break;
8361 case BUILT_IN_FSQRT:
8362 builtin_optab = sqrt_optab; break;
8367 /* Compute into TARGET.
8368 Set TARGET to wherever the result comes back. */
8369 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8370 builtin_optab, op0, target, 0);
8372 /* If we were unable to expand via the builtin, stop the
8373 sequence (without outputting the insns) and break, causing
8374 a call to the library function. */
8381 /* Check the results by default. But if flag_fast_math is turned on,
8382 then assume sqrt will always be called with valid arguments. */
8384 if (! flag_fast_math)
8386 /* Don't define the builtin FP instructions
8387 if your machine is not IEEE. */
8388 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8391 lab1 = gen_label_rtx ();
8393 /* Test the result; if it is NaN, set errno=EDOM because
8394 the argument was not in the domain. */
8395 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
8396 emit_jump_insn (gen_beq (lab1));
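/* This relies on the IEEE rule that a NaN compares unequal to itself:
a normal result satisfies the EQ self-comparison above and branches
past the errno-setting code below. */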
8400 #ifdef GEN_ERRNO_RTX
8401 rtx errno_rtx = GEN_ERRNO_RTX;
8404 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
8407 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8410 /* We can't set errno=EDOM directly; let the library call do it.
8411 Pop the arguments right away in case the call gets deleted. */
8413 expand_call (exp, target, 0);
8420 /* Output the entire sequence. */
8421 insns = get_insns ();
8427 /* __builtin_apply_args returns block of memory allocated on
8428 the stack into which is stored the arg pointer, structure
8429 value address, static chain, and all the registers that might
8430 possibly be used in performing a function call. The code is
8431 moved to the start of the function so the incoming values are saved. */
8433 case BUILT_IN_APPLY_ARGS:
8434 /* Don't do __builtin_apply_args more than once in a function.
8435 Save the result of the first call and reuse it. */
8436 if (apply_args_value != 0)
8437 return apply_args_value;
8439 /* When this function is called, it means that registers must be
8440 saved on entry to this function. So we migrate the
8441 call to the first insn of this function. */
8446 temp = expand_builtin_apply_args ();
8450 apply_args_value = temp;
8452 /* Put the sequence after the NOTE that starts the function.
8453 If this is inside a SEQUENCE, make the outer-level insn
chain current, so the code is placed at the start of the function.  */
8456 push_topmost_sequence ();
8457 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8458 pop_topmost_sequence ();
8462 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8463 FUNCTION with a copy of the parameters described by
8464 ARGUMENTS, and ARGSIZE. It returns a block of memory
8465 allocated on the stack into which is stored all the registers
8466 that might possibly be used for returning the result of a
8467 function. ARGUMENTS is the value returned by
8468 __builtin_apply_args. ARGSIZE is the number of bytes of
8469 arguments that must be copied. ??? How should this value be
computed?  We'll also need a safe worst case value for varargs functions.  */
8472 case BUILT_IN_APPLY:
8474 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8475 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8476 || TREE_CHAIN (arglist) == 0
8477 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8478 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8479 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8487 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8488 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
8490 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8493 /* __builtin_return (RESULT) causes the function to return the
8494 value described by RESULT. RESULT is address of the block of
8495 memory returned by __builtin_apply. */
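/* Illustrative use of the three builtins together, as in a generic
   forwarding stub (a sketch only; TARGET_FN is a hypothetical
   function pointer, and 64 merely stands in for the worst-case
   ARGSIZE that, as noted above, has no portable way of being
   computed):

       void *args = __builtin_apply_args ();
       void *result = __builtin_apply ((void (*)()) target_fn, args, 64);
       __builtin_return (result);
*/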
8496 case BUILT_IN_RETURN:
8498 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8499 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8500 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8501 NULL_RTX, VOIDmode, 0));
8504 case BUILT_IN_SAVEREGS:
8505 /* Don't do __builtin_saveregs more than once in a function.
8506 Save the result of the first call and reuse it. */
8507 if (saveregs_value != 0)
8508 return saveregs_value;
8510 /* When this function is called, it means that registers must be
8511 saved on entry to this function. So we migrate the
8512 call to the first insn of this function. */
8516 /* Now really call the function. `expand_call' does not call
8517 expand_builtin, so there is no danger of infinite recursion here. */
8520 #ifdef EXPAND_BUILTIN_SAVEREGS
8521 /* Do whatever the machine needs done in this case. */
8522 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8524 /* The register where the function returns its value
8525 is likely to have something else in it, such as an argument.
8526 So preserve that register around the call. */
8528 if (value_mode != VOIDmode)
8530 rtx valreg = hard_libcall_value (value_mode);
8531 rtx saved_valreg = gen_reg_rtx (value_mode);
8533 emit_move_insn (saved_valreg, valreg);
8534 temp = expand_call (exp, target, ignore);
8535 emit_move_insn (valreg, saved_valreg);
8538 /* Generate the call, putting the value in a pseudo. */
8539 temp = expand_call (exp, target, ignore);
8545 saveregs_value = temp;
8547 /* Put the sequence after the NOTE that starts the function.
8548 If this is inside a SEQUENCE, make the outer-level insn
chain current, so the code is placed at the start of the function.  */
8551 push_topmost_sequence ();
8552 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8553 pop_topmost_sequence ();
8557 /* __builtin_args_info (N) returns word N of the arg space info
for the current function.  The number and meaning of the words
are controlled by the definition of CUMULATIVE_ARGS.  */
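/* So with a constant argument, e.g. `__builtin_args_info (0)', the
   call folds to a single word of the current function's
   CUMULATIVE_ARGS; with no argument it yields the whole array, via
   the CONSTRUCTOR built below.  Sketch only:

       int first_word = __builtin_args_info (0);
*/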
8560 case BUILT_IN_ARGS_INFO:
8562 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
int *word_ptr = (int *) &current_function_args_info;
8565 tree type, elts, result;
8567 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8568 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8569 __FILE__, __LINE__);
8573 tree arg = TREE_VALUE (arglist);
8574 if (TREE_CODE (arg) != INTEGER_CST)
8575 error ("argument of `__builtin_args_info' must be constant");
8578 int wordnum = TREE_INT_CST_LOW (arg);
8580 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8581 error ("argument of `__builtin_args_info' out of range");
8583 return GEN_INT (word_ptr[wordnum]);
8587 error ("missing argument in `__builtin_args_info'");
8592 for (i = 0; i < nwords; i++)
elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0), elts);
8595 type = build_array_type (integer_type_node,
8596 build_index_type (build_int_2 (nwords, 0)));
8597 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8598 TREE_CONSTANT (result) = 1;
8599 TREE_STATIC (result) = 1;
8600 result = build (INDIRECT_REF, build_pointer_type (type), result);
8601 TREE_CONSTANT (result) = 1;
8602 return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
8606 /* Return the address of the first anonymous stack arg. */
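/* This is the primitive underneath va_start; a typical <stdarg.h>
   expands it roughly as in this sketch (details vary by target and
   header):

       void f (char *fmt, ...)
       {
         va_list ap;
         va_start (ap, fmt);    (uses __builtin_next_arg (fmt))
         ...
       }
*/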
8607 case BUILT_IN_NEXT_ARG:
8609 tree fntype = TREE_TYPE (current_function_decl);
8611 if ((TYPE_ARG_TYPES (fntype) == 0
|| (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype))) == void_type_node))
8614 && ! current_function_varargs)
8616 error ("`va_start' used in function with fixed args");
8622 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8623 tree arg = TREE_VALUE (arglist);
8625 /* Strip off all nops for the sake of the comparison. This
8626 is not quite the same as STRIP_NOPS. It does more.
We must also strip off INDIRECT_EXPR for C++ reference parameters.  */
8629 while (TREE_CODE (arg) == NOP_EXPR
8630 || TREE_CODE (arg) == CONVERT_EXPR
8631 || TREE_CODE (arg) == NON_LVALUE_EXPR
8632 || TREE_CODE (arg) == INDIRECT_REF)
8633 arg = TREE_OPERAND (arg, 0);
8634 if (arg != last_parm)
8635 warning ("second parameter of `va_start' not last named argument");
8637 else if (! current_function_varargs)
8638 /* Evidently an out of date version of <stdarg.h>; can't validate
8639 va_start's second argument, but can still work as intended. */
8640 warning ("`__builtin_next_arg' called without an argument");
8643 return expand_binop (Pmode, add_optab,
8644 current_function_internal_arg_pointer,
8645 current_function_arg_offset_rtx,
8646 NULL_RTX, 0, OPTAB_LIB_WIDEN);
8648 case BUILT_IN_CLASSIFY_TYPE:
8651 tree type = TREE_TYPE (TREE_VALUE (arglist));
8652 enum tree_code code = TREE_CODE (type);
8653 if (code == VOID_TYPE)
8654 return GEN_INT (void_type_class);
8655 if (code == INTEGER_TYPE)
8656 return GEN_INT (integer_type_class);
8657 if (code == CHAR_TYPE)
8658 return GEN_INT (char_type_class);
8659 if (code == ENUMERAL_TYPE)
8660 return GEN_INT (enumeral_type_class);
8661 if (code == BOOLEAN_TYPE)
8662 return GEN_INT (boolean_type_class);
8663 if (code == POINTER_TYPE)
8664 return GEN_INT (pointer_type_class);
8665 if (code == REFERENCE_TYPE)
8666 return GEN_INT (reference_type_class);
8667 if (code == OFFSET_TYPE)
8668 return GEN_INT (offset_type_class);
8669 if (code == REAL_TYPE)
8670 return GEN_INT (real_type_class);
8671 if (code == COMPLEX_TYPE)
8672 return GEN_INT (complex_type_class);
8673 if (code == FUNCTION_TYPE)
8674 return GEN_INT (function_type_class);
8675 if (code == METHOD_TYPE)
8676 return GEN_INT (method_type_class);
8677 if (code == RECORD_TYPE)
8678 return GEN_INT (record_type_class);
8679 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8680 return GEN_INT (union_type_class);
8681 if (code == ARRAY_TYPE)
8683 if (TYPE_STRING_FLAG (type))
8684 return GEN_INT (string_type_class);
8686 return GEN_INT (array_type_class);
8688 if (code == SET_TYPE)
8689 return GEN_INT (set_type_class);
8690 if (code == FILE_TYPE)
8691 return GEN_INT (file_type_class);
8692 if (code == LANG_TYPE)
8693 return GEN_INT (lang_type_class);
8695 return GEN_INT (no_type_class);
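/* E.g. `__builtin_classify_type (1)' folds to integer_type_class and
   `__builtin_classify_type (1.0)' to real_type_class; the result
   depends only on the argument's static type, never on its value.  */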
8697 case BUILT_IN_CONSTANT_P:
8702 tree arg = TREE_VALUE (arglist);
8705 return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
8706 || (TREE_CODE (arg) == ADDR_EXPR
8707 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8708 ? const1_rtx : const0_rtx);
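/* So `__builtin_constant_p (2 + 2)' folds to 1 (the sum has already
   been folded to a 'c'-class constant by this point),
   `__builtin_constant_p ("x")' folds to 1 via the
   ADDR_EXPR-of-STRING_CST test, and `__builtin_constant_p (n)' for a
   variable N folds to 0.  */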
8711 case BUILT_IN_FRAME_ADDRESS:
8712 /* The argument must be a nonnegative integer constant.
8713 It counts the number of frames to scan up the stack.
8714 The value is the address of that frame. */
8715 case BUILT_IN_RETURN_ADDRESS:
8716 /* The argument must be a nonnegative integer constant.
8717 It counts the number of frames to scan up the stack.
8718 The value is the return address saved in that frame. */
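/* Typical uses, as a sketch only:

       void *ra = __builtin_return_address (0);   (this frame's
                                                   return address)
       void *fp = __builtin_frame_address (1);    (the caller's frame)

   In both cases the argument must be a nonnegative integer constant,
   as checked just below.  */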
8720 /* Warning about missing arg was already issued. */
8722 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
8723 || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
8725 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8726 error ("invalid arg to `__builtin_frame_address'");
8728 error ("invalid arg to `__builtin_return_address'");
8733 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
8734 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
8735 hard_frame_pointer_rtx);
8737 /* Some ports cannot access arbitrary stack frames. */
8740 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8741 warning ("unsupported arg to `__builtin_frame_address'");
8743 warning ("unsupported arg to `__builtin_return_address'");
8747 /* For __builtin_frame_address, return what we've got. */
8748 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8751 if (GET_CODE (tem) != REG)
8752 tem = copy_to_reg (tem);
/* Returns the address of the area where the structure is returned.
0 otherwise.  */
8758 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
8760 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
8761 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
8764 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
8766 case BUILT_IN_ALLOCA:
8768 /* Arg could be non-integer if user redeclared this fcn wrong. */
8769 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8772 /* Compute the argument. */
8773 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
8775 /* Allocate the desired space. */
8776 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
case BUILT_IN_FFS:
/* If not optimizing, call the library function.  */
8780 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8784 /* Arg could be non-integer if user redeclared this fcn wrong. */
8785 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8788 /* Compute the argument. */
8789 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8790 /* Compute ffs, into TARGET if possible.
8791 Set TARGET to wherever the result comes back. */
8792 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8793 ffs_optab, op0, target, 1);
8798 case BUILT_IN_STRLEN:
8799 /* If not optimizing, call the library function. */
8800 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8804 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8805 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8809 tree src = TREE_VALUE (arglist);
8810 tree len = c_strlen (src);
8813 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8815 rtx result, src_rtx, char_rtx;
8816 enum machine_mode insn_mode = value_mode, char_mode;
8817 enum insn_code icode;
8819 /* If the length is known, just return it. */
8821 return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);
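/* E.g. `strlen ("abcd")' reaches this point with c_strlen having
   produced the INTEGER_CST 4, so the whole call collapses to that
   constant and no strlen insn or library call is emitted.  */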
8823 /* If SRC is not a pointer type, don't do this operation inline. */
8827 /* Call a function if we can't compute strlen in the right mode. */
8829 while (insn_mode != VOIDmode)
8831 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
8832 if (icode != CODE_FOR_nothing)
8835 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
8837 if (insn_mode == VOIDmode)
8840 /* Make a place to write the result of the instruction. */
8843 && GET_CODE (result) == REG
8844 && GET_MODE (result) == insn_mode
8845 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8846 result = gen_reg_rtx (insn_mode);
8848 /* Make sure the operands are acceptable to the predicates. */
8850 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
8851 result = gen_reg_rtx (insn_mode);
8852 src_rtx = memory_address (BLKmode,
8853 expand_expr (src, NULL_RTX, ptr_mode,
8856 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
8857 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
/* Check that the string is readable and has an end.  */
8860 if (flag_check_memory_usage)
8861 emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
8863 GEN_INT (MEMORY_USE_RO), QImode);
8865 char_rtx = const0_rtx;
8866 char_mode = insn_operand_mode[(int)icode][2];
8867 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
8868 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
8870 emit_insn (GEN_FCN (icode) (result,
8871 gen_rtx (MEM, BLKmode, src_rtx),
8872 char_rtx, GEN_INT (align)));
8874 /* Return the value in the proper mode for this function. */
8875 if (GET_MODE (result) == value_mode)
8877 else if (target != 0)
8879 convert_move (target, result, 0);
8883 return convert_to_mode (value_mode, result, 0);
8886 case BUILT_IN_STRCPY:
8887 /* If not optimizing, call the library function. */
8888 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8892 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8893 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8894 || TREE_CHAIN (arglist) == 0
8895 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8899 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
8904 len = size_binop (PLUS_EXPR, len, integer_one_node);
8906 chainon (arglist, build_tree_list (NULL_TREE, len));
8910 case BUILT_IN_MEMCPY:
8911 /* If not optimizing, call the library function. */
8912 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8916 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8917 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8918 || TREE_CHAIN (arglist) == 0
8919 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
8921 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8922 || (TREE_CODE (TREE_TYPE (TREE_VALUE
8923 (TREE_CHAIN (TREE_CHAIN (arglist)))))
8928 tree dest = TREE_VALUE (arglist);
8929 tree src = TREE_VALUE (TREE_CHAIN (arglist));
8930 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8934 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8936 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8937 rtx dest_rtx, dest_mem, src_mem, src_rtx, dest_addr, len_rtx;
8939 /* If either SRC or DEST is not a pointer type, don't do
8940 this operation in-line. */
8941 if (src_align == 0 || dest_align == 0)
8943 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
8944 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8948 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8949 dest_mem = gen_rtx (MEM, BLKmode,
8950 memory_address (BLKmode, dest_rtx));
8951 /* There could be a void* cast on top of the object. */
8952 while (TREE_CODE (dest) == NOP_EXPR)
8953 dest = TREE_OPERAND (dest, 0);
8954 type = TREE_TYPE (TREE_TYPE (dest));
8955 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8956 src_rtx = expand_expr (src, NULL_RTX, ptr_mode, EXPAND_SUM);
8957 src_mem = gen_rtx (MEM, BLKmode,
8958 memory_address (BLKmode, src_rtx));
8959 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
/* Just copy the access rights of SRC to the access rights of DEST.  */
8962 if (flag_check_memory_usage)
8963 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
8966 len_rtx, TYPE_MODE (sizetype));
8968 /* There could be a void* cast on top of the object. */
8969 while (TREE_CODE (src) == NOP_EXPR)
8970 src = TREE_OPERAND (src, 0);
8971 type = TREE_TYPE (TREE_TYPE (src));
8972 MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
8974 /* Copy word part most expediently. */
8976 = emit_block_move (dest_mem, src_mem, len_rtx,
8977 MIN (src_align, dest_align));
8980 dest_addr = force_operand (dest_rtx, NULL_RTX);
8985 case BUILT_IN_MEMSET:
8986 /* If not optimizing, call the library function. */
8987 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8991 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8992 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8993 || TREE_CHAIN (arglist) == 0
8994 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
8996 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8998 != (TREE_CODE (TREE_TYPE
9000 (TREE_CHAIN (TREE_CHAIN (arglist))))))))
9004 tree dest = TREE_VALUE (arglist);
9005 tree val = TREE_VALUE (TREE_CHAIN (arglist));
9006 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9010 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9011 rtx dest_rtx, dest_mem, dest_addr, len_rtx;
9013 /* If DEST is not a pointer type, don't do this
9014 operation in-line. */
9015 if (dest_align == 0)
9018 /* If VAL is not 0, don't do this operation in-line. */
9019 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
9022 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
9023 dest_mem = gen_rtx (MEM, BLKmode,
9024 memory_address (BLKmode, dest_rtx));
9025 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
/* Just check that DST is writable and mark it as readable.  */
9028 if (flag_check_memory_usage)
9029 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
9031 len_rtx, TYPE_MODE (sizetype),
9032 GEN_INT (MEMORY_USE_WO), QImode);
9035 /* There could be a void* cast on top of the object. */
9036 while (TREE_CODE (dest) == NOP_EXPR)
9037 dest = TREE_OPERAND (dest, 0);
9038 type = TREE_TYPE (TREE_TYPE (dest));
9039 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
9041 dest_addr = clear_storage (dest_mem, len_rtx, dest_align);
9044 dest_addr = force_operand (dest_rtx, NULL_RTX);
9049 /* These comparison functions need an instruction that returns an actual
index.  An ordinary compare that just sets the condition codes is not enough.  */
9052 #ifdef HAVE_cmpstrsi
9053 case BUILT_IN_STRCMP:
9054 /* If not optimizing, call the library function. */
9055 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9058 /* If we need to check memory accesses, call the library function. */
9059 if (flag_check_memory_usage)
9063 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9064 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9065 || TREE_CHAIN (arglist) == 0
9066 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
9068 else if (!HAVE_cmpstrsi)
9071 tree arg1 = TREE_VALUE (arglist);
9072 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9076 len = c_strlen (arg1);
9078 len = size_binop (PLUS_EXPR, integer_one_node, len);
9079 len2 = c_strlen (arg2);
9081 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
9083 /* If we don't have a constant length for the first, use the length
9084 of the second, if we know it. We don't require a constant for
9085 this case; some cost analysis could be done if both are available
9086 but neither is constant. For now, assume they're equally cheap.
9088 If both strings have constant lengths, use the smaller. This
9089 could arise if optimization results in strcpy being called with
9090 two fixed strings, or if the code was machine-generated. We should
9091 add some code to the `memcmp' handler below to deal with such
9092 situations, someday. */
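/* E.g. for `strcmp (s, "abc")' only the second length is known, so
   LEN becomes 3 + 1 = 4; for `strcmp ("abcde", "abc")' both are
   known and the smaller, 4, is used.  */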
9093 if (!len || TREE_CODE (len) != INTEGER_CST)
9100 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
9102 if (tree_int_cst_lt (len2, len))
9106 chainon (arglist, build_tree_list (NULL_TREE, len));
9110 case BUILT_IN_MEMCMP:
9111 /* If not optimizing, call the library function. */
9112 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9115 /* If we need to check memory accesses, call the library function. */
9116 if (flag_check_memory_usage)
9120 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9121 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9122 || TREE_CHAIN (arglist) == 0
9123 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
9124 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9125 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
9127 else if (!HAVE_cmpstrsi)
9130 tree arg1 = TREE_VALUE (arglist);
9131 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9132 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9136 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9138 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9139 enum machine_mode insn_mode
9140 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
9142 /* If we don't have POINTER_TYPE, call the function. */
9143 if (arg1_align == 0 || arg2_align == 0)
9145 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
9146 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9150 /* Make a place to write the result of the instruction. */
9153 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
9154 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9155 result = gen_reg_rtx (insn_mode);
9157 emit_insn (gen_cmpstrsi (result,
9158 gen_rtx (MEM, BLKmode,
9159 expand_expr (arg1, NULL_RTX,
9162 gen_rtx (MEM, BLKmode,
9163 expand_expr (arg2, NULL_RTX,
9166 expand_expr (len, NULL_RTX, VOIDmode, 0),
9167 GEN_INT (MIN (arg1_align, arg2_align))));
9169 /* Return the value in the proper mode for this function. */
9170 mode = TYPE_MODE (TREE_TYPE (exp));
9171 if (GET_MODE (result) == mode)
9173 else if (target != 0)
9175 convert_move (target, result, 0);
9179 return convert_to_mode (mode, result, 0);
9182 case BUILT_IN_STRCMP:
9183 case BUILT_IN_MEMCMP:
9187 case BUILT_IN_SETJMP:
9189 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9193 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9195 return expand_builtin_setjmp (buf_addr, target);
9198 /* __builtin_longjmp is passed a pointer to an array of five words
9199 and a value, which is a dummy. It's similar to the C library longjmp
9200 function but works with __builtin_setjmp above. */
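/* One illustration of the five-word buffer shared with
   __builtin_setjmp (the precise contents are target dependent):

       buf[0]          saved frame pointer
       buf[1]          address of the receiver label
       buf[2]          saved stack pointer
       buf[3], buf[4]  reserved for the target

   which matches the FP, LAB, and STACK loads at offsets 0,
   GET_MODE_SIZE (Pmode), and 2 * GET_MODE_SIZE (Pmode) below.  */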
9201 case BUILT_IN_LONGJMP:
9202 if (arglist == 0 || TREE_CHAIN (arglist) == 0
9203 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9207 tree dummy_id = get_identifier ("__dummy");
9208 tree dummy_type = build_function_type (void_type_node, NULL_TREE);
9209 tree dummy_decl = build_decl (FUNCTION_DECL, dummy_id, dummy_type);
9210 #ifdef POINTERS_EXTEND_UNSIGNED
9213 convert_memory_address
9215 expand_expr (TREE_VALUE (arglist),
9216 NULL_RTX, VOIDmode, 0)));
9219 = force_reg (Pmode, expand_expr (TREE_VALUE (arglist),
9223 rtx fp = gen_rtx (MEM, Pmode, buf_addr);
9224 rtx lab = gen_rtx (MEM, Pmode,
9225 plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
9226 enum machine_mode sa_mode
9227 #ifdef HAVE_save_stack_nonlocal
9228 = (HAVE_save_stack_nonlocal
9229 ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
9234 rtx stack = gen_rtx (MEM, sa_mode,
9235 plus_constant (buf_addr,
9236 2 * GET_MODE_SIZE (Pmode)));
9238 DECL_EXTERNAL (dummy_decl) = 1;
9239 TREE_PUBLIC (dummy_decl) = 1;
9240 make_decl_rtl (dummy_decl, NULL_PTR, 1);
9242 /* Expand the second expression just for side-effects. */
9243 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
9244 const0_rtx, VOIDmode, 0);
9246 assemble_external (dummy_decl);
9248 /* Pick up FP, label, and SP from the block and jump. This code is
9249 from expand_goto in stmt.c; see there for detailed comments. */
9250 #if HAVE_nonlocal_goto
9251 if (HAVE_nonlocal_goto)
9252 emit_insn (gen_nonlocal_goto (fp, lab, stack,
9253 XEXP (DECL_RTL (dummy_decl), 0)));
9257 lab = copy_to_reg (lab);
9258 emit_move_insn (hard_frame_pointer_rtx, fp);
9259 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
/* Put in the static chain register the address of the dummy function.  */
9263 emit_move_insn (static_chain_rtx, XEXP (DECL_RTL (dummy_decl), 0));
9264 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
9265 emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
9266 emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
9267 emit_indirect_jump (lab);
9273 /* Various hooks for the DWARF 2 __throw routine. */
9274 case BUILT_IN_UNWIND_INIT:
9275 expand_builtin_unwind_init ();
9278 return frame_pointer_rtx;
9280 return stack_pointer_rtx;
9281 #ifdef DWARF2_UNWIND_INFO
9282 case BUILT_IN_DWARF_FP_REGNUM:
9283 return expand_builtin_dwarf_fp_regnum ();
9284 case BUILT_IN_DWARF_REG_SIZE:
9285 return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
9287 case BUILT_IN_FROB_RETURN_ADDR:
9288 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
9289 case BUILT_IN_EXTRACT_RETURN_ADDR:
9290 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
9291 case BUILT_IN_SET_RETURN_ADDR_REG:
9292 expand_builtin_set_return_addr_reg (TREE_VALUE (arglist));
9294 case BUILT_IN_EH_STUB:
9295 return expand_builtin_eh_stub ();
9296 case BUILT_IN_SET_EH_REGS:
9297 expand_builtin_set_eh_regs (TREE_VALUE (arglist),
9298 TREE_VALUE (TREE_CHAIN (arglist)));
9301 default: /* just do library call, if unknown builtin */
9302 error ("built-in function `%s' not currently supported",
9303 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9306 /* The switch statement above can drop through to cause the function
9307 to be called normally. */
9309 return expand_call (exp, target, ignore);
9312 /* Built-in functions to perform an untyped call and return. */
9314 /* For each register that may be used for calling a function, this
9315 gives a mode used to copy the register's value. VOIDmode indicates
9316 the register is not used for calling a function. If the machine
9317 has register windows, this gives only the outbound registers.
9318 INCOMING_REGNO gives the corresponding inbound register. */
9319 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
9321 /* For each register that may be used for returning values, this gives
9322 a mode used to copy the register's value. VOIDmode indicates the
9323 register is not used for returning values. If the machine has
9324 register windows, this gives only the outbound registers.
9325 INCOMING_REGNO gives the corresponding inbound register. */
9326 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
9328 /* For each register that may be used for calling a function, this
9329 gives the offset of that register into the block returned by
9330 __builtin_apply_args. 0 indicates that the register is not
9331 used for calling a function. */
9332 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
9334 /* Return the offset of register REGNO into the block returned by
9335 __builtin_apply_args. This is not declared static, since it is
9336 needed in objc-act.c. */
9339 apply_args_register_offset (regno)
9344 /* Arguments are always put in outgoing registers (in the argument
block) when that makes sense.  */
9346 #ifdef OUTGOING_REGNO
9347 regno = OUTGOING_REGNO(regno);
9349 return apply_args_reg_offset[regno];
9352 /* Return the size required for the block returned by __builtin_apply_args,
9353 and initialize apply_args_mode. */
9358 static int size = -1;
9360 enum machine_mode mode;
9362 /* The values computed by this function never change. */
9365 /* The first value is the incoming arg-pointer. */
9366 size = GET_MODE_SIZE (Pmode);
9368 /* The second value is the structure value address unless this is
9369 passed as an "invisible" first argument. */
9370 if (struct_value_rtx)
9371 size += GET_MODE_SIZE (Pmode);
9373 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9374 if (FUNCTION_ARG_REGNO_P (regno))
9376 /* Search for the proper mode for copying this register's
9377 value. I'm not sure this is right, but it works so far. */
9378 enum machine_mode best_mode = VOIDmode;
9380 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9382 mode = GET_MODE_WIDER_MODE (mode))
9383 if (HARD_REGNO_MODE_OK (regno, mode)
9384 && HARD_REGNO_NREGS (regno, mode) == 1)
9387 if (best_mode == VOIDmode)
9388 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9390 mode = GET_MODE_WIDER_MODE (mode))
9391 if (HARD_REGNO_MODE_OK (regno, mode)
9392 && (mov_optab->handlers[(int) mode].insn_code
9393 != CODE_FOR_nothing))
9397 if (mode == VOIDmode)
9400 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9401 if (size % align != 0)
9402 size = CEIL (size, align) * align;
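/* E.g. a register slot needing 4-byte alignment after 6 bytes of
   data: CEIL (6, 4) * 4 rounds SIZE up to 8, so the offset recorded
   below always starts the slot on its natural boundary.  */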
9403 apply_args_reg_offset[regno] = size;
9404 size += GET_MODE_SIZE (mode);
9405 apply_args_mode[regno] = mode;
9409 apply_args_mode[regno] = VOIDmode;
9410 apply_args_reg_offset[regno] = 0;
9416 /* Return the size required for the block returned by __builtin_apply,
9417 and initialize apply_result_mode. */
9420 apply_result_size ()
9422 static int size = -1;
9424 enum machine_mode mode;
9426 /* The values computed by this function never change. */
9431 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9432 if (FUNCTION_VALUE_REGNO_P (regno))
9434 /* Search for the proper mode for copying this register's
9435 value. I'm not sure this is right, but it works so far. */
9436 enum machine_mode best_mode = VOIDmode;
9438 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9440 mode = GET_MODE_WIDER_MODE (mode))
9441 if (HARD_REGNO_MODE_OK (regno, mode))
9444 if (best_mode == VOIDmode)
9445 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9447 mode = GET_MODE_WIDER_MODE (mode))
9448 if (HARD_REGNO_MODE_OK (regno, mode)
9449 && (mov_optab->handlers[(int) mode].insn_code
9450 != CODE_FOR_nothing))
9454 if (mode == VOIDmode)
9457 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9458 if (size % align != 0)
9459 size = CEIL (size, align) * align;
9460 size += GET_MODE_SIZE (mode);
9461 apply_result_mode[regno] = mode;
9464 apply_result_mode[regno] = VOIDmode;
9466 /* Allow targets that use untyped_call and untyped_return to override
9467 the size so that machine-specific information can be stored here. */
9468 #ifdef APPLY_RESULT_SIZE
9469 size = APPLY_RESULT_SIZE;
9475 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9476 /* Create a vector describing the result block RESULT. If SAVEP is true,
9477 the result block is used to save the values; otherwise it is used to
9478 restore the values. */
9481 result_vector (savep, result)
9485 int regno, size, align, nelts;
9486 enum machine_mode mode;
9488 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9491 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9492 if ((mode = apply_result_mode[regno]) != VOIDmode)
9494 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9495 if (size % align != 0)
9496 size = CEIL (size, align) * align;
9497 reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
9498 mem = change_address (result, mode,
9499 plus_constant (XEXP (result, 0), size));
9500 savevec[nelts++] = (savep
9501 ? gen_rtx (SET, VOIDmode, mem, reg)
9502 : gen_rtx (SET, VOIDmode, reg, mem));
9503 size += GET_MODE_SIZE (mode);
9505 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
9507 #endif /* HAVE_untyped_call or HAVE_untyped_return */
9509 /* Save the state required to perform an untyped call with the same
9510 arguments as were passed to the current function. */
9513 expand_builtin_apply_args ()
9516 int size, align, regno;
9517 enum machine_mode mode;
9519 /* Create a block where the arg-pointer, structure value address,
9520 and argument registers can be saved. */
9521 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
9523 /* Walk past the arg-pointer and structure value address. */
9524 size = GET_MODE_SIZE (Pmode);
9525 if (struct_value_rtx)
9526 size += GET_MODE_SIZE (Pmode);
9528 /* Save each register used in calling a function to the block. */
9529 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9530 if ((mode = apply_args_mode[regno]) != VOIDmode)
9534 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9535 if (size % align != 0)
9536 size = CEIL (size, align) * align;
9538 tem = gen_rtx (REG, mode, INCOMING_REGNO (regno));
/* For reg-stack.c's stack register housekeeping.
9542 Compare with a similar piece of code in function.c. */
9544 emit_insn (gen_rtx (USE, mode, tem));
9547 emit_move_insn (change_address (registers, mode,
9548 plus_constant (XEXP (registers, 0),
9551 size += GET_MODE_SIZE (mode);
9554 /* Save the arg pointer to the block. */
9555 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9556 copy_to_reg (virtual_incoming_args_rtx));
9557 size = GET_MODE_SIZE (Pmode);
9559 /* Save the structure value address unless this is passed as an
9560 "invisible" first argument. */
9561 if (struct_value_incoming_rtx)
9563 emit_move_insn (change_address (registers, Pmode,
9564 plus_constant (XEXP (registers, 0),
9566 copy_to_reg (struct_value_incoming_rtx));
9567 size += GET_MODE_SIZE (Pmode);
9570 /* Return the address of the block. */
9571 return copy_addr_to_reg (XEXP (registers, 0));
9574 /* Perform an untyped call and save the state required to perform an
9575 untyped return of whatever value was returned by the given function. */
9578 expand_builtin_apply (function, arguments, argsize)
9579 rtx function, arguments, argsize;
9581 int size, align, regno;
9582 enum machine_mode mode;
9583 rtx incoming_args, result, reg, dest, call_insn;
9584 rtx old_stack_level = 0;
9585 rtx call_fusage = 0;
9587 /* Create a block where the return registers can be saved. */
9588 result = assign_stack_local (BLKmode, apply_result_size (), -1);
9590 /* ??? The argsize value should be adjusted here. */
9592 /* Fetch the arg pointer from the ARGUMENTS block. */
9593 incoming_args = gen_reg_rtx (Pmode);
9594 emit_move_insn (incoming_args,
9595 gen_rtx (MEM, Pmode, arguments));
9596 #ifndef STACK_GROWS_DOWNWARD
9597 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9598 incoming_args, 0, OPTAB_LIB_WIDEN);
9601 /* Perform postincrements before actually calling the function. */
9604 /* Push a new argument block and copy the arguments. */
9605 do_pending_stack_adjust ();
9606 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
9608 /* Push a block of memory onto the stack to store the memory arguments.
9609 Save the address in a register, and copy the memory arguments. ??? I
haven't figured out how the calling convention macros affect this,
9611 but it's likely that the source and/or destination addresses in
9612 the block copy will need updating in machine specific ways. */
9613 dest = allocate_dynamic_stack_space (argsize, 0, 0);
9614 emit_block_move (gen_rtx (MEM, BLKmode, dest),
9615 gen_rtx (MEM, BLKmode, incoming_args),
9617 PARM_BOUNDARY / BITS_PER_UNIT);
9619 /* Refer to the argument block. */
9621 arguments = gen_rtx (MEM, BLKmode, arguments);
9623 /* Walk past the arg-pointer and structure value address. */
9624 size = GET_MODE_SIZE (Pmode);
9625 if (struct_value_rtx)
9626 size += GET_MODE_SIZE (Pmode);
9628 /* Restore each of the registers previously saved. Make USE insns
9629 for each of these registers for use in making the call. */
9630 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9631 if ((mode = apply_args_mode[regno]) != VOIDmode)
9633 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9634 if (size % align != 0)
9635 size = CEIL (size, align) * align;
9636 reg = gen_rtx (REG, mode, regno);
9637 emit_move_insn (reg,
9638 change_address (arguments, mode,
9639 plus_constant (XEXP (arguments, 0),
9642 use_reg (&call_fusage, reg);
9643 size += GET_MODE_SIZE (mode);
9646 /* Restore the structure value address unless this is passed as an
9647 "invisible" first argument. */
9648 size = GET_MODE_SIZE (Pmode);
9649 if (struct_value_rtx)
9651 rtx value = gen_reg_rtx (Pmode);
9652 emit_move_insn (value,
9653 change_address (arguments, Pmode,
9654 plus_constant (XEXP (arguments, 0),
9656 emit_move_insn (struct_value_rtx, value);
9657 if (GET_CODE (struct_value_rtx) == REG)
9658 use_reg (&call_fusage, struct_value_rtx);
9659 size += GET_MODE_SIZE (Pmode);
9662 /* All arguments and registers used for the call are set up by now! */
9663 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
9665 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
9666 and we don't want to load it into a register as an optimization,
9667 because prepare_call_address already did it if it should be done. */
9668 if (GET_CODE (function) != SYMBOL_REF)
9669 function = memory_address (FUNCTION_MODE, function);
9671 /* Generate the actual call instruction and save the return value. */
9672 #ifdef HAVE_untyped_call
9673 if (HAVE_untyped_call)
9674 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
9675 result, result_vector (1, result)));
9678 #ifdef HAVE_call_value
9679 if (HAVE_call_value)
9683 /* Locate the unique return register. It is not possible to
9684 express a call that sets more than one return register using
9685 call_value; use untyped_call for that. In fact, untyped_call
9686 only needs to save the return registers in the given block. */
9687 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9688 if ((mode = apply_result_mode[regno]) != VOIDmode)
9691 abort (); /* HAVE_untyped_call required. */
9692 valreg = gen_rtx (REG, mode, regno);
9695 emit_call_insn (gen_call_value (valreg,
9696 gen_rtx (MEM, FUNCTION_MODE, function),
9697 const0_rtx, NULL_RTX, const0_rtx));
9699 emit_move_insn (change_address (result, GET_MODE (valreg),
9707 /* Find the CALL insn we just emitted. */
9708 for (call_insn = get_last_insn ();
9709 call_insn && GET_CODE (call_insn) != CALL_INSN;
9710 call_insn = PREV_INSN (call_insn))
9716 /* Put the register usage information on the CALL. If there is already
9717 some usage information, put ours at the end. */
9718 if (CALL_INSN_FUNCTION_USAGE (call_insn))
9722 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
9723 link = XEXP (link, 1))
9726 XEXP (link, 1) = call_fusage;
9729 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
9731 /* Restore the stack. */
9732 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
9734 /* Return the address of the result block. */
9735 return copy_addr_to_reg (XEXP (result, 0));
9738 /* Perform an untyped return. */
9741 expand_builtin_return (result)
9744 int size, align, regno;
9745 enum machine_mode mode;
9747 rtx call_fusage = 0;
9749 apply_result_size ();
9750 result = gen_rtx (MEM, BLKmode, result);
9752 #ifdef HAVE_untyped_return
9753 if (HAVE_untyped_return)
9755 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
9761 /* Restore the return value and note that each value is used. */
9763 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9764 if ((mode = apply_result_mode[regno]) != VOIDmode)
9766 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9767 if (size % align != 0)
9768 size = CEIL (size, align) * align;
9769 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9770 emit_move_insn (reg,
9771 change_address (result, mode,
9772 plus_constant (XEXP (result, 0),
9775 push_to_sequence (call_fusage);
9776 emit_insn (gen_rtx (USE, VOIDmode, reg));
9777 call_fusage = get_insns ();
9779 size += GET_MODE_SIZE (mode);
9782 /* Put the USE insns before the return. */
9783 emit_insns (call_fusage);
/* Return whatever value was restored by jumping directly to the end
of the function.  */
9787 expand_null_return ();
9790 /* Expand code for a post- or pre- increment or decrement
9791 and return the RTX for the result.
9792 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
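/* E.g. `i++' (POST == 1) yields the old value while `++i' (POST == 0)
   yields the new one; in source terms, a sketch:

       old = i, i = i + 1;    (postincrement: result is OLD)
       i = i + 1;             (preincrement: result is I)
*/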
9795 expand_increment (exp, post, ignore)
9799 register rtx op0, op1;
9800 register rtx temp, value;
9801 register tree incremented = TREE_OPERAND (exp, 0);
9802 optab this_optab = add_optab;
9804 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9805 int op0_is_copy = 0;
9806 int single_insn = 0;
9807 /* 1 means we can't store into OP0 directly,
9808 because it is a subreg narrower than a word,
9809 and we don't dare clobber the rest of the word. */
9812 if (output_bytecode)
9814 bc_expand_expr (exp);
9818 /* Stabilize any component ref that might need to be
9819 evaluated more than once below. */
9821 || TREE_CODE (incremented) == BIT_FIELD_REF
9822 || (TREE_CODE (incremented) == COMPONENT_REF
9823 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9824 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9825 incremented = stabilize_reference (incremented);
9826 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9827 ones into save exprs so that they don't accidentally get evaluated
9828 more than once by the code below. */
9829 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9830 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9831 incremented = save_expr (incremented);
9833 /* Compute the operands as RTX.
9834 Note whether OP0 is the actual lvalue or a copy of it:
9835 I believe it is a copy iff it is a register or subreg
9836 and insns were generated in computing it. */
9838 temp = get_last_insn ();
9839 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9841 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9842 in place but instead must do sign- or zero-extension during assignment,
so we copy it into a new register and let the code below use it as a copy.
Note that we can safely modify this SUBREG since it is known not to be
9847 shared (it was made by the expand_expr call above). */
9849 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9852 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9856 else if (GET_CODE (op0) == SUBREG
9857 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9859 /* We cannot increment this SUBREG in place. If we are
9860 post-incrementing, get a copy of the old value. Otherwise,
9861 just mark that we cannot increment in place. */
9863 op0 = copy_to_reg (op0);
9868 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9869 && temp != get_last_insn ());
9870 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9871 EXPAND_MEMORY_USE_BAD);
9873 /* Decide whether incrementing or decrementing. */
9874 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9875 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9876 this_optab = sub_optab;
9878 /* Convert decrement by a constant into a negative increment. */
9879 if (this_optab == sub_optab
9880 && GET_CODE (op1) == CONST_INT)
9882 op1 = GEN_INT (- INTVAL (op1));
9883 this_optab = add_optab;
9886 /* For a preincrement, see if we can do this with a single instruction. */
9889 icode = (int) this_optab->handlers[(int) mode].insn_code;
9890 if (icode != (int) CODE_FOR_nothing
9891 /* Make sure that OP0 is valid for operands 0 and 1
9892 of the insn we want to queue. */
9893 && (*insn_operand_predicate[icode][0]) (op0, mode)
9894 && (*insn_operand_predicate[icode][1]) (op0, mode)
9895 && (*insn_operand_predicate[icode][2]) (op1, mode))
9899 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9900 then we cannot just increment OP0. We must therefore contrive to
9901 increment the original value. Then, for postincrement, we can return
9902 OP0 since it is a copy of the old value. For preincrement, expand here
9903 unless we can do it with a single insn.
9905 Likewise if storing directly into OP0 would clobber high bits
9906 we need to preserve (bad_subreg). */
9907 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9909 /* This is the easiest way to increment the value wherever it is.
9910 Problems with multiple evaluation of INCREMENTED are prevented
9911 because either (1) it is a component_ref or preincrement,
9912 in which case it was stabilized above, or (2) it is an array_ref
9913 with constant index in an array in a register, which is
9914 safe to reevaluate. */
9915 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9916 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9917 ? MINUS_EXPR : PLUS_EXPR),
9920 TREE_OPERAND (exp, 1));
9922 while (TREE_CODE (incremented) == NOP_EXPR
9923 || TREE_CODE (incremented) == CONVERT_EXPR)
9925 newexp = convert (TREE_TYPE (incremented), newexp);
9926 incremented = TREE_OPERAND (incremented, 0);
temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9930 return post ? op0 : temp;
9935 /* We have a true reference to the value in OP0.
9936 If there is an insn to add or subtract in this mode, queue it.
9937 Queueing the increment insn avoids the register shuffling
9938 that often results if we must increment now and first save
9939 the old value for subsequent use. */
9941 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9942 op0 = stabilize (op0);
9945 icode = (int) this_optab->handlers[(int) mode].insn_code;
9946 if (icode != (int) CODE_FOR_nothing
9947 /* Make sure that OP0 is valid for operands 0 and 1
9948 of the insn we want to queue. */
9949 && (*insn_operand_predicate[icode][0]) (op0, mode)
9950 && (*insn_operand_predicate[icode][1]) (op0, mode))
9952 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9953 op1 = force_reg (mode, op1);
9955 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9957 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9959 rtx addr = (general_operand (XEXP (op0, 0), mode)
9960 ? force_reg (Pmode, XEXP (op0, 0))
9961 : copy_to_reg (XEXP (op0, 0)));
9964 op0 = change_address (op0, VOIDmode, addr);
9965 temp = force_reg (GET_MODE (op0), op0);
9966 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9967 op1 = force_reg (mode, op1);
9969 /* The increment queue is LIFO, thus we have to `queue'
9970 the instructions in reverse order. */
9971 enqueue_insn (op0, gen_move_insn (op0, temp));
9972 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9977 /* Preincrement, or we can't increment with one simple insn. */
9979 /* Save a copy of the value before inc or dec, to return it later. */
9980 temp = value = copy_to_reg (op0);
9982 /* Arrange to return the incremented value. */
9983 /* Copy the rtx because expand_binop will protect from the queue,
9984 and the results of that would be invalid for us to return
9985 if our caller does emit_queue before using our result. */
9986 temp = copy_rtx (value = op0);
9988 /* Increment however we can. */
9989 op1 = expand_binop (mode, this_optab, value, op1,
9990 flag_check_memory_usage ? NULL_RTX : op0,
9991 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9992 /* Make sure the value is stored into OP0. */
9994 emit_move_insn (op0, op1);
9999 /* Expand all function calls contained within EXP, innermost ones first.
10000 But don't look within expressions that have sequence points.
10001 For each CALL_EXPR, record the rtx for its value
10002 in the CALL_EXPR_RTL field. */
10005 preexpand_calls (exp)
10008 register int nops, i;
10009 int type = TREE_CODE_CLASS (TREE_CODE (exp));
10011 if (! do_preexpand_calls)
10014 /* Only expressions and references can contain calls. */
10016 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
10019 switch (TREE_CODE (exp))
10022 /* Do nothing if already expanded. */
10023 if (CALL_EXPR_RTL (exp) != 0
10024 /* Do nothing if the call returns a variable-sized object. */
10025 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
10026 /* Do nothing to built-in functions. */
10027 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
10028 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
10030 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
10033 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
10036 case COMPOUND_EXPR:
10038 case TRUTH_ANDIF_EXPR:
10039 case TRUTH_ORIF_EXPR:
10040 /* If we find one of these, then we can be sure
10041 the adjust will be done for it (since it makes jumps).
10042 Do it now, so that if this is inside an argument
10043 of a function, we don't get the stack adjustment
10044 after some other args have already been pushed. */
10045 do_pending_stack_adjust ();
10050 case WITH_CLEANUP_EXPR:
10051 case CLEANUP_POINT_EXPR:
10055 if (SAVE_EXPR_RTL (exp) != 0)
10062 nops = tree_code_length[(int) TREE_CODE (exp)];
10063 for (i = 0; i < nops; i++)
10064 if (TREE_OPERAND (exp, i) != 0)
10066 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
10067 if (type == 'e' || type == '<' || type == '1' || type == '2'
10069 preexpand_calls (TREE_OPERAND (exp, i));
10073 /* At the start of a function, record that we have no previously-pushed
10074 arguments waiting to be popped. */
10077 init_pending_stack_adjust ()
10079 pending_stack_adjust = 0;
10082 /* When exiting from function, if safe, clear out any pending stack adjust
10083 so the adjustment won't get done. */
10086 clear_pending_stack_adjust ()
10088 #ifdef EXIT_IGNORE_STACK
10090 && ! flag_omit_frame_pointer && EXIT_IGNORE_STACK
10091 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
10092 && ! flag_inline_functions)
10093 pending_stack_adjust = 0;
10097 /* Pop any previously-pushed arguments that have not been popped yet. */
10100 do_pending_stack_adjust ()
10102 if (inhibit_defer_pop == 0)
10104 if (pending_stack_adjust != 0)
10105 adjust_stack (GEN_INT (pending_stack_adjust));
10106 pending_stack_adjust = 0;
10110 /* Expand conditional expressions. */
10112 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
10113 LABEL is an rtx of code CODE_LABEL, in this function and all the
10117 jumpifnot (exp, label)
10121 do_jump (exp, label, NULL_RTX);
10124 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
10127 jumpif (exp, label)
10131 do_jump (exp, NULL_RTX, label);
10134 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
10135 the result is zero, or IF_TRUE_LABEL if the result is one.
10136 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
10137 meaning fall through in that case.
10139 do_jump always does any pending stack adjust except when it does not
10140 actually perform a jump. An example where there is no jump
10141 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
10143 This function is responsible for optimizing cases such as
10144 &&, || and comparison operators in EXP. */
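/* E.g. for `if (a && b)' no boolean value is ever materialized: the
   TRUTH_ANDIF_EXPR case below jumps straight to the false label as
   soon as A is zero, and only otherwise evaluates and tests B.  */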
10147 do_jump (exp, if_false_label, if_true_label)
10149 rtx if_false_label, if_true_label;
10151 register enum tree_code code = TREE_CODE (exp);
10152 /* Some cases need to create a label to jump to
10153 in order to properly fall through.
10154 These cases set DROP_THROUGH_LABEL nonzero. */
10155 rtx drop_through_label = 0;
10157 rtx comparison = 0;
10160 enum machine_mode mode;
10170 temp = integer_zerop (exp) ? if_false_label : if_true_label;
10176 /* This is not true with #pragma weak */
10178 /* The address of something can never be zero. */
10180 emit_jump (if_true_label);
10185 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
10186 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
10187 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
/* If we are narrowing the operand, we have to do the compare in the
narrower type.  */
10192 if ((TYPE_PRECISION (TREE_TYPE (exp))
10193 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10195 case NON_LVALUE_EXPR:
10196 case REFERENCE_EXPR:
10201 /* These cannot change zero->non-zero or vice versa. */
10202 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
/* This is never fewer insns than evaluating the PLUS_EXPR followed by
10207 a test and can be longer if the test is eliminated. */
10209 /* Reduce to minus. */
10210 exp = build (MINUS_EXPR, TREE_TYPE (exp),
10211 TREE_OPERAND (exp, 0),
10212 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
10213 TREE_OPERAND (exp, 1))));
10214 /* Process as MINUS. */
10218 /* Non-zero iff operands of minus differ. */
10219 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
10220 TREE_OPERAND (exp, 0),
10221 TREE_OPERAND (exp, 1)),
10226 /* If we are AND'ing with a small constant, do this comparison in the
10227 smallest type that fits. If the machine doesn't have comparisons
10228 that small, it will be converted back to the wider comparison.
10229 This helps if we are testing the sign bit of a narrower object.
10230 combine can't do this for us because it can't know whether a
10231 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
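/* E.g. `if (x & 0x80)' with a 32-bit X: floor_log2 (0x80) is 7, so
   the test can be done as an 8-bit (QImode) comparison when the
   target provides one, rather than as a full-width SImode compare.  */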
10233 if (! SLOW_BYTE_ACCESS
10234 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
10235 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
10236 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
10237 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
10238 && (type = type_for_mode (mode, 1)) != 0
10239 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10240 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10241 != CODE_FOR_nothing))
10243 do_jump (convert (type, exp), if_false_label, if_true_label);
10248 case TRUTH_NOT_EXPR:
10249 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10252 case TRUTH_ANDIF_EXPR:
10253 if (if_false_label == 0)
10254 if_false_label = drop_through_label = gen_label_rtx ();
10255 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
10256 start_cleanup_deferal ();
10257 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10258 end_cleanup_deferal ();
10261 case TRUTH_ORIF_EXPR:
10262 if (if_true_label == 0)
10263 if_true_label = drop_through_label = gen_label_rtx ();
10264 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
10265 start_cleanup_deferal ();
10266 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10267 end_cleanup_deferal ();
10270 case COMPOUND_EXPR:
10271 push_temp_slots ();
10272 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
10273 preserve_temp_slots (NULL_RTX);
10274 free_temp_slots ();
10277 do_pending_stack_adjust ();
10278 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10281 case COMPONENT_REF:
10282 case BIT_FIELD_REF:
10285 int bitsize, bitpos, unsignedp;
10286 enum machine_mode mode;
10292 /* Get description of this reference. We don't actually care
10293 about the underlying object here. */
10294 get_inner_reference (exp, &bitsize, &bitpos, &offset,
10295 &mode, &unsignedp, &volatilep,
10298 type = type_for_size (bitsize, unsignedp);
10299 if (! SLOW_BYTE_ACCESS
10300 && type != 0 && bitsize >= 0
10301 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10302 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10303 != CODE_FOR_nothing))
10305 do_jump (convert (type, exp), if_false_label, if_true_label);
10312 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10313 if (integer_onep (TREE_OPERAND (exp, 1))
10314 && integer_zerop (TREE_OPERAND (exp, 2)))
10315 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10317 else if (integer_zerop (TREE_OPERAND (exp, 1))
10318 && integer_onep (TREE_OPERAND (exp, 2)))
10319 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10323 register rtx label1 = gen_label_rtx ();
10324 drop_through_label = gen_label_rtx ();
10326 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10328 start_cleanup_deferal ();
10329 /* Now the THEN-expression. */
10330 do_jump (TREE_OPERAND (exp, 1),
10331 if_false_label ? if_false_label : drop_through_label,
10332 if_true_label ? if_true_label : drop_through_label);
10333 /* In case the do_jump just above never jumps. */
10334 do_pending_stack_adjust ();
10335 emit_label (label1);
10337 /* Now the ELSE-expression. */
10338 do_jump (TREE_OPERAND (exp, 2),
10339 if_false_label ? if_false_label : drop_through_label,
10340 if_true_label ? if_true_label : drop_through_label);
10341 end_cleanup_deferal ();
    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
                 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
          do_jump
            (fold
             (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
                     fold (build (EQ_EXPR, TREE_TYPE (exp),
                                  fold (build1 (REALPART_EXPR,
                                                TREE_TYPE (inner_type),
                                                TREE_OPERAND (exp, 0))),
                                  fold (build1 (REALPART_EXPR,
                                                TREE_TYPE (inner_type),
                                                TREE_OPERAND (exp, 1))))),
                     fold (build (EQ_EXPR, TREE_TYPE (exp),
                                  fold (build1 (IMAGPART_EXPR,
                                                TREE_TYPE (inner_type),
                                                TREE_OPERAND (exp, 0))),
                                  fold (build1 (IMAGPART_EXPR,
                                                TREE_TYPE (inner_type),
                                                TREE_OPERAND (exp, 1))))))),
             if_false_label, if_true_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (TYPE_MODE (inner_type)))
          do_jump_by_parts_equality (exp, if_false_label, if_true_label);
        else
          comparison = compare (exp, EQ, EQ);
        break;
      }

    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
                 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
          do_jump
            (fold
             (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
                     fold (build (NE_EXPR, TREE_TYPE (exp),
                                  fold (build1 (REALPART_EXPR,
                                                TREE_TYPE (inner_type),
                                                TREE_OPERAND (exp, 0))),
                                  fold (build1 (REALPART_EXPR,
                                                TREE_TYPE (inner_type),
                                                TREE_OPERAND (exp, 1))))),
                     fold (build (NE_EXPR, TREE_TYPE (exp),
                                  fold (build1 (IMAGPART_EXPR,
                                                TREE_TYPE (inner_type),
                                                TREE_OPERAND (exp, 0))),
                                  fold (build1 (IMAGPART_EXPR,
                                                TREE_TYPE (inner_type),
                                                TREE_OPERAND (exp, 1))))))),
             if_false_label, if_true_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (TYPE_MODE (inner_type)))
          do_jump_by_parts_equality (exp, if_true_label, if_false_label);
        else
          comparison = compare (exp, NE, NE);
        break;
      }

    case LT_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
        comparison = compare (exp, LT, LTU);
      break;

    case LE_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
        comparison = compare (exp, LE, LEU);
      break;

    case GT_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
        comparison = compare (exp, GT, GTU);
      break;

    case GE_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
        comparison = compare (exp, GE, GEU);
      break;
    default:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
         comparisons and tests from non-SI objects to have different code
         paths.  */
      /* Copy to register to avoid generating bad insns by cse
         from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
        temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      if (GET_CODE (temp) == CONST_INT)
        comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
      else if (GET_CODE (temp) == LABEL_REF)
        comparison = const_true_rtx;
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
               && !can_compare_p (GET_MODE (temp)))
        /* Note swapping the labels gives us not-equal.  */
        do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
        comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
                                       NE, TREE_UNSIGNED (TREE_TYPE (exp)),
                                       GET_MODE (temp), NULL_RTX, 0);
      break;
    }

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  /* If COMPARISON is nonzero here, it is an rtx that can be substituted
     straight into a conditional jump instruction as the jump condition.
     Otherwise, all the work has been done already.  */

  if (comparison == const_true_rtx)
    {
      if (if_true_label)
        emit_jump (if_true_label);
    }
  else if (comparison == const0_rtx)
    {
      if (if_false_label)
        emit_jump (if_false_label);
    }
  else if (comparison)
    do_jump_for_compare (comparison, if_false_label, if_true_label);

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
         do any stack adjusts from that code, before the place
         where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but the high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
                               (unsignedp || i > 0) ? GTU : GT,
                               unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_true_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
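/* A standalone, illustrative sketch (kept under #if 0, so it is never
   compiled) of the word-by-word strategy used by the routine above and
   by the rtx variant that follows: the high-order words decide, and
   lower words are looked at only when all higher words are equal.  The
   two-word struct and all names are invented for the example.  */
#if 0
#include <stdio.h>

struct dword { unsigned long hi, lo; };   /* high-order word first */

static int
dword_gtu (struct dword a, struct dword b)
{
  /* High-order words decide unless they compare equal.  */
  if (a.hi > b.hi)
    return 1;
  if (a.hi != b.hi)
    return 0;
  /* Lower words are always compared unsigned.  */
  return a.lo > b.lo;
}

int
main ()
{
  struct dword x = { 1, 0 }, y = { 0, ~0UL };
  printf ("%d\n", dword_gtu (x, y));      /* prints 1 */
  return 0;
}
#endif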
/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but the high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
                               (unsignedp || i > 0) ? GTU : GT,
                               unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_true_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
                                   operand_subword_force (op1, i, mode),
                                   EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
                                   word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, GET_MODE (op0)),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
                                   NULL_RTX, 0);

      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp == const0_rtx)
        emit_jump (if_true_label);
      else
        do_jump_for_compare (comp, if_false_label, if_true_label);

      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i,
                                                          GET_MODE (op0)),
                                   const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
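/* A standalone, illustrative sketch (kept under #if 0, so it is never
   compiled) of the "or all the words" test above: a multiword value is
   zero exactly when the inclusive-OR of all its words is zero, so one
   compare replaces one compare per word.  The array representation and
   the names are invented for the example.  */
#if 0
static int
wide_is_zero (unsigned long word[], int nwords)
{
  unsigned long part = word[0];
  int i;

  /* Mirrors the expand_binop IOR loop above.  */
  for (i = 1; i < nwords; i++)
    part |= word[i];
  return part == 0;
}
#endif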
/* Given a comparison expression in rtl form, output conditional branches to
   IF_TRUE_LABEL, IF_FALSE_LABEL, or both.  */

static void
do_jump_for_compare (comparison, if_false_label, if_true_label)
     rtx comparison, if_false_label, if_true_label;
{
  if (if_true_label)
    {
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
        emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
      else
        abort ();

      if (if_false_label)
        emit_jump (if_false_label);
    }
  else if (if_false_label)
    {
      rtx insn;
      rtx prev = get_last_insn ();
      rtx branch = 0;

      /* Output the branch with the opposite condition.  Then try to invert
         what is generated.  If more than one insn is a branch, or if the
         branch is not the last insn written, abort.  If we can't invert
         the branch, make a true label, redirect this jump to that,
         emit a jump to the false label and define the true label.  */

      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
        emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
      else
        abort ();

      /* Here we get the first insn that was just emitted.  It used to be the
         case that, on some machines, emitting the branch would discard
         the previous compare insn and emit a replacement.  This isn't
         done anymore, but abort if we see that PREV is deleted.  */

      if (prev == 0)
        insn = get_insns ();
      else if (INSN_DELETED_P (prev))
        abort ();
      else
        insn = NEXT_INSN (prev);

      for (; insn; insn = NEXT_INSN (insn))
        if (GET_CODE (insn) == JUMP_INSN)
          {
            if (branch)
              abort ();
            branch = insn;
          }

      if (branch != get_last_insn ())
        abort ();

      JUMP_LABEL (branch) = if_false_label;
      if (! invert_jump (branch, if_false_label))
        {
          if_true_label = gen_label_rtx ();
          redirect_jump (branch, if_true_label);
          emit_jump (if_false_label);
          emit_label (if_true_label);
        }
    }
}
/* Generate code for a comparison expression EXP
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static rtx
compare (exp, signed_code, unsigned_code)
     register tree exp;
     enum rtx_code signed_code, unsigned_code;
{
  register rtx op0
    = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  register rtx op1
    = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
  register enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);
  enum rtx_code code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
          == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
          == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  return compare_from_rtx (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
}
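/* A standalone, illustrative sketch (kept under #if 0, so it is never
   compiled) of why compare must choose SIGNED_CODE or UNSIGNED_CODE
   from the operand type: the same bit pattern orders differently under
   signed and unsigned comparison.  */
#if 0
#include <stdio.h>

int
main ()
{
  int si = -1;
  unsigned int ui = (unsigned int) si;  /* same bits as si */

  printf ("%d\n", si < 1);              /* signed LT:    prints 1 */
  printf ("%d\n", ui < 1u);             /* unsigned LTU: prints 0 */
  return 0;
}
#endif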
/* Like compare but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
}
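/* A standalone, illustrative sketch (kept under #if 0, so it is never
   compiled) of the operand canonicalization above: when the constant
   comes first, swap the operands and the condition, so "5 < x" becomes
   "x > 5".  The enum and function here are invented stand-ins for the
   rtx condition codes and for swap_condition.  */
#if 0
enum cond { C_LT, C_GT, C_LE, C_GE, C_EQ, C_NE };

static enum cond
swap_cond (enum cond c)
{
  switch (c)
    {
    case C_LT: return C_GT;
    case C_GT: return C_LT;
    case C_LE: return C_GE;
    case C_GE: return C_LE;
    default:   return c;        /* EQ and NE are symmetric */
    }
}
#endif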
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label, pattern, jump_pat;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }
  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1))
      && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
    {
      tree inner = TREE_OPERAND (arg0, 0);
      HOST_WIDE_INT tem;
      int bitnum;
      int ops_unsignedp;

      tem = INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
                                 NULL_RTX, VOIDmode, 0));
      /* In this case, immed_double_const will sign extend the value to make
         it look the same on the host and target.  We must remove the
         sign-extension before calling exact_log2, since exact_log2 will
         fail for negative values.  */
      if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT
          && BITS_PER_WORD == GET_MODE_BITSIZE (TYPE_MODE (type)))
        /* We don't use the obvious constant shift to generate the mask,
           because that generates compiler warnings when BITS_PER_WORD is
           greater than or equal to HOST_BITS_PER_WIDE_INT, even though this
           code is unreachable in that case.  */
        tem = tem & GET_MODE_MASK (word_mode);
      bitnum = exact_log2 (tem);

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
              < TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (subtarget == 0 || GET_CODE (subtarget) != REG
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }
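  /* A standalone, illustrative sketch (kept under #if 0, so it is never
     compiled) of the single-bit trick above: (x & (1 << n)) != 0
     becomes (x >> n) & 1, and the EQ form just XORs the result with 1,
     so no scc instruction is needed.  Names are invented for the
     example.  */
#if 0
  static unsigned int
  bit_ne_zero (unsigned int x, int n)
  {
    return (x >> n) & 1u;               /* shift bit to bit 0, mask */
  }

  static unsigned int
  bit_eq_zero (unsigned int x, int n)
  {
    return ((x >> n) & 1u) ^ 1u;        /* invert for the EQ case */
  }
#endif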
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if emit_store_flag does anything, it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
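/* A standalone, illustrative sketch (kept under #if 0, so it is never
   compiled) of the set/jump/set sequence emitted above when
   emit_store_flag fails: preload the "true" value, then branch around
   a store of the "false" value.  Names are invented for the example.  */
#if 0
static int
store_flag_lt (int op0, int op1)
{
  int target = 1;       /* emit_move_insn (target, const1_rtx) */
  if (op0 < op1)
    goto out;           /* the conditional branch over the store */
  target = 0;           /* emit_move_insn (target, const0_rtx) */
 out:
  return target;
}
#endif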
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
  emit_jump_insn (gen_bgtu (default_label));

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */

  index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx (PLUS, Pmode,
                   gen_rtx (MULT, Pmode, index,
                            GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                   gen_rtx (LABEL_REF, Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

#ifndef CASE_VECTOR_PC_RELATIVE
  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! flag_pic)
    emit_barrier ();
#endif
}

#endif /* HAVE_tablejump */
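/* A standalone, illustrative sketch (kept under #if 0, so it is never
   compiled) of the dispatch built above: subtract the lowest case
   value, do a single unsigned compare against the table length
   (out-of-range and below-minimum indices both become large unsigned
   values), then jump through the table.  GNU C computed gotos stand in
   for the case vector; all names are invented for the example.  */
#if 0
static int
dispatch (int value)
{
  static const void *table[] = { &&case3, &&case4, &&case5 };
  unsigned int index = (unsigned int) (value - 3); /* lowest value subtracted */

  if (index > 2u)               /* the GTU test against RANGE */
    goto deflt;
  goto *table[index];           /* the tablejump itself */

 case3: return 30;
 case4: return 40;
 case5: return 50;
 deflt: return -1;
}
#endif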
/* Emit a suitable bytecode to load a value from memory, assuming a pointer
   to that value is on the top of the stack.  The resulting type is TYPE, and
   the source declaration is DECL.  */

void
bc_load_memory (type, decl)
     tree type, decl;
{
  enum bytecode_opcode opcode;

  /* Bit fields are special.  We only know about signed and
     unsigned ints, and enums.  The latter are treated as
     signed integers.  */

  if (DECL_BIT_FIELD (decl))
    if (TREE_CODE (type) == ENUMERAL_TYPE
        || TREE_CODE (type) == INTEGER_TYPE)
      opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
    else
      abort ();
  else
    /* See corresponding comment in bc_store_memory.  */
    if (TYPE_MODE (type) == BLKmode
        || TYPE_MODE (type) == VOIDmode)
      abort ();
    else
      opcode = mode_to_load_map [(int) TYPE_MODE (type)];

  if (opcode == neverneverland)
    abort ();

  bc_emit_bytecode (opcode);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}

/* Store the contents of the second stack slot to the address in the
   top stack slot.  DECL is the declaration of the destination and is used
   to determine whether we're dealing with a bitfield.  */

void
bc_store_memory (type, decl)
     tree type, decl;
{
  enum bytecode_opcode opcode;

  if (DECL_BIT_FIELD (decl))
    {
      if (TREE_CODE (type) == ENUMERAL_TYPE
          || TREE_CODE (type) == INTEGER_TYPE)
        opcode = sstoreBI;
      else
        abort ();
    }
  else
    if (TYPE_MODE (type) == BLKmode)
      {
        /* Copy structure.  This expands to a block copy instruction, storeBLK.
           In addition to the arguments expected by the other store instructions,
           it also expects a type size (SImode) on top of the stack, which is the
           structure size in size units (usually bytes).  The first two arguments
           are already on the stack; so we just put the size on level 1.  For some
           other languages, the size may be variable; this is why we don't encode
           it as a storeBLK literal, but rather treat it as a full-fledged
           expression.  */

        bc_expand_expr (TYPE_SIZE (type));
        opcode = storeBLK;
      }
    else
      opcode = mode_to_store_map [(int) TYPE_MODE (type)];

  if (opcode == neverneverland)
    abort ();

  bc_emit_bytecode (opcode);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}
/* Allocate local stack space sufficient to hold a value of the given
   SIZE at alignment boundary ALIGNMENT bits.  ALIGNMENT must be an
   integral power of 2.  A special case is locals of type VOID, which
   have size 0 and alignment 1; any "voidish" SIZE or ALIGNMENT is
   remapped into the corresponding attribute of SI.  */

rtx
bc_allocate_local (size, alignment)
     int size, alignment;
{
  rtx retval;
  int byte_alignment;

  /* Normalize size and alignment  */
  if (!size)
    size = UNITS_PER_WORD;

  if (alignment < BITS_PER_UNIT)
    byte_alignment = 1 << (INT_ALIGN - 1);
  else
    byte_alignment = alignment / BITS_PER_UNIT;

  if (local_vars_size & (byte_alignment - 1))
    local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));

  retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
  local_vars_size += size;

  return retval;
}
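/* A standalone, illustrative sketch (kept under #if 0, so it is never
   compiled): for a power-of-two BYTE_ALIGNMENT, the adjustment above is
   the usual round-up-to-alignment, which can also be written with a
   mask.  */
#if 0
static int
round_up (int offset, int byte_alignment)       /* power of 2 */
{
  return (offset + byte_alignment - 1) & ~(byte_alignment - 1);
  /* e.g. round_up (5, 4) == 8, round_up (8, 4) == 8 */
}
#endif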
/* Allocate variable-sized local array.  Variable-sized arrays are
   actually pointers to the address in memory where they are stored.  */

rtx
bc_allocate_variable_array (size)
     tree size;
{
  rtx retval;

  const int ptralign = (1 << (PTR_ALIGN - 1));

  /* Align pointer */
  if (local_vars_size & ptralign)
    local_vars_size += ptralign - (local_vars_size & ptralign);

  /* Note down local space needed: pointer to block; also return
     the rtx for the pointer itself.  */

  retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
  local_vars_size += POINTER_SIZE / BITS_PER_UNIT;

  return retval;
}
/* Push the machine address for the given external variable offset.  */

void
bc_load_externaddr (externaddr)
     rtx externaddr;
{
  bc_emit_bytecode (constP);
  bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
                         BYTECODE_BC_LABEL (externaddr)->offset);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}

/* Like above, but expects an IDENTIFIER.  */

void
bc_load_externaddr_id (id, offset)
     tree id;
     int offset;
{
  if (!IDENTIFIER_POINTER (id))
    abort ();

  bc_emit_bytecode (constP);
  bc_emit_code_labelref (xstrdup (IDENTIFIER_POINTER (id)), offset);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}
/* Push the machine address for the given local variable offset.  */

void
bc_load_localaddr (localaddr)
     rtx localaddr;
{
  bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
}

/* Push the machine address for the given parameter offset.
   NOTE: offset is in bits.  */

void
bc_load_parmaddr (parmaddr)
     rtx parmaddr;
{
  bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
                              / BITS_PER_UNIT));
}
/* Convert a[i] into *(a + i).  */

tree
bc_canonicalize_array_ref (exp)
     tree exp;
{
  tree type = TREE_TYPE (exp);
  tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
                           TREE_OPERAND (exp, 0));
  tree index = TREE_OPERAND (exp, 1);

  /* Convert the integer argument to a type the same size as a pointer
     so the multiply won't overflow spuriously.  */

  if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
    index = convert (type_for_size (POINTER_SIZE, 0), index);

  /* The array address isn't volatile even if the array is.
     (Of course this isn't terribly relevant since the bytecode
     translator treats nearly everything as volatile anyway.)  */
  TREE_THIS_VOLATILE (array_adr) = 0;

  return build1 (INDIRECT_REF, type,
                 fold (build (PLUS_EXPR,
                              TYPE_POINTER_TO (type),
                              array_adr,
                              fold (build (MULT_EXPR,
                                           TYPE_POINTER_TO (type),
                                           index,
                                           size_in_bytes (type))))));
}
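/* A standalone, illustrative sketch (kept under #if 0, so it is never
   compiled) of the rewrite above: a[i] becomes *(a + i), i.e. an
   indirection through the array address plus i times the element size,
   with the index first widened to pointer size so the multiply cannot
   overflow in a narrower type.  long stands in for the pointer-sized
   integer here.  */
#if 0
static int
array_ref (int *a, long i)      /* i already widened to pointer size */
{
  return * (int *) ((char *) a + i * (long) sizeof (int));
}
#endif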
/* Load the address of the component referenced by the given
   COMPONENT_REF expression.

   Returns innermost lvalue.  */

tree
bc_expand_component_address (exp)
     tree exp;
{
  tree tem;
  enum machine_mode mode;
  int bitpos = 0;
  HOST_WIDE_INT SIval;

  tem = TREE_OPERAND (exp, 1);
  mode = DECL_MODE (tem);

  /* Compute cumulative bit offset for nested component refs
     and array refs, and find the ultimate containing object.  */

  for (tem = exp;; tem = TREE_OPERAND (tem, 0))
    {
      if (TREE_CODE (tem) == COMPONENT_REF)
        bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
      else
        if (TREE_CODE (tem) == ARRAY_REF
            && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
            && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
          bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
                     * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
                     /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
        else
          break;
    }

  bc_expand_expr (tem);

  /* For bitfields also push their offset and size */
  if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
    bc_push_offset_and_size (bitpos,
                             TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 1)))
                             /* * DECL_SIZE_UNIT (TREE_OPERAND (exp, 1)) */);
  else
    if (SIval = bitpos / BITS_PER_UNIT)
      bc_emit_instruction (addconstPSI, SIval);

  return (TREE_OPERAND (exp, 1));
}
/* Emit code to push two SI constants */

void
bc_push_offset_and_size (offset, size)
     HOST_WIDE_INT offset, size;
{
  bc_emit_instruction (constSI, offset);
  bc_emit_instruction (constSI, size);
}
/* Emit byte code to push the address of the given lvalue expression to
   the stack.  If it's a bit field, we also push offset and size info.

   Returns innermost component, which allows us to determine not only
   its type, but also whether it's a bitfield.  */

tree
bc_expand_address (exp)
     tree exp;
{
  if (!exp || TREE_CODE (exp) == ERROR_MARK)
    return (exp);

  switch (TREE_CODE (exp))
    {
    case ARRAY_REF:
      return (bc_expand_address (bc_canonicalize_array_ref (exp)));

    case COMPONENT_REF:
      return (bc_expand_component_address (exp));

    case INDIRECT_REF:
      bc_expand_expr (TREE_OPERAND (exp, 0));

      /* For variable-sized types: retrieve pointer.  Sometimes the
         TYPE_SIZE tree is NULL.  Is this a bug or a feature?  Let's
         also make sure we have an operand, just in case...  */

      if (TREE_OPERAND (exp, 0)
          && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
        bc_emit_instruction (loadP);

      /* If packed, also return offset and size */
      if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
        bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
                                 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));

      return (TREE_OPERAND (exp, 0));

    case FUNCTION_DECL:
      bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
                             BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
      break;

    case PARM_DECL:
      bc_load_parmaddr (DECL_RTL (exp));

      /* For variable-sized types: retrieve pointer */
      if (TYPE_SIZE (TREE_TYPE (exp))
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
        bc_emit_instruction (loadP);

      /* If packed, also return offset and size */
      if (DECL_BIT_FIELD (exp))
        bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
                                 TREE_INT_CST_LOW (DECL_SIZE (exp)));
      break;

    case RESULT_DECL:
      bc_emit_instruction (returnP);
      break;

    case VAR_DECL:
      if (BYTECODE_LABEL (DECL_RTL (exp)))
        bc_load_externaddr (DECL_RTL (exp));
      else
        {
          if (DECL_EXTERNAL (exp))
            bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
                                   (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
          else
            bc_load_localaddr (DECL_RTL (exp));
        }

      /* For variable-sized types: retrieve pointer */
      if (TYPE_SIZE (TREE_TYPE (exp))
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
        bc_emit_instruction (loadP);

      /* If packed, also return offset and size */
      if (DECL_BIT_FIELD (exp))
        bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
                                 TREE_INT_CST_LOW (DECL_SIZE (exp)));
      break;

    case STRING_CST:
      {
        rtx r;

        bc_emit_bytecode (constP);
        r = output_constant_def (exp);
        bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);

#ifdef DEBUG_PRINT_CODE
        fputc ('\n', stderr);
#endif
      }
      break;

    default:
      abort ();
    }

  /* Most lvalues don't have components.  */
  return (exp);
}
/* Emit a type code to be used by the runtime support in handling
   parameter passing.  The type code consists of the machine mode
   plus the minimal alignment shifted left 8 bits.  */

tree
bc_runtime_type_code (type)
     tree type;
{
  int val;

  switch (TREE_CODE (type))
    {
    case ENUMERAL_TYPE:
      val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
      break;

    default:
      abort ();
    }
  return build_int_2 (val, 0);
}
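/* A standalone, illustrative sketch (kept under #if 0, so it is never
   compiled) of the encoding above: the machine mode sits in the low 8
   bits and the alignment in the bits above them.  That the mode fits
   in 8 bits is implied by the comment; the helper names are invented.  */
#if 0
static int
pack_type_code (int mode, int align)
{
  return mode | (align << 8);
}

static int
code_mode (int code)
{
  return code & 0xff;
}

static int
code_align (int code)
{
  return code >> 8;
}
#endif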
/* Generate constructor label */

char *
bc_gen_constr_label ()
{
  static int label_counter;
  static char label[20];

  sprintf (label, "*LR%d", label_counter++);

  return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
}
/* Evaluate constructor CONSTR and return pointer to it on level one.  We
   expand the constructor data as static data, and push a pointer to it.
   The pointer is put in the pointer table and is retrieved by a constP
   bytecode instruction.  We then loop and store each constructor member in
   the corresponding component.  Finally, we return the original pointer on
   the stack.  */

void
bc_expand_constructor (constr)
     tree constr;
{
  char *l;
  HOST_WIDE_INT ptroffs;

  /* Literal constructors are handled as constants, whereas
     non-literals are evaluated and stored element by element
     into the data segment.  */

  /* Allocate space in proper segment and push pointer to space on stack.  */

  l = bc_gen_constr_label ();

  if (TREE_CONSTANT (constr))
    {
      bc_emit_const_labeldef (l);
      bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
    }
  else
    {
      bc_emit_data_labeldef (l);
      bc_output_data_constructor (constr);
    }

  /* Add reference to pointer table and recall pointer to stack;
     this code is common for both types of constructors: literals
     and non-literals.  */

  ptroffs = bc_define_pointer (l);
  bc_emit_instruction (constP, ptroffs);

  /* This is all that has to be done if it's a literal.  */
  if (TREE_CONSTANT (constr))
    return;

  /* At this point, we have the pointer to the structure on top of the stack.
     Generate sequences of store_memory calls for the constructor.  */

  /* constructor type is structure */
  if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
    {
      register tree elt;

      /* If the constructor has fewer fields than the structure,
         clear the whole structure first.  */

      if (list_length (CONSTRUCTOR_ELTS (constr))
          != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
        {
          bc_emit_instruction (duplicate);
          bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
          bc_emit_instruction (clearBLK);
        }

      /* Store each element of the constructor into the corresponding
         field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
        {
          register tree field = TREE_PURPOSE (elt);
          register enum machine_mode mode;
          int bitsize;
          int bitpos;
          int unsignedp;

          bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
          mode = DECL_MODE (field);
          unsignedp = TREE_UNSIGNED (field);

          bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));

          bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
                          /* The alignment of TARGET is
                             at least what its type requires.  */
                          VOIDmode, 0,
                          TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
                          int_size_in_bytes (TREE_TYPE (constr)));
        }
    }
  else
    /* Constructor type is array */
    if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
      {
        register tree elt;
        register int i;
        tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
        int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
        int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
        tree elttype = TREE_TYPE (TREE_TYPE (constr));

        /* If the constructor has fewer elements than the array,
           clear the whole array first.  */

        if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
          {
            bc_emit_instruction (duplicate);
            bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
            bc_emit_instruction (clearBLK);
          }

        /* Store each element of the constructor into the corresponding
           element of TARGET, determined by counting the elements.  */

        for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
             elt;
             elt = TREE_CHAIN (elt), i++)
          {
            register enum machine_mode mode;
            int bitsize;
            int bitpos;
            int unsignedp;

            mode = TYPE_MODE (elttype);
            bitsize = GET_MODE_BITSIZE (mode);
            unsignedp = TREE_UNSIGNED (elttype);

            bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
                      /* * TYPE_SIZE_UNIT (elttype) */ );

            bc_store_field (elt, bitsize, bitpos, mode,
                            TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
                            /* The alignment of TARGET is
                               at least what its type requires.  */
                            VOIDmode, 0,
                            TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
                            int_size_in_bytes (TREE_TYPE (constr)));
          }
      }
}
/* Store the value of EXP (an expression tree) into member FIELD of
   structure at address on stack, which has type TYPE, mode MODE and
   occupies BITSIZE bits, starting BITPOS bits from the beginning of the
   structure.

   ALIGN is the alignment that TARGET is known to have, measured in bytes.
   TOTAL_SIZE is its size in bytes, or -1 if variable.  */

static void
bc_store_field (field, bitsize, bitpos, mode, exp, type,
                value_mode, unsignedp, align, total_size)
     int bitsize, bitpos;
     enum machine_mode mode;
     tree field, exp, type;
     enum machine_mode value_mode;
     int unsignedp;
     int align;
     int total_size;
{
  /* Expand expression and copy pointer */
  bc_expand_expr (exp);
  bc_emit_instruction (over);

  /* If the component is a bit field, we cannot use addressing to access
     it.  Use bit-field techniques to store in it.  */

  if (DECL_BIT_FIELD (field))
    bc_store_bit_field (bitpos, bitsize, unsignedp);
  else
    /* Not bit field */
    {
      HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;

      /* Advance pointer to the desired member */
      if (offset)
        bc_emit_instruction (addconstPSI, offset);

      bc_store_memory (type, field);
    }
}
/* Store SI/SU in bitfield */

void
bc_store_bit_field (offset, size, unsignedp)
     int offset, size, unsignedp;
{
  /* Push bitfield offset and size */
  bc_push_offset_and_size (offset, size);

  bc_emit_instruction (sstoreBI);
}

/* Load SI/SU from bitfield */

void
bc_load_bit_field (offset, size, unsignedp)
     int offset, size, unsignedp;
{
  /* Push bitfield offset and size */
  bc_push_offset_and_size (offset, size);

  /* Load: sign-extend if signed, else zero-extend */
  bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
}
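/* A standalone, illustrative sketch (kept under #if 0, so it is never
   compiled) of the zxload/sxload distinction above, on a 32-bit word:
   both extract SIZE bits at OFFSET, but the signed form replicates the
   field's top bit.  The sign-extending variant relies on the usual
   arithmetic right shift of signed ints; names are invented, and
   0 < size < 32 with offset + size <= 32 is assumed.  */
#if 0
static unsigned int
zxload (unsigned int word, int offset, int size)
{
  return (word >> offset) & ((1u << size) - 1);  /* zero-extend */
}

static int
sxload (unsigned int word, int offset, int size)
{
  int shift = 32 - size;
  /* Move the field's top bit into the sign bit, then shift back.  */
  return ((int) (word << (shift - offset))) >> shift;
}
#endif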
/* Adjust interpreter stack by NLEVELS.  Positive means drop NLEVELS
   (adjust stack pointer upwards), negative means add that number of
   levels (adjust the stack pointer downwards).  Only positive values
   normally make sense.  */

void
bc_adjust_stack (nlevels)
     int nlevels;
{
  switch (nlevels)
    {
    case 0:
      break;

    case 2:
      bc_emit_instruction (drop);

    case 1:
      bc_emit_instruction (drop);
      break;

    default:
      bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
      stack_depth -= nlevels;
    }

#if defined (VALIDATE_STACK_FOR_BC)
  VALIDATE_STACK_FOR_BC ();