1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 88, 89, 92-6, 1997 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 /* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 It also creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
27 The functions whose names start with `expand_' are called by the
28 parser to generate RTL instructions for various kinds of constructs.
30 Some control and binding constructs require calling several such
31 functions at different times. For example, a simple if-then
32 is expanded by calling `expand_start_cond' (with the condition-expression
33 as argument) before parsing the then-clause and calling `expand_end_cond'
34 after parsing the then-clause. */
46 #include "insn-flags.h"
47 #include "insn-config.h"
48 #include "insn-codes.h"
50 #include "hard-reg-set.h"
57 #include "bc-typecd.h"
58 #include "bc-opcode.h"
/* Route obstack chunk allocation through xmalloc and free.
   NOTE(review): xmalloc is presumably a fatal-on-failure malloc
   wrapper defined elsewhere -- confirm before relying on it.  */
#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

/* Obstack from which statement-expansion bookkeeping is allocated;
   in particular, `struct nesting' objects (see ALLOC_NESTING).  */
struct obstack stmt_obstack;
66 /* Filename and line number of last line-number note,
67 whether we actually emitted it or not. */
/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */
int expr_stmts_for_value;

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */
static tree last_expr_type;
static rtx last_expr_value;
82 /* Each time we expand the end of a binding contour (in `expand_end_bindings')
83 and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
84 This is used by the `remember_end_note' function to record the endpoint
85 of each generated block in its associated BLOCK node. */
87 static rtx last_block_end_note;
89 /* Number of binding contours started so far in this function. */
91 int block_start_count;
93 /* Nonzero if function being compiled needs to
94 return the address of where it has put a structure value. */
96 extern int current_function_returns_pcc_struct;
98 /* Label that will go on parm cleanup code, if any.
99 Jumping to this label runs cleanup code for parameters, if
100 such code must be run. Following this code is the logical return label. */
102 extern rtx cleanup_label;
104 /* Label that will go on function epilogue.
105 Jumping to this label serves as a "return" instruction
106 on machines which require execution of the epilogue on all returns. */
108 extern rtx return_label;
110 /* Offset to end of allocated area of stack frame.
111 If stack grows down, this is the address of the last stack slot allocated.
112 If stack grows up, this is the address for the next slot. */
113 extern int frame_offset;
115 /* Label to jump back to for tail recursion, or 0 if we have
116 not yet needed one for this function. */
117 extern rtx tail_recursion_label;
119 /* Place after which to insert the tail_recursion_label if we need one. */
120 extern rtx tail_recursion_reentry;
122 /* Location at which to save the argument pointer if it will need to be
123 referenced. There are two cases where this is done: if nonlocal gotos
124 exist, or if vars stored at an offset from the argument pointer will be
125 needed by inner routines. */
127 extern rtx arg_pointer_save_area;
129 /* Chain of all RTL_EXPRs that have insns in them. */
130 extern tree rtl_expr_chain;
132 /* Stack allocation level in which temporaries for TARGET_EXPRs live. */
133 extern int target_temp_slot_level;
135 extern int temp_slot_level;
137 /* Functions and data structures for expanding case statements. */
139 /* Case label structure, used to hold info on labels within case
140 statements. We handle "range" labels; for a single-value label
141 as in C, the high and low limits are the same.
143 An AVL tree of case nodes is initially created, and later transformed
144 to a list linked via the RIGHT fields in the nodes. Nodes with
145 higher case values are later in the list.
147 Switch statements can be output in one of two forms. A branch table
148 is used if there are more than a few labels and the labels are dense
149 within the range between the smallest and largest case value. If a
150 branch table is used, no further manipulations are done with the case
153 The alternative to the use of a branch table is to generate a series
154 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
155 and PARENT fields to hold a binary tree. Initially the tree is
156 totally unbalanced, with everything on the right. We balance the tree
157 with nodes on the left having lower case values than the parent
158 and nodes on the right having higher values. We then output the tree
163 struct case_node *left; /* Left son in binary tree */
164 struct case_node *right; /* Right son in binary tree; also node chain */
165 struct case_node *parent; /* Parent of node in binary tree */
166 tree low; /* Lowest index value for this label */
167 tree high; /* Highest index value for this label */
168 tree code_label; /* Label to jump to when node matches */
172 typedef struct case_node case_node;
173 typedef struct case_node *case_node_ptr;
175 /* These are used by estimate_case_costs and balance_case_nodes. */
177 /* This must be a signed type, and non-ANSI compilers lack signed char. */
178 static short *cost_table;
179 static int use_cost_table;
181 /* Stack of control and binding constructs we are currently inside.
183 These constructs begin when you call `expand_start_WHATEVER'
184 and end when you call `expand_end_WHATEVER'. This stack records
185 info about how the construct began that tells the end-function
186 what to do. It also may provide information about the construct
187 to alter the behavior of other constructs within the body.
188 For example, they may affect the behavior of C `break' and `continue'.
190 Each construct gets one `struct nesting' object.
191 All of these objects are chained through the `all' field.
192 `nesting_stack' points to the first object (innermost construct).
193 The position of an entry on `nesting_stack' is in its `depth' field.
195 Each type of construct has its own individual stack.
196 For example, loops have `loop_stack'. Each object points to the
197 next object of the same type through the `next' field.
199 Some constructs are visible to `break' exit-statements and others
200 are not. Which constructs are visible depends on the language.
201 Therefore, the data structure allows each construct to be visible
202 or not, according to the args given when the construct is started.
203 The construct is visible if the `exit_label' field is non-null.
204 In that case, the value should be a CODE_LABEL rtx. */
209 struct nesting *next;
214 /* For conds (if-then and if-then-else statements). */
217 /* Label for the end of the if construct.
218 There is none if EXITFLAG was not set
219 and no `else' has been seen yet. */
221 /* Label for the end of this alternative.
222 This may be the end of the if or the next else/elseif. */
228 /* Label at the top of the loop; place to loop back to. */
230 /* Label at the end of the whole construct. */
232 /* Label before a jump that branches to the end of the whole
233 construct. This is where destructors go if any. */
235 /* Label for `continue' statement to jump to;
236 this is in front of the stepper of the loop. */
239 /* For variable binding contours. */
242 /* Sequence number of this binding contour within the function,
243 in order of entry. */
244 int block_start_count;
245 /* Nonzero => value to restore stack to on exit. Complemented by
246 bc_stack_level (see below) when generating bytecodes. */
248 /* The NOTE that starts this contour.
249 Used by expand_goto to check whether the destination
250 is within each contour or not. */
252 /* Innermost containing binding contour that has a stack level. */
253 struct nesting *innermost_stack_block;
254 /* List of cleanups to be run on exit from this contour.
255 This is a list of expressions to be evaluated.
256 The TREE_PURPOSE of each link is the ..._DECL node
257 which the cleanup pertains to. */
259 /* List of cleanup-lists of blocks containing this block,
260 as they were at the locus where this block appears.
261 There is an element for each containing block,
262 ordered innermost containing block first.
263 The tail of this list can be 0,
264 if all remaining elements would be empty lists.
265 The element's TREE_VALUE is the cleanup-list of that block,
266 which may be null. */
268 /* Chain of labels defined inside this binding contour.
269 For contours that have stack levels or cleanups. */
270 struct label_chain *label_chain;
271 /* Number of function calls seen, as of start of this block. */
272 int function_call_count;
273 /* Bytecode specific: stack level to restore stack to on exit. */
275 /* Nonzero if this is associated with a EH region. */
276 int exception_region;
277 /* The saved target_temp_slot_level from our outer block.
278 We may reset target_temp_slot_level to be the level of
279 this block, if that is done, target_temp_slot_level
280 reverts to the saved target_temp_slot_level at the very
282 int target_temp_slot_level;
283 /* True if we are currently emitting insns in an area of
284 output code that is controlled by a conditional
285 expression. This is used by the cleanup handling code to
286 generate conditional cleanup actions. */
287 int conditional_code;
288 /* A place to move the start of the exception region for any
289 of the conditional cleanups, must be at the end or after
290 the start of the last unconditional cleanup, and before any
291 conditional branch points. */
292 rtx last_unconditional_cleanup;
293 /* When in a conditional context, this is the specific
294 cleanup list associated with last_unconditional_cleanup,
295 where we place the conditionalized cleanups. */
298 /* For switch (C) or case (Pascal) statements,
299 and also for dummies (see `expand_start_case_dummy'). */
302 /* The insn after which the case dispatch should finally
303 be emitted. Zero for a dummy. */
305 /* For bytecodes, the case table is in-lined right in the code.
306 A label is needed for skipping over this block. It is only
307 used when generating bytecodes. */
309 /* A list of case labels; it is first built as an AVL tree.
310 During expand_end_case, this is converted to a list, and may be
311 rearranged into a nearly balanced binary tree. */
312 struct case_node *case_list;
313 /* Label to jump to if no case matches. */
315 /* The expression to be dispatched on. */
317 /* Type that INDEX_EXPR should be converted to. */
319 /* Number of range exprs in case statement. */
321 /* Name of this kind of statement, for warnings. */
323 /* Nonzero if a case label has been seen in this case stmt. */
/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* If any new stacks are added here, add them to POPSTACK too.  */

/* Chain of all pending binding contours that restore stack levels.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
/* Allocate and return a new `struct nesting' from stmt_obstack.
   The expansion is fully parenthesized so the macro is safe to use
   inside a larger expression (e.g. as the operand of `->'); without
   the outer parentheses, postfix operators would bind to the
   obstack_alloc call before the cast was applied.  */

#define ALLOC_NESTING() \
  ((struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting)))
/* Pop nesting-stack entries one at a time until the entry that was at
   the top of STACK has been removed.  For every entry popped, each of
   the per-construct chains (loop, cond, block, stack-block, case)
   whose head is that entry is advanced past it, the global nesting
   depth is updated, and the entry's storage is returned to
   stmt_obstack.  */

#define POPSTACK(STACK)						\
do { struct nesting *stop = STACK;				\
     struct nesting *cur;					\
     do { cur = nesting_stack;					\
	  if (loop_stack == cur)				\
	    loop_stack = loop_stack->next;			\
	  if (cond_stack == cur)				\
	    cond_stack = cond_stack->next;			\
	  if (block_stack == cur)				\
	    block_stack = block_stack->next;			\
	  if (stack_block_stack == cur)				\
	    stack_block_stack = stack_block_stack->next;	\
	  if (case_stack == cur)				\
	    case_stack = case_stack->next;			\
	  nesting_depth = nesting_stack->depth - 1;		\
	  nesting_stack = cur->all;				\
	  obstack_free (&stmt_obstack, cur); }			\
     while (cur != stop); } while (0)
383 /* In some cases it is impossible to generate code for a forward goto
384 until the label definition is seen. This happens when it may be necessary
385 for the goto to reset the stack pointer: we don't yet know how to do that.
386 So expand_goto puts an entry on this fixup list.
387 Each time a binding contour that resets the stack is exited,
389 If the target label has now been defined, we can insert the proper code. */
393 /* Points to following fixup. */
394 struct goto_fixup *next;
395 /* Points to the insn before the jump insn.
396 If more code must be inserted, it goes after this insn. */
398 /* The LABEL_DECL that this jump is jumping to, or 0
399 for break, continue or return. */
401 /* The BLOCK for the place where this goto was found. */
403 /* The CODE_LABEL rtx that this is jumping to. */
405 /* Number of binding contours started in current function
406 before the label reference. */
407 int block_start_count;
408 /* The outermost stack level that should be restored for this jump.
409 Each time a binding contour that resets the stack is exited,
410 if the target label is *not* yet defined, this slot is updated. */
412 /* List of lists of cleanup expressions to be run by this goto.
413 There is one element for each block that this goto is within.
414 The tail of this list can be 0,
415 if all remaining elements would be empty.
416 The TREE_VALUE contains the cleanup list of that block as of the
417 time this goto was seen.
418 The TREE_ADDRESSABLE flag is 1 for a block that has been exited. */
419 tree cleanup_list_list;
421 /* Bytecode specific members follow */
423 /* The label that this jump is jumping to, or 0 for break, continue
425 struct bc_label *bc_target;
427 /* The label we use for the fixup patch */
428 struct bc_label *label;
430 /* True (non-0) if fixup has been handled */
433 /* Like stack_level above, except refers to the interpreter stack */
437 static struct goto_fixup *goto_fixup_chain;
439 /* Within any binding contour that must restore a stack level,
440 all labels are recorded with a chain of these structures. */
444 /* Points to following fixup. */
445 struct label_chain *next;
/* Non-zero if we are using EH (exception handling) to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;
454 static void expand_goto_internal PROTO((tree, rtx, rtx));
455 static void bc_expand_goto_internal PROTO((enum bytecode_opcode,
456 struct bc_label *, tree));
457 static int expand_fixup PROTO((tree, rtx, rtx));
458 static void bc_expand_fixup PROTO((enum bytecode_opcode,
459 struct bc_label *, int));
460 static void fixup_gotos PROTO((struct nesting *, rtx, tree,
462 static void bc_fixup_gotos PROTO((struct nesting *, int, tree,
464 static void bc_expand_start_cond PROTO((tree, int));
465 static void bc_expand_end_cond PROTO((void));
466 static void bc_expand_start_else PROTO((void));
467 static void bc_expand_end_loop PROTO((void));
468 static void bc_expand_end_bindings PROTO((tree, int, int));
469 static void bc_expand_decl PROTO((tree, tree));
470 static void bc_expand_variable_local_init PROTO((tree));
471 static void bc_expand_decl_init PROTO((tree));
472 static void expand_null_return_1 PROTO((rtx, int));
473 static void expand_value_return PROTO((rtx));
474 static int tail_recursion_args PROTO((tree, tree));
475 static void expand_cleanups PROTO((tree, tree, int, int));
476 static void bc_expand_start_case PROTO((struct nesting *, tree,
478 static int bc_pushcase PROTO((tree, tree));
479 static void bc_check_for_full_enumeration_handling PROTO((tree));
480 static void bc_expand_end_case PROTO((tree));
481 static void do_jump_if_equal PROTO((rtx, rtx, rtx, int));
482 static int estimate_case_costs PROTO((case_node_ptr));
483 static void group_case_nodes PROTO((case_node_ptr));
484 static void balance_case_nodes PROTO((case_node_ptr *,
486 static int node_has_low_bound PROTO((case_node_ptr, tree));
487 static int node_has_high_bound PROTO((case_node_ptr, tree));
488 static int node_is_bounded PROTO((case_node_ptr, tree));
489 static void emit_jump_if_reachable PROTO((rtx));
490 static void emit_case_nodes PROTO((rtx, case_node_ptr, rtx, tree));
491 static int add_case_node PROTO((tree, tree, tree, tree *));
492 static struct case_node *case_tree2list PROTO((case_node *, case_node *));
494 extern rtx bc_allocate_local ();
495 extern rtx bc_allocate_variable_array ();
/* Arrange for subsequent cleanups to be run through the
   exception-handling machinery: sets the flag declared above.
   NOTE(review): the function's return type and braces are not
   visible in this chunk of the file.  */
using_eh_for_cleanups ()
using_eh_for_cleanups_p = 1;
506 gcc_obstack_init (&stmt_obstack);
511 init_stmt_for_function ()
513 /* We are not currently within any block, conditional, loop or case. */
515 stack_block_stack = 0;
522 block_start_count = 0;
524 /* No gotos have been expanded yet. */
525 goto_fixup_chain = 0;
527 /* We are not processing a ({...}) grouping. */
528 expr_stmts_for_value = 0;
531 init_eh_for_function ();
538 p->block_stack = block_stack;
539 p->stack_block_stack = stack_block_stack;
540 p->cond_stack = cond_stack;
541 p->loop_stack = loop_stack;
542 p->case_stack = case_stack;
543 p->nesting_stack = nesting_stack;
544 p->nesting_depth = nesting_depth;
545 p->block_start_count = block_start_count;
546 p->last_expr_type = last_expr_type;
547 p->last_expr_value = last_expr_value;
548 p->expr_stmts_for_value = expr_stmts_for_value;
549 p->emit_filename = emit_filename;
550 p->emit_lineno = emit_lineno;
551 p->goto_fixup_chain = goto_fixup_chain;
556 restore_stmt_status (p)
559 block_stack = p->block_stack;
560 stack_block_stack = p->stack_block_stack;
561 cond_stack = p->cond_stack;
562 loop_stack = p->loop_stack;
563 case_stack = p->case_stack;
564 nesting_stack = p->nesting_stack;
565 nesting_depth = p->nesting_depth;
566 block_start_count = p->block_start_count;
567 last_expr_type = p->last_expr_type;
568 last_expr_value = p->last_expr_value;
569 expr_stmts_for_value = p->expr_stmts_for_value;
570 emit_filename = p->emit_filename;
571 emit_lineno = p->emit_lineno;
572 goto_fixup_chain = p->goto_fixup_chain;
573 restore_eh_status (p);
576 /* Emit a no-op instruction. */
583 if (!output_bytecode)
585 last_insn = get_last_insn ();
587 && (GET_CODE (last_insn) == CODE_LABEL
588 || (GET_CODE (last_insn) == NOTE
589 && prev_real_insn (last_insn) == 0)))
590 emit_insn (gen_nop ());
594 /* Return the rtx-label that corresponds to a LABEL_DECL,
595 creating it if necessary. */
601 if (TREE_CODE (label) != LABEL_DECL)
604 if (DECL_RTL (label))
605 return DECL_RTL (label);
607 return DECL_RTL (label) = gen_label_rtx ();
610 /* Add an unconditional jump to LABEL as the next sequential instruction. */
616 do_pending_stack_adjust ();
617 emit_jump_insn (gen_jump (label));
621 /* Emit code to jump to the address
622 specified by the pointer expression EXP. */
625 expand_computed_goto (exp)
630 bc_expand_expr (exp);
631 bc_emit_instruction (jumpP);
635 rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
637 #ifdef POINTERS_EXTEND_UNSIGNED
638 x = convert_memory_address (Pmode, x);
642 /* Be sure the function is executable. */
643 if (flag_check_memory_usage)
644 emit_library_call (chkr_check_exec_libfunc, 1,
645 VOIDmode, 1, x, ptr_mode);
647 do_pending_stack_adjust ();
648 emit_indirect_jump (x);
652 /* Handle goto statements and the labels that they can go to. */
654 /* Specify the location in the RTL code of a label LABEL,
655 which is a LABEL_DECL tree node.
657 This is used for the kind of label that the user can jump to with a
658 goto statement, and for alternatives of a switch or case statement.
659 RTL labels generated for loops and conditionals don't go through here;
660 they are generated directly at the RTL level, by other functions below.
662 Note that this has nothing to do with defining label *names*.
663 Languages vary in how they do that and what that even means. */
669 struct label_chain *p;
673 if (! DECL_RTL (label))
674 DECL_RTL (label) = bc_gen_rtx ((char *) 0, 0, bc_get_bytecode_label ());
675 if (! bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (DECL_RTL (label))))
676 error ("multiply defined label");
680 do_pending_stack_adjust ();
681 emit_label (label_rtx (label));
682 if (DECL_NAME (label))
683 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
685 if (stack_block_stack != 0)
687 p = (struct label_chain *) oballoc (sizeof (struct label_chain));
688 p->next = stack_block_stack->data.block.label_chain;
689 stack_block_stack->data.block.label_chain = p;
694 /* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
695 from nested functions. */
698 declare_nonlocal_label (label)
701 nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
702 LABEL_PRESERVE_P (label_rtx (label)) = 1;
703 if (nonlocal_goto_handler_slot == 0)
705 nonlocal_goto_handler_slot
706 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
707 emit_stack_save (SAVE_NONLOCAL,
708 &nonlocal_goto_stack_level,
709 PREV_INSN (tail_recursion_reentry));
713 /* Generate RTL code for a `goto' statement with target label LABEL.
714 LABEL should be a LABEL_DECL tree node that was or will later be
715 defined with `expand_label'. */
725 expand_goto_internal (label, label_rtx (label), NULL_RTX);
729 /* Check for a nonlocal goto to a containing function. */
730 context = decl_function_context (label);
731 if (context != 0 && context != current_function_decl)
733 struct function *p = find_function_data (context);
734 rtx label_ref = gen_rtx (LABEL_REF, Pmode, label_rtx (label));
737 p->has_nonlocal_label = 1;
738 current_function_has_nonlocal_goto = 1;
739 LABEL_REF_NONLOCAL_P (label_ref) = 1;
741 /* Copy the rtl for the slots so that they won't be shared in
742 case the virtual stack vars register gets instantiated differently
743 in the parent than in the child. */
745 #if HAVE_nonlocal_goto
746 if (HAVE_nonlocal_goto)
747 emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
748 copy_rtx (p->nonlocal_goto_handler_slot),
749 copy_rtx (p->nonlocal_goto_stack_level),
756 /* Restore frame pointer for containing function.
757 This sets the actual hard register used for the frame pointer
758 to the location of the function's incoming static chain info.
759 The non-local goto handler will then adjust it to contain the
760 proper value and reload the argument pointer, if needed. */
761 emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));
763 /* We have now loaded the frame pointer hardware register with
764 the address that corresponds to the start of the virtual
765 stack vars. So replace virtual_stack_vars_rtx in all
766 addresses we use with stack_pointer_rtx. */
768 /* Get addr of containing function's current nonlocal goto handler,
769 which will do any cleanups and then jump to the label. */
770 addr = copy_rtx (p->nonlocal_goto_handler_slot);
771 temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
772 hard_frame_pointer_rtx));
774 /* Restore the stack pointer. Note this uses fp just restored. */
775 addr = p->nonlocal_goto_stack_level;
777 addr = replace_rtx (copy_rtx (addr),
778 virtual_stack_vars_rtx,
779 hard_frame_pointer_rtx);
781 emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);
783 /* Put in the static chain register the nonlocal label address. */
784 emit_move_insn (static_chain_rtx, label_ref);
785 /* USE of hard_frame_pointer_rtx added for consistency; not clear if
787 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
788 emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
789 emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
790 emit_indirect_jump (temp);
794 expand_goto_internal (label, label_rtx (label), NULL_RTX);
797 /* Generate RTL code for a `goto' statement with target label BODY.
798 LABEL should be a LABEL_REF.
799 LAST_INSN, if non-0, is the rtx we should consider as the last
800 insn emitted (for the purposes of cleaning up a return). */
803 expand_goto_internal (body, label, last_insn)
808 struct nesting *block;
811 /* NOTICE! If a bytecode instruction other than `jump' is needed,
812 then the caller has to call bc_expand_goto_internal()
813 directly. This is rather an exceptional case, and there aren't
814 that many places where this is necessary. */
817 expand_goto_internal (body, label, last_insn);
821 if (GET_CODE (label) != CODE_LABEL)
824 /* If label has already been defined, we can tell now
825 whether and how we must alter the stack level. */
827 if (PREV_INSN (label) != 0)
829 /* Find the innermost pending block that contains the label.
830 (Check containment by comparing insn-uids.)
831 Then restore the outermost stack level within that block,
832 and do cleanups of all blocks contained in it. */
833 for (block = block_stack; block; block = block->next)
835 if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
837 if (block->data.block.stack_level != 0)
838 stack_level = block->data.block.stack_level;
839 /* Execute the cleanups for blocks we are exiting. */
840 if (block->data.block.cleanups != 0)
842 expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
843 do_pending_stack_adjust ();
849 /* Ensure stack adjust isn't done by emit_jump, as this
850 would clobber the stack pointer. This one should be
851 deleted as dead by flow. */
852 clear_pending_stack_adjust ();
853 do_pending_stack_adjust ();
854 emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
857 if (body != 0 && DECL_TOO_LATE (body))
858 error ("jump to `%s' invalidly jumps into binding contour",
859 IDENTIFIER_POINTER (DECL_NAME (body)));
861 /* Label not yet defined: may need to put this goto
862 on the fixup list. */
863 else if (! expand_fixup (body, label, last_insn))
865 /* No fixup needed. Record that the label is the target
866 of at least one goto that has no fixup. */
868 TREE_ADDRESSABLE (body) = 1;
874 /* Generate a jump with OPCODE to the given bytecode LABEL which is
875 found within BODY. */
878 bc_expand_goto_internal (opcode, label, body)
879 enum bytecode_opcode opcode;
880 struct bc_label *label;
883 struct nesting *block;
884 int stack_level = -1;
886 /* If the label is defined, adjust the stack as necessary.
887 If it's not defined, we have to push the reference on the
893 /* Find the innermost pending block that contains the label.
894 (Check containment by comparing bytecode uids.) Then restore the
895 outermost stack level within that block. */
897 for (block = block_stack; block; block = block->next)
899 if (BYTECODE_BC_LABEL (block->data.block.first_insn)->uid < label->uid)
901 if (block->data.block.bc_stack_level)
902 stack_level = block->data.block.bc_stack_level;
904 /* Execute the cleanups for blocks we are exiting. */
905 if (block->data.block.cleanups != 0)
907 expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
908 do_pending_stack_adjust ();
912 /* Restore the stack level. If we need to adjust the stack, we
913 must do so after the jump, since the jump may depend on
914 what's on the stack. Thus, any stack-modifying conditional
915 jumps (these are the only ones that rely on what's on the
916 stack) go into the fixup list. */
919 && stack_depth != stack_level
922 bc_expand_fixup (opcode, label, stack_level);
925 if (stack_level >= 0)
926 bc_adjust_stack (stack_depth - stack_level);
928 if (body && DECL_BIT_FIELD (body))
929 error ("jump to `%s' invalidly jumps into binding contour",
930 IDENTIFIER_POINTER (DECL_NAME (body)));
932 /* Emit immediate jump */
933 bc_emit_bytecode (opcode);
934 bc_emit_bytecode_labelref (label);
936 #ifdef DEBUG_PRINT_CODE
937 fputc ('\n', stderr);
942 /* Put goto in the fixup list */
943 bc_expand_fixup (opcode, label, stack_level);
946 /* Generate if necessary a fixup for a goto
947 whose target label in tree structure (if any) is TREE_LABEL
948 and whose target in rtl is RTL_LABEL.
950 If LAST_INSN is nonzero, we pretend that the jump appears
951 after insn LAST_INSN instead of at the current point in the insn stream.
953 The fixup will be used later to insert insns just before the goto.
954 Those insns will restore the stack level as appropriate for the
955 target label, and will (in the case of C++) also invoke any object
956 destructors which have to be invoked when we exit the scopes which
957 are exited by the goto.
959 Value is nonzero if a fixup is made. */
962 expand_fixup (tree_label, rtl_label, last_insn)
967 struct nesting *block, *end_block;
969 /* See if we can recognize which block the label will be output in.
970 This is possible in some very common cases.
971 If we succeed, set END_BLOCK to that block.
972 Otherwise, set it to 0. */
975 && (rtl_label == cond_stack->data.cond.endif_label
976 || rtl_label == cond_stack->data.cond.next_label))
977 end_block = cond_stack;
978 /* If we are in a loop, recognize certain labels which
979 are likely targets. This reduces the number of fixups
980 we need to create. */
982 && (rtl_label == loop_stack->data.loop.start_label
983 || rtl_label == loop_stack->data.loop.end_label
984 || rtl_label == loop_stack->data.loop.continue_label))
985 end_block = loop_stack;
989 /* Now set END_BLOCK to the binding level to which we will return. */
993 struct nesting *next_block = end_block->all;
996 /* First see if the END_BLOCK is inside the innermost binding level.
997 If so, then no cleanups or stack levels are relevant. */
998 while (next_block && next_block != block)
999 next_block = next_block->all;
1004 /* Otherwise, set END_BLOCK to the innermost binding level
1005 which is outside the relevant control-structure nesting. */
1006 next_block = block_stack->next;
1007 for (block = block_stack; block != end_block; block = block->all)
1008 if (block == next_block)
1009 next_block = next_block->next;
1010 end_block = next_block;
1013 /* Does any containing block have a stack level or cleanups?
1014 If not, no fixup is needed, and that is the normal case
1015 (the only case, for standard C). */
1016 for (block = block_stack; block != end_block; block = block->next)
1017 if (block->data.block.stack_level != 0
1018 || block->data.block.cleanups != 0)
1021 if (block != end_block)
1023 /* Ok, a fixup is needed. Add a fixup to the list of such. */
1024 struct goto_fixup *fixup
1025 = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
1026 /* In case an old stack level is restored, make sure that comes
1027 after any pending stack adjust. */
1028 /* ?? If the fixup isn't to come at the present position,
1029 doing the stack adjust here isn't useful. Doing it with our
1030 settings at that location isn't useful either. Let's hope
1033 do_pending_stack_adjust ();
1034 fixup->target = tree_label;
1035 fixup->target_rtl = rtl_label;
1037 /* Create a BLOCK node and a corresponding matched set of
1038 NOTE_INSN_BEGIN_BLOCK and NOTE_INSN_END_BLOCK notes at
1039 this point. The notes will encapsulate any and all fixup
1040 code which we might later insert at this point in the insn
1041 stream. Also, the BLOCK node will be the parent (i.e. the
1042 `SUPERBLOCK') of any other BLOCK nodes which we might create
1043 later on when we are expanding the fixup code. */
1046 register rtx original_before_jump
1047 = last_insn ? last_insn : get_last_insn ();
1051 fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
1052 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
1053 fixup->context = poplevel (1, 0, 0); /* Create the BLOCK node now! */
1055 emit_insns_after (fixup->before_jump, original_before_jump);
1058 fixup->block_start_count = block_start_count;
1059 fixup->stack_level = 0;
1060 fixup->cleanup_list_list
1061 = ((block->data.block.outer_cleanups
1062 || block->data.block.cleanups)
1063 ? tree_cons (NULL_TREE, block->data.block.cleanups,
1064 block->data.block.outer_cleanups)
1066 fixup->next = goto_fixup_chain;
1067 goto_fixup_chain = fixup;
1074 /* Generate bytecode jump with OPCODE to a fixup routine that links to LABEL.
1075 Make the fixup restore the stack level to STACK_LEVEL. */
/* Emit a bytecode jump (with OPCODE) to a freshly created fixup label and
   record a goto_fixup so bc_fixup_gotos can later emit the stack-restore
   code at that label before jumping on to LABEL.
   NOTE(review): this chunk is a partial extraction -- the return-type line,
   the STACK_LEVEL parameter declaration, opening/closing braces, and the
   matching #endif are missing from the visible text; the leading numbers
   on each line are extraction artifacts, not code.  */
1078 bc_expand_fixup (opcode, label, stack_level)
1079      enum bytecode_opcode opcode;
1080      struct bc_label *label;
/* Allocate the fixup record on the function obstack.  */
1083   struct goto_fixup *fixup
1084     = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
/* The fixup gets its own label; the real target is remembered in bc_target.  */
1086   fixup->label = bc_get_bytecode_label ();
1087   fixup->bc_target = label;
1088   fixup->bc_stack_level = stack_level;
1089   fixup->bc_handled = FALSE;
/* Chain onto the global list consumed by bc_fixup_gotos.  */
1091   fixup->next = goto_fixup_chain;
1092   goto_fixup_chain = fixup;
1094   /* Insert a jump to the fixup code */
1095   bc_emit_bytecode (opcode);
1096   bc_emit_bytecode_labelref (fixup->label);
1098 #ifdef DEBUG_PRINT_CODE
1099   fputc ('\n', stderr);
1103 /* Expand any needed fixups in the outputmost binding level of the
1104 function. FIRST_INSN is the first insn in the function. */
/* Expand any remaining fixups in the outermost binding level of the
   function by delegating to fixup_gotos with no enclosing block.
   FIRST_INSN is the first insn of the function.
   NOTE(review): return type, parameter declaration, and braces fall
   outside the visible extracted text.  */
1107 expand_fixups (first_insn)
1110   fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
1113 /* When exiting a binding contour, process all pending gotos requiring fixups.
1114 THISBLOCK is the structure that describes the block being exited.
1115 STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
1116 CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
1117 FIRST_INSN is the insn that began this contour.
1119 Gotos that jump out of this contour must restore the
1120 stack level and do the cleanups before actually jumping.
1122 DONT_JUMP_IN nonzero means report error there is a jump into this
1123 contour from before the beginning of the contour.
1124 This is also done if STACK_LEVEL is nonzero. */
/* On exit from a binding contour, finalize or update all pending goto
   fixups: for fixups whose target label is now defined, expand the exited
   blocks' cleanups and splice them in ahead of the jump; for fixups whose
   label is still undefined, run this block's cleanups now and record the
   stack level to restore later.
   NOTE(review): this chunk is a partial extraction -- braces, several
   declarations and conditions (e.g. the start of the jump-into-contour
   test near line 1179) are missing; the leading numbers are artifacts.
   Do not attempt to compile or restructure this text as-is.  */
1127 fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
1128 struct nesting *thisblock;
1134 register struct goto_fixup *f, *prev;
/* Bytecode compilation takes a separate path.  */
1136 if (output_bytecode)
1138 /* ??? The second arg is the bc stack level, which is not the same
1139 as STACK_LEVEL. I have no idea what should go here, so I'll
1141 bc_fixup_gotos (thisblock, 0, cleanup_list, first_insn, dont_jump_in);
1145 /* F is the fixup we are considering; PREV is the previous one. */
1146 /* We run this loop in two passes so that cleanups of exited blocks
1147 are run first, and blocks that are exited are marked so
/* Pass 1: finalize fixups whose target labels are already defined.  */
1150 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1152 /* Test for a fixup that is inactive because it is already handled. */
1153 if (f->before_jump == 0)
1155 /* Delete inactive fixup from the chain, if that is easy to do. */
1157 prev->next = f->next;
1159 /* Has this fixup's target label been defined?
1160 If so, we can finalize it. */
1161 else if (PREV_INSN (f->target_rtl) != 0)
1163 register rtx cleanup_insns;
1165 /* Get the first non-label after the label
1166 this goto jumps to. If that's before this scope begins,
1167 we don't have a jump into the scope. */
1168 rtx after_label = f->target_rtl;
1169 while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
1170 after_label = NEXT_INSN (after_label);
1172 /* If this fixup jumped into this contour from before the beginning
1173 of this contour, report an error. */
1174 /* ??? Bug: this does not detect jumping in through intermediate
1175 blocks that have stack levels or cleanups.
1176 It detects only a problem with the innermost block
1177 around the label. */
/* NOTE(review): the opening clause of this compound condition (orig
   line 1178) is missing from the extraction.  */
1179 && (dont_jump_in || stack_level || cleanup_list)
1180 /* If AFTER_LABEL is 0, it means the jump goes to the end
1181 of the rtl, which means it jumps into this scope. */
1182 && (after_label == 0
1183 || INSN_UID (first_insn) < INSN_UID (after_label))
1184 && INSN_UID (first_insn) > INSN_UID (f->before_jump)
1185 && ! DECL_ERROR_ISSUED (f->target))
1187 error_with_decl (f->target,
1188 "label `%s' used before containing binding contour");
1189 /* Prevent multiple errors for one label. */
1190 DECL_ERROR_ISSUED (f->target) = 1;
1193 /* We will expand the cleanups into a sequence of their own and
1194 then later on we will attach this new sequence to the insn
1195 stream just ahead of the actual jump insn. */
1199 /* Temporarily restore the lexical context where we will
1200 logically be inserting the fixup code. We do this for the
1201 sake of getting the debugging information right. */
1204 set_block (f->context);
1206 /* Expand the cleanups for blocks this jump exits. */
1207 if (f->cleanup_list_list)
1210 for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
1211 /* Marked elements correspond to blocks that have been closed.
1212 Do their cleanups. */
1213 if (TREE_ADDRESSABLE (lists)
1214 && TREE_VALUE (lists) != 0)
1216 expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
1217 /* Pop any pushes done in the cleanups,
1218 in case function is about to return. */
1219 do_pending_stack_adjust ();
1223 /* Restore stack level for the biggest contour that this
1224 jump jumps out of. */
1226 emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);
1228 /* Finish up the sequence containing the insns which implement the
1229 necessary cleanups, and then attach that whole sequence to the
1230 insn stream just ahead of the actual jump insn. Attaching it
1231 at that point insures that any cleanups which are in fact
1232 implicit C++ object destructions (which must be executed upon
1233 leaving the block) appear (to the debugger) to be taking place
1234 in an area of the generated code where the object(s) being
1235 destructed are still "in scope". */
1237 cleanup_insns = get_insns ();
1241 emit_insns_after (cleanup_insns, f->before_jump);
1248 /* For any still-undefined labels, do the cleanups for this block now.
1249 We must do this now since items in the cleanup list may go out
1250 of scope when the block ends. */
/* Pass 2: fixups whose labels have not yet appeared.  */
1251 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1252 if (f->before_jump != 0
1253 && PREV_INSN (f->target_rtl) == 0
1254 /* Label has still not appeared. If we are exiting a block with
1255 a stack level to restore, that started before the fixup,
1256 mark this stack level as needing restoration
1257 when the fixup is later finalized. */
1259 /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
1260 means the label is undefined. That's erroneous, but possible. */
1261 && (thisblock->data.block.block_start_count
1262 <= f->block_start_count))
1264 tree lists = f->cleanup_list_list;
1267 for (; lists; lists = TREE_CHAIN (lists))
1268 /* If the following elt. corresponds to our containing block
1269 then the elt. must be for this block. */
1270 if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
1274 set_block (f->context);
1275 expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
1276 do_pending_stack_adjust ();
1277 cleanup_insns = get_insns ();
/* NOTE(review): the lvalue of this assignment (orig line 1281) is
   missing from the extraction.  */
1280 if (cleanup_insns != 0)
1282 = emit_insns_after (cleanup_insns, f->before_jump);
1284 f->cleanup_list_list = TREE_CHAIN (lists);
/* Remember the stack level to restore when this fixup is finalized.  */
1288 f->stack_level = stack_level;
1293 /* When exiting a binding contour, process all pending gotos requiring fixups.
1294 Note: STACK_DEPTH is not altered.
1296 The arguments are currently not used in the bytecode compiler, but we may
1297 need them one day for languages other than C.
1299 THISBLOCK is the structure that describes the block being exited.
1300 STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
1301 CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
1302 FIRST_INSN is the insn that began this contour.
1304 Gotos that jump out of this contour must restore the
1305 stack level and do the cleanups before actually jumping.
1307 DONT_JUMP_IN nonzero means report error there is a jump into this
1308 contour from before the beginning of the contour.
1309 This is also done if STACK_LEVEL is nonzero. */
/* Bytecode counterpart of fixup_gotos: for each pending fixup, define its
   fixup label, emit stack-adjust code to reach the recorded stack level,
   then jump to the real target.  Clears goto_fixup_chain when done.
   NOTE(review): partial extraction -- remaining parameter declarations,
   braces, and the #endif are outside the visible text; leading numbers
   are artifacts.  The parameters are currently unused here (see the
   preceding file comment).  */
1312 bc_fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
1313 struct nesting *thisblock;
1319 register struct goto_fixup *f, *prev;
1320 int saved_stack_depth;
1322 /* F is the fixup we are considering; PREV is the previous one. */
1324 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1326 /* Test for a fixup that is inactive because it is already handled. */
1327 if (f->before_jump == 0)
1329 /* Delete inactive fixup from the chain, if that is easy to do. */
1331 prev->next = f->next;
1334 /* Emit code to restore the stack and continue */
1335 bc_emit_bytecode_labeldef (f->label);
1337 /* Save stack_depth across call, since bc_adjust_stack will alter
1338 the perceived stack depth via the instructions generated. */
1340 if (f->bc_stack_level >= 0)
1342 saved_stack_depth = stack_depth;
1343 bc_adjust_stack (stack_depth - f->bc_stack_level);
1344 stack_depth = saved_stack_depth;
/* Unconditional jump to the fixup's real target.  */
1347 bc_emit_bytecode (jump);
1348 bc_emit_bytecode_labelref (f->bc_target);
1350 #ifdef DEBUG_PRINT_CODE
1351 fputc ('\n', stderr);
/* All fixups handled; reset the chain.  */
1355 goto_fixup_chain = NULL;
1358 /* Generate RTL for an asm statement (explicit assembler code).
1359 BODY is a STRING_CST node containing the assembler code text,
1360 or an ADDR_EXPR containing a STRING_CST. */
/* Body fragment of expand_asm: emit an ASM_INPUT insn for a simple asm
   statement whose text is BODY (a STRING_CST, possibly behind ADDR_EXPR).
   NOTE(review): the function header `expand_asm (body)' and its closing
   lines are missing from this extraction; leading numbers are artifacts.  */
/* asm is meaningless when compiling to bytecode.  */
1366 if (output_bytecode)
1368 error ("`asm' is invalid when generating bytecode");
/* -fcheck-memory-usage instrumentation cannot see inside asm.  */
1372 if (flag_check_memory_usage)
1374 error ("`asm' cannot be used with `-fcheck-memory-usage'");
/* Strip an ADDR_EXPR wrapper to reach the STRING_CST.  */
1378 if (TREE_CODE (body) == ADDR_EXPR)
1379 body = TREE_OPERAND (body, 0);
1381 emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
1382 TREE_STRING_POINTER (body)));
1386 /* Generate RTL for an asm statement with arguments.
1387 STRING is the instruction template.
1388 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
1389 Each output or input has an expression in the TREE_VALUE and
1390 a constraint-string in the TREE_PURPOSE.
1391 CLOBBERS is a list of STRING_CST nodes each naming a hard register
1392 that is clobbered by this insn.
1394 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
1395 Some elements of OUTPUTS may be replaced with trees representing temporary
1396 values. The caller should copy those temporary values to the originally
1399 VOL nonzero means the insn is volatile; don't optimize it. */
/* Expand an extended asm statement: validate output and input constraint
   strings, evaluate the operands, build an ASM_OPERANDS rtx (wrapped in
   SETs and a PARALLEL with CLOBBERs as needed), and emit it.
   NOTE(review): this chunk is a heavily gapped extraction -- braces,
   several declarations (e.g. nclobbers, ninout, found_plus, allows_reg,
   obody, num), loop bodies, and break statements are missing; the leading
   numbers on each line are artifacts.  The comments below annotate only
   what the visible lines establish.  */
1402 expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
1403 tree string, outputs, inputs, clobbers;
1408 rtvec argvec, constraints;
1410 int ninputs = list_length (inputs);
1411 int noutputs = list_length (outputs);
1416 /* Vector of RTX's of evaluated output operands. */
1417 rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
/* Bookkeeping for "+" (in-out) operands: their operand numbers and modes.  */
1418 int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
1419 enum machine_mode *inout_mode
1420 = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
1421 /* The insn we have emitted. */
1424 if (output_bytecode)
1426 error ("`asm' is invalid when generating bytecode");
1430 if (flag_check_memory_usage)
1432 error ("`asm' cannot be used with `-fcheck-memory-usage'");
1436 /* Count the number of meaningful clobbered registers, ignoring what
1437 we would ignore later. */
1439 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1441 char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1442 i = decode_reg_name (regname);
/* decode_reg_name: >= 0 is a hard reg, -4 is `memory'; both count.  */
1443 if (i >= 0 || i == -4)
1446 error ("unknown register name `%s' in `asm'", regname);
/* First pass over OUTPUTS: validate constraints and evaluate operands.  */
1451 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1453 tree val = TREE_VALUE (tail);
1454 tree type = TREE_TYPE (val);
1457 int found_equal = 0;
1461 /* If there's an erroneous arg, emit no insn. */
1462 if (TREE_TYPE (val) == error_mark_node)
1465 /* Make sure constraint has `=' and does not have `+'. Also, see
1466 if it allows any register. Be liberal on the latter test, since
1467 the worst that happens if we get it wrong is we issue an error
1470 for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1; j++)
1471 switch (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j])
1474 /* Make sure we can specify the matching operand. */
1477 error ("output operand constraint %d contains `+'", i);
1481 /* Replace '+' with '='. */
1482 TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] = '=';
1490 case '?': case '!': case '*': case '%': case '&':
1491 case 'V': case 'm': case 'o': case '<': case '>':
1492 case 'E': case 'F': case 'G': case 'H': case 'X':
1493 case 's': case 'i': case 'n':
1494 case 'I': case 'J': case 'K': case 'L': case 'M':
1495 case 'N': case 'O': case 'P': case ',':
1496 #ifdef EXTRA_CONSTRAINT
1497 case 'Q': case 'R': case 'S': case 'T': case 'U':
/* Digit constraints match another operand -- illegal in an output.  */
1501 case '0': case '1': case '2': case '3': case '4':
1502 case '5': case '6': case '7': case '8': case '9':
1503 error ("matching constraint not valid in output operand");
1506 case 'p': case 'g': case 'r':
1512 if (! found_equal && ! found_plus)
1514 error ("output operand constraint lacks `='");
1518 /* If an output operand is not a decl or indirect ref and our constraint
1519 allows a register, make a temporary to act as an intermediate.
1520 Make the asm insn write into that, then our caller will copy it to
1521 the real output operand. Likewise for promoted variables. */
1523 if (TREE_CODE (val) == INDIRECT_REF
1524 || (TREE_CODE_CLASS (TREE_CODE (val)) == 'd'
1525 && ! (GET_CODE (DECL_RTL (val)) == REG
1526 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
1531 mark_addressable (TREE_VALUE (tail));
/* NOTE(review): the lvalue of this assignment (presumably
   output_rtx[i], orig line 1533) is missing from the extraction.  */
1534 = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode,
1535 EXPAND_MEMORY_USE_WO);
1537 if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
1538 error ("output number %d not directly addressable", i);
/* Otherwise evaluate into a temporary the caller copies back.  */
1542 output_rtx[i] = assign_temp (type, 0, 0, 0);
1543 TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
/* Record in-out ("+") operands so matching inputs can be added later.  */
1548 inout_mode[ninout] = TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)));
1549 inout_opnum[ninout++] = i;
1554 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1556 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1560 /* Make vectors for the expression-rtx and constraint strings. */
1562 argvec = rtvec_alloc (ninputs);
1563 constraints = rtvec_alloc (ninputs);
1565 body = gen_rtx (ASM_OPERANDS, VOIDmode,
1566 TREE_STRING_POINTER (string), "", 0, argvec, constraints,
1568 MEM_VOLATILE_P (body) = vol;
1570 /* Eval the inputs and put them into ARGVEC.
1571 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1574 for (tail = inputs; tail; tail = TREE_CHAIN (tail))
1579 /* If there's an erroneous arg, emit no insn,
1580 because the ASM_INPUT would get VOIDmode
1581 and that could cause a crash in reload. */
1582 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
/* A NULL TREE_PURPOSE means a bare register name was given as input.  */
1584 if (TREE_PURPOSE (tail) == NULL_TREE)
1586 error ("hard register `%s' listed as input operand to `asm'",
1587 TREE_STRING_POINTER (TREE_VALUE (tail)) );
1591 /* Make sure constraint has neither `=' nor `+'. */
1593 for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1; j++)
1594 switch (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j])
1597 error ("input operand constraint contains `%c'",
1598 TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
1601 case '?': case '!': case '*': case '%': case '&':
1602 case 'V': case 'm': case 'o': case '<': case '>':
1603 case 'E': case 'F': case 'G': case 'H': case 'X':
1604 case 's': case 'i': case 'n':
1605 case 'I': case 'J': case 'K': case 'L': case 'M':
1606 case 'N': case 'O': case 'P': case ',':
1607 #ifdef EXTRA_CONSTRAINT
1608 case 'Q': case 'R': case 'S': case 'T': case 'U':
1612 /* Whether or not a numeric constraint allows a register is
1613 decided by the matching constraint, and so there is no need
1614 to do anything special with them. We must handle them in
1615 the default case, so that we don't unnecessarily force
1616 operands to memory. */
1617 case '0': case '1': case '2': case '3': case '4':
1618 case '5': case '6': case '7': case '8': case '9':
1619 if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]
1621 error ("matching constraint references invalid operand number");
1623 /* ... fall through ... */
1625 case 'p': case 'g': case 'r':
1632 mark_addressable (TREE_VALUE (tail));
1634 XVECEXP (body, 3, i) /* argvec */
1635 = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
/* A constant that isn't a valid general operand must be forced into a
   register or into the constant pool before it can appear in an asm.  */
1636 if (CONSTANT_P (XVECEXP (body, 3, i))
1637 && ! general_operand (XVECEXP (body, 3, i),
1638 TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)))))
1641 XVECEXP (body, 3, i)
1642 = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1643 XVECEXP (body, 3, i));
1645 XVECEXP (body, 3, i)
1646 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1647 XVECEXP (body, 3, i));
/* NOTE(review): the opening clause of this condition (orig lines
   1649-1650) is missing from the extraction.  */
1651 && (GET_CODE (XVECEXP (body, 3, i)) == REG
1652 || GET_CODE (XVECEXP (body, 3, i)) == SUBREG
1653 || GET_CODE (XVECEXP (body, 3, i)) == CONCAT))
1655 tree type = TREE_TYPE (TREE_VALUE (tail));
1656 rtx memloc = assign_temp (type, 1, 1, 1);
1658 emit_move_insn (memloc, XVECEXP (body, 3, i));
1659 XVECEXP (body, 3, i) = memloc;
1662 XVECEXP (body, 4, i) /* constraints */
1663 = gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1664 TREE_STRING_POINTER (TREE_PURPOSE (tail)));
1668 /* Protect all the operands from the queue,
1669 now that they have all been evaluated. */
1671 for (i = 0; i < ninputs - ninout; i++)
1672 XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);
1674 for (i = 0; i < noutputs; i++)
1675 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1677 /* For in-out operands, copy output rtx to input rtx. */
1678 for (i = 0; i < ninout; i++)
/* Static table of digit constraint strings "0".."9" for matching.  */
1680 static char match[9+1][2]
1681 = {"0", "1", "2", "3", "4", "5", "6", "7", "8", "9"};
1682 int j = inout_opnum[i];
1684 XVECEXP (body, 3, ninputs - ninout + i) /* argvec */
1686 XVECEXP (body, 4, ninputs - ninout + i) /* constraints */
1687 = gen_rtx (ASM_INPUT, inout_mode[j], match[j]);
1690 /* Now, for each output, construct an rtx
1691 (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
1692 ARGVEC CONSTRAINTS))
1693 If there is more than one, put them inside a PARALLEL. */
/* Single output, no clobbers: one SET suffices.  */
1695 if (noutputs == 1 && nclobbers == 0)
1697 XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
1698 insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));
1700 else if (noutputs == 0 && nclobbers == 0)
1702 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1703 insn = emit_insn (body);
/* General case: a PARALLEL holding SETs plus CLOBBERs.  */
1709 if (num == 0) num = 1;
1710 body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));
1712 /* For each output operand, store a SET. */
1714 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1716 XVECEXP (body, 0, i)
1717 = gen_rtx (SET, VOIDmode,
1719 gen_rtx (ASM_OPERANDS, VOIDmode,
1720 TREE_STRING_POINTER (string),
1721 TREE_STRING_POINTER (TREE_PURPOSE (tail)),
1722 i, argvec, constraints,
1724 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1727 /* If there are no outputs (but there are some clobbers)
1728 store the bare ASM_OPERANDS into the PARALLEL. */
1731 XVECEXP (body, 0, i++) = obody;
1733 /* Store (clobber REG) for each clobbered register specified. */
1735 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1737 char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1738 int j = decode_reg_name (regname);
1742 if (j == -3) /* `cc', which is not a register */
1745 if (j == -4) /* `memory', don't cache memory across asm */
1747 XVECEXP (body, 0, i++)
1748 = gen_rtx (CLOBBER, VOIDmode,
1749 gen_rtx (MEM, BLKmode,
1750 gen_rtx (SCRATCH, VOIDmode, 0)));
1754 /* Ignore unknown register, error already signalled. */
1758 /* Use QImode since that's guaranteed to clobber just one reg. */
1759 XVECEXP (body, 0, i++)
1760 = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));
1763 insn = emit_insn (body);
1769 /* Generate RTL to evaluate the expression EXP
1770 and remember it in case this is the VALUE in a ({... VALUE; }) constr. */
/* Expand expression statement EXP, remembering its type and value in
   last_expr_type / last_expr_value for use as the result of a ({...})
   statement expression.  Also issues "statement with no effect" style
   warnings when the value is unused.
   NOTE(review): partial extraction -- braces, the parameter declaration,
   and several lines are missing; leading numbers are artifacts.  */
1773 expand_expr_stmt (exp)
/* Bytecode path: expand, then pop any extra values left on the stack.  */
1776 if (output_bytecode)
1778 int org_stack_depth = stack_depth;
1780 bc_expand_expr (exp);
1782 /* Restore stack depth */
1783 if (stack_depth < org_stack_depth)
1786 bc_emit_instruction (drop);
1788 last_expr_type = TREE_TYPE (exp);
1792 /* If -W, warn about statements with no side effects,
1793 except for an explicit cast to void (e.g. for assert()), and
1794 except inside a ({...}) where they may be useful. */
1795 if (expr_stmts_for_value == 0 && exp != error_mark_node)
1797 if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
1798 && !(TREE_CODE (exp) == CONVERT_EXPR
1799 && TREE_TYPE (exp) == void_type_node))
1800 warning_with_file_and_line (emit_filename, emit_lineno,
1801 "statement with no effect");
1802 else if (warn_unused)
1803 warn_if_unused_value (exp);
1806 /* If EXP is of function type and we are expanding statements for
1807 value, convert it to pointer-to-function. */
1808 if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
1809 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
1811 last_expr_type = TREE_TYPE (exp);
1812 if (! flag_syntax_only)
1813 last_expr_value = expand_expr (exp,
1814 (expr_stmts_for_value
1815 ? NULL_RTX : const0_rtx),
1818 /* If all we do is reference a volatile value in memory,
1819 copy it to a register to be sure it is actually touched. */
1820 if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
1821 && TREE_THIS_VOLATILE (exp))
1823 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
1825 else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
1826 copy_to_reg (last_expr_value);
/* BLKmode volatile: reference the value via a self-comparison.  */
1829 rtx lab = gen_label_rtx ();
1831 /* Compare the value with itself to reference it. */
1832 emit_cmp_insn (last_expr_value, last_expr_value, EQ,
1833 expand_expr (TYPE_SIZE (last_expr_type),
1834 NULL_RTX, VOIDmode, 0),
1836 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
1837 emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
1842 /* If this expression is part of a ({...}) and is in memory, we may have
1843 to preserve temporaries. */
1844 preserve_temp_slots (last_expr_value);
1846 /* Free any temporaries used to evaluate this expression. Any temporary
1847 used as a result of this expression will already have been preserved
1854 /* Warn if EXP contains any computations whose results are not used.
1855 Return 1 if a warning is printed; 0 otherwise. */
/* Recursively warn if EXP contains computations whose results are unused.
   Returns 1 if a warning was printed, else 0 (per the file comment above).
   NOTE(review): partial extraction -- braces, parameter declaration, many
   case labels and return statements are missing; leading numbers are
   artifacts.  */
1858 warn_if_unused_value (exp)
/* An expression already marked used needs no warning.  */
1861 if (TREE_USED (exp))
1864 switch (TREE_CODE (exp))
/* Side-effecting codes: never warn.  */
1866 case PREINCREMENT_EXPR:
1867 case POSTINCREMENT_EXPR:
1868 case PREDECREMENT_EXPR:
1869 case POSTDECREMENT_EXPR:
1874 case METHOD_CALL_EXPR:
1876 case TRY_CATCH_EXPR:
1877 case WITH_CLEANUP_EXPR:
1879 /* We don't warn about COND_EXPR because it may be a useful
1880 construct if either arm contains a side effect. */
1885 /* For a binding, warn if no side effect within it. */
1886 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1889 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1891 case TRUTH_ORIF_EXPR:
1892 case TRUTH_ANDIF_EXPR:
1893 /* In && or ||, warn if 2nd operand has no side effect. */
1894 return warn_if_unused_value (TREE_OPERAND (exp, 1));
/* NOTE(review): the COMPOUND_EXPR case label (orig ~1896) is missing.  */
1897 if (TREE_NO_UNUSED_WARNING (exp))
1899 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
1901 /* Let people do `(foo (), 0)' without a warning. */
1902 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1904 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1908 case NON_LVALUE_EXPR:
1909 /* Don't warn about values cast to void. */
1910 if (TREE_TYPE (exp) == void_type_node)
1912 /* Don't warn about conversions not explicit in the user's program. */
1913 if (TREE_NO_UNUSED_WARNING (exp))
1915 /* Assignment to a cast usually results in a cast of a modify.
1916 Don't complain about that. There can be an arbitrary number of
1917 casts before the modify, so we must loop until we find the first
1918 non-cast expression and then test to see if that is a modify. */
1920 tree tem = TREE_OPERAND (exp, 0);
1922 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
1923 tem = TREE_OPERAND (tem, 0);
1925 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
1926 || TREE_CODE (tem) == CALL_EXPR)
1932 /* Don't warn about automatic dereferencing of references, since
1933 the user cannot control it. */
1934 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1935 return warn_if_unused_value (TREE_OPERAND (exp, 0));
1936 /* ... fall through ... */
/* Default case: warn unless the reference itself is a side effect.  */
1939 /* Referencing a volatile value is a side effect, so don't warn. */
1940 if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
1941 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1942 && TREE_THIS_VOLATILE (exp))
1945 warning_with_file_and_line (emit_filename, emit_lineno,
1946 "value computed is not used");
1951 /* Clear out the memory of the last expression evaluated. */
1959 /* Begin a statement which will return a value.
1960 Return the RTL_EXPR for this statement expr.
1961 The caller must save that value and pass it to expand_end_stmt_expr. */
/* Begin a ({...}) statement expression: create an RTL_EXPR node, start a
   sequence for it, and bump expr_stmts_for_value.  Returns the RTL_EXPR
   (or, for bytecode, an INTEGER_CST recording the stack depth) which the
   caller later passes to expand_end_stmt_expr.
   NOTE(review): return type, local declarations, braces, and the final
   return are outside the visible extracted text.  */
1964 expand_start_stmt_expr ()
1969 /* When generating bytecode just note down the stack depth */
1970 if (output_bytecode)
1971 return (build_int_2 (stack_depth, 0));
1973 /* Make the RTL_EXPR node temporary, not momentary,
1974 so that rtl_expr_chain doesn't become garbage. */
1975 momentary = suspend_momentary ();
1976 t = make_node (RTL_EXPR);
1977 resume_momentary (momentary);
1978 do_pending_stack_adjust ();
1979 start_sequence_for_rtl_expr (t);
/* Inside a statement expression, expression statements yield values.  */
1981 expr_stmts_for_value++;
1985 /* Restore the previous state at the end of a statement that returns a value.
1986 Returns a tree node representing the statement's value and the
1987 insns to compute the value.
1989 The nodes of that expression have been freed by now, so we cannot use them.
1990 But we don't want to do that anyway; the expression has already been
1991 evaluated and now we just want to use the value. So generate a RTL_EXPR
1992 with the proper type and RTL value.
1994 If the last substatement was not an expression,
1995 return something with type `void'. */
/* Finish a ({...}) statement expression begun by expand_start_stmt_expr:
   fill RTL_EXPR node T with the last expression's type, value, and the
   insn sequence, then decrement expr_stmts_for_value.
   NOTE(review): partial extraction -- braces, the parameter declaration,
   end_sequence-related lines, and the return are missing; leading numbers
   are artifacts.  */
1998 expand_end_stmt_expr (t)
/* Bytecode path: the last value was popped by `drop'; undo that.  */
2001 if (output_bytecode)
2007 /* At this point, all expressions have been evaluated in order.
2008 However, all expression values have been popped when evaluated,
2009 which means we have to recover the last expression value. This is
2010 the last value removed by means of a `drop' instruction. Instead
2011 of adding code to inhibit dropping the last expression value, it
2012 is here recovered by undoing the `drop'. Since `drop' is
2013 equivalent to `adjustackSI [1]', it can be undone with `adjstackSI
2016 bc_adjust_stack (-1);
2018 if (!last_expr_type)
2019 last_expr_type = void_type_node;
2021 t = make_node (RTL_EXPR);
2022 TREE_TYPE (t) = last_expr_type;
2023 RTL_EXPR_RTL (t) = NULL;
2024 RTL_EXPR_SEQUENCE (t) = NULL;
2026 /* Don't consider deleting this expr or containing exprs at tree level. */
2027 TREE_THIS_VOLATILE (t) = 1;
/* RTL path: default the result when the last substatement had no value.  */
2035 if (last_expr_type == 0)
2037 last_expr_type = void_type_node;
2038 last_expr_value = const0_rtx;
2040 else if (last_expr_value == 0)
2041 /* There are some cases where this can happen, such as when the
2042 statement is void type. */
2043 last_expr_value = const0_rtx;
2044 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2045 /* Remove any possible QUEUED. */
2046 last_expr_value = protect_from_queue (last_expr_value, 0);
2050 TREE_TYPE (t) = last_expr_type;
2051 RTL_EXPR_RTL (t) = last_expr_value;
2052 RTL_EXPR_SEQUENCE (t) = get_insns ();
/* Keep the RTL_EXPR alive so its sequence is not garbage-collected.  */
2054 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2058 /* Don't consider deleting this expr or containing exprs at tree level. */
2059 TREE_SIDE_EFFECTS (t) = 1;
2060 /* Propagate volatility of the actual RTL expr. */
2061 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2064 expr_stmts_for_value--;
2069 /* Generate RTL for the start of an if-then. COND is the expression
2070 whose truth should be tested.
2072 If EXITFLAG is nonzero, this conditional is visible to
2073 `exit_something'. */
/* Begin an if-then: push a nesting entry on cond_stack and emit a
   conditional jump around the then-clause.  COND is the tested
   expression; EXITFLAG nonzero makes the conditional visible to
   `exit_something' (see the file comment above).
   NOTE(review): return type, parameter declarations, and braces are
   outside the visible extracted text.  */
2076 expand_start_cond (cond, exitflag)
2080 struct nesting *thiscond = ALLOC_NESTING ();
2082 /* Make an entry on cond_stack for the cond we are entering. */
2084 thiscond->next = cond_stack;
2085 thiscond->all = nesting_stack;
2086 thiscond->depth = ++nesting_depth;
/* next_label is where control goes when COND is false.  */
2087 thiscond->data.cond.next_label = gen_label_rtx ();
2088 /* Before we encounter an `else', we don't need a separate exit label
2089 unless there are supposed to be exit statements
2090 to exit this conditional. */
2091 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2092 thiscond->data.cond.endif_label = thiscond->exit_label;
2093 cond_stack = thiscond;
2094 nesting_stack = thiscond;
2096 if (output_bytecode)
2097 bc_expand_start_cond (cond, exitflag);
/* RTL path: jump to next_label when COND is false.  */
2099 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2102 /* Generate RTL between then-clause and the elseif-clause
2103 of an if-then-elseif-.... */
/* Between the then-clause and an elseif-clause: jump past the remaining
   clauses, define the false-branch label of the previous condition, and
   test the new COND.
   NOTE(review): return type, parameter declaration, and braces are
   outside the visible extracted text.  */
2106 expand_start_elseif (cond)
/* Lazily create the endif label the first time it is needed.  */
2109 if (cond_stack->data.cond.endif_label == 0)
2110 cond_stack->data.cond.endif_label = gen_label_rtx ();
2111 emit_jump (cond_stack->data.cond.endif_label);
2112 emit_label (cond_stack->data.cond.next_label);
2113 cond_stack->data.cond.next_label = gen_label_rtx ();
2114 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2117 /* Generate RTL between the then-clause and the else-clause
2118 of an if-then-else. */
/* Between the then-clause and the else-clause: jump to the endif label
   and define the previous condition's false-branch label.
   NOTE(review): return type and braces are outside the visible text.  */
2121 expand_start_else ()
2123 if (cond_stack->data.cond.endif_label == 0)
2124 cond_stack->data.cond.endif_label = gen_label_rtx ();
2126 if (output_bytecode)
2128 bc_expand_start_else ();
2132 emit_jump (cond_stack->data.cond.endif_label);
2133 emit_label (cond_stack->data.cond.next_label);
2134 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2137 /* After calling expand_start_else, turn this "else" into an "else if"
2138 by providing another condition. */
/* After expand_start_else, turn the "else" into an "else if" by creating
   a fresh false-branch label and testing COND against it.
   NOTE(review): return type, parameter declaration, and braces are
   outside the visible extracted text.  */
2141 expand_elseif (cond)
2144 cond_stack->data.cond.next_label = gen_label_rtx ();
2145 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2148 /* Generate RTL for the end of an if-then.
2149 Pop the record for it off of cond_stack. */
/* Body fragment of expand_end_cond: define whichever pending labels
   remain and pop the cond_stack entry.
   NOTE(review): the function header line and braces are missing from
   this extraction; leading numbers are artifacts.  */
2154 struct nesting *thiscond = cond_stack;
2156 if (output_bytecode)
2157 bc_expand_end_cond ();
2160 do_pending_stack_adjust ();
/* next_label is still set when there was no else/elseif.  */
2161 if (thiscond->data.cond.next_label)
2162 emit_label (thiscond->data.cond.next_label);
2163 if (thiscond->data.cond.endif_label)
2164 emit_label (thiscond->data.cond.endif_label);
2167 POPSTACK (cond_stack);
2172 /* Generate code for the start of an if-then. COND is the expression
2173 whose truth is to be tested; if EXITFLAG is nonzero this conditional
2174 is to be visible to exit_something. It is assumed that the caller
2175 has pushed the previous context on the cond stack. */
/* Bytecode start of an if-then: evaluate COND and emit xjumpifnot to the
   conditional's exit label.  Per the comment above, the caller has
   already pushed the context on cond_stack.
   NOTE(review): partial extraction -- return type, parameter
   declarations, braces, and the #endif are missing.  */
2178 bc_expand_start_cond (cond, exitflag)
2182 struct nesting *thiscond = cond_stack;
2184 thiscond->data.case_stmt.nominal_type = cond;
/* NOTE(review): a line between 2184 and 2186 (orig 2185) is missing.  */
2186 thiscond->exit_label = gen_label_rtx ();
2187 bc_expand_expr (cond);
2188 bc_emit_bytecode (xjumpifnot);
2189 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));
2191 #ifdef DEBUG_PRINT_CODE
2192 fputc ('\n', stderr);
2196 /* Generate the label for the end of an if with
/* Bytecode end of an if: define the conditional's exit label.
   NOTE(review): return type and braces are outside the visible text.  */
2200 bc_expand_end_cond ()
2202 struct nesting *thiscond = cond_stack;
2204 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->exit_label));
2207 /* Generate code for the start of the else- clause of
/* Bytecode start of an else-clause: the old exit label becomes the endif
   label, a new exit label is made, and a jump over the else-clause is
   emitted; then the false-branch (endif) label is defined.
   NOTE(review): return type, braces, and the #endif are outside the
   visible extracted text.  */
2211 bc_expand_start_else ()
2213 struct nesting *thiscond = cond_stack;
2215 thiscond->data.cond.endif_label = thiscond->exit_label;
2216 thiscond->exit_label = gen_label_rtx ();
2217 bc_emit_bytecode (jump);
2218 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));
2220 #ifdef DEBUG_PRINT_CODE
2221 fputc ('\n', stderr);
2224 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->data.cond.endif_label));
2227 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2228 loop should be exited by `exit_something'. This is a loop for which
2229 `expand_continue' will jump to the top of the loop.
2231 Make an entry on loop_stack to record the labels associated with
2235 expand_start_loop (exit_flag)
2238 register struct nesting *thisloop = ALLOC_NESTING ();
2240 /* Make an entry on loop_stack for the loop we are entering. */
2242 thisloop->next = loop_stack;
2243 thisloop->all = nesting_stack;
2244 thisloop->depth = ++nesting_depth;
2245 thisloop->data.loop.start_label = gen_label_rtx ();
2246 thisloop->data.loop.end_label = gen_label_rtx ();
2247 thisloop->data.loop.alt_end_label = 0;
/* By default `continue' jumps back to the top of the loop;
   expand_start_loop_continue_elsewhere overrides this.  */
2248 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
/* Only loops started with EXIT_FLAG are visible to exit_something.  */
2249 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2250 loop_stack = thisloop;
2251 nesting_stack = thisloop;
2253 if (output_bytecode)
2255 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));
/* RTL path: mark the loop beginning for the loop optimizer and place
   the top-of-loop label.  */
2259 do_pending_stack_adjust ();
2261 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
2262 emit_label (thisloop->data.loop.start_label);
2267 /* Like expand_start_loop but for a loop where the continuation point
2268 (for expand_continue_loop) will be specified explicitly. */
2271 expand_start_loop_continue_elsewhere (exit_flag)
2274 struct nesting *thisloop = expand_start_loop (exit_flag);
/* Replace the default continue point (the start label) with a fresh
   label to be placed later by expand_loop_continue_here.  */
2275 loop_stack->data.loop.continue_label = gen_label_rtx ();
2279 /* Specify the continuation point for a loop started with
2280 expand_start_loop_continue_elsewhere.
2281 Use this at the point in the code to which a continue statement
2285 expand_loop_continue_here ()
2287 if (output_bytecode)
2289 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (loop_stack->data.loop.continue_label));
/* RTL path: note the continue point for the loop optimizer and place
   the continue label here.  */
2292 do_pending_stack_adjust ();
2293 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
2294 emit_label (loop_stack->data.loop.continue_label);
/* Bytecode version of expand_end_loop: jump back to the top, define
   the exit label, and pop the loop record.  */
2300 bc_expand_end_loop ()
2302 struct nesting *thisloop = loop_stack;
/* Unconditional jump back to the loop's start label.  */
2304 bc_emit_bytecode (jump);
2305 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));
2307 #ifdef DEBUG_PRINT_CODE
2308 fputc ('\n', stderr);
2311 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->exit_label));
2312 POPSTACK (loop_stack);
2317 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2318 Pop the block off of loop_stack. */
2324 register rtx start_label;
2325 rtx last_test_insn = 0;
2328 if (output_bytecode)
2330 bc_expand_end_loop ();
2334 insn = get_last_insn ();
2335 start_label = loop_stack->data.loop.start_label;
2337 /* Mark the continue-point at the top of the loop if none elsewhere. */
2338 if (start_label == loop_stack->data.loop.continue_label)
2339 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2341 do_pending_stack_adjust ();
2343 /* If optimizing, perhaps reorder the loop. If the loop
2344 starts with a conditional exit, roll that to the end
2345 where it will optimize together with the jump back.
2347 We look for the last conditional branch to the exit that we encounter
2348 before hitting 30 insns or a CALL_INSN. If we see an unconditional
2349 branch to the exit first, use it.
2351 We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
2352 because moving them is not valid. */
/* Only bother if the last insn is not already a conditional branch
   (i.e. a pc-set with an IF_THEN_ELSE source).  */
2356 ! (GET_CODE (insn) == JUMP_INSN
2357 && GET_CODE (PATTERN (insn)) == SET
2358 && SET_DEST (PATTERN (insn)) == pc_rtx
2359 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2361 /* Scan insns from the top of the loop looking for a qualified
2362 conditional exit. */
2363 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2364 insn = NEXT_INSN (insn))
/* Calls and labels terminate the search region outright.  */
2366 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
2369 if (GET_CODE (insn) == NOTE
2370 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2371 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2374 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
/* Give up past 30 real insns once a candidate has been found.  */
2377 if (last_test_insn && num_insns > 30)
/* A conditional jump whose taken or fall-through arm targets the
   loop's end label (or its recorded alternate) is a candidate exit
   test to roll to the bottom.  */
2380 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
2381 && SET_DEST (PATTERN (insn)) == pc_rtx
2382 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
2383 && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
2384 && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
2385 == loop_stack->data.loop.end_label)
2386 || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
2387 == loop_stack->data.loop.alt_end_label)))
2388 || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
2389 && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
2390 == loop_stack->data.loop.end_label)
2391 || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
2392 == loop_stack->data.loop.alt_end_label)))))
2393 last_test_insn = insn;
/* An unconditional jump to the exit seen before any conditional
   candidate also qualifies; take its following BARRIER too.  */
2395 if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
2396 && GET_CODE (PATTERN (insn)) == SET
2397 && SET_DEST (PATTERN (insn)) == pc_rtx
2398 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
2399 && ((XEXP (SET_SRC (PATTERN (insn)), 0)
2400 == loop_stack->data.loop.end_label)
2401 || (XEXP (SET_SRC (PATTERN (insn)), 0)
2402 == loop_stack->data.loop.alt_end_label)))
2403 /* Include BARRIER. */
2404 last_test_insn = NEXT_INSN (insn);
2407 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2409 /* We found one. Move everything from there up
2410 to the end of the loop, and add a jump into the loop
2411 to jump to there. */
2412 register rtx newstart_label = gen_label_rtx ();
2413 register rtx start_move = start_label;
2415 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2416 then we want to move this note also. */
2417 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2418 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2419 == NOTE_INSN_LOOP_CONT))
2420 start_move = PREV_INSN (start_move);
/* Rotate [start_move .. last_test_insn] to the bottom of the loop and
   enter the loop through a jump to the old start label.  */
2422 emit_label_after (newstart_label, PREV_INSN (start_move));
2423 reorder_insns (start_move, last_test_insn, get_last_insn ());
2424 emit_jump_insn_after (gen_jump (start_label),
2425 PREV_INSN (newstart_label));
2426 emit_barrier_after (PREV_INSN (newstart_label));
2427 start_label = newstart_label;
/* Close the loop: back-edge jump, loop-end note, exit label.  */
2431 emit_jump (start_label);
2432 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2433 emit_label (loop_stack->data.loop.end_label);
2435 POPSTACK (loop_stack);
2440 /* Generate a jump to the current loop's continue-point.
2441 This is usually the top of the loop, but may be specified
2442 explicitly elsewhere. If not currently inside a loop,
2443 return 0 and do nothing; caller will print an error message. */
2446 expand_continue_loop (whichloop)
2447 struct nesting *whichloop;
/* A null WHICHLOOP means the innermost loop on loop_stack.  */
2451 whichloop = loop_stack;
/* Jump to the continue label; expand_goto_internal handles any fixups
   needed to leave intervening contours.  (Call continues on a line
   elided from this listing.)  */
2454 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2459 /* Generate a jump to exit the current loop. If not currently inside a loop,
2460 return 0 and do nothing; caller will print an error message. */
2463 expand_exit_loop (whichloop)
2464 struct nesting *whichloop;
/* A null WHICHLOOP means the innermost loop on loop_stack.  */
2468 whichloop = loop_stack;
/* Jump to the loop's end label, running any needed fixups.  */
2471 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2475 /* Generate a conditional jump to exit the current loop if COND
2476 evaluates to zero. If not currently inside a loop,
2477 return 0 and do nothing; caller will print an error message. */
2480 expand_exit_loop_if_false (whichloop, cond)
2481 struct nesting *whichloop;
/* A null WHICHLOOP means the innermost loop on loop_stack.  */
2486 whichloop = loop_stack;
2489 if (output_bytecode)
/* Bytecode path: evaluate COND and branch to the exit label when it
   is false.  */
2491 bc_expand_expr (cond);
2492 bc_expand_goto_internal (xjumpifnot,
2493 BYTECODE_BC_LABEL (whichloop->exit_label),
2498 /* In order to handle fixups, we actually create a conditional jump
2499 around a unconditional branch to exit the loop. If fixups are
2500 necessary, they go before the unconditional branch. */
2502 rtx label = gen_label_rtx ();
/* Jump over the exit branch when COND is true.  */
2505 do_jump (cond, NULL_RTX, label);
2506 last_insn = get_last_insn ();
2507 if (GET_CODE (last_insn) == CODE_LABEL)
/* Remember the label do_jump may have just emitted, so that
   expand_end_loop can still recognize exit jumps through it.  */
2508 whichloop->data.loop.alt_end_label = last_insn;
2509 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2517 /* Return non-zero if we should preserve sub-expressions as separate
2518 pseudos. We never do so if we aren't optimizing. We always do so
2519 if -fexpensive-optimizations.
2521 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2522 the loop may still be a small one. */
2525 preserve_subexpressions_p ()
2529 if (flag_expensive_optimizations)
2532 if (optimize == 0 || loop_stack == 0)
2535 insn = get_last_insn_anywhere ();
/* Heuristic: we count as "early" in the loop while fewer than about
   three insn UIDs per allocatable register have been emitted since
   the loop's start label.  */
2538 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2539 < n_non_fixed_regs * 3));
2543 /* Generate a jump to exit the current loop, conditional, binding contour
2544 or case statement. Not all such constructs are visible to this function,
2545 only those started with EXIT_FLAG nonzero. Individual languages use
2546 the EXIT_FLAG parameter to control which kinds of constructs you can
2549 If not currently inside anything that can be exited,
2550 return 0 and do nothing; caller will print an error message. */
2553 expand_exit_something ()
/* Walk outward through all nesting levels; the first one that has an
   exit label (i.e. was started with EXIT_FLAG) is the target.  */
2557 for (n = nesting_stack; n; n = n->all)
2558 if (n->exit_label != 0)
2560 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2567 /* Generate RTL to return from the current function, with no value.
2568 (That is, we do not do anything about returning any value.) */
2571 expand_null_return ()
2573 struct nesting *block = block_stack;
2576 if (output_bytecode)
2578 bc_emit_instruction (ret);
2582 /* Does any pending block have cleanups? */
2584 while (block && block->data.block.cleanups == 0)
2585 block = block->next;
2587 /* If yes, use a goto to return, since that runs cleanups. */
2589 expand_null_return_1 (last_insn, block != 0);
2592 /* Generate RTL to return from the current function, with value VAL. */
2595 expand_value_return (val)
2598 struct nesting *block = block_stack;
2599 rtx last_insn = get_last_insn ();
2600 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2602 /* Copy the value to the return location
2603 unless it's already there. */
2605 if (return_reg != val)
2607 #ifdef PROMOTE_FUNCTION_RETURN
/* If the target promotes return values, convert VAL to the promoted
   mode before storing it in the return register.  */
2608 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2609 int unsignedp = TREE_UNSIGNED (type);
2610 enum machine_mode mode
2611 = promote_mode (type, DECL_MODE (DECL_RESULT (current_function_decl)),
2614 if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
2615 convert_move (return_reg, val, unsignedp);
2618 emit_move_insn (return_reg, val);
/* Emit a USE of each hard return register so later passes keep the
   value live up to the return.  */
2620 if (GET_CODE (return_reg) == REG
2621 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2622 emit_insn (gen_rtx (USE, VOIDmode, return_reg));
2623 /* Handle calls that return values in multiple non-contiguous locations.
2624 The Irix 6 ABI has examples of this. */
2625 else if (GET_CODE (return_reg) == PARALLEL)
2629 for (i = 0; i < XVECLEN (return_reg, 0); i++)
2631 rtx x = XEXP (XVECEXP (return_reg, 0, i), 0);
2633 if (GET_CODE (x) == REG
2634 && REGNO (x) < FIRST_PSEUDO_REGISTER)
2635 emit_insn (gen_rtx (USE, VOIDmode, x));
2639 /* Does any pending block have cleanups? */
2641 while (block && block->data.block.cleanups == 0)
2642 block = block->next;
2644 /* If yes, use a goto to return, since that runs cleanups.
2645 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2647 expand_null_return_1 (last_insn, block != 0);
2650 /* Output a return with no value. If LAST_INSN is nonzero,
2651 pretend that the return takes place after LAST_INSN.
2652 If USE_GOTO is nonzero then don't use a return instruction;
2653 go to the return label instead. This causes any cleanups
2654 of pending blocks to be executed normally. */
2657 expand_null_return_1 (last_insn, use_goto)
/* Prefer the cleanup label when one exists, so cleanups run before
   the epilogue.  */
2661 rtx end_label = cleanup_label ? cleanup_label : return_label;
2663 clear_pending_stack_adjust ();
2664 do_pending_stack_adjust ();
2667 /* PCC-struct return always uses an epilogue. */
2668 if (current_function_returns_pcc_struct || use_goto)
/* Lazily create the return label if it does not exist yet (the
   guarding test is elided from this listing), then jump to it with
   any fixups placed after LAST_INSN.  */
2671 end_label = return_label = gen_label_rtx ();
2672 expand_goto_internal (NULL_TREE, end_label, last_insn);
2676 /* Otherwise output a simple return-insn if one is available,
2677 unless it won't do the job. */
2679 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2681 emit_jump_insn (gen_return ());
2687 /* Otherwise jump to the epilogue. */
2688 expand_goto_internal (NULL_TREE, end_label, last_insn);
2691 /* Generate RTL to evaluate the expression RETVAL and return it
2692 from the current function. */
2695 expand_return (retval)
2698 /* If there are any cleanups to be performed, then they will
2699 be inserted following LAST_INSN. It is desirable
2700 that the last_insn, for such purposes, should be the
2701 last insn before computing the return value. Otherwise, cleanups
2702 which call functions can clobber the return value. */
2703 /* ??? rms: I think that is erroneous, because in C++ it would
2704 run destructors on variables that might be used in the subsequent
2705 computation of the return value. */
2707 register rtx val = 0;
2711 struct nesting *block;
2713 /* Bytecode returns are quite simple, just leave the result on the
2714 arithmetic stack. */
2715 if (output_bytecode)
2717 bc_expand_expr (retval);
2718 bc_emit_instruction (ret);
2722 /* If function wants no value, give it none. */
2723 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
/* Still evaluate RETVAL for its side effects, then do a plain
   valueless return.  */
2725 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2727 expand_null_return ();
2731 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2732 /* This is not sufficient. We also need to watch for cleanups of the
2733 expression we are about to expand. Unfortunately, we cannot know
2734 if it has cleanups until we expand it, and we want to change how we
2735 expand it depending upon if we need cleanups. We can't win. */
2737 cleanups = any_pending_cleanups (1);
/* Strip RETVAL down to the actual value expression (the RHS of the
   assignment to the RESULT_DECL), when it has that shape.  */
2742 if (TREE_CODE (retval) == RESULT_DECL)
2743 retval_rhs = retval;
2744 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2745 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2746 retval_rhs = TREE_OPERAND (retval, 1);
2747 else if (TREE_TYPE (retval) == void_type_node)
2748 /* Recognize tail-recursive call to void function. */
2749 retval_rhs = retval;
2751 retval_rhs = NULL_TREE;
2753 /* Only use `last_insn' if there are cleanups which must be run. */
2754 if (cleanups || cleanup_label != 0)
2755 last_insn = get_last_insn ();
2757 /* Distribute return down conditional expr if either of the sides
2758 may involve tail recursion (see test below). This enhances the number
2759 of tail recursions we see. Don't do this always since it can produce
2760 sub-optimal code in some cases and we distribute assignments into
2761 conditional expressions when it would help. */
2763 if (optimize && retval_rhs != 0
2764 && frame_offset == 0
2765 && TREE_CODE (retval_rhs) == COND_EXPR
2766 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2767 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
/* Rewrite "return c ? a : b;" as two separate returns, recursing so
   each arm gets its own tail-call treatment.  */
2769 rtx label = gen_label_rtx ();
2772 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2773 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2774 DECL_RESULT (current_function_decl),
2775 TREE_OPERAND (retval_rhs, 1));
2776 TREE_SIDE_EFFECTS (expr) = 1;
2777 expand_return (expr);
/* The false arm (the label emission between the arms is elided from
   this listing).  */
2780 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2781 DECL_RESULT (current_function_decl),
2782 TREE_OPERAND (retval_rhs, 2));
2783 TREE_SIDE_EFFECTS (expr) = 1;
2784 expand_return (expr);
2788 /* For tail-recursive call to current function,
2789 just jump back to the beginning.
2790 It's unsafe if any auto variable in this function
2791 has its address taken; for simplicity,
2792 require stack frame to be empty. */
2793 if (optimize && retval_rhs != 0
2794 && frame_offset == 0
2795 && TREE_CODE (retval_rhs) == CALL_EXPR
2796 && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
2797 && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
2798 /* Finish checking validity, and if valid emit code
2799 to set the argument variables for the new call. */
2800 && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
2801 DECL_ARGUMENTS (current_function_decl)))
/* Lazily create the re-entry label just after parameter setup.  */
2803 if (tail_recursion_label == 0)
2805 tail_recursion_label = gen_label_rtx ();
2806 emit_label_after (tail_recursion_label,
2807 tail_recursion_reentry);
2810 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
2815 /* This optimization is safe if there are local cleanups
2816 because expand_null_return takes care of them.
2817 ??? I think it should also be safe when there is a cleanup label,
2818 because expand_null_return takes care of them, too.
2819 Any reason why not? */
2820 if (HAVE_return && cleanup_label == 0
2821 && ! current_function_returns_pcc_struct
2822 && BRANCH_COST <= 1)
2824 /* If this is return x == y; then generate
2825 if (x == y) return 1; else return 0;
2826 if we can do it with explicit return insns and branches are cheap,
2827 but not if we have the corresponding scc insn. */
2830 switch (TREE_CODE (retval_rhs))
2856 case TRUTH_ANDIF_EXPR:
2857 case TRUTH_ORIF_EXPR:
2858 case TRUTH_AND_EXPR:
2860 case TRUTH_NOT_EXPR:
2861 case TRUTH_XOR_EXPR:
/* Branch on the boolean expression and return constant 1 or 0
   directly.  */
2864 op0 = gen_label_rtx ();
2865 jumpifnot (retval_rhs, op0);
2866 expand_value_return (const1_rtx);
2868 expand_value_return (const0_rtx);
2877 #endif /* HAVE_return */
2879 /* If the result is an aggregate that is being returned in one (or more)
2880 registers, load the registers here. The compiler currently can't handle
2881 copying a BLKmode value into registers. We could put this code in a
2882 more general area (for use by everyone instead of just function
2883 call/return), but until this feature is generally usable it is kept here
2884 (and in expand_call). The value must go into a pseudo in case there
2885 are cleanups that will clobber the real return register. */
2888 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2889 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2891 int i, bitpos, xbitpos;
2892 int big_endian_correction = 0;
2893 int bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2894 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2895 int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)),BITS_PER_WORD);
2896 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
2897 rtx result_reg, src, dst;
2898 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2899 enum machine_mode tmpmode, result_reg_mode;
2901 /* Structures whose size is not a multiple of a word are aligned
2902 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2903 machine, this means we must skip the empty high order bytes when
2904 calculating the bit offset. */
2905 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2906 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2909 /* Copy the structure BITSIZE bits at a time. */
2910 for (bitpos = 0, xbitpos = big_endian_correction;
2911 bitpos < bytes * BITS_PER_UNIT;
2912 bitpos += bitsize, xbitpos += bitsize)
2914 /* We need a new destination pseudo each time xbitpos is
2915 on a word boundary and when xbitpos == big_endian_correction
2916 (the first time through). */
2917 if (xbitpos % BITS_PER_WORD == 0
2918 || xbitpos == big_endian_correction)
2920 /* Generate an appropriate register. */
2921 dst = gen_reg_rtx (word_mode);
2922 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2924 /* Clobber the destination before we move anything into it. */
2925 emit_insn (gen_rtx (CLOBBER, VOIDmode, dst));
2928 /* We need a new source operand each time bitpos is on a word
2930 if (bitpos % BITS_PER_WORD == 0)
2931 src = operand_subword_force (result_val,
2932 bitpos / BITS_PER_WORD,
2935 /* Use bitpos for the source extraction (left justified) and
2936 xbitpos for the destination store (right justified). */
2937 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2938 extract_bit_field (src, bitsize,
2939 bitpos % BITS_PER_WORD, 1,
2940 NULL_RTX, word_mode,
2942 bitsize / BITS_PER_UNIT,
2944 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2947 /* Find the smallest integer mode large enough to hold the
2948 entire structure and use that mode instead of BLKmode
2949 on the USE insn for the return register. */
2950 bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2951 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2952 tmpmode != MAX_MACHINE_MODE;
2953 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
2955 /* Have we found a large enough mode? */
2956 if (GET_MODE_SIZE (tmpmode) >= bytes)
2960 /* No suitable mode found. */
2961 if (tmpmode == MAX_MACHINE_MODE)
2964 PUT_MODE (DECL_RTL (DECL_RESULT (current_function_decl)), tmpmode);
/* Assemble the per-word pseudos into one register of the chosen mode
   (at least word-sized so operand_subword works).  */
2966 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
2967 result_reg_mode = word_mode;
2969 result_reg_mode = tmpmode;
2970 result_reg = gen_reg_rtx (result_reg_mode);
2973 for (i = 0; i < n_regs; i++)
2974 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
2977 if (tmpmode != result_reg_mode)
2978 result_reg = gen_lowpart (tmpmode, result_reg);
2980 expand_value_return (result_reg);
/* Cleanups present and result goes in a hard register: compute into a
   pseudo first so cleanups cannot clobber it (the guarding condition
   is partly elided from this listing).  */
2984 && TREE_TYPE (retval_rhs) != void_type_node
2985 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2987 /* Calculate the return value into a pseudo reg. */
2988 val = gen_reg_rtx (DECL_MODE (DECL_RESULT (current_function_decl)));
2989 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
2990 val = force_not_mem (val);
2992 /* Return the calculated value, doing cleanups first. */
2993 expand_value_return (val);
2997 /* No cleanups or no hard reg used;
2998 calculate value into hard return reg. */
2999 expand_expr (retval, const0_rtx, VOIDmode, 0);
3001 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
3005 /* Return 1 if the end of the generated RTX is not a barrier.
3006 This means code already compiled can drop through. */
3009 drop_through_at_end_p ()
3011 rtx insn = get_last_insn ();
/* Skip trailing notes; they carry no execution semantics.  */
3012 while (insn && GET_CODE (insn) == NOTE)
3013 insn = PREV_INSN (insn);
3014 return insn && GET_CODE (insn) != BARRIER;
3017 /* Emit code to alter this function's formal parms for a tail-recursive call.
3018 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3019 FORMALS is the chain of decls of formals.
3020 Return 1 if this can be done;
3021 otherwise return 0 and do not emit any code. */
3024 tail_recursion_args (actuals, formals)
3025 tree actuals, formals;
3027 register tree a = actuals, f = formals;
3029 register rtx *argvec;
3031 /* Check that number and types of actuals are compatible
3032 with the formals. This is not always true in valid C code.
3033 Also check that no formal needs to be addressable
3034 and that all formals are scalars. */
3036 /* Also count the args. */
3038 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3040 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3041 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
/* Formals must live in registers and not be BLKmode.  */
3043 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
/* Both chains must be exhausted together (same arg count).  */
3046 if (a != 0 || f != 0)
3049 /* Compute all the actuals. */
3051 argvec = (rtx *) alloca (i * sizeof (rtx));
3053 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3054 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
3056 /* Find which actual values refer to current values of previous formals.
3057 Copy each of them now, before any formal is changed. */
3059 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3063 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3064 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3065 { copy = 1; break; }
/* Copy to a fresh pseudo so the upcoming stores cannot clobber a
   value still needed by a later argument.  */
3067 argvec[i] = copy_to_reg (argvec[i]);
3070 /* Store the values of the actuals into the formals. */
3072 for (f = formals, a = actuals, i = 0; f;
3073 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3075 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3076 emit_move_insn (DECL_RTL (f), argvec[i]);
/* Modes differ: convert, using the actual's signedness.  */
3078 convert_move (DECL_RTL (f), argvec[i],
3079 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3086 /* Generate the RTL code for entering a binding contour.
3087 The variables are declared one by one, by calls to `expand_decl'.
3089 EXIT_FLAG is nonzero if this construct should be visible to
3090 `exit_something'. */
3093 expand_start_bindings (exit_flag)
3096 struct nesting *thisblock = ALLOC_NESTING ();
/* In RTL mode, mark the block start with a note so debug info and the
   end of the contour can find it; bytecode mode emits none.  */
3097 rtx note = output_bytecode ? 0 : emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
3099 /* Make an entry on block_stack for the block we are entering. */
3101 thisblock->next = block_stack;
3102 thisblock->all = nesting_stack;
3103 thisblock->depth = ++nesting_depth;
3104 thisblock->data.block.stack_level = 0;
3105 thisblock->data.block.cleanups = 0;
3106 thisblock->data.block.function_call_count = 0;
3107 thisblock->data.block.exception_region = 0;
3108 thisblock->data.block.target_temp_slot_level = target_temp_slot_level;
3110 thisblock->data.block.conditional_code = 0;
3111 thisblock->data.block.last_unconditional_cleanup = note;
3112 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
/* Inherit the enclosing block's cleanup chain when it is non-empty
   (the first condition of this test is elided from this listing).  */
3115 && !(block_stack->data.block.cleanups == NULL_TREE
3116 && block_stack->data.block.outer_cleanups == NULL_TREE))
3117 thisblock->data.block.outer_cleanups
3118 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3119 block_stack->data.block.outer_cleanups);
3121 thisblock->data.block.outer_cleanups = 0;
3122 thisblock->data.block.label_chain = 0;
3123 thisblock->data.block.innermost_stack_block = stack_block_stack;
3124 thisblock->data.block.first_insn = note;
3125 thisblock->data.block.block_start_count = ++block_start_count;
3126 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3127 block_stack = thisblock;
3128 nesting_stack = thisblock;
3130 if (!output_bytecode)
3132 /* Make a new level for allocating stack slots. */
3137 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3138 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3139 expand_expr are made. After we end the region, we know that all
3140 space for all temporaries that were created by TARGET_EXPRs will be
3141 destroyed and their space freed for reuse. */
3144 expand_start_target_temps ()
3146 /* This is so that even if the result is preserved, the space
3147 allocated will be freed, as we know that it is no longer in use. */
3150 /* Start a new binding layer that will keep track of all cleanup
3151 actions to be performed. */
3152 expand_start_bindings (0);
/* Temp slots created from here on belong to this target region.  */
3154 target_temp_slot_level = temp_slot_level;
/* End the TARGET_EXPR temporary region opened by
   expand_start_target_temps, running its pending cleanups.  */
3158 expand_end_target_temps ()
3160 expand_end_bindings (NULL_TREE, 0, 0);
3162 /* This is so that even if the result is preserved, the space
3163 allocated will be freed, as we know that it is no longer in use. */
3167 /* Mark top block of block_stack as an implicit binding for an
3168 exception region. This is used to prevent infinite recursion when
3169 ending a binding with expand_end_bindings. It is only ever called
3170 by expand_eh_region_start, as that it the only way to create a
3171 block stack for a exception region. */
3174 mark_block_as_eh_region ()
3176 block_stack->data.block.exception_region = 1;
/* If the enclosing block is conditional, propagate its conditional
   cleanup state into this implicit block so cleanups recorded here
   are threaded onto the right chain.  */
3177 if (block_stack->next
3178 && block_stack->next->data.block.conditional_code)
3180 block_stack->data.block.conditional_code
3181 = block_stack->next->data.block.conditional_code;
3182 block_stack->data.block.last_unconditional_cleanup
3183 = block_stack->next->data.block.last_unconditional_cleanup;
3184 block_stack->data.block.cleanup_ptr
3185 = block_stack->next->data.block.cleanup_ptr;
3189 /* True if we are currently emitting insns in an area of output code
3190 that is controlled by a conditional expression. This is used by
3191 the cleanup handling code to generate conditional cleanup actions. */
3194 conditional_context ()
3196 return block_stack && block_stack->data.block.conditional_code;
3199 /* Mark top block of block_stack as not for an implicit binding for an
3200 exception region. This is only ever done by expand_eh_region_end
3201 to let expand_end_bindings know that it is being called explicitly
3202 to end the binding layer for just the binding layer associated with
3203 the exception region, otherwise expand_end_bindings would try and
3204 end all implicit binding layers for exceptions regions, and then
3205 one normal binding layer. */
3208 mark_block_as_not_eh_region ()
3210 block_stack->data.block.exception_region = 0;
3213 /* True if the top block of block_stack was marked as for an exception
3214 region by mark_block_as_eh_region. */
/* NOTE(review): the function header is elided from this listing; only
   the return statement of this predicate is visible.  */
3219 return block_stack && block_stack->data.block.exception_region;
3222 /* Given a pointer to a BLOCK node, save a pointer to the most recently
3223 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
3227 remember_end_note (block)
3228 register tree block;
3230 BLOCK_END_NOTE (block) = last_block_end_note;
/* Consume the note so it cannot be attached to a second block.  */
3231 last_block_end_note = NULL_RTX;
3234 /* Generate RTL code to terminate a binding contour.
3235 VARS is the chain of VAR_DECL nodes
3236 for the variables bound in this contour.
3237 MARK_ENDS is nonzero if we should put a note at the beginning
3238 and end of this binding contour.
3240 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3241 (That is true automatically if the contour has a saved stack level.) */
3244 expand_end_bindings (vars, mark_ends, dont_jump_in)
3249 register struct nesting *thisblock;
3252 while (block_stack->data.block.exception_region)
3254 /* Because we don't need or want a new temporary level and
3255 because we didn't create one in expand_eh_region_start,
3256 create a fake one now to avoid removing one in
3257 expand_end_bindings. */
3260 block_stack->data.block.exception_region = 0;
3262 expand_end_bindings (NULL_TREE, 0, 0);
3265 if (output_bytecode)
3267 bc_expand_end_bindings (vars, mark_ends, dont_jump_in);
3271 /* Since expand_eh_region_start does an expand_start_bindings, we
3272 have to first end all the bindings that were created by
3273 expand_eh_region_start. */
3275 thisblock = block_stack;
3278 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3279 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL
3280 && ! DECL_IN_SYSTEM_HEADER (decl)
3281 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3282 warning_with_decl (decl, "unused variable `%s'");
3284 if (thisblock->exit_label)
3286 do_pending_stack_adjust ();
3287 emit_label (thisblock->exit_label);
3290 /* If necessary, make a handler for nonlocal gotos taking
3291 place in the function calls in this block. */
3292 if (function_call_count != thisblock->data.block.function_call_count
3294 /* Make handler for outermost block
3295 if there were any nonlocal gotos to this function. */
3296 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3297 /* Make handler for inner block if it has something
3298 special to do when you jump out of it. */
3299 : (thisblock->data.block.cleanups != 0
3300 || thisblock->data.block.stack_level != 0)))
3303 rtx afterward = gen_label_rtx ();
3304 rtx handler_label = gen_label_rtx ();
3305 rtx save_receiver = gen_reg_rtx (Pmode);
3308 /* Don't let jump_optimize delete the handler. */
3309 LABEL_PRESERVE_P (handler_label) = 1;
3311 /* Record the handler address in the stack slot for that purpose,
3312 during this block, saving and restoring the outer value. */
3313 if (thisblock->next != 0)
3315 emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
3318 emit_move_insn (save_receiver, nonlocal_goto_handler_slot);
3319 insns = get_insns ();
3321 emit_insns_before (insns, thisblock->data.block.first_insn);
3325 emit_move_insn (nonlocal_goto_handler_slot,
3326 gen_rtx (LABEL_REF, Pmode, handler_label));
3327 insns = get_insns ();
3329 emit_insns_before (insns, thisblock->data.block.first_insn);
3331 /* Jump around the handler; it runs only when specially invoked. */
3332 emit_jump (afterward);
3333 emit_label (handler_label);
3335 #ifdef HAVE_nonlocal_goto
3336 if (! HAVE_nonlocal_goto)
3338 /* First adjust our frame pointer to its actual value. It was
3339 previously set to the start of the virtual area corresponding to
3340 the stacked variables when we branched here and now needs to be
3341 adjusted to the actual hardware fp value.
3343 Assignments are to virtual registers are converted by
3344 instantiate_virtual_regs into the corresponding assignment
3345 to the underlying register (fp in this case) that makes
3346 the original assignment true.
3347 So the following insn will actually be
3348 decrementing fp by STARTING_FRAME_OFFSET. */
3349 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3351 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3352 if (fixed_regs[ARG_POINTER_REGNUM])
3354 #ifdef ELIMINABLE_REGS
3355 /* If the argument pointer can be eliminated in favor of the
3356 frame pointer, we don't need to restore it. We assume here
3357 that if such an elimination is present, it can always be used.
3358 This is the case on all known machines; if we don't make this
3359 assumption, we do unnecessary saving on many machines. */
3360 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3363 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
3364 if (elim_regs[i].from == ARG_POINTER_REGNUM
3365 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3368 if (i == sizeof elim_regs / sizeof elim_regs [0])
3371 /* Now restore our arg pointer from the address at which it
3372 was saved in our stack frame.
3373 If there hasn't be space allocated for it yet, make
3375 if (arg_pointer_save_area == 0)
3376 arg_pointer_save_area
3377 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3378 emit_move_insn (virtual_incoming_args_rtx,
3379 /* We need a pseudo here, or else
3380 instantiate_virtual_regs_1 complains. */
3381 copy_to_reg (arg_pointer_save_area));
3386 #ifdef HAVE_nonlocal_goto_receiver
3387 if (HAVE_nonlocal_goto_receiver)
3388 emit_insn (gen_nonlocal_goto_receiver ());
3391 /* The handler expects the desired label address in the static chain
3392 register. It tests the address and does an appropriate jump
3393 to whatever label is desired. */
3394 for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
3395 /* Skip any labels we shouldn't be able to jump to from here. */
3396 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3398 rtx not_this = gen_label_rtx ();
3399 rtx this = gen_label_rtx ();
3400 do_jump_if_equal (static_chain_rtx,
3401 gen_rtx (LABEL_REF, Pmode, DECL_RTL (TREE_VALUE (link))),
3403 emit_jump (not_this);
3405 expand_goto (TREE_VALUE (link));
3406 emit_label (not_this);
3408 /* If label is not recognized, abort. */
3409 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0,
3412 emit_label (afterward);
3415 /* Don't allow jumping into a block that has a stack level.
3416 Cleanups are allowed, though. */
3418 || thisblock->data.block.stack_level != 0)
3420 struct label_chain *chain;
3422 /* Any labels in this block are no longer valid to go to.
3423 Mark them to cause an error message. */
3424 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3426 DECL_TOO_LATE (chain->label) = 1;
3427 /* If any goto without a fixup came to this label,
3428 that must be an error, because gotos without fixups
3429 come from outside all saved stack-levels. */
3430 if (TREE_ADDRESSABLE (chain->label))
3431 error_with_decl (chain->label,
3432 "label `%s' used before containing binding contour");
3436 /* Restore stack level in effect before the block
3437 (only if variable-size objects allocated). */
3438 /* Perform any cleanups associated with the block. */
3440 if (thisblock->data.block.stack_level != 0
3441 || thisblock->data.block.cleanups != 0)
3443 /* Only clean up here if this point can actually be reached. */
3444 int reachable = GET_CODE (get_last_insn ()) != BARRIER;
3446 /* Don't let cleanups affect ({...}) constructs. */
3447 int old_expr_stmts_for_value = expr_stmts_for_value;
3448 rtx old_last_expr_value = last_expr_value;
3449 tree old_last_expr_type = last_expr_type;
3450 expr_stmts_for_value = 0;
3452 /* Do the cleanups. */
3453 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3455 do_pending_stack_adjust ();
3457 expr_stmts_for_value = old_expr_stmts_for_value;
3458 last_expr_value = old_last_expr_value;
3459 last_expr_type = old_last_expr_type;
3461 /* Restore the stack level. */
3463 if (reachable && thisblock->data.block.stack_level != 0)
3465 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3466 thisblock->data.block.stack_level, NULL_RTX);
3467 if (nonlocal_goto_handler_slot != 0)
3468 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3472 /* Any gotos out of this block must also do these things.
3473 Also report any gotos with fixups that came to labels in this
3475 fixup_gotos (thisblock,
3476 thisblock->data.block.stack_level,
3477 thisblock->data.block.cleanups,
3478 thisblock->data.block.first_insn,
3482 /* Mark the beginning and end of the scope if requested.
3483 We do this now, after running cleanups on the variables
3484 just going out of scope, so they are in scope for their cleanups. */
3487 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3489 /* Get rid of the beginning-mark if we don't make an end-mark. */
3490 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3492 /* If doing stupid register allocation, make sure lives of all
3493 register variables declared here extend thru end of scope. */
3496 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3498 rtx rtl = DECL_RTL (decl);
3499 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
3503 /* Restore the temporary level of TARGET_EXPRs. */
3504 target_temp_slot_level = thisblock->data.block.target_temp_slot_level;
3506 /* Restore block_stack level for containing block. */
3508 stack_block_stack = thisblock->data.block.innermost_stack_block;
3509 POPSTACK (block_stack);
3511 /* Pop the stack slot nesting and free any slots at this level. */
3516 /* End a binding contour.
3517 VARS is the chain of VAR_DECL nodes for the variables bound
3518 in this contour. MARK_ENDS is nonzero if we should put a note
3519 at the beginning and end of this binding contour.
3520 DONT_JUMP_IN is nonzero if it is not valid to jump into this
3524 bc_expand_end_bindings (vars, mark_ends, dont_jump_in)
3529 struct nesting *thisbind = nesting_stack;
/* Warn about any bound variable that was declared but never used.  */
3533 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3534 if (! TREE_USED (TREE_VALUE (decl)) && TREE_CODE (TREE_VALUE (decl)) == VAR_DECL)
/* NOTE(review): the test above inspects TREE_VALUE (decl), but the warning
   below is issued on the TREE_LIST node DECL itself.  This looks like it
   should be warning_with_decl (TREE_VALUE (decl), ...) -- confirm against
   the RTL path in expand_end_bindings, which warns on the decl directly.  */
3535 warning_with_decl (decl, "unused variable `%s'");
/* Emit the definition of the contour's exit label, if one was requested.  */
3537 if (thisbind->exit_label)
3538 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisbind->exit_label));
3540 /* Pop block/bindings off stack */
3541 POPSTACK (block_stack);
3544 /* Generate RTL for the automatic variable declaration DECL.
3545 (Other kinds of declarations are simply ignored if seen here.)
   Chooses, based on DECL's type, size and qualifiers, whether the
   variable lives in a pseudo register, a fixed stack slot, or
   dynamically allocated stack space, and stores the result in
   DECL_RTL (decl).  */
3551 struct nesting *thisblock = block_stack;
/* Bytecode compilation takes an entirely separate path.  */
3554 if (output_bytecode)
3556 bc_expand_decl (decl, 0);
3560 type = TREE_TYPE (decl);
3562 /* Only automatic variables need any expansion done.
3563 Static and external variables, and external functions,
3564 will be handled by `assemble_variable' (called from finish_decl).
3565 TYPE_DECL and CONST_DECL require nothing.
3566 PARM_DECLs are handled in `assign_parms'. */
3568 if (TREE_CODE (decl) != VAR_DECL)
3570 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3573 /* Create the RTL representation for the variable. */
/* Erroneous type: use a harmless dummy MEM so later code won't crash.  */
3575 if (type == error_mark_node)
3576 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
3577 else if (DECL_SIZE (decl) == 0)
3578 /* Variable with incomplete type. */
3580 if (DECL_INITIAL (decl) == 0)
3581 /* Error message was already done; now avoid a crash. */
3582 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
3584 /* An initializer is going to decide the size of this array.
3585 Until we know the size, represent its address with a reg. */
3586 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
3587 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (type);
/* Case: scalar-mode variable eligible for a (pseudo) register.
   Excluded: -ffloat-store floats, volatile and addressable decls.  */
3589 else if (DECL_MODE (decl) != BLKmode
3590 /* If -ffloat-store, don't put explicit float vars
3592 && !(flag_float_store
3593 && TREE_CODE (type) == REAL_TYPE)
3594 && ! TREE_THIS_VOLATILE (decl)
3595 && ! TREE_ADDRESSABLE (decl)
3596 && (DECL_REGISTER (decl) || ! obey_regdecls))
3598 /* Automatic variable that can go in a register. */
3599 int unsignedp = TREE_UNSIGNED (type);
3600 enum machine_mode reg_mode
3601 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3603 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3604 mark_user_reg (DECL_RTL (decl));
/* Tell the register allocator this pseudo holds a pointer, and how
   aligned the pointed-to object is.  */
3606 if (TREE_CODE (type) == POINTER_TYPE)
3607 mark_reg_pointer (DECL_RTL (decl),
3608 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl)))
/* Case: fixed-size stack variable (size is a compile-time constant,
   and not so large that -fstack-check would need dynamic handling).  */
3612 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST
3613 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3614 && (TREE_INT_CST_HIGH (DECL_SIZE (decl)) != 0
3615 || (TREE_INT_CST_LOW (DECL_SIZE (decl))
3616 > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
3618 /* Variable of fixed size that goes on the stack. */
3622 /* If we previously made RTL for this decl, it must be an array
3623 whose size was determined by the initializer.
3624 The old address was a register; set that register now
3625 to the proper address. */
3626 if (DECL_RTL (decl) != 0)
3628 if (GET_CODE (DECL_RTL (decl)) != MEM
3629 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3631 oldaddr = XEXP (DECL_RTL (decl), 0);
/* Round the bit size up to whole bytes when allocating the slot.  */
3635 = assign_stack_temp (DECL_MODE (decl),
3636 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
3637 + BITS_PER_UNIT - 1)
3640 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3642 /* Set alignment we actually gave this decl. */
3643 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3644 : GET_MODE_BITSIZE (DECL_MODE (decl)));
/* Copy the final slot address into the placeholder register that the
   earlier incomplete-type RTL used.  */
3648 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3649 if (addr != oldaddr)
3650 emit_move_insn (oldaddr, addr);
3653 /* If this is a memory ref that contains aggregate components,
3654 mark it as such for cse and loop optimize. */
3655 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3657 /* If this is in memory because of -ffloat-store,
3658 set the volatile bit, to prevent optimizations from
3659 undoing the effects. */
3660 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
3661 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3665 /* Dynamic-size object: must push space on the stack. */
3669 /* Record the stack pointer on entry to block, if have
3670 not already done so. */
3671 if (thisblock->data.block.stack_level == 0)
3673 do_pending_stack_adjust ();
3674 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3675 &thisblock->data.block.stack_level,
3676 thisblock->data.block.first_insn);
3677 stack_block_stack = thisblock;
3680 /* Compute the variable's size, in bytes. */
3681 size = expand_expr (size_binop (CEIL_DIV_EXPR,
3683 size_int (BITS_PER_UNIT)),
3684 NULL_RTX, VOIDmode, 0);
3687 /* Allocate space on the stack for the variable. Note that
3688 DECL_ALIGN says how the variable is to be aligned and we
3689 cannot use it to conclude anything about the alignment of
3691 address = allocate_dynamic_stack_space (size, NULL_RTX,
3692 TYPE_ALIGN (TREE_TYPE (decl)));
3694 /* Reference the variable indirect through that rtx. */
3695 DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);
3697 /* If this is a memory ref that contains aggregate components,
3698 mark it as such for cse and loop optimize. */
3699 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3701 /* Indicate the alignment we actually gave this variable. */
3702 #ifdef STACK_BOUNDARY
3703 DECL_ALIGN (decl) = STACK_BOUNDARY;
3705 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
/* Common post-processing for all memory-resident cases.  */
3709 if (TREE_THIS_VOLATILE (decl))
3710 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3711 #if 0 /* A variable is not necessarily unchanging
3712 just because it is const. RTX_UNCHANGING_P
3713 means no change in the function,
3714 not merely no change in the variable's scope.
3715 It is correct to set RTX_UNCHANGING_P if the variable's scope
3716 is the whole function. There's no convenient way to test that. */
3717 if (TREE_READONLY (decl))
3718 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
3721 /* If doing stupid register allocation, make sure life of any
3722 register variable starts here, at the start of its scope. */
3725 use_variable (DECL_RTL (decl));
3729 /* Generate code for the automatic variable declaration DECL. For
3730 most variables this just means we give it a stack offset. The
3731 compiler sometimes emits cleanups without variables and we will
3732 have to deal with those too. */
3735 bc_expand_decl (decl, cleanup)
3743 /* A cleanup with no variable. */
3750 /* Only auto variables need any work. */
3751 if (TREE_CODE (decl) != VAR_DECL || TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3754 type = TREE_TYPE (decl);
/* Erroneous type: attach a dummy RTL so downstream code won't crash.  */
3756 if (type == error_mark_node)
3757 DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
3759 else if (DECL_SIZE (decl) == 0)
3761 /* Variable with incomplete type. The stack offset herein will be
3762 fixed later in expand_decl_init. */
3763 DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
/* Fixed-size variable: allocate a local slot, converting bits to bytes.  */
3765 else if (TREE_CONSTANT (DECL_SIZE (decl)))
3767 DECL_RTL (decl) = bc_allocate_local (TREE_INT_CST_LOW (DECL_SIZE (decl)) / BITS_PER_UNIT,
/* Variable-size object: allocated via the variable-array mechanism.  */
3771 DECL_RTL (decl) = bc_allocate_variable_array (DECL_SIZE (decl));
3774 /* Emit code to perform the initialization of a declaration DECL.
   Temporarily clears TREE_USED so the initialization itself does not
   count as a "use" for unused-variable warnings.  */
3777 expand_decl_init (decl)
3780 int was_used = TREE_USED (decl);
/* Bytecode compilation takes a separate path.  */
3782 if (output_bytecode)
3784 bc_expand_decl_init (decl);
3788 /* If this is a CONST_DECL, we don't have to generate any code, but
3789 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3790 to be set while in the obstack containing the constant. If we don't
3791 do this, we can lose if we have functions nested three deep and the middle
3792 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3793 the innermost function is the first to expand that STRING_CST. */
3794 if (TREE_CODE (decl) == CONST_DECL)
3796 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3797 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3798 EXPAND_INITIALIZER)
/* Static variables are initialized elsewhere (assemble_variable).  */;
3802 if (TREE_STATIC (decl))
3805 /* Compute and store the initial value now. */
/* An erroneous initializer: store zero into scalar-typed variables so
   later uses read a defined value, and avoid cascading errors.  */
3807 if (DECL_INITIAL (decl) == error_mark_node)
3809 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3810 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3811 || code == POINTER_TYPE)
3812 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
/* A real (non-aggregate-list) initializer: emit a line note for the
   declaration's source position, then do the assignment.  */
3816 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3818 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3819 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3823 /* Don't let the initialization count as "using" the variable. */
3824 TREE_USED (decl) = was_used;
3826 /* Free any temporaries we made while initializing the decl. */
3827 preserve_temp_slots (NULL_RTX);
3831 /* Expand initialization for variable-sized types. Allocate array
3832 using newlocalSI and set local variable, which is a pointer to the
   newly allocated storage.  */
3836 bc_expand_variable_local_init (decl)
3839 /* Evaluate size expression and coerce to SI */
3840 bc_expand_expr (DECL_SIZE (decl));
3842 /* Type sizes are always (?) of TREE_CODE INTEGER_CST, so
3843 no coercion is necessary (?) */
3845 /* emit_typecode_conversion (preferred_typecode (TYPE_MODE (DECL_SIZE (decl)),
3846 TREE_UNSIGNED (DECL_SIZE (decl))), SIcode); */
3848 /* Emit code to allocate array */
3849 bc_emit_instruction (newlocalSI);
3851 /* Store array pointer in local variable. This is the only instance
3852 where we actually want the address of the pointer to the
3853 variable-size block, rather than the pointer itself. We avoid
3854 using expand_address() since that would cause the pointer to be
3855 pushed rather than its address. Hence the hard-coded reference;
3856 notice also that the variable is always local (no global
3857 variable-size type variables). */
3859 bc_load_localaddr (DECL_RTL (decl));
3860 bc_emit_instruction (storeP);
3864 /* Emit code to initialize a declaration.
   Bytecode analogue of expand_decl_init: allocates variable-size
   storage first if needed, then expands the initializer assignment,
   and finally rebalances the interpreter stack.  */
3867 bc_expand_decl_init (decl)
3870 int org_stack_depth;
3872 /* Static initializers are handled elsewhere */
3874 if (TREE_STATIC (decl))
3877 /* Remember the original stack depth so we can rebalance below.  */
3878 org_stack_depth = stack_depth;
3880 /* If the type is variable-size, we first create its space (we ASSUME
3881 it CAN'T be static). We do this regardless of whether there's an
3882 initializer assignment or not. */
3884 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
3885 bc_expand_variable_local_init (decl);
3887 /* Expand initializer assignment */
3888 if (DECL_INITIAL (decl) == error_mark_node)
3890 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3892 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3893 || code == POINTER_TYPE)
/* NOTE(review): both calls below pass (TREE_TYPE (decl), decl, 0, 0),
   which differs from the RTL path's expand_assignment (decl, value, 0, 0)
   in expand_decl_init.  Verify against the bytecode expand_assignment
   signature; the initializer value does not appear to be passed at all.  */
3895 expand_assignment (TREE_TYPE (decl), decl, 0, 0);
3897 else if (DECL_INITIAL (decl))
3898 expand_assignment (TREE_TYPE (decl), decl, 0, 0);
3900 /* Restore stack depth */
/* NOTE(review): the adjustment amount (stack_depth - org_stack_depth) is
   negative under this condition -- confirm bc_adjust_stack's sign
   convention before changing anything here.  */
3901 if (org_stack_depth > stack_depth)
3904 bc_adjust_stack (stack_depth - org_stack_depth);
3908 /* CLEANUP is an expression to be executed at exit from this binding contour;
3909 for example, in C++, it might call the destructor for this variable.
3911 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
3912 CLEANUP multiple times, and have the correct semantics. This
3913 happens in exception handling, for gotos, returns, breaks that
3914 leave the current scope.
3916 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3917 that is not associated with any particular variable. */
3920 expand_decl_cleanup (decl, cleanup)
3923 struct nesting *thisblock = block_stack;
3925 /* Error if we are not in any block. */
3929 /* Record the cleanup if there is one. */
3935 tree *cleanups = &thisblock->data.block.cleanups;
3936 int cond_context = conditional_context ();
/* In a conditional context, guard the cleanup with a runtime flag:
   the flag starts at 0 (set unconditionally at the block's last
   unconditional point) and is set to 1 only when control actually
   reaches this registration point.  */
3940 rtx flag = gen_reg_rtx (word_mode);
3945 emit_move_insn (flag, const0_rtx);
3946 set_flag_0 = get_insns ();
3949 thisblock->data.block.last_unconditional_cleanup
3950 = emit_insns_after (set_flag_0,
3951 thisblock->data.block.last_unconditional_cleanup);
3953 emit_move_insn (flag, const1_rtx);
3955 /* All cleanups must be on the function_obstack. */
3956 push_obstacks_nochange ();
3957 resume_temporary_allocation ();
/* Wrap the flag in a VAR_DECL so it can appear in a tree COND_EXPR.  */
3959 cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
3960 DECL_RTL (cond) = flag;
3962 /* Conditionalize the cleanup. */
3963 cleanup = build (COND_EXPR, void_type_node,
3964 truthvalue_conversion (cond),
3965 cleanup, integer_zero_node);
3966 cleanup = fold (cleanup);
3970 cleanups = thisblock->data.block.cleanup_ptr;
3973 /* All cleanups must be on the function_obstack. */
3974 push_obstacks_nochange ();
3975 resume_temporary_allocation ();
/* UNSAVE_EXPR lets the cleanup be expanded more than once (goto,
   return, EH) with correct semantics.  */
3976 cleanup = unsave_expr (cleanup);
3979 t = *cleanups = temp_tree_cons (decl, cleanup, *cleanups);
3982 /* If this block has a cleanup, it belongs in stack_block_stack. */
3983 stack_block_stack = thisblock;
3990 /* If this was optimized so that there is no exception region for the
3991 cleanup, then mark the TREE_LIST node, so that we can later tell
3992 if we need to call expand_eh_region_end. */
3993 if (! using_eh_for_cleanups_p
3994 || expand_eh_region_start_tree (decl, cleanup))
3995 TREE_ADDRESSABLE (t) = 1;
3996 /* If that started a new EH region, we're in a new block. */
3997 thisblock = block_stack;
/* Splice the queued insns in at the last unconditional cleanup point,
   and remember the new last point for subsequent registrations.  */
4004 thisblock->data.block.last_unconditional_cleanup
4005 = emit_insns_after (seq,
4006 thisblock->data.block.last_unconditional_cleanup);
4010 thisblock->data.block.last_unconditional_cleanup
4012 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
4018 /* Like expand_decl_cleanup, but suppress generating an exception handler
4019 to perform the cleanup.
   Implemented by temporarily clearing using_eh_for_cleanups_p around
   the call, then restoring the saved value.  */
4022 expand_decl_cleanup_no_eh (decl, cleanup)
4025 int save_eh = using_eh_for_cleanups_p;
4026 using_eh_for_cleanups_p = 0;
4027 expand_decl_cleanup (decl, cleanup);
4028 using_eh_for_cleanups_p = save_eh;
4031 /* Arrange for the top element of the dynamic cleanup chain to be
4032 popped if we exit the current binding contour. DECL is the
4033 associated declaration, if any, otherwise NULL_TREE. If the
4034 current contour is left via an exception, then __sjthrow will pop
4035 the top element off the dynamic cleanup chain. The code that
4036 avoids doing the action we push into the cleanup chain in the
4037 exceptional case is contained in expand_cleanups.
4039 This routine is only used by expand_eh_region_start, and that is
4040 the only way in which an exception region should be started. This
4041 routine is only used when using the setjmp/longjmp codegen method
4042 for exception handling. */
4045 expand_dcc_cleanup (decl)
4048 struct nesting *thisblock = block_stack;
4051 /* Error if we are not in any block. */
4055 /* Record the cleanup for the dynamic handler chain. */
4057 /* All cleanups must be on the function_obstack. */
4058 push_obstacks_nochange ();
4059 resume_temporary_allocation ();
/* POPDCC_EXPR: special marker node meaning "pop dynamic cleanup chain";
   expand_cleanups recognizes it and skips EH-region wrapping.  */
4060 cleanup = make_node (POPDCC_EXPR);
4063 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
4064 thisblock->data.block.cleanups
4065 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
4067 /* If this block has a cleanup, it belongs in stack_block_stack. */
4068 stack_block_stack = thisblock;
4072 /* Arrange for the top element of the dynamic handler chain to be
4073 popped if we exit the current binding contour. DECL is the
4074 associated declaration, if any, otherwise NULL_TREE. If the current
4075 contour is left via an exception, then __sjthrow will pop the top
4076 element off the dynamic handler chain. The code that avoids doing
4077 the action we push into the handler chain in the exceptional case
4078 is contained in expand_cleanups.
4080 This routine is only used by expand_eh_region_start, and that is
4081 the only way in which an exception region should be started. This
4082 routine is only used when using the setjmp/longjmp codegen method
4083 for exception handling. */
4086 expand_dhc_cleanup (decl)
4089 struct nesting *thisblock = block_stack;
4092 /* Error if we are not in any block. */
4096 /* Record the cleanup for the dynamic handler chain. */
4098 /* All cleanups must be on the function_obstack. */
4099 push_obstacks_nochange ();
4100 resume_temporary_allocation ();
/* POPDHC_EXPR: marker node meaning "pop dynamic handler chain";
   expand_cleanups recognizes it and skips EH-region wrapping.  */
4101 cleanup = make_node (POPDHC_EXPR);
4104 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
4105 thisblock->data.block.cleanups
4106 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
4108 /* If this block has a cleanup, it belongs in stack_block_stack. */
4109 stack_block_stack = thisblock;
4113 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
4114 DECL_ELTS is the list of elements that belong to DECL's type.
4115 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup.
   Expands the union itself, then gives each member decl an RTL that
   aliases the union's storage (same MEM address or same REG, viewed
   in the member's own machine mode).  */
4118 expand_anon_union_decl (decl, cleanup, decl_elts)
4119 tree decl, cleanup, decl_elts;
4121 struct nesting *thisblock = block_stack;
4125 expand_decl_cleanup (decl, cleanup);
4126 x = DECL_RTL (decl);
4130 tree decl_elt = TREE_VALUE (decl_elts);
4131 tree cleanup_elt = TREE_PURPOSE (decl_elts);
4132 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4134 /* Propagate the union's alignment to the elements. */
4135 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
4137 /* If the element has BLKmode and the union doesn't, the union is
4138 aligned such that the element doesn't need to have BLKmode, so
4139 change the element's mode to the appropriate one for its size. */
4140 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4141 DECL_MODE (decl_elt) = mode
4142 = mode_for_size (TREE_INT_CST_LOW (DECL_SIZE (decl_elt)),
4145 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4146 instead create a new MEM rtx with the proper mode. */
4147 if (GET_CODE (x) == MEM)
/* Same mode: share the union's MEM directly; otherwise build a fresh
   MEM at the same address, copying the aggregate/unchanging flags.  */
4149 if (mode == GET_MODE (x))
4150 DECL_RTL (decl_elt) = x;
4153 DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0)));
4154 MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
4155 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
/* Union lives in a register: reuse it, or view it through a SUBREG
   when the member's mode differs.  */
4158 else if (GET_CODE (x) == REG)
4160 if (mode == GET_MODE (x))
4161 DECL_RTL (decl_elt) = x;
4163 DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0);
4168 /* Record the cleanup if there is one. */
4171 thisblock->data.block.cleanups
4172 = temp_tree_cons (decl_elt, cleanup_elt,
4173 thisblock->data.block.cleanups);
4175 decl_elts = TREE_CHAIN (decl_elts);
4179 /* Expand a list of cleanups LIST.
4180 Elements may be expressions or may be nested lists.
4182 If DONT_DO is nonnull, then any list-element
4183 whose TREE_PURPOSE matches DONT_DO is omitted.
4184 This is sometimes used to avoid a cleanup associated with
4185 a value that is being returned out of the scope.
4187 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
4188 goto and handle protection regions specially in that case.
4190 If REACHABLE, we emit code, otherwise just inform the exception handling
4191 code about this finalization. */
4194 expand_cleanups (list, dont_do, in_fixup, reachable)
4201 for (tail = list; tail; tail = TREE_CHAIN (tail))
4202 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
/* Nested lists are expanded recursively with the same parameters.  */
4204 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4205 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
4210 tree cleanup = TREE_VALUE (tail);
4212 /* See expand_d{h,c}c_cleanup for why we avoid this. */
4213 if (TREE_CODE (cleanup) != POPDHC_EXPR
4214 && TREE_CODE (cleanup) != POPDCC_EXPR
4215 /* See expand_eh_region_start_tree for this case. */
4216 && ! TREE_ADDRESSABLE (tail))
4218 cleanup = protect_with_terminate (cleanup);
4219 expand_eh_region_end (cleanup);
4225 /* Cleanups may be run multiple times. For example,
4226 when exiting a binding contour, we expand the
4227 cleanups associated with that contour. When a goto
4228 within that binding contour has a target outside that
4229 contour, it will expand all cleanups from its scope to
4230 the target. Though the cleanups are expanded multiple
4231 times, the control paths are non-overlapping so the
4232 cleanups will not be executed twice. */
4234 /* We may need to protect fixups with rethrow regions. */
4235 int protect = (in_fixup && ! TREE_ADDRESSABLE (tail));
4237 expand_fixup_region_start ();
/* Expand the cleanup expression for effect; the value is discarded.  */
4238 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4240 expand_fixup_region_end (TREE_VALUE (tail));
4247 /* Mark when the context we are emitting RTL for as a conditional
4248 context, so that any cleanup actions we register with
4249 expand_decl_init will be properly conditionalized when those
4250 cleanup actions are later performed. Must be called before any
4251 expression (tree) is expanded that is within a conditional context.
   (The name's spelling of "deferal" is historical; it is part of the
   public interface and must not be changed here.)  */
4254 start_cleanup_deferal ()
4256 /* block_stack can be NULL if we are inside the parameter list. It is
4257 OK to do nothing, because cleanups aren't possible here. */
4259 ++block_stack->data.block.conditional_code;
4262 /* Mark the end of a conditional region of code. Because cleanup
4263 deferals may be nested, we may still be in a conditional region
4264 after we end the currently deferred cleanups, only after we end all
4265 deferred cleanups, are we back in unconditional code.
   Pairs with start_cleanup_deferal; the counter nests.  */
4268 end_cleanup_deferal ()
4270 /* block_stack can be NULL if we are inside the parameter list. It is
4271 OK to do nothing, because cleanups aren't possible here. */
4273 --block_stack->data.block.conditional_code;
4276 /* Move all cleanups from the current block_stack
4277 to the containing block_stack, where they are assumed to
4278 have been created. If anything can cause a temporary to
4279 be created, but not expanded for more than one level of
4280 block_stacks, then this code will have to change. */
4285 struct nesting *block = block_stack;
4286 struct nesting *outer = block->next;
/* Prepend this block's cleanups onto the enclosing block's list, then
   empty the inner list so they are not run twice.  */
4288 outer->data.block.cleanups
4289 = chainon (block->data.block.cleanups,
4290 outer->data.block.cleanups);
4291 block->data.block.cleanups = 0;
/* Return the cleanup list of the innermost contour, or 0 if there is
   no contour at all.  */
4295 last_cleanup_this_contour ()
4297 if (block_stack == 0)
4300 return block_stack->data.block.cleanups;
4303 /* Return 1 if there are any pending cleanups at this point.
4304 If THIS_CONTOUR is nonzero, check the current contour as well.
4305 Otherwise, look only at the contours that enclose this one. */
4308 any_pending_cleanups (this_contour)
4311 struct nesting *block;
/* No contour at all: trivially nothing pending.  */
4313 if (block_stack == 0)
4316 if (this_contour && block_stack->data.block.cleanups != NULL)
/* Fast path: innermost contour has neither its own nor inherited
   cleanups, so no enclosing contour can contribute any either.  */
4318 if (block_stack->data.block.cleanups == 0
4319 && block_stack->data.block.outer_cleanups == 0)
/* Otherwise scan each enclosing contour for a nonempty cleanup list.  */
4322 for (block = block_stack->next; block; block = block->next)
4323 if (block->data.block.cleanups != 0)
4329 /* Enter a case (Pascal) or switch (C) statement.
4330 Push a block onto case_stack and nesting_stack
4331 to accumulate the case-labels that are seen
4332 and to record the labels generated for the statement.
4334 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4335 Otherwise, this construct is transparent for `exit_something'.
4337 EXPR is the index-expression to be dispatched on.
4338 TYPE is its nominal type. We could simply convert EXPR to this type,
4339 but instead we take short cuts. */
4342 expand_start_case (exit_flag, expr, type, printname)
4348 register struct nesting *thiscase = ALLOC_NESTING ();
4350 /* Make an entry on case_stack for the case we are entering. */
4352 thiscase->next = case_stack;
4353 thiscase->all = nesting_stack;
4354 thiscase->depth = ++nesting_depth;
4355 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4356 thiscase->data.case_stmt.case_list = 0;
4357 thiscase->data.case_stmt.index_expr = expr;
4358 thiscase->data.case_stmt.nominal_type = type;
4359 thiscase->data.case_stmt.default_label = 0;
4360 thiscase->data.case_stmt.num_ranges = 0;
4361 thiscase->data.case_stmt.printname = printname;
4362 thiscase->data.case_stmt.seenlabel = 0;
4363 case_stack = thiscase;
4364 nesting_stack = thiscase;
/* Bytecode compilation hands off once the nesting entry exists.  */
4366 if (output_bytecode)
4368 bc_expand_start_case (thiscase, expr, type, printname);
4372 do_pending_stack_adjust ();
4374 /* Make sure case_stmt.start points to something that won't
4375 need any transformation before expand_end_case. */
4376 if (GET_CODE (get_last_insn ()) != NOTE)
4377 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4379 thiscase->data.case_stmt.start = get_last_insn ();
/* The case arms are conditional code until expand_end_case.  */
4381 start_cleanup_deferal ();
4385 /* Enter a case statement. It is assumed that the caller has pushed
4386 the current context onto the case stack. */
4389 bc_expand_start_case (thiscase, expr, type, printname)
4390 struct nesting *thiscase;
/* Push the index expression and convert it to the nominal case type.  */
4395 bc_expand_expr (expr);
4396 bc_expand_conversion (TREE_TYPE (expr), type);
4398 /* For cases, the skip is a place we jump to that's emitted after
4399 the size of the jump table is known. */
4401 thiscase->data.case_stmt.skip_label = gen_label_rtx ();
4402 bc_emit_bytecode (jump);
4403 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));
4405 #ifdef DEBUG_PRINT_CODE
4406 fputc ('\n', stderr);
4411 /* Start a "dummy case statement" within which case labels are invalid
4412 and are not connected to any larger real case statement.
4413 This can be used if you don't want to let a case statement jump
4414 into the middle of certain kinds of constructs. */
4417 expand_start_case_dummy ()
4419 register struct nesting *thiscase = ALLOC_NESTING ();
4421 /* Make an entry on case_stack for the dummy. */
4423 thiscase->next = case_stack;
4424 thiscase->all = nesting_stack;
4425 thiscase->depth = ++nesting_depth;
4426 thiscase->exit_label = 0;
4427 thiscase->data.case_stmt.case_list = 0;
/* start == 0 is what pushcase tests to reject labels in a dummy.  */
4428 thiscase->data.case_stmt.start = 0;
4429 thiscase->data.case_stmt.nominal_type = 0;
4430 thiscase->data.case_stmt.default_label = 0;
4431 thiscase->data.case_stmt.num_ranges = 0;
4432 case_stack = thiscase;
4433 nesting_stack = thiscase;
4434 start_cleanup_deferal ();
4437 /* End a dummy case statement. */
/* Pop the dummy entry pushed by expand_start_case_dummy and end the
   cleanup-deferral region it opened.  */
4440 expand_end_case_dummy ()
4442 end_cleanup_deferal ();
4443 POPSTACK (case_stack);
4446 /* Return the data type of the index-expression
4447 of the innermost case statement, or null if none. */
/* Return the type of the innermost case statement's index expression.
   NOTE(review): the header comment above promises null when there is no
   enclosing case; the branch that checks case_stack for null is not
   visible in this elided listing — presumably it exists before this
   return.  */
4450 case_index_expr_type ()
4453 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4457 /* Accumulate one case or default label inside a case or switch statement.
4458 VALUE is the value of the case (a null pointer, for a default label).
4459 The function CONVERTER, when applied to arguments T and V,
4460 converts the value V to the type T.
4462 If not currently inside a case or switch statement, return 1 and do
4463 nothing. The caller will print a language-specific error message.
4464 If VALUE is a duplicate or overlaps, return 2 and do nothing
4465 except store the (first) duplicate node in *DUPLICATE.
4466 If VALUE is out of range, return 3 and do nothing.
4467 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4468 Return 0 on success.
4470 Extended to handle range statements. */
/* Record one case label (or the default label when VALUE is null) in the
   innermost case statement; see the block comment above for the return
   codes.  Delegates to bc_pushcase in bytecode mode and to add_case_node
   for the actual list insertion.
   NOTE(review): elided listing — several return statements, braces and
   declarations (insn, index_type, nominal_type) are missing here.  */
4473 pushcase (value, converter, label, duplicate)
4474 register tree value;
4475 tree (*converter) PROTO((tree, tree));
4476 register tree label;
4479 register struct case_node **l;
4480 register struct case_node *n;
4484 if (output_bytecode)
4485 return bc_pushcase (value, label);
4487 /* Fail if not inside a real case statement. */
4488 if (! (case_stack && case_stack->data.case_stmt.start))
/* Reject a jump into the scope of a cleanup or variable-sized array
   (return code 5 per the header comment).  */
4491 if (stack_block_stack
4492 && stack_block_stack->depth > case_stack->depth)
4495 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4496 nominal_type = case_stack->data.case_stmt.nominal_type;
4498 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4499 if (index_type == error_mark_node)
4502 /* Convert VALUE to the type in which the comparisons are nominally done. */
4504 value = (*converter) (nominal_type, value);
4506 /* If this is the first label, warn if any insns have been emitted. */
4507 if (case_stack->data.case_stmt.seenlabel == 0)
4510 for (insn = case_stack->data.case_stmt.start;
4512 insn = NEXT_INSN (insn))
4514 if (GET_CODE (insn) == CODE_LABEL)
/* NOTEs and USE insns don't count as reachable code.  */
4516 if (GET_CODE (insn) != NOTE
4517 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4519 warning ("unreachable code at beginning of %s",
4520 case_stack->data.case_stmt.printname);
4525 case_stack->data.case_stmt.seenlabel = 1;
4527 /* Fail if this value is out of range for the actual type of the index
4528 (which may be narrower than NOMINAL_TYPE). */
4529 if (value != 0 && ! int_fits_type_p (value, index_type))
4532 /* Fail if this is a duplicate or overlaps another entry. */
/* A null VALUE means this is the default label; a second default is a
   duplicate (return code 2).  */
4535 if (case_stack->data.case_stmt.default_label != 0)
4537 *duplicate = case_stack->data.case_stmt.default_label;
4540 case_stack->data.case_stmt.default_label = label;
4543 return add_case_node (value, value, label, duplicate);
4545 expand_label (label);
4549 /* Like pushcase but this case applies to all values
4550 between VALUE1 and VALUE2 (inclusive).
4551 The return value is the same as that of pushcase
4552 but there is one additional error code:
4553 4 means the specified range was empty. */
/* Like pushcase but records the inclusive range [VALUE1, VALUE2]; a null
   VALUE1/VALUE2 stands for the type's minimum/maximum.  Returns the
   pushcase codes plus 4 for an empty range (see block comment above).
   NOTE(review): elided listing — return statements and some declarations
   are missing from this excerpt.  */
4556 pushcase_range (value1, value2, converter, label, duplicate)
4557 register tree value1, value2;
4558 tree (*converter) PROTO((tree, tree));
4559 register tree label;
4562 register struct case_node **l;
4563 register struct case_node *n;
4567 /* Fail if not inside a real case statement. */
4568 if (! (case_stack && case_stack->data.case_stmt.start))
4571 /* Fail if the range is empty. */
4572 if (tree_int_cst_lt (value2, value1))
/* Reject a jump into the scope of a cleanup or variable-sized array.  */
4575 if (stack_block_stack
4576 && stack_block_stack->depth > case_stack->depth)
4579 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4580 nominal_type = case_stack->data.case_stmt.nominal_type;
4582 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4583 if (index_type == error_mark_node)
4586 /* If this is the first label, warn if any insns have been emitted. */
4587 if (case_stack->data.case_stmt.seenlabel == 0)
4590 for (insn = case_stack->data.case_stmt.start;
4592 insn = NEXT_INSN (insn))
4594 if (GET_CODE (insn) == CODE_LABEL)
4596 if (GET_CODE (insn) != NOTE
4597 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4599 warning ("unreachable code at beginning of %s",
4600 case_stack->data.case_stmt.printname);
4605 case_stack->data.case_stmt.seenlabel = 1;
4607 /* Convert VALUEs to the type in which the comparisons are nominally done. */
4608 if (value1 == 0) /* Negative infinity. */
4609 value1 = TYPE_MIN_VALUE (index_type);
4610 value1 = (*converter) (nominal_type, value1);
4612 if (value2 == 0) /* Positive infinity. */
4613 value2 = TYPE_MAX_VALUE (index_type);
4614 value2 = (*converter) (nominal_type, value2);
4616 /* Fail if these values are out of range. */
4617 if (! int_fits_type_p (value1, index_type))
4620 if (! int_fits_type_p (value2, index_type))
4623 return add_case_node (value1, value2, label, duplicate);
4626 /* Do the actual insertion of a case label for pushcase and pushcase_range
4627 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4628 slowdown for large switch statements. */
/* Insert the case range [LOW, HIGH] with LABEL into the AVL tree rooted
   at case_stack->data.case_stmt.case_list.  On overlap, store the
   conflicting label in *DUPLICATE (return code handling is elided from
   this listing).  The bulk of the body below is the AVL rebalancing after
   insertion: single and double rotations for growth on the left, then the
   mirror cases for growth on the right.
   NOTE(review): heavily elided — loop headers, braces, balance-field
   updates and return statements between the visible lines are missing;
   the embedded assignments-in-conditions (e.g. `if (p->left = s = ...)`)
   are intentional in this code base.  */
4631 add_case_node (low, high, label, duplicate)
4636 struct case_node *p, **q, *r;
4638 q = &case_stack->data.case_stmt.case_list;
4645 /* Keep going past elements distinctly greater than HIGH. */
4646 if (tree_int_cst_lt (high, p->low))
4649 /* or distinctly less than LOW. */
4650 else if (tree_int_cst_lt (p->high, low))
4655 /* We have an overlap; this is an error. */
4656 *duplicate = p->code_label;
4661 /* Add this label to the chain, and succeed.
4662 Copy LOW, HIGH so they are on temporary rather than momentary
4663 obstack and will thus survive till the end of the case statement. */
4665 r = (struct case_node *) oballoc (sizeof (struct case_node));
4666 r->low = copy_node (low);
4668 /* If the bounds are equal, turn this into the one-value case. */
4670 if (tree_int_cst_equal (low, high))
4674 r->high = copy_node (high);
4675 case_stack->data.case_stmt.num_ranges++;
4678 r->code_label = label;
4679 expand_label (label);
/* --- AVL rebalance: walk back up adjusting balance factors. --- */
4689 struct case_node *s;
4695 if (! (b = p->balance))
4696 /* Growth propagation from left side. */
/* Single rotation (left-left case).  */
4703 if (p->left = s = r->right)
4720 case_stack->data.case_stmt.case_list = r;
4723 /* r->balance == +1 */
/* Double rotation (left-right case).  */
4728 struct case_node *t = r->right;
4730 if (p->left = s = t->right)
4734 if (r->right = s = t->left)
4756 case_stack->data.case_stmt.case_list = t;
4763 /* p->balance == +1; growth of left side balances the node. */
/* Mirror image: growth on the right subtree.  */
4773 if (! (b = p->balance))
4774 /* Growth propagation from right side. */
4782 if (p->right = s = r->left)
4799 case_stack->data.case_stmt.case_list = r;
4803 /* r->balance == -1 */
4807 struct case_node *t = r->left;
4809 if (p->right = s = t->left)
4814 if (r->left = s = t->right)
4837 case_stack->data.case_stmt.case_list = t;
4843 /* p->balance == -1; growth of right side balances the node. */
4856 /* Accumulate one case or default label; VALUE is the value of the
4857 case, or nil for a default label. If not currently inside a case,
4858 return 1 and do nothing. If VALUE is a duplicate or overlaps, return
4859 2 and do nothing. If VALUE is out of range, return 3 and do nothing.
4860 Return 0 on success. This function is a leftover from the earlier
4861 bytecode compiler, which was based on gcc 1.37. It should be
4862 merged into pushcase. */
/* Bytecode version of pushcase: insert VALUE/LABEL into the sorted,
   left-linked case list of the innermost case statement, or record LABEL
   as the default when VALUE is null.  Kept separate from pushcase for
   historical reasons (see block comment above).
   NOTE(review): elided listing — return statements and braces between
   the visible lines are missing.  */
4865 bc_pushcase (value, label)
4869 struct nesting *thiscase = case_stack;
4870 struct case_node *case_label, *new_label;
4875 /* Fail if duplicate, overlap, or out of type range. */
4878 value = convert (thiscase->data.case_stmt.nominal_type, value);
4879 if (! int_fits_type_p (value, thiscase->data.case_stmt.nominal_type))
/* Walk the left-linked list to the insertion point: stop at the first
   successor whose high bound is not below VALUE.  */
4882 for (case_label = thiscase->data.case_stmt.case_list;
4883 case_label->left; case_label = case_label->left)
4884 if (! tree_int_cst_lt (case_label->left->high, value))
/* Duplicate/overlap check against the neighbours found above.  */
4887 if (case_label != thiscase->data.case_stmt.case_list
4888 && ! tree_int_cst_lt (case_label->high, value)
4889 || (case_label->left && ! tree_int_cst_lt (value, case_label->left->low)))
4892 new_label = (struct case_node *) oballoc (sizeof (struct case_node));
4893 new_label->low = new_label->high = copy_node (value);
4894 new_label->code_label = label;
4895 new_label->left = case_label->left;
4897 case_label->left = new_label;
4898 thiscase->data.case_stmt.num_ranges++;
/* Null VALUE: this is the default label; a second default is an error.  */
4902 if (thiscase->data.case_stmt.default_label)
4904 thiscase->data.case_stmt.default_label = label;
4907 expand_label (label);
4911 /* Returns the number of possible values of TYPE.
4912 Returns -1 if the number is unknown or variable.
4913 Returns -2 if the number does not fit in a HOST_WIDE_INT.
4914 Sets *SPARENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4915 do not increase monotonically (there may be duplicates);
4916 to 1 if the values increase monotonically, but not always by 1;
4917 otherwise sets it to 0. */
/* Count the possible values of TYPE and classify an ENUMERAL_TYPE's
   value ordering into *SPARENESS; see the block comment above for the
   -1/-2 return conventions.
   NOTE(review): elided listing — the switch's case labels, returns and
   braces between the visible lines are missing.  */
4920 all_cases_count (type, spareness)
4924 HOST_WIDE_INT count, count_high = 0;
4927 switch (TREE_CODE (type))
/* Presumably the CHAR_TYPE case — one value per bit pattern of a byte.
   TODO(review): confirm against the unelided source.  */
4934 count = 1 << BITS_PER_UNIT;
4938 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4939 || TREE_CODE (TYPE_MAX_VALUE (type)) != INTEGER_CST)
/* Compute count
4944 = TREE_INT_CST_LOW (TYPE_MAX_VALUE (type))
4945 - TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + 1
4946 but with overflow checking. */
4947 tree mint = TYPE_MIN_VALUE (type);
4948 tree maxt = TYPE_MAX_VALUE (type);
4949 HOST_WIDE_INT lo, hi;
4950 neg_double(TREE_INT_CST_LOW (mint), TREE_INT_CST_HIGH (mint),
4952 add_double(TREE_INT_CST_LOW (maxt), TREE_INT_CST_HIGH (maxt),
4954 add_double (lo, hi, 1, 0, &lo, &hi);
/* A nonzero high word or negative low word means the count does not fit
   in a HOST_WIDE_INT.  */
4955 if (hi != 0 || lo < 0)
/* ENUMERAL_TYPE: check whether the enumerators are exactly
   min, min+1, min+2, ... (sparseness 0) or merely increasing (1).  */
4962 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4964 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4965 || TREE_CODE (TREE_VALUE (t)) != INTEGER_CST
4966 || TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + count
4967 != TREE_INT_CST_LOW (TREE_VALUE (t)))
/* Verify monotonic increase; any non-increasing pair demotes the type
   to sparseness 2 (handled elsewhere, per the header comment).  */
4971 if (*spareness == 1)
4973 tree prev = TREE_VALUE (TYPE_VALUES (type));
4974 for (t = TYPE_VALUES (type); t = TREE_CHAIN (t), t != NULL_TREE; )
4976 if (! tree_int_cst_lt (prev, TREE_VALUE (t)))
4981 prev = TREE_VALUE (t);
4990 #define BITARRAY_TEST(ARRAY, INDEX) \
4991 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4992 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
4993 #define BITARRAY_SET(ARRAY, INDEX) \
4994 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4995 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
4997 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
4998 with the case values we have seen, assuming the case expression
5000 SPARSENESS is as determined by all_cases_count.
5002 The time needed is proportional to COUNT, unless
5003 SPARSENESS is 2, in which case quadratic time is needed. */
/* Set bits in CASES_SEEN (length COUNT) for every value of TYPE that is
   covered by a case label of the innermost case statement.  SPARSENESS
   comes from all_cases_count; sparseness 2 (non-monotonic enum values)
   takes the quadratic path that probes the case tree per enumerator.
   NOTE(review): elided listing — loop headers, braces and the
   sparseness 0/1 setup between the visible lines are missing.  */
5006 mark_seen_cases (type, cases_seen, count, sparseness)
5008 unsigned char *cases_seen;
5014 tree next_node_to_try = NULL_TREE;
5015 long next_node_offset = 0;
5017 register struct case_node *n, *root = case_stack->data.case_stmt.case_list;
/* Scratch INTEGER_CST reused for every probe to avoid allocating one
   node per comparison.  */
5018 tree val = make_node (INTEGER_CST);
5019 TREE_TYPE (val) = type;
5022 else if (sparseness == 2)
5027 /* This less efficient loop is only needed to handle
5028 duplicate case values (multiple enum constants
5029 with the same value). */
5030 TREE_TYPE (val) = TREE_TYPE (root->low);
5031 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
5032 t = TREE_CHAIN (t), xlo++)
5034 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
5035 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
/* Binary search of the AVL case tree for a range containing VAL.  */
5039 /* Keep going past elements distinctly greater than VAL. */
5040 if (tree_int_cst_lt (val, n->low))
5043 /* or distinctly less than VAL. */
5044 else if (tree_int_cst_lt (n->high, val))
5049 /* We have found a matching range. */
5050 BITARRAY_SET (cases_seen, xlo);
/* Linear path (sparseness 0 or 1): flatten the tree to a list and walk
   each range, marking every value it covers.  */
5060 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
5061 for (n = root; n; n = n->right)
5063 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
5064 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
5065 while ( ! tree_int_cst_lt (n->high, val))
5067 /* Calculate (into xlo) the "offset" of the integer (val).
5068 The element with lowest value has offset 0, the next smallest
5069 element has offset 1, etc. */
5071 HOST_WIDE_INT xlo, xhi;
5073 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
5075 /* The TYPE_VALUES will be in increasing order, so
5076 start searching where we last ended. */
5077 t = next_node_to_try;
5078 xlo = next_node_offset;
5084 t = TYPE_VALUES (type);
5087 if (tree_int_cst_equal (val, TREE_VALUE (t)))
5089 next_node_to_try = TREE_CHAIN (t);
5090 next_node_offset = xlo + 1;
/* Wrapped around to the starting point: VAL is not an enumerator.  */
5095 if (t == next_node_to_try)
/* Dense path: offset is simply val - TYPE_MIN_VALUE, computed with
   double-word arithmetic.  */
5104 t = TYPE_MIN_VALUE (type);
5106 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
5110 add_double (xlo, xhi,
5111 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5115 if (xhi == 0 && xlo >= 0 && xlo < count)
5116 BITARRAY_SET (cases_seen, xlo);
/* Advance VAL by one and continue through the range.  */
5117 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5119 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
5125 /* Called when the index of a switch statement is an enumerated type
5126 and there is no default label.
5128 Checks that all enumeration literals are covered by the case
5129 expressions of a switch. Also, warn if there are any extra
5130 switch cases that are *not* elements of the enumerated type.
5132 If all enumeration literals were covered by the case expressions,
5133 turn one of the expressions into the default expression since it should
5134 not be possible to fall through such a switch. */
/* Warn about enumerators of TYPE with no case label, and about case
   labels whose value is not an enumerator of TYPE.  Called for a switch
   on an enum with no default label (see block comment above).
   NOTE(review): elided listing — declarations (i, size, sparseness,
   bytes_needed), braces, free(cases_seen) and returns between the
   visible lines are missing.  */
5137 check_for_full_enumeration_handling (type)
5140 register struct case_node *n;
5141 register struct case_node **l;
5142 register tree chain;
5145 /* True iff the selector type is a numbered set mode. */
5148 /* The number of possible selector values. */
5151 /* For each possible selector value, a one iff it has been matched
5152 by a case value alternative. */
5153 unsigned char *cases_seen;
5155 /* The allocated size of cases_seen, in chars. */
5159 if (output_bytecode)
5161 bc_check_for_full_enumeration_handling (type);
5168 size = all_cases_count (type, &sparseness);
5169 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
/* Skip the check for huge or unrepresentable enums (size <= 0 covers
   the -1/-2 results of all_cases_count).  */
5171 if (size > 0 && size < 600000
5172 /* We deliberately use malloc here - not xmalloc. */
5173 && (cases_seen = (unsigned char *) malloc (bytes_needed)) != NULL)
5176 tree v = TYPE_VALUES (type);
5177 bzero (cases_seen, bytes_needed);
5179 /* The time complexity of this code is normally O(N), where
5180 N being the number of members in the enumerated type.
5181 However, if type is a ENUMERAL_TYPE whose values do not
5182 increase monotonically, O(N*log(N)) time may be needed. */
5184 mark_seen_cases (type, cases_seen, size, sparseness);
5186 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
5188 if (BITARRAY_TEST(cases_seen, i) == 0)
5189 warning ("enumeration value `%s' not handled in switch",
5190 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
5196 /* Now we go the other way around; we warn if there are case
5197 expressions that don't correspond to enumerators. This can
5198 occur since C and C++ don't enforce type-checking of
5199 assignments to enumeration variables. */
/* A left child means the list is still in AVL-tree form; flatten it
   first so the walk below can follow ->right links.  */
5201 if (case_stack->data.case_stmt.case_list
5202 && case_stack->data.case_stmt.case_list->left)
5203 case_stack->data.case_stmt.case_list
5204 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
5206 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
5208 for (chain = TYPE_VALUES (type);
5209 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
5210 chain = TREE_CHAIN (chain))
5215 if (TYPE_NAME (type) == 0)
5216 warning ("case value `%d' not in enumerated type",
5217 TREE_INT_CST_LOW (n->low));
5219 warning ("case value `%d' not in enumerated type `%s'",
5220 TREE_INT_CST_LOW (n->low),
5221 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5224 : DECL_NAME (TYPE_NAME (type))));
/* For a range, check its high bound separately.  */
5226 if (!tree_int_cst_equal (n->low, n->high))
5228 for (chain = TYPE_VALUES (type);
5229 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
5230 chain = TREE_CHAIN (chain))
5235 if (TYPE_NAME (type) == 0)
5236 warning ("case value `%d' not in enumerated type",
5237 TREE_INT_CST_LOW (n->high));
5239 warning ("case value `%d' not in enumerated type `%s'",
5240 TREE_INT_CST_LOW (n->high),
5241 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5244 : DECL_NAME (TYPE_NAME (type))));
5250 /* ??? This optimization is disabled because it causes valid programs to
5251 fail. ANSI C does not guarantee that an expression with enum type
5252 will have a value that is the same as one of the enumeration literals. */
5254 /* If all values were found as case labels, make one of them the default
5255 label. Thus, this switch will never fall through. We arbitrarily pick
5256 the last one to make the default since this is likely the most
5257 efficient choice. */
5261 for (l = &case_stack->data.case_stmt.case_list;
5266 case_stack->data.case_stmt.default_label = (*l)->code_label;
5273 /* Check that all enumeration literals are covered by the case
5274 expressions of a switch. Also warn if there are any cases
5275 that are not elements of the enumerated type. */
/* Bytecode version of check_for_full_enumeration_handling: same two
   passes (unhandled enumerators, then case values outside the enum),
   but over the sorted left-linked case list used by bc_pushcase.
   NOTE(review): elided listing — the declaration of E, loop increments
   and braces between the visible lines are missing.  */
5278 bc_check_for_full_enumeration_handling (type)
5281 struct nesting *thiscase = case_stack;
5282 struct case_node *c;
5285 /* Check for enums not handled. */
5286 for (e = TYPE_VALUES (type); e; e = TREE_CHAIN (e))
/* Advance through the sorted case list until c could contain E's value;
   the list is kept in increasing order by bc_pushcase.  */
5288 for (c = thiscase->data.case_stmt.case_list->left;
5289 c && tree_int_cst_lt (c->high, TREE_VALUE (e));
5292 if (! (c && tree_int_cst_equal (c->low, TREE_VALUE (e))))
5293 warning ("enumerated value `%s' not handled in switch",
5294 IDENTIFIER_POINTER (TREE_PURPOSE (e)));
5297 /* Check for cases not in the enumeration. */
5298 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
5300 for (e = TYPE_VALUES (type);
5301 e && !tree_int_cst_equal (c->low, TREE_VALUE (e));
5305 warning ("case value `%d' not in enumerated type `%s'",
5306 TREE_INT_CST_LOW (c->low),
5307 IDENTIFIER_POINTER (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
5309 : DECL_NAME (TYPE_NAME (type))));
5313 /* Terminate a case (Pascal) or switch (C) statement
5314 in which ORIG_INDEX is the expression to be tested.
5315 Generate the code to test it and jump to the right place. */
/* Finish expanding a switch on ORIG_INDEX.  Visible strategy, in order:
   (1) sanity/warning passes (enum coverage, unreachable-code warning,
   synthesizing a default label); (2) normalize the case list and compute
   min/max/range; (3) pick a dispatch strategy — direct jump for a
   constant index, a balanced binary decision tree when the value range
   is large or sparse, the machine's casesi insn, tablejump, or an
   explicit ADDR_VEC/ADDR_DIFF_VEC jump table; (4) move the generated
   dispatch code before the switch body and pop the nesting stack.
   NOTE(review): this listing is heavily elided — declarations (index,
   table_label, count, ncases, labelvec, win, ...), braces, #else arms
   and many statements between the visible lines are missing.  */
5318 expand_end_case (orig_index)
5321 tree minval, maxval, range, orig_minval;
5322 rtx default_label = 0;
5323 register struct case_node *n;
5331 register struct nesting *thiscase = case_stack;
5332 tree index_expr, index_type;
5335 if (output_bytecode)
5337 bc_expand_end_case (orig_index);
5341 table_label = gen_label_rtx ();
5342 index_expr = thiscase->data.case_stmt.index_expr;
5343 index_type = TREE_TYPE (index_expr);
5344 unsignedp = TREE_UNSIGNED (index_type);
5346 do_pending_stack_adjust ();
5348 /* An ERROR_MARK occurs for various reasons including invalid data type. */
5349 if (index_type != error_mark_node)
5351 /* If switch expression was an enumerated type, check that all
5352 enumeration literals are covered by the cases.
5353 No sense trying this if there's a default case, however. */
5355 if (!thiscase->data.case_stmt.default_label
5356 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
5357 && TREE_CODE (index_expr) != INTEGER_CST)
5358 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
5360 /* If this is the first label, warn if any insns have been emitted. */
5361 if (thiscase->data.case_stmt.seenlabel == 0)
5364 for (insn = get_last_insn ();
5365 insn != case_stack->data.case_stmt.start;
5366 insn = PREV_INSN (insn))
5367 if (GET_CODE (insn) != NOTE
5368 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn))!= USE))
5370 warning ("unreachable code at beginning of %s",
5371 case_stack->data.case_stmt.printname);
5376 /* If we don't have a default-label, create one here,
5377 after the body of the switch. */
5378 if (thiscase->data.case_stmt.default_label == 0)
5380 thiscase->data.case_stmt.default_label
5381 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5382 expand_label (thiscase->data.case_stmt.default_label);
5384 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5386 before_case = get_last_insn ();
/* Flatten the AVL case tree into a ->right-linked list.  */
5388 if (thiscase->data.case_stmt.case_list
5389 && thiscase->data.case_stmt.case_list->left)
5390 thiscase->data.case_stmt.case_list
5391 = case_tree2list(thiscase->data.case_stmt.case_list, 0);
5393 /* Simplify the case-list before we count it. */
5394 group_case_nodes (thiscase->data.case_stmt.case_list);
5396 /* Get upper and lower bounds of case values.
5397 Also convert all the case values to the index expr's data type. */
5400 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5402 /* Check low and high label values are integers. */
5403 if (TREE_CODE (n->low) != INTEGER_CST)
5405 if (TREE_CODE (n->high) != INTEGER_CST)
5408 n->low = convert (index_type, n->low);
5409 n->high = convert (index_type, n->high);
5411 /* Count the elements and track the largest and smallest
5412 of them (treating them as signed even if they are not). */
5420 if (INT_CST_LT (n->low, minval))
5422 if (INT_CST_LT (maxval, n->high))
5425 /* A range counts double, since it requires two compares. */
5426 if (! tree_int_cst_equal (n->low, n->high))
5430 orig_minval = minval;
5432 /* Compute span of values. */
5434 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5436 end_cleanup_deferal ();
/* Degenerate case (presumably: no case labels at all) — evaluate the
   index for side effects, then jump straight to the default.
   TODO(review): confirm the guard against the unelided source.  */
5440 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5442 emit_jump (default_label);
5445 /* If range of values is much bigger than number of values,
5446 make a sequence of conditional branches instead of a dispatch.
5447 If the switch-index is a constant, do it this way
5448 because we can optimize it. */
5450 #ifndef CASE_VALUES_THRESHOLD
5452 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
5454 /* If machine does not have a case insn that compares the
5455 bounds, this means extra overhead for dispatch tables
5456 which raises the threshold for using them. */
5457 #define CASE_VALUES_THRESHOLD 5
5458 #endif /* HAVE_casesi */
5459 #endif /* CASE_VALUES_THRESHOLD */
5461 else if (TREE_INT_CST_HIGH (range) != 0
5462 || count < CASE_VALUES_THRESHOLD
5463 || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
5465 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5468 || TREE_CODE (index_expr) == INTEGER_CST
5469 /* These will reduce to a constant. */
5470 || (TREE_CODE (index_expr) == CALL_EXPR
5471 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
5472 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
5473 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
5474 || (TREE_CODE (index_expr) == COMPOUND_EXPR
5475 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
5477 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5479 /* If the index is a short or char that we do not have
5480 an insn to handle comparisons directly, convert it to
5481 a full integer now, rather than letting each comparison
5482 generate the conversion. */
5484 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5485 && (cmp_optab->handlers[(int) GET_MODE(index)].insn_code
5486 == CODE_FOR_nothing))
5488 enum machine_mode wider_mode;
5489 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5490 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5491 if (cmp_optab->handlers[(int) wider_mode].insn_code
5492 != CODE_FOR_nothing)
5494 index = convert_to_mode (wider_mode, index, unsignedp);
5500 do_pending_stack_adjust ();
5502 index = protect_from_queue (index, 0);
5503 if (GET_CODE (index) == MEM)
5504 index = copy_to_reg (index);
5505 if (GET_CODE (index) == CONST_INT
5506 || TREE_CODE (index_expr) == INTEGER_CST)
5508 /* Make a tree node with the proper constant value
5509 if we don't already have one. */
5510 if (TREE_CODE (index_expr) != INTEGER_CST)
5513 = build_int_2 (INTVAL (index),
5514 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5515 index_expr = convert (index_type, index_expr);
5518 /* For constant index expressions we need only
5519 issue an unconditional branch to the appropriate
5520 target code. The job of removing any unreachable
5521 code is left to the optimisation phase if the
5522 "-O" option is specified. */
5523 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5524 if (! tree_int_cst_lt (index_expr, n->low)
5525 && ! tree_int_cst_lt (n->high, index_expr))
5529 emit_jump (label_rtx (n->code_label));
5531 emit_jump (default_label);
5535 /* If the index expression is not constant we generate
5536 a binary decision tree to select the appropriate
5537 target code. This is done as follows:
5539 The list of cases is rearranged into a binary tree,
5540 nearly optimal assuming equal probability for each case.
5542 The tree is transformed into RTL, eliminating
5543 redundant test conditions at the same time.
5545 If program flow could reach the end of the
5546 decision tree an unconditional jump to the
5547 default code is emitted. */
/* Text-frequency cost weighting is skipped for enums, where the
   heuristic (ASCII letter frequencies) makes no sense.  */
5550 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
5551 && estimate_case_costs (thiscase->data.case_stmt.case_list));
5552 balance_case_nodes (&thiscase->data.case_stmt.case_list,
5554 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5555 default_label, index_type);
5556 emit_jump_if_reachable (default_label);
/* --- casesi path: use the machine's case dispatch insn. --- */
5565 enum machine_mode index_mode = SImode;
5566 int index_bits = GET_MODE_BITSIZE (index_mode);
5568 enum machine_mode op_mode;
5570 /* Convert the index to SImode. */
5571 if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
5572 > GET_MODE_BITSIZE (index_mode))
5574 enum machine_mode omode = TYPE_MODE (index_type);
5575 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
5577 /* We must handle the endpoints in the original mode. */
5578 index_expr = build (MINUS_EXPR, index_type,
5579 index_expr, minval);
5580 minval = integer_zero_node;
5581 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
/* Unsigned compare: a biased index above RANGE means out of bounds,
   so branch to the default before the lossy truncation below.  */
5582 emit_cmp_insn (rangertx, index, LTU, NULL_RTX, omode, 1, 0);
5583 emit_jump_insn (gen_bltu (default_label));
5584 /* Now we can safely truncate. */
5585 index = convert_to_mode (index_mode, index, 0);
5589 if (TYPE_MODE (index_type) != index_mode)
5591 index_expr = convert (type_for_size (index_bits, 0),
5593 index_type = TREE_TYPE (index_expr);
5596 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5599 index = protect_from_queue (index, 0);
5600 do_pending_stack_adjust ();
/* Force each casesi operand into the mode/register its predicate
   requires (operand 0: index, 1: lower bound, 2: range).  */
5602 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][0];
5603 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][0])
5605 index = copy_to_mode_reg (op_mode, index);
5607 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
5609 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][1];
5610 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][1])
5612 op1 = copy_to_mode_reg (op_mode, op1);
5614 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
5616 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][2];
5617 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][2])
5619 op2 = copy_to_mode_reg (op_mode, op2);
5621 emit_jump_insn (gen_casesi (index, op1, op2,
5622 table_label, default_label));
/* --- tablejump path: explicit bounds check + indexed jump. --- */
5626 #ifdef HAVE_tablejump
5627 if (! win && HAVE_tablejump)
5629 index_expr = convert (thiscase->data.case_stmt.nominal_type,
5630 fold (build (MINUS_EXPR, index_type,
5631 index_expr, minval)));
5632 index_type = TREE_TYPE (index_expr);
5633 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5635 index = protect_from_queue (index, 0);
5636 do_pending_stack_adjust ();
5638 do_tablejump (index, TYPE_MODE (index_type),
5639 expand_expr (range, NULL_RTX, VOIDmode, 0),
5640 table_label, default_label);
5647 /* Get table of labels to jump to, in order of case index. */
5649 ncases = TREE_INT_CST_LOW (range) + 1;
5650 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
5651 bzero ((char *) labelvec, ncases * sizeof (rtx));
5653 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5655 register HOST_WIDE_INT i
5656 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);
5661 = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
/* A single-value case fills exactly one slot; ranges continue.  */
5662 if (i + TREE_INT_CST_LOW (orig_minval)
5663 == TREE_INT_CST_LOW (n->high))
5669 /* Fill in the gaps with the default. */
5670 for (i = 0; i < ncases; i++)
5671 if (labelvec[i] == 0)
5672 labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);
5674 /* Output the table */
5675 emit_label (table_label);
5677 /* This would be a lot nicer if CASE_VECTOR_PC_RELATIVE
5678 were an expression, instead of an #ifdef/#ifndef. */
5680 #ifdef CASE_VECTOR_PC_RELATIVE
5684 emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
5685 gen_rtx (LABEL_REF, Pmode, table_label),
5686 gen_rtvec_v (ncases, labelvec)));
5688 emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
5689 gen_rtvec_v (ncases, labelvec)));
5691 /* If the case insn drops through the table,
5692 after the table we must jump to the default-label.
5693 Otherwise record no drop-through after the table. */
5694 #ifdef CASE_DROPS_THROUGH
5695 emit_jump (default_label);
/* Move the dispatch code generated above to before the switch body.  */
5701 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
5702 reorder_insns (before_case, get_last_insn (),
5703 thiscase->data.case_stmt.start);
5706 end_cleanup_deferal ();
5708 if (thiscase->exit_label)
5709 emit_label (thiscase->exit_label);
5711 POPSTACK (case_stack);
5716 /* Convert the tree NODE into a list linked by the right field, with the left
5717 field zeroed. RIGHT is used for recursion; it is a list to be placed
5718 rightmost in the resulting list. */
/* Flatten the case AVL tree NODE into a list linked through ->right
   (left fields zeroed), appending RIGHT at the far end; recursive on
   the right subtree, tail-recursive on the left (see comment above).
   NOTE(review): elided listing — the base case, the left-field zeroing
   and the final return are missing from this excerpt.  */
5720 static struct case_node *
5721 case_tree2list (node, right)
5722 struct case_node *node, *right;
5724 struct case_node *left;
5727 right = case_tree2list (node->right, right);
5729 node->right = right;
/* Intentional assignment-in-condition: detach and descend left.  */
5730 if (left = node->left)
5733 return case_tree2list (left, node);
5739 /* Terminate a case statement. EXPR is the original index
/* Bytecode version of expand_end_case for switch index EXPR: jump over
   the dispatch, define the skip label from bc_expand_start_case, emit a
   caseSI/caseSU/caseDI/caseDU instruction with its literal operands, then
   emit the jump table itself (low, high, label per range) and finally the
   exit label.  Issues enum-coverage warnings when there is no default.
   NOTE(review): elided listing — braces, #endif lines and some
   statements between the visible lines are missing.  */
5743 bc_expand_end_case (expr)
5746 struct nesting *thiscase = case_stack;
5747 enum bytecode_opcode opcode;
5748 struct bc_label *jump_label;
5749 struct case_node *c;
/* Fall-through out of the switch body jumps to the exit label.  */
5751 bc_emit_bytecode (jump);
5752 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));
5754 #ifdef DEBUG_PRINT_CODE
5755 fputc ('\n', stderr);
5758 /* Now that the size of the jump table is known, emit the actual
5759 indexed jump instruction. */
5760 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));
/* Pick the case opcode by index mode (SI vs DI) and signedness.  */
5762 opcode = TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode
5763 ? TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseSU : caseSI
5764 : TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseDU : caseDI;
5766 bc_emit_bytecode (opcode);
5768 /* Now emit the case instruction's literal arguments, in order.
5769 In addition to the value on the stack, it uses:
5770 1. The address of the jump table.
5771 2. The size of the jump table.
5772 3. The default label. */
5774 jump_label = bc_get_bytecode_label ();
5775 bc_emit_bytecode_labelref (jump_label);
5776 bc_emit_bytecode_const ((char *) &thiscase->data.case_stmt.num_ranges,
5777 sizeof thiscase->data.case_stmt.num_ranges);
/* No explicit default: fall back to the switch's exit label.  */
5779 if (thiscase->data.case_stmt.default_label)
5780 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (thiscase->data.case_stmt.default_label)));
5782 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));
5784 /* Output the jump table. */
5786 bc_align_bytecode (3 /* PTR_ALIGN */);
5787 bc_emit_bytecode_labeldef (jump_label);
5789 if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode)
5790 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
/* OPCODE is reused here merely as scratch storage for the 32-bit
   low/high bounds written into the table.  */
5792 opcode = TREE_INT_CST_LOW (c->low);
5793 bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);
5795 opcode = TREE_INT_CST_LOW (c->high);
5796 bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);
5798 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
5801 if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == DImode)
5802 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
5804 bc_emit_bytecode_DI_const (c->low);
5805 bc_emit_bytecode_DI_const (c->high);
5807 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
5814 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->exit_label));
5816 /* Possibly issue enumeration warnings. */
5818 if (!thiscase->data.case_stmt.default_label
5819 && TREE_CODE (TREE_TYPE (expr)) == ENUMERAL_TYPE
5820 && TREE_CODE (expr) != INTEGER_CST
5822 check_for_full_enumeration_handling (TREE_TYPE (expr));
5825 #ifdef DEBUG_PRINT_CODE
5826 fputc ('\n', stderr);
5829 POPSTACK (case_stack);
5833 /* Return unique bytecode ID. */
/* NOTE(review): the function header that owns this static counter is
   missing from this listing; presumably a bc_new_uid-style routine
   that increments and returns bc_uid -- confirm against the full
   file before editing.  */
5838 static int bc_uid = 0;
5843 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
/* NOTE(review): this listing is lacunose -- the return-type line, the
   `int unsignedp;' parameter declaration, braces and at least one
   statement line (the jump emitted in the constant-equal case) are
   missing from the visible text; confirm against the full file.  */
5846 do_jump_if_equal (op1, op2, label, unsignedp)
5847 rtx op1, op2, label;
/* If both operands are CONST_INT rtx the comparison is decided at
   compile time: no compare insn is needed at all.  */
5850 if (GET_CODE (op1) == CONST_INT
5851 && GET_CODE (op2) == CONST_INT)
5853 if (INTVAL (op1) == INTVAL (op2))
/* Otherwise emit a compare insn followed by a branch-if-equal.
   A CONST_INT has VOIDmode, so take the mode from whichever operand
   actually carries one.  */
5858 enum machine_mode mode = GET_MODE (op1);
5859 if (mode == VOIDmode)
5860 mode = GET_MODE (op2);
5861 emit_cmp_insn (op1, op2, EQ, NULL_RTX, mode, unsignedp, 0);
5862 emit_jump_insn (gen_beq (label));
5866 /* Not all case values are encountered equally. This function
5867 uses a heuristic to weight case labels, in cases where that
5868 looks like a reasonable thing to do.
5870 Right now, all we try to guess is text, and we establish the
5873 chars above space: 16
5882 If we find any cases in the switch that are not either -1 or in the range
5883 of valid ASCII characters, or are control characters other than those
5884 commonly used with "\", don't treat this switch scanning text.
5886 Return 1 if these nodes are suitable for cost estimation, otherwise
/* NOTE(review): lines are missing from this listing (the return-type
   line, parameter declaration, braces, loop bodies that assign the
   per-character weights, and the return statements); confirm against
   the full file before editing.  */
5890 estimate_case_costs (node)
/* Bounds for the "looks like text" test: constants must lie in
   [-1, 127].  min_ascii is the all-ones INTEGER_CST, i.e. -1.  */
5893 tree min_ascii = build_int_2 (-1, -1);
5894 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5898 /* If we haven't already made the cost table, make it now. Note that the
5899 lower bound of the table is -1, not zero. */
5901 if (cost_table == NULL)
/* Allocate 129 entries and advance the pointer so cost_table[-1]
   through cost_table[127] are all valid indices.  */
5903 cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
5904 bzero ((char *) (cost_table - 1), 129 * sizeof (short));
5906 for (i = 0; i < 128; i++)
5910 else if (ispunct (i))
/* Control characters get a negative weight (assignment line missing
   here) so they are rejected by the cost_table[i] < 0 test below;
   the common escapes are then re-weighted positive explicitly.  */
5912 else if (iscntrl (i))
5916 cost_table[' '] = 8;
5917 cost_table['\t'] = 4;
5918 cost_table['\0'] = 4;
5919 cost_table['\n'] = 2;
5920 cost_table['\f'] = 1;
5921 cost_table['\v'] = 1;
5922 cost_table['\b'] = 1;
5925 /* See if all the case expressions look like text. It is text if the
5926 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5927 as signed arithmetic since we don't want to ever access cost_table with a
5928 value less than -1. Also check that none of the constants in a range
5929 are strange control characters. */
5931 for (n = node; n; n = n->right)
5933 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5936 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
5937 if (cost_table[i] < 0)
5941 /* All interesting values are within the range of interesting
5942 ASCII characters. */
5946 /* Scan an ordered list of case nodes
5947 combining those with consecutive values or ranges.
5949 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
/* NOTE(review): this listing is missing the return-type line, the
   parameter declaration, braces, the outer loop header over NODE, the
   `integer_one_node' operand lines inside the two fold/build calls,
   and the list-splicing statements at the end -- confirm against the
   full file before editing.  */
5952 group_case_nodes (head)
5955 case_node_ptr node = head;
/* LB is the first real insn at NODE's label; used below to detect
   successors that ultimately jump to the same place.  */
5959 rtx lb = next_real_insn (label_rtx (node->code_label));
5961 case_node_ptr np = node;
5963 /* Try to group the successors of NODE with NODE. */
5964 while (((np = np->right) != 0)
5965 /* Do they jump to the same place? */
5966 && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
5967 || (lb != 0 && lb2 != 0
5968 && simplejump_p (lb)
5969 && simplejump_p (lb2)
5970 && rtx_equal_p (SET_SRC (PATTERN (lb)),
5971 SET_SRC (PATTERN (lb2)))))
5972 /* Are their ranges consecutive? */
5973 && tree_int_cst_equal (np->low,
5974 fold (build (PLUS_EXPR,
5975 TREE_TYPE (node->high),
5978 /* An overflow is not consecutive. */
5979 && tree_int_cst_lt (node->high,
5980 fold (build (PLUS_EXPR,
5981 TREE_TYPE (node->high),
5983 integer_one_node))))
/* Absorb NP's range into NODE by extending NODE's upper bound.  */
5985 node->high = np->high;
5987 /* NP is the first node after NODE which can't be grouped with it.
5988 Delete the nodes in between, and move on to that node. */
5994 /* Take an ordered list of case nodes
5995 and transform them into a near optimal binary tree,
5996 on the assumption that any target code selection value is as
5997 likely as any other.
5999 The transformation is performed by splitting the ordered
6000 list into two equal sections plus a pivot. The parts are
6001 then attached to the pivot as left and right branches. Each
6002 branch is is then transformed recursively. */
/* NOTE(review): the return-type line, local declarations (i, ranges,
   cost), braces, loop headers, the pivot-detach statements and
   several conditional lines are missing from this listing; confirm
   against the full file before editing.  */
6005 balance_case_nodes (head, parent)
6006 case_node_ptr *head;
6007 case_node_ptr parent;
6009 register case_node_ptr np;
6017 register case_node_ptr *npp;
6020 /* Count the number of entries on branch. Also count the ranges. */
/* A node whose low != high is a range; ranges are counted separately
   and weighted as two entries when no cost table is in use.  */
6024 if (!tree_int_cst_equal (np->low, np->high))
6028 cost += cost_table[TREE_INT_CST_LOW (np->high)];
6032 cost += cost_table[TREE_INT_CST_LOW (np->low)];
6040 /* Split this list if it is long enough for that to help. */
6045 /* Find the place in the list that bisects the list's total cost,
6046 Here I gets half the total cost. */
6051 /* Skip nodes while their cost does not reach that amount. */
6052 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
6053 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
6054 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
6057 npp = &(*npp)->right;
6062 /* Leave this branch lopsided, but optimize left-hand
6063 side and fill in `parent' fields for right-hand side. */
6065 np->parent = parent;
6066 balance_case_nodes (&np->left, np);
6067 for (; np->right; np = np->right)
6068 np->right->parent = np;
6072 /* If there are just three nodes, split at the middle one. */
6074 npp = &(*npp)->right;
6077 /* Find the place in the list that bisects the list's total cost,
6078 where ranges count as 2.
6079 Here I gets half the total cost. */
6080 i = (i + ranges + 1) / 2;
6083 /* Skip nodes while their cost does not reach that amount. */
6084 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
6089 npp = &(*npp)->right;
/* NP becomes the pivot: its left subtree is the prefix of the list,
   its right subtree the suffix; both are balanced recursively.  */
6094 np->parent = parent;
6097 /* Optimize each of the two split parts. */
6098 balance_case_nodes (&np->left, np);
6099 balance_case_nodes (&np->right, np);
6103 /* Else leave this branch as one level,
6104 but fill in `parent' fields. */
6106 np->parent = parent;
6107 for (; np->right; np = np->right)
6108 np->right->parent = np;
6113 /* Search the parent sections of the case node tree
6114 to see if a test for the lower bound of NODE would be redundant.
6115 INDEX_TYPE is the type of the index expression.
6117 The instructions to generate the case decision tree are
6118 output in the same order as nodes are processed so it is
6119 known that if a parent node checks the range of the current
6120 node minus one that the current node is bounded at its lower
6121 span. Thus the test would be redundant. */
/* NOTE(review): the return-type line, parameter declarations, the
   `low_minus_one' declaration, braces and the return statements are
   missing from this listing; confirm against the full file.  */
6124 node_has_low_bound (node, index_type)
6129 case_node_ptr pnode;
6131 /* If the lower bound of this node is the lowest value in the index type,
6132 we need not test it. */
6134 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
6137 /* If this node has a left branch, the value at the left must be less
6138 than that at this node, so it cannot be bounded at the bottom and
6139 we need not bother testing any further. */
6144 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
6145 node->low, integer_one_node));
6147 /* If the subtraction above overflowed, we can't verify anything.
6148 Otherwise, look for a parent that tests our value - 1. */
/* Overflow check: low - 1 must actually compare less than low.  */
6150 if (! tree_int_cst_lt (low_minus_one, node->low))
6153 for (pnode = node->parent; pnode; pnode = pnode->parent)
6154 if (tree_int_cst_equal (low_minus_one, pnode->high))
6160 /* Search the parent sections of the case node tree
6161 to see if a test for the upper bound of NODE would be redundant.
6162 INDEX_TYPE is the type of the index expression.
6164 The instructions to generate the case decision tree are
6165 output in the same order as nodes are processed so it is
6166 known that if a parent node checks the range of the current
6167 node plus one that the current node is bounded at its upper
6168 span. Thus the test would be redundant. */
/* NOTE(review): mirror image of node_has_low_bound; as there, the
   return-type line, parameter declarations, `high_plus_one'
   declaration, braces and return statements are missing from this
   listing -- confirm against the full file.  */
6171 node_has_high_bound (node, index_type)
6176 case_node_ptr pnode;
6178 /* If the upper bound of this node is the highest value in the type
6179 of the index expression, we need not test against it. */
6181 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
6184 /* If this node has a right branch, the value at the right must be greater
6185 than that at this node, so it cannot be bounded at the top and
6186 we need not bother testing any further. */
6191 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
6192 node->high, integer_one_node));
6194 /* If the addition above overflowed, we can't verify anything.
6195 Otherwise, look for a parent that tests our value + 1. */
/* Overflow check: high must compare less than high + 1.  */
6197 if (! tree_int_cst_lt (node->high, high_plus_one))
6200 for (pnode = node->parent; pnode; pnode = pnode->parent)
6201 if (tree_int_cst_equal (high_plus_one, pnode->low))
6207 /* Search the parent sections of the
6208 case node tree to see if both tests for the upper and lower
6209 bounds of NODE would be redundant. */
/* NOTE(review): return-type line, parameter declarations and braces
   are missing from this listing.  The visible body simply conjoins
   the two single-bound predicates above.  */
6212 node_is_bounded (node, index_type)
6216 return (node_has_low_bound (node, index_type)
6217 && node_has_high_bound (node, index_type));
6220 /* Emit an unconditional jump to LABEL unless it would be dead code. */
/* NOTE(review): return-type line, parameter declaration, braces and
   the emit_jump call are missing from this listing.  A BARRIER as the
   last emitted insn means control cannot fall through here, so the
   jump is suppressed in that case.  */
6223 emit_jump_if_reachable (label)
6226 if (GET_CODE (get_last_insn ()) != BARRIER)
6230 /* Emit step-by-step code to select a case for the value of INDEX.
6231 The thus generated decision tree follows the form of the
6232 case-node binary tree NODE, whose nodes represent test conditions.
6233 INDEX_TYPE is the type of the index of the switch.
6235 Care is taken to prune redundant tests from the decision tree
6236 by detecting any boundary conditions already checked by
6237 emitted rtx. (See node_has_high_bound, node_has_low_bound
6238 and node_is_bounded, above.)
6240 Where the test conditions can be shown to be redundant we emit
6241 an unconditional jump to the target code. As a further
6242 optimization, the subordinates of a tree node are examined to
6243 check for bounded nodes. In this case conditional and/or
6244 unconditional jumps as a result of the boundary check for the
6245 current node are arranged to target the subordinates associated
6246 code for out of bound conditions on the current node node.
6248 We can assume that when control reaches the code generated here,
6249 the index value has already been compared with the parents
6250 of this node, and determined to be on the same side of each parent
6251 as this node is. Thus, if this node tests for the value 51,
6252 and a parent tested for 52, we don't need to consider
6253 the possibility of a value greater than 51. If another parent
6254 tests for the value 50, then this node need not test anything. */
/* NOTE(review): this listing is heavily lacunose -- the `static void'
   line, the parameter declarations, many braces, several VOIDmode
   argument lines split across emit_cmp_insn calls, and assorted
   statements are missing; confirm every edit against the full file.  */
6257 emit_case_nodes (index, node, default_label, index_type)
6263 /* If INDEX has an unsigned type, we must make unsigned branches. */
6264 int unsignedp = TREE_UNSIGNED (index_type);
/* Pre-select signed vs. unsigned branch generators once, so the rest
   of the function can emit comparisons without re-testing unsignedp.  */
6265 typedef rtx rtx_function ();
6266 rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
6267 rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
6268 rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
6269 rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
6270 enum machine_mode mode = GET_MODE (index);
6272 /* See if our parents have already tested everything for us.
6273 If they have, emit an unconditional jump for this node. */
6274 if (node_is_bounded (node, index_type))
6275 emit_jump (label_rtx (node->code_label));
6277 else if (tree_int_cst_equal (node->low, node->high))
6279 /* Node is single valued. First see if the index expression matches
6280 this node and then check our children, if any. */
6282 do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
6283 label_rtx (node->code_label), unsignedp);
6285 if (node->right != 0 && node->left != 0)
6287 /* This node has children on both sides.
6288 Dispatch to one side or the other
6289 by comparing the index value with this node's value.
6290 If one subtree is bounded, check that one first,
6291 so we can avoid real branches in the tree. */
6293 if (node_is_bounded (node->right, index_type))
6295 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
6297 GT, NULL_RTX, mode, unsignedp, 0);
6299 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
6300 emit_case_nodes (index, node->left, default_label, index_type);
6303 else if (node_is_bounded (node->left, index_type))
6305 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
6307 LT, NULL_RTX, mode, unsignedp, 0);
6308 emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
6309 emit_case_nodes (index, node->right, default_label, index_type);
6314 /* Neither node is bounded. First distinguish the two sides;
6315 then emit the code for one side at a time. */
/* The declaration line for test_label is missing here; this is its
   initializer (a fresh LABEL_DECL).  */
6318 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6320 /* See if the value is on the right. */
6321 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
6323 GT, NULL_RTX, mode, unsignedp, 0);
6324 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
6326 /* Value must be on the left.
6327 Handle the left-hand subtree. */
6328 emit_case_nodes (index, node->left, default_label, index_type);
6329 /* If left-hand subtree does nothing,
6331 emit_jump_if_reachable (default_label);
6333 /* Code branches here for the right-hand subtree. */
6334 expand_label (test_label);
6335 emit_case_nodes (index, node->right, default_label, index_type);
6339 else if (node->right != 0 && node->left == 0)
6341 /* Here we have a right child but no left so we issue conditional
6342 branch to default and process the right child.
6344 Omit the conditional branch to default if we it avoid only one
6345 right child; it costs too much space to save so little time. */
6347 if (node->right->right || node->right->left
6348 || !tree_int_cst_equal (node->right->low, node->right->high))
6350 if (!node_has_low_bound (node, index_type))
6352 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
6354 LT, NULL_RTX, mode, unsignedp, 0);
6355 emit_jump_insn ((*gen_blt_pat) (default_label));
6358 emit_case_nodes (index, node->right, default_label, index_type);
6361 /* We cannot process node->right normally
6362 since we haven't ruled out the numbers less than
6363 this node's value. So handle node->right explicitly. */
6364 do_jump_if_equal (index,
6365 expand_expr (node->right->low, NULL_RTX,
6367 label_rtx (node->right->code_label), unsignedp);
6370 else if (node->right == 0 && node->left != 0)
6372 /* Just one subtree, on the left. */
6374 #if 0 /* The following code and comment were formerly part
6375 of the condition here, but they didn't work
6376 and I don't understand what the idea was. -- rms. */
6377 /* If our "most probable entry" is less probable
6378 than the default label, emit a jump to
6379 the default label using condition codes
6380 already lying around. With no right branch,
6381 a branch-greater-than will get us to the default
6384 && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
/* End of the disabled (#if 0) block; the matching #endif is among
   the lines missing from this listing.  */
6387 if (node->left->left || node->left->right
6388 || !tree_int_cst_equal (node->left->low, node->left->high))
6390 if (!node_has_high_bound (node, index_type))
6392 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
6394 GT, NULL_RTX, mode, unsignedp, 0);
6395 emit_jump_insn ((*gen_bgt_pat) (default_label));
6398 emit_case_nodes (index, node->left, default_label, index_type);
6401 /* We cannot process node->left normally
6402 since we haven't ruled out the numbers less than
6403 this node's value. So handle node->left explicitly. */
6404 do_jump_if_equal (index,
6405 expand_expr (node->left->low, NULL_RTX,
6407 label_rtx (node->left->code_label), unsignedp);
6412 /* Node is a range. These cases are very similar to those for a single
6413 value, except that we do not start by testing whether this node
6414 is the one to branch to. */
6416 if (node->right != 0 && node->left != 0)
6418 /* Node has subtrees on both sides.
6419 If the right-hand subtree is bounded,
6420 test for it first, since we can go straight there.
6421 Otherwise, we need to make a branch in the control structure,
6422 then handle the two subtrees. */
6423 tree test_label = 0;
6425 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
6427 GT, NULL_RTX, mode, unsignedp, 0);
6429 if (node_is_bounded (node->right, index_type))
6430 /* Right hand node is fully bounded so we can eliminate any
6431 testing and branch directly to the target code. */
6432 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
6435 /* Right hand node requires testing.
6436 Branch to a label where we will handle it later. */
6438 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6439 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
6442 /* Value belongs to this node or to the left-hand subtree. */
6444 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
6445 GE, NULL_RTX, mode, unsignedp, 0);
6446 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
6448 /* Handle the left-hand subtree. */
6449 emit_case_nodes (index, node->left, default_label, index_type);
6451 /* If right node had to be handled later, do that now. */
6455 /* If the left-hand subtree fell through,
6456 don't let it fall into the right-hand subtree. */
6457 emit_jump_if_reachable (default_label);
6459 expand_label (test_label);
6460 emit_case_nodes (index, node->right, default_label, index_type);
6464 else if (node->right != 0 && node->left == 0)
6466 /* Deal with values to the left of this node,
6467 if they are possible. */
6468 if (!node_has_low_bound (node, index_type))
6470 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
6472 LT, NULL_RTX, mode, unsignedp, 0);
6473 emit_jump_insn ((*gen_blt_pat) (default_label));
6476 /* Value belongs to this node or to the right-hand subtree. */
6478 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
6480 LE, NULL_RTX, mode, unsignedp, 0);
6481 emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));
6483 emit_case_nodes (index, node->right, default_label, index_type);
6486 else if (node->right == 0 && node->left != 0)
6488 /* Deal with values to the right of this node,
6489 if they are possible. */
6490 if (!node_has_high_bound (node, index_type))
6492 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
6494 GT, NULL_RTX, mode, unsignedp, 0);
6495 emit_jump_insn ((*gen_bgt_pat) (default_label));
6498 /* Value belongs to this node or to the left-hand subtree. */
6500 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
6501 GE, NULL_RTX, mode, unsignedp, 0);
6502 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
6504 emit_case_nodes (index, node->left, default_label, index_type);
6509 /* Node has no children so we check low and high bounds to remove
6510 redundant tests. Only one of the bounds can exist,
6511 since otherwise this node is bounded--a case tested already. */
6513 if (!node_has_high_bound (node, index_type))
6515 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
6517 GT, NULL_RTX, mode, unsignedp, 0);
6518 emit_jump_insn ((*gen_bgt_pat) (default_label));
6521 if (!node_has_low_bound (node, index_type))
6523 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
6525 LT, NULL_RTX, mode, unsignedp, 0);
6526 emit_jump_insn ((*gen_blt_pat) (default_label));
/* Any value surviving both bound checks belongs to this node.  */
6529 emit_jump (label_rtx (node->code_label));
6534 /* These routines are used by the loop unrolling code. They copy BLOCK trees
6535 so that the debugging info will be correct for the unrolled loop. */
6537 /* Indexed by block number, contains a pointer to the N'th block node. */
6539 static tree *block_vector;
/* NOTE(review): the return-type line, braces and any blank lines are
   missing from this listing.  Builds block_vector from the current
   function's outermost BLOCK and its insn chain, for later use by
   unroll_block_trees.  */
6542 find_loop_tree_blocks ()
6544 tree block = DECL_INITIAL (current_function_decl);
6546 block_vector = identify_blocks (block, get_insns ());
6550 unroll_block_trees ()
6552 tree block = DECL_INITIAL (current_function_decl);
6554 reorder_blocks (block_vector, block, get_insns ());