1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 88, 89, 92, 93, 1994 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option) any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
21 /* This file handles the generation of rtl code from tree structure
22 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
23 It also creates the rtl expressions for parameters and auto variables
24 and has full responsibility for allocating stack slots.
26 The functions whose names start with `expand_' are called by the
27 parser to generate RTL instructions for various kinds of constructs.
29 Some control and binding constructs require calling several such
30 functions at different times. For example, a simple if-then
31 is expanded by calling `expand_start_cond' (with the condition-expression
32 as argument) before parsing the then-clause and calling `expand_end_cond'
33 after parsing the then-clause. */
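/* Illustrative sketch, not part of GNU CC: for `if (cond) stmt;' a
   front end drives these entry points roughly as follows, where
   `parse_then_clause' stands for the hypothetical parser action that
   parses and expands the then-clause:

	expand_start_cond (cond, 0);	   emit the test of COND
	parse_then_clause ();		   expand the then-clause
	expand_end_cond ();		   emit the label ending the if

   The second argument of expand_start_cond controls whether the
   construct is visible to exit statements; see the description of the
   nesting stack below.  */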
44 #include "insn-flags.h"
45 #include "insn-config.h"
46 #include "insn-codes.h"
48 #include "hard-reg-set.h"
55 #include "bc-typecd.h"
56 #include "bc-opcode.h"
60 #define obstack_chunk_alloc xmalloc
61 #define obstack_chunk_free free
62 struct obstack stmt_obstack;
64 /* Filename and line number of last line-number note,
65 whether we actually emitted it or not. */
69 /* Nonzero if within a ({...}) grouping, in which case we must
70 always compute a value for each expr-stmt in case it is the last one. */
72 int expr_stmts_for_value;
74 /* Each time we expand an expression-statement,
75 record the expr's type and its RTL value here. */
77 static tree last_expr_type;
78 static rtx last_expr_value;
80 /* Each time we expand the end of a binding contour (in `expand_end_bindings')
81 and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
82 This is used by the `remember_end_note' function to record the endpoint
83 of each generated block in its associated BLOCK node. */
85 static rtx last_block_end_note;
87 /* Number of binding contours started so far in this function. */
89 int block_start_count;
91 /* Nonzero if function being compiled needs to
92 return the address of where it has put a structure value. */
94 extern int current_function_returns_pcc_struct;
96 /* Label that will go on parm cleanup code, if any.
97 Jumping to this label runs cleanup code for parameters, if
98 such code must be run. Following this code is the logical return label. */
100 extern rtx cleanup_label;
102 /* Label that will go on function epilogue.
103 Jumping to this label serves as a "return" instruction
104 on machines which require execution of the epilogue on all returns. */
106 extern rtx return_label;
108 /* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
109 So we can mark them all live at the end of the function, if not optimizing.
110 extern rtx save_expr_regs;
112 /* Offset to end of allocated area of stack frame.
113 If stack grows down, this is the address of the last stack slot allocated.
114 If stack grows up, this is the address for the next slot. */
115 extern int frame_offset;
117 /* Label to jump back to for tail recursion, or 0 if we have
118 not yet needed one for this function. */
119 extern rtx tail_recursion_label;
121 /* Place after which to insert the tail_recursion_label if we need one. */
122 extern rtx tail_recursion_reentry;
124 /* Location at which to save the argument pointer if it will need to be
125 referenced. There are two cases where this is done: if nonlocal gotos
126 exist, or if vars stored at an offset from the argument pointer will be
127 needed by inner routines. */
129 extern rtx arg_pointer_save_area;
131 /* Chain of all RTL_EXPRs that have insns in them. */
132 extern tree rtl_expr_chain;
134 #if 0 /* Turned off because 0 seems to work just as well. */
135 /* Cleanup lists are required for binding levels regardless of whether
136 that binding level has cleanups or not. This node serves as the
137 cleanup list whenever an empty list is required. */
138 static tree empty_cleanup_list;
141 /* Functions and data structures for expanding case statements. */
143 /* Case label structure, used to hold info on labels within case
144 statements. We handle "range" labels; for a single-value label
145 as in C, the high and low limits are the same.
147 A chain of case nodes is initially maintained via the RIGHT fields
148 in the nodes. Nodes with higher case values are later in the list.
150 Switch statements can be output in one of two forms. A branch table
151 is used if there are more than a few labels and the labels are dense
152 within the range between the smallest and largest case value. If a
153 branch table is used, no further manipulations are done with the case node chain.
156 The alternative to the use of a branch table is to generate a series
157 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
158 and PARENT fields to hold a binary tree. Initially the tree is
159 totally unbalanced, with everything on the right. We balance the tree
160 with nodes on the left having lower case values than the parent
161 and nodes on the right having higher values. We then output the tree
166 struct case_node *left; /* Left son in binary tree */
167 struct case_node *right; /* Right son in binary tree; also node chain */
168 struct case_node *parent; /* Parent of node in binary tree */
169 tree low; /* Lowest index value for this label */
170 tree high; /* Highest index value for this label */
171 tree code_label; /* Label to jump to when node matches */
174 typedef struct case_node case_node;
175 typedef struct case_node *case_node_ptr;
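/* Illustrative sketch, not part of GNU CC: walking the initial chain of
   case nodes (linked through the RIGHT field, in ascending order of
   case value) before any balancing has been done.  The helper
   `use_case_label' is hypothetical.  */
#if 0
static void
example_walk_case_chain (list)
     case_node_ptr list;
{
  register case_node_ptr n;

  for (n = list; n; n = n->right)
    /* For a single-value label as in C, n->low == n->high;
       for a range label they give the two ends of the range.  */
    use_case_label (n->low, n->high, n->code_label);
}
#endif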
177 /* These are used by estimate_case_costs and balance_case_nodes. */
179 /* This must be a signed type, and non-ANSI compilers lack signed char. */
180 static short *cost_table;
181 static int use_cost_table;
183 /* Stack of control and binding constructs we are currently inside.
185 These constructs begin when you call `expand_start_WHATEVER'
186 and end when you call `expand_end_WHATEVER'. This stack records
187 info about how the construct began that tells the end-function
188 what to do. It also may provide information about the construct
189 to alter the behavior of other constructs within the body.
190 For example, they may affect the behavior of C `break' and `continue'.
192 Each construct gets one `struct nesting' object.
193 All of these objects are chained through the `all' field.
194 `nesting_stack' points to the first object (innermost construct).
195 The position of an entry on `nesting_stack' is in its `depth' field.
197 Each type of construct has its own individual stack.
198 For example, loops have `loop_stack'. Each object points to the
199 next object of the same type through the `next' field.
201 Some constructs are visible to `break' exit-statements and others
202 are not. Which constructs are visible depends on the language.
203 Therefore, the data structure allows each construct to be visible
204 or not, according to the args given when the construct is started.
205 The construct is visible if the `exit_label' field is non-null.
206 In that case, the value should be a CODE_LABEL rtx. */
211 struct nesting *next;
216 /* For conds (if-then and if-then-else statements). */
219 /* Label for the end of the if construct.
220 There is none if EXITFLAG was not set
221 and no `else' has been seen yet. */
223 /* Label for the end of this alternative.
224 This may be the end of the if or the next else/elseif. */
230 /* Label at the top of the loop; place to loop back to. */
232 /* Label at the end of the whole construct. */
234 /* Label for `continue' statement to jump to;
235 this is in front of the stepper of the loop. */
238 /* For variable binding contours. */
241 /* Sequence number of this binding contour within the function,
242 in order of entry. */
243 int block_start_count;
244 /* Nonzero => value to restore stack to on exit. Complemented by
245 bc_stack_level (see below) when generating bytecodes. */
247 /* The NOTE that starts this contour.
248 Used by expand_goto to check whether the destination
249 is within each contour or not. */
251 /* Innermost containing binding contour that has a stack level. */
252 struct nesting *innermost_stack_block;
253 /* List of cleanups to be run on exit from this contour.
254 This is a list of expressions to be evaluated.
255 The TREE_PURPOSE of each link is the ..._DECL node
256 which the cleanup pertains to. */
258 /* List of cleanup-lists of blocks containing this block,
259 as they were at the locus where this block appears.
260 There is an element for each containing block,
261 ordered innermost containing block first.
262 The tail of this list can be 0 (was empty_cleanup_list),
263 if all remaining elements would be empty lists.
264 The element's TREE_VALUE is the cleanup-list of that block,
265 which may be null. */
267 /* Chain of labels defined inside this binding contour.
268 For contours that have stack levels or cleanups. */
269 struct label_chain *label_chain;
270 /* Number of function calls seen, as of start of this block. */
271 int function_call_count;
272 /* Bytecode specific: stack level to restore stack to on exit. */
275 /* For switch (C) or case (Pascal) statements,
276 and also for dummies (see `expand_start_case_dummy'). */
279 /* The insn after which the case dispatch should finally
280 be emitted. Zero for a dummy. */
282 /* For bytecodes, the case table is in-lined right in the code.
283 A label is needed for skipping over this block. It is only
284 used when generating bytecodes. */
286 /* A list of case labels, kept in ascending order by value
287 as the list is built.
288 During expand_end_case, this list may be rearranged into a
289 nearly balanced binary tree. */
290 struct case_node *case_list;
291 /* Label to jump to if no case matches. */
293 /* The expression to be dispatched on. */
295 /* Type that INDEX_EXPR should be converted to. */
297 /* Number of range exprs in case statement. */
299 /* Name of this kind of statement, for warnings. */
301 /* Nonzero if a case label has been seen in this case stmt. */
304 /* For exception contours. */
307 /* List of exceptions raised. This is a TREE_LIST
308 of whatever you want. */
310 /* List of exceptions caught. This is also a TREE_LIST
311 of whatever you want. As a special case, it has the
312 value `void_type_node' if it handles default exceptions. */
315 /* First insn of TRY block, in case resumptive model is needed. */
317 /* Label for the catch clauses. */
319 /* Label for unhandled exceptions. */
321 /* Label at the end of whole construct. */
323 /* Label which "escapes" the exception construct.
324 Like EXIT_LABEL for BREAK construct, but for exceptions. */
330 /* Chain of all pending binding contours. */
331 struct nesting *block_stack;
333 /* If any new stacks are added here, add them to POPSTACKS too. */
335 /* Chain of all pending binding contours that restore stack levels
337 struct nesting *stack_block_stack;
339 /* Chain of all pending conditional statements. */
340 struct nesting *cond_stack;
342 /* Chain of all pending loops. */
343 struct nesting *loop_stack;
345 /* Chain of all pending case or switch statements. */
346 struct nesting *case_stack;
348 /* Chain of all pending exception contours. */
349 struct nesting *except_stack;
351 /* Separate chain including all of the above,
352 chained through the `all' field. */
353 struct nesting *nesting_stack;
355 /* Number of entries on nesting_stack now. */
358 /* Allocate and return a new `struct nesting'. */
360 #define ALLOC_NESTING() \
361 (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))
363 /* Pop the nesting stack element by element until we pop off
364 the element which is at the top of STACK.
365 Update all the other stacks, popping off elements from them
366 as we pop them from nesting_stack. */
368 #define POPSTACK(STACK) \
369 do { struct nesting *target = STACK; \
370 struct nesting *this; \
371 do { this = nesting_stack; \
372 if (loop_stack == this) \
373 loop_stack = loop_stack->next; \
374 if (cond_stack == this) \
375 cond_stack = cond_stack->next; \
376 if (block_stack == this) \
377 block_stack = block_stack->next; \
378 if (stack_block_stack == this) \
379 stack_block_stack = stack_block_stack->next; \
380 if (case_stack == this) \
381 case_stack = case_stack->next; \
382 if (except_stack == this) \
383 except_stack = except_stack->next; \
384 nesting_depth = nesting_stack->depth - 1; \
385 nesting_stack = this->all; \
386 obstack_free (&stmt_obstack, this); } \
387 while (this != target); } while (0)
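/* Illustrative sketch, not part of GNU CC: the general shape of an
   expand_start_.../expand_end_... pair built on ALLOC_NESTING and
   POPSTACK.  The real functions in this file record much more
   information in the nesting object.  */
#if 0
static void
example_start_construct ()
{
  struct nesting *thisnest = ALLOC_NESTING ();

  /* Link the new object onto the stack for its own kind of construct
     (here the cond stack) and onto the `all' chain.  */
  thisnest->next = cond_stack;
  thisnest->all = nesting_stack;
  thisnest->depth = ++nesting_depth;
  cond_stack = thisnest;
  nesting_stack = thisnest;
}

static void
example_end_construct ()
{
  /* Pop this construct, and any constructs nested more deeply,
     off every stack they are on.  */
  POPSTACK (cond_stack);
}
#endif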
389 /* In some cases it is impossible to generate code for a forward goto
390 until the label definition is seen. This happens when it may be necessary
391 for the goto to reset the stack pointer: we don't yet know how to do that.
392 So expand_goto puts an entry on this fixup list.
393 Each time a binding contour that resets the stack is exited, we check each fixup.
395 If the target label has now been defined, we can insert the proper code. */
399 /* Points to following fixup. */
400 struct goto_fixup *next;
401 /* Points to the insn before the jump insn.
402 If more code must be inserted, it goes after this insn. */
404 /* The LABEL_DECL that this jump is jumping to, or 0
405 for break, continue or return. */
407 /* The BLOCK for the place where this goto was found. */
409 /* The CODE_LABEL rtx that this is jumping to. */
411 /* Number of binding contours started in current function
412 before the label reference. */
413 int block_start_count;
414 /* The outermost stack level that should be restored for this jump.
415 Each time a binding contour that resets the stack is exited,
416 if the target label is *not* yet defined, this slot is updated. */
418 /* List of lists of cleanup expressions to be run by this goto.
419 There is one element for each block that this goto is within.
420 The tail of this list can be 0 (was empty_cleanup_list),
421 if all remaining elements would be empty.
422 The TREE_VALUE contains the cleanup list of that block as of the
423 time this goto was seen.
424 The TREE_ADDRESSABLE flag is 1 for a block that has been exited. */
425 tree cleanup_list_list;
427 /* Bytecode specific members follow */
429 /* The label that this jump is jumping to, or 0 for break, continue or return. */
431 struct bc_label *bc_target;
433 /* The label we use for the fixup patch */
434 struct bc_label *label;
436 /* True (non-0) if fixup has been handled */
439 /* Like stack_level above, except refers to the interpreter stack */
443 static struct goto_fixup *goto_fixup_chain;
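/* Illustrative sketch, not part of GNU CC: a GNU C fragment whose
   forward goto needs a fixup.  The jump leaves a contour that must
   restore a stack level (the block containing the variable-sized
   array), but the target label has not been defined yet when the goto
   is expanded, so expand_fixup records it on the chain below:

	{
	  int vla[n];
	  ...
	  goto done;
	  ...
	}
      done: ;

   Once `done' has been defined, a later call to fixup_gotos (at the
   latest, expand_fixups at the end of the function) inserts the stack
   restore just ahead of the recorded jump insn.  */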
445 /* Within any binding contour that must restore a stack level,
446 all labels are recorded with a chain of these structures. */
450 /* Points to following fixup. */
451 struct label_chain *next;
454 static void expand_goto_internal PROTO((tree, rtx, rtx));
455 static void bc_expand_goto_internal PROTO((enum bytecode_opcode,
456 struct bc_label *, tree));
457 static int expand_fixup PROTO((tree, rtx, rtx));
458 static void bc_expand_fixup PROTO((enum bytecode_opcode,
459 struct bc_label *, int));
460 static void fixup_gotos PROTO((struct nesting *, rtx, tree,
462 static void bc_fixup_gotos PROTO((struct nesting *, int, tree,
464 static int warn_if_unused_value PROTO((tree));
465 static void bc_expand_start_cond PROTO((tree, int));
466 static void bc_expand_end_cond PROTO((void));
467 static void bc_expand_start_else PROTO((void));
468 static void bc_expand_end_loop PROTO((void));
469 static void bc_expand_end_bindings PROTO((tree, int, int));
470 static void bc_expand_decl PROTO((tree, tree));
471 static void bc_expand_variable_local_init PROTO((tree));
472 static void bc_expand_decl_init PROTO((tree));
473 static void expand_null_return_1 PROTO((rtx, int));
474 static int tail_recursion_args PROTO((tree, tree));
475 static void expand_cleanups PROTO((tree, tree));
476 static void bc_expand_start_case PROTO((struct nesting *, tree,
478 static int bc_pushcase PROTO((tree, tree));
479 static void bc_check_for_full_enumeration_handling PROTO((tree));
480 static void bc_expand_end_case PROTO((tree));
481 static void do_jump_if_equal PROTO((rtx, rtx, rtx, int));
482 static int estimate_case_costs PROTO((case_node_ptr));
483 static void group_case_nodes PROTO((case_node_ptr));
484 static void balance_case_nodes PROTO((case_node_ptr *,
486 static int node_has_low_bound PROTO((case_node_ptr, tree));
487 static int node_has_high_bound PROTO((case_node_ptr, tree));
488 static int node_is_bounded PROTO((case_node_ptr, tree));
489 static void emit_jump_if_reachable PROTO((rtx));
490 static void emit_case_nodes PROTO((rtx, case_node_ptr, rtx, tree));
492 int bc_expand_exit_loop_if_false ();
493 void bc_expand_start_cond ();
494 void bc_expand_end_cond ();
495 void bc_expand_start_else ();
496 void bc_expand_end_bindings ();
497 void bc_expand_start_case ();
498 void bc_check_for_full_enumeration_handling ();
499 void bc_expand_end_case ();
500 void bc_expand_decl ();
502 extern rtx bc_allocate_local ();
503 extern rtx bc_allocate_variable_array ();
508 gcc_obstack_init (&stmt_obstack);
510 empty_cleanup_list = build_tree_list (NULL_TREE, NULL_TREE);
515 init_stmt_for_function ()
517 /* We are not currently within any block, conditional, loop or case. */
519 stack_block_stack = 0;
526 block_start_count = 0;
528 /* No gotos have been expanded yet. */
529 goto_fixup_chain = 0;
531 /* We are not processing a ({...}) grouping. */
532 expr_stmts_for_value = 0;
540 p->block_stack = block_stack;
541 p->stack_block_stack = stack_block_stack;
542 p->cond_stack = cond_stack;
543 p->loop_stack = loop_stack;
544 p->case_stack = case_stack;
545 p->nesting_stack = nesting_stack;
546 p->nesting_depth = nesting_depth;
547 p->block_start_count = block_start_count;
548 p->last_expr_type = last_expr_type;
549 p->last_expr_value = last_expr_value;
550 p->expr_stmts_for_value = expr_stmts_for_value;
551 p->emit_filename = emit_filename;
552 p->emit_lineno = emit_lineno;
553 p->goto_fixup_chain = goto_fixup_chain;
557 restore_stmt_status (p)
560 block_stack = p->block_stack;
561 stack_block_stack = p->stack_block_stack;
562 cond_stack = p->cond_stack;
563 loop_stack = p->loop_stack;
564 case_stack = p->case_stack;
565 nesting_stack = p->nesting_stack;
566 nesting_depth = p->nesting_depth;
567 block_start_count = p->block_start_count;
568 last_expr_type = p->last_expr_type;
569 last_expr_value = p->last_expr_value;
570 expr_stmts_for_value = p->expr_stmts_for_value;
571 emit_filename = p->emit_filename;
572 emit_lineno = p->emit_lineno;
573 goto_fixup_chain = p->goto_fixup_chain;
576 /* Emit a no-op instruction. */
583 if (!output_bytecode)
585 last_insn = get_last_insn ();
587 && (GET_CODE (last_insn) == CODE_LABEL
588 || prev_real_insn (last_insn) == 0))
589 emit_insn (gen_nop ());
593 /* Return the rtx-label that corresponds to a LABEL_DECL,
594 creating it if necessary. */
600 if (TREE_CODE (label) != LABEL_DECL)
603 if (DECL_RTL (label))
604 return DECL_RTL (label);
606 return DECL_RTL (label) = gen_label_rtx ();
609 /* Add an unconditional jump to LABEL as the next sequential instruction. */
615 do_pending_stack_adjust ();
616 emit_jump_insn (gen_jump (label));
620 /* Emit code to jump to the address
621 specified by the pointer expression EXP. */
624 expand_computed_goto (exp)
629 bc_expand_expr (exp);
630 bc_emit_instruction (jumpP);
634 rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
636 emit_indirect_jump (x);
640 /* Handle goto statements and the labels that they can go to. */
642 /* Specify the location in the RTL code of a label LABEL,
643 which is a LABEL_DECL tree node.
645 This is used for the kind of label that the user can jump to with a
646 goto statement, and for alternatives of a switch or case statement.
647 RTL labels generated for loops and conditionals don't go through here;
648 they are generated directly at the RTL level, by other functions below.
650 Note that this has nothing to do with defining label *names*.
651 Languages vary in how they do that and what that even means. */
657 struct label_chain *p;
661 if (! DECL_RTL (label))
662 DECL_RTL (label) = bc_gen_rtx ((char *) 0, 0, bc_get_bytecode_label ());
663 if (! bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (DECL_RTL (label))))
664 error ("multiply defined label");
668 do_pending_stack_adjust ();
669 emit_label (label_rtx (label));
670 if (DECL_NAME (label))
671 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
673 if (stack_block_stack != 0)
675 p = (struct label_chain *) oballoc (sizeof (struct label_chain));
676 p->next = stack_block_stack->data.block.label_chain;
677 stack_block_stack->data.block.label_chain = p;
682 /* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
683 from nested functions. */
686 declare_nonlocal_label (label)
689 nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
690 LABEL_PRESERVE_P (label_rtx (label)) = 1;
691 if (nonlocal_goto_handler_slot == 0)
693 nonlocal_goto_handler_slot
694 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
695 emit_stack_save (SAVE_NONLOCAL,
696 &nonlocal_goto_stack_level,
697 PREV_INSN (tail_recursion_reentry));
701 /* Generate RTL code for a `goto' statement with target label LABEL.
702 LABEL should be a LABEL_DECL tree node that was or will later be
703 defined with `expand_label'. */
713 expand_goto_internal (label, label_rtx (label), NULL_RTX);
717 /* Check for a nonlocal goto to a containing function. */
718 context = decl_function_context (label);
719 if (context != 0 && context != current_function_decl)
721 struct function *p = find_function_data (context);
722 rtx label_ref = gen_rtx (LABEL_REF, Pmode, label_rtx (label));
725 p->has_nonlocal_label = 1;
726 current_function_has_nonlocal_goto = 1;
727 LABEL_REF_NONLOCAL_P (label_ref) = 1;
729 /* Copy the rtl for the slots so that they won't be shared in
730 case the virtual stack vars register gets instantiated differently
731 in the parent than in the child. */
733 #if HAVE_nonlocal_goto
734 if (HAVE_nonlocal_goto)
735 emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
736 copy_rtx (p->nonlocal_goto_handler_slot),
737 copy_rtx (p->nonlocal_goto_stack_level),
744 /* Restore frame pointer for containing function.
745 This sets the actual hard register used for the frame pointer
746 to the location of the function's incoming static chain info.
747 The non-local goto handler will then adjust it to contain the
748 proper value and reload the argument pointer, if needed. */
749 emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));
751 /* We have now loaded the frame pointer hardware register with
752 the address that corresponds to the start of the virtual
753 stack vars. So replace virtual_stack_vars_rtx in all
754 addresses we use with hard_frame_pointer_rtx. */
756 /* Get addr of containing function's current nonlocal goto handler,
757 which will do any cleanups and then jump to the label. */
758 addr = copy_rtx (p->nonlocal_goto_handler_slot);
759 temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
760 hard_frame_pointer_rtx));
762 /* Restore the stack pointer. Note this uses fp just restored. */
763 addr = p->nonlocal_goto_stack_level;
765 addr = replace_rtx (copy_rtx (addr),
766 virtual_stack_vars_rtx,
767 hard_frame_pointer_rtx);
769 emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);
771 /* Put in the static chain register the nonlocal label address. */
772 emit_move_insn (static_chain_rtx, label_ref);
773 /* USE of hard_frame_pointer_rtx added for consistency; not clear if really needed. */
775 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
776 emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
777 emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
778 emit_indirect_jump (temp);
782 expand_goto_internal (label, label_rtx (label), NULL_RTX);
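/* Illustrative sketch, not part of GNU CC: a nonlocal goto in GNU C,
   which takes the nonlocal branch above.  The label belongs to the
   containing function, so decl_function_context (label) differs from
   current_function_decl while `inner' is being expanded:

	void outer ()
	{
	  __label__ out;
	  void inner () { goto out; }
	  inner ();
	 out: ;
	}
*/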
785 /* Generate RTL code for a `goto' statement with target label BODY.
786 LABEL should be a LABEL_REF.
787 LAST_INSN, if non-0, is the rtx we should consider as the last
788 insn emitted (for the purposes of cleaning up a return). */
791 expand_goto_internal (body, label, last_insn)
796 struct nesting *block;
799 /* NOTICE! If a bytecode instruction other than `jump' is needed,
800 then the caller has to call bc_expand_goto_internal()
801 directly. This is rather an exceptional case, and there aren't
802 that many places where this is necessary. */
805 expand_goto_internal (body, label, last_insn);
809 if (GET_CODE (label) != CODE_LABEL)
812 /* If label has already been defined, we can tell now
813 whether and how we must alter the stack level. */
815 if (PREV_INSN (label) != 0)
817 /* Find the innermost pending block that contains the label.
818 (Check containment by comparing insn-uids.)
819 Then restore the outermost stack level within that block,
820 and do cleanups of all blocks contained in it. */
821 for (block = block_stack; block; block = block->next)
823 if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
825 if (block->data.block.stack_level != 0)
826 stack_level = block->data.block.stack_level;
827 /* Execute the cleanups for blocks we are exiting. */
828 if (block->data.block.cleanups != 0)
830 expand_cleanups (block->data.block.cleanups, NULL_TREE);
831 do_pending_stack_adjust ();
837 /* Ensure stack adjust isn't done by emit_jump, as this would clobber
838 the stack pointer. This one should be deleted as dead by flow. */
839 clear_pending_stack_adjust ();
840 do_pending_stack_adjust ();
841 emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
844 if (body != 0 && DECL_TOO_LATE (body))
845 error ("jump to `%s' invalidly jumps into binding contour",
846 IDENTIFIER_POINTER (DECL_NAME (body)));
848 /* Label not yet defined: may need to put this goto
849 on the fixup list. */
850 else if (! expand_fixup (body, label, last_insn))
852 /* No fixup needed. Record that the label is the target
853 of at least one goto that has no fixup. */
855 TREE_ADDRESSABLE (body) = 1;
861 /* Generate a jump with OPCODE to the given bytecode LABEL which is
862 found within BODY. */
865 bc_expand_goto_internal (opcode, label, body)
866 enum bytecode_opcode opcode;
867 struct bc_label *label;
870 struct nesting *block;
871 int stack_level = -1;
873 /* If the label is defined, adjust the stack as necessary.
874 If it's not defined, we have to push the reference on the fixup list. */
880 /* Find the innermost pending block that contains the label.
881 (Check containment by comparing bytecode uids.) Then restore the
882 outermost stack level within that block. */
884 for (block = block_stack; block; block = block->next)
886 if (BYTECODE_BC_LABEL (block->data.block.first_insn)->uid < label->uid)
888 if (block->data.block.bc_stack_level)
889 stack_level = block->data.block.bc_stack_level;
891 /* Execute the cleanups for blocks we are exiting. */
892 if (block->data.block.cleanups != 0)
894 expand_cleanups (block->data.block.cleanups, NULL_TREE);
895 do_pending_stack_adjust ();
899 /* Restore the stack level. If we need to adjust the stack, we
900 must do so after the jump, since the jump may depend on
901 what's on the stack. Thus, any stack-modifying conditional
902 jumps (these are the only ones that rely on what's on the
903 stack) go into the fixup list. */
906 && stack_depth != stack_level
909 bc_expand_fixup (opcode, label, stack_level);
912 if (stack_level >= 0)
913 bc_adjust_stack (stack_depth - stack_level);
915 if (body && DECL_BIT_FIELD (body))
916 error ("jump to `%s' invalidly jumps into binding contour",
917 IDENTIFIER_POINTER (DECL_NAME (body)));
919 /* Emit immediate jump */
920 bc_emit_bytecode (opcode);
921 bc_emit_bytecode_labelref (label);
923 #ifdef DEBUG_PRINT_CODE
924 fputc ('\n', stderr);
929 /* Put goto in the fixup list */
930 bc_expand_fixup (opcode, label, stack_level);
933 /* Generate if necessary a fixup for a goto
934 whose target label in tree structure (if any) is TREE_LABEL
935 and whose target in rtl is RTL_LABEL.
937 If LAST_INSN is nonzero, we pretend that the jump appears
938 after insn LAST_INSN instead of at the current point in the insn stream.
940 The fixup will be used later to insert insns just before the goto.
941 Those insns will restore the stack level as appropriate for the
942 target label, and will (in the case of C++) also invoke any object
943 destructors which have to be invoked when we exit the scopes which
944 are exited by the goto.
946 Value is nonzero if a fixup is made. */
949 expand_fixup (tree_label, rtl_label, last_insn)
954 struct nesting *block, *end_block;
956 /* See if we can recognize which block the label will be output in.
957 This is possible in some very common cases.
958 If we succeed, set END_BLOCK to that block.
959 Otherwise, set it to 0. */
962 && (rtl_label == cond_stack->data.cond.endif_label
963 || rtl_label == cond_stack->data.cond.next_label))
964 end_block = cond_stack;
965 /* If we are in a loop, recognize certain labels which
966 are likely targets. This reduces the number of fixups
967 we need to create. */
969 && (rtl_label == loop_stack->data.loop.start_label
970 || rtl_label == loop_stack->data.loop.end_label
971 || rtl_label == loop_stack->data.loop.continue_label))
972 end_block = loop_stack;
976 /* Now set END_BLOCK to the binding level to which we will return. */
980 struct nesting *next_block = end_block->all;
983 /* First see if the END_BLOCK is inside the innermost binding level.
984 If so, then no cleanups or stack levels are relevant. */
985 while (next_block && next_block != block)
986 next_block = next_block->all;
991 /* Otherwise, set END_BLOCK to the innermost binding level
992 which is outside the relevant control-structure nesting. */
993 next_block = block_stack->next;
994 for (block = block_stack; block != end_block; block = block->all)
995 if (block == next_block)
996 next_block = next_block->next;
997 end_block = next_block;
1000 /* Does any containing block have a stack level or cleanups?
1001 If not, no fixup is needed, and that is the normal case
1002 (the only case, for standard C). */
1003 for (block = block_stack; block != end_block; block = block->next)
1004 if (block->data.block.stack_level != 0
1005 || block->data.block.cleanups != 0)
1008 if (block != end_block)
1010 /* Ok, a fixup is needed. Add a fixup to the list of such. */
1011 struct goto_fixup *fixup
1012 = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
1013 /* In case an old stack level is restored, make sure that comes
1014 after any pending stack adjust. */
1015 /* ?? If the fixup isn't to come at the present position,
1016 doing the stack adjust here isn't useful. Doing it with our
1017 settings at that location isn't useful either. Let's hope
1020 do_pending_stack_adjust ();
1021 fixup->target = tree_label;
1022 fixup->target_rtl = rtl_label;
1024 /* Create a BLOCK node and a corresponding matched set of
1025 NOTE_INSN_BEGIN_BLOCK and NOTE_INSN_END_BLOCK notes at
1026 this point. The notes will encapsulate any and all fixup
1027 code which we might later insert at this point in the insn
1028 stream. Also, the BLOCK node will be the parent (i.e. the
1029 `SUPERBLOCK') of any other BLOCK nodes which we might create
1030 later on when we are expanding the fixup code. */
1033 register rtx original_before_jump
1034 = last_insn ? last_insn : get_last_insn ();
1038 fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
1039 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
1040 fixup->context = poplevel (1, 0, 0); /* Create the BLOCK node now! */
1042 emit_insns_after (fixup->before_jump, original_before_jump);
1045 fixup->block_start_count = block_start_count;
1046 fixup->stack_level = 0;
1047 fixup->cleanup_list_list
1048 = (((block->data.block.outer_cleanups
1050 && block->data.block.outer_cleanups != empty_cleanup_list
1053 || block->data.block.cleanups)
1054 ? tree_cons (NULL_TREE, block->data.block.cleanups,
1055 block->data.block.outer_cleanups)
1057 fixup->next = goto_fixup_chain;
1058 goto_fixup_chain = fixup;
1065 /* Generate bytecode jump with OPCODE to a fixup routine that links to LABEL.
1066 Make the fixup restore the stack level to STACK_LEVEL. */
1069 bc_expand_fixup (opcode, label, stack_level)
1070 enum bytecode_opcode opcode;
1071 struct bc_label *label;
1074 struct goto_fixup *fixup
1075 = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
1077 fixup->label = bc_get_bytecode_label ();
1078 fixup->bc_target = label;
1079 fixup->bc_stack_level = stack_level;
1080 fixup->bc_handled = FALSE;
1082 fixup->next = goto_fixup_chain;
1083 goto_fixup_chain = fixup;
1085 /* Insert a jump to the fixup code */
1086 bc_emit_bytecode (opcode);
1087 bc_emit_bytecode_labelref (fixup->label);
1089 #ifdef DEBUG_PRINT_CODE
1090 fputc ('\n', stderr);
1094 /* Expand any needed fixups in the outputmost binding level of the
1095 function. FIRST_INSN is the first insn in the function. */
1098 expand_fixups (first_insn)
1101 fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
1104 /* When exiting a binding contour, process all pending gotos requiring fixups.
1105 THISBLOCK is the structure that describes the block being exited.
1106 STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
1107 CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
1108 FIRST_INSN is the insn that began this contour.
1110 Gotos that jump out of this contour must restore the
1111 stack level and do the cleanups before actually jumping.
1113 DONT_JUMP_IN nonzero means report error if there is a jump into this
1114 contour from before the beginning of the contour.
1115 This is also done if STACK_LEVEL is nonzero. */
1118 fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
1119 struct nesting *thisblock;
1125 register struct goto_fixup *f, *prev;
1127 if (output_bytecode)
1129 /* ??? The second arg is the bc stack level, which is not the same
1130 as STACK_LEVEL. I have no idea what should go here, so I'll just pass 0. */
1132 bc_fixup_gotos (thisblock, 0, cleanup_list, first_insn, dont_jump_in);
1136 /* F is the fixup we are considering; PREV is the previous one. */
1137 /* We run this loop in two passes so that cleanups of exited blocks
1138 are run first, and blocks that are exited are marked so
1141 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1143 /* Test for a fixup that is inactive because it is already handled. */
1144 if (f->before_jump == 0)
1146 /* Delete inactive fixup from the chain, if that is easy to do. */
1148 prev->next = f->next;
1150 /* Has this fixup's target label been defined?
1151 If so, we can finalize it. */
1152 else if (PREV_INSN (f->target_rtl) != 0)
1154 register rtx cleanup_insns;
1156 /* Get the first non-label after the label
1157 this goto jumps to. If that's before this scope begins,
1158 we don't have a jump into the scope. */
1159 rtx after_label = f->target_rtl;
1160 while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
1161 after_label = NEXT_INSN (after_label);
1163 /* If this fixup jumped into this contour from before the beginning
1164 of this contour, report an error. */
1165 /* ??? Bug: this does not detect jumping in through intermediate
1166 blocks that have stack levels or cleanups.
1167 It detects only a problem with the innermost block
1168 around the label. */
1170 && (dont_jump_in || stack_level || cleanup_list)
1171 /* If AFTER_LABEL is 0, it means the jump goes to the end
1172 of the rtl, which means it jumps into this scope. */
1173 && (after_label == 0
1174 || INSN_UID (first_insn) < INSN_UID (after_label))
1175 && INSN_UID (first_insn) > INSN_UID (f->before_jump)
1176 && ! DECL_REGISTER (f->target))
1178 error_with_decl (f->target,
1179 "label `%s' used before containing binding contour");
1180 /* Prevent multiple errors for one label. */
1181 DECL_REGISTER (f->target) = 1;
1184 /* We will expand the cleanups into a sequence of their own and
1185 then later on we will attach this new sequence to the insn
1186 stream just ahead of the actual jump insn. */
1190 /* Temporarily restore the lexical context where we will
1191 logically be inserting the fixup code. We do this for the
1192 sake of getting the debugging information right. */
1195 set_block (f->context);
1197 /* Expand the cleanups for blocks this jump exits. */
1198 if (f->cleanup_list_list)
1201 for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
1202 /* Marked elements correspond to blocks that have been closed.
1203 Do their cleanups. */
1204 if (TREE_ADDRESSABLE (lists)
1205 && TREE_VALUE (lists) != 0)
1207 expand_cleanups (TREE_VALUE (lists), 0);
1208 /* Pop any pushes done in the cleanups,
1209 in case function is about to return. */
1210 do_pending_stack_adjust ();
1214 /* Restore stack level for the biggest contour that this
1215 jump jumps out of. */
1217 emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);
1219 /* Finish up the sequence containing the insns which implement the
1220 necessary cleanups, and then attach that whole sequence to the
1221 insn stream just ahead of the actual jump insn. Attaching it
1222 at that point insures that any cleanups which are in fact
1223 implicit C++ object destructions (which must be executed upon
1224 leaving the block) appear (to the debugger) to be taking place
1225 in an area of the generated code where the object(s) being
1226 destructed are still "in scope". */
1228 cleanup_insns = get_insns ();
1232 emit_insns_after (cleanup_insns, f->before_jump);
1239 /* Mark the cleanups of exited blocks so that they are executed
1240 by the code above. */
1241 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1242 if (f->before_jump != 0
1243 && PREV_INSN (f->target_rtl) == 0
1244 /* Label has still not appeared. If we are exiting a block with
1245 a stack level to restore, that started before the fixup,
1246 mark this stack level as needing restoration
1247 when the fixup is later finalized.
1248 Also mark the cleanup_list_list element for F
1249 that corresponds to this block, so that ultimately
1250 this block's cleanups will be executed by the code above. */
1252 /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared,
1253 it means the label is undefined. That's erroneous, but possible. */
1254 && (thisblock->data.block.block_start_count
1255 <= f->block_start_count))
1257 tree lists = f->cleanup_list_list;
1258 for (; lists; lists = TREE_CHAIN (lists))
1259 /* If the following elt. corresponds to our containing block
1260 then the elt. must be for this block. */
1261 if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
1262 TREE_ADDRESSABLE (lists) = 1;
1265 f->stack_level = stack_level;
1270 /* When exiting a binding contour, process all pending gotos requiring fixups.
1271 Note: STACK_DEPTH is not altered.
1273 The arguments are currently not used in the bytecode compiler, but we may
1274 need them one day for languages other than C.
1276 THISBLOCK is the structure that describes the block being exited.
1277 STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
1278 CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
1279 FIRST_INSN is the insn that began this contour.
1281 Gotos that jump out of this contour must restore the
1282 stack level and do the cleanups before actually jumping.
1284 DONT_JUMP_IN nonzero means report error if there is a jump into this
1285 contour from before the beginning of the contour.
1286 This is also done if STACK_LEVEL is nonzero. */
1289 bc_fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
1290 struct nesting *thisblock;
1296 register struct goto_fixup *f, *prev;
1297 int saved_stack_depth;
1299 /* F is the fixup we are considering; PREV is the previous one. */
1301 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1303 /* Test for a fixup that is inactive because it is already handled. */
1304 if (f->before_jump == 0)
1306 /* Delete inactive fixup from the chain, if that is easy to do. */
1308 prev->next = f->next;
1311 /* Emit code to restore the stack and continue */
1312 bc_emit_bytecode_labeldef (f->label);
1314 /* Save stack_depth across call, since bc_adjust_stack () will alter
1315 the perceived stack depth via the instructions generated. */
1317 if (f->bc_stack_level >= 0)
1319 saved_stack_depth = stack_depth;
1320 bc_adjust_stack (stack_depth - f->bc_stack_level);
1321 stack_depth = saved_stack_depth;
1324 bc_emit_bytecode (jump);
1325 bc_emit_bytecode_labelref (f->bc_target);
1327 #ifdef DEBUG_PRINT_CODE
1328 fputc ('\n', stderr);
1332 goto_fixup_chain = NULL;
1335 /* Generate RTL for an asm statement (explicit assembler code).
1336 BODY is a STRING_CST node containing the assembler code text,
1337 or an ADDR_EXPR containing a STRING_CST. */
1343 if (output_bytecode)
1345 error ("`asm' is illegal when generating bytecode");
1349 if (TREE_CODE (body) == ADDR_EXPR)
1350 body = TREE_OPERAND (body, 0);
1352 emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
1353 TREE_STRING_POINTER (body)));
1357 /* Generate RTL for an asm statement with arguments.
1358 STRING is the instruction template.
1359 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
1360 Each output or input has an expression in the TREE_VALUE and
1361 a constraint-string in the TREE_PURPOSE.
1362 CLOBBERS is a list of STRING_CST nodes each naming a hard register
1363 that is clobbered by this insn.
1365 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
1366 Some elements of OUTPUTS may be replaced with trees representing temporary
1367 values. The caller should copy those temporary values to the originally specified lvalues.
1370 VOL nonzero means the insn is volatile; don't optimize it. */
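/* Illustrative sketch, not part of GNU CC, of what arrives here for a
   GNU C statement such as

	asm volatile ("fsinx %1,%0" : "=f" (result) : "f" (angle) : "cc");

   STRING holds the template, OUTPUTS and INPUTS each hold one
   TREE_LIST element (constraint string in the TREE_PURPOSE, operand
   expression in the TREE_VALUE), CLOBBERS holds the STRING_CST "cc",
   and VOL is nonzero because of `volatile'.  */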
1373 expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
1374 tree string, outputs, inputs, clobbers;
1379 rtvec argvec, constraints;
1381 int ninputs = list_length (inputs);
1382 int noutputs = list_length (outputs);
1386 /* Vector of RTX's of evaluated output operands. */
1387 rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1388 /* The insn we have emitted. */
1391 if (output_bytecode)
1393 error ("`asm' is illegal when generating bytecode");
1397 /* Count the number of meaningful clobbered registers, ignoring what
1398 we would ignore later. */
1400 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1402 char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1403 i = decode_reg_name (regname);
1404 if (i >= 0 || i == -4)
1410 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1412 tree val = TREE_VALUE (tail);
1417 /* If there's an erroneous arg, emit no insn. */
1418 if (TREE_TYPE (val) == error_mark_node)
1421 /* Make sure constraint has `=' and does not have `+'. */
1424 for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
1426 if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
1428 error ("output operand constraint contains `+'");
1431 if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '=')
1436 error ("output operand constraint lacks `='");
1440 /* If an output operand is not a variable or indirect ref,
1442 create a SAVE_EXPR which is a pseudo-reg
1443 to act as an intermediate temporary.
1444 Make the asm insn write into that, then copy it to
1445 the real output operand. */
1447 while (TREE_CODE (val) == COMPONENT_REF
1448 || TREE_CODE (val) == ARRAY_REF)
1449 val = TREE_OPERAND (val, 0);
1451 if (TREE_CODE (val) != VAR_DECL
1452 && TREE_CODE (val) != PARM_DECL
1453 && TREE_CODE (val) != INDIRECT_REF)
1455 TREE_VALUE (tail) = save_expr (TREE_VALUE (tail));
1456 /* If it's a constant, print error now so don't crash later. */
1457 if (TREE_CODE (TREE_VALUE (tail)) != SAVE_EXPR)
1459 error ("invalid output in `asm'");
1464 output_rtx[i] = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
1467 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1469 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1473 /* Make vectors for the expression-rtx and constraint strings. */
1475 argvec = rtvec_alloc (ninputs);
1476 constraints = rtvec_alloc (ninputs);
1478 body = gen_rtx (ASM_OPERANDS, VOIDmode,
1479 TREE_STRING_POINTER (string), "", 0, argvec, constraints,
1481 MEM_VOLATILE_P (body) = vol;
1483 /* Eval the inputs and put them into ARGVEC.
1484 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1487 for (tail = inputs; tail; tail = TREE_CHAIN (tail))
1491 /* If there's an erroneous arg, emit no insn,
1492 because the ASM_INPUT would get VOIDmode
1493 and that could cause a crash in reload. */
1494 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1496 if (TREE_PURPOSE (tail) == NULL_TREE)
1498 error ("hard register `%s' listed as input operand to `asm'",
1499 TREE_STRING_POINTER (TREE_VALUE (tail)) );
1503 /* Make sure constraint has neither `=' nor `+'. */
1505 for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
1506 if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '='
1507 || TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
1509 error ("input operand constraint contains `%c'",
1510 TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
1514 XVECEXP (body, 3, i) /* argvec */
1515 = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
1516 XVECEXP (body, 4, i) /* constraints */
1517 = gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1518 TREE_STRING_POINTER (TREE_PURPOSE (tail)));
1522 /* Protect all the operands from the queue,
1523 now that they have all been evaluated. */
1525 for (i = 0; i < ninputs; i++)
1526 XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);
1528 for (i = 0; i < noutputs; i++)
1529 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1531 /* Now, for each output, construct an rtx
1532 (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
1533 ARGVEC CONSTRAINTS))
1534 If there is more than one, put them inside a PARALLEL. */
1536 if (noutputs == 1 && nclobbers == 0)
1538 XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
1539 insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));
1541 else if (noutputs == 0 && nclobbers == 0)
1543 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1544 insn = emit_insn (body);
1550 if (num == 0) num = 1;
1551 body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));
1553 /* For each output operand, store a SET. */
1555 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1557 XVECEXP (body, 0, i)
1558 = gen_rtx (SET, VOIDmode,
1560 gen_rtx (ASM_OPERANDS, VOIDmode,
1561 TREE_STRING_POINTER (string),
1562 TREE_STRING_POINTER (TREE_PURPOSE (tail)),
1563 i, argvec, constraints,
1565 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1568 /* If there are no outputs (but there are some clobbers)
1569 store the bare ASM_OPERANDS into the PARALLEL. */
1572 XVECEXP (body, 0, i++) = obody;
1574 /* Store (clobber REG) for each clobbered register specified. */
1576 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1578 char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1579 int j = decode_reg_name (regname);
1583 if (j == -3) /* `cc', which is not a register */
1586 if (j == -4) /* `memory', don't cache memory across asm */
1588 XVECEXP (body, 0, i++)
1589 = gen_rtx (CLOBBER, VOIDmode,
1590 gen_rtx (MEM, QImode,
1591 gen_rtx (SCRATCH, VOIDmode, 0)));
1595 error ("unknown register name `%s' in `asm'", regname);
1599 /* Use QImode since that's guaranteed to clobber just one reg. */
1600 XVECEXP (body, 0, i++)
1601 = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));
1604 insn = emit_insn (body);
1610 /* Generate RTL to evaluate the expression EXP
1611 and remember it in case this is the VALUE in a ({... VALUE; }) constr. */
1614 expand_expr_stmt (exp)
1617 if (output_bytecode)
1619 int org_stack_depth = stack_depth;
1621 bc_expand_expr (exp);
1623 /* Restore stack depth */
1624 if (stack_depth < org_stack_depth)
1627 bc_emit_instruction (drop);
1629 last_expr_type = TREE_TYPE (exp);
1633 /* If -W, warn about statements with no side effects,
1634 except for an explicit cast to void (e.g. for assert()), and
1635 except inside a ({...}) where they may be useful. */
1636 if (expr_stmts_for_value == 0 && exp != error_mark_node)
1638 if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
1639 && !(TREE_CODE (exp) == CONVERT_EXPR
1640 && TREE_TYPE (exp) == void_type_node))
1641 warning_with_file_and_line (emit_filename, emit_lineno,
1642 "statement with no effect");
1643 else if (warn_unused)
1644 warn_if_unused_value (exp);
1646 last_expr_type = TREE_TYPE (exp);
1647 if (! flag_syntax_only)
1648 last_expr_value = expand_expr (exp,
1649 (expr_stmts_for_value
1650 ? NULL_RTX : const0_rtx),
1653 /* If all we do is reference a volatile value in memory,
1654 copy it to a register to be sure it is actually touched. */
1655 if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
1656 && TREE_THIS_VOLATILE (exp))
1658 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
1660 else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
1661 copy_to_reg (last_expr_value);
1664 rtx lab = gen_label_rtx ();
1666 /* Compare the value with itself to reference it. */
1667 emit_cmp_insn (last_expr_value, last_expr_value, EQ,
1668 expand_expr (TYPE_SIZE (last_expr_type),
1669 NULL_RTX, VOIDmode, 0),
1671 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
1672 emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
1677 /* If this expression is part of a ({...}) and is in memory, we may have
1678 to preserve temporaries. */
1679 preserve_temp_slots (last_expr_value);
1681 /* Free any temporaries used to evaluate this expression. Any temporary
1682 used as a result of this expression will already have been preserved
1689 /* Warn if EXP contains any computations whose results are not used.
1690 Return 1 if a warning is printed; 0 otherwise. */
1693 warn_if_unused_value (exp)
1696 if (TREE_USED (exp))
1699 switch (TREE_CODE (exp))
1701 case PREINCREMENT_EXPR:
1702 case POSTINCREMENT_EXPR:
1703 case PREDECREMENT_EXPR:
1704 case POSTDECREMENT_EXPR:
1709 case METHOD_CALL_EXPR:
1711 case WITH_CLEANUP_EXPR:
1713 /* We don't warn about COND_EXPR because it may be a useful
1714 construct if either arm contains a side effect. */
1719 /* For a binding, warn if no side effect within it. */
1720 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1722 case TRUTH_ORIF_EXPR:
1723 case TRUTH_ANDIF_EXPR:
1724 /* In && or ||, warn if 2nd operand has no side effect. */
1725 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1728 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
1730 /* Let people do `(foo (), 0)' without a warning. */
1731 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1733 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1737 case NON_LVALUE_EXPR:
1738 /* Don't warn about values cast to void. */
1739 if (TREE_TYPE (exp) == void_type_node)
1741 /* Don't warn about conversions not explicit in the user's program. */
1742 if (TREE_NO_UNUSED_WARNING (exp))
1744 /* Assignment to a cast usually results in a cast of a modify.
1745 Don't complain about that. There can be an arbitrary number of
1746 casts before the modify, so we must loop until we find the first
1747 non-cast expression and then test to see if that is a modify. */
1749 tree tem = TREE_OPERAND (exp, 0);
1751 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
1752 tem = TREE_OPERAND (tem, 0);
1754 if (TREE_CODE (tem) == MODIFY_EXPR)
1757 /* ... fall through ... */
1760 /* Referencing a volatile value is a side effect, so don't warn. */
1761 if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
1762 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1763 && TREE_THIS_VOLATILE (exp))
1765 warning_with_file_and_line (emit_filename, emit_lineno,
1766 "value computed is not used");
1771 /* Clear out the memory of the last expression evaluated. */
1779 /* Begin a statement which will return a value.
1780 Return the RTL_EXPR for this statement expr.
1781 The caller must save that value and pass it to expand_end_stmt_expr. */
1784 expand_start_stmt_expr ()
1789 /* When generating bytecode just note down the stack depth */
1790 if (output_bytecode)
1791 return (build_int_2 (stack_depth, 0));
1793 /* Make the RTL_EXPR node temporary, not momentary,
1794 so that rtl_expr_chain doesn't become garbage. */
1795 momentary = suspend_momentary ();
1796 t = make_node (RTL_EXPR);
1797 resume_momentary (momentary);
1798 start_sequence_for_rtl_expr (t);
1800 expr_stmts_for_value++;
1804 /* Restore the previous state at the end of a statement that returns a value.
1805 Returns a tree node representing the statement's value and the
1806 insns to compute the value.
1808 The nodes of that expression have been freed by now, so we cannot use them.
1809 But we don't want to do that anyway; the expression has already been
1810 evaluated and now we just want to use the value. So generate a RTL_EXPR
1811 with the proper type and RTL value.
1813 If the last substatement was not an expression,
1814 return something with type `void'. */
1817 expand_end_stmt_expr (t)
1820 if (output_bytecode)
1826 /* At this point, all expressions have been evaluated in order.
1827 However, all expression values have been popped when evaluated,
1828 which means we have to recover the last expression value. This is
1829 the last value removed by means of a `drop' instruction. Instead
1830 of adding code to inhibit dropping the last expression value, it
1831 is here recovered by undoing the `drop'. Since `drop' is
1832 equivalent to `adjstackSI [1]', it can be undone with `adjstackSI [-1]'. */
1835 bc_adjust_stack (-1);
1837 if (!last_expr_type)
1838 last_expr_type = void_type_node;
1840 t = make_node (RTL_EXPR);
1841 TREE_TYPE (t) = last_expr_type;
1842 RTL_EXPR_RTL (t) = NULL;
1843 RTL_EXPR_SEQUENCE (t) = NULL;
1845 /* Don't consider deleting this expr or containing exprs at tree level. */
1846 TREE_THIS_VOLATILE (t) = 1;
1854 if (last_expr_type == 0)
1856 last_expr_type = void_type_node;
1857 last_expr_value = const0_rtx;
1859 else if (last_expr_value == 0)
1860 /* There are some cases where this can happen, such as when the
1861 statement is void type. */
1862 last_expr_value = const0_rtx;
1863 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
1864 /* Remove any possible QUEUED. */
1865 last_expr_value = protect_from_queue (last_expr_value, 0);
1869 TREE_TYPE (t) = last_expr_type;
1870 RTL_EXPR_RTL (t) = last_expr_value;
1871 RTL_EXPR_SEQUENCE (t) = get_insns ();
1873 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
1877 /* Don't consider deleting this expr or containing exprs at tree level. */
1878 TREE_SIDE_EFFECTS (t) = 1;
1879 /* Propagate volatility of the actual RTL expr. */
1880 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
1883 expr_stmts_for_value--;
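/* Illustrative sketch, not part of GNU CC: for a GNU C statement
   expression such as

	x = ({ int t = f (); t + 1; });

   the front end calls expand_start_stmt_expr before expanding the
   braced body (so expr_stmts_for_value is incremented and each
   expression statement records its value), then hands the node it got
   back to expand_end_stmt_expr; the RTL_EXPR returned carries the
   value of the last expression, `t + 1'.  */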
1888 /* The exception handling nesting looks like this:

1891      {  <--  exception handler block
1893           <--  in an exception handler
1895        :  <--  in a TRY block
1896        :  <--  in an exception handler

1901        :  <--  in an except block
1902        :  <--  in an exception handler  */
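/* Illustrative sketch, not part of GNU CC: a language front end using
   these contours would bracket a try/except construct roughly as
   follows (the flag arguments shown are illustrative):

	expand_start_try (try_clause, 0, 1);
	  ... expand the body of the TRY block here ...
	expand_end_try ();
	expand_start_except (1, 0);
	  ... expand the exception handlers here ...
	unhandled = expand_end_except ();

   expand_end_except returns the list of exceptions raised in the
   contour but not handled, for the front end to deal with.  */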
1909 /* Return nonzero iff in a try block at level LEVEL. */
1912 in_try_block (level)
1915 struct nesting *n = except_stack;
1918 while (n && n->data.except_stmt.after_label != 0)
1929 /* Return nonzero iff in an except block at level LEVEL. */
1932 in_except_block (level)
1935 struct nesting *n = except_stack;
1938 while (n && n->data.except_stmt.after_label == 0)
1949 /* Return nonzero iff in an exception handler at level LEVEL. */
1952 in_exception_handler (level)
1955 struct nesting *n = except_stack;
1956 while (n && level--)
1961 /* Record the fact that the current exception nesting raises
1962 exception EX. If not in an exception handler, return 0. */
1969 if (except_stack == 0)
1971 raises_ptr = &except_stack->data.except_stmt.raised;
1972 if (! value_member (ex, *raises_ptr))
1973 *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
1977 /* Generate RTL for the start of a try block.
1979 TRY_CLAUSE is the condition to test to enter the try block. */
1982 expand_start_try (try_clause, exitflag, escapeflag)
1987 struct nesting *thishandler = ALLOC_NESTING ();
1989 /* Make an entry on cond_stack for the cond we are entering. */
1991 thishandler->next = except_stack;
1992 thishandler->all = nesting_stack;
1993 thishandler->depth = ++nesting_depth;
1994 thishandler->data.except_stmt.raised = 0;
1995 thishandler->data.except_stmt.handled = 0;
1996 thishandler->data.except_stmt.first_insn = get_insns ();
1997 thishandler->data.except_stmt.except_label = gen_label_rtx ();
1998 thishandler->data.except_stmt.unhandled_label = 0;
1999 thishandler->data.except_stmt.after_label = 0;
2000 thishandler->data.except_stmt.escape_label
2001 = escapeflag ? thishandler->data.except_stmt.except_label : 0;
2002 thishandler->exit_label = exitflag ? gen_label_rtx () : 0;
2003 except_stack = thishandler;
2004 nesting_stack = thishandler;
2006 do_jump (try_clause, thishandler->data.except_stmt.except_label, NULL_RTX);
2009 /* End of a TRY block. Nothing to do for now. */
2014 except_stack->data.except_stmt.after_label = gen_label_rtx ();
2015 expand_goto_internal (NULL_TREE, except_stack->data.except_stmt.after_label,
2019 /* Start an `except' nesting contour.
2020 EXITFLAG says whether this contour should be able to `exit' something.
2021 ESCAPEFLAG says whether this contour should be escapable. */
2024 expand_start_except (exitflag, escapeflag)
2031 /* An `exit' from catch clauses goes out to next exit level,
2032 if there is one. Otherwise, it just goes to the end
2033 of the construct. */
2034 for (n = except_stack->next; n; n = n->next)
2035 if (n->exit_label != 0)
2037 except_stack->exit_label = n->exit_label;
2041 except_stack->exit_label = except_stack->data.except_stmt.after_label;
2046 /* An `escape' from catch clauses goes out to next escape level,
2047 if there is one. Otherwise, it just goes to the end
2048 of the construct. */
2049 for (n = except_stack->next; n; n = n->next)
2050 if (n->data.except_stmt.escape_label != 0)
2052 except_stack->data.except_stmt.escape_label
2053 = n->data.except_stmt.escape_label;
2057 except_stack->data.except_stmt.escape_label
2058 = except_stack->data.except_stmt.after_label;
2060 do_pending_stack_adjust ();
2061 emit_label (except_stack->data.except_stmt.except_label);
2064 /* Generate code to `escape' from an exception contour. This
2065 is like `exiting', but does not conflict with constructs which
2068 Return nonzero if this contour is escapable, otherwise
2069 return zero, and language-specific code will emit the
2070 appropriate error message. */
2072 expand_escape_except ()
2076 for (n = except_stack; n; n = n->next)
2077 if (n->data.except_stmt.escape_label != 0)
2079 expand_goto_internal (NULL_TREE,
2080 n->data.except_stmt.escape_label, NULL_RTX);
2087 /* Finish processing an `except' contour.
2088 Culls out all exceptions which might be raised but not
2089 handled, and returns the list to the caller.
2090 Language-specific code is responsible for dealing with these
2094 expand_end_except ()
2097 tree raised = NULL_TREE;
2099 do_pending_stack_adjust ();
2100 emit_label (except_stack->data.except_stmt.after_label);
2102 n = except_stack->next;
2105 /* Propagate exceptions raised but not handled to next
2107 tree handled = except_stack->data.except_stmt.raised;
2108 if (handled != void_type_node)
2110 tree prev = NULL_TREE;
2111 raised = except_stack->data.except_stmt.raised;
2115 for (this_raise = raised, prev = 0; this_raise;
2116 this_raise = TREE_CHAIN (this_raise))
2118 if (value_member (TREE_VALUE (this_raise), handled))
2121 TREE_CHAIN (prev) = TREE_CHAIN (this_raise);
2124 raised = TREE_CHAIN (raised);
2125 if (raised == NULL_TREE)
2132 handled = TREE_CHAIN (handled);
2134 if (prev == NULL_TREE)
2137 TREE_CHAIN (prev) = n->data.except_stmt.raised;
2139 n->data.except_stmt.raised = raised;
2143 POPSTACK (except_stack);
2148 /* Record that exception EX is caught by this exception handler.
2149 Return nonzero if in exception handling construct, otherwise return 0. */
2156 if (except_stack == 0)
2158 raises_ptr = &except_stack->data.except_stmt.handled;
2159 if (*raises_ptr != void_type_node
2161 && ! value_member (ex, *raises_ptr))
2162 *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
2166 /* Record that this exception handler catches all exceptions.
2167 Return nonzero if in exception handling construct, otherwise return 0. */
2170 expand_catch_default ()
2172 if (except_stack == 0)
2174 except_stack->data.except_stmt.handled = void_type_node;
2181 if (except_stack == 0 || except_stack->data.except_stmt.after_label == 0)
2183 expand_goto_internal (NULL_TREE, except_stack->data.except_stmt.after_label,
2188 /* Generate RTL for the start of an if-then. COND is the expression
2189 whose truth should be tested.
2191 If EXITFLAG is nonzero, this conditional is visible to
2192 `exit_something'. */
2195 expand_start_cond (cond, exitflag)
2199 struct nesting *thiscond = ALLOC_NESTING ();
2201 /* Make an entry on cond_stack for the cond we are entering. */
2203 thiscond->next = cond_stack;
2204 thiscond->all = nesting_stack;
2205 thiscond->depth = ++nesting_depth;
2206 thiscond->data.cond.next_label = gen_label_rtx ();
2207 /* Before we encounter an `else', we don't need a separate exit label
2208 unless there are supposed to be exit statements
2209 to exit this conditional. */
2210 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2211 thiscond->data.cond.endif_label = thiscond->exit_label;
2212 cond_stack = thiscond;
2213 nesting_stack = thiscond;
2215 if (output_bytecode)
2216 bc_expand_start_cond (cond, exitflag);
2218 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2221 /* Generate RTL between then-clause and the elseif-clause
2222 of an if-then-elseif-.... */
2225 expand_start_elseif (cond)
2228 if (cond_stack->data.cond.endif_label == 0)
2229 cond_stack->data.cond.endif_label = gen_label_rtx ();
2230 emit_jump (cond_stack->data.cond.endif_label);
2231 emit_label (cond_stack->data.cond.next_label);
2232 cond_stack->data.cond.next_label = gen_label_rtx ();
2233 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2236 /* Generate RTL between the then-clause and the else-clause
2237 of an if-then-else. */
2240 expand_start_else ()
2242 if (cond_stack->data.cond.endif_label == 0)
2243 cond_stack->data.cond.endif_label = gen_label_rtx ();
2245 if (output_bytecode)
2247 bc_expand_start_else ();
2251 emit_jump (cond_stack->data.cond.endif_label);
2252 emit_label (cond_stack->data.cond.next_label);
2253 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2256 /* Generate RTL for the end of an if-then.
2257 Pop the record for it off of cond_stack. */
2262 struct nesting *thiscond = cond_stack;
2264 if (output_bytecode)
2265 bc_expand_end_cond ();
2268 do_pending_stack_adjust ();
2269 if (thiscond->data.cond.next_label)
2270 emit_label (thiscond->data.cond.next_label);
2271 if (thiscond->data.cond.endif_label)
2272 emit_label (thiscond->data.cond.endif_label);
2275 POPSTACK (cond_stack);
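/* Illustrative sketch only (hence #if 0, it is not compiled): roughly how
   a front end might drive the routines above for an if-then-else.  COND is
   a hypothetical condition tree; the then- and else-clauses stand for
   whatever statement expansion the front end performs.  An `else if' arm
   would call expand_start_elseif instead of expand_start_else.  */
#if 0
static void
example_expand_if_then_else (cond)
     tree cond;
{
  expand_start_cond (cond, 0);	/* emit the test; false branch skips the then-clause */
  /* ... expand the then-clause here ... */
  expand_start_else ();		/* jump to endif, start the else-clause */
  /* ... expand the else-clause here ... */
  expand_end_cond ();		/* emit next/endif labels and pop cond_stack */
}
#endif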
2280 /* Generate code for the start of an if-then. COND is the expression
2281 whose truth is to be tested; if EXITFLAG is nonzero this conditional
2282 is to be visible to exit_something. It is assumed that the caller
2283 has pushed the previous context on the cond stack. */
2286 bc_expand_start_cond (cond, exitflag)
2290 struct nesting *thiscond = cond_stack;
2292 thiscond->data.case_stmt.nominal_type = cond;
2294 thiscond->exit_label = gen_label_rtx ();
2295 bc_expand_expr (cond);
2296 bc_emit_bytecode (xjumpifnot);
2297 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));
2299 #ifdef DEBUG_PRINT_CODE
2300 fputc ('\n', stderr);
2304 /* Generate the label for the end of an if with
2308 bc_expand_end_cond ()
2310 struct nesting *thiscond = cond_stack;
2312 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->exit_label));
2315 /* Generate code for the start of the else-clause of
2319 bc_expand_start_else ()
2321 struct nesting *thiscond = cond_stack;
2323 thiscond->data.cond.endif_label = thiscond->exit_label;
2324 thiscond->exit_label = gen_label_rtx ();
2325 bc_emit_bytecode (jump);
2326 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));
2328 #ifdef DEBUG_PRINT_CODE
2329 fputc ('\n', stderr);
2332 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->data.cond.endif_label));
2335 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2336 loop should be exited by `exit_something'. This is a loop for which
2337 `expand_continue' will jump to the top of the loop.
2339 Make an entry on loop_stack to record the labels associated with
2343 expand_start_loop (exit_flag)
2346 register struct nesting *thisloop = ALLOC_NESTING ();
2348 /* Make an entry on loop_stack for the loop we are entering. */
2350 thisloop->next = loop_stack;
2351 thisloop->all = nesting_stack;
2352 thisloop->depth = ++nesting_depth;
2353 thisloop->data.loop.start_label = gen_label_rtx ();
2354 thisloop->data.loop.end_label = gen_label_rtx ();
2355 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2356 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2357 loop_stack = thisloop;
2358 nesting_stack = thisloop;
2360 if (output_bytecode)
2362 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));
2366 do_pending_stack_adjust ();
2368 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
2369 emit_label (thisloop->data.loop.start_label);
2374 /* Like expand_start_loop but for a loop where the continuation point
2375 (for expand_continue_loop) will be specified explicitly. */
2378 expand_start_loop_continue_elsewhere (exit_flag)
2381 struct nesting *thisloop = expand_start_loop (exit_flag);
2382 loop_stack->data.loop.continue_label = gen_label_rtx ();
2386 /* Specify the continuation point for a loop started with
2387 expand_start_loop_continue_elsewhere.
2388 Use this at the point in the code to which a continue statement
2392 expand_loop_continue_here ()
2394 if (output_bytecode)
2396 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (loop_stack->data.loop.continue_label));
2399 do_pending_stack_adjust ();
2400 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
2401 emit_label (loop_stack->data.loop.continue_label);
2407 bc_expand_end_loop ()
2409 struct nesting *thisloop = loop_stack;
2411 bc_emit_bytecode (jump);
2412 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));
2414 #ifdef DEBUG_PRINT_CODE
2415 fputc ('\n', stderr);
2418 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->exit_label));
2419 POPSTACK (loop_stack);
2424 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2425 Pop the block off of loop_stack. */
2431 register rtx start_label;
2432 rtx last_test_insn = 0;
2435 if (output_bytecode)
2437 bc_expand_end_loop ();
2441 insn = get_last_insn ();
2442 start_label = loop_stack->data.loop.start_label;
2444 /* Mark the continue-point at the top of the loop if none elsewhere. */
2445 if (start_label == loop_stack->data.loop.continue_label)
2446 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2448 do_pending_stack_adjust ();
2450 /* If optimizing, perhaps reorder the loop. If the loop
2451 starts with a conditional exit, roll that to the end
2452 where it will optimize together with the jump back.
2454 We look for the last conditional branch to the exit that we encounter
2455 before hitting 30 insns or a CALL_INSN. If we see an unconditional
2456 branch to the exit first, use it.
2458 We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
2459 because moving them is not valid. */
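/* The intended effect, as a rough sketch (not literal RTL): a loop laid
   out as
	TOP:  if (! COND) goto END;  BODY;  goto TOP;  END:
   is reordered into
	goto TEST;  TOP:  BODY;  TEST:  if (! COND) goto END;  goto TOP;  END:
   so that jump optimization can fold the exit test into the branch back,
   leaving a single `if (COND) goto TOP' at the bottom of the loop.  */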
2463 ! (GET_CODE (insn) == JUMP_INSN
2464 && GET_CODE (PATTERN (insn)) == SET
2465 && SET_DEST (PATTERN (insn)) == pc_rtx
2466 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2468 /* Scan insns from the top of the loop looking for a qualified
2469 conditional exit. */
2470 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2471 insn = NEXT_INSN (insn))
2473 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
2476 if (GET_CODE (insn) == NOTE
2477 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2478 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2481 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2484 if (last_test_insn && num_insns > 30)
2487 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
2488 && SET_DEST (PATTERN (insn)) == pc_rtx
2489 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
2490 && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
2491 && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
2492 == loop_stack->data.loop.end_label))
2493 || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
2494 && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
2495 == loop_stack->data.loop.end_label))))
2496 last_test_insn = insn;
2498 if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
2499 && GET_CODE (PATTERN (insn)) == SET
2500 && SET_DEST (PATTERN (insn)) == pc_rtx
2501 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
2502 && (XEXP (SET_SRC (PATTERN (insn)), 0)
2503 == loop_stack->data.loop.end_label))
2504 /* Include BARRIER. */
2505 last_test_insn = NEXT_INSN (insn);
2508 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2510 /* We found one. Move everything from there up
2511 to the end of the loop, and add a jump into the loop
2512 to jump to there. */
2513 register rtx newstart_label = gen_label_rtx ();
2514 register rtx start_move = start_label;
2516 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2517 then we want to move this note also. */
2518 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2519 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2520 == NOTE_INSN_LOOP_CONT))
2521 start_move = PREV_INSN (start_move);
2523 emit_label_after (newstart_label, PREV_INSN (start_move));
2524 reorder_insns (start_move, last_test_insn, get_last_insn ());
2525 emit_jump_insn_after (gen_jump (start_label),
2526 PREV_INSN (newstart_label));
2527 emit_barrier_after (PREV_INSN (newstart_label));
2528 start_label = newstart_label;
2532 emit_jump (start_label);
2533 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2534 emit_label (loop_stack->data.loop.end_label);
2536 POPSTACK (loop_stack);
2541 /* Generate a jump to the current loop's continue-point.
2542 This is usually the top of the loop, but may be specified
2543 explicitly elsewhere. If not currently inside a loop,
2544 return 0 and do nothing; caller will print an error message. */
2547 expand_continue_loop (whichloop)
2548 struct nesting *whichloop;
2552 whichloop = loop_stack;
2555 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2560 /* Generate a jump to exit the current loop. If not currently inside a loop,
2561 return 0 and do nothing; caller will print an error message. */
2564 expand_exit_loop (whichloop)
2565 struct nesting *whichloop;
2569 whichloop = loop_stack;
2572 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2576 /* Generate a conditional jump to exit the current loop if COND
2577 evaluates to zero. If not currently inside a loop,
2578 return 0 and do nothing; caller will print an error message. */
2581 expand_exit_loop_if_false (whichloop, cond)
2582 struct nesting *whichloop;
2587 whichloop = loop_stack;
2590 if (output_bytecode)
2592 bc_expand_expr (cond);
2593 bc_expand_goto_internal (xjumpifnot,
2594 BYTECODE_BC_LABEL (whichloop->exit_label),
2598 do_jump (cond, whichloop->data.loop.end_label, NULL_RTX);
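/* Illustrative sketch only (hence #if 0, it is not compiled): roughly how
   a front end might expand `for (; COND; INCR) BODY' with the loop
   routines above.  COND is a hypothetical tree; BODY and INCR stand for
   whatever expansion the front end does for them.  Passing 0 for the
   WHICHLOOP arguments means "the innermost loop".  */
#if 0
static void
example_expand_for (cond)
     tree cond;
{
  expand_start_loop_continue_elsewhere (1);  /* `continue' will jump to INCR */
  expand_exit_loop_if_false (0, cond);	     /* leave the loop when COND is false */
  /* ... expand BODY here; a `continue' calls expand_continue_loop (0) ... */
  expand_loop_continue_here ();		     /* the explicit continue-point */
  /* ... expand INCR here ... */
  expand_end_loop ();			     /* jump back to the top, emit the exit label */
}
#endif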
2603 /* Return non-zero if we should preserve sub-expressions as separate
2604 pseudos. We never do so if we aren't optimizing. We always do so
2605 if -fexpensive-optimizations.
2607 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2608 the loop may still be a small one. */
2611 preserve_subexpressions_p ()
2615 if (flag_expensive_optimizations)
2618 if (optimize == 0 || loop_stack == 0)
2621 insn = get_last_insn_anywhere ();
2624 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2625 < n_non_fixed_regs * 3));
2629 /* Generate a jump to exit the current loop, conditional, binding contour
2630 or case statement. Not all such constructs are visible to this function,
2631 only those started with EXIT_FLAG nonzero. Individual languages use
2632 the EXIT_FLAG parameter to control which kinds of constructs you can
2635 If not currently inside anything that can be exited,
2636 return 0 and do nothing; caller will print an error message. */
2639 expand_exit_something ()
2643 for (n = nesting_stack; n; n = n->all)
2644 if (n->exit_label != 0)
2646 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2653 /* Generate RTL to return from the current function, with no value.
2654 (That is, we do not do anything about returning any value.) */
2657 expand_null_return ()
2659 struct nesting *block = block_stack;
2662 if (output_bytecode)
2664 bc_emit_instruction (ret);
2668 /* Does any pending block have cleanups? */
2670 while (block && block->data.block.cleanups == 0)
2671 block = block->next;
2673 /* If yes, use a goto to return, since that runs cleanups. */
2675 expand_null_return_1 (last_insn, block != 0);
2678 /* Generate RTL to return from the current function, with value VAL. */
2681 expand_value_return (val)
2684 struct nesting *block = block_stack;
2685 rtx last_insn = get_last_insn ();
2686 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2688 /* Copy the value to the return location
2689 unless it's already there. */
2691 if (return_reg != val)
2693 #ifdef PROMOTE_FUNCTION_RETURN
2694 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2695 int unsignedp = TREE_UNSIGNED (type);
2696 enum machine_mode mode
2697 = promote_mode (type, DECL_MODE (DECL_RESULT (current_function_decl)),
2700 if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
2701 convert_move (return_reg, val, unsignedp);
2704 emit_move_insn (return_reg, val);
2706 if (GET_CODE (return_reg) == REG
2707 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2708 emit_insn (gen_rtx (USE, VOIDmode, return_reg));
2710 /* Does any pending block have cleanups? */
2712 while (block && block->data.block.cleanups == 0)
2713 block = block->next;
2715 /* If yes, use a goto to return, since that runs cleanups.
2716 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2718 expand_null_return_1 (last_insn, block != 0);
2721 /* Output a return with no value. If LAST_INSN is nonzero,
2722 pretend that the return takes place after LAST_INSN.
2723 If USE_GOTO is nonzero then don't use a return instruction;
2724 go to the return label instead. This causes any cleanups
2725 of pending blocks to be executed normally. */
2728 expand_null_return_1 (last_insn, use_goto)
2732 rtx end_label = cleanup_label ? cleanup_label : return_label;
2734 clear_pending_stack_adjust ();
2735 do_pending_stack_adjust ();
2738 /* PCC-struct return always uses an epilogue. */
2739 if (current_function_returns_pcc_struct || use_goto)
2742 end_label = return_label = gen_label_rtx ();
2743 expand_goto_internal (NULL_TREE, end_label, last_insn);
2747 /* Otherwise output a simple return-insn if one is available,
2748 unless it won't do the job. */
2750 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2752 emit_jump_insn (gen_return ());
2758 /* Otherwise jump to the epilogue. */
2759 expand_goto_internal (NULL_TREE, end_label, last_insn);
2762 /* Generate RTL to evaluate the expression RETVAL and return it
2763 from the current function. */
2766 expand_return (retval)
2769 /* If there are any cleanups to be performed, then they will
2770 be inserted following LAST_INSN. It is desirable
2771 that the last_insn, for such purposes, should be the
2772 last insn before computing the return value. Otherwise, cleanups
2773 which call functions can clobber the return value. */
2774 /* ??? rms: I think that is erroneous, because in C++ it would
2775 run destructors on variables that might be used in the subsequent
2776 computation of the return value. */
2778 register rtx val = 0;
2782 struct nesting *block;
2784 /* Bytecode returns are quite simple: just leave the result on the
2785 arithmetic stack. */
2786 if (output_bytecode)
2788 bc_expand_expr (retval);
2789 bc_emit_instruction (ret);
2793 /* If function wants no value, give it none. */
2794 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2796 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2798 expand_null_return ();
2802 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2803 cleanups = any_pending_cleanups (1);
2805 if (TREE_CODE (retval) == RESULT_DECL)
2806 retval_rhs = retval;
2807 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2808 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2809 retval_rhs = TREE_OPERAND (retval, 1);
2810 else if (TREE_TYPE (retval) == void_type_node)
2811 /* Recognize tail-recursive call to void function. */
2812 retval_rhs = retval;
2814 retval_rhs = NULL_TREE;
2816 /* Only use `last_insn' if there are cleanups which must be run. */
2817 if (cleanups || cleanup_label != 0)
2818 last_insn = get_last_insn ();
2820 /* Distribute return down conditional expr if either of the sides
2821 may involve tail recursion (see test below). This enhances the number
2822 of tail recursions we see. Don't do this always since it can produce
2823 sub-optimal code in some cases and we distribute assignments into
2824 conditional expressions when it would help. */
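/* For example (a sketch): for `return COND ? foo () : bar ();' this emits
   the equivalent of
	if (! COND) goto L;  return foo ();  L:  return bar ();
   so that each arm can be recognized as a tail call on its own.  */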
2826 if (optimize && retval_rhs != 0
2827 && frame_offset == 0
2828 && TREE_CODE (retval_rhs) == COND_EXPR
2829 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2830 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2832 rtx label = gen_label_rtx ();
2835 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2836 expr = build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2837 DECL_RESULT (current_function_decl),
2838 TREE_OPERAND (retval_rhs, 1));
2839 TREE_SIDE_EFFECTS (expr) = 1;
2840 expand_return (expr);
2843 expr = build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2844 DECL_RESULT (current_function_decl),
2845 TREE_OPERAND (retval_rhs, 2));
2846 TREE_SIDE_EFFECTS (expr) = 1;
2847 expand_return (expr);
2851 /* For tail-recursive call to current function,
2852 just jump back to the beginning.
2853 It's unsafe if any auto variable in this function
2854 has its address taken; for simplicity,
2855 require stack frame to be empty. */
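/* For example (a sketch): in
	int f (n) int n; { ...; return f (n - 1); }
   the actual N-1 is stored into the formal N and we jump back to
   tail_recursion_reentry, instead of emitting a real recursive call.  */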
2856 if (optimize && retval_rhs != 0
2857 && frame_offset == 0
2858 && TREE_CODE (retval_rhs) == CALL_EXPR
2859 && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
2860 && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
2861 /* Finish checking validity, and if valid emit code
2862 to set the argument variables for the new call. */
2863 && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
2864 DECL_ARGUMENTS (current_function_decl)))
2866 if (tail_recursion_label == 0)
2868 tail_recursion_label = gen_label_rtx ();
2869 emit_label_after (tail_recursion_label,
2870 tail_recursion_reentry);
2873 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
2878 /* This optimization is safe if there are local cleanups
2879 because expand_null_return takes care of them.
2880 ??? I think it should also be safe when there is a cleanup label,
2881 because expand_null_return takes care of them, too.
2882 Any reason why not? */
2883 if (HAVE_return && cleanup_label == 0
2884 && ! current_function_returns_pcc_struct
2885 && BRANCH_COST <= 1)
2887 /* If this is return x == y; then generate
2888 if (x == y) return 1; else return 0;
2889 if we can do it with explicit return insns and
2890 branches are cheap. */
2892 switch (TREE_CODE (retval_rhs))
2900 case TRUTH_ANDIF_EXPR:
2901 case TRUTH_ORIF_EXPR:
2902 case TRUTH_AND_EXPR:
2904 case TRUTH_NOT_EXPR:
2905 case TRUTH_XOR_EXPR:
2906 op0 = gen_label_rtx ();
2907 jumpifnot (retval_rhs, op0);
2908 expand_value_return (const1_rtx);
2910 expand_value_return (const0_rtx);
2914 #endif /* HAVE_return */
2918 && TREE_TYPE (retval_rhs) != void_type_node
2919 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2921 /* Calculate the return value into a pseudo reg. */
2922 val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2924 /* All temporaries have now been used. */
2926 /* Return the calculated value, doing cleanups first. */
2927 expand_value_return (val);
2931 /* No cleanups or no hard reg used;
2932 calculate value into hard return reg. */
2933 expand_expr (retval, const0_rtx, VOIDmode, 0);
2936 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2940 /* Return 1 if the end of the generated RTX is not a barrier.
2941 This means code already compiled can drop through. */
2944 drop_through_at_end_p ()
2946 rtx insn = get_last_insn ();
2947 while (insn && GET_CODE (insn) == NOTE)
2948 insn = PREV_INSN (insn);
2949 return insn && GET_CODE (insn) != BARRIER;
2952 /* Emit code to alter this function's formal parms for a tail-recursive call.
2953 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2954 FORMALS is the chain of decls of formals.
2955 Return 1 if this can be done;
2956 otherwise return 0 and do not emit any code. */
2959 tail_recursion_args (actuals, formals)
2960 tree actuals, formals;
2962 register tree a = actuals, f = formals;
2964 register rtx *argvec;
2966 /* Check that number and types of actuals are compatible
2967 with the formals. This is not always true in valid C code.
2968 Also check that no formal needs to be addressable
2969 and that all formals are scalars. */
2971 /* Also count the args. */
2973 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2975 if (TREE_TYPE (TREE_VALUE (a)) != TREE_TYPE (f))
2977 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2980 if (a != 0 || f != 0)
2983 /* Compute all the actuals. */
2985 argvec = (rtx *) alloca (i * sizeof (rtx));
2987 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2988 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
2990 /* Find which actual values refer to current values of previous formals.
2991 Copy each of them now, before any formal is changed. */
2993 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2997 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2998 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2999 { copy = 1; break; }
3001 argvec[i] = copy_to_reg (argvec[i]);
3004 /* Store the values of the actuals into the formals. */
3006 for (f = formals, a = actuals, i = 0; f;
3007 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3009 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3010 emit_move_insn (DECL_RTL (f), argvec[i]);
3012 convert_move (DECL_RTL (f), argvec[i],
3013 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3020 /* Generate the RTL code for entering a binding contour.
3021 The variables are declared one by one, by calls to `expand_decl'.
3023 EXIT_FLAG is nonzero if this construct should be visible to
3024 `exit_something'. */
3027 expand_start_bindings (exit_flag)
3030 struct nesting *thisblock = ALLOC_NESTING ();
3031 rtx note = output_bytecode ? 0 : emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
3033 /* Make an entry on block_stack for the block we are entering. */
3035 thisblock->next = block_stack;
3036 thisblock->all = nesting_stack;
3037 thisblock->depth = ++nesting_depth;
3038 thisblock->data.block.stack_level = 0;
3039 thisblock->data.block.cleanups = 0;
3040 thisblock->data.block.function_call_count = 0;
3044 if (block_stack->data.block.cleanups == NULL_TREE
3045 && (block_stack->data.block.outer_cleanups == NULL_TREE
3046 || block_stack->data.block.outer_cleanups == empty_cleanup_list))
3047 thisblock->data.block.outer_cleanups = empty_cleanup_list;
3049 thisblock->data.block.outer_cleanups
3050 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3051 block_stack->data.block.outer_cleanups);
3054 thisblock->data.block.outer_cleanups = 0;
3058 && !(block_stack->data.block.cleanups == NULL_TREE
3059 && block_stack->data.block.outer_cleanups == NULL_TREE))
3060 thisblock->data.block.outer_cleanups
3061 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3062 block_stack->data.block.outer_cleanups);
3064 thisblock->data.block.outer_cleanups = 0;
3066 thisblock->data.block.label_chain = 0;
3067 thisblock->data.block.innermost_stack_block = stack_block_stack;
3068 thisblock->data.block.first_insn = note;
3069 thisblock->data.block.block_start_count = ++block_start_count;
3070 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3071 block_stack = thisblock;
3072 nesting_stack = thisblock;
3074 if (!output_bytecode)
3076 /* Make a new level for allocating stack slots. */
3081 /* Given a pointer to a BLOCK node, save a pointer to the most recently
3082 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
3086 remember_end_note (block)
3087 register tree block;
3089 BLOCK_END_NOTE (block) = last_block_end_note;
3090 last_block_end_note = NULL_RTX;
3093 /* Generate RTL code to terminate a binding contour.
3094 VARS is the chain of VAR_DECL nodes
3095 for the variables bound in this contour.
3096 MARK_ENDS is nonzero if we should put a note at the beginning
3097 and end of this binding contour.
3099 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3100 (That is true automatically if the contour has a saved stack level.) */
3103 expand_end_bindings (vars, mark_ends, dont_jump_in)
3108 register struct nesting *thisblock = block_stack;
3111 if (output_bytecode)
3113 bc_expand_end_bindings (vars, mark_ends, dont_jump_in);
3118 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3119 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL
3120 && ! DECL_IN_SYSTEM_HEADER (decl))
3121 warning_with_decl (decl, "unused variable `%s'");
3123 if (thisblock->exit_label)
3125 do_pending_stack_adjust ();
3126 emit_label (thisblock->exit_label);
3129 /* If necessary, make a handler for nonlocal gotos taking
3130 place in the function calls in this block. */
3131 if (function_call_count != thisblock->data.block.function_call_count
3133 /* Make handler for outermost block
3134 if there were any nonlocal gotos to this function. */
3135 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3136 /* Make handler for inner block if it has something
3137 special to do when you jump out of it. */
3138 : (thisblock->data.block.cleanups != 0
3139 || thisblock->data.block.stack_level != 0)))
3142 rtx afterward = gen_label_rtx ();
3143 rtx handler_label = gen_label_rtx ();
3144 rtx save_receiver = gen_reg_rtx (Pmode);
3147 /* Don't let jump_optimize delete the handler. */
3148 LABEL_PRESERVE_P (handler_label) = 1;
3150 /* Record the handler address in the stack slot for that purpose,
3151 during this block, saving and restoring the outer value. */
3152 if (thisblock->next != 0)
3154 emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
3157 emit_move_insn (save_receiver, nonlocal_goto_handler_slot);
3158 insns = get_insns ();
3160 emit_insns_before (insns, thisblock->data.block.first_insn);
3164 emit_move_insn (nonlocal_goto_handler_slot,
3165 gen_rtx (LABEL_REF, Pmode, handler_label));
3166 insns = get_insns ();
3168 emit_insns_before (insns, thisblock->data.block.first_insn);
3170 /* Jump around the handler; it runs only when specially invoked. */
3171 emit_jump (afterward);
3172 emit_label (handler_label);
3174 #ifdef HAVE_nonlocal_goto
3175 if (! HAVE_nonlocal_goto)
3177 /* First adjust our frame pointer to its actual value. It was
3178 previously set to the start of the virtual area corresponding to
3179 the stacked variables when we branched here and now needs to be
3180 adjusted to the actual hardware fp value.
3182 Assignments to virtual registers are converted by
3183 instantiate_virtual_regs into the corresponding assignment
3184 to the underlying register (fp in this case) that makes
3185 the original assignment true.
3186 So the following insn will actually be
3187 decrementing fp by STARTING_FRAME_OFFSET. */
3188 emit_move_insn (virtual_stack_vars_rtx, frame_pointer_rtx);
3190 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3191 if (fixed_regs[ARG_POINTER_REGNUM])
3193 #ifdef ELIMINABLE_REGS
3194 /* If the argument pointer can be eliminated in favor of the
3195 frame pointer, we don't need to restore it. We assume here
3196 that if such an elimination is present, it can always be used.
3197 This is the case on all known machines; if we don't make this
3198 assumption, we do unnecessary saving on many machines. */
3199 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3202 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
3203 if (elim_regs[i].from == ARG_POINTER_REGNUM
3204 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3207 if (i == sizeof elim_regs / sizeof elim_regs [0])
3210 /* Now restore our arg pointer from the address at which it
3211 was saved in our stack frame.
3212 If there hasn't been space allocated for it yet, make
3214 if (arg_pointer_save_area == 0)
3215 arg_pointer_save_area
3216 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3217 emit_move_insn (virtual_incoming_args_rtx,
3218 /* We need a pseudo here, or else
3219 instantiate_virtual_regs_1 complains. */
3220 copy_to_reg (arg_pointer_save_area));
3225 /* The handler expects the desired label address in the static chain
3226 register. It tests the address and does an appropriate jump
3227 to whatever label is desired. */
3228 for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
3229 /* Skip any labels we shouldn't be able to jump to from here. */
3230 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3232 rtx not_this = gen_label_rtx ();
3233 rtx this = gen_label_rtx ();
3234 do_jump_if_equal (static_chain_rtx,
3235 gen_rtx (LABEL_REF, Pmode, DECL_RTL (TREE_VALUE (link))),
3237 emit_jump (not_this);
3239 expand_goto (TREE_VALUE (link));
3240 emit_label (not_this);
3242 /* If label is not recognized, abort. */
3243 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0,
3245 emit_label (afterward);
3248 /* Don't allow jumping into a block that has cleanups or a stack level. */
3250 || thisblock->data.block.stack_level != 0
3251 || thisblock->data.block.cleanups != 0)
3253 struct label_chain *chain;
3255 /* Any labels in this block are no longer valid to go to.
3256 Mark them to cause an error message. */
3257 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3259 DECL_TOO_LATE (chain->label) = 1;
3260 /* If any goto without a fixup came to this label,
3261 that must be an error, because gotos without fixups
3262 come from outside all saved stack-levels and all cleanups. */
3263 if (TREE_ADDRESSABLE (chain->label))
3264 error_with_decl (chain->label,
3265 "label `%s' used before containing binding contour");
3269 /* Restore stack level in effect before the block
3270 (only if variable-size objects allocated). */
3271 /* Perform any cleanups associated with the block. */
3273 if (thisblock->data.block.stack_level != 0
3274 || thisblock->data.block.cleanups != 0)
3276 /* Don't let cleanups affect ({...}) constructs. */
3277 int old_expr_stmts_for_value = expr_stmts_for_value;
3278 rtx old_last_expr_value = last_expr_value;
3279 tree old_last_expr_type = last_expr_type;
3280 expr_stmts_for_value = 0;
3282 /* Do the cleanups. */
3283 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE);
3284 do_pending_stack_adjust ();
3286 expr_stmts_for_value = old_expr_stmts_for_value;
3287 last_expr_value = old_last_expr_value;
3288 last_expr_type = old_last_expr_type;
3290 /* Restore the stack level. */
3292 if (thisblock->data.block.stack_level != 0)
3294 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3295 thisblock->data.block.stack_level, NULL_RTX);
3296 if (nonlocal_goto_handler_slot != 0)
3297 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3301 /* Any gotos out of this block must also do these things.
3302 Also report any gotos with fixups that came to labels in this
3304 fixup_gotos (thisblock,
3305 thisblock->data.block.stack_level,
3306 thisblock->data.block.cleanups,
3307 thisblock->data.block.first_insn,
3311 /* Mark the beginning and end of the scope if requested.
3312 We do this now, after running cleanups on the variables
3313 just going out of scope, so they are in scope for their cleanups. */
3316 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3318 /* Get rid of the beginning-mark if we don't make an end-mark. */
3319 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3321 /* If doing stupid register allocation, make sure lives of all
3322 register variables declared here extend thru end of scope. */
3325 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3327 rtx rtl = DECL_RTL (decl);
3328 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
3332 /* Restore block_stack level for containing block. */
3334 stack_block_stack = thisblock->data.block.innermost_stack_block;
3335 POPSTACK (block_stack);
3337 /* Pop the stack slot nesting and free any slots at this level. */
3342 /* End a binding contour.
3343 VARS is the chain of VAR_DECL nodes for the variables bound
3344 in this contour. MARK_ENDS is nonzero if we should put a note
3345 at the beginning and end of this binding contour.
3346 DONT_JUMP_IN is nonzero if it is not valid to jump into this
3350 bc_expand_end_bindings (vars, mark_ends, dont_jump_in)
3355 struct nesting *thisbind = nesting_stack;
3359 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3360 if (! TREE_USED (TREE_VALUE (decl)) && TREE_CODE (TREE_VALUE (decl)) == VAR_DECL)
3361 warning_with_decl (decl, "unused variable `%s'");
3363 if (thisbind->exit_label)
3364 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisbind->exit_label));
3366 /* Pop block/bindings off stack */
3367 POPSTACK (block_stack);
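/* Illustrative sketch only (hence #if 0, it is not compiled): roughly how
   a front end opens and closes a binding contour for a brace-block and
   registers a cleanup for one of its variables.  DECL, CLEANUP and VARS
   are hypothetical trees built by the front end.  */
#if 0
static void
example_expand_block (decl, cleanup, vars)
     tree decl, cleanup, vars;
{
  expand_start_bindings (0);		/* emit NOTE_INSN_BLOCK_BEG, push block_stack */
  /* ... declare and initialize the block's variables ... */
  expand_decl_cleanup (decl, cleanup);	/* run CLEANUP whenever the contour is left */
  /* ... expand the statements of the block ... */
  expand_end_bindings (vars, 1, 0);	/* run cleanups, emit NOTE_INSN_BLOCK_END */
}
#endif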
3370 /* Generate RTL for the automatic variable declaration DECL.
3371 (Other kinds of declarations are simply ignored if seen here.)
3372 CLEANUP is an expression to be executed at exit from this binding contour;
3373 for example, in C++, it might call the destructor for this variable.
3375 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
3376 either before or after calling `expand_decl' but before compiling
3377 any subsequent expressions. This is because CLEANUP may be expanded
3378 more than once, on different branches of execution.
3379 For the same reason, CLEANUP may not contain a CALL_EXPR
3380 except as its topmost node--else `preexpand_calls' would get confused.
3382 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3383 that is not associated with any particular variable.
3385 There is no special support here for C++ constructors.
3386 They should be handled by the proper code in DECL_INITIAL. */
3392 struct nesting *thisblock = block_stack;
3395 if (output_bytecode)
3397 bc_expand_decl (decl, 0);
3401 type = TREE_TYPE (decl);
3403 /* Only automatic variables need any expansion done.
3404 Static and external variables, and external functions,
3405 will be handled by `assemble_variable' (called from finish_decl).
3406 TYPE_DECL and CONST_DECL require nothing.
3407 PARM_DECLs are handled in `assign_parms'. */
3409 if (TREE_CODE (decl) != VAR_DECL)
3411 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3414 /* Create the RTL representation for the variable. */
3416 if (type == error_mark_node)
3417 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
3418 else if (DECL_SIZE (decl) == 0)
3419 /* Variable with incomplete type. */
3421 if (DECL_INITIAL (decl) == 0)
3422 /* Error message was already done; now avoid a crash. */
3423 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
3425 /* An initializer is going to decide the size of this array.
3426 Until we know the size, represent its address with a reg. */
3427 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
3429 else if (DECL_MODE (decl) != BLKmode
3430 /* If -ffloat-store, don't put explicit float vars
3432 && !(flag_float_store
3433 && TREE_CODE (type) == REAL_TYPE)
3434 && ! TREE_THIS_VOLATILE (decl)
3435 && ! TREE_ADDRESSABLE (decl)
3436 && (DECL_REGISTER (decl) || ! obey_regdecls))
3438 /* Automatic variable that can go in a register. */
3439 int unsignedp = TREE_UNSIGNED (type);
3440 enum machine_mode reg_mode
3441 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3443 if (TREE_CODE (type) == COMPLEX_TYPE)
3445 rtx realpart, imagpart;
3446 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (type));
3448 /* For a complex type variable, make a CONCAT of two pseudos
3449 so that the real and imaginary parts
3450 can be allocated separately. */
3451 realpart = gen_reg_rtx (partmode);
3452 REG_USERVAR_P (realpart) = 1;
3453 imagpart = gen_reg_rtx (partmode);
3454 REG_USERVAR_P (imagpart) = 1;
3455 DECL_RTL (decl) = gen_rtx (CONCAT, reg_mode, realpart, imagpart);
3459 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3460 if (TREE_CODE (type) == POINTER_TYPE)
3461 mark_reg_pointer (DECL_RTL (decl));
3462 REG_USERVAR_P (DECL_RTL (decl)) = 1;
3465 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST)
3467 /* Variable of fixed size that goes on the stack. */
3471 /* If we previously made RTL for this decl, it must be an array
3472 whose size was determined by the initializer.
3473 The old address was a register; set that register now
3474 to the proper address. */
3475 if (DECL_RTL (decl) != 0)
3477 if (GET_CODE (DECL_RTL (decl)) != MEM
3478 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3480 oldaddr = XEXP (DECL_RTL (decl), 0);
3484 = assign_stack_temp (DECL_MODE (decl),
3485 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
3486 + BITS_PER_UNIT - 1)
3490 /* Set alignment we actually gave this decl. */
3491 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3492 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3496 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3497 if (addr != oldaddr)
3498 emit_move_insn (oldaddr, addr);
3501 /* If this is a memory ref that contains aggregate components,
3502 mark it as such for cse and loop optimize. */
3503 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3505 /* If this is in memory because of -ffloat-store,
3506 set the volatile bit, to prevent optimizations from
3507 undoing the effects. */
3508 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
3509 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3513 /* Dynamic-size object: must push space on the stack. */
3517 /* Record the stack pointer on entry to block, if we have
3518 not already done so. */
3519 if (thisblock->data.block.stack_level == 0)
3521 do_pending_stack_adjust ();
3522 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3523 &thisblock->data.block.stack_level,
3524 thisblock->data.block.first_insn);
3525 stack_block_stack = thisblock;
3528 /* Compute the variable's size, in bytes. */
3529 size = expand_expr (size_binop (CEIL_DIV_EXPR,
3531 size_int (BITS_PER_UNIT)),
3532 NULL_RTX, VOIDmode, 0);
3535 /* This is equivalent to calling alloca. */
3536 current_function_calls_alloca = 1;
3538 /* Allocate space on the stack for the variable. */
3539 address = allocate_dynamic_stack_space (size, NULL_RTX,
3542 if (nonlocal_goto_handler_slot != 0)
3543 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
3545 /* Reference the variable indirect through that rtx. */
3546 DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);
3548 /* If this is a memory ref that contains aggregate components,
3549 mark it as such for cse and loop optimize. */
3550 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3552 /* Indicate the alignment we actually gave this variable. */
3553 #ifdef STACK_BOUNDARY
3554 DECL_ALIGN (decl) = STACK_BOUNDARY;
3556 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3560 if (TREE_THIS_VOLATILE (decl))
3561 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3562 #if 0 /* A variable is not necessarily unchanging
3563 just because it is const. RTX_UNCHANGING_P
3564 means no change in the function,
3565 not merely no change in the variable's scope.
3566 It is correct to set RTX_UNCHANGING_P if the variable's scope
3567 is the whole function. There's no convenient way to test that. */
3568 if (TREE_READONLY (decl))
3569 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
3572 /* If doing stupid register allocation, make sure life of any
3573 register variable starts here, at the start of its scope. */
3576 use_variable (DECL_RTL (decl));
3580 /* Generate code for the automatic variable declaration DECL. For
3581 most variables this just means we give it a stack offset. The
3582 compiler sometimes emits cleanups without variables and we will
3583 have to deal with those too. */
3586 bc_expand_decl (decl, cleanup)
3594 /* A cleanup with no variable. */
3601 /* Only auto variables need any work. */
3602 if (TREE_CODE (decl) != VAR_DECL || TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3605 type = TREE_TYPE (decl);
3607 if (type == error_mark_node)
3608 DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
3610 else if (DECL_SIZE (decl) == 0)
3612 /* Variable with incomplete type. The stack offset herein will be
3613 fixed later in expand_decl_init (). */
3614 DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
3616 else if (TREE_CONSTANT (DECL_SIZE (decl)))
3618 DECL_RTL (decl) = bc_allocate_local (TREE_INT_CST_LOW (DECL_SIZE (decl)) / BITS_PER_UNIT,
3622 DECL_RTL (decl) = bc_allocate_variable_array (DECL_SIZE (decl));
3625 /* Emit code to perform the initialization of a declaration DECL. */
3628 expand_decl_init (decl)
3631 int was_used = TREE_USED (decl);
3633 if (output_bytecode)
3635 bc_expand_decl_init (decl);
3639 /* If this is a CONST_DECL, we don't have to generate any code, but
3640 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3641 to be set while in the obstack containing the constant. If we don't
3642 do this, we can lose if we have functions nested three deep and the middle
3643 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3644 the innermost function is the first to expand that STRING_CST. */
3645 if (TREE_CODE (decl) == CONST_DECL)
3647 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3648 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3649 EXPAND_INITIALIZER);
3653 if (TREE_STATIC (decl))
3656 /* Compute and store the initial value now. */
3658 if (DECL_INITIAL (decl) == error_mark_node)
3660 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3661 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3662 || code == POINTER_TYPE)
3663 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3667 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3669 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3670 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3674 /* Don't let the initialization count as "using" the variable. */
3675 TREE_USED (decl) = was_used;
3677 /* Free any temporaries we made while initializing the decl. */
3681 /* Expand initialization for variable-sized types. Allocate array
3682 using newlocalSI and set local variable, which is a pointer to the
3686 bc_expand_variable_local_init (decl)
3689 /* Evaluate size expression and coerce to SI */
3690 bc_expand_expr (DECL_SIZE (decl));
3692 /* Type sizes are always (?) of TREE_CODE INTEGER_CST, so
3693 no coercion is necessary (?) */
3695 /* emit_typecode_conversion (preferred_typecode (TYPE_MODE (DECL_SIZE (decl)),
3696 TREE_UNSIGNED (DECL_SIZE (decl))), SIcode); */
3698 /* Emit code to allocate array */
3699 bc_emit_instruction (newlocalSI);
3701 /* Store array pointer in local variable. This is the only instance
3702 where we actually want the address of the pointer to the
3703 variable-size block, rather than the pointer itself. We avoid
3704 using expand_address() since that would cause the pointer to be
3705 pushed rather than its address. Hence the hard-coded reference;
3706 notice also that the variable is always local (no global
3707 variable-size type variables). */
3709 bc_load_localaddr (DECL_RTL (decl));
3710 bc_emit_instruction (storeP);
3714 /* Emit code to initialize a declaration. */
3717 bc_expand_decl_init (decl)
3720 int org_stack_depth;
3722 /* Static initializers are handled elsewhere */
3724 if (TREE_STATIC (decl))
3727 /* Remember the original stack depth */
3728 org_stack_depth = stack_depth;
3730 /* If the type is variable-size, we first create its space (we ASSUME
3731 it CAN'T be static). We do this regardless of whether there's an
3732 initializer assignment or not. */
3734 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
3735 bc_expand_variable_local_init (decl);
3737 /* Expand initializer assignment */
3738 if (DECL_INITIAL (decl) == error_mark_node)
3740 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3742 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3743 || code == POINTER_TYPE)
3745 expand_assignment (TREE_TYPE (decl), decl, 0, 0);
3747 else if (DECL_INITIAL (decl))
3748 expand_assignment (TREE_TYPE (decl), decl, 0, 0);
3750 /* Restore stack depth */
3751 if (org_stack_depth > stack_depth)
3754 bc_adjust_stack (stack_depth - org_stack_depth);
3758 /* CLEANUP is an expression to be executed at exit from this binding contour;
3759 for example, in C++, it might call the destructor for this variable.
3761 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
3762 either before or after calling `expand_decl' but before compiling
3763 any subsequent expressions. This is because CLEANUP may be expanded
3764 more than once, on different branches of execution.
3765 For the same reason, CLEANUP may not contain a CALL_EXPR
3766 except as its topmost node--else `preexpand_calls' would get confused.
3768 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3769 that is not associated with any particular variable. */
3772 expand_decl_cleanup (decl, cleanup)
3775 struct nesting *thisblock = block_stack;
3777 /* Error if we are not in any block. */
3781 /* Record the cleanup if there is one. */
3785 thisblock->data.block.cleanups
3786 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3787 /* If this block has a cleanup, it belongs in stack_block_stack. */
3788 stack_block_stack = thisblock;
3793 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
3794 DECL_ELTS is the list of elements that belong to DECL's type.
3795 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
3798 expand_anon_union_decl (decl, cleanup, decl_elts)
3799 tree decl, cleanup, decl_elts;
3801 struct nesting *thisblock = block_stack;
3804 expand_decl (decl, cleanup);
3805 x = DECL_RTL (decl);
3809 tree decl_elt = TREE_VALUE (decl_elts);
3810 tree cleanup_elt = TREE_PURPOSE (decl_elts);
3811 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
3813 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
3814 instead create a new MEM rtx with the proper mode. */
3815 if (GET_CODE (x) == MEM)
3817 if (mode == GET_MODE (x))
3818 DECL_RTL (decl_elt) = x;
3821 DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0)));
3822 MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
3823 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
3826 else if (GET_CODE (x) == REG)
3828 if (mode == GET_MODE (x))
3829 DECL_RTL (decl_elt) = x;
3831 DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0);
3836 /* Record the cleanup if there is one. */
3839 thisblock->data.block.cleanups
3840 = temp_tree_cons (decl_elt, cleanup_elt,
3841 thisblock->data.block.cleanups);
3843 decl_elts = TREE_CHAIN (decl_elts);
3847 /* Expand a list of cleanups LIST.
3848 Elements may be expressions or may be nested lists.
3850 If DONT_DO is nonnull, then any list-element
3851 whose TREE_PURPOSE matches DONT_DO is omitted.
3852 This is sometimes used to avoid a cleanup associated with
3853 a value that is being returned out of the scope. */
3856 expand_cleanups (list, dont_do)
3861 for (tail = list; tail; tail = TREE_CHAIN (tail))
3862 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
3864 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3865 expand_cleanups (TREE_VALUE (tail), dont_do);
3868 /* Cleanups may be run multiple times. For example,
3869 when exiting a binding contour, we expand the
3870 cleanups associated with that contour. When a goto
3871 within that binding contour has a target outside that
3872 contour, it will expand all cleanups from its scope to
3873 the target. Though the cleanups are expanded multiple
3874 times, the control paths are non-overlapping so the
3875 cleanups will not be executed twice. */
3876 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
3882 /* Move all cleanups from the current block_stack
3883 to the containing block_stack, where they are assumed to
3884 have been created. If anything can cause a temporary to
3885 be created, but not expanded for more than one level of
3886 block_stacks, then this code will have to change. */
3891 struct nesting *block = block_stack;
3892 struct nesting *outer = block->next;
3894 outer->data.block.cleanups
3895 = chainon (block->data.block.cleanups,
3896 outer->data.block.cleanups);
3897 block->data.block.cleanups = 0;
3901 last_cleanup_this_contour ()
3903 if (block_stack == 0)
3906 return block_stack->data.block.cleanups;
3909 /* Return 1 if there are any pending cleanups at this point.
3910 If THIS_CONTOUR is nonzero, check the current contour as well.
3911 Otherwise, look only at the contours that enclose this one. */
3914 any_pending_cleanups (this_contour)
3917 struct nesting *block;
3919 if (block_stack == 0)
3922 if (this_contour && block_stack->data.block.cleanups != NULL)
3924 if (block_stack->data.block.cleanups == 0
3925 && (block_stack->data.block.outer_cleanups == 0
3927 || block_stack->data.block.outer_cleanups == empty_cleanup_list
3932 for (block = block_stack->next; block; block = block->next)
3933 if (block->data.block.cleanups != 0)
3939 /* Enter a case (Pascal) or switch (C) statement.
3940 Push a block onto case_stack and nesting_stack
3941 to accumulate the case-labels that are seen
3942 and to record the labels generated for the statement.
3944 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
3945 Otherwise, this construct is transparent for `exit_something'.
3947 EXPR is the index-expression to be dispatched on.
3948 TYPE is its nominal type. We could simply convert EXPR to this type,
3949 but instead we take short cuts. */
3952 expand_start_case (exit_flag, expr, type, printname)
3958 register struct nesting *thiscase = ALLOC_NESTING ();
3960 /* Make an entry on case_stack for the case we are entering. */
3962 thiscase->next = case_stack;
3963 thiscase->all = nesting_stack;
3964 thiscase->depth = ++nesting_depth;
3965 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
3966 thiscase->data.case_stmt.case_list = 0;
3967 thiscase->data.case_stmt.index_expr = expr;
3968 thiscase->data.case_stmt.nominal_type = type;
3969 thiscase->data.case_stmt.default_label = 0;
3970 thiscase->data.case_stmt.num_ranges = 0;
3971 thiscase->data.case_stmt.printname = printname;
3972 thiscase->data.case_stmt.seenlabel = 0;
3973 case_stack = thiscase;
3974 nesting_stack = thiscase;
3976 if (output_bytecode)
3978 bc_expand_start_case (thiscase, expr, type, printname);
3982 do_pending_stack_adjust ();
3984 /* Make sure case_stmt.start points to something that won't
3985 need any transformation before expand_end_case. */
3986 if (GET_CODE (get_last_insn ()) != NOTE)
3987 emit_note (NULL_PTR, NOTE_INSN_DELETED);
3989 thiscase->data.case_stmt.start = get_last_insn ();
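/* Illustrative sketch only (hence #if 0, it is not compiled): roughly how
   a front end might expand `switch (EXPR) { case ...: ...; default: ...; }'.
   CONVERTER is whatever conversion function the front end supplies; VALUE1
   is an INTEGER_CST it has built for one case, LAB1 and DEFLAB are its
   LABEL_DECLs, and the diagnostic name "switch statement" is likewise just
   an example.  expand_end_case, defined later in this file, finishes the
   construct by emitting the dispatch code.  */
#if 0
static void
example_expand_switch (expr, value1, converter, lab1, deflab)
     tree expr, value1, lab1, deflab;
     tree (*converter) PROTO((tree, tree));
{
  tree duplicate;

  expand_start_case (1, expr, TREE_TYPE (expr), "switch statement");
  /* ... expand the switch body; at each label call pushcase ... */
  if (pushcase (value1, converter, lab1, &duplicate) != 0)
    ;	/* a nonzero return reports an error, e.g. 2 for a duplicate case */
  if (pushcase (NULL_TREE, converter, deflab, &duplicate) != 0)
    ;	/* a null VALUE registers the default label */
  expand_end_case (expr);
}
#endif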
3993 /* Enter a case statement. It is assumed that the caller has pushed
3994 the current context onto the case stack. */
3997 bc_expand_start_case (thiscase, expr, type, printname)
3998 struct nesting *thiscase;
4003 bc_expand_expr (expr);
4004 bc_expand_conversion (TREE_TYPE (expr), type);
4006 /* For cases, the skip is a place we jump to that's emitted after
4007 the size of the jump table is known. */
4009 thiscase->data.case_stmt.skip_label = gen_label_rtx ();
4010 bc_emit_bytecode (jump);
4011 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));
4013 #ifdef DEBUG_PRINT_CODE
4014 fputc ('\n', stderr);
4019 /* Start a "dummy case statement" within which case labels are invalid
4020 and are not connected to any larger real case statement.
4021 This can be used if you don't want to let a case statement jump
4022 into the middle of certain kinds of constructs. */
4025 expand_start_case_dummy ()
4027 register struct nesting *thiscase = ALLOC_NESTING ();
4029 /* Make an entry on case_stack for the dummy. */
4031 thiscase->next = case_stack;
4032 thiscase->all = nesting_stack;
4033 thiscase->depth = ++nesting_depth;
4034 thiscase->exit_label = 0;
4035 thiscase->data.case_stmt.case_list = 0;
4036 thiscase->data.case_stmt.start = 0;
4037 thiscase->data.case_stmt.nominal_type = 0;
4038 thiscase->data.case_stmt.default_label = 0;
4039 thiscase->data.case_stmt.num_ranges = 0;
4040 case_stack = thiscase;
4041 nesting_stack = thiscase;
4044 /* End a dummy case statement. */
4047 expand_end_case_dummy ()
4049 POPSTACK (case_stack);
4052 /* Return the data type of the index-expression
4053 of the innermost case statement, or null if none. */
4056 case_index_expr_type ()
4059 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4063 /* Accumulate one case or default label inside a case or switch statement.
4064 VALUE is the value of the case (a null pointer, for a default label).
4065 The function CONVERTER, when applied to arguments T and V,
4066 converts the value V to the type T.
4068 If not currently inside a case or switch statement, return 1 and do
4069 nothing. The caller will print a language-specific error message.
4070 If VALUE is a duplicate or overlaps, return 2 and do nothing
4071 except store the (first) duplicate node in *DUPLICATE.
4072 If VALUE is out of range, return 3 and do nothing.
4073 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4074 Return 0 on success.
4076 Extended to handle range statements. */
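/* A caller sketch (hypothetical; the diagnostics shown are only
   illustrative, since each front end prints its own messages):

	switch (pushcase (value, converter, label, &duplicate))
	  {
	  case 1:  error ("case label not within a switch statement");  break;
	  case 2:  error ("duplicate case value");                       break;
	  case 3:  error ("case value out of range");                    break;
	  case 5:  error ("case label within scope of cleanup or variable array");
	  }  */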
4079 pushcase (value, converter, label, duplicate)
4080 register tree value;
4081 tree (*converter) PROTO((tree, tree));
4082 register tree label;
4085 register struct case_node **l;
4086 register struct case_node *n;
4090 if (output_bytecode)
4091 return bc_pushcase (value, label);
4093 /* Fail if not inside a real case statement. */
4094 if (! (case_stack && case_stack->data.case_stmt.start))
4097 if (stack_block_stack
4098 && stack_block_stack->depth > case_stack->depth)
4101 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4102 nominal_type = case_stack->data.case_stmt.nominal_type;
4104 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4105 if (index_type == error_mark_node)
4108 /* Convert VALUE to the type in which the comparisons are nominally done. */
4110 value = (*converter) (nominal_type, value);
4112 /* If this is the first label, warn if any insns have been emitted. */
4113 if (case_stack->data.case_stmt.seenlabel == 0)
4116 for (insn = case_stack->data.case_stmt.start;
4118 insn = NEXT_INSN (insn))
4120 if (GET_CODE (insn) == CODE_LABEL)
4122 if (GET_CODE (insn) != NOTE
4123 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4125 warning ("unreachable code at beginning of %s",
4126 case_stack->data.case_stmt.printname);
4131 case_stack->data.case_stmt.seenlabel = 1;
4133 /* Fail if this value is out of range for the actual type of the index
4134 (which may be narrower than NOMINAL_TYPE). */
4135 if (value != 0 && ! int_fits_type_p (value, index_type))
4138 /* Fail if this is a duplicate or overlaps another entry. */
4141 if (case_stack->data.case_stmt.default_label != 0)
4143 *duplicate = case_stack->data.case_stmt.default_label;
4146 case_stack->data.case_stmt.default_label = label;
4150 /* Find the elt in the chain before which to insert the new value,
4151 to keep the chain sorted in increasing order.
4152 But report an error if this element is a duplicate. */
4153 for (l = &case_stack->data.case_stmt.case_list;
4154 /* Keep going past elements distinctly less than VALUE. */
4155 *l != 0 && tree_int_cst_lt ((*l)->high, value);
4160 /* Element we will insert before must be distinctly greater;
4161 overlap means error. */
4162 if (! tree_int_cst_lt (value, (*l)->low))
4164 *duplicate = (*l)->code_label;
4169 /* Add this label to the chain, and succeed.
4170 Copy VALUE so it is on temporary rather than momentary
4171 obstack and will thus survive till the end of the case statement. */
4172 n = (struct case_node *) oballoc (sizeof (struct case_node));
4175 n->high = n->low = copy_node (value);
4176 n->code_label = label;
4180 expand_label (label);
4184 /* Like pushcase but this case applies to all values
4185 between VALUE1 and VALUE2 (inclusive).
4186 The return value is the same as that of pushcase
4187 but there is one additional error code:
4188 4 means the specified range was empty. */
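/* For example (a sketch; the source syntax shown is the GNU C case-range
   extension, assumed here purely for illustration):

	switch (n)
	  {
	  case 1 ... 5:     low ();   break;
	  case 100 ... 199: high ();  break;
	  }

   would be handled by two calls to pushcase_range, with VALUE1/VALUE2 of
   1/5 and 100/199 respectively.  A range whose bounds are equal is turned
   back into an ordinary pushcase, as the code below does.  */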
4191 pushcase_range (value1, value2, converter, label, duplicate)
4192 register tree value1, value2;
4193 tree (*converter) PROTO((tree, tree));
4194 register tree label;
4197 register struct case_node **l;
4198 register struct case_node *n;
4202 /* Fail if not inside a real case statement. */
4203 if (! (case_stack && case_stack->data.case_stmt.start))
4206 if (stack_block_stack
4207 && stack_block_stack->depth > case_stack->depth)
4210 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4211 nominal_type = case_stack->data.case_stmt.nominal_type;
4213 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4214 if (index_type == error_mark_node)
4217 /* If this is the first label, warn if any insns have been emitted. */
4218 if (case_stack->data.case_stmt.seenlabel == 0)
4221 for (insn = case_stack->data.case_stmt.start;
4223 insn = NEXT_INSN (insn))
4225 if (GET_CODE (insn) == CODE_LABEL)
4227 if (GET_CODE (insn) != NOTE
4228 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4230 warning ("unreachable code at beginning of %s",
4231 case_stack->data.case_stmt.printname);
4236 case_stack->data.case_stmt.seenlabel = 1;
4238 /* Convert VALUEs to type in which the comparisons are nominally done. */
4239 if (value1 == 0) /* Negative infinity. */
4240 value1 = TYPE_MIN_VALUE(index_type);
4241 value1 = (*converter) (nominal_type, value1);
4243 if (value2 == 0) /* Positive infinity. */
4244 value2 = TYPE_MAX_VALUE(index_type);
4245 value2 = (*converter) (nominal_type, value2);
4247 /* Fail if these values are out of range. */
4248 if (! int_fits_type_p (value1, index_type))
4251 if (! int_fits_type_p (value2, index_type))
4254 /* Fail if the range is empty. */
4255 if (tree_int_cst_lt (value2, value1))
4258 /* If the bounds are equal, turn this into the one-value case. */
4259 if (tree_int_cst_equal (value1, value2))
4260 return pushcase (value1, converter, label, duplicate);
4262 /* Find the elt in the chain before which to insert the new value,
4263 to keep the chain sorted in increasing order.
4264 But report an error if this element is a duplicate. */
4265 for (l = &case_stack->data.case_stmt.case_list;
4266 /* Keep going past elements distinctly less than this range. */
4267 *l != 0 && tree_int_cst_lt ((*l)->high, value1);
4272 /* Element we will insert before must be distinctly greater;
4273 overlap means error. */
4274 if (! tree_int_cst_lt (value2, (*l)->low))
4276 *duplicate = (*l)->code_label;
4281 /* Add this label to the chain, and succeed.
4282 Copy VALUE1, VALUE2 so they are on temporary rather than momentary
4283 obstack and will thus survive till the end of the case statement. */
4285 n = (struct case_node *) oballoc (sizeof (struct case_node));
4288 n->low = copy_node (value1);
4289 n->high = copy_node (value2);
4290 n->code_label = label;
4293 expand_label (label);
4295 case_stack->data.case_stmt.num_ranges++;
4301 /* Accumulate one case or default label; VALUE is the value of the
4302 case, or nil for a default label. If not currently inside a case,
4303 return 1 and do nothing. If VALUE is a duplicate or overlaps, return
4304 2 and do nothing. If VALUE is out of range, return 3 and do nothing.
4305 Return 0 on success. This function is a leftover from the earlier
4306 bytecode compiler, which was based on gcc 1.37. It should be
4307 merged into pushcase. */
4310 bc_pushcase (value, label)
4314 struct nesting *thiscase = case_stack;
4315 struct case_node *case_label, *new_label;
4320 /* Fail if duplicate, overlap, or out of type range. */
4323 value = convert (thiscase->data.case_stmt.nominal_type, value);
4324 if (! int_fits_type_p (value, thiscase->data.case_stmt.nominal_type))
4327 for (case_label = thiscase->data.case_stmt.case_list;
4328 case_label->left; case_label = case_label->left)
4329 if (! tree_int_cst_lt (case_label->left->high, value))
4332 if (case_label != thiscase->data.case_stmt.case_list
4333 && ! tree_int_cst_lt (case_label->high, value)
4334 || case_label->left && ! tree_int_cst_lt (value, case_label->left->low))
4337 new_label = (struct case_node *) oballoc (sizeof (struct case_node));
4338 new_label->low = new_label->high = copy_node (value);
4339 new_label->code_label = label;
4340 new_label->left = case_label->left;
4342 case_label->left = new_label;
4343 thiscase->data.case_stmt.num_ranges++;
4347 if (thiscase->data.case_stmt.default_label)
4349 thiscase->data.case_stmt.default_label = label;
4352 expand_label (label);
4356 /* Called when the index of a switch statement is an enumerated type
4357 and there is no default label.
4359 Checks that all enumeration literals are covered by the case
4360 expressions of a switch. Also, warn if there are any extra
4361 switch cases that are *not* elements of the enumerated type.
4363 If all enumeration literals were covered by the case expressions,
4364 turn one of the expressions into the default expression since it should
4365 not be possible to fall through such a switch. */
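/* For instance (an illustrative sketch, not from this file):

	enum color { RED, GREEN, BLUE };
	enum color c;
	...
	switch (c)
	  {
	  case RED:    ...  break;
	  case GREEN:  ...  break;
	  case 42:     ...  break;
	  }

   with no default label draws "enumeration value `BLUE' not handled in
   switch" for the missing enumerator, and "case value `42' not in
   enumerated type `color'" for the stray case.  */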
4368 check_for_full_enumeration_handling (type)
4371 register struct case_node *n;
4372 register struct case_node **l;
4373 register tree chain;
4376 if (output_bytecode)
4378 bc_check_for_full_enumeration_handling (type);
4382 /* The time complexity of this loop is currently O(N * M), with
4383 N being the number of members in the enumerated type, and
4384 M being the number of case expressions in the switch. */
4386 for (chain = TYPE_VALUES (type);
4388 chain = TREE_CHAIN (chain))
4390 /* Find a match between enumeral and case expression, if possible.
4391 Quit looking when we've gone too far (since case expressions
4392 are kept sorted in ascending order). Warn about enumerators not
4393 handled in the switch statement case expression list. */
4395 for (n = case_stack->data.case_stmt.case_list;
4396 n && tree_int_cst_lt (n->high, TREE_VALUE (chain));
4400 if (!n || tree_int_cst_lt (TREE_VALUE (chain), n->low))
4403 warning ("enumeration value `%s' not handled in switch",
4404 IDENTIFIER_POINTER (TREE_PURPOSE (chain)));
4409 /* Now we go the other way around; we warn if there are case
4410 expressions that don't correspond to enumerators. This can
4411 occur since C and C++ don't enforce type-checking of
4412 assignments to enumeration variables. */
4415 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
4417 for (chain = TYPE_VALUES (type);
4418 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
4419 chain = TREE_CHAIN (chain))
4424 if (TYPE_NAME (type) == 0)
4425 warning ("case value `%d' not in enumerated type",
4426 TREE_INT_CST_LOW (n->low));
4428 warning ("case value `%d' not in enumerated type `%s'",
4429 TREE_INT_CST_LOW (n->low),
4430 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4433 : DECL_NAME (TYPE_NAME (type))));
4435 if (!tree_int_cst_equal (n->low, n->high))
4437 for (chain = TYPE_VALUES (type);
4438 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
4439 chain = TREE_CHAIN (chain))
4444 if (TYPE_NAME (type) == 0)
4445 warning ("case value `%d' not in enumerated type",
4446 TREE_INT_CST_LOW (n->high));
4448 warning ("case value `%d' not in enumerated type `%s'",
4449 TREE_INT_CST_LOW (n->high),
4450 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4453 : DECL_NAME (TYPE_NAME (type))));
4459 /* ??? This optimization is disabled because it causes valid programs to
4460 fail. ANSI C does not guarantee that an expression with enum type
4461 will have a value that is the same as one of the enumeration literals. */
4463 /* If all values were found as case labels, make one of them the default
4464 label. Thus, this switch will never fall through. We arbitrarily pick
4465 the last one to make the default since this is likely the most
4466 efficient choice. */
4470 for (l = &case_stack->data.case_stmt.case_list;
4475 case_stack->data.case_stmt.default_label = (*l)->code_label;
4482 /* Check that all enumeration literals are covered by the case
4483 expressions of a switch. Also warn if there are any cases
4484 that are not elements of the enumerated type. */
4487 bc_check_for_full_enumeration_handling (type)
4490 struct nesting *thiscase = case_stack;
4491 struct case_node *c;
4494 /* Check for enums not handled. */
4495 for (e = TYPE_VALUES (type); e; e = TREE_CHAIN (e))
4497 for (c = thiscase->data.case_stmt.case_list->left;
4498 c && tree_int_cst_lt (c->high, TREE_VALUE (e));
4501 if (! (c && tree_int_cst_equal (c->low, TREE_VALUE (e))))
4502 warning ("enumerated value `%s' not handled in switch",
4503 IDENTIFIER_POINTER (TREE_PURPOSE (e)));
4506 /* Check for cases not in the enumeration. */
4507 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
4509 for (e = TYPE_VALUES (type);
4510 e && !tree_int_cst_equal (c->low, TREE_VALUE (e));
4514 warning ("case value `%d' not in enumerated type `%s'",
4515 TREE_INT_CST_LOW (c->low),
4516 IDENTIFIER_POINTER (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
4518 : DECL_NAME (TYPE_NAME (type))));
4522 /* Terminate a case (Pascal) or switch (C) statement
4523 in which ORIG_INDEX is the expression to be tested.
4524 Generate the code to test it and jump to the right place. */
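/* Roughly speaking (a sketch of the generated code, not literal RTL),
   a sparse switch is expanded into a tree of compare-and-branch insns,

	if (index == 1)    goto L1;
	if (index == 1000) goto L2;
	goto Ldefault;

   while a dense switch is dispatched through a jump table, as if by

	if ((unsigned) (index - minval) > range) goto Ldefault;
	goto *table[index - minval];

   The choice between the two strategies is made below, based on
   CASE_VALUES_THRESHOLD and on the span of the case values.  */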
4527 expand_end_case (orig_index)
4530 tree minval, maxval, range, orig_minval;
4531 rtx default_label = 0;
4532 register struct case_node *n;
4540 register struct nesting *thiscase = case_stack;
4544 if (output_bytecode)
4546 bc_expand_end_case (orig_index);
4550 table_label = gen_label_rtx ();
4551 index_expr = thiscase->data.case_stmt.index_expr;
4552 unsignedp = TREE_UNSIGNED (TREE_TYPE (index_expr));
4554 do_pending_stack_adjust ();
4556 /* An ERROR_MARK occurs for various reasons including invalid data type. */
4557 if (TREE_TYPE (index_expr) != error_mark_node)
4559 /* If the switch expression was an enumerated type, check that all
4560 enumeration literals are covered by the cases.
4561 No sense trying this if there's a default case, however. */
4563 if (!thiscase->data.case_stmt.default_label
4564 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
4565 && TREE_CODE (index_expr) != INTEGER_CST)
4566 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
4568 /* If this is the first label, warn if any insns have been emitted. */
4569 if (thiscase->data.case_stmt.seenlabel == 0)
4572 for (insn = get_last_insn ();
4573 insn != case_stack->data.case_stmt.start;
4574 insn = PREV_INSN (insn))
4575 if (GET_CODE (insn) != NOTE
4576 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn))!= USE))
4578 warning ("unreachable code at beginning of %s",
4579 case_stack->data.case_stmt.printname);
4584 /* If we don't have a default-label, create one here,
4585 after the body of the switch. */
4586 if (thiscase->data.case_stmt.default_label == 0)
4588 thiscase->data.case_stmt.default_label
4589 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4590 expand_label (thiscase->data.case_stmt.default_label);
4592 default_label = label_rtx (thiscase->data.case_stmt.default_label);
4594 before_case = get_last_insn ();
4596 /* Simplify the case-list before we count it. */
4597 group_case_nodes (thiscase->data.case_stmt.case_list);
4599 /* Get upper and lower bounds of case values.
4600 Also convert all the case values to the index expr's data type. */
4603 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4605 /* Check low and high label values are integers. */
4606 if (TREE_CODE (n->low) != INTEGER_CST)
4608 if (TREE_CODE (n->high) != INTEGER_CST)
4611 n->low = convert (TREE_TYPE (index_expr), n->low);
4612 n->high = convert (TREE_TYPE (index_expr), n->high);
4614 /* Count the elements and track the largest and smallest
4615 of them (treating them as signed even if they are not). */
4623 if (INT_CST_LT (n->low, minval))
4625 if (INT_CST_LT (maxval, n->high))
4628 /* A range counts double, since it requires two compares. */
4629 if (! tree_int_cst_equal (n->low, n->high))
4633 orig_minval = minval;
4635 /* Compute span of values. */
4637 range = fold (build (MINUS_EXPR, TREE_TYPE (index_expr),
4640 if (count == 0 || TREE_CODE (TREE_TYPE (index_expr)) == ERROR_MARK)
4642 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
4644 emit_jump (default_label);
4647 /* If the range of values is much bigger than the number of values,
4648 make a sequence of conditional branches instead of a dispatch.
4649 If the switch-index is a constant, do it this way
4650 because we can optimize it. */
4652 #ifndef CASE_VALUES_THRESHOLD
4654 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
4656 /* If the machine does not have a case insn that compares the
4657 bounds, this means extra overhead for dispatch tables
4658 which raises the threshold for using them. */
4659 #define CASE_VALUES_THRESHOLD 5
4660 #endif /* HAVE_casesi */
4661 #endif /* CASE_VALUES_THRESHOLD */
4663 else if (TREE_INT_CST_HIGH (range) != 0
4664 || count < CASE_VALUES_THRESHOLD
4665 || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
4667 || TREE_CODE (index_expr) == INTEGER_CST
4668 /* These will reduce to a constant. */
4669 || (TREE_CODE (index_expr) == CALL_EXPR
4670 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
4671 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
4672 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
4673 || (TREE_CODE (index_expr) == COMPOUND_EXPR
4674 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
4676 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4678 /* If the index is a short or char for which we do not have
4679 an insn to handle comparisons directly, convert it to
4680 a full integer now, rather than letting each comparison
4681 generate the conversion. */
4683 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
4684 && (cmp_optab->handlers[(int) GET_MODE(index)].insn_code
4685 == CODE_FOR_nothing))
4687 enum machine_mode wider_mode;
4688 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
4689 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
4690 if (cmp_optab->handlers[(int) wider_mode].insn_code
4691 != CODE_FOR_nothing)
4693 index = convert_to_mode (wider_mode, index, unsignedp);
4699 do_pending_stack_adjust ();
4701 index = protect_from_queue (index, 0);
4702 if (GET_CODE (index) == MEM)
4703 index = copy_to_reg (index);
4704 if (GET_CODE (index) == CONST_INT
4705 || TREE_CODE (index_expr) == INTEGER_CST)
4707 /* Make a tree node with the proper constant value
4708 if we don't already have one. */
4709 if (TREE_CODE (index_expr) != INTEGER_CST)
4712 = build_int_2 (INTVAL (index),
4713 !unsignedp && INTVAL (index) >= 0 ? 0 : -1);
4714 index_expr = convert (TREE_TYPE (index_expr), index_expr);
4717 /* For constant index expressions we need only
4718 issue an unconditional branch to the appropriate
4719 target code. The job of removing any unreachable
4720 code is left to the optimization phase if the
4721 "-O" option is specified. */
4722 for (n = thiscase->data.case_stmt.case_list;
4726 if (! tree_int_cst_lt (index_expr, n->low)
4727 && ! tree_int_cst_lt (n->high, index_expr))
4731 emit_jump (label_rtx (n->code_label));
4733 emit_jump (default_label);
4737 /* If the index expression is not constant we generate
4738 a binary decision tree to select the appropriate
4739 target code. This is done as follows:
4741 The list of cases is rearranged into a binary tree,
4742 nearly optimal assuming equal probability for each case.
4744 The tree is transformed into RTL, eliminating
4745 redundant test conditions at the same time.
4747 If program flow could reach the end of the
4748 decision tree an unconditional jump to the
4749 default code is emitted. */
4752 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
4753 && estimate_case_costs (thiscase->data.case_stmt.case_list));
4754 balance_case_nodes (&thiscase->data.case_stmt.case_list,
4756 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
4757 default_label, TREE_TYPE (index_expr));
4758 emit_jump_if_reachable (default_label);
4767 enum machine_mode index_mode = SImode;
4768 int index_bits = GET_MODE_BITSIZE (index_mode);
4770 /* Convert the index to SImode. */
4771 if (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (index_expr)))
4772 > GET_MODE_BITSIZE (index_mode))
4774 enum machine_mode omode = TYPE_MODE (TREE_TYPE (index_expr));
4775 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
4777 /* We must handle the endpoints in the original mode. */
4778 index_expr = build (MINUS_EXPR, TREE_TYPE (index_expr),
4779 index_expr, minval);
4780 minval = integer_zero_node;
4781 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4782 emit_cmp_insn (rangertx, index, LTU, NULL_RTX, omode, 1, 0);
4783 emit_jump_insn (gen_bltu (default_label));
4784 /* Now we can safely truncate. */
4785 index = convert_to_mode (index_mode, index, 0);
4789 if (TYPE_MODE (TREE_TYPE (index_expr)) != index_mode)
4790 index_expr = convert (type_for_size (index_bits, 0),
4792 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4795 index = protect_from_queue (index, 0);
4796 do_pending_stack_adjust ();
4798 emit_jump_insn (gen_casesi (index, expand_expr (minval, NULL_RTX,
4800 expand_expr (range, NULL_RTX,
4802 table_label, default_label));
4806 #ifdef HAVE_tablejump
4807 if (! win && HAVE_tablejump)
4809 index_expr = convert (thiscase->data.case_stmt.nominal_type,
4810 fold (build (MINUS_EXPR,
4811 TREE_TYPE (index_expr),
4812 index_expr, minval)));
4813 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4815 index = protect_from_queue (index, 0);
4816 do_pending_stack_adjust ();
4818 do_tablejump (index, TYPE_MODE (TREE_TYPE (index_expr)),
4819 expand_expr (range, NULL_RTX, VOIDmode, 0),
4820 table_label, default_label);
4827 /* Get table of labels to jump to, in order of case index. */
4829 ncases = TREE_INT_CST_LOW (range) + 1;
4830 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
4831 bzero (labelvec, ncases * sizeof (rtx));
4833 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4835 register HOST_WIDE_INT i
4836 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);
4841 = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
4842 if (i + TREE_INT_CST_LOW (orig_minval)
4843 == TREE_INT_CST_LOW (n->high))
4849 /* Fill in the gaps with the default. */
4850 for (i = 0; i < ncases; i++)
4851 if (labelvec[i] == 0)
4852 labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);
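/* For instance (sketch): with `case 3:' and `case 5 ... 6:' the bounds
   are 3 and 6, ncases is 4, and labelvec ends up { L3, Ldefault, L5, L5 }.  */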
4854 /* Output the table */
4855 emit_label (table_label);
4857 /* This would be a lot nicer if CASE_VECTOR_PC_RELATIVE
4858 were an expression, instead of an #ifdef/#ifndef. */
4860 #ifdef CASE_VECTOR_PC_RELATIVE
4864 emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
4865 gen_rtx (LABEL_REF, Pmode, table_label),
4866 gen_rtvec_v (ncases, labelvec)));
4868 emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
4869 gen_rtvec_v (ncases, labelvec)));
4871 /* If the case insn drops through the table,
4872 after the table we must jump to the default-label.
4873 Otherwise record no drop-through after the table. */
4874 #ifdef CASE_DROPS_THROUGH
4875 emit_jump (default_label);
4881 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
4882 reorder_insns (before_case, get_last_insn (),
4883 thiscase->data.case_stmt.start);
4885 if (thiscase->exit_label)
4886 emit_label (thiscase->exit_label);
4888 POPSTACK (case_stack);
4894 /* Terminate a case statement. EXPR is the original index expression. */
4898 bc_expand_end_case (expr)
4901 struct nesting *thiscase = case_stack;
4902 enum bytecode_opcode opcode;
4903 struct bc_label *jump_label;
4904 struct case_node *c;
4906 bc_emit_bytecode (jump);
4907 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));
4909 #ifdef DEBUG_PRINT_CODE
4910 fputc ('\n', stderr);
4913 /* Now that the size of the jump table is known, emit the actual
4914 indexed jump instruction. */
4915 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));
4917 opcode = TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode
4918 ? TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseSU : caseSI
4919 : TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseDU : caseDI;
4921 bc_emit_bytecode (opcode);
4923 /* Now emit the case instruction's literal arguments, in order.
4924 In addition to the value on the stack, it uses:
4925 1. The address of the jump table.
4926 2. The size of the jump table.
4927 3. The default label. */
4929 jump_label = bc_get_bytecode_label ();
4930 bc_emit_bytecode_labelref (jump_label);
4931 bc_emit_bytecode_const ((char *) &thiscase->data.case_stmt.num_ranges,
4932 sizeof thiscase->data.case_stmt.num_ranges);
4934 if (thiscase->data.case_stmt.default_label)
4935 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (thiscase->data.case_stmt.default_label)));
4937 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));
4939 /* Output the jump table. */
4941 bc_align_bytecode (3 /* PTR_ALIGN */);
4942 bc_emit_bytecode_labeldef (jump_label);
4944 if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode)
4945 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
4947 opcode = TREE_INT_CST_LOW (c->low);
4948 bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);
4950 opcode = TREE_INT_CST_LOW (c->high);
4951 bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);
4953 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
4956 if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == DImode)
4957 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
4959 bc_emit_bytecode_DI_const (c->low);
4960 bc_emit_bytecode_DI_const (c->high);
4962 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
4969 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->exit_label));
4971 /* Possibly issue enumeration warnings. */
4973 if (!thiscase->data.case_stmt.default_label
4974 && TREE_CODE (TREE_TYPE (expr)) == ENUMERAL_TYPE
4975 && TREE_CODE (expr) != INTEGER_CST
4977 check_for_full_enumeration_handling (TREE_TYPE (expr));
4980 #ifdef DEBUG_PRINT_CODE
4981 fputc ('\n', stderr);
4984 POPSTACK (case_stack);
4988 /* Return unique bytecode ID. */
4993 static int bc_uid = 0;
4998 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5001 do_jump_if_equal (op1, op2, label, unsignedp)
5002 rtx op1, op2, label;
5005 if (GET_CODE (op1) == CONST_INT
5006 && GET_CODE (op2) == CONST_INT)
5008 if (INTVAL (op1) == INTVAL (op2))
5013 enum machine_mode mode = GET_MODE (op1);
5014 if (mode == VOIDmode)
5015 mode = GET_MODE (op2);
5016 emit_cmp_insn (op1, op2, EQ, NULL_RTX, mode, unsignedp, 0);
5017 emit_jump_insn (gen_beq (label));
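/* For example (illustrative): called with two CONST_INT operands, say
   const0_rtx twice, the comparison is folded at compile time and at most
   an unconditional jump is emitted; with non-constant operands a cmp insn
   and a beq branch are emitted, as above.  */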
5021 /* Not all case values are encountered equally. This function
5022 uses a heuristic to weight case labels, in cases where that
5023 looks like a reasonable thing to do.
5025 Right now, all we try to guess is text, and we establish a table of weights
5028 (e.g. chars above space: 16, with rarer control characters weighted less).
5037 If we find any cases in the switch that are not either -1 or in the range
5038 of valid ASCII characters, or are control characters other than those
5039 commonly used with "\", don't treat this switch scanning text.
5041 Return 1 if these nodes are suitable for cost estimation, otherwise return 0. */
5045 estimate_case_costs (node)
5048 tree min_ascii = build_int_2 (-1, -1);
5049 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5053 /* If we haven't already made the cost table, make it now. Note that the
5054 lower bound of the table is -1, not zero. */
5056 if (cost_table == NULL)
5058 cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
5059 bzero (cost_table - 1, 129 * sizeof (short));
5061 for (i = 0; i < 128; i++)
5065 else if (ispunct (i))
5067 else if (iscntrl (i))
5071 cost_table[' '] = 8;
5072 cost_table['\t'] = 4;
5073 cost_table['\0'] = 4;
5074 cost_table['\n'] = 2;
5075 cost_table['\f'] = 1;
5076 cost_table['\v'] = 1;
5077 cost_table['\b'] = 1;
5080 /* See if all the case expressions look like text. It is text if the
5081 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5082 as signed arithmetic since we don't want to ever access cost_table with a
5083 value less than -1. Also check that none of the constants in a range
5084 are strange control characters. */
5086 for (n = node; n; n = n->right)
5088 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5091 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
5092 if (cost_table[i] < 0)
5096 /* All interesting values are within the range of interesting
5097 ASCII characters. */
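/* As an illustration of where this heuristic fires (a sketch), a scanner's

	switch (*p)
	  {
	  case 'a' ... 'z':  ident ();    break;
	  case '0' ... '9':  number ();   break;
	  case ' ': case '\t':            break;
	  case '\n':         newline ();  break;
	  }

   has all its case values in the -1..127 range, so cost estimation
   succeeds; letters and digits get the largest weights and newline one of
   the smallest, so balance_case_nodes gives the common cases more weight
   when it bisects the case list by cost.  */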
5101 /* Scan an ordered list of case nodes
5102 combining those with consecutive values or ranges.
5104 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
5107 group_case_nodes (head)
5110 case_node_ptr node = head;
5114 rtx lb = next_real_insn (label_rtx (node->code_label));
5115 case_node_ptr np = node;
5117 /* Try to group the successors of NODE with NODE. */
5118 while (((np = np->right) != 0)
5119 /* Do they jump to the same place? */
5120 && next_real_insn (label_rtx (np->code_label)) == lb
5121 /* Are their ranges consecutive? */
5122 && tree_int_cst_equal (np->low,
5123 fold (build (PLUS_EXPR,
5124 TREE_TYPE (node->high),
5127 /* An overflow is not consecutive. */
5128 && tree_int_cst_lt (node->high,
5129 fold (build (PLUS_EXPR,
5130 TREE_TYPE (node->high),
5132 integer_one_node))))
5134 node->high = np->high;
5136 /* NP is the first node after NODE which can't be grouped with it.
5137 Delete the nodes in between, and move on to that node. */
5143 /* Take an ordered list of case nodes
5144 and transform them into a near optimal binary tree,
5145 on the assumption that any target code selection value is as
5146 likely as any other.
5148 The transformation is performed by splitting the ordered
5149 list into two equal sections plus a pivot. The parts are
5150 then attached to the pivot as left and right branches. Each
5151 branch is then transformed recursively. */
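/* For instance (illustrative, assuming every case has equal cost), the
   ordered list 1 2 3 4 5 6 7 is split at 4, giving the tree

	         4
	     2       6
	   1   3   5   7

   and each half is then balanced recursively in the same way.  */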
5154 balance_case_nodes (head, parent)
5155 case_node_ptr *head;
5156 case_node_ptr parent;
5158 register case_node_ptr np;
5166 register case_node_ptr *npp;
5169 /* Count the number of entries on branch. Also count the ranges. */
5173 if (!tree_int_cst_equal (np->low, np->high))
5177 cost += cost_table[TREE_INT_CST_LOW (np->high)];
5181 cost += cost_table[TREE_INT_CST_LOW (np->low)];
5189 /* Split this list if it is long enough for that to help. */
5194 /* Find the place in the list that bisects the list's total cost.
5195 Here I gets half the total cost. */
5200 /* Skip nodes while their cost does not reach that amount. */
5201 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5202 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
5203 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
5206 npp = &(*npp)->right;
5211 /* Leave this branch lopsided, but optimize left-hand
5212 side and fill in `parent' fields for right-hand side. */
5214 np->parent = parent;
5215 balance_case_nodes (&np->left, np);
5216 for (; np->right; np = np->right)
5217 np->right->parent = np;
5221 /* If there are just three nodes, split at the middle one. */
5223 npp = &(*npp)->right;
5226 /* Find the place in the list that bisects the list's total cost,
5227 where ranges count as 2.
5228 Here I gets half the total cost. */
5229 i = (i + ranges + 1) / 2;
5232 /* Skip nodes while their cost does not reach that amount. */
5233 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5238 npp = &(*npp)->right;
5243 np->parent = parent;
5246 /* Optimize each of the two split parts. */
5247 balance_case_nodes (&np->left, np);
5248 balance_case_nodes (&np->right, np);
5252 /* Else leave this branch as one level,
5253 but fill in `parent' fields. */
5255 np->parent = parent;
5256 for (; np->right; np = np->right)
5257 np->right->parent = np;
5262 /* Search the parent sections of the case node tree
5263 to see if a test for the lower bound of NODE would be redundant.
5264 INDEX_TYPE is the type of the index expression.
5266 The instructions to generate the case decision tree are
5267 output in the same order as nodes are processed so it is
5268 known that if a parent node checks the range of the current
5269 node minus one, then the current node is bounded at its lower
5270 end. Thus the test would be redundant. */
5273 node_has_low_bound (node, index_type)
5278 case_node_ptr pnode;
5280 /* If the lower bound of this node is the lowest value in the index type,
5281 we need not test it. */
5283 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5286 /* If this node has a left branch, the value at the left must be less
5287 than that at this node, so it cannot be bounded at the bottom and
5288 we need not bother testing any further. */
5293 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5294 node->low, integer_one_node));
5296 /* If the subtraction above overflowed, we can't verify anything.
5297 Otherwise, look for a parent that tests our value - 1. */
5299 if (! tree_int_cst_lt (low_minus_one, node->low))
5302 for (pnode = node->parent; pnode; pnode = pnode->parent)
5303 if (tree_int_cst_equal (low_minus_one, pnode->high))
5309 /* Search the parent sections of the case node tree
5310 to see if a test for the upper bound of NODE would be redundant.
5311 INDEX_TYPE is the type of the index expression.
5313 The instructions to generate the case decision tree are
5314 output in the same order as nodes are processed so it is
5315 known that if a parent node checks the range of the current
5316 node plus one, then the current node is bounded at its upper
5317 end. Thus the test would be redundant. */
5320 node_has_high_bound (node, index_type)
5325 case_node_ptr pnode;
5327 /* If the upper bound of this node is the highest value in the type
5328 of the index expression, we need not test against it. */
5330 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5333 /* If this node has a right branch, the value at the right must be greater
5334 than that at this node, so it cannot be bounded at the top and
5335 we need not bother testing any further. */
5340 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5341 node->high, integer_one_node));
5343 /* If the addition above overflowed, we can't verify anything.
5344 Otherwise, look for a parent that tests our value + 1. */
5346 if (! tree_int_cst_lt (node->high, high_plus_one))
5349 for (pnode = node->parent; pnode; pnode = pnode->parent)
5350 if (tree_int_cst_equal (high_plus_one, pnode->low))
5356 /* Search the parent sections of the
5357 case node tree to see if both tests for the upper and lower
5358 bounds of NODE would be redundant. */
5361 node_is_bounded (node, index_type)
5365 return (node_has_low_bound (node, index_type)
5366 && node_has_high_bound (node, index_type));
5369 /* Emit an unconditional jump to LABEL unless it would be dead code. */
5372 emit_jump_if_reachable (label)
5375 if (GET_CODE (get_last_insn ()) != BARRIER)
5379 /* Emit step-by-step code to select a case for the value of INDEX.
5380 The thus generated decision tree follows the form of the
5381 case-node binary tree NODE, whose nodes represent test conditions.
5382 INDEX_TYPE is the type of the index of the switch.
5384 Care is taken to prune redundant tests from the decision tree
5385 by detecting any boundary conditions already checked by
5386 emitted rtx. (See node_has_high_bound, node_has_low_bound
5387 and node_is_bounded, above.)
5389 Where the test conditions can be shown to be redundant we emit
5390 an unconditional jump to the target code. As a further
5391 optimization, the subordinates of a tree node are examined to
5392 check for bounded nodes. In this case conditional and/or
5393 unconditional jumps as a result of the boundary check for the
5394 current node are arranged to target the subordinates' associated
5395 code for out-of-bound conditions on the current node.
5397 We can assume that when control reaches the code generated here,
5398 the index value has already been compared with the parents
5399 of this node, and determined to be on the same side of each parent
5400 as this node is. Thus, if this node tests for the value 51,
5401 and a parent tested for 52, we don't need to consider
5402 the possibility of a value greater than 51. If another parent
5403 tests for the value 50, then this node need not test anything. */
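/* A concrete sketch (not literal output) of what this emits for the
   balanced tree built from `case 2: case 5: case 8:', with 5 at the root:

	if (index == 5) goto L5;
	if (index > 5)  goto Ltest;
	if (index == 2) goto L2;
	goto Ldefault;
     Ltest:
	if (index == 8) goto L8;

   The final jump to Ldefault, taken when nothing matched, is emitted by
   expand_end_case after this function returns.  */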
5406 emit_case_nodes (index, node, default_label, index_type)
5412 /* If INDEX has an unsigned type, we must make unsigned branches. */
5413 int unsignedp = TREE_UNSIGNED (index_type);
5414 typedef rtx rtx_function ();
5415 rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
5416 rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
5417 rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
5418 rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
5419 enum machine_mode mode = GET_MODE (index);
5421 /* See if our parents have already tested everything for us.
5422 If they have, emit an unconditional jump for this node. */
5423 if (node_is_bounded (node, index_type))
5424 emit_jump (label_rtx (node->code_label));
5426 else if (tree_int_cst_equal (node->low, node->high))
5428 /* Node is single valued. First see if the index expression matches
5429 this node and then check our children, if any. */
5431 do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
5432 label_rtx (node->code_label), unsignedp);
5434 if (node->right != 0 && node->left != 0)
5436 /* This node has children on both sides.
5437 Dispatch to one side or the other
5438 by comparing the index value with this node's value.
5439 If one subtree is bounded, check that one first,
5440 so we can avoid real branches in the tree. */
5442 if (node_is_bounded (node->right, index_type))
5444 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5446 GT, NULL_RTX, mode, unsignedp, 0);
5448 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
5449 emit_case_nodes (index, node->left, default_label, index_type);
5452 else if (node_is_bounded (node->left, index_type))
5454 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5456 LT, NULL_RTX, mode, unsignedp, 0);
5457 emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
5458 emit_case_nodes (index, node->right, default_label, index_type);
5463 /* Neither node is bounded. First distinguish the two sides;
5464 then emit the code for one side at a time. */
5467 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5469 /* See if the value is on the right. */
5470 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5472 GT, NULL_RTX, mode, unsignedp, 0);
5473 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
5475 /* Value must be on the left.
5476 Handle the left-hand subtree. */
5477 emit_case_nodes (index, node->left, default_label, index_type);
5478 /* If left-hand subtree does nothing, go to default. */
5480 emit_jump_if_reachable (default_label);
5482 /* Code branches here for the right-hand subtree. */
5483 expand_label (test_label);
5484 emit_case_nodes (index, node->right, default_label, index_type);
5488 else if (node->right != 0 && node->left == 0)
5490 /* Here we have a right child but no left so we issue a conditional
5491 branch to default and process the right child.
5493 Omit the conditional branch to default if it would only avoid a single
5494 right child; it costs too much space to save so little time. */
5496 if (node->right->right || node->right->left
5497 || !tree_int_cst_equal (node->right->low, node->right->high))
5499 if (!node_has_low_bound (node, index_type))
5501 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5503 LT, NULL_RTX, mode, unsignedp, 0);
5504 emit_jump_insn ((*gen_blt_pat) (default_label));
5507 emit_case_nodes (index, node->right, default_label, index_type);
5510 /* We cannot process node->right normally
5511 since we haven't ruled out the numbers less than
5512 this node's value. So handle node->right explicitly. */
5513 do_jump_if_equal (index,
5514 expand_expr (node->right->low, NULL_RTX,
5516 label_rtx (node->right->code_label), unsignedp);
5519 else if (node->right == 0 && node->left != 0)
5521 /* Just one subtree, on the left. */
5523 #if 0 /* The following code and comment were formerly part
5524 of the condition here, but they didn't work
5525 and I don't understand what the idea was. -- rms. */
5526 /* If our "most probable entry" is less probable
5527 than the default label, emit a jump to
5528 the default label using condition codes
5529 already lying around. With no right branch,
5530 a branch-greater-than will get us to the default label. */
5533 && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
5536 if (node->left->left || node->left->right
5537 || !tree_int_cst_equal (node->left->low, node->left->high))
5539 if (!node_has_high_bound (node, index_type))
5541 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5543 GT, NULL_RTX, mode, unsignedp, 0);
5544 emit_jump_insn ((*gen_bgt_pat) (default_label));
5547 emit_case_nodes (index, node->left, default_label, index_type);
5550 /* We cannot process node->left normally
5551 since we haven't ruled out the numbers greater than
5552 this node's value. So handle node->left explicitly. */
5553 do_jump_if_equal (index,
5554 expand_expr (node->left->low, NULL_RTX,
5556 label_rtx (node->left->code_label), unsignedp);
5561 /* Node is a range. These cases are very similar to those for a single
5562 value, except that we do not start by testing whether this node
5563 is the one to branch to. */
5565 if (node->right != 0 && node->left != 0)
5567 /* Node has subtrees on both sides.
5568 If the right-hand subtree is bounded,
5569 test for it first, since we can go straight there.
5570 Otherwise, we need to make a branch in the control structure,
5571 then handle the two subtrees. */
5572 tree test_label = 0;
5574 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5576 GT, NULL_RTX, mode, unsignedp, 0);
5578 if (node_is_bounded (node->right, index_type))
5579 /* Right hand node is fully bounded so we can eliminate any
5580 testing and branch directly to the target code. */
5581 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
5584 /* Right hand node requires testing.
5585 Branch to a label where we will handle it later. */
5587 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5588 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
5591 /* Value belongs to this node or to the left-hand subtree. */
5593 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
5594 GE, NULL_RTX, mode, unsignedp, 0);
5595 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
5597 /* Handle the left-hand subtree. */
5598 emit_case_nodes (index, node->left, default_label, index_type);
5600 /* If right node had to be handled later, do that now. */
5604 /* If the left-hand subtree fell through,
5605 don't let it fall into the right-hand subtree. */
5606 emit_jump_if_reachable (default_label);
5608 expand_label (test_label);
5609 emit_case_nodes (index, node->right, default_label, index_type);
5613 else if (node->right != 0 && node->left == 0)
5615 /* Deal with values to the left of this node,
5616 if they are possible. */
5617 if (!node_has_low_bound (node, index_type))
5619 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
5621 LT, NULL_RTX, mode, unsignedp, 0);
5622 emit_jump_insn ((*gen_blt_pat) (default_label));
5625 /* Value belongs to this node or to the right-hand subtree. */
5627 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5629 LE, NULL_RTX, mode, unsignedp, 0);
5630 emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));
5632 emit_case_nodes (index, node->right, default_label, index_type);
5635 else if (node->right == 0 && node->left != 0)
5637 /* Deal with values to the right of this node,
5638 if they are possible. */
5639 if (!node_has_high_bound (node, index_type))
5641 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5643 GT, NULL_RTX, mode, unsignedp, 0);
5644 emit_jump_insn ((*gen_bgt_pat) (default_label));
5647 /* Value belongs to this node or to the left-hand subtree. */
5649 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
5650 GE, NULL_RTX, mode, unsignedp, 0);
5651 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
5653 emit_case_nodes (index, node->left, default_label, index_type);
5658 /* Node has no children so we check low and high bounds to remove
5659 redundant tests. Only one of the bounds can exist,
5660 since otherwise this node is bounded--a case tested already. */
5662 if (!node_has_high_bound (node, index_type))
5664 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5666 GT, NULL_RTX, mode, unsignedp, 0);
5667 emit_jump_insn ((*gen_bgt_pat) (default_label));
5670 if (!node_has_low_bound (node, index_type))
5672 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
5674 LT, NULL_RTX, mode, unsignedp, 0);
5675 emit_jump_insn ((*gen_blt_pat) (default_label));
5678 emit_jump (label_rtx (node->code_label));
5683 /* These routines are used by the loop unrolling code. They copy BLOCK trees
5684 so that the debugging info will be correct for the unrolled loop. */
5686 /* Indexed by block number, contains a pointer to the N'th block node. */
5688 static tree *block_vector;
5691 find_loop_tree_blocks ()
5693 tree block = DECL_INITIAL (current_function_decl);
5695 /* The first block is for the function body, and does not have
5696 corresponding block notes. Don't include it in the block vector. */
5697 block = BLOCK_SUBBLOCKS (block);
5699 block_vector = identify_blocks (block, get_insns ());
5703 unroll_block_trees ()
5705 tree block = DECL_INITIAL (current_function_decl);
5707 reorder_blocks (block_vector, block, get_insns ());