/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 92, 93, 1994 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */
/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
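/* A minimal sketch (not part of the compiler) of that call sequence, as
   a front end might perform it for `if (cond) then_stmt;'.  The helper
   name and tree arguments are hypothetical.  */
#if 0
static void
sketch_expand_if_then (cond, then_stmt)
     tree cond, then_stmt;
{
  expand_start_cond (cond, 0);  /* emit the test; 0 = not an exit target */
  expand_expr_stmt (then_stmt); /* expand the then-clause */
  expand_end_cond ();           /* emit the label the test branches to */
}
#endif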
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "hard-reg-set.h"
#include "bc-typecd.h"
#include "bc-opcode.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;
/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Each time we expand the end of a binding contour (in `expand_end_bindings')
   and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
   This is used by the `remember_end_note' function to record the endpoint
   of each generated block in its associated BLOCK node.  */

static rtx last_block_end_note;
/* Number of binding contours started so far in this function.  */

int block_start_count;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */

extern rtx save_expr_regs;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */

extern int frame_offset;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */

extern rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */

extern rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

extern rtx arg_pointer_save_area;

/* Chain of all RTL_EXPRs that have insns in them.  */

extern tree rtl_expr_chain;
#if 0  /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   A chain of case nodes is initially maintained via the RIGHT fields
   in the nodes.  Nodes with higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */

struct case_node
{
  struct case_node *left;   /* Left son in binary tree */
  struct case_node *right;  /* Right son in binary tree; also node chain */
  struct case_node *parent; /* Parent of node in binary tree */
  tree low;                 /* Lowest index value for this label */
  tree high;                /* Highest index value for this label */
  tree code_label;          /* Label to jump to when node matches */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
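/* A minimal sketch (not part of the compiler): how a single-value or
   range case label might be linked into the chain described above.
   The helper name is hypothetical; the real chain is built by the
   `pushcase'-style routines elsewhere in this file.  */
#if 0
static struct case_node *
sketch_add_case (low, high, label, rest)
     tree low, high, label;
     struct case_node *rest;
{
  struct case_node *n
    = (struct case_node *) oballoc (sizeof (struct case_node));

  n->left = 0;
  n->parent = 0;
  n->right = rest;      /* chain through RIGHT, in ascending order */
  n->low = low;
  n->high = high;       /* same as LOW for a single-value label */
  n->code_label = label;
  return n;
}
#endif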
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;

static int estimate_case_costs ();
static void balance_case_nodes ();
static void emit_case_nodes ();
static void group_case_nodes ();
static void emit_jump_if_reachable ();

static int warn_if_unused_value ();
static void expand_goto_internal ();
static void bc_expand_goto_internal ();
static int expand_fixup ();
static void bc_expand_fixup ();
static void bc_fixup_gotos ();
void free_temp_slots ();
static void expand_cleanups ();
static void expand_null_return_1 ();
static int tail_recursion_args ();
static void do_jump_if_equal ();
int bc_expand_exit_loop_if_false ();
void bc_expand_start_cond ();
void bc_expand_end_cond ();
void bc_expand_start_else ();
void bc_expand_end_bindings ();
void bc_expand_start_case ();
void bc_check_for_full_enumeration_handling ();
void bc_expand_end_case ();
void bc_expand_decl ();

extern rtx bc_allocate_local ();
extern rtx bc_allocate_variable_array ();
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
  struct nesting *next;

      /* For conds (if-then and if-then-else statements).  */

      /* Label for the end of the if construct.
         There is none if EXITFLAG was not set
         and no `else' has been seen yet.  */
      /* Label for the end of this alternative.
         This may be the end of the if or the next else/elseif.  */

      /* Label at the top of the loop; place to loop back to.  */
      /* Label at the end of the whole construct.  */
      /* Label for `continue' statement to jump to;
         this is in front of the stepper of the loop.  */

      /* For variable binding contours.  */

      /* Sequence number of this binding contour within the function,
         in order of entry.  */
      int block_start_count;
      /* Nonzero => value to restore stack to on exit.  Complemented by
         bc_stack_level (see below) when generating bytecodes.  */
      /* The NOTE that starts this contour.
         Used by expand_goto to check whether the destination
         is within each contour or not.  */
      /* Innermost containing binding contour that has a stack level.  */
      struct nesting *innermost_stack_block;
      /* List of cleanups to be run on exit from this contour.
         This is a list of expressions to be evaluated.
         The TREE_PURPOSE of each link is the ..._DECL node
         which the cleanup pertains to.  */
      /* List of cleanup-lists of blocks containing this block,
         as they were at the locus where this block appears.
         There is an element for each containing block,
         ordered innermost containing block first.
         The tail of this list can be 0 (was empty_cleanup_list),
         if all remaining elements would be empty lists.
         The element's TREE_VALUE is the cleanup-list of that block,
         which may be null.  */
      /* Chain of labels defined inside this binding contour.
         For contours that have stack levels or cleanups.  */
      struct label_chain *label_chain;
      /* Number of function calls seen, as of start of this block.  */
      int function_call_count;
      /* Bytecode specific: stack level to restore stack to on exit.  */

      /* For switch (C) or case (Pascal) statements,
         and also for dummies (see `expand_start_case_dummy').  */

      /* The insn after which the case dispatch should finally
         be emitted.  Zero for a dummy.  */
      /* For bytecodes, the case table is in-lined right in the code.
         A label is needed for skipping over this block.  It is only
         used when generating bytecodes.  */
      /* A list of case labels, kept in ascending order by value
         as the list is built.
         During expand_end_case, this list may be rearranged into a
         nearly balanced binary tree.  */
      struct case_node *case_list;
      /* Label to jump to if no case matches.  */
      /* The expression to be dispatched on.  */
      /* Type that INDEX_EXPR should be converted to.  */
      /* Number of range exprs in case statement.  */
      /* Name of this kind of statement, for warnings.  */
      /* Nonzero if a case label has been seen in this case stmt.  */

      /* For exception contours.  */

      /* List of exceptions raised.  This is a TREE_LIST
         of whatever you want.  */
      /* List of exceptions caught.  This is also a TREE_LIST
         of whatever you want.  As a special case, it has the
         value `void_type_node' if it handles default exceptions.  */
      /* First insn of TRY block, in case resumptive model is needed.  */
      /* Label for the catch clauses.  */
      /* Label for unhandled exceptions.  */
      /* Label at the end of whole construct.  */
      /* Label which "escapes" the exception construct.
         Like EXIT_LABEL for BREAK construct, but for exceptions.  */
/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* If any new stacks are added here, add them to POPSTACK too.  */

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Chain of all pending exception contours.  */
struct nesting *except_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))
/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)                                 \
do { struct nesting *target = STACK;                    \
     struct nesting *this;                              \
     do { this = nesting_stack;                         \
          if (loop_stack == this)                       \
            loop_stack = loop_stack->next;              \
          if (cond_stack == this)                       \
            cond_stack = cond_stack->next;              \
          if (block_stack == this)                      \
            block_stack = block_stack->next;            \
          if (stack_block_stack == this)                \
            stack_block_stack = stack_block_stack->next; \
          if (case_stack == this)                       \
            case_stack = case_stack->next;              \
          if (except_stack == this)                     \
            except_stack = except_stack->next;          \
          nesting_depth = nesting_stack->depth - 1;     \
          nesting_stack = this->all;                    \
          obstack_free (&stmt_obstack, this); }         \
     while (this != target); } while (0)
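/* A minimal sketch (not part of the compiler) of how a start/end pair
   for one construct uses ALLOC_NESTING and POPSTACK; the construct kind
   (`loop_stack' here) and the field initialization are abridged.  */
#if 0
static void
sketch_start_and_end_construct ()
{
  struct nesting *thisloop = ALLOC_NESTING ();

  /* `expand_start_WHATEVER': push onto the per-kind and `all' chains.  */
  thisloop->next = loop_stack;
  thisloop->all = nesting_stack;
  thisloop->depth = ++nesting_depth;
  loop_stack = thisloop;
  nesting_stack = thisloop;

  /* ... expand the body of the construct ...  */

  /* `expand_end_WHATEVER': pop this construct and everything above it.  */
  POPSTACK (loop_stack);
}
#endif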
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  /* The BLOCK for the place where this goto was found.  */
  /* The CODE_LABEL rtx that this is jumping to.  */
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0 (was empty_cleanup_list),
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;

  /* Bytecode specific members follow.  */

  /* The label that this jump is jumping to, or 0 for break, continue
     or return.  */
  struct bc_label *bc_target;

  /* The label we use for the fixup patch.  */
  struct bc_label *label;

  /* True (non-0) if fixup has been handled.  */

  /* Like stack_level above, except refers to the interpreter stack.  */

static struct goto_fixup *goto_fixup_chain;

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

  /* Points to following fixup.  */
  struct label_chain *next;
  gcc_obstack_init (&stmt_obstack);

  empty_cleanup_list = build_tree_list (NULL_TREE, NULL_TREE);

init_stmt_for_function ()

  /* We are not currently within any block, conditional, loop or case.  */
  stack_block_stack = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
restore_stmt_status (p)

  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
/* Emit a no-op instruction.  */

  if (!output_bytecode)
    {
      last_insn = get_last_insn ();
          && (GET_CODE (last_insn) == CODE_LABEL
              || prev_real_insn (last_insn) == 0))
        emit_insn (gen_nop ());
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

  if (TREE_CODE (label) != LABEL_DECL)

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
/* Add an unconditional jump to LABEL as the next sequential instruction.  */

  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

expand_computed_goto (exp)

      bc_expand_expr (exp);
      bc_emit_instruction (jumpP);

      rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      emit_indirect_jump (x);
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

  struct label_chain *p;

      if (! DECL_RTL (label))
        DECL_RTL (label) = bc_gen_rtx ((char *) 0, 0, bc_get_bytecode_label ());
      if (! bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (DECL_RTL (label))))
        error ("multiply defined label");

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
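/* A minimal sketch (not part of the compiler): the call sequence a C
   front end might use for `goto lab; ... lab: ;'.  When the goto
   precedes the label, expand_goto records a fixup that is resolved once
   the label's binding contour is exited.  */
#if 0
static void
sketch_goto_then_label (lab)
     tree lab;                  /* a LABEL_DECL */
{
  expand_goto (lab);            /* forward jump; may create a fixup */
  /* ... intervening statements ...  */
  expand_label (lab);           /* define the jump target */
}
#endif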
/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

declare_nonlocal_label (label)

  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slot == 0)
      nonlocal_goto_handler_slot
        = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_stack_save (SAVE_NONLOCAL,
                       &nonlocal_goto_stack_level,
                       PREV_INSN (tail_recursion_reentry));
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

      expand_goto_internal (label, label_rtx (label), NULL_RTX);

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx (LABEL_REF, Pmode, label_rtx (label));

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
         case the virtual stack vars register gets instantiated differently
         in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
        emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
                                      copy_rtx (p->nonlocal_goto_handler_slot),
                                      copy_rtx (p->nonlocal_goto_stack_level),

          /* Restore frame pointer for containing function.
             This sets the actual hard register used for the frame pointer
             to the location of the function's incoming static chain info.
             The non-local goto handler will then adjust it to contain the
             proper value and reload the argument pointer, if needed.  */
          emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));

          /* We have now loaded the frame pointer hardware register with
             the address that corresponds to the start of the virtual
             stack vars.  So replace virtual_stack_vars_rtx in all
             addresses we use with stack_pointer_rtx.  */

          /* Get addr of containing function's current nonlocal goto handler,
             which will do any cleanups and then jump to the label.  */
          addr = copy_rtx (p->nonlocal_goto_handler_slot);
          temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
                                           hard_frame_pointer_rtx));

          /* Restore the stack pointer.  Note this uses fp just restored.  */
          addr = p->nonlocal_goto_stack_level;
            addr = replace_rtx (copy_rtx (addr),
                                virtual_stack_vars_rtx,
                                hard_frame_pointer_rtx);

          emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

          /* Put in the static chain register the nonlocal label address.  */
          emit_move_insn (static_chain_rtx, label_ref);
          /* USE of hard_frame_pointer_rtx added for consistency; not clear if
             really needed.  */
          emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
          emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
          emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
          emit_indirect_jump (temp);

    expand_goto_internal (label, label_rtx (label), NULL_RTX);
/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

expand_goto_internal (body, label, last_insn)

  struct nesting *block;

  /* NOTICE!  If a bytecode instruction other than `jump' is needed,
     then the caller has to call bc_expand_goto_internal()
     directly.  This is rather an exceptional case, and there aren't
     that many places where this is necessary.  */

      expand_goto_internal (body, label, last_insn);

  if (GET_CODE (label) != CODE_LABEL)

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
      /* Find the innermost pending block that contains the label.
         (Check containment by comparing insn-uids.)
         Then restore the outermost stack level within that block,
         and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
          if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))

          if (block->data.block.stack_level != 0)
            stack_level = block->data.block.stack_level;
          /* Execute the cleanups for blocks we are exiting.  */
          if (block->data.block.cleanups != 0)
              expand_cleanups (block->data.block.cleanups, NULL_TREE);
              do_pending_stack_adjust ();

          /* Ensure stack adjust isn't done by emit_jump, as this would clobber
             the stack pointer.  This one should be deleted as dead by flow.  */
          clear_pending_stack_adjust ();
          do_pending_stack_adjust ();
          emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);

      if (body != 0 && DECL_TOO_LATE (body))
        error ("jump to `%s' invalidly jumps into binding contour",
               IDENTIFIER_POINTER (DECL_NAME (body)));

  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
      /* No fixup needed.  Record that the label is the target
         of at least one goto that has no fixup.  */
        TREE_ADDRESSABLE (body) = 1;
/* Generate a jump with OPCODE to the given bytecode LABEL which is
   found within BODY.  */

bc_expand_goto_internal (opcode, label, body)
     enum bytecode_opcode opcode;
     struct bc_label *label;

  struct nesting *block;
  int stack_level = -1;

  /* If the label is defined, adjust the stack as necessary.
     If it's not defined, we have to push the reference on the
     fixup list.  */

      /* Find the innermost pending block that contains the label.
         (Check containment by comparing bytecode uids.)  Then restore the
         outermost stack level within that block.  */

      for (block = block_stack; block; block = block->next)
          if (BYTECODE_BC_LABEL (block->data.block.first_insn)->uid < label->uid)

          if (block->data.block.bc_stack_level)
            stack_level = block->data.block.bc_stack_level;

          /* Execute the cleanups for blocks we are exiting.  */
          if (block->data.block.cleanups != 0)
              expand_cleanups (block->data.block.cleanups, NULL_TREE);
              do_pending_stack_adjust ();

      /* Restore the stack level.  If we need to adjust the stack, we
         must do so after the jump, since the jump may depend on
         what's on the stack.  Thus, any stack-modifying conditional
         jumps (these are the only ones that rely on what's on the
         stack) go into the fixup list.  */

          && stack_depth != stack_level

        bc_expand_fixup (opcode, label, stack_level);

          if (stack_level >= 0)
            bc_adjust_stack (stack_depth - stack_level);

          if (body && DECL_BIT_FIELD (body))
            error ("jump to `%s' invalidly jumps into binding contour",
                   IDENTIFIER_POINTER (DECL_NAME (body)));

          /* Emit immediate jump */
          bc_emit_bytecode (opcode);
          bc_emit_bytecode_labelref (label);

#ifdef DEBUG_PRINT_CODE
          fputc ('\n', stderr);
#endif

    /* Put goto in the fixup list */
    bc_expand_fixup (opcode, label, stack_level);
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */
expand_fixup (tree_label, rtl_label, last_insn)

  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

      && (rtl_label == cond_stack->data.cond.endif_label
          || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
      && (rtl_label == loop_stack->data.loop.start_label
          || rtl_label == loop_stack->data.loop.end_label
          || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;

  /* Now set END_BLOCK to the binding level to which we will return.  */

      struct nesting *next_block = end_block->all;

      /* First see if the END_BLOCK is inside the innermost binding level.
         If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
        next_block = next_block->all;

      /* Otherwise, set END_BLOCK to the innermost binding level
         which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
        if (block == next_block)
          next_block = next_block->next;
      end_block = next_block;

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
        || block->data.block.cleanups != 0)

  if (block != end_block)
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
        = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
         after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
         doing the stack adjust here isn't useful.  Doing it with our
         settings at that location isn't useful either.  Let's hope
         someone does it right.  */
      do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
         NOTE_INSN_BEGIN_BLOCK and NOTE_INSN_END_BLOCK notes at
         this point.  The notes will encapsulate any and all fixup
         code which we might later insert at this point in the insn
         stream.  Also, the BLOCK node will be the parent (i.e. the
         `SUPERBLOCK') of any other BLOCK nodes which we might create
         later on when we are expanding the fixup code.  */

        register rtx original_before_jump
          = last_insn ? last_insn : get_last_insn ();

        fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
        last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
        fixup->context = poplevel (1, 0, 0);  /* Create the BLOCK node now! */
        emit_insns_after (fixup->before_jump, original_before_jump);

      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
        = (((block->data.block.outer_cleanups
             && block->data.block.outer_cleanups != empty_cleanup_list
            || block->data.block.cleanups)
           ? tree_cons (NULL_TREE, block->data.block.cleanups,
                        block->data.block.outer_cleanups)
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
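/* For example (a sketch, not compiler code): in

       { int n = f (); char buf[n]; goto lab; }
     lab: ;

   the block containing BUF has a stack level, so the forward goto out
   of it cannot be finished until `lab' is seen; expand_fixup records
   the jump so that fixup_gotos can patch in the stack restore later.  */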
/* Generate bytecode jump with OPCODE to a fixup routine that links to LABEL.
   Make the fixup restore the stack level to STACK_LEVEL.  */

bc_expand_fixup (opcode, label, stack_level)
     enum bytecode_opcode opcode;
     struct bc_label *label;

  struct goto_fixup *fixup
    = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));

  fixup->label = bc_get_bytecode_label ();
  fixup->bc_target = label;
  fixup->bc_stack_level = stack_level;
  fixup->bc_handled = FALSE;

  fixup->next = goto_fixup_chain;
  goto_fixup_chain = fixup;

  /* Insert a jump to the fixup code */
  bc_emit_bytecode (opcode);
  bc_emit_bytecode_labelref (fixup->label);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;

  register struct goto_fixup *f, *prev;

  if (output_bytecode)
      bc_fixup_gotos (thisblock, stack_level, cleanup_list, first_insn,
                      dont_jump_in);

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
          /* Delete inactive fixup from the chain, if that is easy to do.  */
            prev->next = f->next;

      /* Has this fixup's target label been defined?
         If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
          register rtx cleanup_insns;

          /* Get the first non-label after the label
             this goto jumps to.  If that's before this scope begins,
             we don't have a jump into the scope.  */
          rtx after_label = f->target_rtl;
          while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
            after_label = NEXT_INSN (after_label);

          /* If this fixup jumped into this contour from before the beginning
             of this contour, report an error.  */
          /* ??? Bug: this does not detect jumping in through intermediate
             blocks that have stack levels or cleanups.
             It detects only a problem with the innermost block
             around the label.  */
              && (dont_jump_in || stack_level || cleanup_list)
              /* If AFTER_LABEL is 0, it means the jump goes to the end
                 of the rtl, which means it jumps into this scope.  */
              && (after_label == 0
                  || INSN_UID (first_insn) < INSN_UID (after_label))
              && INSN_UID (first_insn) > INSN_UID (f->before_jump)
              && ! DECL_REGISTER (f->target))
              error_with_decl (f->target,
                               "label `%s' used before containing binding contour");
              /* Prevent multiple errors for one label.  */
              DECL_REGISTER (f->target) = 1;

          /* We will expand the cleanups into a sequence of their own and
             then later on we will attach this new sequence to the insn
             stream just ahead of the actual jump insn.  */

          /* Temporarily restore the lexical context where we will
             logically be inserting the fixup code.  We do this for the
             sake of getting the debugging information right.  */

          set_block (f->context);

          /* Expand the cleanups for blocks this jump exits.  */
          if (f->cleanup_list_list)
              for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
                /* Marked elements correspond to blocks that have been closed.
                   Do their cleanups.  */
                if (TREE_ADDRESSABLE (lists)
                    && TREE_VALUE (lists) != 0)
                    expand_cleanups (TREE_VALUE (lists), 0);
                    /* Pop any pushes done in the cleanups,
                       in case function is about to return.  */
                    do_pending_stack_adjust ();

          /* Restore stack level for the biggest contour that this
             jump jumps out of.  */
            emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

          /* Finish up the sequence containing the insns which implement the
             necessary cleanups, and then attach that whole sequence to the
             insn stream just ahead of the actual jump insn.  Attaching it
             at that point insures that any cleanups which are in fact
             implicit C++ object destructions (which must be executed upon
             leaving the block) appear (to the debugger) to be taking place
             in an area of the generated code where the object(s) being
             destructed are still "in scope".  */

          cleanup_insns = get_insns ();

          emit_insns_after (cleanup_insns, f->before_jump);

  /* Mark the cleanups of exited blocks so that they are executed
     by the code above.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
        && PREV_INSN (f->target_rtl) == 0
        /* Label has still not appeared.  If we are exiting a block with
           a stack level to restore, that started before the fixup,
           mark this stack level as needing restoration
           when the fixup is later finalized.
           Also mark the cleanup_list_list element for F
           that corresponds to this block, so that ultimately
           this block's cleanups will be executed by the code above.  */

        /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared,
           it means the label is undefined.  That's erroneous, but possible.  */
        && (thisblock->data.block.block_start_count
            <= f->block_start_count))
        tree lists = f->cleanup_list_list;
        for (; lists; lists = TREE_CHAIN (lists))
          /* If the following elt. corresponds to our containing block
             then the elt. must be for this block.  */
          if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
            TREE_ADDRESSABLE (lists) = 1;

          f->stack_level = stack_level;
/* When exiting a binding contour, process all pending gotos requiring fixups.
   Note: STACK_DEPTH is not altered.

   The arguments are currently not used in the bytecode compiler, but we may
   need them one day for languages other than C.

   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */
bc_fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;

  register struct goto_fixup *f, *prev;
  int saved_stack_depth;

  /* F is the fixup we are considering; PREV is the previous one.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
          /* Delete inactive fixup from the chain, if that is easy to do.  */
            prev->next = f->next;

      /* Emit code to restore the stack and continue */
      bc_emit_bytecode_labeldef (f->label);

      /* Save stack_depth across call, since bc_adjust_stack () will alter
         the perceived stack depth via the instructions generated.  */

      if (f->bc_stack_level >= 0)
          saved_stack_depth = stack_depth;
          bc_adjust_stack (stack_depth - f->bc_stack_level);
          stack_depth = saved_stack_depth;

      bc_emit_bytecode (jump);
      bc_emit_bytecode_labelref (f->bc_target);

#ifdef DEBUG_PRINT_CODE
      fputc ('\n', stderr);
#endif

  goto_fixup_chain = NULL;
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

  if (output_bytecode)
      error ("`asm' is illegal when generating bytecode");

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
                      TREE_STRING_POINTER (body)));
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;

  rtvec argvec, constraints;

  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);

  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  /* The insn we have emitted.  */

  if (output_bytecode)
      error ("`asm' is illegal when generating bytecode");

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */

  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
      tree val = TREE_VALUE (tail);

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)

      /* Make sure constraint has `=' and does not have `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
          if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
              error ("output operand constraint contains `+'");

          if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '=')

          error ("output operand constraint lacks `='");

      /* If an output operand is not a variable or indirect ref,
         create a SAVE_EXPR which is a pseudo-reg
         to act as an intermediate temporary.
         Make the asm insn write into that, then copy it to
         the real output operand.  */

      while (TREE_CODE (val) == COMPONENT_REF
             || TREE_CODE (val) == ARRAY_REF)
        val = TREE_OPERAND (val, 0);

      if (TREE_CODE (val) != VAR_DECL
          && TREE_CODE (val) != PARM_DECL
          && TREE_CODE (val) != INDIRECT_REF)
          TREE_VALUE (tail) = save_expr (TREE_VALUE (tail));
          /* If it's a constant, print error now so don't crash later.  */
          if (TREE_CODE (TREE_VALUE (tail)) != SAVE_EXPR)
              error ("invalid output in `asm'");

      output_rtx[i] = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);

  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx (ASM_OPERANDS, VOIDmode,
                  TREE_STRING_POINTER (string), "", 0, argvec, constraints,

  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
      /* If there's an erroneous arg, emit no insn,
         because the ASM_INPUT would get VOIDmode
         and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)

      if (TREE_PURPOSE (tail) == NULL_TREE)
          error ("hard register `%s' listed as input operand to `asm'",
                 TREE_STRING_POINTER (TREE_VALUE (tail)));

      /* Make sure constraint has neither `=' nor `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
        if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '='
            || TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
            error ("input operand constraint contains `%c'",
                   TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);

      XVECEXP (body, 3, i)      /* argvec */
        = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
      XVECEXP (body, 4, i)      /* constraints */
        = gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
                   TREE_STRING_POINTER (TREE_PURPOSE (tail)));

  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
                               ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));

  else if (noutputs == 0 && nclobbers == 0)
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);

      if (num == 0) num = 1;
      body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
          XVECEXP (body, 0, i)
            = gen_rtx (SET, VOIDmode,
                       gen_rtx (ASM_OPERANDS, VOIDmode,
                                TREE_STRING_POINTER (string),
                                TREE_STRING_POINTER (TREE_PURPOSE (tail)),
                                i, argvec, constraints,
          MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;

      /* If there are no outputs (but there are some clobbers)
         store the bare ASM_OPERANDS into the PARALLEL.  */

        XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
          char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
          int j = decode_reg_name (regname);

          if (j == -3)          /* `cc', which is not a register */

          if (j == -4)          /* `memory', don't cache memory across asm */
              XVECEXP (body, 0, i++)
                = gen_rtx (CLOBBER, VOIDmode,
                           gen_rtx (MEM, QImode,
                                    gen_rtx (SCRATCH, VOIDmode, 0)));

          error ("unknown register name `%s' in `asm'", regname);

          /* Use QImode since that's guaranteed to clobber just one reg.  */
          XVECEXP (body, 0, i++)
            = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));

      insn = emit_insn (body);
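/* For example (a sketch, not compiler code): a GNU C statement such as

       asm volatile ("foo %1,%0" : "=r" (x) : "r" (y) : "cc");

   reaches expand_asm_operands with one output (X, constraint "=r"),
   one input (Y, constraint "r"), the clobber list ("cc"), and VOL
   nonzero because of `volatile'.  */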
/* Generate RTL to evaluate the expression EXP
   and remember it in case this is the VALUE in a ({... VALUE; }) constr.  */

expand_expr_stmt (exp)

  if (output_bytecode)
      int org_stack_depth = stack_depth;

      bc_expand_expr (exp);

      /* Restore stack depth */
      if (stack_depth < org_stack_depth)

      bc_emit_instruction (drop);

      last_expr_type = TREE_TYPE (exp);

  /* If -W, warn about statements with no side effects,
     except for an explicit cast to void (e.g. for assert()), and
     except inside a ({...}) where they may be useful.  */
  if (expr_stmts_for_value == 0 && exp != error_mark_node)
      if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
          && !(TREE_CODE (exp) == CONVERT_EXPR
               && TREE_TYPE (exp) == void_type_node))
        warning_with_file_and_line (emit_filename, emit_lineno,
                                    "statement with no effect");
      else if (warn_unused)
        warn_if_unused_value (exp);

  last_expr_type = TREE_TYPE (exp);
  if (! flag_syntax_only)
    last_expr_value = expand_expr (exp,
                                   (expr_stmts_for_value
                                    ? NULL_RTX : const0_rtx),

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
      && TREE_THIS_VOLATILE (exp))
      if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)

      else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
        copy_to_reg (last_expr_value);

          rtx lab = gen_label_rtx ();

          /* Compare the value with itself to reference it.  */
          emit_cmp_insn (last_expr_value, last_expr_value, EQ,
                         expand_expr (TYPE_SIZE (last_expr_type),
                                      NULL_RTX, VOIDmode, 0),
                         TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
          emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));

  /* If this expression is part of a ({...}) and is in memory, we may have
     to preserve temporaries.  */
  preserve_temp_slots (last_expr_value);

  /* Free any temporaries used to evaluate this expression.  Any temporary
     used as a result of this expression will already have been preserved
     above.  */
/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  */

warn_if_unused_value (exp)

  if (TREE_USED (exp))

  switch (TREE_CODE (exp))
    {
    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case METHOD_CALL_EXPR:
    case WITH_CLEANUP_EXPR:
      /* We don't warn about COND_EXPR because it may be a useful
         construct if either arm contains a side effect.  */

      /* For a binding, warn if no side effect within it.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case TRUTH_ORIF_EXPR:
    case TRUTH_ANDIF_EXPR:
      /* In && or ||, warn if 2nd operand has no side effect.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

      if (warn_if_unused_value (TREE_OPERAND (exp, 0)))

      /* Let people do `(foo (), 0)' without a warning.  */
      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))

      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case NON_LVALUE_EXPR:
      /* Don't warn about values cast to void.  */
      if (TREE_TYPE (exp) == void_type_node)

      /* Don't warn about conversions not explicit in the user's program.  */
      if (TREE_NO_UNUSED_WARNING (exp))

      /* Assignment to a cast usually results in a cast of a modify.
         Don't complain about that.  There can be an arbitrary number of
         casts before the modify, so we must loop until we find the first
         non-cast expression and then test to see if that is a modify.  */

        tree tem = TREE_OPERAND (exp, 0);

        while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
          tem = TREE_OPERAND (tem, 0);

        if (TREE_CODE (tem) == MODIFY_EXPR)

      /* ... fall through ...  */

      /* Referencing a volatile value is a side effect, so don't warn.  */
      if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
           || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
          && TREE_THIS_VOLATILE (exp))

      warning_with_file_and_line (emit_filename, emit_lineno,
                                  "value computed is not used");
/* Clear out the memory of the last expression evaluated.  */

/* Begin a statement which will return a value.
   Return the RTL_EXPR for this statement expr.
   The caller must save that value and pass it to expand_end_stmt_expr.  */

expand_start_stmt_expr ()

  /* When generating bytecode just note down the stack depth */
  if (output_bytecode)
    return (build_int_2 (stack_depth, 0));

  /* Make the RTL_EXPR node temporary, not momentary,
     so that rtl_expr_chain doesn't become garbage.  */
  momentary = suspend_momentary ();
  t = make_node (RTL_EXPR);
  resume_momentary (momentary);
  start_sequence_for_rtl_expr (t);

  expr_stmts_for_value++;
/* Restore the previous state at the end of a statement that returns a value.
   Returns a tree node representing the statement's value and the
   insns to compute the value.

   The nodes of that expression have been freed by now, so we cannot use them.
   But we don't want to do that anyway; the expression has already been
   evaluated and now we just want to use the value.  So generate a RTL_EXPR
   with the proper type and RTL value.

   If the last substatement was not an expression,
   return something with type `void'.  */

expand_end_stmt_expr (t)

  if (output_bytecode)

      /* At this point, all expressions have been evaluated in order.
         However, all expression values have been popped when evaluated,
         which means we have to recover the last expression value.  This is
         the last value removed by means of a `drop' instruction.  Instead
         of adding code to inhibit dropping the last expression value, it
         is here recovered by undoing the `drop'.  Since `drop' is
         equivalent to `adjstackSI [1]', it can be undone with `adjstackSI
         [-1]'.  */

      bc_adjust_stack (-1);

      if (!last_expr_type)
        last_expr_type = void_type_node;

      t = make_node (RTL_EXPR);
      TREE_TYPE (t) = last_expr_type;
      RTL_EXPR_RTL (t) = NULL;
      RTL_EXPR_SEQUENCE (t) = NULL;

      /* Don't consider deleting this expr or containing exprs at tree level.  */
      TREE_THIS_VOLATILE (t) = 1;

  if (last_expr_type == 0)
      last_expr_type = void_type_node;
      last_expr_value = const0_rtx;
  else if (last_expr_value == 0)
    /* There are some cases where this can happen, such as when the
       statement is void type.  */
    last_expr_value = const0_rtx;
  else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
    /* Remove any possible QUEUED.  */
    last_expr_value = protect_from_queue (last_expr_value, 0);

  TREE_TYPE (t) = last_expr_type;
  RTL_EXPR_RTL (t) = last_expr_value;
  RTL_EXPR_SEQUENCE (t) = get_insns ();

  rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);

  /* Don't consider deleting this expr or containing exprs at tree level.  */
  TREE_SIDE_EFFECTS (t) = 1;
  /* Propagate volatility of the actual RTL expr.  */
  TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);

  expr_stmts_for_value--;
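/* For example (a sketch, not compiler code): for the GNU C statement
   expression

       z = ({ int t = f (); t * 2; });

   the front end calls expand_start_stmt_expr before expanding the body
   (so expr_stmts_for_value is incremented and each expr-stmt's value is
   kept), then hands the returned RTL_EXPR to expand_end_stmt_expr, which
   gives it the type and value of the last expression statement, `t * 2'.  */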
/* The exception handling nesting looks like this:

     { <-- exception handler block
         <-- in an exception handler
       : <-- in a TRY block
       : <-- in an exception handler

       : <-- in an except block
       : <-- in an exception handler
*/

/* Return nonzero iff in a try block at level LEVEL.  */

in_try_block (level)

  struct nesting *n = except_stack;

  while (n && n->data.except_stmt.after_label != 0)

/* Return nonzero iff in an except block at level LEVEL.  */

in_except_block (level)

  struct nesting *n = except_stack;

  while (n && n->data.except_stmt.after_label == 0)

/* Return nonzero iff in an exception handler at level LEVEL.  */

in_exception_handler (level)

  struct nesting *n = except_stack;
  while (n && level--)
/* Record the fact that the current exception nesting raises
   exception EX.  If not in an exception handler, return 0.  */

  if (except_stack == 0)

  raises_ptr = &except_stack->data.except_stmt.raised;
  if (! value_member (ex, *raises_ptr))
    *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);

/* Generate RTL for the start of a try block.

   TRY_CLAUSE is the condition to test to enter the try block.  */

expand_start_try (try_clause, exitflag, escapeflag)

  struct nesting *thishandler = ALLOC_NESTING ();

  /* Make an entry on except_stack for the exception contour we are entering.  */

  thishandler->next = except_stack;
  thishandler->all = nesting_stack;
  thishandler->depth = ++nesting_depth;
  thishandler->data.except_stmt.raised = 0;
  thishandler->data.except_stmt.handled = 0;
  thishandler->data.except_stmt.first_insn = get_insns ();
  thishandler->data.except_stmt.except_label = gen_label_rtx ();
  thishandler->data.except_stmt.unhandled_label = 0;
  thishandler->data.except_stmt.after_label = 0;
  thishandler->data.except_stmt.escape_label
    = escapeflag ? thishandler->data.except_stmt.except_label : 0;
  thishandler->exit_label = exitflag ? gen_label_rtx () : 0;
  except_stack = thishandler;
  nesting_stack = thishandler;

  do_jump (try_clause, thishandler->data.except_stmt.except_label, NULL_RTX);
1977 /* End of a TRY block. Nothing to do for now. */
1982 except_stack->data.except_stmt.after_label = gen_label_rtx ();
1983 expand_goto_internal (NULL_TREE, except_stack->data.except_stmt.after_label,
1987 /* Start an `except' nesting contour.
1988 EXITFLAG says whether this contour should be able to `exit' something.
1989 ESCAPEFLAG says whether this contour should be escapable. */
1992 expand_start_except (exitflag, escapeflag)
1999 /* An `exit' from catch clauses goes out to next exit level,
2000 if there is one. Otherwise, it just goes to the end
2001 of the construct. */
2002 for (n = except_stack->next; n; n = n->next)
2003 if (n->exit_label != 0)
2005 except_stack->exit_label = n->exit_label;
2009 except_stack->exit_label = except_stack->data.except_stmt.after_label;
2014 /* An `escape' from catch clauses goes out to next escape level,
2015 if there is one. Otherwise, it just goes to the end
2016 of the construct. */
2017 for (n = except_stack->next; n; n = n->next)
2018 if (n->data.except_stmt.escape_label != 0)
2020 except_stack->data.except_stmt.escape_label
2021 = n->data.except_stmt.escape_label;
2025 except_stack->data.except_stmt.escape_label
2026 = except_stack->data.except_stmt.after_label;
2028 do_pending_stack_adjust ();
2029 emit_label (except_stack->data.except_stmt.except_label);
2032 /* Generate code to `escape' from an exception contour. This
2033 is like `exiting', but does not conflict with constructs which use `exit_label'.
2036 Return nonzero if this contour is escapable, otherwise
2037 return zero, and language-specific code will emit the
2038 appropriate error message. */
2040 expand_escape_except ()
2044 for (n = except_stack; n; n = n->next)
2045 if (n->data.except_stmt.escape_label != 0)
2047 expand_goto_internal (NULL_TREE,
2048 n->data.except_stmt.escape_label, NULL_RTX);
2055 /* Finish processing an `except' contour.
2056 Culls out all exceptions which might be raised but not
2057 handled, and returns the list to the caller.
2058 Language-specific code is responsible for dealing with these
2062 expand_end_except ()
2065 tree raised = NULL_TREE;
2067 do_pending_stack_adjust ();
2068 emit_label (except_stack->data.except_stmt.after_label);
2070 n = except_stack->next;
2073 /* Propagate exceptions raised but not handled to the next enclosing level. */
2075 tree handled = except_stack->data.except_stmt.handled;
2076 if (handled != void_type_node)
2078 tree prev = NULL_TREE;
2079 raised = except_stack->data.except_stmt.raised;
2083 for (this_raise = raised, prev = 0; this_raise;
2084 this_raise = TREE_CHAIN (this_raise))
2086 if (value_member (TREE_VALUE (this_raise), handled))
2089 TREE_CHAIN (prev) = TREE_CHAIN (this_raise);
2092 raised = TREE_CHAIN (raised);
2093 if (raised == NULL_TREE)
2100 handled = TREE_CHAIN (handled);
2102 if (prev == NULL_TREE)
2105 TREE_CHAIN (prev) = n->data.except_stmt.raised;
2107 n->data.except_stmt.raised = raised;
2111 POPSTACK (except_stack);
2116 /* Record that exception EX is caught by this exception handler.
2117 Return nonzero if in exception handling construct, otherwise return 0. */
2124 if (except_stack == 0)
2126 raises_ptr = &except_stack->data.except_stmt.handled;
2127 if (*raises_ptr != void_type_node
2129 && ! value_member (ex, *raises_ptr))
2130 *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
2134 /* Record that this exception handler catches all exceptions.
2135 Return nonzero if in exception handling construct, otherwise return 0. */
2138 expand_catch_default ()
2140 if (except_stack == 0)
2142 except_stack->data.except_stmt.handled = void_type_node;
2149 if (except_stack == 0 || except_stack->data.except_stmt.after_label == 0)
2151 expand_goto_internal (NULL_TREE, except_stack->data.except_stmt.after_label,
2156 /* Generate RTL for the start of an if-then. COND is the expression
2157 whose truth should be tested.
2159 If EXITFLAG is nonzero, this conditional is visible to
2160 `exit_something'. */
2163 expand_start_cond (cond, exitflag)
2167 struct nesting *thiscond = ALLOC_NESTING ();
2169 /* Make an entry on cond_stack for the cond we are entering. */
2171 thiscond->next = cond_stack;
2172 thiscond->all = nesting_stack;
2173 thiscond->depth = ++nesting_depth;
2174 thiscond->data.cond.next_label = gen_label_rtx ();
2175 /* Before we encounter an `else', we don't need a separate exit label
2176 unless there are supposed to be exit statements
2177 to exit this conditional. */
2178 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2179 thiscond->data.cond.endif_label = thiscond->exit_label;
2180 cond_stack = thiscond;
2181 nesting_stack = thiscond;
2183 if (output_bytecode)
2184 bc_expand_start_cond (cond, exitflag);
2186 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2189 /* Generate RTL between the then-clause and the elseif-clause
2190 of an if-then-elseif-.... */
2193 expand_start_elseif (cond)
2196 if (cond_stack->data.cond.endif_label == 0)
2197 cond_stack->data.cond.endif_label = gen_label_rtx ();
2198 emit_jump (cond_stack->data.cond.endif_label);
2199 emit_label (cond_stack->data.cond.next_label);
2200 cond_stack->data.cond.next_label = gen_label_rtx ();
2201 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2204 /* Generate RTL between the then-clause and the else-clause
2205 of an if-then-else. */
2208 expand_start_else ()
2210 if (cond_stack->data.cond.endif_label == 0)
2211 cond_stack->data.cond.endif_label = gen_label_rtx ();
2213 if (output_bytecode)
2215 bc_expand_start_else ();
2219 emit_jump (cond_stack->data.cond.endif_label);
2220 emit_label (cond_stack->data.cond.next_label);
2221 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2224 /* Generate RTL for the end of an if-then.
2225 Pop the record for it off of cond_stack. */
2230 struct nesting *thiscond = cond_stack;
2232 if (output_bytecode)
2233 bc_expand_end_cond ();
2236 do_pending_stack_adjust ();
2237 if (thiscond->data.cond.next_label)
2238 emit_label (thiscond->data.cond.next_label);
2239 if (thiscond->data.cond.endif_label)
2240 emit_label (thiscond->data.cond.endif_label);
2243 POPSTACK (cond_stack);
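/* For example, a front end expanding `if (a) S1; else if (b) S2; else S3;'
   would make roughly this sequence of calls (a sketch, not actual
   parser code):

	expand_start_cond (a, 0);
	  ... expand S1 ...
	expand_start_elseif (b);
	  ... expand S2 ...
	expand_start_else ();
	  ... expand S3 ...
	expand_end_cond ();
*/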
2248 /* Generate code for the start of an if-then. COND is the expression
2249 whose truth is to be tested; if EXITFLAG is nonzero this conditional
2250 is to be visible to exit_something. It is assumed that the caller
2251 has pushed the previous context on the cond stack. */
2253 bc_expand_start_cond (cond, exitflag)
2257 struct nesting *thiscond = cond_stack;
2259 thiscond->data.case_stmt.nominal_type = cond;
2261 thiscond->exit_label = gen_label_rtx ();
2262 bc_expand_expr (cond);
2263 bc_emit_bytecode (xjumpifnot);
2264 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));
2266 #ifdef DEBUG_PRINT_CODE
2267 fputc ('\n', stderr);
2271 /* Generate the label for the end of an if with no else- clause. */
2274 bc_expand_end_cond ()
2276 struct nesting *thiscond = cond_stack;
2278 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->exit_label));
2281 /* Generate code for the start of the else- clause of an if-then-else. */
2284 bc_expand_start_else ()
2286 struct nesting *thiscond = cond_stack;
2288 thiscond->data.cond.endif_label = thiscond->exit_label;
2289 thiscond->exit_label = gen_label_rtx ();
2290 bc_emit_bytecode (jump);
2291 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));
2293 #ifdef DEBUG_PRINT_CODE
2294 fputc ('\n', stderr);
2297 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->data.cond.endif_label));
2300 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2301 loop should be exited by `exit_something'. This is a loop for which
2302 `expand_continue' will jump to the top of the loop.
2304 Make an entry on loop_stack to record the labels associated with this loop. */
2308 expand_start_loop (exit_flag)
2311 register struct nesting *thisloop = ALLOC_NESTING ();
2313 /* Make an entry on loop_stack for the loop we are entering. */
2315 thisloop->next = loop_stack;
2316 thisloop->all = nesting_stack;
2317 thisloop->depth = ++nesting_depth;
2318 thisloop->data.loop.start_label = gen_label_rtx ();
2319 thisloop->data.loop.end_label = gen_label_rtx ();
2320 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2321 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2322 loop_stack = thisloop;
2323 nesting_stack = thisloop;
2325 if (output_bytecode)
2327 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));
2331 do_pending_stack_adjust ();
2333 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
2334 emit_label (thisloop->data.loop.start_label);
2339 /* Like expand_start_loop but for a loop where the continuation point
2340 (for expand_continue_loop) will be specified explicitly. */
2343 expand_start_loop_continue_elsewhere (exit_flag)
2346 struct nesting *thisloop = expand_start_loop (exit_flag);
2347 loop_stack->data.loop.continue_label = gen_label_rtx ();
2351 /* Specify the continuation point for a loop started with
2352 expand_start_loop_continue_elsewhere.
2353 Use this at the point in the code to which a continue statement should jump. */
2357 expand_loop_continue_here ()
2359 if (output_bytecode)
2361 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (loop_stack->data.loop.continue_label));
2364 do_pending_stack_adjust ();
2365 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
2366 emit_label (loop_stack->data.loop.continue_label);
2371 bc_expand_end_loop ()
2373 struct nesting *thisloop = loop_stack;
2375 bc_emit_bytecode (jump);
2376 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));
2378 #ifdef DEBUG_PRINT_CODE
2379 fputc ('\n', stderr);
2382 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->exit_label));
2383 POPSTACK (loop_stack);
2388 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2389 Pop the block off of loop_stack. */
2395 register rtx start_label;
2396 rtx last_test_insn = 0;
2399 if (output_bytecode)
2401 bc_expand_end_loop ();
2405 insn = get_last_insn ();
2406 start_label = loop_stack->data.loop.start_label;
2408 /* Mark the continue-point at the top of the loop if none elsewhere. */
2409 if (start_label == loop_stack->data.loop.continue_label)
2410 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2412 do_pending_stack_adjust ();
2414 /* If optimizing, perhaps reorder the loop. If the loop
2415 starts with a conditional exit, roll that to the end
2416 where it will optimize together with the jump back.
2418 We look for the last conditional branch to the exit that we encounter
2419 before hitting 30 insns or a CALL_INSN. If we see an unconditional
2420 branch to the exit first, use it.
2422 We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
2423 because moving them is not valid. */
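     /* For example, `while (c) body;' is first expanded as

	  start: if (!c) goto end;  body;  goto start;  end:

	and is rearranged here (roughly) into

	  goto newstart;  start: body;
	  newstart: if (!c) goto end;  goto start;  end:

	after which jump optimization can merge the test with the
	jump back to START.  */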
2427 ! (GET_CODE (insn) == JUMP_INSN
2428 && GET_CODE (PATTERN (insn)) == SET
2429 && SET_DEST (PATTERN (insn)) == pc_rtx
2430 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2432 /* Scan insns from the top of the loop looking for a qualified
2433 conditional exit. */
2434 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2435 insn = NEXT_INSN (insn))
2437 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
2440 if (GET_CODE (insn) == NOTE
2441 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2442 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2445 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2448 if (last_test_insn && num_insns > 30)
2451 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
2452 && SET_DEST (PATTERN (insn)) == pc_rtx
2453 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
2454 && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
2455 && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
2456 == loop_stack->data.loop.end_label))
2457 || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
2458 && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
2459 == loop_stack->data.loop.end_label))))
2460 last_test_insn = insn;
2462 if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
2463 && GET_CODE (PATTERN (insn)) == SET
2464 && SET_DEST (PATTERN (insn)) == pc_rtx
2465 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
2466 && (XEXP (SET_SRC (PATTERN (insn)), 0)
2467 == loop_stack->data.loop.end_label))
2468 /* Include BARRIER. */
2469 last_test_insn = NEXT_INSN (insn);
2472 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2474 /* We found one. Move everything from there up
2475 to the end of the loop, and add a jump into the loop
2476 to jump to there. */
2477 register rtx newstart_label = gen_label_rtx ();
2478 register rtx start_move = start_label;
2480 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2481 then we want to move this note also. */
2482 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2483 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2484 == NOTE_INSN_LOOP_CONT))
2485 start_move = PREV_INSN (start_move);
2487 emit_label_after (newstart_label, PREV_INSN (start_move));
2488 reorder_insns (start_move, last_test_insn, get_last_insn ());
2489 emit_jump_insn_after (gen_jump (start_label),
2490 PREV_INSN (newstart_label));
2491 emit_barrier_after (PREV_INSN (newstart_label));
2492 start_label = newstart_label;
2496 emit_jump (start_label);
2497 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2498 emit_label (loop_stack->data.loop.end_label);
2500 POPSTACK (loop_stack);
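/* A sketch of how a front end expands `for (init; c; inc) body' with
   the routines above (hypothetical front-end code):

	... expand INIT ...
	expand_start_loop_continue_elsewhere (1);
	expand_exit_loop_if_false (0, c);
	  ... expand BODY ...
	expand_loop_continue_here ();	(`continue' statements jump here)
	... expand INC ...
	expand_end_loop ();
*/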
2505 /* Generate a jump to the current loop's continue-point.
2506 This is usually the top of the loop, but may be specified
2507 explicitly elsewhere. If not currently inside a loop,
2508 return 0 and do nothing; caller will print an error message. */
2511 expand_continue_loop (whichloop)
2512 struct nesting *whichloop;
2516 whichloop = loop_stack;
2519 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2524 /* Generate a jump to exit the current loop. If not currently inside a loop,
2525 return 0 and do nothing; caller will print an error message. */
2528 expand_exit_loop (whichloop)
2529 struct nesting *whichloop;
2533 whichloop = loop_stack;
2536 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2540 /* Generate a conditional jump to exit the current loop if COND
2541 evaluates to zero. If not currently inside a loop,
2542 return 0 and do nothing; caller will print an error message. */
2545 expand_exit_loop_if_false (whichloop, cond)
2546 struct nesting *whichloop;
2551 whichloop = loop_stack;
2554 if (output_bytecode)
2556 bc_expand_expr (cond);
2557 bc_expand_goto_internal (xjumpifnot,
2558 BYTECODE_BC_LABEL (whichloop->exit_label),
2562 do_jump (cond, whichloop->data.loop.end_label, NULL_RTX);
2567 /* Return non-zero if we should preserve sub-expressions as separate
2568 pseudos. We never do so if we aren't optimizing. We always do so
2569 if -fexpensive-optimizations.
2571 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2572 the loop may still be a small one. */
2575 preserve_subexpressions_p ()
2579 if (flag_expensive_optimizations)
2582 if (optimize == 0 || loop_stack == 0)
2585 insn = get_last_insn_anywhere ();
2588 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2589 < n_non_fixed_regs * 3));
2593 /* Generate a jump to exit the current loop, conditional, binding contour
2594 or case statement. Not all such constructs are visible to this function,
2595 only those started with EXIT_FLAG nonzero. Individual languages use
2596 the EXIT_FLAG parameter to control which kinds of constructs you can exit this way.
2599 If not currently inside anything that can be exited,
2600 return 0 and do nothing; caller will print an error message. */
2603 expand_exit_something ()
2607 for (n = nesting_stack; n; n = n->all)
2608 if (n->exit_label != 0)
2610 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2617 /* Generate RTL to return from the current function, with no value.
2618 (That is, we do not do anything about returning any value.) */
2621 expand_null_return ()
2623 struct nesting *block = block_stack;
2626 if (output_bytecode)
2628 bc_emit_instruction (ret);
2632 /* Does any pending block have cleanups? */
2634 while (block && block->data.block.cleanups == 0)
2635 block = block->next;
2637 /* If yes, use a goto to return, since that runs cleanups. */
2639 expand_null_return_1 (last_insn, block != 0);
2642 /* Generate RTL to return from the current function, with value VAL. */
2645 expand_value_return (val)
2648 struct nesting *block = block_stack;
2649 rtx last_insn = get_last_insn ();
2650 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2652 /* Copy the value to the return location
2653 unless it's already there. */
2655 if (return_reg != val)
2657 #ifdef PROMOTE_FUNCTION_RETURN
2658 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2659 int unsignedp = TREE_UNSIGNED (type);
2660 enum machine_mode mode
2661 = promote_mode (type, DECL_MODE (DECL_RESULT (current_function_decl)),
2664 if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
2665 convert_move (return_reg, val, unsignedp);
2668 emit_move_insn (return_reg, val);
2670 if (GET_CODE (return_reg) == REG
2671 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2672 emit_insn (gen_rtx (USE, VOIDmode, return_reg));
2674 /* Does any pending block have cleanups? */
2676 while (block && block->data.block.cleanups == 0)
2677 block = block->next;
2679 /* If yes, use a goto to return, since that runs cleanups.
2680 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2682 expand_null_return_1 (last_insn, block != 0);
2685 /* Output a return with no value. If LAST_INSN is nonzero,
2686 pretend that the return takes place after LAST_INSN.
2687 If USE_GOTO is nonzero then don't use a return instruction;
2688 go to the return label instead. This causes any cleanups
2689 of pending blocks to be executed normally. */
2692 expand_null_return_1 (last_insn, use_goto)
2696 rtx end_label = cleanup_label ? cleanup_label : return_label;
2698 clear_pending_stack_adjust ();
2699 do_pending_stack_adjust ();
2702 /* PCC-struct return always uses an epilogue. */
2703 if (current_function_returns_pcc_struct || use_goto)
2706 end_label = return_label = gen_label_rtx ();
2707 expand_goto_internal (NULL_TREE, end_label, last_insn);
2711 /* Otherwise output a simple return-insn if one is available,
2712 unless it won't do the job. */
2714 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2716 emit_jump_insn (gen_return ());
2722 /* Otherwise jump to the epilogue. */
2723 expand_goto_internal (NULL_TREE, end_label, last_insn);
2726 /* Generate RTL to evaluate the expression RETVAL and return it
2727 from the current function. */
2730 expand_return (retval)
2733 /* If there are any cleanups to be performed, then they will
2734 be inserted following LAST_INSN. It is desirable
2735 that the last_insn, for such purposes, should be the
2736 last insn before computing the return value. Otherwise, cleanups
2737 which call functions can clobber the return value. */
2738 /* ??? rms: I think that is erroneous, because in C++ it would
2739 run destructors on variables that might be used in the subsequent
2740 computation of the return value. */
2742 register rtx val = 0;
2746 struct nesting *block;
2748 /* Bytecode returns are quite simple, just leave the result on the
2749 arithmetic stack. */
2750 if (output_bytecode)
2752 bc_expand_expr (retval);
2753 bc_emit_instruction (ret);
2757 /* If function wants no value, give it none. */
2758 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2760 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2762 expand_null_return ();
2766 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2767 cleanups = any_pending_cleanups (1);
2769 if (TREE_CODE (retval) == RESULT_DECL)
2770 retval_rhs = retval;
2771 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2772 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2773 retval_rhs = TREE_OPERAND (retval, 1);
2774 else if (TREE_TYPE (retval) == void_type_node)
2775 /* Recognize tail-recursive call to void function. */
2776 retval_rhs = retval;
2778 retval_rhs = NULL_TREE;
2780 /* Only use `last_insn' if there are cleanups which must be run. */
2781 if (cleanups || cleanup_label != 0)
2782 last_insn = get_last_insn ();
2784 /* Distribute return down conditional expr if either of the sides
2785 may involve tail recursion (see test below). This enhances the number
2786 of tail recursions we see. Don't do this always since it can produce
2787 sub-optimal code in some cases and we distribute assignments into
2788 conditional expressions when it would help. */
2790 if (optimize && retval_rhs != 0
2791 && frame_offset == 0
2792 && TREE_CODE (retval_rhs) == COND_EXPR
2793 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2794 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2796 rtx label = gen_label_rtx ();
2799 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2800 expr = build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2801 DECL_RESULT (current_function_decl),
2802 TREE_OPERAND (retval_rhs, 1));
2803 TREE_SIDE_EFFECTS (expr) = 1;
2804 expand_return (expr);
2807 expr = build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2808 DECL_RESULT (current_function_decl),
2809 TREE_OPERAND (retval_rhs, 2));
2810 TREE_SIDE_EFFECTS (expr) = 1;
2811 expand_return (expr);
2815 /* For tail-recursive call to current function,
2816 just jump back to the beginning.
2817 It's unsafe if any auto variable in this function
2818 has its address taken; for simplicity,
2819 require stack frame to be empty. */
2820 if (optimize && retval_rhs != 0
2821 && frame_offset == 0
2822 && TREE_CODE (retval_rhs) == CALL_EXPR
2823 && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
2824 && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
2825 /* Finish checking validity, and if valid emit code
2826 to set the argument variables for the new call. */
2827 && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
2828 DECL_ARGUMENTS (current_function_decl)))
2830 if (tail_recursion_label == 0)
2832 tail_recursion_label = gen_label_rtx ();
2833 emit_label_after (tail_recursion_label,
2834 tail_recursion_reentry);
2837 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
2842 /* This optimization is safe if there are local cleanups
2843 because expand_null_return takes care of them.
2844 ??? I think it should also be safe when there is a cleanup label,
2845 because expand_null_return takes care of them, too.
2846 Any reason why not? */
2847 if (HAVE_return && cleanup_label == 0
2848 && ! current_function_returns_pcc_struct
2849 && BRANCH_COST <= 1)
2851 /* If this is return x == y; then generate
2852 if (x == y) return 1; else return 0;
2853 if we can do it with explicit return insns and
2854 branches are cheap. */
2856 switch (TREE_CODE (retval_rhs))
2864 case TRUTH_ANDIF_EXPR:
2865 case TRUTH_ORIF_EXPR:
2866 case TRUTH_AND_EXPR:
2868 case TRUTH_NOT_EXPR:
2869 case TRUTH_XOR_EXPR:
2870 op0 = gen_label_rtx ();
2871 jumpifnot (retval_rhs, op0);
2872 expand_value_return (const1_rtx);
2874 expand_value_return (const0_rtx);
2878 #endif /* HAVE_return */
2882 && TREE_TYPE (retval_rhs) != void_type_node
2883 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2885 /* Calculate the return value into a pseudo reg. */
2886 val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2888 /* All temporaries have now been used. */
2890 /* Return the calculated value, doing cleanups first. */
2891 expand_value_return (val);
2895 /* No cleanups or no hard reg used;
2896 calculate value into hard return reg. */
2897 expand_expr (retval, const0_rtx, VOIDmode, 0);
2900 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2904 /* Return 1 if the end of the generated RTX is not a barrier.
2905 This means code already compiled can drop through. */
2908 drop_through_at_end_p ()
2910 rtx insn = get_last_insn ();
2911 while (insn && GET_CODE (insn) == NOTE)
2912 insn = PREV_INSN (insn);
2913 return insn && GET_CODE (insn) != BARRIER;
2916 /* Emit code to alter this function's formal parms for a tail-recursive call.
2917 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2918 FORMALS is the chain of decls of formals.
2919 Return 1 if this can be done;
2920 otherwise return 0 and do not emit any code. */
2923 tail_recursion_args (actuals, formals)
2924 tree actuals, formals;
2926 register tree a = actuals, f = formals;
2928 register rtx *argvec;
2930 /* Check that number and types of actuals are compatible
2931 with the formals. This is not always true in valid C code.
2932 Also check that no formal needs to be addressable
2933 and that all formals are scalars. */
2935 /* Also count the args. */
2937 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2939 if (TREE_TYPE (TREE_VALUE (a)) != TREE_TYPE (f))
2941 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2944 if (a != 0 || f != 0)
2947 /* Compute all the actuals. */
2949 argvec = (rtx *) alloca (i * sizeof (rtx));
2951 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2952 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
2954 /* Find which actual values refer to current values of previous formals.
2955 Copy each of them now, before any formal is changed. */
2957 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2961 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2962 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2963 { copy = 1; break; }
2965 argvec[i] = copy_to_reg (argvec[i]);
2968 /* Store the values of the actuals into the formals. */
2970 for (f = formals, a = actuals, i = 0; f;
2971 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2973 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
2974 emit_move_insn (DECL_RTL (f), argvec[i]);
2976 convert_move (DECL_RTL (f), argvec[i],
2977 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
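/* For instance, given (a source-level sketch)

	int f (n, acc) int n, acc;
	{ ... return f (n - 1, acc * n); }

   this routine stores `n - 1' and `acc * n' into the formals N and ACC,
   after first copying to pseudos any actual that still refers to a
   formal about to be overwritten; expand_return then jumps back to
   tail_recursion_label instead of emitting a recursive call.  */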
2984 /* Generate the RTL code for entering a binding contour.
2985 The variables are declared one by one, by calls to `expand_decl'.
2987 EXIT_FLAG is nonzero if this construct should be visible to
2988 `exit_something'. */
2991 expand_start_bindings (exit_flag)
2994 struct nesting *thisblock = ALLOC_NESTING ();
2995 rtx note = output_bytecode ? 0 : emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
2997 /* Make an entry on block_stack for the block we are entering. */
2999 thisblock->next = block_stack;
3000 thisblock->all = nesting_stack;
3001 thisblock->depth = ++nesting_depth;
3002 thisblock->data.block.stack_level = 0;
3003 thisblock->data.block.cleanups = 0;
3004 thisblock->data.block.function_call_count = 0;
3008 if (block_stack->data.block.cleanups == NULL_TREE
3009 && (block_stack->data.block.outer_cleanups == NULL_TREE
3010 || block_stack->data.block.outer_cleanups == empty_cleanup_list))
3011 thisblock->data.block.outer_cleanups = empty_cleanup_list;
3013 thisblock->data.block.outer_cleanups
3014 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3015 block_stack->data.block.outer_cleanups);
3018 thisblock->data.block.outer_cleanups = 0;
3022 && !(block_stack->data.block.cleanups == NULL_TREE
3023 && block_stack->data.block.outer_cleanups == NULL_TREE))
3024 thisblock->data.block.outer_cleanups
3025 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3026 block_stack->data.block.outer_cleanups);
3028 thisblock->data.block.outer_cleanups = 0;
3030 thisblock->data.block.label_chain = 0;
3031 thisblock->data.block.innermost_stack_block = stack_block_stack;
3032 thisblock->data.block.first_insn = note;
3033 thisblock->data.block.block_start_count = ++block_start_count;
3034 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3035 block_stack = thisblock;
3036 nesting_stack = thisblock;
3038 if (!output_bytecode)
3040 /* Make a new level for allocating stack slots. */
3045 /* Given a pointer to a BLOCK node, save a pointer to the most recently
3046 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given BLOCK node. */
3050 remember_end_note (block)
3051 register tree block;
3053 BLOCK_END_NOTE (block) = last_block_end_note;
3054 last_block_end_note = NULL_RTX;
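/* A sketch of the expansion of a braced block `{ int x = 0; ...; }' by
   a front end (hypothetical calls; argument details vary by language):

	expand_start_bindings (0);
	  expand_decl (...) and expand_decl_init (...) for `x'
	  ... expand the statements ...
	expand_end_bindings (decl_chain, 1, 0);
*/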
3057 /* Generate RTL code to terminate a binding contour.
3058 VARS is the chain of VAR_DECL nodes
3059 for the variables bound in this contour.
3060 MARK_ENDS is nonzero if we should put a note at the beginning
3061 and end of this binding contour.
3063 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3064 (That is true automatically if the contour has a saved stack level.) */
3067 expand_end_bindings (vars, mark_ends, dont_jump_in)
3072 register struct nesting *thisblock = block_stack;
3075 if (output_bytecode)
3077 bc_expand_end_bindings (vars, mark_ends, dont_jump_in);
3082 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3083 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL
3084 && ! DECL_IN_SYSTEM_HEADER (decl))
3085 warning_with_decl (decl, "unused variable `%s'");
3087 if (thisblock->exit_label)
3089 do_pending_stack_adjust ();
3090 emit_label (thisblock->exit_label);
3093 /* If necessary, make a handler for nonlocal gotos taking
3094 place in the function calls in this block. */
3095 if (function_call_count != thisblock->data.block.function_call_count
3097 /* Make handler for outermost block
3098 if there were any nonlocal gotos to this function. */
3099 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3100 /* Make handler for inner block if it has something
3101 special to do when you jump out of it. */
3102 : (thisblock->data.block.cleanups != 0
3103 || thisblock->data.block.stack_level != 0)))
3106 rtx afterward = gen_label_rtx ();
3107 rtx handler_label = gen_label_rtx ();
3108 rtx save_receiver = gen_reg_rtx (Pmode);
3111 /* Don't let jump_optimize delete the handler. */
3112 LABEL_PRESERVE_P (handler_label) = 1;
3114 /* Record the handler address in the stack slot for that purpose,
3115 during this block, saving and restoring the outer value. */
3116 if (thisblock->next != 0)
3118 emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
3121 emit_move_insn (save_receiver, nonlocal_goto_handler_slot);
3122 insns = get_insns ();
3124 emit_insns_before (insns, thisblock->data.block.first_insn);
3128 emit_move_insn (nonlocal_goto_handler_slot,
3129 gen_rtx (LABEL_REF, Pmode, handler_label));
3130 insns = get_insns ();
3132 emit_insns_before (insns, thisblock->data.block.first_insn);
3134 /* Jump around the handler; it runs only when specially invoked. */
3135 emit_jump (afterward);
3136 emit_label (handler_label);
3138 #ifdef HAVE_nonlocal_goto
3139 if (! HAVE_nonlocal_goto)
3141 /* First adjust our frame pointer to its actual value. It was
3142 previously set to the start of the virtual area corresponding to
3143 the stacked variables when we branched here and now needs to be
3144 adjusted to the actual hardware fp value.
3146 Assignments to virtual registers are converted by
3147 instantiate_virtual_regs into the corresponding assignment
3148 to the underlying register (fp in this case) that makes
3149 the original assignment true.
3150 So the following insn will actually be
3151 decrementing fp by STARTING_FRAME_OFFSET. */
3152 emit_move_insn (virtual_stack_vars_rtx, frame_pointer_rtx);
3154 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3155 if (fixed_regs[ARG_POINTER_REGNUM])
3157 #ifdef ELIMINABLE_REGS
3158 /* If the argument pointer can be eliminated in favor of the
3159 frame pointer, we don't need to restore it. We assume here
3160 that if such an elimination is present, it can always be used.
3161 This is the case on all known machines; if we don't make this
3162 assumption, we do unnecessary saving on many machines. */
3163 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3166 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
3167 if (elim_regs[i].from == ARG_POINTER_REGNUM
3168 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3171 if (i == sizeof elim_regs / sizeof elim_regs [0])
3174 /* Now restore our arg pointer from the address at which it
3175 was saved in our stack frame.
3176 If space hasn't been allocated for it yet, make some now. */
3178 if (arg_pointer_save_area == 0)
3179 arg_pointer_save_area
3180 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3181 emit_move_insn (virtual_incoming_args_rtx,
3182 /* We need a pseudo here, or else
3183 instantiate_virtual_regs_1 complains. */
3184 copy_to_reg (arg_pointer_save_area));
3189 /* The handler expects the desired label address in the static chain
3190 register. It tests the address and does an appropriate jump
3191 to whatever label is desired. */
3192 for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
3193 /* Skip any labels we shouldn't be able to jump to from here. */
3194 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3196 rtx not_this = gen_label_rtx ();
3197 rtx this = gen_label_rtx ();
3198 do_jump_if_equal (static_chain_rtx,
3199 gen_rtx (LABEL_REF, Pmode, DECL_RTL (TREE_VALUE (link))),
3201 emit_jump (not_this);
3203 expand_goto (TREE_VALUE (link));
3204 emit_label (not_this);
3206 /* If label is not recognized, abort. */
3207 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0,
3209 emit_label (afterward);
3212 /* Don't allow jumping into a block that has cleanups or a stack level. */
3214 || thisblock->data.block.stack_level != 0
3215 || thisblock->data.block.cleanups != 0)
3217 struct label_chain *chain;
3219 /* Any labels in this block are no longer valid to go to.
3220 Mark them to cause an error message. */
3221 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3223 DECL_TOO_LATE (chain->label) = 1;
3224 /* If any goto without a fixup came to this label,
3225 that must be an error, because gotos without fixups
3226 come from outside all saved stack-levels and all cleanups. */
3227 if (TREE_ADDRESSABLE (chain->label))
3228 error_with_decl (chain->label,
3229 "label `%s' used before containing binding contour");
3233 /* Restore stack level in effect before the block
3234 (only if variable-size objects allocated). */
3235 /* Perform any cleanups associated with the block. */
3237 if (thisblock->data.block.stack_level != 0
3238 || thisblock->data.block.cleanups != 0)
3240 /* Don't let cleanups affect ({...}) constructs. */
3241 int old_expr_stmts_for_value = expr_stmts_for_value;
3242 rtx old_last_expr_value = last_expr_value;
3243 tree old_last_expr_type = last_expr_type;
3244 expr_stmts_for_value = 0;
3246 /* Do the cleanups. */
3247 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE);
3248 do_pending_stack_adjust ();
3250 expr_stmts_for_value = old_expr_stmts_for_value;
3251 last_expr_value = old_last_expr_value;
3252 last_expr_type = old_last_expr_type;
3254 /* Restore the stack level. */
3256 if (thisblock->data.block.stack_level != 0)
3258 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3259 thisblock->data.block.stack_level, NULL_RTX);
3260 if (nonlocal_goto_handler_slot != 0)
3261 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3265 /* Any gotos out of this block must also do these things.
3266 Also report any gotos with fixups that came to labels in this level. */
3268 fixup_gotos (thisblock,
3269 thisblock->data.block.stack_level,
3270 thisblock->data.block.cleanups,
3271 thisblock->data.block.first_insn,
3275 /* Mark the beginning and end of the scope if requested.
3276 We do this now, after running cleanups on the variables
3277 just going out of scope, so they are in scope for their cleanups. */
3280 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3282 /* Get rid of the beginning-mark if we don't make an end-mark. */
3283 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3285 /* If doing stupid register allocation, make sure lives of all
3286 register variables declared here extend thru end of scope. */
3289 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3291 rtx rtl = DECL_RTL (decl);
3292 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
3296 /* Restore block_stack level for containing block. */
3298 stack_block_stack = thisblock->data.block.innermost_stack_block;
3299 POPSTACK (block_stack);
3301 /* Pop the stack slot nesting and free any slots at this level. */
3306 /* End a binding contour.
3307 VARS is the chain of VAR_DECL nodes for the variables bound
3308 in this contour. MARK_ENDS is nonzero if we should put a note
3309 at the beginning and end of this binding contour.
3310 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour. */
3314 bc_expand_end_bindings (vars, mark_ends, dont_jump_in)
3319 struct nesting *thisbind = nesting_stack;
3323 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3324 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL)
3325 warning_with_decl (decl, "unused variable `%s'");
3327 if (thisbind->exit_label)
3328 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisbind->exit_label));
3330 /* Pop block/bindings off stack */
3331 POPSTACK (block_stack);
3334 /* Generate RTL for the automatic variable declaration DECL.
3335 (Other kinds of declarations are simply ignored if seen here.)
3336 CLEANUP is an expression to be executed at exit from this binding contour;
3337 for example, in C++, it might call the destructor for this variable.
3339 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
3340 either before or after calling `expand_decl' but before compiling
3341 any subsequent expressions. This is because CLEANUP may be expanded
3342 more than once, on different branches of execution.
3343 For the same reason, CLEANUP may not contain a CALL_EXPR
3344 except as its topmost node--else `preexpand_calls' would get confused.
3346 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3347 that is not associated with any particular variable.
3349 There is no special support here for C++ constructors.
3350 They should be handled by the proper code in DECL_INITIAL. */
3356 struct nesting *thisblock = block_stack;
3359 if (output_bytecode)
3361 bc_expand_decl (decl, 0);
3365 type = TREE_TYPE (decl);
3367 /* Only automatic variables need any expansion done.
3368 Static and external variables, and external functions,
3369 will be handled by `assemble_variable' (called from finish_decl).
3370 TYPE_DECL and CONST_DECL require nothing.
3371 PARM_DECLs are handled in `assign_parms'. */
3373 if (TREE_CODE (decl) != VAR_DECL)
3375 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3378 /* Create the RTL representation for the variable. */
3380 if (type == error_mark_node)
3381 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
3382 else if (DECL_SIZE (decl) == 0)
3383 /* Variable with incomplete type. */
3385 if (DECL_INITIAL (decl) == 0)
3386 /* Error message was already done; now avoid a crash. */
3387 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
3389 /* An initializer is going to decide the size of this array.
3390 Until we know the size, represent its address with a reg. */
3391 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
3393 else if (DECL_MODE (decl) != BLKmode
3394 /* If -ffloat-store, don't put explicit float vars into regs. */
3396 && !(flag_float_store
3397 && TREE_CODE (type) == REAL_TYPE)
3398 && ! TREE_THIS_VOLATILE (decl)
3399 && ! TREE_ADDRESSABLE (decl)
3400 && (DECL_REGISTER (decl) || ! obey_regdecls))
3402 /* Automatic variable that can go in a register. */
3403 int unsignedp = TREE_UNSIGNED (type);
3404 enum machine_mode reg_mode
3405 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3407 if (TREE_CODE (type) == COMPLEX_TYPE)
3409 rtx realpart, imagpart;
3410 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (type));
3412 /* For a complex type variable, make a CONCAT of two pseudos
3413 so that the real and imaginary parts
3414 can be allocated separately. */
3415 realpart = gen_reg_rtx (partmode);
3416 REG_USERVAR_P (realpart) = 1;
3417 imagpart = gen_reg_rtx (partmode);
3418 REG_USERVAR_P (imagpart) = 1;
3419 DECL_RTL (decl) = gen_rtx (CONCAT, reg_mode, realpart, imagpart);
3423 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3424 if (TREE_CODE (type) == POINTER_TYPE)
3425 mark_reg_pointer (DECL_RTL (decl));
3426 REG_USERVAR_P (DECL_RTL (decl)) = 1;
3429 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST)
3431 /* Variable of fixed size that goes on the stack. */
3435 /* If we previously made RTL for this decl, it must be an array
3436 whose size was determined by the initializer.
3437 The old address was a register; set that register now
3438 to the proper address. */
3439 if (DECL_RTL (decl) != 0)
3441 if (GET_CODE (DECL_RTL (decl)) != MEM
3442 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3444 oldaddr = XEXP (DECL_RTL (decl), 0);
3448 = assign_stack_temp (DECL_MODE (decl),
3449 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
3450 + BITS_PER_UNIT - 1)
3454 /* Set alignment we actually gave this decl. */
3455 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3456 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3460 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3461 if (addr != oldaddr)
3462 emit_move_insn (oldaddr, addr);
3465 /* If this is a memory ref that contains aggregate components,
3466 mark it as such for cse and loop optimize. */
3467 MEM_IN_STRUCT_P (DECL_RTL (decl))
3468 = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
3469 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
3470 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
3471 || TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
3473 /* If this is in memory because of -ffloat-store,
3474 set the volatile bit, to prevent optimizations from
3475 undoing the effects. */
3476 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
3477 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3481 /* Dynamic-size object: must push space on the stack. */
3485 /* Record the stack pointer on entry to block, if have
3486 not already done so. */
3487 if (thisblock->data.block.stack_level == 0)
3489 do_pending_stack_adjust ();
3490 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3491 &thisblock->data.block.stack_level,
3492 thisblock->data.block.first_insn);
3493 stack_block_stack = thisblock;
3496 /* Compute the variable's size, in bytes. */
3497 size = expand_expr (size_binop (CEIL_DIV_EXPR,
3499 size_int (BITS_PER_UNIT)),
3500 NULL_RTX, VOIDmode, 0);
3503 /* This is equivalent to calling alloca. */
3504 current_function_calls_alloca = 1;
3506 /* Allocate space on the stack for the variable. */
3507 address = allocate_dynamic_stack_space (size, NULL_RTX,
3510 if (nonlocal_goto_handler_slot != 0)
3511 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
3513 /* Reference the variable indirectly through that rtx. */
3514 DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);
3516 /* If this is a memory ref that contains aggregate components,
3517 mark it as such for cse and loop optimize. */
3518 MEM_IN_STRUCT_P (DECL_RTL (decl))
3519 = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
3520 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
3521 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
3522 || TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
3524 /* Indicate the alignment we actually gave this variable. */
3525 #ifdef STACK_BOUNDARY
3526 DECL_ALIGN (decl) = STACK_BOUNDARY;
3528 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3532 if (TREE_THIS_VOLATILE (decl))
3533 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3534 #if 0 /* A variable is not necessarily unchanging
3535 just because it is const. RTX_UNCHANGING_P
3536 means no change in the function,
3537 not merely no change in the variable's scope.
3538 It is correct to set RTX_UNCHANGING_P if the variable's scope
3539 is the whole function. There's no convenient way to test that. */
3540 if (TREE_READONLY (decl))
3541 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
3544 /* If doing stupid register allocation, make sure life of any
3545 register variable starts here, at the start of its scope. */
3548 use_variable (DECL_RTL (decl));
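/* To summarize, the DECL_RTL chosen above is roughly:

	error type		(mem:BLK (const_int 0))
	incomplete type		a stack temp, or (mem:BLK (reg)) until the
				initializer determines the size
	register-eligible	a pseudo register, in the promoted mode
	fixed size in memory	a stack slot from assign_stack_temp
	variable size		(mem (reg)) pointing at space obtained
				from allocate_dynamic_stack_space  */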
3552 /* Generate code for the automatic variable declaration DECL. For
3553 most variables this just means we give it a stack offset. The
3554 compiler sometimes emits cleanups without variables and we will
3555 have to deal with those too. */
3558 bc_expand_decl (decl, cleanup)
3566 /* A cleanup with no variable. */
3573 /* Only auto variables need any work. */
3574 if (TREE_CODE (decl) != VAR_DECL || TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3577 type = TREE_TYPE (decl);
3579 if (type == error_mark_node)
3580 DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
3582 else if (DECL_SIZE (decl) == 0)
3584 /* Variable with incomplete type. The stack offset herein will be
3585 fixed later in expand_decl_init (). */
3586 DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
3588 else if (TREE_CONSTANT (DECL_SIZE (decl)))
3590 DECL_RTL (decl) = bc_allocate_local (TREE_INT_CST_LOW (DECL_SIZE (decl)) / BITS_PER_UNIT,
3594 DECL_RTL (decl) = bc_allocate_variable_array (DECL_SIZE (decl));
3597 /* Emit code to perform the initialization of a declaration DECL. */
3600 expand_decl_init (decl)
3603 int was_used = TREE_USED (decl);
3605 /* If this is a CONST_DECL, we don't have to generate any code, but
3606 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3607 to be set while in the obstack containing the constant. If we don't
3608 do this, we can lose if we have functions nested three deep and the middle
3609 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3610 the innermost function is the first to expand that STRING_CST. */
3611 if (TREE_CODE (decl) == CONST_DECL)
3613 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3614 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3615 EXPAND_INITIALIZER);
3619 if (TREE_STATIC (decl))
3622 /* Compute and store the initial value now. */
3624 if (DECL_INITIAL (decl) == error_mark_node)
3626 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3627 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3628 || code == POINTER_TYPE)
3629 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3633 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3635 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3636 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3640 /* Don't let the initialization count as "using" the variable. */
3641 TREE_USED (decl) = was_used;
3643 /* Free any temporaries we made while initializing the decl. */
3647 /* Expand initialization for variable-sized types. Allocate array
3648 using newlocalSI and set local variable, which is a pointer to the dynamic array. */
3651 bc_expand_variable_local_init (decl)
3654 /* Evaluate size expression and coerce to SI */
3655 bc_expand_expr (DECL_SIZE (decl));
3657 /* Type sizes are always (?) of TREE_CODE INTEGER_CST, so
3658 no coercion is necessary (?) */
3660 /* emit_typecode_conversion (preferred_typecode (TYPE_MODE (DECL_SIZE (decl)),
3661 TREE_UNSIGNED (DECL_SIZE (decl))), SIcode); */
3663 /* Emit code to allocate array */
3664 bc_emit_instruction (newlocalSI);
3666 /* Store array pointer in local variable. This is the only instance
3667 where we actually want the address of the pointer to the
3668 variable-size block, rather than the pointer itself. We avoid
3669 using expand_address() since that would cause the pointer to be
3670 pushed rather than its address. Hence the hard-coded reference;
3671 notice also that the variable is always local (no global
3672 variable-size type variables). */
3674 bc_load_localaddr (DECL_RTL (decl));
3675 bc_emit_instruction (storeP);
3679 /* Emit code to initialize a declaration. */
3681 bc_expand_decl_init (decl)
3684 int org_stack_depth;
3686 /* Static initializers are handled elsewhere. */
3688 if (TREE_STATIC (decl))
3691 /* Remember the original stack depth. */
3692 org_stack_depth = stack_depth;
3694 /* If the type is variable-size, we first create its space (we ASSUME
3695 it CAN'T be static). We do this regardless of whether there's an
3696 initializer assignment or not. */
3698 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
3699 bc_expand_variable_local_init (decl);
3701 /* Expand initializer assignment */
3702 if (DECL_INITIAL (decl) == error_mark_node)
3704 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3706 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3707 || code == POINTER_TYPE)
3709 expand_assignment (TREE_TYPE (decl), decl, 0, 0);
3711 else if (DECL_INITIAL (decl))
3712 expand_assignment (TREE_TYPE (decl), decl, 0, 0);
3714 /* Restore stack depth */
3715 if (org_stack_depth > stack_depth)
3718 bc_adjust_stack (stack_depth - org_stack_depth);
3722 /* CLEANUP is an expression to be executed at exit from this binding contour;
3723 for example, in C++, it might call the destructor for this variable.
3725 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
3726 either before or after calling `expand_decl' but before compiling
3727 any subsequent expressions. This is because CLEANUP may be expanded
3728 more than once, on different branches of execution.
3729 For the same reason, CLEANUP may not contain a CALL_EXPR
3730 except as its topmost node--else `preexpand_calls' would get confused.
3732 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3733 that is not associated with any particular variable. */
3736 expand_decl_cleanup (decl, cleanup)
3739 struct nesting *thisblock = block_stack;
3741 /* Error if we are not in any block. */
3745 /* Record the cleanup if there is one. */
3749 thisblock->data.block.cleanups
3750 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3751 /* If this block has a cleanup, it belongs in stack_block_stack. */
3752 stack_block_stack = thisblock;
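/* For example, a C++ front end expanding `T x;' where T has a
   destructor might record (a sketch; build_cleanup is a hypothetical
   front-end helper that builds the destructor call):

	expand_decl_cleanup (x_decl, build_cleanup (x_decl));

   so that the destructor is expanded on every path out of the contour. */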
3757 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
3758 DECL_ELTS is the list of elements that belong to DECL's type.
3759 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
3762 expand_anon_union_decl (decl, cleanup, decl_elts)
3763 tree decl, cleanup, decl_elts;
3765 struct nesting *thisblock = block_stack;
3768 expand_decl (decl, cleanup);
3769 x = DECL_RTL (decl);
3773 tree decl_elt = TREE_VALUE (decl_elts);
3774 tree cleanup_elt = TREE_PURPOSE (decl_elts);
3775 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
3777 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
3778 instead create a new MEM rtx with the proper mode. */
3779 if (GET_CODE (x) == MEM)
3781 if (mode == GET_MODE (x))
3782 DECL_RTL (decl_elt) = x;
3785 DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0)));
3786 MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
3787 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
3790 else if (GET_CODE (x) == REG)
3792 if (mode == GET_MODE (x))
3793 DECL_RTL (decl_elt) = x;
3795 DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0);
3800 /* Record the cleanup if there is one. */
3803 thisblock->data.block.cleanups
3804 = temp_tree_cons (decl_elt, cleanup_elt,
3805 thisblock->data.block.cleanups);
3807 decl_elts = TREE_CHAIN (decl_elts);
3811 /* Expand a list of cleanups LIST.
3812 Elements may be expressions or may be nested lists.
3814 If DONT_DO is nonnull, then any list-element
3815 whose TREE_PURPOSE matches DONT_DO is omitted.
3816 This is sometimes used to avoid a cleanup associated with
3817 a value that is being returned out of the scope. */
3820 expand_cleanups (list, dont_do)
3825 for (tail = list; tail; tail = TREE_CHAIN (tail))
3826 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
3828 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3829 expand_cleanups (TREE_VALUE (tail), dont_do);
3832 /* Cleanups may be run multiple times. For example,
3833 when exiting a binding contour, we expand the
3834 cleanups associated with that contour. When a goto
3835 within that binding contour has a target outside that
3836 contour, it will expand all cleanups from its scope to
3837 the target. Though the cleanups are expanded multiple
3838 times, the control paths are non-overlapping so the
3839 cleanups will not be executed twice. */
3840 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
3846 /* Move all cleanups from the current block_stack
3847 to the containing block_stack, where they are assumed to
3848 have been created. If anything can cause a temporary to
3849 be created, but not expanded for more than one level of
3850 block_stacks, then this code will have to change. */
3855 struct nesting *block = block_stack;
3856 struct nesting *outer = block->next;
3858 outer->data.block.cleanups
3859 = chainon (block->data.block.cleanups,
3860 outer->data.block.cleanups);
3861 block->data.block.cleanups = 0;
3865 last_cleanup_this_contour ()
3867 if (block_stack == 0)
3870 return block_stack->data.block.cleanups;
3873 /* Return 1 if there are any pending cleanups at this point.
3874 If THIS_CONTOUR is nonzero, check the current contour as well.
3875 Otherwise, look only at the contours that enclose this one. */
3878 any_pending_cleanups (this_contour)
3881 struct nesting *block;
3883 if (block_stack == 0)
3886 if (this_contour && block_stack->data.block.cleanups != NULL)
3888 if (block_stack->data.block.cleanups == 0
3889 && (block_stack->data.block.outer_cleanups == 0
3891 || block_stack->data.block.outer_cleanups == empty_cleanup_list
3896 for (block = block_stack->next; block; block = block->next)
3897 if (block->data.block.cleanups != 0)
3903 /* Enter a case (Pascal) or switch (C) statement.
3904 Push a block onto case_stack and nesting_stack
3905 to accumulate the case-labels that are seen
3906 and to record the labels generated for the statement.
3908 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
3909 Otherwise, this construct is transparent for `exit_something'.
3911 EXPR is the index-expression to be dispatched on.
3912 TYPE is its nominal type. We could simply convert EXPR to this type,
3913 but instead we take short cuts. */
3916 expand_start_case (exit_flag, expr, type, printname)
3922 register struct nesting *thiscase = ALLOC_NESTING ();
3924 /* Make an entry on case_stack for the case we are entering. */
3926 thiscase->next = case_stack;
3927 thiscase->all = nesting_stack;
3928 thiscase->depth = ++nesting_depth;
3929 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
3930 thiscase->data.case_stmt.case_list = 0;
3931 thiscase->data.case_stmt.index_expr = expr;
3932 thiscase->data.case_stmt.nominal_type = type;
3933 thiscase->data.case_stmt.default_label = 0;
3934 thiscase->data.case_stmt.num_ranges = 0;
3935 thiscase->data.case_stmt.printname = printname;
3936 thiscase->data.case_stmt.seenlabel = 0;
3937 case_stack = thiscase;
3938 nesting_stack = thiscase;
3940 if (output_bytecode)
3942 bc_expand_start_case (thiscase, expr, type, printname);
3946 do_pending_stack_adjust ();
3948 /* Make sure case_stmt.start points to something that won't
3949 need any transformation before expand_end_case. */
3950 if (GET_CODE (get_last_insn ()) != NOTE)
3951 emit_note (NULL_PTR, NOTE_INSN_DELETED);
3953 thiscase->data.case_stmt.start = get_last_insn ();
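/* A sketch of the call sequence for a C `switch (e)' statement
   (hypothetical front-end code; CONVERTER is the language's
   conversion function):

	expand_start_case (1, e, type, "switch statement");
	pushcase (case_value, converter, case_label, &duplicate);
	...
	pushcase (NULL_TREE, 0, default_label, &duplicate);
	expand_end_case (e);
*/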
3957 /* Enter a case statement. It is assumed that the caller has pushed
3958 the current context onto the case stack. */
3960 bc_expand_start_case (thiscase, expr, type, printname)
3961 struct nesting *thiscase;
3966 bc_expand_expr (expr);
3967 bc_expand_conversion (TREE_TYPE (expr), type);
3969 /* For cases, the skip is a place we jump to that's emitted after
3970 the size of the jump table is known. */
3972 thiscase->data.case_stmt.skip_label = gen_label_rtx ();
3973 bc_emit_bytecode (jump);
3974 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));
3976 #ifdef DEBUG_PRINT_CODE
3977 fputc ('\n', stderr);
3982 /* Start a "dummy case statement" within which case labels are invalid
3983 and are not connected to any larger real case statement.
3984 This can be used if you don't want to let a case statement jump
3985 into the middle of certain kinds of constructs. */
3988 expand_start_case_dummy ()
3990 register struct nesting *thiscase = ALLOC_NESTING ();
3992 /* Make an entry on case_stack for the dummy. */
3994 thiscase->next = case_stack;
3995 thiscase->all = nesting_stack;
3996 thiscase->depth = ++nesting_depth;
3997 thiscase->exit_label = 0;
3998 thiscase->data.case_stmt.case_list = 0;
3999 thiscase->data.case_stmt.start = 0;
4000 thiscase->data.case_stmt.nominal_type = 0;
4001 thiscase->data.case_stmt.default_label = 0;
4002 thiscase->data.case_stmt.num_ranges = 0;
4003 case_stack = thiscase;
4004 nesting_stack = thiscase;
4007 /* End a dummy case statement. */
4010 expand_end_case_dummy ()
4012 POPSTACK (case_stack);
4015 /* Return the data type of the index-expression
4016 of the innermost case statement, or null if none. */
4019 case_index_expr_type ()
4022 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4026 /* Accumulate one case or default label inside a case or switch statement.
4027 VALUE is the value of the case (a null pointer, for a default label).
4028 The function CONVERTER, when applied to arguments T and V,
4029 converts the value V to the type T.
4031 If not currently inside a case or switch statement, return 1 and do
4032 nothing. The caller will print a language-specific error message.
4033 If VALUE is a duplicate or overlaps, return 2 and do nothing
4034 except store the (first) duplicate node in *DUPLICATE.
4035 If VALUE is out of range, return 3 and do nothing.
4036 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4037 Return 0 on success.
4039 Extended to handle range statements. */
4042 pushcase (value, converter, label, duplicate)
4043 register tree value;
4044 tree (*converter) PROTO((tree, tree));
4045 register tree label;
     tree *duplicate;
{
  register struct case_node **l;
  register struct case_node *n;
  tree index_type;
  tree nominal_type;
4053 if (output_bytecode)
4054 return bc_pushcase (value, label);
4056 /* Fail if not inside a real case statement. */
  if (! (case_stack && case_stack->data.case_stmt.start))
    return 1;
  if (stack_block_stack
      && stack_block_stack->depth > case_stack->depth)
    return 5;
4064 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4065 nominal_type = case_stack->data.case_stmt.nominal_type;
4067 /* If the index is erroneous, avoid more problems: pretend to succeed. */
  if (index_type == error_mark_node)
    return 0;
4071 /* Convert VALUE to the type in which the comparisons are nominally done. */
4073 value = (*converter) (nominal_type, value);
4075 /* If this is the first label, warn if any insns have been emitted. */
  if (case_stack->data.case_stmt.seenlabel == 0)
    {
      register rtx insn;

      for (insn = case_stack->data.case_stmt.start;
	   insn;
	   insn = NEXT_INSN (insn))
	{
	  if (GET_CODE (insn) == CODE_LABEL)
	    break;
	  if (GET_CODE (insn) != NOTE
	      && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
	    {
	      warning ("unreachable code at beginning of %s",
		       case_stack->data.case_stmt.printname);
	      break;
	    }
	}
    }
  case_stack->data.case_stmt.seenlabel = 1;
4096 /* Fail if this value is out of range for the actual type of the index
4097 (which may be narrower than NOMINAL_TYPE). */
  if (value != 0 && ! int_fits_type_p (value, index_type))
    return 3;
  /* Fail if this is a duplicate or overlaps another entry.  */

  if (value == 0)
    {
      if (case_stack->data.case_stmt.default_label != 0)
	{
	  *duplicate = case_stack->data.case_stmt.default_label;
	  return 2;
	}
      case_stack->data.case_stmt.default_label = label;
    }
  else
    {
4113 /* Find the elt in the chain before which to insert the new value,
4114 to keep the chain sorted in increasing order.
4115 But report an error if this element is a duplicate. */
      for (l = &case_stack->data.case_stmt.case_list;
	   /* Keep going past elements distinctly less than VALUE.  */
	   *l != 0 && tree_int_cst_lt ((*l)->high, value);
	   l = &(*l)->right)
	;
4123 /* Element we will insert before must be distinctly greater;
4124 overlap means error. */
      if (! tree_int_cst_lt (value, (*l)->low))
	{
	  *duplicate = (*l)->code_label;
	  return 2;
	}
4132 /* Add this label to the chain, and succeed.
4133 Copy VALUE so it is on temporary rather than momentary
4134 obstack and will thus survive till the end of the case statement. */
      n = (struct case_node *) oballoc (sizeof (struct case_node));
      n->left = 0;
      n->right = *l;
      n->high = n->low = copy_node (value);
      n->code_label = label;
      *l = n;
    }

  expand_label (label);

  return 0;
}
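
/* A minimal sketch (hypothetical front-end code) of mapping the return
   codes above onto diagnostics; the message wording is illustrative:

	tree duplicate;
	switch (pushcase (value, convert, label, &duplicate))
	  {
	  case 1: error ("case label not within a switch statement"); break;
	  case 2: error ("duplicate case value"); break;
	  case 3: error ("case value out of range"); break;
	  case 5: error ("case label within scope of cleanup or variable array"); break;
	  }
*/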
4147 /* Like pushcase but this case applies to all values
4148 between VALUE1 and VALUE2 (inclusive).
4149 The return value is the same as that of pushcase
4150 but there is one additional error code:
4151 4 means the specified range was empty. */
int
pushcase_range (value1, value2, converter, label, duplicate)
4155 register tree value1, value2;
4156 tree (*converter) PROTO((tree, tree));
4157 register tree label;
     tree *duplicate;
{
  register struct case_node **l;
  register struct case_node *n;
  tree index_type;
  tree nominal_type;
4165 /* Fail if not inside a real case statement. */
  if (! (case_stack && case_stack->data.case_stmt.start))
    return 1;
  if (stack_block_stack
      && stack_block_stack->depth > case_stack->depth)
    return 5;
4173 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4174 nominal_type = case_stack->data.case_stmt.nominal_type;
4176 /* If the index is erroneous, avoid more problems: pretend to succeed. */
  if (index_type == error_mark_node)
    return 0;
4180 /* If this is the first label, warn if any insns have been emitted. */
  if (case_stack->data.case_stmt.seenlabel == 0)
    {
      register rtx insn;

      for (insn = case_stack->data.case_stmt.start;
	   insn;
	   insn = NEXT_INSN (insn))
	{
	  if (GET_CODE (insn) == CODE_LABEL)
	    break;
	  if (GET_CODE (insn) != NOTE
	      && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
	    {
	      warning ("unreachable code at beginning of %s",
		       case_stack->data.case_stmt.printname);
	      break;
	    }
	}
    }
  case_stack->data.case_stmt.seenlabel = 1;
4201 /* Convert VALUEs to type in which the comparisons are nominally done. */
4202 if (value1 == 0) /* Negative infinity. */
    value1 = TYPE_MIN_VALUE (index_type);
4204 value1 = (*converter) (nominal_type, value1);
4206 if (value2 == 0) /* Positive infinity. */
    value2 = TYPE_MAX_VALUE (index_type);
4208 value2 = (*converter) (nominal_type, value2);
4210 /* Fail if these values are out of range. */
  if (! int_fits_type_p (value1, index_type))
    return 3;
  if (! int_fits_type_p (value2, index_type))
    return 3;
  /* Fail if the range is empty.  */
  if (tree_int_cst_lt (value2, value1))
    return 4;
4221 /* If the bounds are equal, turn this into the one-value case. */
4222 if (tree_int_cst_equal (value1, value2))
4223 return pushcase (value1, converter, label, duplicate);
4225 /* Find the elt in the chain before which to insert the new value,
4226 to keep the chain sorted in increasing order.
4227 But report an error if this element is a duplicate. */
  for (l = &case_stack->data.case_stmt.case_list;
       /* Keep going past elements distinctly less than this range.  */
       *l != 0 && tree_int_cst_lt ((*l)->high, value1);
       l = &(*l)->right)
    ;
4235 /* Element we will insert before must be distinctly greater;
4236 overlap means error. */
  if (! tree_int_cst_lt (value2, (*l)->low))
    {
      *duplicate = (*l)->code_label;
      return 2;
    }
4244 /* Add this label to the chain, and succeed.
4245 Copy VALUE1, VALUE2 so they are on temporary rather than momentary
4246 obstack and will thus survive till the end of the case statement. */
  n = (struct case_node *) oballoc (sizeof (struct case_node));
  n->left = 0;
  n->right = *l;
  n->low = copy_node (value1);
  n->high = copy_node (value2);
  n->code_label = label;
  *l = n;

  expand_label (label);

  case_stack->data.case_stmt.num_ranges++;

  return 0;
}
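
/* For example (hypothetical source, using the GNU case-range extension),
   the label `case 1 ... 5:' reaches the compiler as

	pushcase_range (build_int_2 (1, 0), build_int_2 (5, 0),
			convert, label, &duplicate);

   whereas `case 1:' alone goes through pushcase.  Equal bounds are
   folded back into the single-value path above before any range node
   is created.  */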
4264 /* Accumulate one case or default label; VALUE is the value of the
4265 case, or nil for a default label. If not currently inside a case,
4266 return 1 and do nothing. If VALUE is a duplicate or overlaps, return
4267 2 and do nothing. If VALUE is out of range, return 3 and do nothing.
4268 Return 0 on success. This function is a leftover from the earlier
4269 bytecode compiler, which was based on gcc 1.37. It should be
4270 merged into pushcase. */
static int
bc_pushcase (value, label)
     tree value;
     tree label;
{
  struct nesting *thiscase = case_stack;
  struct case_node *case_label, *new_label;

  if (! thiscase)
    return 1;

  /* Fail if duplicate, overlap, or out of type range.  */
  value = convert (thiscase->data.case_stmt.nominal_type, value);
  if (! int_fits_type_p (value, thiscase->data.case_stmt.nominal_type))
    return 3;
  for (case_label = thiscase->data.case_stmt.case_list;
       case_label->left; case_label = case_label->left)
    if (! tree_int_cst_lt (case_label->left->high, value))
      break;
  if ((case_label != thiscase->data.case_stmt.case_list
       && ! tree_int_cst_lt (case_label->high, value))
      || (case_label->left && ! tree_int_cst_lt (value, case_label->left->low)))
    return 2;
4300 new_label = (struct case_node *) oballoc (sizeof (struct case_node));
4301 new_label->low = new_label->high = copy_node (value);
4302 new_label->code_label = label;
4303 new_label->left = case_label->left;
4305 case_label->left = new_label;
4306 thiscase->data.case_stmt.num_ranges++;
  if (thiscase->data.case_stmt.default_label)
    return 2;

  thiscase->data.case_stmt.default_label = label;

  expand_label (label);
  return 0;
}
4319 /* Called when the index of a switch statement is an enumerated type
4320 and there is no default label.
4322 Checks that all enumeration literals are covered by the case
4323 expressions of a switch. Also, warn if there are any extra
4324 switch cases that are *not* elements of the enumerated type.
4326 If all enumeration literals were covered by the case expressions,
4327 turn one of the expressions into the default expression since it should
4328 not be possible to fall through such a switch. */
static void
check_for_full_enumeration_handling (type)
     tree type;
{
4334 register struct case_node *n;
4335 register struct case_node **l;
4336 register tree chain;
  if (output_bytecode)
    {
      bc_check_for_full_enumeration_handling (type);
      return;
    }
4345 /* The time complexity of this loop is currently O(N * M), with
4346 N being the number of members in the enumerated type, and
4347 M being the number of case expressions in the switch. */
  for (chain = TYPE_VALUES (type); chain;
       chain = TREE_CHAIN (chain))
4353 /* Find a match between enumeral and case expression, if possible.
4354 Quit looking when we've gone too far (since case expressions
4355 are kept sorted in ascending order). Warn about enumerators not
4356 handled in the switch statement case expression list. */
      for (n = case_stack->data.case_stmt.case_list;
	   n && tree_int_cst_lt (n->high, TREE_VALUE (chain));
	   n = n->right)
	;
4363 if (!n || tree_int_cst_lt (TREE_VALUE (chain), n->low))
4366 warning ("enumeration value `%s' not handled in switch",
4367 IDENTIFIER_POINTER (TREE_PURPOSE (chain)));
4372 /* Now we go the other way around; we warn if there are case
4373 expressions that don't correspond to enumerators. This can
4374 occur since C and C++ don't enforce type-checking of
4375 assignments to enumeration variables. */
4378 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
      for (chain = TYPE_VALUES (type);
	   chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
	   chain = TREE_CHAIN (chain))
	;

      if (!chain)
	{
	  if (TYPE_NAME (type) == 0)
	    warning ("case value `%d' not in enumerated type",
		     TREE_INT_CST_LOW (n->low));
	  else
	    warning ("case value `%d' not in enumerated type `%s'",
		     TREE_INT_CST_LOW (n->low),
		     IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
					  == IDENTIFIER_NODE)
					 ? TYPE_NAME (type)
					 : DECL_NAME (TYPE_NAME (type))));
	}
4398 if (!tree_int_cst_equal (n->low, n->high))
	  for (chain = TYPE_VALUES (type);
	       chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
	       chain = TREE_CHAIN (chain))
	    ;

	  if (!chain)
	    {
	      if (TYPE_NAME (type) == 0)
		warning ("case value `%d' not in enumerated type",
			 TREE_INT_CST_LOW (n->high));
	      else
		warning ("case value `%d' not in enumerated type `%s'",
			 TREE_INT_CST_LOW (n->high),
			 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
					      == IDENTIFIER_NODE)
					     ? TYPE_NAME (type)
					     : DECL_NAME (TYPE_NAME (type))));
	    }
4422 /* ??? This optimization is disabled because it causes valid programs to
4423 fail. ANSI C does not guarantee that an expression with enum type
   will have a value that is the same as one of the enumeration literals.  */
4426 /* If all values were found as case labels, make one of them the default
4427 label. Thus, this switch will never fall through. We arbitrarily pick
4428 the last one to make the default since this is likely the most
4429 efficient choice. */
4433 for (l = &case_stack->data.case_stmt.case_list;
4438 case_stack->data.case_stmt.default_label = (*l)->code_label;
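
/* For example (hypothetical source), given

	enum color { RED, GREEN, BLUE };
	enum color c;
	switch (c) { case RED: ...; case GREEN: ...; case 7: ...; }

   with no default label, the first loop above warns that `BLUE' is not
   handled in the switch, and the second warns that case value 7 is not
   in `enum color'.  */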
4445 /* Check that all enumeration literals are covered by the case
4446 expressions of a switch. Also warn if there are any cases
4447 that are not elements of the enumerated type. */
static void
bc_check_for_full_enumeration_handling (type)
     tree type;
{
  struct nesting *thiscase = case_stack;
  struct case_node *c;
  tree e;
4456 /* Check for enums not handled. */
4457 for (e = TYPE_VALUES (type); e; e = TREE_CHAIN (e))
      for (c = thiscase->data.case_stmt.case_list->left;
	   c && tree_int_cst_lt (c->high, TREE_VALUE (e));
	   c = c->left)
	;
4463 if (! (c && tree_int_cst_equal (c->low, TREE_VALUE (e))))
4464 warning ("enumerated value `%s' not handled in switch",
4465 IDENTIFIER_POINTER (TREE_PURPOSE (e)));
4468 /* Check for cases not in the enumeration. */
4469 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
      for (e = TYPE_VALUES (type);
	   e && !tree_int_cst_equal (c->low, TREE_VALUE (e));
	   e = TREE_CHAIN (e))
	;
      if (! e)
4476 warning ("case value `%d' not in enumerated type `%s'",
4477 TREE_INT_CST_LOW (c->low),
		 IDENTIFIER_POINTER (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
				     ? TYPE_NAME (type)
				     : DECL_NAME (TYPE_NAME (type))));
4484 /* Terminate a case (Pascal) or switch (C) statement
4485 in which ORIG_INDEX is the expression to be tested.
4486 Generate the code to test it and jump to the right place. */
void
expand_end_case (orig_index)
     tree orig_index;
{
4492 tree minval, maxval, range, orig_minval;
4493 rtx default_label = 0;
4494 register struct case_node *n;
4502 register struct nesting *thiscase = case_stack;
  if (output_bytecode)
    {
      bc_expand_end_case (orig_index);
      return;
    }
4512 table_label = gen_label_rtx ();
4513 index_expr = thiscase->data.case_stmt.index_expr;
4514 unsignedp = TREE_UNSIGNED (TREE_TYPE (index_expr));
4516 do_pending_stack_adjust ();
4518 /* An ERROR_MARK occurs for various reasons including invalid data type. */
4519 if (TREE_TYPE (index_expr) != error_mark_node)
4521 /* If switch expression was an enumerated type, check that all
4522 enumeration literals are covered by the cases.
4523 No sense trying this if there's a default case, however. */
4525 if (!thiscase->data.case_stmt.default_label
4526 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
4527 && TREE_CODE (index_expr) != INTEGER_CST)
4528 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
4530 /* If this is the first label, warn if any insns have been emitted. */
      if (thiscase->data.case_stmt.seenlabel == 0)
	{
	  register rtx insn;

	  for (insn = get_last_insn ();
	       insn != case_stack->data.case_stmt.start;
	       insn = PREV_INSN (insn))
	    if (GET_CODE (insn) != NOTE
		&& (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
	      {
		warning ("unreachable code at beginning of %s",
			 case_stack->data.case_stmt.printname);
		break;
	      }
	}
4546 /* If we don't have a default-label, create one here,
4547 after the body of the switch. */
4548 if (thiscase->data.case_stmt.default_label == 0)
4550 thiscase->data.case_stmt.default_label
4551 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4552 expand_label (thiscase->data.case_stmt.default_label);
4554 default_label = label_rtx (thiscase->data.case_stmt.default_label);
4556 before_case = get_last_insn ();
4558 /* Simplify the case-list before we count it. */
4559 group_case_nodes (thiscase->data.case_stmt.case_list);
4561 /* Get upper and lower bounds of case values.
4562 Also convert all the case values to the index expr's data type. */
4565 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4567 /* Check low and high label values are integers. */
	  if (TREE_CODE (n->low) != INTEGER_CST)
	    abort ();
	  if (TREE_CODE (n->high) != INTEGER_CST)
	    abort ();
4573 n->low = convert (TREE_TYPE (index_expr), n->low);
4574 n->high = convert (TREE_TYPE (index_expr), n->high);
4576 /* Count the elements and track the largest and smallest
4577 of them (treating them as signed even if they are not). */
	  if (INT_CST_LT (n->low, minval))
	    minval = n->low;
	  if (INT_CST_LT (maxval, n->high))
	    maxval = n->high;
	  /* A range counts double, since it requires two compares.  */
	  if (! tree_int_cst_equal (n->low, n->high))
	    count++;
4595 orig_minval = minval;
4597 /* Compute span of values. */
      range = fold (build (MINUS_EXPR, TREE_TYPE (index_expr),
			   maxval, minval));
4602 if (count == 0 || TREE_CODE (TREE_TYPE (index_expr)) == ERROR_MARK)
4604 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
4606 emit_jump (default_label);
4609 /* If range of values is much bigger than number of values,
4610 make a sequence of conditional branches instead of a dispatch.
4611 If the switch-index is a constant, do it this way
4612 because we can optimize it. */
#ifndef CASE_VALUES_THRESHOLD
#ifdef HAVE_casesi
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#else
/* If machine does not have a case insn that compares the
   bounds, this means extra overhead for dispatch tables
   which raises the threshold for using them.  */
#define CASE_VALUES_THRESHOLD 5
#endif /* HAVE_casesi */
#endif /* CASE_VALUES_THRESHOLD */
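
/* To illustrate the choice below (numbers hypothetical): three case
   values scattered over 0..100000 fail the density test (COUNT is
   below the threshold and RANGE is far larger than the number of
   values), so they are compiled as compare-and-branch code, while
   forty contiguous values 0..39 are dense enough to be compiled as
   one dispatch table.  */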
4625 else if (TREE_INT_CST_HIGH (range) != 0
4626 || count < CASE_VALUES_THRESHOLD
	   || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
	       > 10 * count)
4629 || TREE_CODE (index_expr) == INTEGER_CST
4630 /* These will reduce to a constant. */
4631 || (TREE_CODE (index_expr) == CALL_EXPR
4632 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
4633 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
4634 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
4635 || (TREE_CODE (index_expr) == COMPOUND_EXPR
4636 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
4638 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
	  /* If the index is a short or char for which we do not have
	     an insn to handle comparisons directly, convert it to
	     a full integer now, rather than letting each comparison
	     generate the conversion.  */

	  if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
	      && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
		  == CODE_FOR_nothing))
	    {
4649 enum machine_mode wider_mode;
4650 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
4651 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
		if (cmp_optab->handlers[(int) wider_mode].insn_code
		    != CODE_FOR_nothing)
		  {
		    index = convert_to_mode (wider_mode, index, unsignedp);
		    break;
		  }
	    }
4661 do_pending_stack_adjust ();
4663 index = protect_from_queue (index, 0);
4664 if (GET_CODE (index) == MEM)
4665 index = copy_to_reg (index);
4666 if (GET_CODE (index) == CONST_INT
4667 || TREE_CODE (index_expr) == INTEGER_CST)
4669 /* Make a tree node with the proper constant value
4670 if we don't already have one. */
	      if (TREE_CODE (index_expr) != INTEGER_CST)
		{
		  index_expr
		    = build_int_2 (INTVAL (index),
				   unsignedp || INTVAL (index) >= 0 ? 0 : -1);
		  index_expr = convert (TREE_TYPE (index_expr), index_expr);
		}
	      /* For constant index expressions we need only
		 issue an unconditional branch to the appropriate
		 target code.  The job of removing any unreachable
		 code is left to the optimisation phase if the
		 "-O" option is specified.  */
	      for (n = thiscase->data.case_stmt.case_list;
		   n;
		   n = n->right)
		if (! tree_int_cst_lt (index_expr, n->low)
		    && ! tree_int_cst_lt (n->high, index_expr))
		  break;

	      if (n)
		emit_jump (label_rtx (n->code_label));
	      else
		emit_jump (default_label);
4699 /* If the index expression is not constant we generate
4700 a binary decision tree to select the appropriate
4701 target code. This is done as follows:
4703 The list of cases is rearranged into a binary tree,
4704 nearly optimal assuming equal probability for each case.
4706 The tree is transformed into RTL, eliminating
4707 redundant test conditions at the same time.
4709 If program flow could reach the end of the
4710 decision tree an unconditional jump to the
4711 default code is emitted. */
	      use_cost_table
		= (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
4715 && estimate_case_costs (thiscase->data.case_stmt.case_list));
	      balance_case_nodes (&thiscase->data.case_stmt.case_list,
				  NULL_PTR);
4718 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
4719 default_label, TREE_TYPE (index_expr));
4720 emit_jump_if_reachable (default_label);
      else
	{
	  int win = 0;

#ifdef HAVE_casesi
	  if (HAVE_casesi)
	    {
	      enum machine_mode index_mode = SImode;
	      int index_bits = GET_MODE_BITSIZE (index_mode);
4732 /* Convert the index to SImode. */
4733 if (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (index_expr)))
4734 > GET_MODE_BITSIZE (index_mode))
4736 enum machine_mode omode = TYPE_MODE (TREE_TYPE (index_expr));
4737 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
4739 /* We must handle the endpoints in the original mode. */
4740 index_expr = build (MINUS_EXPR, TREE_TYPE (index_expr),
4741 index_expr, minval);
4742 minval = integer_zero_node;
4743 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4744 emit_cmp_insn (rangertx, index, LTU, NULL_RTX, omode, 1, 0);
4745 emit_jump_insn (gen_bltu (default_label));
4746 /* Now we can safely truncate. */
4747 index = convert_to_mode (index_mode, index, 0);
	      else
		{
		  if (TYPE_MODE (TREE_TYPE (index_expr)) != index_mode)
		    index_expr = convert (type_for_size (index_bits, 0),
					  index_expr);
		  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
		}
4757 index = protect_from_queue (index, 0);
4758 do_pending_stack_adjust ();
	      emit_jump_insn (gen_casesi (index, expand_expr (minval, NULL_RTX,
							      VOIDmode, 0),
					  expand_expr (range, NULL_RTX,
						       VOIDmode, 0),
					  table_label, default_label));
	      win = 1;
	    }
#endif
4768 #ifdef HAVE_tablejump
4769 if (! win && HAVE_tablejump)
4771 index_expr = convert (thiscase->data.case_stmt.nominal_type,
4772 fold (build (MINUS_EXPR,
4773 TREE_TYPE (index_expr),
4774 index_expr, minval)));
4775 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4777 index = protect_from_queue (index, 0);
4778 do_pending_stack_adjust ();
	      do_tablejump (index, TYPE_MODE (TREE_TYPE (index_expr)),
			    expand_expr (range, NULL_RTX, VOIDmode, 0),
			    table_label, default_label);
	      win = 1;
	    }
#endif
	  if (! win)
	    abort ();
4789 /* Get table of labels to jump to, in order of case index. */
4791 ncases = TREE_INT_CST_LOW (range) + 1;
4792 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
4793 bzero (labelvec, ncases * sizeof (rtx));
	  for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
	    {
	      register HOST_WIDE_INT i
		= TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);

	      while (1)
		{
		  labelvec[i]
		    = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
		  if (i + TREE_INT_CST_LOW (orig_minval)
		      == TREE_INT_CST_LOW (n->high))
		    break;
		  i++;
		}
	    }
4811 /* Fill in the gaps with the default. */
4812 for (i = 0; i < ncases; i++)
4813 if (labelvec[i] == 0)
4814 labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);
4816 /* Output the table */
4817 emit_label (table_label);
4819 /* This would be a lot nicer if CASE_VECTOR_PC_RELATIVE
4820 were an expression, instead of an #ifdef/#ifndef. */
#ifdef CASE_VECTOR_PC_RELATIVE
	  if (flag_pic)
	    emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
				     gen_rtx (LABEL_REF, Pmode, table_label),
				     gen_rtvec_v (ncases, labelvec)));
	  else
#endif
	    emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
				     gen_rtvec_v (ncases, labelvec)));
4833 /* If the case insn drops through the table,
4834 after the table we must jump to the default-label.
4835 Otherwise record no drop-through after the table. */
#ifdef CASE_DROPS_THROUGH
      emit_jump (default_label);
#else
      emit_barrier ();
#endif
4843 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
4844 reorder_insns (before_case, get_last_insn (),
4845 thiscase->data.case_stmt.start);
4847 if (thiscase->exit_label)
4848 emit_label (thiscase->exit_label);
  POPSTACK (case_stack);
}
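
/* A worked illustration of the table built above: for case values 3, 5
   and 6 (labels hypothetical), MINVAL is 3 and RANGE is 3, so NCASES is
   4 and LABELVEC is filled as

	index 0 (case 3):  LABEL_REF -> L3
	index 1 (case 4):  LABEL_REF -> default_label  (gap)
	index 2 (case 5):  LABEL_REF -> L5
	index 3 (case 6):  LABEL_REF -> L6

   before being emitted as a single ADDR_VEC (or ADDR_DIFF_VEC) after
   TABLE_LABEL.  */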
/* Terminate a case statement.  EXPR is the original index
   expression.  */

static void
bc_expand_end_case (expr)
     tree expr;
{
4862 struct nesting *thiscase = case_stack;
4863 enum bytecode_opcode opcode;
4864 struct bc_label *jump_label;
4865 struct case_node *c;
4867 bc_emit_bytecode (jump);
4868 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));
#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
4874 /* Now that the size of the jump table is known, emit the actual
4875 indexed jump instruction. */
4876 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));
4878 opcode = TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode
4879 ? TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseSU : caseSI
4880 : TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseDU : caseDI;
4882 bc_emit_bytecode (opcode);
  /* Now emit the case instruction's literal arguments, in order.
4885 In addition to the value on the stack, it uses:
4886 1. The address of the jump table.
4887 2. The size of the jump table.
4888 3. The default label. */
4890 jump_label = bc_get_bytecode_label ();
4891 bc_emit_bytecode_labelref (jump_label);
4892 bc_emit_bytecode_const ((char *) &thiscase->data.case_stmt.num_ranges,
4893 sizeof thiscase->data.case_stmt.num_ranges);
  if (thiscase->data.case_stmt.default_label)
    bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (thiscase->data.case_stmt.default_label)));
  else
    bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));
4900 /* Output the jump table. */
4902 bc_align_bytecode (3 /* PTR_ALIGN */);
4903 bc_emit_bytecode_labeldef (jump_label);
4905 if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode)
4906 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
4908 opcode = TREE_INT_CST_LOW (c->low);
4909 bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);
4911 opcode = TREE_INT_CST_LOW (c->high);
4912 bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);
4914 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
4917 if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == DImode)
4918 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
4920 bc_emit_bytecode_DI_const (c->low);
4921 bc_emit_bytecode_DI_const (c->high);
4923 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
4930 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->exit_label));
4932 /* Possibly issue enumeration warnings. */
4934 if (!thiscase->data.case_stmt.default_label
4935 && TREE_CODE (TREE_TYPE (expr)) == ENUMERAL_TYPE
      && TREE_CODE (expr) != INTEGER_CST)
4938 check_for_full_enumeration_handling (TREE_TYPE (expr));
#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
  POPSTACK (case_stack);
}
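
/* A sketch of the bytecode stream produced for a small SImode switch by
   bc_expand_start_case together with the code above (layout only, not a
   byte-exact dump): a jump around the case bodies to skip_label; the
   bodies themselves; at skip_label a caseSI/caseSU (or caseDI/caseDU)
   opcode followed by the jump-table label, the number of ranges, and
   the default (or exit) label; finally the aligned table of low bound,
   high bound, and target label for each case.  */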
/* Return unique bytecode ID.  */

int
bc_new_uid ()
{
  static int bc_uid = 0;

  return (++bc_uid);
}
4958 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
static void
do_jump_if_equal (op1, op2, label, unsignedp)
     rtx op1, op2, label;
     int unsignedp;
{
  if (GET_CODE (op1) == CONST_INT
      && GET_CODE (op2) == CONST_INT)
    {
      if (INTVAL (op1) == INTVAL (op2))
	emit_jump (label);
    }
  else
    {
4973 enum machine_mode mode = GET_MODE (op1);
4974 if (mode == VOIDmode)
4975 mode = GET_MODE (op2);
      emit_cmp_insn (op1, op2, EQ, NULL_RTX, mode, unsignedp, 0);
      emit_jump_insn (gen_beq (label));
    }
}
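
/* E.g. with both operands constant, do_jump_if_equal (GEN_INT (3),
   GEN_INT (3), label, 0) emits just an unconditional jump, and unequal
   constants emit nothing at all; only non-constant operands produce
   the compare/branch pair above.  */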
4981 /* Not all case values are encountered equally. This function
4982 uses a heuristic to weight case labels, in cases where that
4983 looks like a reasonable thing to do.
   Right now, all we try to guess is text, and we establish the
   following weights, mirroring the cost table set up below:

	alphanumeric characters		16
	space and punctuation		8
	tab and '\0'			4
	newline				2
	'\f', '\v' and '\b'		1
	other characters		0

   If we find any cases in the switch that are not either -1 or in the range
   of valid ASCII characters, or are control characters other than those
   commonly used with "\", don't treat this switch as scanning text.

   Return 1 if these nodes are suitable for cost estimation, otherwise
   return 0.  */
static int
estimate_case_costs (node)
     case_node_ptr node;
{
5008 tree min_ascii = build_int_2 (-1, -1);
  tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
  case_node_ptr n;
  register int i;
5013 /* If we haven't already made the cost table, make it now. Note that the
5014 lower bound of the table is -1, not zero. */
5016 if (cost_table == NULL)
5018 cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
5019 bzero (cost_table - 1, 129 * sizeof (short));
      for (i = 0; i < 128; i++)
	{
	  if (isalnum (i))
	    cost_table[i] = 16;
	  else if (ispunct (i))
	    cost_table[i] = 8;
	  else if (iscntrl (i))
	    cost_table[i] = -1;
	}
5031 cost_table[' '] = 8;
5032 cost_table['\t'] = 4;
5033 cost_table['\0'] = 4;
5034 cost_table['\n'] = 2;
5035 cost_table['\f'] = 1;
5036 cost_table['\v'] = 1;
5037 cost_table['\b'] = 1;
5040 /* See if all the case expressions look like text. It is text if the
5041 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5042 as signed arithmetic since we don't want to ever access cost_table with a
5043 value less than -1. Also check that none of the constants in a range
5044 are strange control characters. */
5046 for (n = node; n; n = n->right)
      if (INT_CST_LT (n->low, min_ascii) || INT_CST_LT (max_ascii, n->high))
	return 0;

      for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
	if (cost_table[i] < 0)
	  return 0;
  /* All interesting values are within the range of interesting
     ASCII characters.  */
  return 1;
}
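
/* The practical effect of the weights (values hypothetical): in a
   switch over characters with cases ' ', 'e' and 't', the letters cost
   16 each and the space 8, so balance_case_nodes below places the
   split point nearer the letters, and the likelier values are reached
   with fewer comparisons.  */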
5061 /* Scan an ordered list of case nodes
5062 combining those with consecutive values or ranges.
   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */

static void
group_case_nodes (head)
     case_node_ptr head;
{
  case_node_ptr node = head;

  while (node)
    {
5074 rtx lb = next_real_insn (label_rtx (node->code_label));
5075 case_node_ptr np = node;
5077 /* Try to group the successors of NODE with NODE. */
5078 while (((np = np->right) != 0)
5079 /* Do they jump to the same place? */
5080 && next_real_insn (label_rtx (np->code_label)) == lb
5081 /* Are their ranges consecutive? */
5082 && tree_int_cst_equal (np->low,
5083 fold (build (PLUS_EXPR,
5084 TREE_TYPE (node->high),
5087 /* An overflow is not consecutive. */
5088 && tree_int_cst_lt (node->high,
5089 fold (build (PLUS_EXPR,
5090 TREE_TYPE (node->high),
5092 integer_one_node))))
5094 node->high = np->high;
      /* NP is the first node after NODE which can't be grouped with it.
	 Delete the nodes in between, and move on to that node.  */
      node->right = np;
      node = np;
    }
}
5103 /* Take an ordered list of case nodes
5104 and transform them into a near optimal binary tree,
5105 on the assumption that any target code selection value is as
5106 likely as any other.
5108 The transformation is performed by splitting the ordered
5109 list into two equal sections plus a pivot. The parts are
5110 then attached to the pivot as left and right branches. Each
   branch is then transformed recursively.  */
static void
balance_case_nodes (head, parent)
5115 case_node_ptr *head;
5116 case_node_ptr parent;
5118 register case_node_ptr np;
5126 register case_node_ptr *npp;
5129 /* Count the number of entries on branch. Also count the ranges. */
5133 if (!tree_int_cst_equal (np->low, np->high))
5137 cost += cost_table[TREE_INT_CST_LOW (np->high)];
5141 cost += cost_table[TREE_INT_CST_LOW (np->low)];
5149 /* Split this list if it is long enough for that to help. */
	      /* Find the place in the list that bisects the list's total cost.
		 Here I gets half the total cost.  */
	      i = (cost + 1) / 2;

	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
		  i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		}
5171 /* Leave this branch lopsided, but optimize left-hand
5172 side and fill in `parent' fields for right-hand side. */
5174 np->parent = parent;
5175 balance_case_nodes (&np->left, np);
5176 for (; np->right; np = np->right)
5177 np->right->parent = np;
5181 /* If there are just three nodes, split at the middle one. */
5183 npp = &(*npp)->right;
5186 /* Find the place in the list that bisects the list's total cost,
5187 where ranges count as 2.
5188 Here I gets half the total cost. */
	      i = (i + ranges + 1) / 2;

	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i--;
		  i--;
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		}
5203 np->parent = parent;
5206 /* Optimize each of the two split parts. */
5207 balance_case_nodes (&np->left, np);
5208 balance_case_nodes (&np->right, np);
5212 /* Else leave this branch as one level,
5213 but fill in `parent' fields. */
5215 np->parent = parent;
5216 for (; np->right; np = np->right)
5217 np->right->parent = np;
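
/* For example, the sorted chain 1 4 9 16 25 (all single values of unit
   cost) is split at its middle node: 9 becomes the root, 1 4 the left
   branch and 16 25 the right one, and each branch is processed
   recursively (branches too short to split are left as chains with
   their `parent' fields filled in).  Any value is then decided in two
   or three comparisons instead of up to five.  */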
5222 /* Search the parent sections of the case node tree
5223 to see if a test for the lower bound of NODE would be redundant.
5224 INDEX_TYPE is the type of the index expression.
   The instructions to generate the case decision tree are
   output in the same order as nodes are processed, so it is
   known that if a parent node has already tested the value one
   below this node's low bound, the current node is bounded at
   its lower end.  Thus the test would be redundant.  */
static int
node_has_low_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree low_minus_one;
  case_node_ptr pnode;
5240 /* If the lower bound of this node is the lowest value in the index type,
5241 we need not test it. */
  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
    return 1;
  /* If this node has a left branch, the value at the left must be less
     than that at this node, so it cannot be bounded at the bottom and
     we need not bother testing any further.  */

  if (node->left)
    return 0;
5253 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5254 node->low, integer_one_node));
5256 /* If the subtraction above overflowed, we can't verify anything.
5257 Otherwise, look for a parent that tests our value - 1. */
  if (! tree_int_cst_lt (low_minus_one, node->low))
    return 0;
  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (low_minus_one, pnode->high))
      return 1;

  return 0;
}
5269 /* Search the parent sections of the case node tree
5270 to see if a test for the upper bound of NODE would be redundant.
5271 INDEX_TYPE is the type of the index expression.
   The instructions to generate the case decision tree are
   output in the same order as nodes are processed, so it is
   known that if a parent node has already tested the value one
   above this node's high bound, the current node is bounded at
   its upper end.  Thus the test would be redundant.  */
static int
node_has_high_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree high_plus_one;
  case_node_ptr pnode;
5287 /* If the upper bound of this node is the highest value in the type
5288 of the index expression, we need not test against it. */
  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
    return 1;
  /* If this node has a right branch, the value at the right must be greater
     than that at this node, so it cannot be bounded at the top and
     we need not bother testing any further.  */

  if (node->right)
    return 0;
5300 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5301 node->high, integer_one_node));
5303 /* If the addition above overflowed, we can't verify anything.
5304 Otherwise, look for a parent that tests our value + 1. */
  if (! tree_int_cst_lt (node->high, high_plus_one))
    return 0;
  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (high_plus_one, pnode->low))
      return 1;

  return 0;
}
5316 /* Search the parent sections of the
5317 case node tree to see if both tests for the upper and lower
5318 bounds of NODE would be redundant. */
static int
node_is_bounded (node, index_type)
     case_node_ptr node;
     tree index_type;
{
5325 return (node_has_low_bound (node, index_type)
5326 && node_has_high_bound (node, index_type));
5329 /* Emit an unconditional jump to LABEL unless it would be dead code. */
static void
emit_jump_if_reachable (label)
     rtx label;
{
  if (GET_CODE (get_last_insn ()) != BARRIER)
    emit_jump (label);
}
5339 /* Emit step-by-step code to select a case for the value of INDEX.
5340 The thus generated decision tree follows the form of the
5341 case-node binary tree NODE, whose nodes represent test conditions.
5342 INDEX_TYPE is the type of the index of the switch.
5344 Care is taken to prune redundant tests from the decision tree
5345 by detecting any boundary conditions already checked by
5346 emitted rtx. (See node_has_high_bound, node_has_low_bound
5347 and node_is_bounded, above.)
5349 Where the test conditions can be shown to be redundant we emit
5350 an unconditional jump to the target code. As a further
5351 optimization, the subordinates of a tree node are examined to
5352 check for bounded nodes. In this case conditional and/or
5353 unconditional jumps as a result of the boundary check for the
   current node are arranged to target the subordinates' associated
   code for out-of-bound conditions on the current node.
5357 We can assume that when control reaches the code generated here,
5358 the index value has already been compared with the parents
5359 of this node, and determined to be on the same side of each parent
5360 as this node is. Thus, if this node tests for the value 51,
5361 and a parent tested for 52, we don't need to consider
5362 the possibility of a value greater than 51. If another parent
5363 tests for the value 50, then this node need not test anything. */
static void
emit_case_nodes (index, node, default_label, index_type)
     rtx index;
     case_node_ptr node;
     rtx default_label;
     tree index_type;
{
5372 /* If INDEX has an unsigned type, we must make unsigned branches. */
5373 int unsignedp = TREE_UNSIGNED (index_type);
5374 typedef rtx rtx_function ();
5375 rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
5376 rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
5377 rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
5378 rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
5379 enum machine_mode mode = GET_MODE (index);
5381 /* See if our parents have already tested everything for us.
5382 If they have, emit an unconditional jump for this node. */
5383 if (node_is_bounded (node, index_type))
5384 emit_jump (label_rtx (node->code_label));
5386 else if (tree_int_cst_equal (node->low, node->high))
5388 /* Node is single valued. First see if the index expression matches
5389 this node and then check our children, if any. */
5391 do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
5392 label_rtx (node->code_label), unsignedp);
5394 if (node->right != 0 && node->left != 0)
5396 /* This node has children on both sides.
5397 Dispatch to one side or the other
5398 by comparing the index value with this node's value.
5399 If one subtree is bounded, check that one first,
5400 so we can avoid real branches in the tree. */
5402 if (node_is_bounded (node->right, index_type))
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);
5408 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
5409 emit_case_nodes (index, node->left, default_label, index_type);
5412 else if (node_is_bounded (node->left, index_type))
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     LT, NULL_RTX, mode, unsignedp, 0);
5417 emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
5418 emit_case_nodes (index, node->right, default_label, index_type);
5423 /* Neither node is bounded. First distinguish the two sides;
5424 then emit the code for one side at a time. */
	      tree test_label
		= build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5429 /* See if the value is on the right. */
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);
5433 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
5435 /* Value must be on the left.
5436 Handle the left-hand subtree. */
5437 emit_case_nodes (index, node->left, default_label, index_type);
	      /* If left-hand subtree does nothing,
		 go to default.  */
5440 emit_jump_if_reachable (default_label);
5442 /* Code branches here for the right-hand subtree. */
5443 expand_label (test_label);
5444 emit_case_nodes (index, node->right, default_label, index_type);
5448 else if (node->right != 0 && node->left == 0)
	  /* Here we have a right child but no left so we issue a conditional
	     branch to default and process the right child.

	     Omit the conditional branch to default if it would avoid only one
	     right child; it costs too much space to save so little time.  */
5456 if (node->right->right || node->right->left
5457 || !tree_int_cst_equal (node->right->low, node->right->high))
5459 if (!node_has_low_bound (node, index_type))
		  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
				 LT, NULL_RTX, mode, unsignedp, 0);
5464 emit_jump_insn ((*gen_blt_pat) (default_label));
5467 emit_case_nodes (index, node->right, default_label, index_type);
5470 /* We cannot process node->right normally
5471 since we haven't ruled out the numbers less than
5472 this node's value. So handle node->right explicitly. */
5473 do_jump_if_equal (index,
			      expand_expr (node->right->low, NULL_RTX,
					   VOIDmode, 0),
			      label_rtx (node->right->code_label), unsignedp);
5479 else if (node->right == 0 && node->left != 0)
5481 /* Just one subtree, on the left. */
5483 #if 0 /* The following code and comment were formerly part
5484 of the condition here, but they didn't work
5485 and I don't understand what the idea was. -- rms. */
5486 /* If our "most probable entry" is less probable
5487 than the default label, emit a jump to
5488 the default label using condition codes
5489 already lying around. With no right branch,
5490 a branch-greater-than will get us to the default
5493 && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
5496 if (node->left->left || node->left->right
5497 || !tree_int_cst_equal (node->left->low, node->left->high))
5499 if (!node_has_high_bound (node, index_type))
		  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
				 GT, NULL_RTX, mode, unsignedp, 0);
5504 emit_jump_insn ((*gen_bgt_pat) (default_label));
5507 emit_case_nodes (index, node->left, default_label, index_type);
	    /* We cannot process node->left normally
	       since we haven't ruled out the numbers greater than
	       this node's value.  So handle node->left explicitly.  */
5513 do_jump_if_equal (index,
			      expand_expr (node->left->low, NULL_RTX,
					   VOIDmode, 0),
			      label_rtx (node->left->code_label), unsignedp);
5521 /* Node is a range. These cases are very similar to those for a single
5522 value, except that we do not start by testing whether this node
5523 is the one to branch to. */
5525 if (node->right != 0 && node->left != 0)
5527 /* Node has subtrees on both sides.
5528 If the right-hand subtree is bounded,
5529 test for it first, since we can go straight there.
5530 Otherwise, we need to make a branch in the control structure,
5531 then handle the two subtrees. */
5532 tree test_label = 0;
	  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
					     VOIDmode, 0),
			 GT, NULL_RTX, mode, unsignedp, 0);
5538 if (node_is_bounded (node->right, index_type))
5539 /* Right hand node is fully bounded so we can eliminate any
5540 testing and branch directly to the target code. */
5541 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
5544 /* Right hand node requires testing.
5545 Branch to a label where we will handle it later. */
5547 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5548 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
5551 /* Value belongs to this node or to the left-hand subtree. */
5553 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
5554 GE, NULL_RTX, mode, unsignedp, 0);
5555 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
5557 /* Handle the left-hand subtree. */
5558 emit_case_nodes (index, node->left, default_label, index_type);
	  /* If right node had to be handled later, do that now.  */
	  if (test_label)
	    {
5564 /* If the left-hand subtree fell through,
5565 don't let it fall into the right-hand subtree. */
5566 emit_jump_if_reachable (default_label);
5568 expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
5573 else if (node->right != 0 && node->left == 0)
5575 /* Deal with values to the left of this node,
5576 if they are possible. */
5577 if (!node_has_low_bound (node, index_type))
	      emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
						 VOIDmode, 0),
			     LT, NULL_RTX, mode, unsignedp, 0);
5582 emit_jump_insn ((*gen_blt_pat) (default_label));
5585 /* Value belongs to this node or to the right-hand subtree. */
	  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
					     VOIDmode, 0),
			 LE, NULL_RTX, mode, unsignedp, 0);
5590 emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));
5592 emit_case_nodes (index, node->right, default_label, index_type);
5595 else if (node->right == 0 && node->left != 0)
5597 /* Deal with values to the right of this node,
5598 if they are possible. */
5599 if (!node_has_high_bound (node, index_type))
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);
5604 emit_jump_insn ((*gen_bgt_pat) (default_label));
5607 /* Value belongs to this node or to the left-hand subtree. */
5609 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
5610 GE, NULL_RTX, mode, unsignedp, 0);
5611 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
5613 emit_case_nodes (index, node->left, default_label, index_type);
5618 /* Node has no children so we check low and high bounds to remove
5619 redundant tests. Only one of the bounds can exist,
5620 since otherwise this node is bounded--a case tested already. */
5622 if (!node_has_high_bound (node, index_type))
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);
5627 emit_jump_insn ((*gen_bgt_pat) (default_label));
5630 if (!node_has_low_bound (node, index_type))
	      emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
						 VOIDmode, 0),
			     LT, NULL_RTX, mode, unsignedp, 0);
5635 emit_jump_insn ((*gen_blt_pat) (default_label));
5638 emit_jump (label_rtx (node->code_label));
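
/* A short worked trace (labels hypothetical): for an unsigned index and
   the two ranges 0..9 -> L1 and 10..19 -> L2, no test against values
   below 0 is emitted, since 0 is the minimum of the index type; a ble
   takes values up to 9 to L1; and for the node 10..19 only the upper
   bound needs testing, because its parent just ruled out everything
   below 10: a bgt sends values above 19 to the default and an
   unconditional jump reaches L2.  */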
5643 /* These routines are used by the loop unrolling code. They copy BLOCK trees
5644 so that the debugging info will be correct for the unrolled loop. */
5646 /* Indexed by block number, contains a pointer to the N'th block node. */
5648 static tree *block_vector;
void
find_loop_tree_blocks ()
{
5653 tree block = DECL_INITIAL (current_function_decl);
  /* The first block is for the function body, and does not have
     corresponding block notes.  Don't include it in the block vector.  */
5657 block = BLOCK_SUBBLOCKS (block);
5659 block_vector = identify_blocks (block, get_insns ());
void
unroll_block_trees ()
{
5665 tree block = DECL_INITIAL (current_function_decl);
  reorder_blocks (block_vector, block, get_insns ());
}
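
/* A sketch of the intended calling sequence (assumed from the comment
   above, not shown in this file): the unroller calls
   find_loop_tree_blocks once before copying a loop body, so that
   block_vector maps block numbers to their BLOCK nodes, and calls
   unroll_block_trees afterwards so that reorder_blocks rebuilds the
   BLOCK tree under DECL_INITIAL to match the duplicated block notes.  */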