/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1992, 1993 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */
/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
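/* Illustrative sketch (not from the original sources): a C front end
   compiling

       if (cond) { then-body }

   would make roughly this sequence of calls while parsing:

       expand_start_cond (cond, 0);   -- emit the test and conditional jump
       ... expand_expr_stmt etc. for each statement of the then-body ...
       expand_end_cond ();            -- define the label the jump targets  */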
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "hard-reg-set.h"
#include "bc-typecd.h"
#include "bc-opcode.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;
/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;
/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;
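/* For instance (illustration only), in the GNU C statement-expression

       x = ({ int t = f (); t + 1; });

   `t + 1' is the last expr-stmt, so its value must be computed and kept;
   see expand_start_stmt_expr and expand_end_stmt_expr below.  */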
/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;
/* Each time we expand the end of a binding contour (in `expand_end_bindings')
   and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
   This is used by the `remember_end_note' function to record the endpoint
   of each generated block in its associated BLOCK node.  */

static rtx last_block_end_note;
/* Number of binding contours started so far in this function.  */

int block_start_count;
/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;
/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;
/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;
/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if not optimizing.  */
extern rtx save_expr_regs;
/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
extern int frame_offset;
/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
extern rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
extern rtx tail_recursion_reentry;
/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

extern rtx arg_pointer_save_area;
/* Chain of all RTL_EXPRs that have insns in them.  */
extern tree rtl_expr_chain;
#if 0  /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   A chain of case nodes is initially maintained via the RIGHT fields
   in the nodes.  Nodes with higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */

struct case_node
{
  struct case_node *left;   /* Left son in binary tree */
  struct case_node *right;  /* Right son in binary tree; also node chain */
  struct case_node *parent; /* Parent of node in binary tree */
  tree low;                 /* Lowest index value for this label */
  tree high;                /* Highest index value for this label */
  tree code_label;          /* Label to jump to when node matches */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
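/* As a hypothetical illustration: for

       switch (i) { case 1: ...; case 4 ... 7: ...; }

   the chain would hold two case_nodes, one with low == high == 1 and
   one with low == 4, high == 7, linked through their RIGHT fields.  */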
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;
static int estimate_case_costs ();
static void balance_case_nodes ();
static void emit_case_nodes ();
static void group_case_nodes ();
static void emit_jump_if_reachable ();

static int warn_if_unused_value ();
static void expand_goto_internal ();
static void bc_expand_goto_internal ();
static int expand_fixup ();
static void bc_expand_fixup ();
static void bc_fixup_gotos ();
void free_temp_slots ();
static void expand_cleanups ();
static void expand_null_return_1 ();
static int tail_recursion_args ();
static void do_jump_if_equal ();
int bc_expand_exit_loop_if_false ();
void bc_expand_start_cond ();
void bc_expand_end_cond ();
void bc_expand_start_else ();
void bc_expand_end_bindings ();
void bc_expand_start_case ();
void bc_check_for_full_enumeration_handling ();
void bc_expand_end_case ();
void bc_expand_decl ();

extern rtx bc_allocate_local ();
extern rtx bc_allocate_variable_array ();
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
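/* Sketch of the bookkeeping (assumed, for illustration): while expanding

       while (a) { if (b) { ... } }

   `nesting_stack' holds the `if' object, then the `while' object;
   the `if' object is also on `cond_stack' and the `while' object on
   `loop_stack', each chained through its own `next' field.  */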
struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
        {
          /* Label for the end of the if construct.
             There is none if EXITFLAG was not set
             and no `else' has been seen yet.  */
          rtx endif_label;
          /* Label for the end of this alternative.
             This may be the end of the if or the next else/elseif.  */
          rtx next_label;
        } cond;
      /* For loops.  */
      struct
        {
          /* Label at the top of the loop; place to loop back to.  */
          rtx start_label;
          /* Label at the end of the whole construct.  */
          rtx end_label;
          /* Label for `continue' statement to jump to;
             this is in front of the stepper of the loop.  */
          rtx continue_label;
        } loop;
      /* For variable binding contours.  */
      struct
        {
          /* Sequence number of this binding contour within the function,
             in order of entry.  */
          int block_start_count;
          /* Nonzero => value to restore stack to on exit.  Complemented by
             bc_stack_level (see below) when generating bytecodes.  */
          rtx stack_level;
          /* The NOTE that starts this contour.
             Used by expand_goto to check whether the destination
             is within each contour or not.  */
          rtx first_insn;
          /* Innermost containing binding contour that has a stack level.  */
          struct nesting *innermost_stack_block;
          /* List of cleanups to be run on exit from this contour.
             This is a list of expressions to be evaluated.
             The TREE_PURPOSE of each link is the ..._DECL node
             which the cleanup pertains to.  */
          tree cleanups;
          /* List of cleanup-lists of blocks containing this block,
             as they were at the locus where this block appears.
             There is an element for each containing block,
             ordered innermost containing block first.
             The tail of this list can be 0 (was empty_cleanup_list),
             if all remaining elements would be empty lists.
             The element's TREE_VALUE is the cleanup-list of that block,
             which may be null.  */
          tree outer_cleanups;
          /* Chain of labels defined inside this binding contour.
             For contours that have stack levels or cleanups.  */
          struct label_chain *label_chain;
          /* Number of function calls seen, as of start of this block.  */
          int function_call_count;
          /* Bytecode specific: stack level to restore stack to on exit.  */
          int bc_stack_level;
        } block;
      /* For switch (C) or case (Pascal) statements,
         and also for dummies (see `expand_start_case_dummy').  */
      struct
        {
          /* The insn after which the case dispatch should finally
             be emitted.  Zero for a dummy.  */
          rtx start;
          /* For bytecodes, the case table is in-lined right in the code.
             A label is needed for skipping over this block.  It is only
             used when generating bytecodes.  */
          rtx skip_label;
          /* A list of case labels, kept in ascending order by value
             as the list is built.
             During expand_end_case, this list may be rearranged into a
             nearly balanced binary tree.  */
          struct case_node *case_list;
          /* Label to jump to if no case matches.  */
          tree default_label;
          /* The expression to be dispatched on.  */
          tree index_expr;
          /* Type that INDEX_EXPR should be converted to.  */
          tree nominal_type;
          /* Number of range exprs in case statement.  */
          int num_ranges;
          /* Name of this kind of statement, for warnings.  */
          char *printname;
          /* Nonzero if a case label has been seen in this case stmt.  */
          char seenlabel;
        } case_stmt;
      /* For exception contours.  */
      struct
        {
          /* List of exceptions raised.  This is a TREE_LIST
             of whatever you want.  */
          tree raised;
          /* List of exceptions caught.  This is also a TREE_LIST
             of whatever you want.  As a special case, it has the
             value `void_type_node' if it handles default exceptions.  */
          tree handled;
          /* First insn of TRY block, in case resumptive model is needed.  */
          rtx first_insn;
          /* Label for the catch clauses.  */
          rtx except_label;
          /* Label for unhandled exceptions.  */
          rtx unhandled_label;
          /* Label at the end of whole construct.  */
          rtx after_label;
          /* Label which "escapes" the exception construct.
             Like EXIT_LABEL for BREAK construct, but for exceptions.  */
          rtx escape_label;
        } except_stmt;
    } data;
};
/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* If any new stacks are added here, add them to POPSTACKS too.  */

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Chain of all pending exception contours.  */
struct nesting *except_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;
/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))
/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)					\
do { struct nesting *target = STACK;			\
     struct nesting *this;				\
     do { this = nesting_stack;				\
	  if (loop_stack == this)			\
	    loop_stack = loop_stack->next;		\
	  if (cond_stack == this)			\
	    cond_stack = cond_stack->next;		\
	  if (block_stack == this)			\
	    block_stack = block_stack->next;		\
	  if (stack_block_stack == this)		\
	    stack_block_stack = stack_block_stack->next; \
	  if (case_stack == this)			\
	    case_stack = case_stack->next;		\
	  if (except_stack == this)			\
	    except_stack = except_stack->next;		\
	  nesting_depth = nesting_stack->depth - 1;	\
	  nesting_stack = this->all;			\
	  obstack_free (&stmt_obstack, this); }		\
     while (this != target); } while (0)
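/* For example, an `expand_end_WHATEVER' function would conceptually
   finish with

       POPSTACK (loop_stack);

   which frees the loop's nesting object and also unwinds any conds or
   blocks begun inside the loop but not yet ended.  */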
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0 (was empty_cleanup_list),
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;

  /* Bytecode specific members follow */

  /* The label that this jump is jumping to, or 0 for break, continue
     or return.  */
  struct bc_label *bc_target;

  /* The label we use for the fixup patch */
  struct bc_label *label;

  /* True (non-0) if fixup has been handled */
  int bc_handled;

  /* Like stack_level above, except refers to the interpreter stack */
  int bc_stack_level;
};

static struct goto_fixup *goto_fixup_chain;
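/* A sketch of when a fixup arises (illustrative only):

       {
         int vla[n];   -- this contour has a stack level
         goto done;    -- label not seen yet: expand_fixup records it
       }
     done: ;

   When the contour is exited, fixup_gotos (below) inserts the
   stack-restoring insns just before the recorded jump.  */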
/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};
void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
#if 0
  empty_cleanup_list = build_tree_list (NULL_TREE, NULL_TREE);
#endif
}

void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;
}
void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
}
void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
}
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  if (!output_bytecode)
    {
      last_insn = get_last_insn ();
      if (!optimize
	  && (GET_CODE (last_insn) == CODE_LABEL
	      || prev_real_insn (last_insn) == 0))
	emit_insn (gen_nop ());
    }
}
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}
/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}
/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  if (output_bytecode)
    {
      bc_expand_expr (exp);
      bc_emit_instruction (jumpP);
    }
  else
    {
      rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
      emit_queue ();
      emit_indirect_jump (x);
    }
}
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */
void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  if (output_bytecode)
    {
      if (! DECL_RTL (label))
	DECL_RTL (label) = bc_gen_rtx ((char *) 0, 0, bc_get_bytecode_label ());
      if (! bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (DECL_RTL (label))))
	error ("multiply defined label");
      return;
    }

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}
/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slot == 0)
    {
      nonlocal_goto_handler_slot
	= assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
}
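/* Sketch of assumed front-end usage: for a GNU C nested function

       void f () { void g () { goto lab; } ... lab: ; }

   the front end calls declare_nonlocal_label on `lab' within f, and
   the goto in g then takes the nonlocal path through expand_goto.  */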
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  if (output_bytecode)
    {
      expand_goto_internal (label, label_rtx (label), NULL_RTX);
      return;
    }

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx (LABEL_REF, Pmode, label_rtx (label));
      rtx temp;

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
				      copy_rtx (p->nonlocal_goto_handler_slot),
				      copy_rtx (p->nonlocal_goto_stack_level),
				      label_ref));
      else
#endif
	{
	  rtx addr;

	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));

	  /* We have now loaded the frame pointer hardware register with
	     the address that corresponds to the start of the virtual
	     stack vars.  So replace virtual_stack_vars_rtx in all
	     addresses we use with hard_frame_pointer_rtx.  */

	  /* Get addr of containing function's current nonlocal goto handler,
	     which will do any cleanups and then jump to the label.  */
	  addr = copy_rtx (p->nonlocal_goto_handler_slot);
	  temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
					   hard_frame_pointer_rtx));

	  /* Restore the stack pointer.  Note this uses fp just restored.  */
	  addr = p->nonlocal_goto_stack_level;
	  if (addr)
	    addr = replace_rtx (copy_rtx (addr),
				virtual_stack_vars_rtx,
				hard_frame_pointer_rtx);

	  emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

	  /* Put in the static chain register the nonlocal label address.  */
	  emit_move_insn (static_chain_rtx, label_ref);
	  /* USE of hard_frame_pointer_rtx added for consistency; not clear if
	     really needed.  */
	  emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
	  emit_indirect_jump (temp);
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (output_bytecode)
    {
      /* NOTICE!  If a bytecode instruction other than `jump' is needed,
	 then the caller has to call bc_expand_goto_internal()
	 directly.  This is rather an exceptional case, and there aren't
	 that many places where this is necessary.  */
      bc_expand_goto_internal (jump, BYTECODE_BC_LABEL (label), body);
      return;
    }

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this would clobber
	     the stack pointer.  This one should be deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();
	  emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
/* Generate a jump with OPCODE to the given bytecode LABEL which is
   found within BODY.  */

static void
bc_expand_goto_internal (opcode, label, body)
     enum bytecode_opcode opcode;
     struct bc_label *label;
     tree body;
{
  struct nesting *block;
  int stack_level = -1;

  /* If the label is defined, adjust the stack as necessary.
     If it's not defined, we have to push the reference on the
     fixup list.  */

  if (label->defined)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing bytecode uids.)  Then restore the
	 outermost stack level within that block.  */

      for (block = block_stack; block; block = block->next)
	{
	  if (BYTECODE_BC_LABEL (block->data.block.first_insn)->uid < label->uid)
	    break;
	  if (block->data.block.bc_stack_level)
	    stack_level = block->data.block.bc_stack_level;

	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE);
	      do_pending_stack_adjust ();
	    }
	}

      /* Restore the stack level.  If we need to adjust the stack, we
	 must do so after the jump, since the jump may depend on
	 what's on the stack.  Thus, any stack-modifying conditional
	 jumps (these are the only ones that rely on what's on the
	 stack) go into the fixup list.  */

      if (stack_level >= 0
	  && stack_depth != stack_level
	  && opcode != jump)

	bc_expand_fixup (opcode, label, stack_level);
      else
	{
	  if (stack_level >= 0)
	    bc_adjust_stack (stack_depth - stack_level);

	  if (body && DECL_BIT_FIELD (body))
	    error ("jump to `%s' invalidly jumps into binding contour",
		   IDENTIFIER_POINTER (DECL_NAME (body)));

	  /* Emit immediate jump */
	  bc_emit_bytecode (opcode);
	  bc_emit_bytecode_labelref (label);

#ifdef DEBUG_PRINT_CODE
	  fputc ('\n', stderr);
#endif
	}
    }
  else
    /* Put goto in the fixup list */
    bc_expand_fixup (opcode, label, stack_level);
}
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */
static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.  */

      {
	register rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();

	start_sequence ();
	pushlevel (0);
	fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
	last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
	fixup->context = poplevel (1, 0, 0);  /* Create the BLOCK node now! */
	end_sequence ();
	emit_insns_after (fixup->before_jump, original_before_jump);
      }

      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= (((block->data.block.outer_cleanups
#if 0
	     && block->data.block.outer_cleanups != empty_cleanup_list
#endif
	     )
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}
/* Generate bytecode jump with OPCODE to a fixup routine that links to LABEL.
   Make the fixup restore the stack level to STACK_LEVEL.  */

static void
bc_expand_fixup (opcode, label, stack_level)
     enum bytecode_opcode opcode;
     struct bc_label *label;
     int stack_level;
{
  struct goto_fixup *fixup
    = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));

  fixup->label = bc_get_bytecode_label ();
  fixup->bc_target = label;
  fixup->bc_stack_level = stack_level;
  fixup->bc_handled = FALSE;

  fixup->next = goto_fixup_chain;
  goto_fixup_chain = fixup;

  /* Insert a jump to the fixup code */
  bc_emit_bytecode (opcode);
  bc_emit_bytecode_labelref (fixup->label);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}
/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */
static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  if (output_bytecode)
    {
      bc_fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in);
      return;
    }

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  register rtx cleanup_insns;

	  /* Get the first non-label after the label
	     this goto jumps to.  If that's before this scope begins,
	     we don't have a jump into the scope.  */
	  rtx after_label = f->target_rtl;
	  while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
	    after_label = NEXT_INSN (after_label);

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      /* If AFTER_LABEL is 0, it means the jump goes to the end
		 of the rtl, which means it jumps into this scope.  */
	      && (after_label == 0
		  || INSN_UID (first_insn) < INSN_UID (after_label))
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_REGISTER (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_REGISTER (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  pushlevel (0);
	  set_block (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), 0);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level)
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point insures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  poplevel (1, 0, 0);

	  end_sequence ();
	  emit_insns_after (cleanup_insns, f->before_jump);

	  f->before_jump = 0;
	}
    }

  /* Mark the cleanups of exited blocks so that they are executed
     by the code above.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.
	   Also mark the cleanup_list_list element for F
	   that corresponds to this block, so that ultimately
	   this block's cleanups will be executed by the code above.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared,
	   it means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    TREE_ADDRESSABLE (lists) = 1;

	if (stack_level)
	  f->stack_level = stack_level;
      }
}
/* When exiting a binding contour, process all pending gotos requiring fixups.
   Note: STACK_DEPTH is not altered.

   The arguments are currently not used in the bytecode compiler, but we may
   need them one day for languages other than C.

   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */
static void
bc_fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;
  int saved_stack_depth;

  /* F is the fixup we are considering; PREV is the previous one.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev)
	    prev->next = f->next;
	}

      /* Emit code to restore the stack and continue */
      bc_emit_bytecode_labeldef (f->label);

      /* Save stack_depth across call, since bc_adjust_stack () will alter
	 the perceived stack depth via the instructions generated.  */

      if (f->bc_stack_level >= 0)
	{
	  saved_stack_depth = stack_depth;
	  bc_adjust_stack (stack_depth - f->bc_stack_level);
	  stack_depth = saved_stack_depth;
	}

      bc_emit_bytecode (jump);
      bc_emit_bytecode_labelref (f->bc_target);

#ifdef DEBUG_PRINT_CODE
      fputc ('\n', stderr);
#endif
    }

  goto_fixup_chain = NULL;
}
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (output_bytecode)
    {
      error ("`asm' is illegal when generating bytecode");
      return;
    }

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
		      TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}
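/* For example (illustrative), a front end would hand

       asm ("nop");

   to expand_asm as the STRING_CST "nop", producing a single
   ASM_INPUT insn containing that text.  */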
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
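/* Illustrative mapping (not from the original sources): for

       asm volatile ("addl %1,%0" : "=r" (x) : "g" (y) : "cc");

   OUTPUTS has one entry (value `x', constraint "=r"), INPUTS one entry
   (value `y', constraint "g"), CLOBBERS holds the STRING_CST "cc", and
   VOL is nonzero because of `volatile'.  */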
void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  /* The insn we have emitted.  */
  rtx insn;

  if (output_bytecode)
    {
      error ("`asm' is illegal when generating bytecode");
      return;
    }

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
    }

  last_expr_type = 0;

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      int j;
      int found_equal = 0;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
	return;

      /* Make sure constraint has `=' and does not have `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	{
	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	    {
	      error ("output operand constraint contains `+'");
	      return;
	    }
	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '=')
	    found_equal = 1;
	}
      if (! found_equal)
	{
	  error ("output operand constraint lacks `='");
	  return;
	}

      /* If an output operand is not a variable or indirect ref,
	 or a part of one,
	 create a SAVE_EXPR which is a pseudo-reg
	 to act as an intermediate temporary.
	 Make the asm insn write into that, then copy it to
	 the real output operand.  */

      while (TREE_CODE (val) == COMPONENT_REF
	     || TREE_CODE (val) == ARRAY_REF)
	val = TREE_OPERAND (val, 0);

      if (TREE_CODE (val) != VAR_DECL
	  && TREE_CODE (val) != PARM_DECL
	  && TREE_CODE (val) != INDIRECT_REF)
	{
	  TREE_VALUE (tail) = save_expr (TREE_VALUE (tail));
	  /* If it's a constant, print error now so don't crash later.  */
	  if (TREE_CODE (TREE_VALUE (tail)) != SAVE_EXPR)
	    {
	      error ("invalid output in `asm'");
	      return;
	    }
	}

      output_rtx[i] = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
    }

  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx (ASM_OPERANDS, VOIDmode,
		  TREE_STRING_POINTER (string), "", 0, argvec, constraints,
		  filename, line);
  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;

      /* If there's an erroneous arg, emit no insn,
	 because the ASM_INPUT would get VOIDmode
	 and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;
      if (TREE_PURPOSE (tail) == NULL_TREE)
	{
	  error ("hard register `%s' listed as input operand to `asm'",
		 TREE_STRING_POINTER (TREE_VALUE (tail)) );
	  return;
	}

      /* Make sure constraint has neither `=' nor `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '='
	    || TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	  {
	    error ("input operand constraint contains `%c'",
		   TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
	    return;
	  }

      XVECEXP (body, 3, i)      /* argvec */
	= expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
      XVECEXP (body, 4, i)      /* constraints */
	= gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
		   TREE_STRING_POINTER (TREE_PURPOSE (tail)));
      i++;
    }

  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
			       ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;
      if (num == 0) num = 1;
      body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx (SET, VOIDmode,
		       output_rtx[i],
		       gen_rtx (ASM_OPERANDS, VOIDmode,
				TREE_STRING_POINTER (string),
				TREE_STRING_POINTER (TREE_PURPOSE (tail)),
				i, argvec, constraints,
				filename, line));
	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
	{
	  char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int j = decode_reg_name (regname);

	  if (j < 0)
	    {
	      if (j == -3)	/* `cc', which is not a register */
		continue;

	      if (j == -4)	/* `memory', don't cache memory across asm */
		{
		  XVECEXP (body, 0, i++)
		    = gen_rtx (CLOBBER, VOIDmode,
			       gen_rtx (MEM, QImode,
					gen_rtx (SCRATCH, VOIDmode, 0)));
		  continue;
		}

	      error ("unknown register name `%s' in `asm'", regname);
	      return;
	    }

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  XVECEXP (body, 0, i++)
	    = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));
	}

      insn = emit_insn (body);
    }

  free_temp_slots ();
}
/* Generate RTL to evaluate the expression EXP
   and remember it in case this is the VALUE in a ({... VALUE; }) constr.  */

void
expand_expr_stmt (exp)
     tree exp;
{
  if (output_bytecode)
    {
      int org_stack_depth = stack_depth;

      bc_expand_expr (exp);

      /* Restore stack depth */
      if (stack_depth < org_stack_depth)
	abort ();

      bc_emit_instruction (drop);

      last_expr_type = TREE_TYPE (exp);
      return;
    }

  /* If -W, warn about statements with no side effects,
     except for an explicit cast to void (e.g. for assert()), and
     except inside a ({...}) where they may be useful.  */
  if (expr_stmts_for_value == 0 && exp != error_mark_node)
    {
      if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
	  && !(TREE_CODE (exp) == CONVERT_EXPR
	       && TREE_TYPE (exp) == void_type_node))
	warning_with_file_and_line (emit_filename, emit_lineno,
				    "statement with no effect");
      else if (warn_unused)
	warn_if_unused_value (exp);
    }
  last_expr_type = TREE_TYPE (exp);
  if (! flag_syntax_only)
    last_expr_value = expand_expr (exp,
				   (expr_stmts_for_value
				    ? NULL_RTX : const0_rtx),
				   VOIDmode, 0);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
      && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
	;
      else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
	copy_to_reg (last_expr_value);
      else
	{
	  rtx lab = gen_label_rtx ();

	  /* Compare the value with itself to reference it.  */
	  emit_cmp_insn (last_expr_value, last_expr_value, EQ,
			 expand_expr (TYPE_SIZE (last_expr_type),
				      NULL_RTX, VOIDmode, 0),
			 BLKmode, 0,
			 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
	  emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
	  emit_label (lab);
	}
    }

  /* If this expression is part of a ({...}) and is in memory, we may have
     to preserve temporaries.  */
  preserve_temp_slots (last_expr_value);

  /* Free any temporaries used to evaluate this expression.  Any temporary
     used as a result of this expression will already have been preserved
     above.  */
  free_temp_slots ();

  emit_queue ();
}
/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  */

static int
warn_if_unused_value (exp)
     tree exp;
{
  if (TREE_USED (exp))
    return 0;

  switch (TREE_CODE (exp))
    {
    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case MODIFY_EXPR:
    case INIT_EXPR:
    case TARGET_EXPR:
    case CALL_EXPR:
    case METHOD_CALL_EXPR:
    case RTL_EXPR:
    case WITH_CLEANUP_EXPR:
      /* We don't warn about COND_EXPR because it may be a useful
	 construct if either arm contains a side effect.  */
      return 0;

    case BIND_EXPR:
      /* For a binding, warn if no side effect within it.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case TRUTH_ORIF_EXPR:
    case TRUTH_ANDIF_EXPR:
      /* In && or ||, warn if 2nd operand has no side effect.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case COMPOUND_EXPR:
      if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
	return 1;
      /* Let people do `(foo (), 0)' without a warning.  */
      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	return 0;
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case NOP_EXPR:
    case CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Don't warn about values cast to void.  */
      if (TREE_TYPE (exp) == void_type_node)
	return 0;
      /* Don't warn about conversions not explicit in the user's program.  */
      if (TREE_NO_UNUSED_WARNING (exp))
	return 0;
      /* Assignment to a cast usually results in a cast of a modify.
	 Don't complain about that.  There can be an arbitrary number of
	 casts before the modify, so we must loop until we find the first
	 non-cast expression and then test to see if that is a modify.  */
      {
	tree tem = TREE_OPERAND (exp, 0);

	while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
	  tem = TREE_OPERAND (tem, 0);

	if (TREE_CODE (tem) == MODIFY_EXPR)
	  return 0;
      }
      /* ... fall through ...  */

    default:
      /* Referencing a volatile value is a side effect, so don't warn.  */
      if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
	   || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
	  && TREE_THIS_VOLATILE (exp))
	return 0;
      warning_with_file_and_line (emit_filename, emit_lineno,
				  "value computed is not used");
      return 1;
    }
}
/* Clear out the memory of the last expression evaluated.  */

void
clear_last_expr ()
{
  last_expr_type = 0;
}
/* Begin a statement which will return a value.
   Return the RTL_EXPR for this statement expr.
   The caller must save that value and pass it to expand_end_stmt_expr.  */

tree
expand_start_stmt_expr ()
{
  int momentary;
  tree t;

  /* When generating bytecode just note down the stack depth */
  if (output_bytecode)
    return (build_int_2 (stack_depth, 0));

  /* Make the RTL_EXPR node temporary, not momentary,
     so that rtl_expr_chain doesn't become garbage.  */
  momentary = suspend_momentary ();
  t = make_node (RTL_EXPR);
  resume_momentary (momentary);
  start_sequence_for_rtl_expr (t);

  expr_stmts_for_value++;
  return t;
}
/* Restore the previous state at the end of a statement that returns a value.
   Returns a tree node representing the statement's value and the
   insns to compute the value.

   The nodes of that expression have been freed by now, so we cannot use them.
   But we don't want to do that anyway; the expression has already been
   evaluated and now we just want to use the value.  So generate a RTL_EXPR
   with the proper type and RTL value.

   If the last substatement was not an expression,
   return something with type `void'.  */
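/* Hypothetical use by a front end (sketch):

       t = expand_start_stmt_expr ();
       ... expand each statement of the ({...}) body ...
       t = expand_end_stmt_expr (t);

   after which T is an RTL_EXPR usable like any other expression.  */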
tree
expand_end_stmt_expr (t)
     tree t;
{
  if (output_bytecode)
    {
      /* At this point, all expressions have been evaluated in order.
	 However, all expression values have been popped when evaluated,
	 which means we have to recover the last expression value.  This is
	 the last value removed by means of a `drop' instruction.  Instead
	 of adding code to inhibit dropping the last expression value, it
	 is here recovered by undoing the `drop'.  Since `drop' is
	 equivalent to `adjstackSI [1]', it can be undone with `adjstackSI
	 [-1]'.  */

      bc_adjust_stack (-1);

      if (!last_expr_type)
	last_expr_type = void_type_node;

      t = make_node (RTL_EXPR);
      TREE_TYPE (t) = last_expr_type;
      RTL_EXPR_RTL (t) = NULL;
      RTL_EXPR_SEQUENCE (t) = NULL;

      /* Don't consider deleting this expr or containing exprs at tree level.  */
      TREE_THIS_VOLATILE (t) = 1;

      last_expr_type = 0;
      return t;
    }

  if (last_expr_type == 0)
    {
      last_expr_type = void_type_node;
      last_expr_value = const0_rtx;
    }
  else if (last_expr_value == 0)
    /* There are some cases where this can happen, such as when the
       statement is void type.  */
    last_expr_value = const0_rtx;
  else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
    /* Remove any possible QUEUED.  */
    last_expr_value = protect_from_queue (last_expr_value, 0);

  emit_queue ();

  TREE_TYPE (t) = last_expr_type;
  RTL_EXPR_RTL (t) = last_expr_value;
  RTL_EXPR_SEQUENCE (t) = get_insns ();

  rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);

  end_sequence ();

  /* Don't consider deleting this expr or containing exprs at tree level.  */
  TREE_SIDE_EFFECTS (t) = 1;
  /* Propagate volatility of the actual RTL expr.  */
  TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);

  last_expr_type = 0;
  expr_stmts_for_value--;

  return t;
}
/* The exception handling nesting looks like this:

     {  <-- exception handler block
            <-- in an exception handler
       {  <-- try block
       :      <-- in a TRY block
       :      <-- in an exception handler
       }

       {  <-- except block
       :      <-- in an except block
       :      <-- in an exception handler
       }
     }
*/
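/* Hypothetical call sequence for such a construct (sketch):

       expand_start_try (try_clause, 0, 1);
       ... expand the TRY-block statements ...
       expand_end_try ();
       expand_start_except (0, 1);
       ... expand_catch/expand_end_catch for each handler ...
       raised = expand_end_except ();  */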
/* Return nonzero iff in a try block at level LEVEL.  */

int
in_try_block (level)
     int level;
{
  struct nesting *n = except_stack;

  while (1)
    {
      while (n && n->data.except_stmt.after_label != 0)
	n = n->next;
      if (n == 0)
	return 0;
      if (level == 0)
	return 1;
      level--;
      n = n->next;
    }
}

/* Return nonzero iff in an except block at level LEVEL.  */

int
in_except_block (level)
     int level;
{
  struct nesting *n = except_stack;

  while (1)
    {
      while (n && n->data.except_stmt.after_label == 0)
	n = n->next;
      if (n == 0)
	return 0;
      if (level == 0)
	return 1;
      level--;
      n = n->next;
    }
}

/* Return nonzero iff in an exception handler at level LEVEL.  */

int
in_exception_handler (level)
     int level;
{
  struct nesting *n = except_stack;
  while (n && level--)
    n = n->next;
  return n != 0;
}
/* Record the fact that the current exception nesting raises
   exception EX.  If not in an exception handler, return 0.  */

int
expand_raises (ex)
     tree ex;
{
  tree *raises_ptr;

  if (except_stack == 0)
    return 0;
  raises_ptr = &except_stack->data.except_stmt.raised;
  if (! value_member (ex, *raises_ptr))
    *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
  return 1;
}
/* Generate RTL for the start of a try block.

   TRY_CLAUSE is the condition to test to enter the try block.  */

void
expand_start_try (try_clause, exitflag, escapeflag)
     tree try_clause;
     int exitflag;
     int escapeflag;
{
  struct nesting *thishandler = ALLOC_NESTING ();

  /* Make an entry on cond_stack for the cond we are entering.  */

  thishandler->next = except_stack;
  thishandler->all = nesting_stack;
  thishandler->depth = ++nesting_depth;
  thishandler->data.except_stmt.raised = 0;
  thishandler->data.except_stmt.handled = 0;
  thishandler->data.except_stmt.first_insn = get_insns ();
  thishandler->data.except_stmt.except_label = gen_label_rtx ();
  thishandler->data.except_stmt.unhandled_label = 0;
  thishandler->data.except_stmt.after_label = 0;
  thishandler->data.except_stmt.escape_label
    = escapeflag ? thishandler->data.except_stmt.except_label : 0;
  thishandler->exit_label = exitflag ? gen_label_rtx () : 0;
  except_stack = thishandler;
  nesting_stack = thishandler;

  do_jump (try_clause, thishandler->data.except_stmt.except_label, NULL_RTX);
}
/* End of a TRY block.  Nothing to do for now.  */

void
expand_end_try ()
{
  except_stack->data.except_stmt.after_label = gen_label_rtx ();
  expand_goto_internal (NULL_TREE, except_stack->data.except_stmt.after_label,
			NULL_RTX);
}
/* Start an `except' nesting contour.
   EXITFLAG says whether this contour should be able to `exit' something.
   ESCAPEFLAG says whether this contour should be escapable.  */

void
expand_start_except (exitflag, escapeflag)
     int exitflag;
     int escapeflag;
{
  if (exitflag)
    {
      struct nesting *n;

      /* An `exit' from catch clauses goes out to next exit level,
	 if there is one.  Otherwise, it just goes to the end
	 of the construct.  */
      for (n = except_stack->next; n; n = n->next)
	if (n->exit_label != 0)
	  {
	    except_stack->exit_label = n->exit_label;
	    break;
	  }
      if (n == 0)
	except_stack->exit_label = except_stack->data.except_stmt.after_label;
    }

  if (escapeflag)
    {
      struct nesting *n;

      /* An `escape' from catch clauses goes out to next escape level,
	 if there is one.  Otherwise, it just goes to the end
	 of the construct.  */
      for (n = except_stack->next; n; n = n->next)
	if (n->data.except_stmt.escape_label != 0)
	  {
	    except_stack->data.except_stmt.escape_label
	      = n->data.except_stmt.escape_label;
	    break;
	  }
      if (n == 0)
	except_stack->data.except_stmt.escape_label
	  = except_stack->data.except_stmt.after_label;
    }

  do_pending_stack_adjust ();
  emit_label (except_stack->data.except_stmt.except_label);
}
2031 /* Generate code to `escape' from an exception contour. This
2032 is like `exiting', but does not conflict with constructs which use `exit_label'.
2035 Return nonzero if this contour is escapable, otherwise
2036 return zero, and language-specific code will emit the
2037 appropriate error message. */
2039 expand_escape_except ()
2043 for (n = except_stack; n; n = n->next)
2044 if (n->data.except_stmt.escape_label != 0)
2046 expand_goto_internal (NULL_TREE,
2047 n->data.except_stmt.escape_label, NULL_RTX);
2054 /* Finish processing an `except' contour.
2055 Culls out all exceptions which might be raised but not
2056 handled, and returns the list to the caller.
2057 Language-specific code is responsible for dealing with these
2061 expand_end_except ()
2064 tree raised = NULL_TREE;
2066 do_pending_stack_adjust ();
2067 emit_label (except_stack->data.except_stmt.after_label);
2069 n = except_stack->next;
2072 /* Propagate exceptions raised but not handled to next
2074 tree handled = except_stack->data.except_stmt.raised;
2075 if (handled != void_type_node)
2077 tree prev = NULL_TREE;
2078 raised = except_stack->data.except_stmt.raised;
2082 for (this_raise = raised, prev = 0; this_raise;
2083 this_raise = TREE_CHAIN (this_raise))
2085 if (value_member (TREE_VALUE (this_raise), handled))
2088 TREE_CHAIN (prev) = TREE_CHAIN (this_raise);
2091 raised = TREE_CHAIN (raised);
2092 if (raised == NULL_TREE)
2099 handled = TREE_CHAIN (handled);
2101 if (prev == NULL_TREE)
2104 TREE_CHAIN (prev) = n->data.except_stmt.raised;
2106 n->data.except_stmt.raised = raised;
2110 POPSTACK (except_stack);
2115 /* Record that exception EX is caught by this exception handler.
2116 Return nonzero if in exception handling construct, otherwise return 0. */
2123 if (except_stack == 0)
2125 raises_ptr = &except_stack->data.except_stmt.handled;
2126 if (*raises_ptr != void_type_node
2128 && ! value_member (ex, *raises_ptr))
2129 *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
2133 /* Record that this exception handler catches all exceptions.
2134 Return nonzero if in exception handling construct, otherwise return 0. */
2137 expand_catch_default ()
2139 if (except_stack == 0)
2141 except_stack->data.except_stmt.handled = void_type_node;
2148 if (except_stack == 0 || except_stack->data.except_stmt.after_label == 0)
2150 expand_goto_internal (NULL_TREE, except_stack->data.except_stmt.after_label,
2155 /* Generate RTL for the start of an if-then. COND is the expression
2156 whose truth should be tested.
2158 If EXITFLAG is nonzero, this conditional is visible to
2159 `exit_something'. */
2162 expand_start_cond (cond, exitflag)
2166 struct nesting *thiscond = ALLOC_NESTING ();
2168 /* Make an entry on cond_stack for the cond we are entering. */
2170 thiscond->next = cond_stack;
2171 thiscond->all = nesting_stack;
2172 thiscond->depth = ++nesting_depth;
2173 thiscond->data.cond.next_label = gen_label_rtx ();
2174 /* Before we encounter an `else', we don't need a separate exit label
2175 unless there are supposed to be exit statements
2176 to exit this conditional. */
2177 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2178 thiscond->data.cond.endif_label = thiscond->exit_label;
2179 cond_stack = thiscond;
2180 nesting_stack = thiscond;
2182 if (output_bytecode)
2183 bc_expand_start_cond (cond, exitflag);
2185 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2188 /* Generate RTL between the then-clause and the elseif-clause
2189 of an if-then-elseif-.... */
2192 expand_start_elseif (cond)
2195 if (cond_stack->data.cond.endif_label == 0)
2196 cond_stack->data.cond.endif_label = gen_label_rtx ();
2197 emit_jump (cond_stack->data.cond.endif_label);
2198 emit_label (cond_stack->data.cond.next_label);
2199 cond_stack->data.cond.next_label = gen_label_rtx ();
2200 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2203 /* Generate RTL between the then-clause and the else-clause
2204 of an if-then-else. */
2207 expand_start_else ()
2209 if (cond_stack->data.cond.endif_label == 0)
2210 cond_stack->data.cond.endif_label = gen_label_rtx ();
2212 if (output_bytecode)
2214 bc_expand_start_else ();
2218 emit_jump (cond_stack->data.cond.endif_label);
2219 emit_label (cond_stack->data.cond.next_label);
2220 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2223 /* Generate RTL for the end of an if-then.
2224 Pop the record for it off of cond_stack. */
2229 struct nesting *thiscond = cond_stack;
2231 if (output_bytecode)
2232 bc_expand_end_cond ();
2235 do_pending_stack_adjust ();
2236 if (thiscond->data.cond.next_label)
2237 emit_label (thiscond->data.cond.next_label);
2238 if (thiscond->data.cond.endif_label)
2239 emit_label (thiscond->data.cond.endif_label);
2242 POPSTACK (cond_stack);
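/* Editor's sketch (not in the original source): for
   `if (c1) s1; else if (c2) s2; else s3;' the parser's calling
   sequence is

	expand_start_cond (c1, 0);
	... expand s1 ...
	expand_start_elseif (c2);
	... expand s2 ...
	expand_start_else ();
	... expand s3 ...
	expand_end_cond ();
 */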
2247 /* Generate code for the start of an if-then. COND is the expression
2248 whose truth is to be tested; if EXITFLAG is nonzero this conditional
2249 is to be visible to exit_something. It is assumed that the caller
2250 has pushed the previous context on the cond stack. */
2252 bc_expand_start_cond (cond, exitflag)
2256 struct nesting *thiscond = cond_stack;
2258 thiscond->data.case_stmt.nominal_type = cond;
2259 bc_expand_expr (cond);
2260 bc_emit_bytecode (xjumpifnot);
2261 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));
2263 #ifdef DEBUG_PRINT_CODE
2264 fputc ('\n', stderr);
2268 /* Generate the label for the end of an if, with or without an else clause. */
2271 bc_expand_end_cond ()
2273 struct nesting *thiscond = cond_stack;
2275 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->exit_label));
2278 /* Generate code for the start of the else- clause of an if-then-else. */
2281 bc_expand_start_else ()
2283 struct nesting *thiscond = cond_stack;
2285 thiscond->data.cond.endif_label = thiscond->exit_label;
2286 thiscond->exit_label = gen_label_rtx ();
2287 bc_emit_bytecode (jump);
2288 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));
2290 #ifdef DEBUG_PRINT_CODE
2291 fputc ('\n', stderr);
2294 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->data.cond.endif_label));
2297 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2298 loop should be exited by `exit_something'. This is a loop for which
2299 `expand_continue' will jump to the top of the loop.
2301 Make an entry on loop_stack to record the labels associated with this loop. */
2305 expand_start_loop (exit_flag)
2308 register struct nesting *thisloop = ALLOC_NESTING ();
2310 /* Make an entry on loop_stack for the loop we are entering. */
2312 thisloop->next = loop_stack;
2313 thisloop->all = nesting_stack;
2314 thisloop->depth = ++nesting_depth;
2315 thisloop->data.loop.start_label = gen_label_rtx ();
2316 thisloop->data.loop.end_label = gen_label_rtx ();
2317 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2318 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2319 loop_stack = thisloop;
2320 nesting_stack = thisloop;
2322 if (output_bytecode)
2324 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));
2328 do_pending_stack_adjust ();
2330 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
2331 emit_label (thisloop->data.loop.start_label);
2336 /* Like expand_start_loop but for a loop where the continuation point
2337 (for expand_continue_loop) will be specified explicitly. */
2340 expand_start_loop_continue_elsewhere (exit_flag)
2343 struct nesting *thisloop = expand_start_loop (exit_flag);
2344 loop_stack->data.loop.continue_label = gen_label_rtx ();
2348 /* Specify the continuation point for a loop started with
2349 expand_start_loop_continue_elsewhere.
2350 Use this at the point in the code to which a continue statement should jump. */
2354 expand_loop_continue_here ()
2356 if (output_bytecode)
2358 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (loop_stack->data.loop.continue_label));
2361 do_pending_stack_adjust ();
2362 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
2363 emit_label (loop_stack->data.loop.continue_label);
2368 bc_expand_end_loop ()
2370 struct nesting *thisloop = loop_stack;
2372 bc_emit_bytecode (jump);
2373 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));
2375 #ifdef DEBUG_PRINT_CODE
2376 fputc ('\n', stderr);
2379 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->exit_label));
2380 POPSTACK (loop_stack);
2385 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2386 Pop the block off of loop_stack. */
2392 register rtx start_label;
2393 rtx last_test_insn = 0;
2396 if (output_bytecode)
2398 bc_expand_end_loop ();
2402 insn = get_last_insn ();
2403 start_label = loop_stack->data.loop.start_label;
2405 /* Mark the continue-point at the top of the loop if none elsewhere. */
2406 if (start_label == loop_stack->data.loop.continue_label)
2407 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2409 do_pending_stack_adjust ();
2411 /* If optimizing, perhaps reorder the loop. If the loop
2412 starts with a conditional exit, roll that to the end
2413 where it will optimize together with the jump back.
2415 We look for the last conditional branch to the exit that we encounter
2416 before hitting 30 insns or a CALL_INSN. If we see an unconditional
2417 branch to the exit first, use it.
2419 We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
2420 because moving them is not valid. */
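/* Editor's illustration of the transformation below (assumed shapes,
   not from the source): a loop emitted as

	start:	if (!COND) goto end;
		BODY
		goto start;
	end:

   is rearranged into

		goto start;
	newstart:
		BODY
	start:	if (!COND) goto end;
		goto newstart;
	end:

   after which jump optimization can fuse the conditional exit and the
   jump back into a single `if (COND) goto newstart'.  */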
2424 ! (GET_CODE (insn) == JUMP_INSN
2425 && GET_CODE (PATTERN (insn)) == SET
2426 && SET_DEST (PATTERN (insn)) == pc_rtx
2427 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2429 /* Scan insns from the top of the loop looking for a qualified
2430 conditional exit. */
2431 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2432 insn = NEXT_INSN (insn))
2434 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
2437 if (GET_CODE (insn) == NOTE
2438 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2439 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2442 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2445 if (last_test_insn && num_insns > 30)
2448 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
2449 && SET_DEST (PATTERN (insn)) == pc_rtx
2450 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
2451 && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
2452 && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
2453 == loop_stack->data.loop.end_label))
2454 || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
2455 && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
2456 == loop_stack->data.loop.end_label))))
2457 last_test_insn = insn;
2459 if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
2460 && GET_CODE (PATTERN (insn)) == SET
2461 && SET_DEST (PATTERN (insn)) == pc_rtx
2462 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
2463 && (XEXP (SET_SRC (PATTERN (insn)), 0)
2464 == loop_stack->data.loop.end_label))
2465 /* Include BARRIER. */
2466 last_test_insn = NEXT_INSN (insn);
2469 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2471 /* We found one. Move everything from there up
2472 to the end of the loop, and add a jump into the loop
2473 to jump to there. */
2474 register rtx newstart_label = gen_label_rtx ();
2475 register rtx start_move = start_label;
2477 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2478 then we want to move this note also. */
2479 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2480 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2481 == NOTE_INSN_LOOP_CONT))
2482 start_move = PREV_INSN (start_move);
2484 emit_label_after (newstart_label, PREV_INSN (start_move));
2485 reorder_insns (start_move, last_test_insn, get_last_insn ());
2486 emit_jump_insn_after (gen_jump (start_label),
2487 PREV_INSN (newstart_label));
2488 emit_barrier_after (PREV_INSN (newstart_label));
2489 start_label = newstart_label;
2493 emit_jump (start_label);
2494 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2495 emit_label (loop_stack->data.loop.end_label);
2497 POPSTACK (loop_stack);
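/* Editor's sketch (hypothetical front-end usage): `while (c) BODY;'
   can be expanded as

	expand_start_loop (1);
	expand_exit_loop_if_false (0, c);
	... expand BODY ...
	expand_end_loop ();

   with `continue' becoming expand_continue_loop (0) and `break'
   expand_exit_loop (0); expand_end_loop is the assumed name of the
   `Finish a loop' function above.  A `for' statement whose increment
   must run at `continue' uses expand_start_loop_continue_elsewhere
   and marks the increment with expand_loop_continue_here.  */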
2502 /* Generate a jump to the current loop's continue-point.
2503 This is usually the top of the loop, but may be specified
2504 explicitly elsewhere. If not currently inside a loop,
2505 return 0 and do nothing; caller will print an error message. */
2508 expand_continue_loop (whichloop)
2509 struct nesting *whichloop;
2513 whichloop = loop_stack;
2516 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2521 /* Generate a jump to exit the current loop. If not currently inside a loop,
2522 return 0 and do nothing; caller will print an error message. */
2525 expand_exit_loop (whichloop)
2526 struct nesting *whichloop;
2530 whichloop = loop_stack;
2533 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2537 /* Generate a conditional jump to exit the current loop if COND
2538 evaluates to zero. If not currently inside a loop,
2539 return 0 and do nothing; caller will print an error message. */
2542 expand_exit_loop_if_false (whichloop, cond)
2543 struct nesting *whichloop;
2548 whichloop = loop_stack;
2551 if (output_bytecode)
2553 bc_expand_expr (cond);
2554 bc_expand_goto_internal (xjumpifnot,
2555 BYTECODE_BC_LABEL (whichloop->exit_label),
2559 do_jump (cond, whichloop->data.loop.end_label, NULL_RTX);
2564 /* Return non-zero if we should preserve sub-expressions as separate
2565 pseudos. We never do so if we aren't optimizing. We always do so
2566 if -fexpensive-optimizations.
2568 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2569 the loop may still be a small one. */
2572 preserve_subexpressions_p ()
2576 if (flag_expensive_optimizations)
2579 if (optimize == 0 || loop_stack == 0)
2582 insn = get_last_insn_anywhere ();
2585 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2586 < n_non_fixed_regs * 3));
2590 /* Generate a jump to exit the current loop, conditional, binding contour
2591 or case statement. Not all such constructs are visible to this function,
2592 only those started with EXIT_FLAG nonzero. Individual languages use
2593 the EXIT_FLAG parameter to control which kinds of constructs you can exit this way.
2596 If not currently inside anything that can be exited,
2597 return 0 and do nothing; caller will print an error message. */
2600 expand_exit_something ()
2604 for (n = nesting_stack; n; n = n->all)
2605 if (n->exit_label != 0)
2607 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2614 /* Generate RTL to return from the current function, with no value.
2615 (That is, we do not do anything about returning any value.) */
2618 expand_null_return ()
2620 struct nesting *block = block_stack;
2623 if (output_bytecode)
2625 bc_emit_instruction (ret);
2629 /* Does any pending block have cleanups? */
2631 while (block && block->data.block.cleanups == 0)
2632 block = block->next;
2634 /* If yes, use a goto to return, since that runs cleanups. */
2636 expand_null_return_1 (last_insn, block != 0);
2639 /* Generate RTL to return from the current function, with value VAL. */
2642 expand_value_return (val)
2645 struct nesting *block = block_stack;
2646 rtx last_insn = get_last_insn ();
2647 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2649 /* Copy the value to the return location
2650 unless it's already there. */
2652 if (return_reg != val)
2654 #ifdef PROMOTE_FUNCTION_RETURN
2655 enum machine_mode mode = DECL_MODE (DECL_RESULT (current_function_decl));
2656 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2657 int unsignedp = TREE_UNSIGNED (type);
2659 if (TREE_CODE (type) == INTEGER_TYPE || TREE_CODE (type) == ENUMERAL_TYPE
2660 || TREE_CODE (type) == BOOLEAN_TYPE || TREE_CODE (type) == CHAR_TYPE
2661 || TREE_CODE (type) == REAL_TYPE || TREE_CODE (type) == POINTER_TYPE
2662 || TREE_CODE (type) == OFFSET_TYPE)
2664 PROMOTE_MODE (mode, unsignedp, type);
2667 if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
2668 convert_move (return_reg, val, unsignedp);
2671 emit_move_insn (return_reg, val);
2673 if (GET_CODE (return_reg) == REG
2674 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2675 emit_insn (gen_rtx (USE, VOIDmode, return_reg));
2677 /* Does any pending block have cleanups? */
2679 while (block && block->data.block.cleanups == 0)
2680 block = block->next;
2682 /* If yes, use a goto to return, since that runs cleanups.
2683 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2685 expand_null_return_1 (last_insn, block != 0);
2688 /* Output a return with no value. If LAST_INSN is nonzero,
2689 pretend that the return takes place after LAST_INSN.
2690 If USE_GOTO is nonzero then don't use a return instruction;
2691 go to the return label instead. This causes any cleanups
2692 of pending blocks to be executed normally. */
2695 expand_null_return_1 (last_insn, use_goto)
2699 rtx end_label = cleanup_label ? cleanup_label : return_label;
2701 clear_pending_stack_adjust ();
2702 do_pending_stack_adjust ();
2705 /* PCC-struct return always uses an epilogue. */
2706 if (current_function_returns_pcc_struct || use_goto)
2709 end_label = return_label = gen_label_rtx ();
2710 expand_goto_internal (NULL_TREE, end_label, last_insn);
2714 /* Otherwise output a simple return-insn if one is available,
2715 unless it won't do the job. */
2717 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2719 emit_jump_insn (gen_return ());
2725 /* Otherwise jump to the epilogue. */
2726 expand_goto_internal (NULL_TREE, end_label, last_insn);
2729 /* Generate RTL to evaluate the expression RETVAL and return it
2730 from the current function. */
2733 expand_return (retval)
2736 /* If there are any cleanups to be performed, then they will
2737 be inserted following LAST_INSN. It is desirable
2738 that the last_insn, for such purposes, should be the
2739 last insn before computing the return value. Otherwise, cleanups
2740 which call functions can clobber the return value. */
2741 /* ??? rms: I think that is erroneous, because in C++ it would
2742 run destructors on variables that might be used in the subsequent
2743 computation of the return value. */
2745 register rtx val = 0;
2749 struct nesting *block;
2751 /* Bytecode returns are quite simple: just leave the result on the
2752 arithmetic stack. */
2753 if (output_bytecode)
2755 bc_expand_expr (retval);
2756 bc_emit_instruction (ret);
2760 /* If function wants no value, give it none. */
2761 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2763 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2765 expand_null_return ();
2769 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2770 cleanups = any_pending_cleanups (1);
2772 if (TREE_CODE (retval) == RESULT_DECL)
2773 retval_rhs = retval;
2774 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2775 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2776 retval_rhs = TREE_OPERAND (retval, 1);
2777 else if (TREE_TYPE (retval) == void_type_node)
2778 /* Recognize tail-recursive call to void function. */
2779 retval_rhs = retval;
2781 retval_rhs = NULL_TREE;
2783 /* Only use `last_insn' if there are cleanups which must be run. */
2784 if (cleanups || cleanup_label != 0)
2785 last_insn = get_last_insn ();
2787 /* Distribute return down conditional expr if either of the sides
2788 may involve tail recursion (see test below). This enhances the number
2789 of tail recursions we see. Don't do this always, since it can produce
2790 sub-optimal code in some cases, and we already distribute assignments
2791 into conditional expressions when that would help. */
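/* For example (editor's note): `return c ? f (x) : g (y);' is treated
   below as if it had been written

	if (c) return f (x); else return g (y);

   so that each arm gets its own chance at the tail-recursion case that
   follows.  */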
2793 if (optimize && retval_rhs != 0
2794 && frame_offset == 0
2795 && TREE_CODE (retval_rhs) == COND_EXPR
2796 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2797 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2799 rtx label = gen_label_rtx ();
2802 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2803 expr = build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2804 DECL_RESULT (current_function_decl),
2805 TREE_OPERAND (retval_rhs, 1));
2806 TREE_SIDE_EFFECTS (expr) = 1;
2807 expand_return (expr);
2810 expr = build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2811 DECL_RESULT (current_function_decl),
2812 TREE_OPERAND (retval_rhs, 2));
2813 TREE_SIDE_EFFECTS (expr) = 1;
2814 expand_return (expr);
2818 /* For tail-recursive call to current function,
2819 just jump back to the beginning.
2820 It's unsafe if any auto variable in this function
2821 has its address taken; for simplicity,
2822 require stack frame to be empty. */
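/* Editor's illustration: in

	int sum (n, acc) int n, acc;
	{ return n == 0 ? acc : sum (n - 1, acc + n); }

   the recursive arm qualifies once the COND_EXPR above has been
   distributed: the call compiles into assignments to the formals
   followed by a jump to tail_recursion_label, provided the frame is
   empty and tail_recursion_args accepts the argument list.  */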
2823 if (optimize && retval_rhs != 0
2824 && frame_offset == 0
2825 && TREE_CODE (retval_rhs) == CALL_EXPR
2826 && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
2827 && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
2828 /* Finish checking validity, and if valid emit code
2829 to set the argument variables for the new call. */
2830 && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
2831 DECL_ARGUMENTS (current_function_decl)))
2833 if (tail_recursion_label == 0)
2835 tail_recursion_label = gen_label_rtx ();
2836 emit_label_after (tail_recursion_label,
2837 tail_recursion_reentry);
2840 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
2845 /* This optimization is safe if there are local cleanups
2846 because expand_null_return takes care of them.
2847 ??? I think it should also be safe when there is a cleanup label,
2848 because expand_null_return takes care of them, too.
2849 Any reason why not? */
2850 if (HAVE_return && cleanup_label == 0
2851 && ! current_function_returns_pcc_struct
2852 && BRANCH_COST <= 1)
2854 /* If this is return x == y; then generate
2855 if (x == y) return 1; else return 0;
2856 if we can do it with explicit return insns and
2857 branches are cheap. */
2859 switch (TREE_CODE (retval_rhs))
2867 case TRUTH_ANDIF_EXPR:
2868 case TRUTH_ORIF_EXPR:
2869 case TRUTH_AND_EXPR:
2871 case TRUTH_NOT_EXPR:
2872 case TRUTH_XOR_EXPR:
2873 op0 = gen_label_rtx ();
2874 jumpifnot (retval_rhs, op0);
2875 expand_value_return (const1_rtx);
2877 expand_value_return (const0_rtx);
2881 #endif /* HAVE_return */
2885 && TREE_TYPE (retval_rhs) != void_type_node
2886 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2888 /* Calculate the return value into a pseudo reg. */
2889 val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2891 /* All temporaries have now been used. */
2893 /* Return the calculated value, doing cleanups first. */
2894 expand_value_return (val);
2898 /* No cleanups or no hard reg used;
2899 calculate value into hard return reg. */
2900 expand_expr (retval, const0_rtx, VOIDmode, 0);
2903 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2907 /* Return 1 if the end of the generated RTX is not a barrier.
2908 This means code already compiled can drop through. */
2911 drop_through_at_end_p ()
2913 rtx insn = get_last_insn ();
2914 while (insn && GET_CODE (insn) == NOTE)
2915 insn = PREV_INSN (insn);
2916 return insn && GET_CODE (insn) != BARRIER;
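/* Editor's note: for example, immediately after expand_null_return the
   stream normally ends in a BARRIER (emitted after the jump or return
   insn), so drop_through_at_end_p returns 0; after ordinary
   straight-line code it returns 1.  */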
2919 /* Emit code to alter this function's formal parms for a tail-recursive call.
2920 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2921 FORMALS is the chain of decls of formals.
2922 Return 1 if this can be done;
2923 otherwise return 0 and do not emit any code. */
2926 tail_recursion_args (actuals, formals)
2927 tree actuals, formals;
2929 register tree a = actuals, f = formals;
2931 register rtx *argvec;
2933 /* Check that number and types of actuals are compatible
2934 with the formals. This is not always true in valid C code.
2935 Also check that no formal needs to be addressable
2936 and that all formals are scalars. */
2938 /* Also count the args. */
2940 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2942 if (TREE_TYPE (TREE_VALUE (a)) != TREE_TYPE (f))
2944 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2947 if (a != 0 || f != 0)
2950 /* Compute all the actuals. */
2952 argvec = (rtx *) alloca (i * sizeof (rtx));
2954 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2955 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
2957 /* Find which actual values refer to current values of previous formals.
2958 Copy each of them now, before any formal is changed. */
2960 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2964 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2965 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2966 { copy = 1; break; }
2968 argvec[i] = copy_to_reg (argvec[i]);
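/* Editor's illustration: for a tail-recursive call `f (b, a)' inside
   `f (a, b)', argvec[1] (the old value of `a') mentions the register
   of formal 0, which is about to be overwritten with `b'; the loop
   above therefore copies argvec[1] into a fresh pseudo first.  */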
2971 /* Store the values of the actuals into the formals. */
2973 for (f = formals, a = actuals, i = 0; f;
2974 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2976 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
2977 emit_move_insn (DECL_RTL (f), argvec[i]);
2979 convert_move (DECL_RTL (f), argvec[i],
2980 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
2987 /* Generate the RTL code for entering a binding contour.
2988 The variables are declared one by one, by calls to `expand_decl'.
2990 EXIT_FLAG is nonzero if this construct should be visible to
2991 `exit_something'. */
2994 expand_start_bindings (exit_flag)
2997 struct nesting *thisblock = ALLOC_NESTING ();
3000 if (!output_bytecode)
3001 note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
3003 /* Make an entry on block_stack for the block we are entering. */
3005 thisblock->next = block_stack;
3006 thisblock->all = nesting_stack;
3007 thisblock->depth = ++nesting_depth;
3008 thisblock->data.block.stack_level = 0;
3009 thisblock->data.block.cleanups = 0;
3010 thisblock->data.block.function_call_count = 0;
3014 if (block_stack->data.block.cleanups == NULL_TREE
3015 && (block_stack->data.block.outer_cleanups == NULL_TREE
3016 || block_stack->data.block.outer_cleanups == empty_cleanup_list))
3017 thisblock->data.block.outer_cleanups = empty_cleanup_list;
3019 thisblock->data.block.outer_cleanups
3020 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3021 block_stack->data.block.outer_cleanups);
3024 thisblock->data.block.outer_cleanups = 0;
3028 && !(block_stack->data.block.cleanups == NULL_TREE
3029 && block_stack->data.block.outer_cleanups == NULL_TREE))
3030 thisblock->data.block.outer_cleanups
3031 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3032 block_stack->data.block.outer_cleanups);
3034 thisblock->data.block.outer_cleanups = 0;
3036 thisblock->data.block.label_chain = 0;
3037 thisblock->data.block.innermost_stack_block = stack_block_stack;
3038 thisblock->data.block.first_insn = note;
3039 thisblock->data.block.block_start_count = ++block_start_count;
3040 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3041 block_stack = thisblock;
3042 nesting_stack = thisblock;
3044 if (!output_bytecode)
3046 /* Make a new level for allocating stack slots. */
3051 /* Given a pointer to a BLOCK node, save a pointer to the most recently
3052 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given block node. */
3056 remember_end_note (block)
3057 register tree block;
3059 BLOCK_END_NOTE (block) = last_block_end_note;
3060 last_block_end_note = NULL_RTX;
3063 /* Generate RTL code to terminate a binding contour.
3064 VARS is the chain of VAR_DECL nodes
3065 for the variables bound in this contour.
3066 MARK_ENDS is nonzero if we should put a note at the beginning
3067 and end of this binding contour.
3069 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3070 (That is true automatically if the contour has a saved stack level.) */
3073 expand_end_bindings (vars, mark_ends, dont_jump_in)
3078 register struct nesting *thisblock = block_stack;
3081 if (output_bytecode)
3083 bc_expand_end_bindings (vars, mark_ends, dont_jump_in);
3088 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3089 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL
3090 && ! DECL_IN_SYSTEM_HEADER (decl))
3091 warning_with_decl (decl, "unused variable `%s'");
3093 if (thisblock->exit_label)
3095 do_pending_stack_adjust ();
3096 emit_label (thisblock->exit_label);
3099 /* If necessary, make a handler for nonlocal gotos taking
3100 place in the function calls in this block. */
3101 if (function_call_count != thisblock->data.block.function_call_count
3103 /* Make handler for outermost block
3104 if there were any nonlocal gotos to this function. */
3105 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3106 /* Make handler for inner block if it has something
3107 special to do when you jump out of it. */
3108 : (thisblock->data.block.cleanups != 0
3109 || thisblock->data.block.stack_level != 0)))
3112 rtx afterward = gen_label_rtx ();
3113 rtx handler_label = gen_label_rtx ();
3114 rtx save_receiver = gen_reg_rtx (Pmode);
3117 /* Don't let jump_optimize delete the handler. */
3118 LABEL_PRESERVE_P (handler_label) = 1;
3120 /* Record the handler address in the stack slot for that purpose,
3121 during this block, saving and restoring the outer value. */
3122 if (thisblock->next != 0)
3124 emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
3127 emit_move_insn (save_receiver, nonlocal_goto_handler_slot);
3128 insns = get_insns ();
3130 emit_insns_before (insns, thisblock->data.block.first_insn);
3134 emit_move_insn (nonlocal_goto_handler_slot,
3135 gen_rtx (LABEL_REF, Pmode, handler_label));
3136 insns = get_insns ();
3138 emit_insns_before (insns, thisblock->data.block.first_insn);
3140 /* Jump around the handler; it runs only when specially invoked. */
3141 emit_jump (afterward);
3142 emit_label (handler_label);
3144 #ifdef HAVE_nonlocal_goto
3145 if (! HAVE_nonlocal_goto)
3147 /* First adjust our frame pointer to its actual value. It was
3148 previously set to the start of the virtual area corresponding to
3149 the stacked variables when we branched here and now needs to be
3150 adjusted to the actual hardware fp value.
3152 Assignments to virtual registers are converted by
3153 instantiate_virtual_regs into the corresponding assignment
3154 to the underlying register (fp in this case) that makes
3155 the original assignment true.
3156 So the following insn will actually be
3157 decrementing fp by STARTING_FRAME_OFFSET. */
3158 emit_move_insn (virtual_stack_vars_rtx, frame_pointer_rtx);
3160 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3161 if (fixed_regs[ARG_POINTER_REGNUM])
3163 #ifdef ELIMINABLE_REGS
3164 /* If the argument pointer can be eliminated in favor of the
3165 frame pointer, we don't need to restore it. We assume here
3166 that if such an elimination is present, it can always be used.
3167 This is the case on all known machines; if we don't make this
3168 assumption, we do unnecessary saving on many machines. */
3169 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3172 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
3173 if (elim_regs[i].from == ARG_POINTER_REGNUM
3174 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3177 if (i == sizeof elim_regs / sizeof elim_regs [0])
3180 /* Now restore our arg pointer from the address at which it
3181 was saved in our stack frame.
3182 If there hasn't been space allocated for it yet, make some now. */
3184 if (arg_pointer_save_area == 0)
3185 arg_pointer_save_area
3186 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3187 emit_move_insn (virtual_incoming_args_rtx,
3188 /* We need a pseudo here, or else
3189 instantiate_virtual_regs_1 complains. */
3190 copy_to_reg (arg_pointer_save_area));
3195 /* The handler expects the desired label address in the static chain
3196 register. It tests the address and does an appropriate jump
3197 to whatever label is desired. */
3198 for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
3199 /* Skip any labels we shouldn't be able to jump to from here. */
3200 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3202 rtx not_this = gen_label_rtx ();
3203 rtx this = gen_label_rtx ();
3204 do_jump_if_equal (static_chain_rtx,
3205 gen_rtx (LABEL_REF, Pmode, DECL_RTL (TREE_VALUE (link))),
3207 emit_jump (not_this);
3209 expand_goto (TREE_VALUE (link));
3210 emit_label (not_this);
3212 /* If label is not recognized, abort. */
3213 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0,
3215 emit_label (afterward);
3218 /* Don't allow jumping into a block that has cleanups or a stack level. */
3220 || thisblock->data.block.stack_level != 0
3221 || thisblock->data.block.cleanups != 0)
3223 struct label_chain *chain;
3225 /* Any labels in this block are no longer valid to go to.
3226 Mark them to cause an error message. */
3227 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3229 DECL_TOO_LATE (chain->label) = 1;
3230 /* If any goto without a fixup came to this label,
3231 that must be an error, because gotos without fixups
3232 come from outside all saved stack-levels and all cleanups. */
3233 if (TREE_ADDRESSABLE (chain->label))
3234 error_with_decl (chain->label,
3235 "label `%s' used before containing binding contour");
3239 /* Restore stack level in effect before the block
3240 (only if variable-size objects allocated). */
3241 /* Perform any cleanups associated with the block. */
3243 if (thisblock->data.block.stack_level != 0
3244 || thisblock->data.block.cleanups != 0)
3246 /* Don't let cleanups affect ({...}) constructs. */
3247 int old_expr_stmts_for_value = expr_stmts_for_value;
3248 rtx old_last_expr_value = last_expr_value;
3249 tree old_last_expr_type = last_expr_type;
3250 expr_stmts_for_value = 0;
3252 /* Do the cleanups. */
3253 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE);
3254 do_pending_stack_adjust ();
3256 expr_stmts_for_value = old_expr_stmts_for_value;
3257 last_expr_value = old_last_expr_value;
3258 last_expr_type = old_last_expr_type;
3260 /* Restore the stack level. */
3262 if (thisblock->data.block.stack_level != 0)
3264 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3265 thisblock->data.block.stack_level, NULL_RTX);
3266 if (nonlocal_goto_handler_slot != 0)
3267 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3271 /* Any gotos out of this block must also do these things.
3272 Also report any gotos with fixups that came to labels in this level. */
3274 fixup_gotos (thisblock,
3275 thisblock->data.block.stack_level,
3276 thisblock->data.block.cleanups,
3277 thisblock->data.block.first_insn,
3281 /* Mark the beginning and end of the scope if requested.
3282 We do this now, after running cleanups on the variables
3283 just going out of scope, so they are in scope for their cleanups. */
3286 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3288 /* Get rid of the beginning-mark if we don't make an end-mark. */
3289 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3291 /* If doing stupid register allocation, make sure lives of all
3292 register variables declared here extend thru end of scope. */
3295 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3297 rtx rtl = DECL_RTL (decl);
3298 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
3302 /* Restore block_stack level for containing block. */
3304 stack_block_stack = thisblock->data.block.innermost_stack_block;
3305 POPSTACK (block_stack);
3307 /* Pop the stack slot nesting and free any slots at this level. */
3312 /* End a binding contour.
3313 VARS is the chain of VAR_DECL nodes for the variables bound
3314 in this contour. MARK_ENDS is nonzero if we should put a note
3315 at the beginning and end of this binding contour.
3316 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour. */
3320 bc_expand_end_bindings (vars, mark_ends, dont_jump_in)
3325 struct nesting *thisbind = nesting_stack;
3329 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3330 if (! TREE_USED (TREE_VALUE (decl)) && TREE_CODE (TREE_VALUE (decl)) == VAR_DECL)
3331 warning_with_decl (TREE_VALUE (decl), "unused variable `%s'");
3333 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisbind->exit_label));
3335 /* Pop block/bindings off stack */
3336 POPSTACK (nesting_stack);
3337 POPSTACK (block_stack);
3340 /* Generate RTL for the automatic variable declaration DECL.
3341 (Other kinds of declarations are simply ignored if seen here.)
3342 CLEANUP is an expression to be executed at exit from this binding contour;
3343 for example, in C++, it might call the destructor for this variable.
3345 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
3346 either before or after calling `expand_decl' but before compiling
3347 any subsequent expressions. This is because CLEANUP may be expanded
3348 more than once, on different branches of execution.
3349 For the same reason, CLEANUP may not contain a CALL_EXPR
3350 except as its topmost node--else `preexpand_calls' would get confused.
3352 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3353 that is not associated with any particular variable.
3355 There is no special support here for C++ constructors.
3356 They should be handled by the proper code in DECL_INITIAL. */
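/* Editor's sketch (assumed C++-style usage, not from this file): for a
   block-scope object whose type has a destructor, the front end would
   pass the destructor call as the cleanup, e.g.

	expand_decl (x, dtor_call);

   where `dtor_call' is a hypothetical tree for `x.~T ()' built by the
   front end; the cleanup is then re-expanded by expand_end_bindings
   and by any goto or return that leaves the contour.  */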
3362 struct nesting *thisblock = block_stack;
3365 if (output_bytecode)
3367 bc_expand_decl (decl, 0);
3371 type = TREE_TYPE (decl);
3373 /* Only automatic variables need any expansion done.
3374 Static and external variables, and external functions,
3375 will be handled by `assemble_variable' (called from finish_decl).
3376 TYPE_DECL and CONST_DECL require nothing.
3377 PARM_DECLs are handled in `assign_parms'. */
3379 if (TREE_CODE (decl) != VAR_DECL)
3381 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3384 /* Create the RTL representation for the variable. */
3386 if (type == error_mark_node)
3387 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
3388 else if (DECL_SIZE (decl) == 0)
3389 /* Variable with incomplete type. */
3391 if (DECL_INITIAL (decl) == 0)
3392 /* Error message was already done; now avoid a crash. */
3393 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
3395 /* An initializer is going to decide the size of this array.
3396 Until we know the size, represent its address with a reg. */
3397 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
3399 else if (DECL_MODE (decl) != BLKmode
3400 /* If -ffloat-store, don't put explicit float vars into registers. */
3402 && !(flag_float_store
3403 && TREE_CODE (type) == REAL_TYPE)
3404 && ! TREE_THIS_VOLATILE (decl)
3405 && ! TREE_ADDRESSABLE (decl)
3406 && (DECL_REGISTER (decl) || ! obey_regdecls))
3408 /* Automatic variable that can go in a register. */
3409 enum machine_mode reg_mode = DECL_MODE (decl);
3410 int unsignedp = TREE_UNSIGNED (type);
3412 if (TREE_CODE (type) == INTEGER_TYPE || TREE_CODE (type) == ENUMERAL_TYPE
3413 || TREE_CODE (type) == BOOLEAN_TYPE || TREE_CODE (type) == CHAR_TYPE
3414 || TREE_CODE (type) == REAL_TYPE || TREE_CODE (type) == POINTER_TYPE
3415 || TREE_CODE (type) == OFFSET_TYPE)
3417 PROMOTE_MODE (reg_mode, unsignedp, type);
3420 if (TREE_CODE (type) == COMPLEX_TYPE)
3422 rtx realpart, imagpart;
3423 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (type));
3425 /* For a complex type variable, make a CONCAT of two pseudos
3426 so that the real and imaginary parts
3427 can be allocated separately. */
3428 realpart = gen_reg_rtx (partmode);
3429 REG_USERVAR_P (realpart) = 1;
3430 imagpart = gen_reg_rtx (partmode);
3431 REG_USERVAR_P (imagpart) = 1;
3432 DECL_RTL (decl) = gen_rtx (CONCAT, reg_mode, realpart, imagpart);
3436 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3437 if (TREE_CODE (type) == POINTER_TYPE)
3438 mark_reg_pointer (DECL_RTL (decl));
3439 REG_USERVAR_P (DECL_RTL (decl)) = 1;
3442 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST)
3444 /* Variable of fixed size that goes on the stack. */
3448 /* If we previously made RTL for this decl, it must be an array
3449 whose size was determined by the initializer.
3450 The old address was a register; set that register now
3451 to the proper address. */
3452 if (DECL_RTL (decl) != 0)
3454 if (GET_CODE (DECL_RTL (decl)) != MEM
3455 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3457 oldaddr = XEXP (DECL_RTL (decl), 0);
3461 = assign_stack_temp (DECL_MODE (decl),
3462 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
3463 + BITS_PER_UNIT - 1)
3467 /* Set alignment we actually gave this decl. */
3468 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3469 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3473 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3474 if (addr != oldaddr)
3475 emit_move_insn (oldaddr, addr);
3478 /* If this is a memory ref that contains aggregate components,
3479 mark it as such for cse and loop optimize. */
3480 MEM_IN_STRUCT_P (DECL_RTL (decl))
3481 = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
3482 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
3483 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
3484 || TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
3486 /* If this is in memory because of -ffloat-store,
3487 set the volatile bit, to prevent optimizations from
3488 undoing the effects. */
3489 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
3490 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3494 /* Dynamic-size object: must push space on the stack. */
3498 /* Record the stack pointer on entry to block, if we have
3499 not already done so. */
3500 if (thisblock->data.block.stack_level == 0)
3502 do_pending_stack_adjust ();
3503 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3504 &thisblock->data.block.stack_level,
3505 thisblock->data.block.first_insn);
3506 stack_block_stack = thisblock;
3509 /* Compute the variable's size, in bytes. */
3510 size = expand_expr (size_binop (CEIL_DIV_EXPR, DECL_SIZE (decl),
3512 size_int (BITS_PER_UNIT)),
3513 NULL_RTX, VOIDmode, 0);
3516 /* This is equivalent to calling alloca. */
3517 current_function_calls_alloca = 1;
3519 /* Allocate space on the stack for the variable. */
3520 address = allocate_dynamic_stack_space (size, NULL_RTX,
3523 if (nonlocal_goto_handler_slot != 0)
3524 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
3526 /* Reference the variable indirectly through that rtx. */
3527 DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);
3529 /* If this is a memory ref that contains aggregate components,
3530 mark it as such for cse and loop optimize. */
3531 MEM_IN_STRUCT_P (DECL_RTL (decl))
3532 = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
3533 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
3534 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
3535 || TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
3537 /* Indicate the alignment we actually gave this variable. */
3538 #ifdef STACK_BOUNDARY
3539 DECL_ALIGN (decl) = STACK_BOUNDARY;
3541 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3545 if (TREE_THIS_VOLATILE (decl))
3546 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3547 #if 0 /* A variable is not necessarily unchanging
3548 just because it is const. RTX_UNCHANGING_P
3549 means no change in the function,
3550 not merely no change in the variable's scope.
3551 It is correct to set RTX_UNCHANGING_P if the variable's scope
3552 is the whole function. There's no convenient way to test that. */
3553 if (TREE_READONLY (decl))
3554 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
3557 /* If doing stupid register allocation, make sure life of any
3558 register variable starts here, at the start of its scope. */
3561 use_variable (DECL_RTL (decl));
3565 /* Generate code for the automatic variable declaration DECL. For
3566 most variables this just means we give it a stack offset. The
3567 compiler sometimes emits cleanups without variables and we will
3568 have to deal with those too. */
3571 bc_expand_decl (decl, cleanup)
3579 /* A cleanup with no variable. */
3586 /* Only auto variables need any work. */
3587 if (TREE_CODE (decl) != VAR_DECL || TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3590 type = TREE_TYPE (decl);
3592 if (type == error_mark_node)
3593 DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
3595 else if (DECL_SIZE (decl) == 0)
3597 /* Variable with incomplete type. The stack offset herein will be
3598 fixed later in expand_decl_init (). */
3599 DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
3601 else if (TREE_CONSTANT (DECL_SIZE (decl)))
3603 DECL_RTL (decl) = bc_allocate_local (TREE_INT_CST_LOW (DECL_SIZE (decl)) / BITS_PER_UNIT,
3607 DECL_RTL (decl) = bc_allocate_variable_array (DECL_SIZE (decl));
3610 /* Emit code to perform the initialization of a declaration DECL. */
3613 expand_decl_init (decl)
3616 int was_used = TREE_USED (decl);
3618 /* If this is a CONST_DECL, we don't have to generate any code, but
3619 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3620 to be set while in the obstack containing the constant. If we don't
3621 do this, we can lose if we have functions nested three deep and the middle
3622 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3623 the innermost function is the first to expand that STRING_CST. */
3624 if (TREE_CODE (decl) == CONST_DECL)
3626 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3627 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3628 EXPAND_INITIALIZER);
3632 if (TREE_STATIC (decl))
3635 /* Compute and store the initial value now. */
3637 if (DECL_INITIAL (decl) == error_mark_node)
3639 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3640 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3641 || code == POINTER_TYPE)
3642 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3646 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3648 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3649 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3653 /* Don't let the initialization count as "using" the variable. */
3654 TREE_USED (decl) = was_used;
3656 /* Free any temporaries we made while initializing the decl. */
3660 /* Expand initialization for variable-sized types. Allocate array
3661 using newlocalSI and set local variable, which is a pointer to the allocated space. */
3664 bc_expand_variable_local_init (decl)
3667 /* Evaluate size expression and coerce to SI */
3668 bc_expand_expr (DECL_SIZE (decl));
3670 /* Type sizes are always (?) of TREE_CODE INTEGER_CST, so
3671 no coercion is necessary (?) */
3673 /* emit_typecode_conversion (preferred_typecode (TYPE_MODE (DECL_SIZE (decl)),
3674 TREE_UNSIGNED (DECL_SIZE (decl))), SIcode); */
3676 /* Emit code to allocate array */
3677 bc_emit_instruction (newlocalSI);
3679 /* Store array pointer in local variable. This is the only instance
3680 where we actually want the address of the pointer to the
3681 variable-size block, rather than the pointer itself. We avoid
3682 using expand_address() since that would cause the pointer to be
3683 pushed rather than its address. Hence the hard-coded reference;
3684 notice also that the variable is always local (no global
3685 variable-size type variables). */
3687 bc_load_localaddr (DECL_RTL (decl));
3688 bc_emit_instruction (storeP);
3692 /* Emit code to initialize a declaration. */
3694 bc_expand_decl_init (decl)
3697 int org_stack_depth;
3699 /* Static initializers are handled elsewhere. */
3701 if (TREE_STATIC (decl))
3704 /* Remember the original stack depth. */
3705 org_stack_depth = stack_depth;
3707 /* If the type is variable-size, we first create its space (we ASSUME
3708 it CAN'T be static). We do this regardless of whether there's an
3709 initializer assignment or not. */
3711 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
3712 bc_expand_variable_local_init (decl);
3714 /* Expand initializer assignment */
3715 if (DECL_INITIAL (decl) == error_mark_node)
3717 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3719 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3720 || code == POINTER_TYPE)
3722 expand_assignment (TREE_TYPE (decl), decl, 0, 0);
3724 else if (DECL_INITIAL (decl))
3725 expand_assignment (TREE_TYPE (decl), decl, 0, 0);
3727 /* Restore stack depth */
3728 if (org_stack_depth > stack_depth)
3731 bc_adjust_stack (stack_depth - org_stack_depth);
3735 /* CLEANUP is an expression to be executed at exit from this binding contour;
3736 for example, in C++, it might call the destructor for this variable.
3738 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
3739 either before or after calling `expand_decl' but before compiling
3740 any subsequent expressions. This is because CLEANUP may be expanded
3741 more than once, on different branches of execution.
3742 For the same reason, CLEANUP may not contain a CALL_EXPR
3743 except as its topmost node--else `preexpand_calls' would get confused.
3745 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3746 that is not associated with any particular variable. */
3749 expand_decl_cleanup (decl, cleanup)
3752 struct nesting *thisblock = block_stack;
3754 /* Error if we are not in any block. */
3758 /* Record the cleanup if there is one. */
3762 thisblock->data.block.cleanups
3763 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3764 /* If this block has a cleanup, it belongs in stack_block_stack. */
3765 stack_block_stack = thisblock;
3770 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
3771 DECL_ELTS is the list of elements that belong to DECL's type.
3772 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
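/* Editor's illustration: for an anonymous union with members `int i;'
   and `float f;', both members end up with DECL_RTL naming the same
   location as the union object, re-expressed in each member's own
   machine mode: a fresh MEM if the union lives in memory, a SUBREG if
   it lives in a register.  */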
3775 expand_anon_union_decl (decl, cleanup, decl_elts)
3776 tree decl, cleanup, decl_elts;
3778 struct nesting *thisblock = block_stack;
3781 expand_decl (decl, cleanup);
3782 x = DECL_RTL (decl);
3786 tree decl_elt = TREE_VALUE (decl_elts);
3787 tree cleanup_elt = TREE_PURPOSE (decl_elts);
3788 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
3790 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
3791 instead create a new MEM rtx with the proper mode. */
3792 if (GET_CODE (x) == MEM)
3794 if (mode == GET_MODE (x))
3795 DECL_RTL (decl_elt) = x;
3798 DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0)));
3799 MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
3800 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
3803 else if (GET_CODE (x) == REG)
3805 if (mode == GET_MODE (x))
3806 DECL_RTL (decl_elt) = x;
3808 DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0);
3813 /* Record the cleanup if there is one. */
3816 thisblock->data.block.cleanups
3817 = temp_tree_cons (decl_elt, cleanup_elt,
3818 thisblock->data.block.cleanups);
3820 decl_elts = TREE_CHAIN (decl_elts);
3824 /* Expand a list of cleanups LIST.
3825 Elements may be expressions or may be nested lists.
3827 If DONT_DO is nonnull, then any list-element
3828 whose TREE_PURPOSE matches DONT_DO is omitted.
3829 This is sometimes used to avoid a cleanup associated with
3830 a value that is being returned out of the scope. */
3833 expand_cleanups (list, dont_do)
3838 for (tail = list; tail; tail = TREE_CHAIN (tail))
3839 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
3841 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3842 expand_cleanups (TREE_VALUE (tail), dont_do);
3845 /* Cleanups may be run multiple times. For example,
3846 when exiting a binding contour, we expand the
3847 cleanups associated with that contour. When a goto
3848 within that binding contour has a target outside that
3849 contour, it will expand all cleanups from its scope to
3850 the target. Though the cleanups are expanded multiple
3851 times, the control paths are non-overlapping so the
3852 cleanups will not be executed twice. */
3853 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
3859 /* Move all cleanups from the current block_stack
3860 to the containing block_stack, where they are assumed to
3861 have been created. If anything can cause a temporary to
3862 be created, but not expanded for more than one level of
3863 block_stacks, then this code will have to change. */
3868 struct nesting *block = block_stack;
3869 struct nesting *outer = block->next;
3871 outer->data.block.cleanups
3872 = chainon (block->data.block.cleanups,
3873 outer->data.block.cleanups);
3874 block->data.block.cleanups = 0;
3878 last_cleanup_this_contour ()
3880 if (block_stack == 0)
3883 return block_stack->data.block.cleanups;
3886 /* Return 1 if there are any pending cleanups at this point.
3887 If THIS_CONTOUR is nonzero, check the current contour as well.
3888 Otherwise, look only at the contours that enclose this one. */
3891 any_pending_cleanups (this_contour)
3894 struct nesting *block;
3896 if (block_stack == 0)
3899 if (this_contour && block_stack->data.block.cleanups != NULL)
3901 if (block_stack->data.block.cleanups == 0
3902 && (block_stack->data.block.outer_cleanups == 0
3904 || block_stack->data.block.outer_cleanups == empty_cleanup_list
3909 for (block = block_stack->next; block; block = block->next)
3910 if (block->data.block.cleanups != 0)
3916 /* Enter a case (Pascal) or switch (C) statement.
3917 Push a block onto case_stack and nesting_stack
3918 to accumulate the case-labels that are seen
3919 and to record the labels generated for the statement.
3921 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
3922 Otherwise, this construct is transparent for `exit_something'.
3924 EXPR is the index-expression to be dispatched on.
3925 TYPE is its nominal type. We could simply convert EXPR to this type,
3926 but instead we take short cuts. */
3929 expand_start_case (exit_flag, expr, type, printname)
3935 register struct nesting *thiscase = ALLOC_NESTING ();
3937 /* Make an entry on case_stack for the case we are entering. */
3939 thiscase->next = case_stack;
3940 thiscase->all = nesting_stack;
3941 thiscase->depth = ++nesting_depth;
3942 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
3943 thiscase->data.case_stmt.case_list = 0;
3944 thiscase->data.case_stmt.index_expr = expr;
3945 thiscase->data.case_stmt.nominal_type = type;
3946 thiscase->data.case_stmt.default_label = 0;
3947 thiscase->data.case_stmt.num_ranges = 0;
3948 thiscase->data.case_stmt.printname = printname;
3949 thiscase->data.case_stmt.seenlabel = 0;
3950 case_stack = thiscase;
3951 nesting_stack = thiscase;
3953 if (output_bytecode)
3955 bc_expand_start_case (thiscase, expr, type, printname);
3959 do_pending_stack_adjust ();
3961 /* Make sure case_stmt.start points to something that won't
3962 need any transformation before expand_end_case. */
3963 if (GET_CODE (get_last_insn ()) != NOTE)
3964 emit_note (NULL_PTR, NOTE_INSN_DELETED);
3966 thiscase->data.case_stmt.start = get_last_insn ();
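/* Editor's sketch (not in the original): a C `switch' would expand as

	expand_start_case (1, expr, type, "switch statement");
	pushcase (value, convert_fn, case_label, &dup);      one per `case'
	pushcase (NULL_TREE, convert_fn, default_label, &dup);   `default'
	... expand the body ...
	expand_end_case (expr);

   `convert_fn' stands for the language's converter function;
   expand_end_case (mentioned above) is defined later in this file.  */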
3970 /* Enter a case statement. It is assumed that the caller has pushed
3971 the current context onto the case stack. */
3973 bc_expand_start_case (thiscase, expr, type, printname)
3974 struct nesting *thiscase;
3979 bc_expand_expr (expr);
3980 bc_expand_conversion (TREE_TYPE (expr), type);
3982 /* For cases, the skip is a place we jump to that's emitted after
3983 the size of the jump table is known. */
3985 thiscase->data.case_stmt.skip_label = gen_label_rtx ();
3986 bc_emit_bytecode (jump);
3987 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));
3989 #ifdef DEBUG_PRINT_CODE
3990 fputc ('\n', stderr);
3995 /* Start a "dummy case statement" within which case labels are invalid
3996 and are not connected to any larger real case statement.
3997 This can be used if you don't want to let a case statement jump
3998 into the middle of certain kinds of constructs. */
4001 expand_start_case_dummy ()
4003 register struct nesting *thiscase = ALLOC_NESTING ();
4005 /* Make an entry on case_stack for the dummy. */
4007 thiscase->next = case_stack;
4008 thiscase->all = nesting_stack;
4009 thiscase->depth = ++nesting_depth;
4010 thiscase->exit_label = 0;
4011 thiscase->data.case_stmt.case_list = 0;
4012 thiscase->data.case_stmt.start = 0;
4013 thiscase->data.case_stmt.nominal_type = 0;
4014 thiscase->data.case_stmt.default_label = 0;
4015 thiscase->data.case_stmt.num_ranges = 0;
4016 case_stack = thiscase;
4017 nesting_stack = thiscase;
4020 /* End a dummy case statement. */
4023 expand_end_case_dummy ()
4025 POPSTACK (case_stack);
4028 /* Return the data type of the index-expression
4029 of the innermost case statement, or null if none. */
4032 case_index_expr_type ()
4035 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
/* Accumulate one case or default label inside a case or switch statement.
   VALUE is the value of the case (a null pointer, for a default label).
   The function CONVERTER, when applied to arguments T and V,
   converts the value V to the type T.

   If not currently inside a case or switch statement, return 1 and do
   nothing.  The caller will print a language-specific error message.
   If VALUE is a duplicate or overlaps, return 2 and do nothing
   except store the (first) duplicate node in *DUPLICATE.
   If VALUE is out of range, return 3 and do nothing.
   If we are jumping into the scope of a cleanup or var-sized array, return 5.
   Return 0 on success.

   Extended to handle range statements.  */
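/* A hypothetical caller (editor's sketch; `error' stands in for the
   front end's own diagnostic routine, and `convert' matches the
   CONVERTER signature) would dispatch on the return value like this:

	tree duplicate;
	switch (pushcase (value, convert, label, &duplicate))
	  {
	  case 1: error ("case label not within a switch statement"); break;
	  case 2: error ("duplicate case value"); break;
	  case 3: error ("case value out of range"); break;
	  case 5: error ("case label within scope of cleanup or variable array"); break;
	  }
*/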
int
pushcase (value, converter, label, duplicate)
     register tree value;
     tree (*converter) PROTO((tree, tree));
     register tree label;
     tree *duplicate;
{
  register struct case_node **l;
  register struct case_node *n;
  tree index_type;
  tree nominal_type;

  if (output_bytecode)
    return bc_pushcase (value, label);

  /* Fail if not inside a real case statement.  */
  if (! (case_stack && case_stack->data.case_stmt.start))
    return 1;

  if (stack_block_stack
      && stack_block_stack->depth > case_stack->depth)
    return 5;

  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
  nominal_type = case_stack->data.case_stmt.nominal_type;

  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
  if (index_type == error_mark_node)
    return 0;

  /* Convert VALUE to the type in which the comparisons are nominally done.  */
  if (value != 0)
    value = (*converter) (nominal_type, value);

  /* If this is the first label, warn if any insns have been emitted.  */
  if (case_stack->data.case_stmt.seenlabel == 0)
    {
      rtx insn;
      for (insn = case_stack->data.case_stmt.start;
	   insn;
	   insn = NEXT_INSN (insn))
	{
	  if (GET_CODE (insn) == CODE_LABEL)
	    break;
	  if (GET_CODE (insn) != NOTE
	      && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
	    {
	      warning ("unreachable code at beginning of %s",
		       case_stack->data.case_stmt.printname);
	      break;
	    }
	}
    }
  case_stack->data.case_stmt.seenlabel = 1;

  /* Fail if this value is out of range for the actual type of the index
     (which may be narrower than NOMINAL_TYPE).  */
  if (value != 0 && ! int_fits_type_p (value, index_type))
    return 3;

  /* Fail if this is a duplicate or overlaps another entry.  */
  if (value == 0)
    {
      if (case_stack->data.case_stmt.default_label != 0)
	{
	  *duplicate = case_stack->data.case_stmt.default_label;
	  return 2;
	}
      case_stack->data.case_stmt.default_label = label;
    }
  else
    {
      /* Find the elt in the chain before which to insert the new value,
	 to keep the chain sorted in increasing order.
	 But report an error if this element is a duplicate.  */
      for (l = &case_stack->data.case_stmt.case_list;
	   /* Keep going past elements distinctly less than VALUE.  */
	   *l != 0 && tree_int_cst_lt ((*l)->high, value);
	   l = &(*l)->right)
	;
      if (*l)
	{
	  /* Element we will insert before must be distinctly greater;
	     overlap means error.  */
	  if (! tree_int_cst_lt (value, (*l)->low))
	    {
	      *duplicate = (*l)->code_label;
	      return 2;
	    }
	}

      /* Add this label to the chain, and succeed.
	 Copy VALUE so it is on temporary rather than momentary
	 obstack and will thus survive till the end of the case statement.  */
      n = (struct case_node *) oballoc (sizeof (struct case_node));
      n->left = 0;
      n->right = *l;
      n->high = n->low = copy_node (value);
      n->code_label = label;
      *l = n;
    }

  expand_label (label);
  return 0;
}

/* Like pushcase but this case applies to all values
   between VALUE1 and VALUE2 (inclusive).
   The return value is the same as that of pushcase
   but there is one additional error code:
   4 means the specified range was empty.  */
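/* For illustration (editor's note): the GNU C range extension

	case 1 ... 3:

   reaches this function with VALUE1 == 1 and VALUE2 == 3; a plain
   `case 1:' goes through pushcase instead.  A null VALUE1 or VALUE2
   stands for the minimum or maximum of the index type, respectively,
   as the conversion code below shows.  */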
int
pushcase_range (value1, value2, converter, label, duplicate)
     register tree value1, value2;
     tree (*converter) PROTO((tree, tree));
     register tree label;
     tree *duplicate;
{
  register struct case_node **l;
  register struct case_node *n;
  tree index_type;
  tree nominal_type;

  /* Fail if not inside a real case statement.  */
  if (! (case_stack && case_stack->data.case_stmt.start))
    return 1;

  if (stack_block_stack
      && stack_block_stack->depth > case_stack->depth)
    return 5;

  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
  nominal_type = case_stack->data.case_stmt.nominal_type;

  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
  if (index_type == error_mark_node)
    return 0;

  /* If this is the first label, warn if any insns have been emitted.  */
  if (case_stack->data.case_stmt.seenlabel == 0)
    {
      rtx insn;
      for (insn = case_stack->data.case_stmt.start;
	   insn;
	   insn = NEXT_INSN (insn))
	{
	  if (GET_CODE (insn) == CODE_LABEL)
	    break;
	  if (GET_CODE (insn) != NOTE
	      && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
	    {
	      warning ("unreachable code at beginning of %s",
		       case_stack->data.case_stmt.printname);
	      break;
	    }
	}
    }
  case_stack->data.case_stmt.seenlabel = 1;

  /* Convert VALUEs to type in which the comparisons are nominally done.  */
  if (value1 == 0)		/* Negative infinity.  */
    value1 = TYPE_MIN_VALUE (index_type);
  value1 = (*converter) (nominal_type, value1);

  if (value2 == 0)		/* Positive infinity.  */
    value2 = TYPE_MAX_VALUE (index_type);
  value2 = (*converter) (nominal_type, value2);

  /* Fail if these values are out of range.  */
  if (! int_fits_type_p (value1, index_type))
    return 3;

  if (! int_fits_type_p (value2, index_type))
    return 3;

  /* Fail if the range is empty.  */
  if (tree_int_cst_lt (value2, value1))
    return 4;

  /* If the bounds are equal, turn this into the one-value case.  */
  if (tree_int_cst_equal (value1, value2))
    return pushcase (value1, converter, label, duplicate);

  /* Find the elt in the chain before which to insert the new value,
     to keep the chain sorted in increasing order.
     But report an error if this element is a duplicate.  */
  for (l = &case_stack->data.case_stmt.case_list;
       /* Keep going past elements distinctly less than this range.  */
       *l != 0 && tree_int_cst_lt ((*l)->high, value1);
       l = &(*l)->right)
    ;
  if (*l)
    {
      /* Element we will insert before must be distinctly greater;
	 overlap means error.  */
      if (! tree_int_cst_lt (value2, (*l)->low))
	{
	  *duplicate = (*l)->code_label;
	  return 2;
	}
    }

  /* Add this label to the chain, and succeed.
     Copy VALUE1, VALUE2 so they are on temporary rather than momentary
     obstack and will thus survive till the end of the case statement.  */

  n = (struct case_node *) oballoc (sizeof (struct case_node));
  n->left = 0;
  n->right = *l;
  n->low = copy_node (value1);
  n->high = copy_node (value2);
  n->code_label = label;
  *l = n;

  expand_label (label);

  case_stack->data.case_stmt.num_ranges++;

  return 0;
}

/* Accumulate one case or default label; VALUE is the value of the
   case, or nil for a default label.  If not currently inside a case,
   return 1 and do nothing.  If VALUE is a duplicate or overlaps, return
   2 and do nothing.  If VALUE is out of range, return 3 and do nothing.
   Return 0 on success.  This function is a leftover from the earlier
   bytecode compiler, which was based on gcc 1.37.  It should be
   merged into pushcase.  */

static int
bc_pushcase (value, label)
     tree value;
     tree label;
{
  struct nesting *thiscase = case_stack;
  struct case_node *case_label, *new_label;

  if (! thiscase)
    return 1;

  /* Fail if duplicate, overlap, or out of type range.  */
  if (value)
    {
      value = convert (thiscase->data.case_stmt.nominal_type, value);
      if (! int_fits_type_p (value, thiscase->data.case_stmt.nominal_type))
	return 3;

      for (case_label = thiscase->data.case_stmt.case_list;
	   case_label->left; case_label = case_label->left)
	if (! tree_int_cst_lt (case_label->left->high, value))
	  break;

      if ((case_label != thiscase->data.case_stmt.case_list
	   && ! tree_int_cst_lt (case_label->high, value))
	  || (case_label->left
	      && ! tree_int_cst_lt (value, case_label->left->low)))
	return 2;

      new_label = (struct case_node *) oballoc (sizeof (struct case_node));
      new_label->low = new_label->high = copy_node (value);
      new_label->code_label = label;
      new_label->left = case_label->left;

      case_label->left = new_label;
      thiscase->data.case_stmt.num_ranges++;
    }
  else
    {
      if (thiscase->data.case_stmt.default_label)
	return 2;
      thiscase->data.case_stmt.default_label = label;
    }

  expand_label (label);
  return 0;
}

/* Called when the index of a switch statement is an enumerated type
   and there is no default label.

   Checks that all enumeration literals are covered by the case
   expressions of a switch.  Also, warn if there are any extra
   switch cases that are *not* elements of the enumerated type.

   If all enumeration literals were covered by the case expressions,
   turn one of the expressions into the default expression since it should
   not be possible to fall through such a switch.  */
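/* Example (editor's note): given

	enum color { RED, GREEN, BLUE };
	switch (c) { case RED: ...; case GREEN: ...; }

   with no default label, this function warns that `BLUE' is not
   handled; and a `case 7:' in the same switch would draw the
   complementary warning that 7 is not in `enum color'.  */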
void
check_for_full_enumeration_handling (type)
     tree type;
{
  register struct case_node *n;
  register struct case_node **l;
  register tree chain;
  int all_values = 1;

  if (output_bytecode)
    {
      bc_check_for_full_enumeration_handling (type);
      return;
    }

  /* The time complexity of this loop is currently O(N * M), with
     N being the number of members in the enumerated type, and
     M being the number of case expressions in the switch.  */

  for (chain = TYPE_VALUES (type);
       chain;
       chain = TREE_CHAIN (chain))
    {
      /* Find a match between enumeral and case expression, if possible.
	 Quit looking when we've gone too far (since case expressions
	 are kept sorted in ascending order).  Warn about enumerators not
	 handled in the switch statement case expression list.  */

      for (n = case_stack->data.case_stmt.case_list;
	   n && tree_int_cst_lt (n->high, TREE_VALUE (chain));
	   n = n->right)
	;

      if (!n || tree_int_cst_lt (TREE_VALUE (chain), n->low))
	{
	  all_values = 0;
	  warning ("enumeration value `%s' not handled in switch",
		   IDENTIFIER_POINTER (TREE_PURPOSE (chain)));
	}
    }

  /* Now we go the other way around; we warn if there are case
     expressions that don't correspond to enumerators.  This can
     occur since C and C++ don't enforce type-checking of
     assignments to enumeration variables.  */

  for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
    {
      for (chain = TYPE_VALUES (type);
	   chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
	   chain = TREE_CHAIN (chain))
	;

      if (!chain)
	{
	  if (TYPE_NAME (type) == 0)
	    warning ("case value `%d' not in enumerated type",
		     TREE_INT_CST_LOW (n->low));
	  else
	    warning ("case value `%d' not in enumerated type `%s'",
		     TREE_INT_CST_LOW (n->low),
		     IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
					  == IDENTIFIER_NODE)
					 ? TYPE_NAME (type)
					 : DECL_NAME (TYPE_NAME (type))));
	}

      if (!tree_int_cst_equal (n->low, n->high))
	{
	  for (chain = TYPE_VALUES (type);
	       chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
	       chain = TREE_CHAIN (chain))
	    ;

	  if (!chain)
	    {
	      if (TYPE_NAME (type) == 0)
		warning ("case value `%d' not in enumerated type",
			 TREE_INT_CST_LOW (n->high));
	      else
		warning ("case value `%d' not in enumerated type `%s'",
			 TREE_INT_CST_LOW (n->high),
			 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
					      == IDENTIFIER_NODE)
					     ? TYPE_NAME (type)
					     : DECL_NAME (TYPE_NAME (type))));
	    }
	}
    }

#if 0
  /* ??? This optimization is disabled because it causes valid programs to
     fail.  ANSI C does not guarantee that an expression with enum type
     will have a value that is the same as one of the enumeration literals.  */

  /* If all values were found as case labels, make one of them the default
     label.  Thus, this switch will never fall through.  We arbitrarily pick
     the last one to make the default since this is likely the most
     efficient choice.  */

  if (all_values)
    {
      for (l = &case_stack->data.case_stmt.case_list;
	   (*l)->right != 0;
	   l = &(*l)->right)
	;

      case_stack->data.case_stmt.default_label = (*l)->code_label;
    }
#endif /* 0 */
}

/* Check that all enumeration literals are covered by the case
   expressions of a switch.  Also warn if there are any cases
   that are not elements of the enumerated type.  */

static void
bc_check_for_full_enumeration_handling (type)
     tree type;
{
  struct nesting *thiscase = case_stack;
  struct case_node *c;
  tree e;

  /* Check for enums not handled.  */
  for (e = TYPE_VALUES (type); e; e = TREE_CHAIN (e))
    {
      for (c = thiscase->data.case_stmt.case_list->left;
	   c && tree_int_cst_lt (c->high, TREE_VALUE (e));
	   c = c->left)
	;
      if (! (c && tree_int_cst_equal (c->low, TREE_VALUE (e))))
	warning ("enumerated value `%s' not handled in switch",
		 IDENTIFIER_POINTER (TREE_PURPOSE (e)));
    }

  /* Check for cases not in the enumeration.  */
  for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
    {
      for (e = TYPE_VALUES (type);
	   e && !tree_int_cst_equal (c->low, TREE_VALUE (e));
	   e = TREE_CHAIN (e))
	;
      if (! e)
	warning ("case value `%d' not in enumerated type `%s'",
		 TREE_INT_CST_LOW (c->low),
		 IDENTIFIER_POINTER (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
				     ? TYPE_NAME (type)
				     : DECL_NAME (TYPE_NAME (type))));
    }
}

/* Terminate a case (Pascal) or switch (C) statement
   in which ORIG_INDEX is the expression to be tested.
   Generate the code to test it and jump to the right place.  */

void
expand_end_case (orig_index)
     tree orig_index;
{
  tree minval, maxval, range, orig_minval;
  rtx default_label = 0;
  register struct case_node *n;
  int count;
  rtx index;
  rtx table_label;
  int ncases;
  rtx *labelvec;
  register int i;
  rtx before_case;
  register struct nesting *thiscase = case_stack;
  tree index_expr;
  int unsignedp;

  if (output_bytecode)
    {
      bc_expand_end_case (orig_index);
      return;
    }

  table_label = gen_label_rtx ();
  index_expr = thiscase->data.case_stmt.index_expr;
  unsignedp = TREE_UNSIGNED (TREE_TYPE (index_expr));

  do_pending_stack_adjust ();

  /* An ERROR_MARK occurs for various reasons including invalid data type.  */
  if (TREE_TYPE (index_expr) != error_mark_node)
    {
      /* If switch expression was an enumerated type, check that all
	 enumeration literals are covered by the cases.
	 No sense trying this if there's a default case, however.  */

      if (!thiscase->data.case_stmt.default_label
	  && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
	  && TREE_CODE (index_expr) != INTEGER_CST)
	check_for_full_enumeration_handling (TREE_TYPE (orig_index));

      /* If this is the first label, warn if any insns have been emitted.  */
      if (thiscase->data.case_stmt.seenlabel == 0)
	{
	  rtx insn;
	  for (insn = get_last_insn ();
	       insn != case_stack->data.case_stmt.start;
	       insn = PREV_INSN (insn))
	    if (GET_CODE (insn) != NOTE
		&& (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
	      {
		warning ("unreachable code at beginning of %s",
			 case_stack->data.case_stmt.printname);
		break;
	      }
	}

      /* If we don't have a default-label, create one here,
	 after the body of the switch.  */
      if (thiscase->data.case_stmt.default_label == 0)
	{
	  thiscase->data.case_stmt.default_label
	    = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
	  expand_label (thiscase->data.case_stmt.default_label);
	}
      default_label = label_rtx (thiscase->data.case_stmt.default_label);

      before_case = get_last_insn ();

      /* Simplify the case-list before we count it.  */
      group_case_nodes (thiscase->data.case_stmt.case_list);

      /* Get upper and lower bounds of case values.
	 Also convert all the case values to the index expr's data type.  */

      count = 0;
      for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
	{
	  /* Check low and high label values are integers.  */
	  if (TREE_CODE (n->low) != INTEGER_CST)
	    abort ();
	  if (TREE_CODE (n->high) != INTEGER_CST)
	    abort ();

	  n->low = convert (TREE_TYPE (index_expr), n->low);
	  n->high = convert (TREE_TYPE (index_expr), n->high);

	  /* Count the elements and track the largest and smallest
	     of them (treating them as signed even if they are not).  */

	  if (count++ == 0)
	    {
	      minval = n->low;
	      maxval = n->high;
	    }
	  else
	    {
	      if (INT_CST_LT (n->low, minval))
		minval = n->low;
	      if (INT_CST_LT (maxval, n->high))
		maxval = n->high;
	    }
	  /* A range counts double, since it requires two compares.  */
	  if (! tree_int_cst_equal (n->low, n->high))
	    count++;
	}

      orig_minval = minval;

      /* Compute span of values.  */
      if (count != 0)
	range = fold (build (MINUS_EXPR, TREE_TYPE (index_expr),
			     maxval, minval));

      if (count == 0 || TREE_CODE (TREE_TYPE (index_expr)) == ERROR_MARK)
	{
	  expand_expr (index_expr, const0_rtx, VOIDmode, 0);
	  emit_queue ();
	  emit_jump (default_label);
	}

      /* If range of values is much bigger than number of values,
	 make a sequence of conditional branches instead of a dispatch.
	 If the switch-index is a constant, do it this way
	 because we can optimize it.  */

#ifndef CASE_VALUES_THRESHOLD
#ifdef HAVE_casesi
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#else
      /* If machine does not have a case insn that compares the
	 bounds, this means extra overhead for dispatch tables
	 which raises the threshold for using them.  */
#define CASE_VALUES_THRESHOLD 5
#endif /* HAVE_casesi */
#endif /* CASE_VALUES_THRESHOLD */
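      /* Worked example (editor's note): with HAVE_casesi the threshold
	 is 4, so a switch with the five labels 1, 2, 3, 5, 1000 passes
	 the count test (5 >= 4), but its range (999) exceeds 10 * count
	 (50), so the conditions below still reject a dispatch table and
	 emit compare-and-branch code instead.  */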
      else if (TREE_INT_CST_HIGH (range) != 0
	       || count < CASE_VALUES_THRESHOLD
	       || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
		   > 10 * count)
	       || TREE_CODE (index_expr) == INTEGER_CST
	       /* These will reduce to a constant.  */
	       || (TREE_CODE (index_expr) == CALL_EXPR
		   && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
		   && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
		   && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
	       || (TREE_CODE (index_expr) == COMPOUND_EXPR
		   && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
	{
	  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);

	  /* If the index is a short or char that we do not have
	     an insn to handle comparisons directly, convert it to
	     a full integer now, rather than letting each comparison
	     generate the conversion.  */

	  if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
	      && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
		  == CODE_FOR_nothing))
	    {
	      enum machine_mode wider_mode;
	      for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
		   wider_mode = GET_MODE_WIDER_MODE (wider_mode))
		if (cmp_optab->handlers[(int) wider_mode].insn_code
		    != CODE_FOR_nothing)
		  {
		    index = convert_to_mode (wider_mode, index, unsignedp);
		    break;
		  }
	    }

	  emit_queue ();
	  do_pending_stack_adjust ();

	  index = protect_from_queue (index, 0);
	  if (GET_CODE (index) == MEM)
	    index = copy_to_reg (index);
	  if (GET_CODE (index) == CONST_INT
	      || TREE_CODE (index_expr) == INTEGER_CST)
	    {
	      /* Make a tree node with the proper constant value
		 if we don't already have one.  */
	      if (TREE_CODE (index_expr) != INTEGER_CST)
		{
		  index_expr
		    = build_int_2 (INTVAL (index),
				   !unsignedp && INTVAL (index) >= 0 ? 0 : -1);
		  index_expr = convert (TREE_TYPE (index_expr), index_expr);
		}

	      /* For constant index expressions we need only
		 issue an unconditional branch to the appropriate
		 target code.  The job of removing any unreachable
		 code is left to the optimization phase if the
		 "-O" option is specified.  */
	      for (n = thiscase->data.case_stmt.case_list;
		   n;
		   n = n->right)
		if (! tree_int_cst_lt (index_expr, n->low)
		    && ! tree_int_cst_lt (n->high, index_expr))
		  break;

	      if (n)
		emit_jump (label_rtx (n->code_label));
	      else
		emit_jump (default_label);
	    }
	  else
	    {
	      /* If the index expression is not constant we generate
		 a binary decision tree to select the appropriate
		 target code.  This is done as follows:

		 The list of cases is rearranged into a binary tree,
		 nearly optimal assuming equal probability for each case.

		 The tree is transformed into RTL, eliminating
		 redundant test conditions at the same time.

		 If program flow could reach the end of the
		 decision tree an unconditional jump to the
		 default code is emitted.  */
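	      /* For illustration (editor's sketch): for the labels
		 {1, 2, 3, 5, 9}, balance_case_nodes picks 3 as the root,
		 so the emitted code first compares against 3, then
		 against 1 or 2 on the left and 5 or 9 on the right --
		 roughly log2(N) compares instead of N.  */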
	      use_cost_table
		= (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
		   && estimate_case_costs (thiscase->data.case_stmt.case_list));
	      balance_case_nodes (&thiscase->data.case_stmt.case_list,
				  NULL_PTR);
	      emit_case_nodes (index, thiscase->data.case_stmt.case_list,
			       default_label, TREE_TYPE (index_expr));
	      emit_jump_if_reachable (default_label);
	    }
	}
      else
	{
	  int win = 0;
#ifdef HAVE_casesi
	  if (HAVE_casesi)
	    {
	      enum machine_mode index_mode = SImode;
	      int index_bits = GET_MODE_BITSIZE (index_mode);

	      /* Convert the index to SImode.  */
	      if (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (index_expr)))
		  > GET_MODE_BITSIZE (index_mode))
		{
		  enum machine_mode omode = TYPE_MODE (TREE_TYPE (index_expr));
		  rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

		  /* We must handle the endpoints in the original mode.  */
		  index_expr = build (MINUS_EXPR, TREE_TYPE (index_expr),
				      index_expr, minval);
		  minval = integer_zero_node;
		  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
		  emit_cmp_insn (rangertx, index, LTU, NULL_RTX, omode, 1, 0);
		  emit_jump_insn (gen_bltu (default_label));
		  /* Now we can safely truncate.  */
		  index = convert_to_mode (index_mode, index, 0);
		}
	      else
		{
		  if (TYPE_MODE (TREE_TYPE (index_expr)) != index_mode)
		    index_expr = convert (type_for_size (index_bits, 0),
					  index_expr);
		  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
		}
	      emit_queue ();
	      index = protect_from_queue (index, 0);
	      do_pending_stack_adjust ();

	      emit_jump_insn (gen_casesi (index, expand_expr (minval, NULL_RTX,
							      VOIDmode, 0),
					  expand_expr (range, NULL_RTX,
						       VOIDmode, 0),
					  table_label, default_label));
	      win = 1;
	    }
#endif
#ifdef HAVE_tablejump
	  if (! win && HAVE_tablejump)
	    {
	      index_expr = convert (thiscase->data.case_stmt.nominal_type,
				    fold (build (MINUS_EXPR,
						 TREE_TYPE (index_expr),
						 index_expr, minval)));
	      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
	      emit_queue ();
	      index = protect_from_queue (index, 0);
	      do_pending_stack_adjust ();

	      do_tablejump (index, TYPE_MODE (TREE_TYPE (index_expr)),
			    expand_expr (range, NULL_RTX, VOIDmode, 0),
			    table_label, default_label);
	      win = 1;
	    }
#endif
	  if (! win)
	    abort ();

	  /* Get table of labels to jump to, in order of case index.  */

	  ncases = TREE_INT_CST_LOW (range) + 1;
	  labelvec = (rtx *) alloca (ncases * sizeof (rtx));
	  bzero (labelvec, ncases * sizeof (rtx));

	  for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
	    {
	      register HOST_WIDE_INT i
		= TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);

	      while (1)
		{
		  labelvec[i]
		    = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
		  if (i + TREE_INT_CST_LOW (orig_minval)
		      == TREE_INT_CST_LOW (n->high))
		    break;
		  i++;
		}
	    }

	  /* Fill in the gaps with the default.  */
	  for (i = 0; i < ncases; i++)
	    if (labelvec[i] == 0)
	      labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);

	  /* Output the table.  */
	  emit_label (table_label);

	  /* This would be a lot nicer if CASE_VECTOR_PC_RELATIVE
	     were an expression, instead of an #ifdef/#ifndef.  */
#ifdef CASE_VECTOR_PC_RELATIVE
	  emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
				   gen_rtx (LABEL_REF, Pmode, table_label),
				   gen_rtvec_v (ncases, labelvec)));
#else
	  emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
				   gen_rtvec_v (ncases, labelvec)));
#endif

	  /* If the case insn drops through the table,
	     after the table we must jump to the default-label.
	     Otherwise record no drop-through after the table.  */
#ifdef CASE_DROPS_THROUGH
	  emit_jump (default_label);
#else
	  emit_barrier ();
#endif
	}

      before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
      reorder_insns (before_case, get_last_insn (),
		     thiscase->data.case_stmt.start);
    }

  if (thiscase->exit_label)
    emit_label (thiscase->exit_label);

  POPSTACK (case_stack);

  free_temp_slots ();
}

/* Terminate a case statement.  EXPR is the original index
   expression.  */

static void
bc_expand_end_case (expr)
     tree expr;
{
  struct nesting *thiscase = case_stack;
  enum bytecode_opcode opcode;
  struct bc_label *jump_label;
  struct case_node *c;

  bc_emit_bytecode (jump);
  bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif

  /* Now that the size of the jump table is known, emit the actual
     indexed jump instruction.  */
  bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));

  opcode = TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode
    ? TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseSU : caseSI
    : TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseDU : caseDI;

  bc_emit_bytecode (opcode);

  /* Now emit the case instruction's literal arguments, in order.
     In addition to the value on the stack, it uses:
     1.  The address of the jump table.
     2.  The size of the jump table.
     3.  The default label.  */
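  /* Editor's note (illustrative sketch): for `case 1:' and `case 3:'
     the emitted sequence would look roughly like

	caseSI
	<table_label>		(address of the jump table)
	2			(number of table entries)
	<default or exit label>

     followed by the table itself,

	<table_label>:	1, 1, <code label for case 1>
			3, 3, <code label for case 3>

     i.e. each entry holds a low bound, a high bound and a code label,
     as the loops below write them out.  */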
  jump_label = bc_get_bytecode_label ();
  bc_emit_bytecode_labelref (jump_label);
  bc_emit_bytecode_const ((char *) &thiscase->data.case_stmt.num_ranges,
			  sizeof thiscase->data.case_stmt.num_ranges);

  if (thiscase->data.case_stmt.default_label)
    bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (thiscase->data.case_stmt.default_label)));
  else
    bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));

  /* Output the jump table.  */

  bc_align_bytecode (3 /* PTR_ALIGN */);
  bc_emit_bytecode_labeldef (jump_label);

  if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode)
    for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
      {
	opcode = TREE_INT_CST_LOW (c->low);
	bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);

	opcode = TREE_INT_CST_LOW (c->high);
	bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);

	bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
      }
  else if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == DImode)
    for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
      {
	bc_emit_bytecode_DI_const (c->low);
	bc_emit_bytecode_DI_const (c->high);

	bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
      }
  else
    /* Bad mode.  */
    abort ();

  bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->exit_label));

  /* Possibly issue enumeration warnings.  */

  if (!thiscase->data.case_stmt.default_label
      && TREE_CODE (TREE_TYPE (expr)) == ENUMERAL_TYPE
      && TREE_CODE (expr) != INTEGER_CST)
    check_for_full_enumeration_handling (TREE_TYPE (expr));

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif

  POPSTACK (case_stack);
}

/* Return unique bytecode ID.  */

int
bc_new_uid ()
{
  static int bc_uid = 0;

  return (++bc_uid);
}

/* Generate code to jump to LABEL if OP1 and OP2 are equal.  */

static void
do_jump_if_equal (op1, op2, label, unsignedp)
     rtx op1, op2, label;
     int unsignedp;
{
  if (GET_CODE (op1) == CONST_INT
      && GET_CODE (op2) == CONST_INT)
    {
      if (INTVAL (op1) == INTVAL (op2))
	emit_jump (label);
    }
  else
    {
      enum machine_mode mode = GET_MODE (op1);
      if (mode == VOIDmode)
	mode = GET_MODE (op2);
      emit_cmp_insn (op1, op2, EQ, NULL_RTX, mode, unsignedp, 0);
      emit_jump_insn (gen_beq (label));
    }
}

/* Not all case values are encountered equally.  This function
   uses a heuristic to weight case labels, in cases where that
   looks like a reasonable thing to do.

   Right now, all we try to guess is text, and we establish the
   following weights:

	chars above space:	16
	digits:			16
	default:		12
	space, punct:		8
	tab:			4
	newline:		2
	other "\" chars:	1
	remaining chars:	0

   If we find any cases in the switch that are not either -1 or in the range
   of valid ASCII characters, or are control characters other than those
   commonly used with "\", don't treat this switch scanning text.

   Return 1 if these nodes are suitable for cost estimation, otherwise
   return 0.  */
static int
estimate_case_costs (node)
     case_node_ptr node;
{
  tree min_ascii = build_int_2 (-1, -1);
  tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
  register case_node_ptr n;
  register int i;

  /* If we haven't already made the cost table, make it now.  Note that the
     lower bound of the table is -1, not zero.  */

  if (cost_table == NULL)
    {
      cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
      bzero (cost_table - 1, 129 * sizeof (short));

      for (i = 0; i < 128; i++)
	{
	  if (isalnum (i))
	    cost_table[i] = 16;
	  else if (ispunct (i))
	    cost_table[i] = 8;
	  else if (iscntrl (i))
	    cost_table[i] = -1;
	}

      cost_table[' '] = 8;
      cost_table['\t'] = 4;
      cost_table['\0'] = 4;
      cost_table['\n'] = 2;
      cost_table['\f'] = 1;
      cost_table['\v'] = 1;
      cost_table['\b'] = 1;
    }

  /* See if all the case expressions look like text.  It is text if the
     constant is >= -1 and the highest constant is <= 127.  Do all comparisons
     as signed arithmetic since we don't want to ever access cost_table with a
     value less than -1.  Also check that none of the constants in a range
     are strange control characters.  */

  for (n = node; n; n = n->right)
    {
      if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
	return 0;

      for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
	if (cost_table[i] < 0)
	  return 0;
    }

  /* All interesting values are within the range of interesting
     ASCII characters.  */
  return 1;
}

/* Scan an ordered list of case nodes
   combining those with consecutive values or ranges.

   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */

static void
group_case_nodes (head)
     case_node_ptr head;
{
  case_node_ptr node = head;

  while (node)
    {
      rtx lb = next_real_insn (label_rtx (node->code_label));
      case_node_ptr np = node;

      /* Try to group the successors of NODE with NODE.  */
      while (((np = np->right) != 0)
	     /* Do they jump to the same place?  */
	     && next_real_insn (label_rtx (np->code_label)) == lb
	     /* Are their ranges consecutive?  */
	     && tree_int_cst_equal (np->low,
				    fold (build (PLUS_EXPR,
						 TREE_TYPE (node->high),
						 node->high,
						 integer_one_node)))
	     /* An overflow is not consecutive.  */
	     && tree_int_cst_lt (node->high,
				 fold (build (PLUS_EXPR,
					      TREE_TYPE (node->high),
					      node->high,
					      integer_one_node))))
	{
	  node->high = np->high;
	}
      /* NP is the first node after NODE which can't be grouped with it.
	 Delete the nodes in between, and move on to that node.  */
      node->right = np;
      node = np;
    }
}

/* Take an ordered list of case nodes
   and transform them into a near optimal binary tree,
   on the assumption that any target code selection value is as
   likely as any other.

   The transformation is performed by splitting the ordered
   list into two equal sections plus a pivot.  The parts are
   then attached to the pivot as left and right branches.  Each
   branch is then transformed recursively.  */
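/* Worked example (editor's note): given the ordered list 1 2 3 4 5 6 7
   with no cost table, the split point is the middle entry 4, giving

		4
	      /   \
	    1-3   5-7

   and each half is then split again recursively, so the emitted
   decision tree needs at most about log2(N) comparisons per lookup.  */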
static void
balance_case_nodes (head, parent)
     case_node_ptr *head;
     case_node_ptr parent;
{
  register case_node_ptr np;

  np = *head;
  if (np)
    {
      int cost = 0;
      int i = 0;
      int ranges = 0;
      register case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on branch.  Also count the ranges.  */
      while (np)
	{
	  if (!tree_int_cst_equal (np->low, np->high))
	    {
	      ranges++;
	      if (use_cost_table)
		cost += cost_table[TREE_INT_CST_LOW (np->high)];
	    }
	  if (use_cost_table)
	    cost += cost_table[TREE_INT_CST_LOW (np->low)];
	  i++;
	  np = np->right;
	}

      if (i > 2)
	{
	  /* Split this list if it is long enough for that to help.  */
	  npp = head;
	  left = *npp;
	  if (use_cost_table)
	    {
	      /* Find the place in the list that bisects the list's total cost,
		 Here I gets half the total cost.  */
	      int n_moved = 0;
	      i = (cost + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
		  i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		  n_moved += 1;
		}
	      if (n_moved == 0)
		{
		  /* Leave this branch lopsided, but optimize left-hand
		     side and fill in `parent' fields for right-hand side.  */
		  np = *head;
		  np->parent = parent;
		  balance_case_nodes (&np->left, np);
		  for (; np->right; np = np->right)
		    np->right->parent = np;
		  return;
		}
	    }
	  /* If there are just three nodes, split at the middle one.  */
	  else if (i == 3)
	    npp = &(*npp)->right;
	  else
	    {
	      /* Find the place in the list that bisects the list's total cost,
		 where ranges count as 2.
		 Here I gets half the total cost.  */
	      i = (i + ranges + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i--;
		  i--;
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		}
	    }
	  *head = np = *npp;
	  *npp = 0;
	  np->parent = parent;
	  np->left = left;

	  /* Optimize each of the two split parts.  */
	  balance_case_nodes (&np->left, np);
	  balance_case_nodes (&np->right, np);
	}
      else
	{
	  /* Else leave this branch as one level,
	     but fill in `parent' fields.  */
	  np = *head;
	  np->parent = parent;
	  for (; np->right; np = np->right)
	    np->right->parent = np;
	}
    }
}

/* Search the parent sections of the case node tree
   to see if a test for the lower bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node minus one that the current node is bounded at its lower
   span.  Thus the test would be redundant.  */
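/* Example (editor's note): if a parent node tested `<= 50' and we are
   in its right subtree looking at a node whose low bound is 51, the
   parent's test already guarantees index >= 51.  The search below for
   a parent whose high bound equals 50 (our low bound minus 1) detects
   exactly this, letting us skip the lower-bound compare entirely.  */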
static int
node_has_low_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree low_minus_one;
  case_node_ptr pnode;

  /* If the lower bound of this node is the lowest value in the index type,
     we need not test it.  */

  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
    return 1;

  /* If this node has a left branch, the value at the left must be less
     than that at this node, so it cannot be bounded at the bottom and
     we need not bother testing any further.  */

  if (node->left)
    return 0;

  low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
			       node->low, integer_one_node));

  /* If the subtraction above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value - 1.  */

  if (! tree_int_cst_lt (low_minus_one, node->low))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (low_minus_one, pnode->high))
      return 1;

  return 0;
}

/* Search the parent sections of the case node tree
   to see if a test for the upper bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node plus one that the current node is bounded at its upper
   span.  Thus the test would be redundant.  */

static int
node_has_high_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree high_plus_one;
  case_node_ptr pnode;

  /* If the upper bound of this node is the highest value in the type
     of the index expression, we need not test against it.  */

  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
    return 1;

  /* If this node has a right branch, the value at the right must be greater
     than that at this node, so it cannot be bounded at the top and
     we need not bother testing any further.  */

  if (node->right)
    return 0;

  high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
			       node->high, integer_one_node));

  /* If the addition above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value + 1.  */

  if (! tree_int_cst_lt (node->high, high_plus_one))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (high_plus_one, pnode->low))
      return 1;

  return 0;
}

/* Search the parent sections of the
   case node tree to see if both tests for the upper and lower
   bounds of NODE would be redundant.  */

static int
node_is_bounded (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  return (node_has_low_bound (node, index_type)
	  && node_has_high_bound (node, index_type));
}

/* Emit an unconditional jump to LABEL unless it would be dead code.  */

static void
emit_jump_if_reachable (label)
     rtx label;
{
  if (GET_CODE (get_last_insn ()) != BARRIER)
    emit_jump (label);
}

/* Emit step-by-step code to select a case for the value of INDEX.
   The thus generated decision tree follows the form of the
   case-node binary tree NODE, whose nodes represent test conditions.
   INDEX_TYPE is the type of the index of the switch.

   Care is taken to prune redundant tests from the decision tree
   by detecting any boundary conditions already checked by
   emitted rtx.  (See node_has_high_bound, node_has_low_bound
   and node_is_bounded, above.)

   Where the test conditions can be shown to be redundant we emit
   an unconditional jump to the target code.  As a further
   optimization, the subordinates of a tree node are examined to
   check for bounded nodes.  In this case conditional and/or
   unconditional jumps as a result of the boundary check for the
   current node are arranged to target the subordinates' associated
   code for out of bound conditions on the current node.

   We can assume that when control reaches the code generated here,
   the index value has already been compared with the parents
   of this node, and determined to be on the same side of each parent
   as this node is.  Thus, if this node tests for the value 51,
   and a parent tested for 52, we don't need to consider
   the possibility of a value greater than 51.  If another parent
   tests for the value 50, then this node need not test anything.  */
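/* For illustration (editor's sketch): with the balanced tree

	     4
	   /   \
	  2     6

   this function emits roughly

	if (index == 4) goto L4;
	if (index > 4)  goto test_right;
	if (index == 2) goto L2;  goto default;
     test_right:
	if (index == 6) goto L6;  goto default;

   pruning any compare that node_has_low_bound or node_has_high_bound
   proves redundant.  */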
static void
emit_case_nodes (index, node, default_label, index_type)
     rtx index;
     case_node_ptr node;
     rtx default_label;
     tree index_type;
{
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
  int unsignedp = TREE_UNSIGNED (index_type);
  typedef rtx rtx_function ();
  rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
  rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
  rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
  rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
  enum machine_mode mode = GET_MODE (index);

  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));

  else if (tree_int_cst_equal (node->low, node->high))
    {
      /* Node is single valued.  First see if the index expression matches
	 this node and then check our children, if any.  */

      do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
			label_rtx (node->code_label), unsignedp);

      if (node->right != 0 && node->left != 0)
	{
	  /* This node has children on both sides.
	     Dispatch to one side or the other
	     by comparing the index value with this node's value.
	     If one subtree is bounded, check that one first,
	     so we can avoid real branches in the tree.  */

	  if (node_is_bounded (node->right, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);

	      emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
	      emit_case_nodes (index, node->left, default_label, index_type);
	    }

	  else if (node_is_bounded (node->left, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     LT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }

	  else
	    {
	      /* Neither node is bounded.  First distinguish the two sides;
		 then emit the code for one side at a time.  */

	      tree test_label
		= build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

	      /* See if the value is on the right.  */
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));

	      /* Value must be on the left.
		 Handle the left-hand subtree.  */
	      emit_case_nodes (index, node->left, default_label, index_type);
	      /* If left-hand subtree does nothing,
		 go to default.  */
	      emit_jump_if_reachable (default_label);

	      /* Code branches here for the right-hand subtree.  */
	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
	  /* Here we have a right child but no left so we issue conditional
	     branch to default and process the right child.

	     Omit the conditional branch to default if it would avoid only
	     one right child; it costs too much space to save so little time.  */

	  if (node->right->right || node->right->left
	      || !tree_int_cst_equal (node->right->low, node->right->high))
	    {
	      if (!node_has_low_bound (node, index_type))
		{
		  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
				 LT, NULL_RTX, mode, unsignedp, 0);
		  emit_jump_insn ((*gen_blt_pat) (default_label));
		}

	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	  else
	    /* We cannot process node->right normally
	       since we haven't ruled out the numbers less than
	       this node's value.  So handle node->right explicitly.  */
	    do_jump_if_equal (index,
			      expand_expr (node->right->low, NULL_RTX,
					   VOIDmode, 0),
			      label_rtx (node->right->code_label), unsignedp);
	}

      else if (node->right == 0 && node->left != 0)
	{
	  /* Just one subtree, on the left.  */

#if 0 /* The following code and comment were formerly part
	 of the condition here, but they didn't work
	 and I don't understand what the idea was.  -- rms.  */
	  /* If our "most probable entry" is less probable
	     than the default label, emit a jump to
	     the default label using condition codes
	     already lying around.  With no right branch,
	     a branch-greater-than will get us to the default
	     label correctly.  */
	  if (use_cost_table
	      && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
	    ;
#endif /* 0 */
	  if (node->left->left || node->left->right
	      || !tree_int_cst_equal (node->left->low, node->left->high))
	    {
	      if (!node_has_high_bound (node, index_type))
		{
		  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
				 GT, NULL_RTX, mode, unsignedp, 0);
		  emit_jump_insn ((*gen_bgt_pat) (default_label));
		}

	      emit_case_nodes (index, node->left, default_label, index_type);
	    }
	  else
	    /* We cannot process node->left normally
	       since we haven't ruled out the numbers less than
	       this node's value.  So handle node->left explicitly.  */
	    do_jump_if_equal (index,
			      expand_expr (node->left->low, NULL_RTX,
					   VOIDmode, 0),
			      label_rtx (node->left->code_label), unsignedp);
	}
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
	 value, except that we do not start by testing whether this node
	 is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
	{
	  /* Node has subtrees on both sides.
	     If the right-hand subtree is bounded,
	     test for it first, since we can go straight there.
	     Otherwise, we need to make a branch in the control structure,
	     then handle the two subtrees.  */
	  tree test_label = 0;

	  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
					     VOIDmode, 0),
			 GT, NULL_RTX, mode, unsignedp, 0);

	  if (node_is_bounded (node->right, index_type))
	    /* Right hand node is fully bounded so we can eliminate any
	       testing and branch directly to the target code.  */
	    emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
	  else
	    {
	      /* Right hand node requires testing.
		 Branch to a label where we will handle it later.  */

	      test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
	      emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
			 GE, NULL_RTX, mode, unsignedp, 0);
	  emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));

	  /* Handle the left-hand subtree.  */
	  emit_case_nodes (index, node->left, default_label, index_type);

	  /* If right node had to be handled later, do that now.  */
	  if (test_label)
	    {
	      /* If the left-hand subtree fell through,
		 don't let it fall into the right-hand subtree.  */
	      emit_jump_if_reachable (default_label);

	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
	  /* Deal with values to the left of this node,
	     if they are possible.  */
	  if (!node_has_low_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
						 VOIDmode, 0),
			     LT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_blt_pat) (default_label));
	    }

	  /* Value belongs to this node or to the right-hand subtree.  */

	  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
					     VOIDmode, 0),
			 LE, NULL_RTX, mode, unsignedp, 0);
	  emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));

	  emit_case_nodes (index, node->right, default_label, index_type);
	}

      else if (node->right == 0 && node->left != 0)
	{
	  /* Deal with values to the right of this node,
	     if they are possible.  */
	  if (!node_has_high_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_bgt_pat) (default_label));
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
			 GE, NULL_RTX, mode, unsignedp, 0);
	  emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));

	  emit_case_nodes (index, node->left, default_label, index_type);
	}

      else
	{
	  /* Node has no children so we check low and high bounds to remove
	     redundant tests.  Only one of the bounds can exist,
	     since otherwise this node is bounded--a case tested already.  */

	  if (!node_has_high_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_bgt_pat) (default_label));
	    }

	  if (!node_has_low_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
						 VOIDmode, 0),
			     LT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_blt_pat) (default_label));
	    }

	  emit_jump (label_rtx (node->code_label));
	}
    }
}
/* These routines are used by the loop unrolling code.  They copy BLOCK trees
   so that the debugging info will be correct for the unrolled loop.  */

/* Indexed by block number, contains a pointer to the N'th block node.  */

static tree *block_vector;

void
find_loop_tree_blocks ()
{
  tree block = DECL_INITIAL (current_function_decl);

  /* The first block is for the function body, and does not have
     corresponding block notes.  Don't include it in the block vector.  */
  block = BLOCK_SUBBLOCKS (block);

  block_vector = identify_blocks (block, get_insns ());
}

void
unroll_block_trees ()
{
  tree block = DECL_INITIAL (current_function_decl);

  reorder_blocks (block_vector, block, get_insns ());
}
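/* Editor's note (illustrative): the loop unroller is expected to call
   these two routines as a pair around its work, along the lines of

	find_loop_tree_blocks ();
	... unroll loops, copying insns and their block notes ...
	unroll_block_trees ();

   so that block_vector is built before any block notes are duplicated
   and the BLOCK tree is reordered to match afterwards.  */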