1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 1988, 1989, 1992, 1993 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
21 /* This file handles the generation of rtl code from tree structure
22 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
23 It also creates the rtl expressions for parameters and auto variables
24 and has full responsibility for allocating stack slots.
26 The functions whose names start with `expand_' are called by the
27 parser to generate RTL instructions for various kinds of constructs.
29 Some control and binding constructs require calling several such
30 functions at different times. For example, a simple if-then
31 is expanded by calling `expand_start_cond' (with the condition-expression
32 as argument) before parsing the then-clause and calling `expand_end_cond'
33 after parsing the then-clause. */
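/* Illustrative sketch, not part of GNU CC: the call sequence a
   hypothetical front end might use for `if (COND) STMT;'.  The helper
   example_expand_stmt is an assumption, standing for whatever the
   front end does to expand a statement.  */
#if 0
static void
example_expand_if_then (cond, then_stmt)
     tree cond, then_stmt;
{
  expand_start_cond (cond, 0);		/* emit the test of COND */
  example_expand_stmt (then_stmt);	/* hypothetical: expand the then-clause */
  expand_end_cond ();			/* emit the label reached when COND is false */
}
#endif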
44 #include "insn-flags.h"
45 #include "insn-config.h"
46 #include "insn-codes.h"
48 #include "hard-reg-set.h"
55 #include "bc-typecd.h"
56 #include "bc-opcode.h"
60 #define obstack_chunk_alloc xmalloc
61 #define obstack_chunk_free free
62 struct obstack stmt_obstack;
64 /* Filename and line number of last line-number note,
65 whether we actually emitted it or not. */
69 /* Nonzero if within a ({...}) grouping, in which case we must
70 always compute a value for each expr-stmt in case it is the last one. */
72 int expr_stmts_for_value;
74 /* Each time we expand an expression-statement,
75 record the expr's type and its RTL value here. */
77 static tree last_expr_type;
78 static rtx last_expr_value;
80 /* Each time we expand the end of a binding contour (in `expand_end_bindings')
81 and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
82 This is used by the `remember_end_note' function to record the endpoint
83 of each generated block in its associated BLOCK node. */
85 static rtx last_block_end_note;
87 /* Number of binding contours started so far in this function. */
89 int block_start_count;
91 /* Nonzero if function being compiled needs to
92 return the address of where it has put a structure value. */
94 extern int current_function_returns_pcc_struct;
96 /* Label that will go on parm cleanup code, if any.
97 Jumping to this label runs cleanup code for parameters, if
98 such code must be run. Following this code is the logical return label. */
100 extern rtx cleanup_label;
102 /* Label that will go on function epilogue.
103 Jumping to this label serves as a "return" instruction
104 on machines which require execution of the epilogue on all returns. */
106 extern rtx return_label;
108 /* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
109 So we can mark them all live at the end of the function, if nonopt. */
110 extern rtx save_expr_regs;
112 /* Offset to end of allocated area of stack frame.
113 If stack grows down, this is the address of the last stack slot allocated.
114 If stack grows up, this is the address for the next slot. */
115 extern int frame_offset;
117 /* Label to jump back to for tail recursion, or 0 if we have
118 not yet needed one for this function. */
119 extern rtx tail_recursion_label;
121 /* Place after which to insert the tail_recursion_label if we need one. */
122 extern rtx tail_recursion_reentry;
124 /* Location at which to save the argument pointer if it will need to be
125 referenced. There are two cases where this is done: if nonlocal gotos
126 exist, or if variables stored at an offset from the argument pointer will be
127 needed by inner routines. */
129 extern rtx arg_pointer_save_area;
131 /* Chain of all RTL_EXPRs that have insns in them. */
132 extern tree rtl_expr_chain;
134 #if 0 /* Turned off because 0 seems to work just as well. */
135 /* Cleanup lists are required for binding levels regardless of whether
136 that binding level has cleanups or not. This node serves as the
137 cleanup list whenever an empty list is required. */
138 static tree empty_cleanup_list;
141 /* Functions and data structures for expanding case statements. */
143 /* Case label structure, used to hold info on labels within case
144 statements. We handle "range" labels; for a single-value label
145 as in C, the high and low limits are the same.
147 A chain of case nodes is initially maintained via the RIGHT fields
148 in the nodes. Nodes with higher case values are later in the list.
150 Switch statements can be output in one of two forms. A branch table
151 is used if there are more than a few labels and the labels are dense
152 within the range between the smallest and largest case value. If a
153 branch table is used, no further manipulations are done with the case node chain.
156 The alternative to the use of a branch table is to generate a series
157 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
158 and PARENT fields to hold a binary tree. Initially the tree is
159 totally unbalanced, with everything on the right. We balance the tree
160 with nodes on the left having lower case values than the parent
161 and nodes on the right having higher values. We then output the tree in order. */

struct case_node
{
166 struct case_node *left; /* Left son in binary tree */
167 struct case_node *right; /* Right son in binary tree; also node chain */
168 struct case_node *parent; /* Parent of node in binary tree */
169 tree low; /* Lowest index value for this label */
170 tree high; /* Highest index value for this label */
171 tree code_label; /* Label to jump to when node matches */
};
174 typedef struct case_node case_node;
175 typedef struct case_node *case_node_ptr;
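/* Illustrative sketch, not part of GNU CC: before balance_case_nodes
   runs, the case nodes form a simple chain through the RIGHT field,
   sorted by value, so a hypothetical helper could walk it like this.  */
#if 0
static int
example_count_case_nodes (n)
     case_node_ptr n;
{
  int count = 0;

  for (; n; n = n->right)	/* RIGHT doubles as the chain link */
    count++;
  return count;
}
#endif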
177 /* These are used by estimate_case_costs and balance_case_nodes. */
179 /* This must be a signed type, and non-ANSI compilers lack signed char. */
180 static short *cost_table;
181 static int use_cost_table;
183 static int estimate_case_costs ();
184 static void balance_case_nodes ();
185 static void emit_case_nodes ();
186 static void group_case_nodes ();
187 static void emit_jump_if_reachable ();
189 static int warn_if_unused_value ();
190 static void expand_goto_internal ();
191 static void bc_expand_goto_internal ();
192 static int expand_fixup ();
193 static void bc_expand_fixup ();
195 static void bc_fixup_gotos ();
196 void free_temp_slots ();
197 static void expand_cleanups ();
198 static void expand_null_return_1 ();
199 static int tail_recursion_args ();
200 static void do_jump_if_equal ();
201 int bc_expand_exit_loop_if_false ();
202 void bc_expand_start_cond ();
203 void bc_expand_end_cond ();
204 void bc_expand_start_else ();
205 void bc_expand_end_bindings ();
206 void bc_expand_start_case ();
207 void bc_check_for_full_enumeration_handling ();
208 void bc_expand_end_case ();
209 void bc_expand_decl ();
211 extern rtx bc_allocate_local ();
212 extern rtx bc_allocate_variable_array ();
214 /* Stack of control and binding constructs we are currently inside.
216 These constructs begin when you call `expand_start_WHATEVER'
217 and end when you call `expand_end_WHATEVER'. This stack records
218 info about how the construct began that tells the end-function
219 what to do. It also may provide information about the construct
220 to alter the behavior of other constructs within the body.
221 For example, they may affect the behavior of C `break' and `continue'.
223 Each construct gets one `struct nesting' object.
224 All of these objects are chained through the `all' field.
225 `nesting_stack' points to the first object (innermost construct).
226 The position of an entry on `nesting_stack' is in its `depth' field.
228 Each type of construct has its own individual stack.
229 For example, loops have `loop_stack'. Each object points to the
230 next object of the same type through the `next' field.
232 Some constructs are visible to `break' exit-statements and others
233 are not. Which constructs are visible depends on the language.
234 Therefore, the data structure allows each construct to be visible
235 or not, according to the args given when the construct is started.
236 The construct is visible if the `exit_label' field is non-null.
237 In that case, the value should be a CODE_LABEL rtx. */
242 struct nesting *next;
247 /* For conds (if-then and if-then-else statements). */
250 /* Label for the end of the if construct.
251 There is none if EXITFLAG was not set
252 and no `else' has been seen yet. */
254 /* Label for the end of this alternative.
255 This may be the end of the if or the next else/elseif. */
261 /* Label at the top of the loop; place to loop back to. */
263 /* Label at the end of the whole construct. */
265 /* Label for `continue' statement to jump to;
266 this is in front of the stepper of the loop. */
269 /* For variable binding contours. */
272 /* Sequence number of this binding contour within the function,
273 in order of entry. */
274 int block_start_count;
275 /* Nonzero => value to restore stack to on exit. Complemented by
276 bc_stack_level (see below) when generating bytecodes. */
278 /* The NOTE that starts this contour.
279 Used by expand_goto to check whether the destination
280 is within each contour or not. */
282 /* Innermost containing binding contour that has a stack level. */
283 struct nesting *innermost_stack_block;
284 /* List of cleanups to be run on exit from this contour.
285 This is a list of expressions to be evaluated.
286 The TREE_PURPOSE of each link is the ..._DECL node
287 which the cleanup pertains to. */
289 /* List of cleanup-lists of blocks containing this block,
290 as they were at the locus where this block appears.
291 There is an element for each containing block,
292 ordered innermost containing block first.
293 The tail of this list can be 0 (was empty_cleanup_list),
294 if all remaining elements would be empty lists.
295 The element's TREE_VALUE is the cleanup-list of that block,
296 which may be null. */
298 /* Chain of labels defined inside this binding contour.
299 For contours that have stack levels or cleanups. */
300 struct label_chain *label_chain;
301 /* Number of function calls seen, as of start of this block. */
302 int function_call_count;
303 /* Bytecode specific: stack level to restore stack to on exit. */
306 /* For switch (C) or case (Pascal) statements,
307 and also for dummies (see `expand_start_case_dummy'). */
310 /* The insn after which the case dispatch should finally
311 be emitted. Zero for a dummy. */
313 /* For bytecodes, the case table is in-lined right in the code.
314 A label is needed for skipping over this block. It is only
315 used when generating bytecodes. */
317 /* A list of case labels, kept in ascending order by value
318 as the list is built.
319 During expand_end_case, this list may be rearranged into a
320 nearly balanced binary tree. */
321 struct case_node *case_list;
322 /* Label to jump to if no case matches. */
324 /* The expression to be dispatched on. */
326 /* Type that INDEX_EXPR should be converted to. */
328 /* Number of range exprs in case statement. */
330 /* Name of this kind of statement, for warnings. */
332 /* Nonzero if a case label has been seen in this case stmt. */
335 /* For exception contours. */
338 /* List of exceptions raised. This is a TREE_LIST
339 of whatever you want. */
341 /* List of exceptions caught. This is also a TREE_LIST
342 of whatever you want. As a special case, it has the
343 value `void_type_node' if it handles default exceptions. */
346 /* First insn of TRY block, in case resumptive model is needed. */
348 /* Label for the catch clauses. */
350 /* Label for unhandled exceptions. */
352 /* Label at the end of whole construct. */
354 /* Label which "escapes" the exception construct.
355 Like EXIT_LABEL for BREAK construct, but for exceptions. */
361 /* Chain of all pending binding contours. */
362 struct nesting *block_stack;
364 /* If any new stacks are added here, add them to POPSTACK too. */
366 /* Chain of all pending binding contours that restore stack levels or have cleanups. */
368 struct nesting *stack_block_stack;
370 /* Chain of all pending conditional statements. */
371 struct nesting *cond_stack;
373 /* Chain of all pending loops. */
374 struct nesting *loop_stack;
376 /* Chain of all pending case or switch statements. */
377 struct nesting *case_stack;
379 /* Chain of all pending exception contours. */
380 struct nesting *except_stack;
382 /* Separate chain including all of the above,
383 chained through the `all' field. */
384 struct nesting *nesting_stack;
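/* Illustrative sketch, not part of GNU CC: as described above, a
   construct is visible to `break'-like exit statements exactly when
   its exit_label is non-null, so a hypothetical helper could find the
   innermost exitable construct by scanning nesting_stack.  */
#if 0
static struct nesting *
example_innermost_exitable ()
{
  struct nesting *n;

  for (n = nesting_stack; n; n = n->all)	/* innermost construct first */
    if (n->exit_label != 0)
      return n;
  return 0;
}
#endif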
386 /* Number of entries on nesting_stack now. */
389 /* Allocate and return a new `struct nesting'. */
391 #define ALLOC_NESTING() \
392 (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))
394 /* Pop the nesting stack element by element until we pop off
395 the element which is at the top of STACK.
396 Update all the other stacks, popping off elements from them
397 as we pop them from nesting_stack. */
399 #define POPSTACK(STACK) \
400 do { struct nesting *target = STACK; \
401 struct nesting *this; \
402 do { this = nesting_stack; \
403 if (loop_stack == this) \
404 loop_stack = loop_stack->next; \
405 if (cond_stack == this) \
406 cond_stack = cond_stack->next; \
407 if (block_stack == this) \
408 block_stack = block_stack->next; \
409 if (stack_block_stack == this) \
410 stack_block_stack = stack_block_stack->next; \
411 if (case_stack == this) \
412 case_stack = case_stack->next; \
413 if (except_stack == this) \
414 except_stack = except_stack->next; \
415 nesting_depth = nesting_stack->depth - 1; \
416 nesting_stack = this->all; \
417 obstack_free (&stmt_obstack, this); } \
418 while (this != target); } while (0)
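/* Illustrative sketch, not part of GNU CC: roughly how an
   expand_end_WHATEVER function finishes its construct and then uses
   POPSTACK, which also unlinks anything still nested inside it.  */
#if 0
static void
example_end_innermost_cond ()
{
  struct nesting *thiscond = cond_stack;

  do_pending_stack_adjust ();
  if (thiscond->data.cond.endif_label != 0)
    emit_label (thiscond->data.cond.endif_label);
  POPSTACK (cond_stack);
}
#endif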
420 /* In some cases it is impossible to generate code for a forward goto
421 until the label definition is seen. This happens when it may be necessary
422 for the goto to reset the stack pointer: we don't yet know how to do that.
423 So expand_goto puts an entry on this fixup list.
424 Each time a binding contour that resets the stack is exited, we check each fixup.
426 If the target label has now been defined, we can insert the proper code. */
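/* Illustrative sketch, not part of GNU CC: C source whose forward goto
   needs a fixup.  The inner block has a dynamic stack level because of
   the variable-sized array (a GNU C extension), so the goto must reset
   the stack pointer, but its target label has not been seen yet when
   expand_goto runs.  */
#if 0
void
example_needs_fixup (n)
     int n;
{
  {
    char buf[n];	/* this contour restores a stack level on exit */

    if (n == 1)
      goto done;	/* forward goto: expand_fixup records it */
    buf[0] = 0;
  }
 done:
  ;
}
#endif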
430 /* Points to following fixup. */
431 struct goto_fixup *next;
432 /* Points to the insn before the jump insn.
433 If more code must be inserted, it goes after this insn. */
435 /* The LABEL_DECL that this jump is jumping to, or 0
436 for break, continue or return. */
438 /* The BLOCK for the place where this goto was found. */
440 /* The CODE_LABEL rtx that this is jumping to. */
442 /* Number of binding contours started in current function
443 before the label reference. */
444 int block_start_count;
445 /* The outermost stack level that should be restored for this jump.
446 Each time a binding contour that resets the stack is exited,
447 if the target label is *not* yet defined, this slot is updated. */
449 /* List of lists of cleanup expressions to be run by this goto.
450 There is one element for each block that this goto is within.
451 The tail of this list can be 0 (was empty_cleanup_list),
452 if all remaining elements would be empty.
453 The TREE_VALUE contains the cleanup list of that block as of the
454 time this goto was seen.
455 The TREE_ADDRESSABLE flag is 1 for a block that has been exited. */
456 tree cleanup_list_list;
458 /* Bytecode specific members follow */
460 /* The label that this jump is jumping to, or 0 for break, continue or return */
462 struct bc_label *bc_target;
464 /* The label we use for the fixup patch */
465 struct bc_label *label;
467 /* True (non-0) if fixup has been handled */
470 /* Like stack_level above, except refers to the interpreter stack */
474 static struct goto_fixup *goto_fixup_chain;
476 /* Within any binding contour that must restore a stack level,
477 all labels are recorded with a chain of these structures. */
481 /* Points to following fixup. */
482 struct label_chain *next;
489 gcc_obstack_init (&stmt_obstack);
491 empty_cleanup_list = build_tree_list (NULL_TREE, NULL_TREE);
496 init_stmt_for_function ()
498 /* We are not currently within any block, conditional, loop or case. */
506 block_start_count = 0;
508 /* No gotos have been expanded yet. */
509 goto_fixup_chain = 0;
511 /* We are not processing a ({...}) grouping. */
512 expr_stmts_for_value = 0;
520 p->block_stack = block_stack;
521 p->stack_block_stack = stack_block_stack;
522 p->cond_stack = cond_stack;
523 p->loop_stack = loop_stack;
524 p->case_stack = case_stack;
525 p->nesting_stack = nesting_stack;
526 p->nesting_depth = nesting_depth;
527 p->block_start_count = block_start_count;
528 p->last_expr_type = last_expr_type;
529 p->last_expr_value = last_expr_value;
530 p->expr_stmts_for_value = expr_stmts_for_value;
531 p->emit_filename = emit_filename;
532 p->emit_lineno = emit_lineno;
533 p->goto_fixup_chain = goto_fixup_chain;
537 restore_stmt_status (p)
540 block_stack = p->block_stack;
541 stack_block_stack = p->stack_block_stack;
542 cond_stack = p->cond_stack;
543 loop_stack = p->loop_stack;
544 case_stack = p->case_stack;
545 nesting_stack = p->nesting_stack;
546 nesting_depth = p->nesting_depth;
547 block_start_count = p->block_start_count;
548 last_expr_type = p->last_expr_type;
549 last_expr_value = p->last_expr_value;
550 expr_stmts_for_value = p->expr_stmts_for_value;
551 emit_filename = p->emit_filename;
552 emit_lineno = p->emit_lineno;
553 goto_fixup_chain = p->goto_fixup_chain;
556 /* Emit a no-op instruction. */
563 if (!output_bytecode)
565 last_insn = get_last_insn ();
567 && (GET_CODE (last_insn) == CODE_LABEL
568 || prev_real_insn (last_insn) == 0))
569 emit_insn (gen_nop ());
573 /* Return the rtx-label that corresponds to a LABEL_DECL,
574 creating it if necessary. */
580 if (TREE_CODE (label) != LABEL_DECL)
583 if (DECL_RTL (label))
584 return DECL_RTL (label);
586 return DECL_RTL (label) = gen_label_rtx ();
589 /* Add an unconditional jump to LABEL as the next sequential instruction. */
595 do_pending_stack_adjust ();
596 emit_jump_insn (gen_jump (label));
600 /* Emit code to jump to the address
601 specified by the pointer expression EXP. */
604 expand_computed_goto (exp)
609 bc_expand_expr (exp);
610 bc_emit_instruction (jumpP);
614 rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
616 emit_indirect_jump (x);
620 /* Handle goto statements and the labels that they can go to. */
622 /* Specify the location in the RTL code of a label LABEL,
623 which is a LABEL_DECL tree node.
625 This is used for the kind of label that the user can jump to with a
626 goto statement, and for alternatives of a switch or case statement.
627 RTL labels generated for loops and conditionals don't go through here;
628 they are generated directly at the RTL level, by other functions below.
630 Note that this has nothing to do with defining label *names*.
631 Languages vary in how they do that and what that even means. */
637 struct label_chain *p;
641 if (! DECL_RTL (label))
642 DECL_RTL (label) = bc_gen_rtx ((char *) 0, 0, bc_get_bytecode_label ());
643 if (! bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (DECL_RTL (label))))
644 error ("multiply defined label");
648 do_pending_stack_adjust ();
649 emit_label (label_rtx (label));
650 if (DECL_NAME (label))
651 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
653 if (stack_block_stack != 0)
655 p = (struct label_chain *) oballoc (sizeof (struct label_chain));
656 p->next = stack_block_stack->data.block.label_chain;
657 stack_block_stack->data.block.label_chain = p;
662 /* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
663 from nested functions. */
666 declare_nonlocal_label (label)
669 nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
670 LABEL_PRESERVE_P (label_rtx (label)) = 1;
671 if (nonlocal_goto_handler_slot == 0)
673 nonlocal_goto_handler_slot
674 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
675 emit_stack_save (SAVE_NONLOCAL,
676 &nonlocal_goto_stack_level,
677 PREV_INSN (tail_recursion_reentry));
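/* Illustrative sketch, not part of GNU CC: GNU C source for which the
   front end calls declare_nonlocal_label on `fail', and for which the
   nested function's goto takes the nonlocal path through expand_goto
   below.  */
#if 0
static int
example_outer (n)
     int n;
{
  __label__ fail;	/* makes `fail' reachable from the nested function */
  int inner (int i) { if (i < 0) goto fail; return i; }

  return inner (n);
 fail:
  return -1;
}
#endif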
681 /* Generate RTL code for a `goto' statement with target label LABEL.
682 LABEL should be a LABEL_DECL tree node that was or will later be
683 defined with `expand_label'. */
693 expand_goto_internal (label, label_rtx (label), NULL_RTX);
697 /* Check for a nonlocal goto to a containing function. */
698 context = decl_function_context (label);
699 if (context != 0 && context != current_function_decl)
701 struct function *p = find_function_data (context);
702 rtx label_ref = gen_rtx (LABEL_REF, Pmode, label_rtx (label));
705 p->has_nonlocal_label = 1;
706 current_function_has_nonlocal_goto = 1;
707 LABEL_REF_NONLOCAL_P (label_ref) = 1;
709 /* Copy the rtl for the slots so that they won't be shared in
710 case the virtual stack vars register gets instantiated differently
711 in the parent than in the child. */
713 #if HAVE_nonlocal_goto
714 if (HAVE_nonlocal_goto)
715 emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
716 copy_rtx (p->nonlocal_goto_handler_slot),
717 copy_rtx (p->nonlocal_goto_stack_level),
724 /* Restore frame pointer for containing function.
725 This sets the actual hard register used for the frame pointer
726 to the location of the function's incoming static chain info.
727 The non-local goto handler will then adjust it to contain the
728 proper value and reload the argument pointer, if needed. */
729 emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));
731 /* We have now loaded the frame pointer hardware register with
732 the address that corresponds to the start of the virtual
733 stack vars. So replace virtual_stack_vars_rtx in all
734 addresses we use with hard_frame_pointer_rtx. */
736 /* Get addr of containing function's current nonlocal goto handler,
737 which will do any cleanups and then jump to the label. */
738 addr = copy_rtx (p->nonlocal_goto_handler_slot);
739 temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
740 hard_frame_pointer_rtx));
742 /* Restore the stack pointer. Note this uses fp just restored. */
743 addr = p->nonlocal_goto_stack_level;
745 addr = replace_rtx (copy_rtx (addr),
746 virtual_stack_vars_rtx,
747 hard_frame_pointer_rtx);
749 emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);
751 /* Put in the static chain register the nonlocal label address. */
752 emit_move_insn (static_chain_rtx, label_ref);
753 /* USE of hard_frame_pointer_rtx added for consistency; not clear if really needed. */
755 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
756 emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
757 emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
758 emit_indirect_jump (temp);
762 expand_goto_internal (label, label_rtx (label), NULL_RTX);
765 /* Generate RTL code for a `goto' statement whose target label is BODY
766 (a LABEL_DECL tree node, or 0). LABEL should be the CODE_LABEL rtx to jump to.
767 LAST_INSN, if non-0, is the rtx we should consider as the last
768 insn emitted (for the purposes of cleaning up a return). */
771 expand_goto_internal (body, label, last_insn)
776 struct nesting *block;
779 /* NOTICE! If a bytecode instruction other than `jump' is needed,
780 then the caller has to call bc_expand_goto_internal()
781 directly. This is rather an exceptional case, and there aren't
782 that many places where this is necessary. */
785 bc_expand_goto_internal (jump, BYTECODE_BC_LABEL (label), body);
789 if (GET_CODE (label) != CODE_LABEL)
792 /* If label has already been defined, we can tell now
793 whether and how we must alter the stack level. */
795 if (PREV_INSN (label) != 0)
797 /* Find the innermost pending block that contains the label.
798 (Check containment by comparing insn-uids.)
799 Then restore the outermost stack level within that block,
800 and do cleanups of all blocks contained in it. */
801 for (block = block_stack; block; block = block->next)
803 if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
805 if (block->data.block.stack_level != 0)
806 stack_level = block->data.block.stack_level;
807 /* Execute the cleanups for blocks we are exiting. */
808 if (block->data.block.cleanups != 0)
810 expand_cleanups (block->data.block.cleanups, NULL_TREE);
811 do_pending_stack_adjust ();
817 /* Ensure stack adjust isn't done by emit_jump, as this would clobber
818 the stack pointer. This one should be deleted as dead by flow. */
819 clear_pending_stack_adjust ();
820 do_pending_stack_adjust ();
821 emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
824 if (body != 0 && DECL_TOO_LATE (body))
825 error ("jump to `%s' invalidly jumps into binding contour",
826 IDENTIFIER_POINTER (DECL_NAME (body)));
828 /* Label not yet defined: may need to put this goto
829 on the fixup list. */
830 else if (! expand_fixup (body, label, last_insn))
832 /* No fixup needed. Record that the label is the target
833 of at least one goto that has no fixup. */
835 TREE_ADDRESSABLE (body) = 1;
841 /* Generate a jump with OPCODE to the given bytecode LABEL which is
842 found within BODY. */
844 bc_expand_goto_internal (opcode, label, body)
845 enum bytecode_opcode opcode;
846 struct bc_label *label;
849 struct nesting *block;
850 int stack_level = -1;
852 /* If the label is defined, adjust the stack as necessary.
853 If it's not defined, we have to push the reference on the fixup list. */
859 /* Find the innermost pending block that contains the label.
860 (Check containment by comparing bytecode uids.) Then restore the
861 outermost stack level within that block. */
863 for (block = block_stack; block; block = block->next)
865 if (BYTECODE_BC_LABEL (block->data.block.first_insn)->uid < label->uid)
867 if (block->data.block.bc_stack_level)
868 stack_level = block->data.block.bc_stack_level;
870 /* Execute the cleanups for blocks we are exiting. */
871 if (block->data.block.cleanups != 0)
873 expand_cleanups (block->data.block.cleanups, NULL_TREE);
874 do_pending_stack_adjust ();
878 /* Restore the stack level. If we need to adjust the stack, we
879 must do so after the jump, since the jump may depend on
880 what's on the stack. Thus, any stack-modifying conditional
881 jumps (these are the only ones that rely on what's on the
882 stack) go into the fixup list. */
885 && stack_depth != stack_level
888 bc_expand_fixup (opcode, label, stack_level);
891 if (stack_level >= 0)
892 bc_adjust_stack (stack_depth - stack_level);
894 if (body && DECL_BIT_FIELD (body))
895 error ("jump to `%s' invalidly jumps into binding contour",
896 IDENTIFIER_POINTER (DECL_NAME (body)));
898 /* Emit immediate jump */
899 bc_emit_bytecode (opcode);
900 bc_emit_bytecode_labelref (label);
902 #ifdef DEBUG_PRINT_CODE
903 fputc ('\n', stderr);
908 /* Put goto in the fixup list */
909 bc_expand_fixup (opcode, label, stack_level);
912 /* Generate if necessary a fixup for a goto
913 whose target label in tree structure (if any) is TREE_LABEL
914 and whose target in rtl is RTL_LABEL.
916 If LAST_INSN is nonzero, we pretend that the jump appears
917 after insn LAST_INSN instead of at the current point in the insn stream.
919 The fixup will be used later to insert insns just before the goto.
920 Those insns will restore the stack level as appropriate for the
921 target label, and will (in the case of C++) also invoke any object
922 destructors which have to be invoked when we exit the scopes which
923 are exited by the goto.
925 Value is nonzero if a fixup is made. */
928 expand_fixup (tree_label, rtl_label, last_insn)
933 struct nesting *block, *end_block;
935 /* See if we can recognize which block the label will be output in.
936 This is possible in some very common cases.
937 If we succeed, set END_BLOCK to that block.
938 Otherwise, set it to 0. */
941 && (rtl_label == cond_stack->data.cond.endif_label
942 || rtl_label == cond_stack->data.cond.next_label))
943 end_block = cond_stack;
944 /* If we are in a loop, recognize certain labels which
945 are likely targets. This reduces the number of fixups
946 we need to create. */
948 && (rtl_label == loop_stack->data.loop.start_label
949 || rtl_label == loop_stack->data.loop.end_label
950 || rtl_label == loop_stack->data.loop.continue_label))
951 end_block = loop_stack;
955 /* Now set END_BLOCK to the binding level to which we will return. */
959 struct nesting *next_block = end_block->all;
962 /* First see if the END_BLOCK is inside the innermost binding level.
963 If so, then no cleanups or stack levels are relevant. */
964 while (next_block && next_block != block)
965 next_block = next_block->all;
970 /* Otherwise, set END_BLOCK to the innermost binding level
971 which is outside the relevant control-structure nesting. */
972 next_block = block_stack->next;
973 for (block = block_stack; block != end_block; block = block->all)
974 if (block == next_block)
975 next_block = next_block->next;
976 end_block = next_block;
979 /* Does any containing block have a stack level or cleanups?
980 If not, no fixup is needed, and that is the normal case
981 (the only case, for standard C). */
982 for (block = block_stack; block != end_block; block = block->next)
983 if (block->data.block.stack_level != 0
984 || block->data.block.cleanups != 0)
987 if (block != end_block)
989 /* Ok, a fixup is needed. Add a fixup to the list of such. */
990 struct goto_fixup *fixup
991 = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
992 /* In case an old stack level is restored, make sure that comes
993 after any pending stack adjust. */
994 /* ?? If the fixup isn't to come at the present position,
995 doing the stack adjust here isn't useful. Doing it with our
996 settings at that location isn't useful either. Let's hope someone does it right. */
999 do_pending_stack_adjust ();
1000 fixup->target = tree_label;
1001 fixup->target_rtl = rtl_label;
1003 /* Create a BLOCK node and a corresponding matched set of
1004 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
1005 this point. The notes will encapsulate any and all fixup
1006 code which we might later insert at this point in the insn
1007 stream. Also, the BLOCK node will be the parent (i.e. the
1008 `SUPERBLOCK') of any other BLOCK nodes which we might create
1009 later on when we are expanding the fixup code. */
1012 register rtx original_before_jump
1013 = last_insn ? last_insn : get_last_insn ();
1017 fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
1018 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
1019 fixup->context = poplevel (1, 0, 0); /* Create the BLOCK node now! */
1021 emit_insns_after (fixup->before_jump, original_before_jump);
1024 fixup->block_start_count = block_start_count;
1025 fixup->stack_level = 0;
1026 fixup->cleanup_list_list
1027 = (((block->data.block.outer_cleanups
1029 && block->data.block.outer_cleanups != empty_cleanup_list
1032 || block->data.block.cleanups)
1033 ? tree_cons (NULL_TREE, block->data.block.cleanups,
1034 block->data.block.outer_cleanups)
1036 fixup->next = goto_fixup_chain;
1037 goto_fixup_chain = fixup;
1044 /* Generate bytecode jump with OPCODE to a fixup routine that links to LABEL.
1045 Make the fixup restore the stack level to STACK_LEVEL. */
1048 bc_expand_fixup (opcode, label, stack_level)
1049 enum bytecode_opcode opcode;
1050 struct bc_label *label;
1053 struct goto_fixup *fixup
1054 = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
1056 fixup->label = bc_get_bytecode_label ();
1057 fixup->bc_target = label;
1058 fixup->bc_stack_level = stack_level;
1059 fixup->bc_handled = FALSE;
1061 fixup->next = goto_fixup_chain;
1062 goto_fixup_chain = fixup;
1064 /* Insert a jump to the fixup code */
1065 bc_emit_bytecode (opcode);
1066 bc_emit_bytecode_labelref (fixup->label);
1068 #ifdef DEBUG_PRINT_CODE
1069 fputc ('\n', stderr);
1074 /* When exiting a binding contour, process all pending gotos requiring fixups.
1075 THISBLOCK is the structure that describes the block being exited.
1076 STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
1077 CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
1078 FIRST_INSN is the insn that began this contour.
1080 Gotos that jump out of this contour must restore the
1081 stack level and do the cleanups before actually jumping.
1083 DONT_JUMP_IN nonzero means report error if there is a jump into this
1084 contour from before the beginning of the contour.
1085 This is also done if STACK_LEVEL is nonzero. */
1088 fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
1089 struct nesting *thisblock;
1095 register struct goto_fixup *f, *prev;
1097 if (output_bytecode)
1099 bc_fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in);
1103 /* F is the fixup we are considering; PREV is the previous one. */
1104 /* We run this loop in two passes so that cleanups of exited blocks
1105 are run first, and blocks that are exited are marked so afterwards. */
1108 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1110 /* Test for a fixup that is inactive because it is already handled. */
1111 if (f->before_jump == 0)
1113 /* Delete inactive fixup from the chain, if that is easy to do. */
1115 prev->next = f->next;
1117 /* Has this fixup's target label been defined?
1118 If so, we can finalize it. */
1119 else if (PREV_INSN (f->target_rtl) != 0)
1121 register rtx cleanup_insns;
1123 /* Get the first non-label after the label
1124 this goto jumps to. If that's before this scope begins,
1125 we don't have a jump into the scope. */
1126 rtx after_label = f->target_rtl;
1127 while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
1128 after_label = NEXT_INSN (after_label);
1130 /* If this fixup jumped into this contour from before the beginning
1131 of this contour, report an error. */
1132 /* ??? Bug: this does not detect jumping in through intermediate
1133 blocks that have stack levels or cleanups.
1134 It detects only a problem with the innermost block
1135 around the label. */
1137 && (dont_jump_in || stack_level || cleanup_list)
1138 /* If AFTER_LABEL is 0, it means the jump goes to the end
1139 of the rtl, which means it jumps into this scope. */
1140 && (after_label == 0
1141 || INSN_UID (first_insn) < INSN_UID (after_label))
1142 && INSN_UID (first_insn) > INSN_UID (f->before_jump)
1143 && ! DECL_REGISTER (f->target))
1145 error_with_decl (f->target,
1146 "label `%s' used before containing binding contour");
1147 /* Prevent multiple errors for one label. */
1148 DECL_REGISTER (f->target) = 1;
1151 /* We will expand the cleanups into a sequence of their own and
1152 then later on we will attach this new sequence to the insn
1153 stream just ahead of the actual jump insn. */
1157 /* Temporarily restore the lexical context where we will
1158 logically be inserting the fixup code. We do this for the
1159 sake of getting the debugging information right. */
1162 set_block (f->context);
1164 /* Expand the cleanups for blocks this jump exits. */
1165 if (f->cleanup_list_list)
1168 for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
1169 /* Marked elements correspond to blocks that have been closed.
1170 Do their cleanups. */
1171 if (TREE_ADDRESSABLE (lists)
1172 && TREE_VALUE (lists) != 0)
1174 expand_cleanups (TREE_VALUE (lists), 0);
1175 /* Pop any pushes done in the cleanups,
1176 in case function is about to return. */
1177 do_pending_stack_adjust ();
1181 /* Restore stack level for the biggest contour that this
1182 jump jumps out of. */
1184 emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);
1186 /* Finish up the sequence containing the insns which implement the
1187 necessary cleanups, and then attach that whole sequence to the
1188 insn stream just ahead of the actual jump insn. Attaching it
1189 at that point insures that any cleanups which are in fact
1190 implicit C++ object destructions (which must be executed upon
1191 leaving the block) appear (to the debugger) to be taking place
1192 in an area of the generated code where the object(s) being
1193 destructed are still "in scope". */
1195 cleanup_insns = get_insns ();
1199 emit_insns_after (cleanup_insns, f->before_jump);
1206 /* Mark the cleanups of exited blocks so that they are executed
1207 by the code above. */
1208 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1209 if (f->before_jump != 0
1210 && PREV_INSN (f->target_rtl) == 0
1211 /* Label has still not appeared. If we are exiting a block with
1212 a stack level to restore, that started before the fixup,
1213 mark this stack level as needing restoration
1214 when the fixup is later finalized.
1215 Also mark the cleanup_list_list element for F
1216 that corresponds to this block, so that ultimately
1217 this block's cleanups will be executed by the code above. */
1219 /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared,
1220 it means the label is undefined. That's erroneous, but possible. */
1221 && (thisblock->data.block.block_start_count
1222 <= f->block_start_count))
1224 tree lists = f->cleanup_list_list;
1225 for (; lists; lists = TREE_CHAIN (lists))
1226 /* If the following elt. corresponds to our containing block
1227 then the elt. must be for this block. */
1228 if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
1229 TREE_ADDRESSABLE (lists) = 1;
1232 f->stack_level = stack_level;
1237 /* When exiting a binding contour, process all pending gotos requiring fixups.
1238 Note: STACK_DEPTH is not altered.
1240 The arguments are currently not used in the bytecode compiler, but we may need
1241 them one day for languages other than C.
1243 THISBLOCK is the structure that describes the block being exited.
1244 STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
1245 CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
1246 FIRST_INSN is the insn that began this contour.
1248 Gotos that jump out of this contour must restore the
1249 stack level and do the cleanups before actually jumping.
1251 DONT_JUMP_IN nonzero means report error if there is a jump into this
1252 contour from before the beginning of the contour.
1253 This is also done if STACK_LEVEL is nonzero. */
1256 bc_fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
1257 struct nesting *thisblock;
1263 register struct goto_fixup *f, *prev;
1264 int saved_stack_depth;
1266 /* F is the fixup we are considering; PREV is the previous one. */
1268 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1270 /* Test for a fixup that is inactive because it is already handled. */
1271 if (f->before_jump == 0)
1273 /* Delete inactive fixup from the chain, if that is easy to do. */
1275 prev->next = f->next;
1278 /* Emit code to restore the stack and continue */
1279 bc_emit_bytecode_labeldef (f->label);
1281 /* Save stack_depth across call, since bc_adjust_stack () will alter
1282 the perceived stack depth via the instructions generated. */
1284 if (f->bc_stack_level >= 0)
1286 saved_stack_depth = stack_depth;
1287 bc_adjust_stack (stack_depth - f->bc_stack_level);
1288 stack_depth = saved_stack_depth;
1291 bc_emit_bytecode (jump);
1292 bc_emit_bytecode_labelref (f->bc_target);
1294 #ifdef DEBUG_PRINT_CODE
1295 fputc ('\n', stderr);
1299 goto_fixup_chain = NULL;
1302 /* Generate RTL for an asm statement (explicit assembler code).
1303 BODY is a STRING_CST node containing the assembler code text,
1304 or an ADDR_EXPR containing a STRING_CST. */
1310 if (output_bytecode)
1312 error ("`asm' is illegal when generating bytecode");
1316 if (TREE_CODE (body) == ADDR_EXPR)
1317 body = TREE_OPERAND (body, 0);
1319 emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
1320 TREE_STRING_POINTER (body)));
1324 /* Generate RTL for an asm statement with arguments.
1325 STRING is the instruction template.
1326 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
1327 Each output or input has an expression in the TREE_VALUE and
1328 a constraint-string in the TREE_PURPOSE.
1329 CLOBBERS is a list of STRING_CST nodes each naming a hard register
1330 that is clobbered by this insn.
1332 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
1333 Some elements of OUTPUTS may be replaced with trees representing temporary
1334 values. The caller should copy those temporary values to the originally specified lvalues.
1337 VOL nonzero means the insn is volatile; don't optimize it. */
1340 expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
1341 tree string, outputs, inputs, clobbers;
1346 rtvec argvec, constraints;
1348 int ninputs = list_length (inputs);
1349 int noutputs = list_length (outputs);
1353 /* Vector of RTX's of evaluated output operands. */
1354 rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1355 /* The insn we have emitted. */
1358 if (output_bytecode)
1360 error ("`asm' is illegal when generating bytecode");
1364 /* Count the number of meaningful clobbered registers, ignoring what
1365 we would ignore later. */
1367 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1369 char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1370 i = decode_reg_name (regname);
1371 if (i >= 0 || i == -4)
1377 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1379 tree val = TREE_VALUE (tail);
1384 /* If there's an erroneous arg, emit no insn. */
1385 if (TREE_TYPE (val) == error_mark_node)
1388 /* Make sure constraint has `=' and does not have `+'. */
1391 for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
1393 if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
1395 error ("output operand constraint contains `+'");
1398 if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '=')
1403 error ("output operand constraint lacks `='");
1407 /* If an output operand is not a variable or indirect ref,
1409 create a SAVE_EXPR which is a pseudo-reg
1410 to act as an intermediate temporary.
1411 Make the asm insn write into that, then copy it to
1412 the real output operand. */
1414 while (TREE_CODE (val) == COMPONENT_REF
1415 || TREE_CODE (val) == ARRAY_REF)
1416 val = TREE_OPERAND (val, 0);
1418 if (TREE_CODE (val) != VAR_DECL
1419 && TREE_CODE (val) != PARM_DECL
1420 && TREE_CODE (val) != INDIRECT_REF)
1422 TREE_VALUE (tail) = save_expr (TREE_VALUE (tail));
1423 /* If it's a constant, print error now so don't crash later. */
1424 if (TREE_CODE (TREE_VALUE (tail)) != SAVE_EXPR)
1426 error ("invalid output in `asm'");
1431 output_rtx[i] = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
1434 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1436 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1440 /* Make vectors for the expression-rtx and constraint strings. */
1442 argvec = rtvec_alloc (ninputs);
1443 constraints = rtvec_alloc (ninputs);
1445 body = gen_rtx (ASM_OPERANDS, VOIDmode,
1446 TREE_STRING_POINTER (string), "", 0, argvec, constraints,
1448 MEM_VOLATILE_P (body) = vol;
1450 /* Eval the inputs and put them into ARGVEC.
1451 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1454 for (tail = inputs; tail; tail = TREE_CHAIN (tail))
1458 /* If there's an erroneous arg, emit no insn,
1459 because the ASM_INPUT would get VOIDmode
1460 and that could cause a crash in reload. */
1461 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1463 if (TREE_PURPOSE (tail) == NULL_TREE)
1465 error ("hard register `%s' listed as input operand to `asm'",
1466 TREE_STRING_POINTER (TREE_VALUE (tail)) );
1470 /* Make sure constraint has neither `=' nor `+'. */
1472 for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
1473 if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '='
1474 || TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
1476 error ("input operand constraint contains `%c'",
1477 TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
1481 XVECEXP (body, 3, i) /* argvec */
1482 = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
1483 XVECEXP (body, 4, i) /* constraints */
1484 = gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1485 TREE_STRING_POINTER (TREE_PURPOSE (tail)));
1489 /* Protect all the operands from the queue,
1490 now that they have all been evaluated. */
1492 for (i = 0; i < ninputs; i++)
1493 XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);
1495 for (i = 0; i < noutputs; i++)
1496 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1498 /* Now, for each output, construct an rtx
1499 (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
1500 ARGVEC CONSTRAINTS))
1501 If there is more than one, put them inside a PARALLEL. */
1503 if (noutputs == 1 && nclobbers == 0)
1505 XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
1506 insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));
1508 else if (noutputs == 0 && nclobbers == 0)
1510 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1511 insn = emit_insn (body);
1517 if (num == 0) num = 1;
1518 body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));
1520 /* For each output operand, store a SET. */
1522 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1524 XVECEXP (body, 0, i)
1525 = gen_rtx (SET, VOIDmode,
1527 gen_rtx (ASM_OPERANDS, VOIDmode,
1528 TREE_STRING_POINTER (string),
1529 TREE_STRING_POINTER (TREE_PURPOSE (tail)),
1530 i, argvec, constraints,
1532 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1535 /* If there are no outputs (but there are some clobbers)
1536 store the bare ASM_OPERANDS into the PARALLEL. */
1539 XVECEXP (body, 0, i++) = obody;
1541 /* Store (clobber REG) for each clobbered register specified. */
1543 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1545 char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1546 int j = decode_reg_name (regname);
1550 if (j == -3) /* `cc', which is not a register */
1553 if (j == -4) /* `memory', don't cache memory across asm */
1555 XVECEXP (body, 0, i++)
1556 = gen_rtx (CLOBBER, VOIDmode,
1557 gen_rtx (MEM, QImode,
1558 gen_rtx (SCRATCH, VOIDmode, 0)));
1562 error ("unknown register name `%s' in `asm'", regname);
1566 /* Use QImode since that's guaranteed to clobber just one reg. */
1567 XVECEXP (body, 0, i++)
1568 = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));
1571 insn = emit_insn (body);
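/* Illustrative sketch, not part of GNU CC: for a GNU C statement such
   as the one below (x86 mnemonics assumed purely for illustration),
   the C front end calls expand_asm_operands above with STRING the
   template, OUTPUTS pairing "=r" with `dst', INPUTS pairing "0" with
   `dst' and "g" with `src', CLOBBERS listing "cc", and VOL nonzero
   because of `volatile'.  The read-write operand is expressed with a
   "0" matching constraint, since output constraints here must contain
   `=' and must not contain `+'.  */
#if 0
static int
example_asm_add (dst, src)
     int dst, src;
{
  asm volatile ("addl %2,%0"
		: "=r" (dst)
		: "0" (dst), "g" (src)
		: "cc");
  return dst;
}
#endif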
1577 /* Generate RTL to evaluate the expression EXP
1578 and remember it in case this is the VALUE in a ({... VALUE; }) construct. */
1581 expand_expr_stmt (exp)
1584 if (output_bytecode)
1586 int org_stack_depth = stack_depth;
1588 bc_expand_expr (exp);
1590 /* Restore stack depth */
1591 if (stack_depth < org_stack_depth)
1594 bc_emit_instruction (drop);
1596 last_expr_type = TREE_TYPE (exp);
1600 /* If -W, warn about statements with no side effects,
1601 except for an explicit cast to void (e.g. for assert()), and
1602 except inside a ({...}) where they may be useful. */
1603 if (expr_stmts_for_value == 0 && exp != error_mark_node)
1605 if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
1606 && !(TREE_CODE (exp) == CONVERT_EXPR
1607 && TREE_TYPE (exp) == void_type_node))
1608 warning_with_file_and_line (emit_filename, emit_lineno,
1609 "statement with no effect");
1610 else if (warn_unused)
1611 warn_if_unused_value (exp);
1613 last_expr_type = TREE_TYPE (exp);
1614 if (! flag_syntax_only)
1615 last_expr_value = expand_expr (exp,
1616 (expr_stmts_for_value
1617 ? NULL_RTX : const0_rtx),
1620 /* If all we do is reference a volatile value in memory,
1621 copy it to a register to be sure it is actually touched. */
1622 if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
1623 && TREE_THIS_VOLATILE (exp))
1625 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
1627 else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
1628 copy_to_reg (last_expr_value);
1631 rtx lab = gen_label_rtx ();
1633 /* Compare the value with itself to reference it. */
1634 emit_cmp_insn (last_expr_value, last_expr_value, EQ,
1635 expand_expr (TYPE_SIZE (last_expr_type),
1636 NULL_RTX, VOIDmode, 0),
1638 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
1639 emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
1644 /* If this expression is part of a ({...}) and is in memory, we may have
1645 to preserve temporaries. */
1646 preserve_temp_slots (last_expr_value);
1648 /* Free any temporaries used to evaluate this expression. Any temporary
1649 used as a result of this expression will already have been preserved above. */
1656 /* Warn if EXP contains any computations whose results are not used.
1657 Return 1 if a warning is printed; 0 otherwise. */
1660 warn_if_unused_value (exp)
1663 if (TREE_USED (exp))
1666 switch (TREE_CODE (exp))
1668 case PREINCREMENT_EXPR:
1669 case POSTINCREMENT_EXPR:
1670 case PREDECREMENT_EXPR:
1671 case POSTDECREMENT_EXPR:
1676 case METHOD_CALL_EXPR:
1678 case WITH_CLEANUP_EXPR:
1680 /* We don't warn about COND_EXPR because it may be a useful
1681 construct if either arm contains a side effect. */
1686 /* For a binding, warn if no side effect within it. */
1687 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1689 case TRUTH_ORIF_EXPR:
1690 case TRUTH_ANDIF_EXPR:
1691 /* In && or ||, warn if 2nd operand has no side effect. */
1692 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1695 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
1697 /* Let people do `(foo (), 0)' without a warning. */
1698 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1700 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1704 case NON_LVALUE_EXPR:
1705 /* Don't warn about values cast to void. */
1706 if (TREE_TYPE (exp) == void_type_node)
1708 /* Don't warn about conversions not explicit in the user's program. */
1709 if (TREE_NO_UNUSED_WARNING (exp))
1711 /* Assignment to a cast usually results in a cast of a modify.
1712 Don't complain about that. */
1713 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MODIFY_EXPR)
1715 /* Sometimes it results in a cast of a cast of a modify.
1716 Don't complain about that. */
1717 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == CONVERT_EXPR
1718 || TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR)
1719 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == MODIFY_EXPR)
1723 /* Referencing a volatile value is a side effect, so don't warn. */
1724 if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
1725 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1726 && TREE_THIS_VOLATILE (exp))
1728 warning_with_file_and_line (emit_filename, emit_lineno,
1729 "value computed is not used");
1734 /* Clear out the memory of the last expression evaluated. */
1742 /* Begin a statement which will return a value.
1743 Return the RTL_EXPR for this statement expr.
1744 The caller must save that value and pass it to expand_end_stmt_expr. */
1747 expand_start_stmt_expr ()
1752 /* When generating bytecode just note down the stack depth */
1753 if (output_bytecode)
1754 return (build_int_2 (stack_depth, 0));
1756 /* Make the RTL_EXPR node temporary, not momentary,
1757 so that rtl_expr_chain doesn't become garbage. */
1758 momentary = suspend_momentary ();
1759 t = make_node (RTL_EXPR);
1760 resume_momentary (momentary);
1763 expr_stmts_for_value++;
1767 /* Restore the previous state at the end of a statement that returns a value.
1768 Returns a tree node representing the statement's value and the
1769 insns to compute the value.
1771 The nodes of that expression have been freed by now, so we cannot use them.
1772 But we don't want to do that anyway; the expression has already been
1773 evaluated and now we just want to use the value. So generate a RTL_EXPR
1774 with the proper type and RTL value.
1776 If the last substatement was not an expression,
1777 return something with type `void'. */
1780 expand_end_stmt_expr (t)
1783 if (output_bytecode)
1789 /* At this point, all expressions have been evaluated in order.
1790 However, all expression values have been popped when evaluated,
1791 which means we have to recover the last expression value. This is
1792 the last value removed by means of a `drop' instruction. Instead
1793 of adding code to inhibit dropping the last expression value, it
1794 is here recovered by undoing the `drop'. Since `drop' is
1795 equivalent to `adjustackSI [1]', it can be undone with `adjstackSI [-1]'. */
1798 bc_adjust_stack (-1);
1800 if (!last_expr_type)
1801 last_expr_type = void_type_node;
1803 t = make_node (RTL_EXPR);
1804 TREE_TYPE (t) = last_expr_type;
1805 RTL_EXPR_RTL (t) = NULL;
1806 RTL_EXPR_SEQUENCE (t) = NULL;
1808 /* Don't consider deleting this expr or containing exprs at tree level. */
1809 TREE_THIS_VOLATILE (t) = 1;
1817 if (last_expr_type == 0)
1819 last_expr_type = void_type_node;
1820 last_expr_value = const0_rtx;
1822 else if (last_expr_value == 0)
1823 /* There are some cases where this can happen, such as when the
1824 statement is void type. */
1825 last_expr_value = const0_rtx;
1826 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
1827 /* Remove any possible QUEUED. */
1828 last_expr_value = protect_from_queue (last_expr_value, 0);
1832 TREE_TYPE (t) = last_expr_type;
1833 RTL_EXPR_RTL (t) = last_expr_value;
1834 RTL_EXPR_SEQUENCE (t) = get_insns ();
1836 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
1840 /* Don't consider deleting this expr or containing exprs at tree level. */
1841 TREE_SIDE_EFFECTS (t) = 1;
1842 /* Propagate volatility of the actual RTL expr. */
1843 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
1846 expr_stmts_for_value--;
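/* Illustrative sketch, not part of GNU CC: how a front end might pair
   expand_start_stmt_expr with expand_end_stmt_expr above when
   expanding the GNU C statement expression `({ stmt1; stmt2; })'.
   The value of the whole construct is that of stmt2, the last
   expression statement.  */
#if 0
static tree
example_expand_stmt_expr (stmt1, stmt2)
     tree stmt1, stmt2;
{
  tree rtl_expr = expand_start_stmt_expr ();

  expand_expr_stmt (stmt1);	/* value computed but superseded */
  expand_expr_stmt (stmt2);	/* this value becomes the result */
  return expand_end_stmt_expr (rtl_expr);
}
#endif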
1851 /* The exception handling nesting looks like this:

1854    {          <-- exception handler block
1856               <-- in an exception handler
1858        :      <-- in a TRY block
1859        :      <-- in an exception handler

1864        :      <-- in an except block
1865        :      <-- in an exception handler
        }  */
1872 /* Return nonzero iff in a try block at level LEVEL. */
1875 in_try_block (level)
1878 struct nesting *n = except_stack;
1881 while (n && n->data.except_stmt.after_label != 0)
1892 /* Return nonzero iff in an except block at level LEVEL. */
1895 in_except_block (level)
1898 struct nesting *n = except_stack;
1901 while (n && n->data.except_stmt.after_label == 0)
1912 /* Return nonzero iff in an exception handler at level LEVEL. */
1915 in_exception_handler (level)
1918 struct nesting *n = except_stack;
1919 while (n && level--)
1924 /* Record the fact that the current exception nesting raises
1925 exception EX. If not in an exception handler, return 0. */
1932 if (except_stack == 0)
1934 raises_ptr = &except_stack->data.except_stmt.raised;
1935 if (! value_member (ex, *raises_ptr))
1936 *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
1940 /* Generate RTL for the start of a try block.
1942 TRY_CLAUSE is the condition to test to enter the try block. */
1945 expand_start_try (try_clause, exitflag, escapeflag)
1950 struct nesting *thishandler = ALLOC_NESTING ();
1952 /* Make an entry on except_stack for the try block we are entering. */
1954 thishandler->next = except_stack;
1955 thishandler->all = nesting_stack;
1956 thishandler->depth = ++nesting_depth;
1957 thishandler->data.except_stmt.raised = 0;
1958 thishandler->data.except_stmt.handled = 0;
1959 thishandler->data.except_stmt.first_insn = get_insns ();
1960 thishandler->data.except_stmt.except_label = gen_label_rtx ();
1961 thishandler->data.except_stmt.unhandled_label = 0;
1962 thishandler->data.except_stmt.after_label = 0;
1963 thishandler->data.except_stmt.escape_label
1964 = escapeflag ? thishandler->data.except_stmt.except_label : 0;
1965 thishandler->exit_label = exitflag ? gen_label_rtx () : 0;
1966 except_stack = thishandler;
1967 nesting_stack = thishandler;
1969 do_jump (try_clause, thishandler->data.except_stmt.except_label, NULL_RTX);
1972 /* End of a TRY block. Nothing to do for now. */
1977 except_stack->data.except_stmt.after_label = gen_label_rtx ();
1978 expand_goto_internal (NULL_TREE, except_stack->data.except_stmt.after_label,
1982 /* Start an `except' nesting contour.
1983 EXITFLAG says whether this contour should be able to `exit' something.
1984 ESCAPEFLAG says whether this contour should be escapable. */
1987 expand_start_except (exitflag, escapeflag)
1994 /* An `exit' from catch clauses goes out to next exit level,
1995 if there is one. Otherwise, it just goes to the end
1996 of the construct. */
1997 for (n = except_stack->next; n; n = n->next)
1998 if (n->exit_label != 0)
2000 except_stack->exit_label = n->exit_label;
2004 except_stack->exit_label = except_stack->data.except_stmt.after_label;
2009 /* An `escape' from catch clauses goes out to next escape level,
2010 if there is one. Otherwise, it just goes to the end
2011 of the construct. */
2012 for (n = except_stack->next; n; n = n->next)
2013 if (n->data.except_stmt.escape_label != 0)
2015 except_stack->data.except_stmt.escape_label
2016 = n->data.except_stmt.escape_label;
2020 except_stack->data.except_stmt.escape_label
2021 = except_stack->data.except_stmt.after_label;
2023 do_pending_stack_adjust ();
2024 emit_label (except_stack->data.except_stmt.except_label);
2027 /* Generate code to `escape' from an exception contour. This
2028 is like `exiting', but does not conflict with constructs which
2031 Return nonzero if this contour is escapable, otherwise
2032 return zero, and language-specific code will emit the
2033 appropriate error message. */
2035 expand_escape_except ()
2039 for (n = except_stack; n; n = n->next)
2040 if (n->data.except_stmt.escape_label != 0)
2042 expand_goto_internal (NULL_TREE,
2043 n->data.except_stmt.escape_label, NULL_RTX);
2050 /* Finish processing an `except' contour.
2051 Culls out all exceptions which might be raised but not
2052 handled, and returns the list to the caller.
2053 Language-specific code is responsible for dealing with these
2057 expand_end_except ()
2060 tree raised = NULL_TREE;
2062 do_pending_stack_adjust ();
2063 emit_label (except_stack->data.except_stmt.after_label);
2065 n = except_stack->next;
2068 /* Propagate exceptions raised but not handled to next
2070 tree handled = except_stack->data.except_stmt.handled;
2071 if (handled != void_type_node)
2073 tree prev = NULL_TREE;
2074 raised = except_stack->data.except_stmt.raised;
2078 for (this_raise = raised, prev = 0; this_raise;
2079 this_raise = TREE_CHAIN (this_raise))
2081 if (value_member (TREE_VALUE (this_raise), handled))
2084 TREE_CHAIN (prev) = TREE_CHAIN (this_raise);
2087 raised = TREE_CHAIN (raised);
2088 if (raised == NULL_TREE)
2095 handled = TREE_CHAIN (handled);
2097 if (prev == NULL_TREE)
2100 TREE_CHAIN (prev) = n->data.except_stmt.raised;
2102 n->data.except_stmt.raised = raised;
2106 POPSTACK (except_stack);
2111 /* Record that exception EX is caught by this exception handler.
2112 Return nonzero if in exception handling construct, otherwise return 0. */
2119 if (except_stack == 0)
2121 raises_ptr = &except_stack->data.except_stmt.handled;
2122 if (*raises_ptr != void_type_node
2124 && ! value_member (ex, *raises_ptr))
2125 *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
2129 /* Record that this exception handler catches all exceptions.
2130 Return nonzero if in exception handling construct, otherwise return 0. */
2133 expand_catch_default ()
2135 if (except_stack == 0)
2137 except_stack->data.except_stmt.handled = void_type_node;
2144 if (except_stack == 0 || except_stack->data.except_stmt.after_label == 0)
2146 expand_goto_internal (NULL_TREE, except_stack->data.except_stmt.after_label,
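/* Illustrative call sequence (a sketch, not part of the original sources):
   a front end with a `try ... except ...' statement might drive these
   routines roughly like this.  TRY_COND and the clause bodies stand for
   whatever trees the parser built, and the names of the routines whose
   definitions are abridged above (expand_end_try, expand_end_catch) are
   assumed here:

	expand_start_try (try_cond, 0, 1);
	  ... expand the TRY body ...
	expand_end_try ();
	expand_start_except (0, 1);
	  expand_catch_default ();
	  ... expand the handler body ...
	  expand_end_catch ();
	unhandled = expand_end_except ();

   expand_end_except returns the list of exceptions raised in the contour
   but not handled, for language-specific code to deal with.  */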
2151 /* Generate RTL for the start of an if-then. COND is the expression
2152 whose truth should be tested.
2154 If EXITFLAG is nonzero, this conditional is visible to
2155 `exit_something'. */
2158 expand_start_cond (cond, exitflag)
2162 struct nesting *thiscond = ALLOC_NESTING ();
2164 /* Make an entry on cond_stack for the cond we are entering. */
2166 thiscond->next = cond_stack;
2167 thiscond->all = nesting_stack;
2168 thiscond->depth = ++nesting_depth;
2169 thiscond->data.cond.next_label = gen_label_rtx ();
2170 /* Before we encounter an `else', we don't need a separate exit label
2171 unless there are supposed to be exit statements
2172 to exit this conditional. */
2173 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2174 thiscond->data.cond.endif_label = thiscond->exit_label;
2175 cond_stack = thiscond;
2176 nesting_stack = thiscond;
2178 if (output_bytecode)
2179 bc_expand_start_cond (cond, exitflag);
2181 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2184 /* Generate RTL between then-clause and the elseif-clause
2185 of an if-then-elseif-.... */
2188 expand_start_elseif (cond)
2191 if (cond_stack->data.cond.endif_label == 0)
2192 cond_stack->data.cond.endif_label = gen_label_rtx ();
2193 emit_jump (cond_stack->data.cond.endif_label);
2194 emit_label (cond_stack->data.cond.next_label);
2195 cond_stack->data.cond.next_label = gen_label_rtx ();
2196 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2199 /* Generate RTL between the then-clause and the else-clause
2200 of an if-then-else. */
2203 expand_start_else ()
2205 if (cond_stack->data.cond.endif_label == 0)
2206 cond_stack->data.cond.endif_label = gen_label_rtx ();
2208 if (output_bytecode)
2210 bc_expand_start_else ();
2214 emit_jump (cond_stack->data.cond.endif_label);
2215 emit_label (cond_stack->data.cond.next_label);
2216 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2219 /* Generate RTL for the end of an if-then.
2220 Pop the record for it off of cond_stack. */
2225 struct nesting *thiscond = cond_stack;
2227 if (output_bytecode)
2228 bc_expand_end_cond ();
2231 do_pending_stack_adjust ();
2232 if (thiscond->data.cond.next_label)
2233 emit_label (thiscond->data.cond.next_label);
2234 if (thiscond->data.cond.endif_label)
2235 emit_label (thiscond->data.cond.endif_label);
2238 POPSTACK (cond_stack);
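/* Illustrative call sequence (a sketch, not part of the original sources):
   a front end expanding `if (a) s1; else if (b) s2; else s3;' would call
   the functions above in this order, with a, b, s1, s2 and s3 standing
   for the trees built by the parser and expand_expr_stmt (defined earlier
   in this file) standing for however the clause bodies get expanded:

	expand_start_cond (a, 0);
	expand_expr_stmt (s1);
	expand_start_elseif (b);
	expand_expr_stmt (s2);
	expand_start_else ();
	expand_expr_stmt (s3);
	expand_end_cond ();
*/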
2243 /* Generate code for the start of an if-then. COND is the expression
2244 whose truth is to be tested; if EXITFLAG is nonzero this conditional
2245 is to be visible to exit_something. It is assumed that the caller
2246 has pushed the previous context on the cond stack. */
2248 bc_expand_start_cond (cond, exitflag)
2252 struct nesting *thiscond = cond_stack;
2254 thiscond->data.case_stmt.nominal_type = cond;
2255 bc_expand_expr (cond);
2256 bc_emit_bytecode (xjumpifnot);
2257 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));
2259 #ifdef DEBUG_PRINT_CODE
2260 fputc ('\n', stderr);
2264 /* Generate the label for the end of an if with
2267 bc_expand_end_cond ()
2269 struct nesting *thiscond = cond_stack;
2271 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->exit_label));
2274 /* Generate code for the start of the else-clause of
2277 bc_expand_start_else ()
2279 struct nesting *thiscond = cond_stack;
2281 thiscond->data.cond.endif_label = thiscond->exit_label;
2282 thiscond->exit_label = gen_label_rtx ();
2283 bc_emit_bytecode (jump);
2284 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));
2286 #ifdef DEBUG_PRINT_CODE
2287 fputc ('\n', stderr);
2290 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->data.cond.endif_label));
2293 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2294 loop should be exited by `exit_something'. This is a loop for which
2295 `expand_continue' will jump to the top of the loop.
2297 Make an entry on loop_stack to record the labels associated with
2301 expand_start_loop (exit_flag)
2304 register struct nesting *thisloop = ALLOC_NESTING ();
2306 /* Make an entry on loop_stack for the loop we are entering. */
2308 thisloop->next = loop_stack;
2309 thisloop->all = nesting_stack;
2310 thisloop->depth = ++nesting_depth;
2311 thisloop->data.loop.start_label = gen_label_rtx ();
2312 thisloop->data.loop.end_label = gen_label_rtx ();
2313 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2314 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2315 loop_stack = thisloop;
2316 nesting_stack = thisloop;
2318 if (output_bytecode)
2320 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));
2324 do_pending_stack_adjust ();
2326 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
2327 emit_label (thisloop->data.loop.start_label);
2332 /* Like expand_start_loop but for a loop where the continuation point
2333 (for expand_continue_loop) will be specified explicitly. */
2336 expand_start_loop_continue_elsewhere (exit_flag)
2339 struct nesting *thisloop = expand_start_loop (exit_flag);
2340 loop_stack->data.loop.continue_label = gen_label_rtx ();
2344 /* Specify the continuation point for a loop started with
2345 expand_start_loop_continue_elsewhere.
2346 Use this at the point in the code to which a continue statement
2350 expand_loop_continue_here ()
2352 if (output_bytecode)
2354 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (loop_stack->data.loop.continue_label));
2357 do_pending_stack_adjust ();
2358 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
2359 emit_label (loop_stack->data.loop.continue_label);
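/* Illustrative call sequences (a sketch, not part of the original sources).
   A front end might expand `while (cond) body;' as

	expand_start_loop (1);
	expand_exit_loop_if_false (0, cond);
	... expand body; `break' uses expand_exit_loop,
	    `continue' uses expand_continue_loop ...
	expand_end_loop ();

   and a `for (init; cond; incr) body;' loop, whose continue point is the
   increment rather than the top of the loop, as

	... expand init ...
	expand_start_loop_continue_elsewhere (1);
	expand_exit_loop_if_false (0, cond);
	... expand body ...
	expand_loop_continue_here ();
	... expand incr ...
	expand_end_loop ();

   expand_end_loop and the exit/continue routines are defined below.  */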
2364 bc_expand_end_loop ()
2366 struct nesting *thisloop = loop_stack;
2368 bc_emit_bytecode (jump);
2369 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));
2371 #ifdef DEBUG_PRINT_CODE
2372 fputc ('\n', stderr);
2375 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->exit_label));
2376 POPSTACK (loop_stack);
2381 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2382 Pop the block off of loop_stack. */
2388 register rtx start_label;
2389 rtx last_test_insn = 0;
2392 if (output_bytecode)
2394 bc_expand_end_loop ();
2398 insn = get_last_insn ();
2399 start_label = loop_stack->data.loop.start_label;
2401 /* Mark the continue-point at the top of the loop if none elsewhere. */
2402 if (start_label == loop_stack->data.loop.continue_label)
2403 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2405 do_pending_stack_adjust ();
2407 /* If optimizing, perhaps reorder the loop. If the loop
2408 starts with a conditional exit, roll that to the end
2409 where it will optimize together with the jump back.
2411 We look for the last conditional branch to the exit that we encounter
2412 before hitting 30 insns or a CALL_INSN. If we see an unconditional
2413 branch to the exit first, use it.
2415 We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
2416 because moving them is not valid. */
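/* For example (illustrative only), a loop laid out with its exit test
   at the top,

	start_label:  if (! cond) goto end_label;
		      <body>
		      goto start_label;
	end_label:

   is rearranged so the body comes first and the test sits at the bottom,

		      goto start_label;
	newstart_label: <body>
	start_label:  if (! cond) goto end_label;
		      goto newstart_label;
	end_label:

   after which later jump optimization can merge the bottom test and the
   jump back into a single conditional branch to newstart_label.  */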
2420 ! (GET_CODE (insn) == JUMP_INSN
2421 && GET_CODE (PATTERN (insn)) == SET
2422 && SET_DEST (PATTERN (insn)) == pc_rtx
2423 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2425 /* Scan insns from the top of the loop looking for a qualified
2426 conditional exit. */
2427 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2428 insn = NEXT_INSN (insn))
2430 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
2433 if (GET_CODE (insn) == NOTE
2434 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2435 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2438 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2441 if (last_test_insn && num_insns > 30)
2444 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
2445 && SET_DEST (PATTERN (insn)) == pc_rtx
2446 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
2447 && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
2448 && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
2449 == loop_stack->data.loop.end_label))
2450 || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
2451 && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
2452 == loop_stack->data.loop.end_label))))
2453 last_test_insn = insn;
2455 if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
2456 && GET_CODE (PATTERN (insn)) == SET
2457 && SET_DEST (PATTERN (insn)) == pc_rtx
2458 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
2459 && (XEXP (SET_SRC (PATTERN (insn)), 0)
2460 == loop_stack->data.loop.end_label))
2461 /* Include BARRIER. */
2462 last_test_insn = NEXT_INSN (insn);
2465 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2467 /* We found one. Move everything from there up
2468 to the end of the loop, and add a jump into the loop
2469 to jump to there. */
2470 register rtx newstart_label = gen_label_rtx ();
2471 register rtx start_move = start_label;
2473 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2474 then we want to move this note also. */
2475 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2476 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2477 == NOTE_INSN_LOOP_CONT))
2478 start_move = PREV_INSN (start_move);
2480 emit_label_after (newstart_label, PREV_INSN (start_move));
2481 reorder_insns (start_move, last_test_insn, get_last_insn ());
2482 emit_jump_insn_after (gen_jump (start_label),
2483 PREV_INSN (newstart_label));
2484 emit_barrier_after (PREV_INSN (newstart_label));
2485 start_label = newstart_label;
2489 emit_jump (start_label);
2490 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2491 emit_label (loop_stack->data.loop.end_label);
2493 POPSTACK (loop_stack);
2498 /* Generate a jump to the current loop's continue-point.
2499 This is usually the top of the loop, but may be specified
2500 explicitly elsewhere. If not currently inside a loop,
2501 return 0 and do nothing; caller will print an error message. */
2504 expand_continue_loop (whichloop)
2505 struct nesting *whichloop;
2509 whichloop = loop_stack;
2512 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2517 /* Generate a jump to exit the current loop. If not currently inside a loop,
2518 return 0 and do nothing; caller will print an error message. */
2521 expand_exit_loop (whichloop)
2522 struct nesting *whichloop;
2526 whichloop = loop_stack;
2529 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2533 /* Generate a conditional jump to exit the current loop if COND
2534 evaluates to zero. If not currently inside a loop,
2535 return 0 and do nothing; caller will print an error message. */
2538 expand_exit_loop_if_false (whichloop, cond)
2539 struct nesting *whichloop;
2544 whichloop = loop_stack;
2547 if (output_bytecode)
2549 bc_expand_expr (cond);
2550 bc_expand_goto_internal (xjumpifnot,
2551 BYTECODE_BC_LABEL (whichloop->exit_label),
2555 do_jump (cond, whichloop->data.loop.end_label, NULL_RTX);
2560 /* Return non-zero if we should preserve sub-expressions as separate
2561 pseudos. We never do so if we aren't optimizing. We always do so
2562 if -fexpensive-optimizations.
2564 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2565 the loop may still be a small one. */
2568 preserve_subexpressions_p ()
2572 if (flag_expensive_optimizations)
2575 if (optimize == 0 || loop_stack == 0)
2578 insn = get_last_insn_anywhere ();
2581 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2582 < n_non_fixed_regs * 3));
2586 /* Generate a jump to exit the current loop, conditional, binding contour
2587 or case statement. Not all such constructs are visible to this function,
2588 only those started with EXIT_FLAG nonzero. Individual languages use
2589 the EXIT_FLAG parameter to control which kinds of constructs you can
2592 If not currently inside anything that can be exited,
2593 return 0 and do nothing; caller will print an error message. */
2596 expand_exit_something ()
2600 for (n = nesting_stack; n; n = n->all)
2601 if (n->exit_label != 0)
2603 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2610 /* Generate RTL to return from the current function, with no value.
2611 (That is, we do not do anything about returning any value.) */
2614 expand_null_return ()
2616 struct nesting *block = block_stack;
2619 if (output_bytecode)
2621 bc_emit_instruction (ret);
2625 /* Does any pending block have cleanups? */
2627 while (block && block->data.block.cleanups == 0)
2628 block = block->next;
2630 /* If yes, use a goto to return, since that runs cleanups. */
2632 expand_null_return_1 (last_insn, block != 0);
2635 /* Generate RTL to return from the current function, with value VAL. */
2638 expand_value_return (val)
2641 struct nesting *block = block_stack;
2642 rtx last_insn = get_last_insn ();
2643 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2645 /* Copy the value to the return location
2646 unless it's already there. */
2648 if (return_reg != val)
2650 #ifdef PROMOTE_FUNCTION_RETURN
2651 enum machine_mode mode = DECL_MODE (DECL_RESULT (current_function_decl));
2652 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2653 int unsignedp = TREE_UNSIGNED (type);
2655 if (TREE_CODE (type) == INTEGER_TYPE || TREE_CODE (type) == ENUMERAL_TYPE
2656 || TREE_CODE (type) == BOOLEAN_TYPE || TREE_CODE (type) == CHAR_TYPE
2657 || TREE_CODE (type) == REAL_TYPE || TREE_CODE (type) == POINTER_TYPE
2658 || TREE_CODE (type) == OFFSET_TYPE)
2660 PROMOTE_MODE (mode, unsignedp, type);
2663 if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
2664 convert_move (return_reg, val, unsignedp);
2667 emit_move_insn (return_reg, val);
2669 if (GET_CODE (return_reg) == REG
2670 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2671 emit_insn (gen_rtx (USE, VOIDmode, return_reg));
2673 /* Does any pending block have cleanups? */
2675 while (block && block->data.block.cleanups == 0)
2676 block = block->next;
2678 /* If yes, use a goto to return, since that runs cleanups.
2679 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2681 expand_null_return_1 (last_insn, block != 0);
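/* For example (illustrative only): on a target whose PROMOTE_MODE widens
   QImode and HImode values to SImode, a function declared `short f ()'
   has its return register in SImode, so a HImode VAL computed for the
   return value is widened with convert_move in expand_value_return
   rather than being moved into the return register directly.  */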
2684 /* Output a return with no value. If LAST_INSN is nonzero,
2685 pretend that the return takes place after LAST_INSN.
2686 If USE_GOTO is nonzero then don't use a return instruction;
2687 go to the return label instead. This causes any cleanups
2688 of pending blocks to be executed normally. */
2691 expand_null_return_1 (last_insn, use_goto)
2695 rtx end_label = cleanup_label ? cleanup_label : return_label;
2697 clear_pending_stack_adjust ();
2698 do_pending_stack_adjust ();
2701 /* PCC-struct return always uses an epilogue. */
2702 if (current_function_returns_pcc_struct || use_goto)
2705 end_label = return_label = gen_label_rtx ();
2706 expand_goto_internal (NULL_TREE, end_label, last_insn);
2710 /* Otherwise output a simple return-insn if one is available,
2711 unless it won't do the job. */
2713 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2715 emit_jump_insn (gen_return ());
2721 /* Otherwise jump to the epilogue. */
2722 expand_goto_internal (NULL_TREE, end_label, last_insn);
2725 /* Generate RTL to evaluate the expression RETVAL and return it
2726 from the current function. */
2729 expand_return (retval)
2732 /* If there are any cleanups to be performed, then they will
2733 be inserted following LAST_INSN. It is desirable
2734 that the last_insn, for such purposes, should be the
2735 last insn before computing the return value. Otherwise, cleanups
2736 which call functions can clobber the return value. */
2737 /* ??? rms: I think that is erroneous, because in C++ it would
2738 run destructors on variables that might be used in the subsequent
2739 computation of the return value. */
2741 register rtx val = 0;
2745 struct nesting *block;
2747 /* Bytecode returns are quite simple, just leave the result on the
2748 arithmetic stack. */
2749 if (output_bytecode)
2751 bc_expand_expr (retval);
2752 bc_emit_instruction (ret);
2756 /* If function wants no value, give it none. */
2757 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2759 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2761 expand_null_return ();
2765 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2766 cleanups = any_pending_cleanups (1);
2768 if (TREE_CODE (retval) == RESULT_DECL)
2769 retval_rhs = retval;
2770 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2771 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2772 retval_rhs = TREE_OPERAND (retval, 1);
2773 else if (TREE_TYPE (retval) == void_type_node)
2774 /* Recognize tail-recursive call to void function. */
2775 retval_rhs = retval;
2777 retval_rhs = NULL_TREE;
2779 /* Only use `last_insn' if there are cleanups which must be run. */
2780 if (cleanups || cleanup_label != 0)
2781 last_insn = get_last_insn ();
2783 /* Distribute return down conditional expr if either of the sides
2784 may involve tail recursion (see test below). This enhances the number
2785 of tail recursions we see. Don't do this always since it can produce
2786 sub-optimal code in some cases and we distribute assignments into
2787 conditional expressions when it would help. */
2789 if (optimize && retval_rhs != 0
2790 && frame_offset == 0
2791 && TREE_CODE (retval_rhs) == COND_EXPR
2792 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2793 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2795 rtx label = gen_label_rtx ();
2798 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2799 expr = build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2800 DECL_RESULT (current_function_decl),
2801 TREE_OPERAND (retval_rhs, 1));
2802 TREE_SIDE_EFFECTS (expr) = 1;
2803 expand_return (expr);
2806 expr = build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2807 DECL_RESULT (current_function_decl),
2808 TREE_OPERAND (retval_rhs, 2));
2809 TREE_SIDE_EFFECTS (expr) = 1;
2810 expand_return (expr);
2814 /* For tail-recursive call to current function,
2815 just jump back to the beginning.
2816 It's unsafe if any auto variable in this function
2817 has its address taken; for simplicity,
2818 require stack frame to be empty. */
2819 if (optimize && retval_rhs != 0
2820 && frame_offset == 0
2821 && TREE_CODE (retval_rhs) == CALL_EXPR
2822 && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
2823 && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
2824 /* Finish checking validity, and if valid emit code
2825 to set the argument variables for the new call. */
2826 && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
2827 DECL_ARGUMENTS (current_function_decl)))
2829 if (tail_recursion_label == 0)
2831 tail_recursion_label = gen_label_rtx ();
2832 emit_label_after (tail_recursion_label,
2833 tail_recursion_reentry);
2836 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
2841 /* This optimization is safe if there are local cleanups
2842 because expand_null_return takes care of them.
2843 ??? I think it should also be safe when there is a cleanup label,
2844 because expand_null_return takes care of them, too.
2845 Any reason why not? */
2846 if (HAVE_return && cleanup_label == 0
2847 && ! current_function_returns_pcc_struct
2848 && BRANCH_COST <= 1)
2850 /* If this is return x == y; then generate
2851 if (x == y) return 1; else return 0;
2852 if we can do it with explicit return insns and
2853 branches are cheap. */
2855 switch (TREE_CODE (retval_rhs))
2863 case TRUTH_ANDIF_EXPR:
2864 case TRUTH_ORIF_EXPR:
2865 case TRUTH_AND_EXPR:
2867 case TRUTH_NOT_EXPR:
2868 case TRUTH_XOR_EXPR:
2869 op0 = gen_label_rtx ();
2870 jumpifnot (retval_rhs, op0);
2871 expand_value_return (const1_rtx);
2873 expand_value_return (const0_rtx);
2877 #endif /* HAVE_return */
2881 && TREE_TYPE (retval_rhs) != void_type_node
2882 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2884 /* Calculate the return value into a pseudo reg. */
2885 val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2887 /* All temporaries have now been used. */
2889 /* Return the calculated value, doing cleanups first. */
2890 expand_value_return (val);
2894 /* No cleanups or no hard reg used;
2895 calculate value into hard return reg. */
2896 expand_expr (retval, const0_rtx, VOIDmode, 0);
2899 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
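/* For example (illustrative only): on a machine with a `return' pattern
   and cheap branches, `return x == y;' is expanded along the lines of

	if (! (x == y)) goto L;
	<return value 1>
     L: <return value 0>

   using jumpifnot and expand_value_return (const1_rtx / const0_rtx),
   so each arm ends in an explicit return insn.  */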
2903 /* Return 1 if the end of the generated RTX is not a barrier.
2904 This means code already compiled can drop through. */
2907 drop_through_at_end_p ()
2909 rtx insn = get_last_insn ();
2910 while (insn && GET_CODE (insn) == NOTE)
2911 insn = PREV_INSN (insn);
2912 return insn && GET_CODE (insn) != BARRIER;
2915 /* Emit code to alter this function's formal parms for a tail-recursive call.
2916 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2917 FORMALS is the chain of decls of formals.
2918 Return 1 if this can be done;
2919 otherwise return 0 and do not emit any code. */
2922 tail_recursion_args (actuals, formals)
2923 tree actuals, formals;
2925 register tree a = actuals, f = formals;
2927 register rtx *argvec;
2929 /* Check that number and types of actuals are compatible
2930 with the formals. This is not always true in valid C code.
2931 Also check that no formal needs to be addressable
2932 and that all formals are scalars. */
2934 /* Also count the args. */
2936 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2938 if (TREE_TYPE (TREE_VALUE (a)) != TREE_TYPE (f))
2940 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2943 if (a != 0 || f != 0)
2946 /* Compute all the actuals. */
2948 argvec = (rtx *) alloca (i * sizeof (rtx));
2950 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2951 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
2953 /* Find which actual values refer to current values of previous formals.
2954 Copy each of them now, before any formal is changed. */
2956 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2960 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2961 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2962 { copy = 1; break; }
2964 argvec[i] = copy_to_reg (argvec[i]);
2967 /* Store the values of the actuals into the formals. */
2969 for (f = formals, a = actuals, i = 0; f;
2970 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2972 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
2973 emit_move_insn (DECL_RTL (f), argvec[i]);
2975 convert_move (DECL_RTL (f), argvec[i],
2976 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
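/* For example (illustrative only): a tail-recursive call `return f (y, x)'
   inside `f (x, y)' has an actual (`x') that mentions a formal assigned
   earlier in the store loop above.  Without first copying that actual
   into a fresh pseudo, the sequence `x = y; y = x;' would give the second
   formal the already-clobbered value; the copies preserve the
   parallel-assignment semantics of the call.  */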
2983 /* Generate the RTL code for entering a binding contour.
2984 The variables are declared one by one, by calls to `expand_decl'.
2986 EXIT_FLAG is nonzero if this construct should be visible to
2987 `exit_something'. */
2990 expand_start_bindings (exit_flag)
2993 struct nesting *thisblock = ALLOC_NESTING ();
2996 if (!output_bytecode)
2997 note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
2999 /* Make an entry on block_stack for the block we are entering. */
3001 thisblock->next = block_stack;
3002 thisblock->all = nesting_stack;
3003 thisblock->depth = ++nesting_depth;
3004 thisblock->data.block.stack_level = 0;
3005 thisblock->data.block.cleanups = 0;
3006 thisblock->data.block.function_call_count = 0;
3010 if (block_stack->data.block.cleanups == NULL_TREE
3011 && (block_stack->data.block.outer_cleanups == NULL_TREE
3012 || block_stack->data.block.outer_cleanups == empty_cleanup_list))
3013 thisblock->data.block.outer_cleanups = empty_cleanup_list;
3015 thisblock->data.block.outer_cleanups
3016 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3017 block_stack->data.block.outer_cleanups);
3020 thisblock->data.block.outer_cleanups = 0;
3024 && !(block_stack->data.block.cleanups == NULL_TREE
3025 && block_stack->data.block.outer_cleanups == NULL_TREE))
3026 thisblock->data.block.outer_cleanups
3027 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3028 block_stack->data.block.outer_cleanups);
3030 thisblock->data.block.outer_cleanups = 0;
3032 thisblock->data.block.label_chain = 0;
3033 thisblock->data.block.innermost_stack_block = stack_block_stack;
3034 thisblock->data.block.first_insn = note;
3035 thisblock->data.block.block_start_count = ++block_start_count;
3036 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3037 block_stack = thisblock;
3038 nesting_stack = thisblock;
3040 if (!output_bytecode)
3042 /* Make a new level for allocating stack slots. */
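/* Illustrative call sequence (a sketch, not part of the original sources):
   a front end expanding a braced block `{ int i = 0; ... }' brackets it
   with

	expand_start_bindings (0);
	  ... expand_decl and expand_decl_init for each local,
	      then the statements of the block ...
	expand_end_bindings (decls, 1, 0);

   where `decls' is the chain of VAR_DECLs the front end collected for
   the block.  expand_end_bindings is defined below.  */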
3047 /* Given a pointer to a BLOCK node, save a pointer to the most recently
3048 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
3052 remember_end_note (block)
3053 register tree block;
3055 BLOCK_END_NOTE (block) = last_block_end_note;
3056 last_block_end_note = NULL_RTX;
3059 /* Generate RTL code to terminate a binding contour.
3060 VARS is the chain of VAR_DECL nodes
3061 for the variables bound in this contour.
3062 MARK_ENDS is nonzero if we should put a note at the beginning
3063 and end of this binding contour.
3065 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3066 (That is true automatically if the contour has a saved stack level.) */
3069 expand_end_bindings (vars, mark_ends, dont_jump_in)
3074 register struct nesting *thisblock = block_stack;
3077 if (output_bytecode)
3079 bc_expand_end_bindings (vars, mark_ends, dont_jump_in);
3084 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3085 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL
3086 && ! DECL_IN_SYSTEM_HEADER (decl))
3087 warning_with_decl (decl, "unused variable `%s'");
3089 if (thisblock->exit_label)
3091 do_pending_stack_adjust ();
3092 emit_label (thisblock->exit_label);
3095 /* If necessary, make a handler for nonlocal gotos taking
3096 place in the function calls in this block. */
3097 if (function_call_count != thisblock->data.block.function_call_count
3099 /* Make handler for outermost block
3100 if there were any nonlocal gotos to this function. */
3101 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3102 /* Make handler for inner block if it has something
3103 special to do when you jump out of it. */
3104 : (thisblock->data.block.cleanups != 0
3105 || thisblock->data.block.stack_level != 0)))
3108 rtx afterward = gen_label_rtx ();
3109 rtx handler_label = gen_label_rtx ();
3110 rtx save_receiver = gen_reg_rtx (Pmode);
3112 /* Don't let jump_optimize delete the handler. */
3113 LABEL_PRESERVE_P (handler_label) = 1;
3115 /* Record the handler address in the stack slot for that purpose,
3116 during this block, saving and restoring the outer value. */
3117 if (thisblock->next != 0)
3119 emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
3120 emit_insn_before (gen_move_insn (save_receiver,
3121 nonlocal_goto_handler_slot),
3122 thisblock->data.block.first_insn);
3124 emit_insn_before (gen_move_insn (nonlocal_goto_handler_slot,
3125 gen_rtx (LABEL_REF, Pmode,
3127 thisblock->data.block.first_insn);
3129 /* Jump around the handler; it runs only when specially invoked. */
3130 emit_jump (afterward);
3131 emit_label (handler_label);
3133 #ifdef HAVE_nonlocal_goto
3134 if (! HAVE_nonlocal_goto)
3136 /* First adjust our frame pointer to its actual value. It was
3137 previously set to the start of the virtual area corresponding to
3138 the stacked variables when we branched here and now needs to be
3139 adjusted to the actual hardware fp value.
3141 Assignments to virtual registers are converted by
3142 instantiate_virtual_regs into the corresponding assignment
3143 to the underlying register (fp in this case) that makes
3144 the original assignment true.
3145 So the following insn will actually be
3146 decrementing fp by STARTING_FRAME_OFFSET. */
3147 emit_move_insn (virtual_stack_vars_rtx, frame_pointer_rtx);
3149 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3150 if (fixed_regs[ARG_POINTER_REGNUM])
3152 #ifdef ELIMINABLE_REGS
3153 /* If the argument pointer can be eliminated in favor of the
3154 frame pointer, we don't need to restore it. We assume here
3155 that if such an elimination is present, it can always be used.
3156 This is the case on all known machines; if we don't make this
3157 assumption, we do unnecessary saving on many machines. */
3158 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3161 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
3162 if (elim_regs[i].from == ARG_POINTER_REGNUM
3163 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3166 if (i == sizeof elim_regs / sizeof elim_regs [0])
3169 /* Now restore our arg pointer from the address at which it
3170 was saved in our stack frame.
3171 If there hasn't been space allocated for it yet, make
3173 if (arg_pointer_save_area == 0)
3174 arg_pointer_save_area
3175 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3176 emit_move_insn (virtual_incoming_args_rtx,
3177 /* We need a pseudo here, or else
3178 instantiate_virtual_regs_1 complains. */
3179 copy_to_reg (arg_pointer_save_area));
3184 /* The handler expects the desired label address in the static chain
3185 register. It tests the address and does an appropriate jump
3186 to whatever label is desired. */
3187 for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
3188 /* Skip any labels we shouldn't be able to jump to from here. */
3189 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3191 rtx not_this = gen_label_rtx ();
3192 rtx this = gen_label_rtx ();
3193 do_jump_if_equal (static_chain_rtx,
3194 gen_rtx (LABEL_REF, Pmode, DECL_RTL (TREE_VALUE (link))),
3196 emit_jump (not_this);
3198 expand_goto (TREE_VALUE (link));
3199 emit_label (not_this);
3201 /* If label is not recognized, abort. */
3202 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0,
3204 emit_label (afterward);
3207 /* Don't allow jumping into a block that has cleanups or a stack level. */
3209 || thisblock->data.block.stack_level != 0
3210 || thisblock->data.block.cleanups != 0)
3212 struct label_chain *chain;
3214 /* Any labels in this block are no longer valid to go to.
3215 Mark them to cause an error message. */
3216 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3218 DECL_TOO_LATE (chain->label) = 1;
3219 /* If any goto without a fixup came to this label,
3220 that must be an error, because gotos without fixups
3221 come from outside all saved stack-levels and all cleanups. */
3222 if (TREE_ADDRESSABLE (chain->label))
3223 error_with_decl (chain->label,
3224 "label `%s' used before containing binding contour");
3228 /* Restore stack level in effect before the block
3229 (only if variable-size objects allocated). */
3230 /* Perform any cleanups associated with the block. */
3232 if (thisblock->data.block.stack_level != 0
3233 || thisblock->data.block.cleanups != 0)
3235 /* Don't let cleanups affect ({...}) constructs. */
3236 int old_expr_stmts_for_value = expr_stmts_for_value;
3237 rtx old_last_expr_value = last_expr_value;
3238 tree old_last_expr_type = last_expr_type;
3239 expr_stmts_for_value = 0;
3241 /* Do the cleanups. */
3242 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE);
3243 do_pending_stack_adjust ();
3245 expr_stmts_for_value = old_expr_stmts_for_value;
3246 last_expr_value = old_last_expr_value;
3247 last_expr_type = old_last_expr_type;
3249 /* Restore the stack level. */
3251 if (thisblock->data.block.stack_level != 0)
3253 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3254 thisblock->data.block.stack_level, NULL_RTX);
3255 if (nonlocal_goto_handler_slot != 0)
3256 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3260 /* Any gotos out of this block must also do these things.
3261 Also report any gotos with fixups that came to labels in this
3263 fixup_gotos (thisblock,
3264 thisblock->data.block.stack_level,
3265 thisblock->data.block.cleanups,
3266 thisblock->data.block.first_insn,
3270 /* Mark the beginning and end of the scope if requested.
3271 We do this now, after running cleanups on the variables
3272 just going out of scope, so they are in scope for their cleanups. */
3275 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3277 /* Get rid of the beginning-mark if we don't make an end-mark. */
3278 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3280 /* If doing stupid register allocation, make sure lives of all
3281 register variables declared here extend thru end of scope. */
3284 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3286 rtx rtl = DECL_RTL (decl);
3287 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
3291 /* Restore block_stack level for containing block. */
3293 stack_block_stack = thisblock->data.block.innermost_stack_block;
3294 POPSTACK (block_stack);
3296 /* Pop the stack slot nesting and free any slots at this level. */
3301 /* End a binding contour.
3302 VARS is the chain of VAR_DECL nodes for the variables bound
3303 in this contour. MARK_ENDS is nonzero if we should put a note
3304 at the beginning and end of this binding contour.
3305 DONT_JUMP_IN is nonzero if it is not valid to jump into this
3309 bc_expand_end_bindings (vars, mark_ends, dont_jump_in)
3314 struct nesting *thisbind = nesting_stack;
3318 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3319 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL)
3320 warning_with_decl (decl, "unused variable `%s'");
3322 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisbind->exit_label));
3324 /* Pop block/bindings off stack */
3325 POPSTACK (nesting_stack);
3326 POPSTACK (block_stack);
3329 /* Generate RTL for the automatic variable declaration DECL.
3330 (Other kinds of declarations are simply ignored if seen here.)
3331 CLEANUP is an expression to be executed at exit from this binding contour;
3332 for example, in C++, it might call the destructor for this variable.
3334 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
3335 either before or after calling `expand_decl' but before compiling
3336 any subsequent expressions. This is because CLEANUP may be expanded
3337 more than once, on different branches of execution.
3338 For the same reason, CLEANUP may not contain a CALL_EXPR
3339 except as its topmost node--else `preexpand_calls' would get confused.
3341 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3342 that is not associated with any particular variable.
3344 There is no special support here for C++ constructors.
3345 They should be handled by the proper code in DECL_INITIAL. */
3351 struct nesting *thisblock = block_stack;
3354 if (output_bytecode)
3356 bc_expand_decl (decl, 0);
3360 type = TREE_TYPE (decl);
3362 /* Only automatic variables need any expansion done.
3363 Static and external variables, and external functions,
3364 will be handled by `assemble_variable' (called from finish_decl).
3365 TYPE_DECL and CONST_DECL require nothing.
3366 PARM_DECLs are handled in `assign_parms'. */
3368 if (TREE_CODE (decl) != VAR_DECL)
3370 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3373 /* Create the RTL representation for the variable. */
3375 if (type == error_mark_node)
3376 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
3377 else if (DECL_SIZE (decl) == 0)
3378 /* Variable with incomplete type. */
3380 if (DECL_INITIAL (decl) == 0)
3381 /* Error message was already done; now avoid a crash. */
3382 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
3384 /* An initializer is going to decide the size of this array.
3385 Until we know the size, represent its address with a reg. */
3386 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
3388 else if (DECL_MODE (decl) != BLKmode
3389 /* If -ffloat-store, don't put explicit float vars
3391 && !(flag_float_store
3392 && TREE_CODE (type) == REAL_TYPE)
3393 && ! TREE_THIS_VOLATILE (decl)
3394 && ! TREE_ADDRESSABLE (decl)
3395 && (DECL_REGISTER (decl) || ! obey_regdecls))
3397 /* Automatic variable that can go in a register. */
3398 enum machine_mode reg_mode = DECL_MODE (decl);
3399 int unsignedp = TREE_UNSIGNED (type);
3401 if (TREE_CODE (type) == INTEGER_TYPE || TREE_CODE (type) == ENUMERAL_TYPE
3402 || TREE_CODE (type) == BOOLEAN_TYPE || TREE_CODE (type) == CHAR_TYPE
3403 || TREE_CODE (type) == REAL_TYPE || TREE_CODE (type) == POINTER_TYPE
3404 || TREE_CODE (type) == OFFSET_TYPE)
3406 PROMOTE_MODE (reg_mode, unsignedp, type);
3409 if (TREE_CODE (type) == COMPLEX_TYPE)
3411 rtx realpart, imagpart;
3412 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (type));
3414 /* For a complex type variable, make a CONCAT of two pseudos
3415 so that the real and imaginary parts
3416 can be allocated separately. */
3417 realpart = gen_reg_rtx (partmode);
3418 REG_USERVAR_P (realpart) = 1;
3419 imagpart = gen_reg_rtx (partmode);
3420 REG_USERVAR_P (imagpart) = 1;
3421 DECL_RTL (decl) = gen_rtx (CONCAT, reg_mode, realpart, imagpart);
3425 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3426 if (TREE_CODE (type) == POINTER_TYPE)
3427 mark_reg_pointer (DECL_RTL (decl));
3428 REG_USERVAR_P (DECL_RTL (decl)) = 1;
3431 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST)
3433 /* Variable of fixed size that goes on the stack. */
3437 /* If we previously made RTL for this decl, it must be an array
3438 whose size was determined by the initializer.
3439 The old address was a register; set that register now
3440 to the proper address. */
3441 if (DECL_RTL (decl) != 0)
3443 if (GET_CODE (DECL_RTL (decl)) != MEM
3444 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3446 oldaddr = XEXP (DECL_RTL (decl), 0);
3450 = assign_stack_temp (DECL_MODE (decl),
3451 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
3452 + BITS_PER_UNIT - 1)
3456 /* Set alignment we actually gave this decl. */
3457 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3458 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3462 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3463 if (addr != oldaddr)
3464 emit_move_insn (oldaddr, addr);
3467 /* If this is a memory ref that contains aggregate components,
3468 mark it as such for cse and loop optimize. */
3469 MEM_IN_STRUCT_P (DECL_RTL (decl))
3470 = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
3471 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
3472 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
3473 || TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
3475 /* If this is in memory because of -ffloat-store,
3476 set the volatile bit, to prevent optimizations from
3477 undoing the effects. */
3478 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
3479 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3483 /* Dynamic-size object: must push space on the stack. */
3487 /* Record the stack pointer on entry to block, if have
3488 not already done so. */
3489 if (thisblock->data.block.stack_level == 0)
3491 do_pending_stack_adjust ();
3492 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3493 &thisblock->data.block.stack_level,
3494 thisblock->data.block.first_insn);
3495 stack_block_stack = thisblock;
3498 /* Compute the variable's size, in bytes. */
3499 size = expand_expr (size_binop (CEIL_DIV_EXPR,
3501 size_int (BITS_PER_UNIT)),
3502 NULL_RTX, VOIDmode, 0);
3505 /* This is equivalent to calling alloca. */
3506 current_function_calls_alloca = 1;
3508 /* Allocate space on the stack for the variable. */
3509 address = allocate_dynamic_stack_space (size, NULL_RTX,
3512 if (nonlocal_goto_handler_slot != 0)
3513 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
3515 /* Reference the variable indirect through that rtx. */
3516 DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);
3518 /* If this is a memory ref that contains aggregate components,
3519 mark it as such for cse and loop optimize. */
3520 MEM_IN_STRUCT_P (DECL_RTL (decl))
3521 = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
3522 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
3523 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
3524 || TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
3526 /* Indicate the alignment we actually gave this variable. */
3527 #ifdef STACK_BOUNDARY
3528 DECL_ALIGN (decl) = STACK_BOUNDARY;
3530 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3534 if (TREE_THIS_VOLATILE (decl))
3535 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3536 #if 0 /* A variable is not necessarily unchanging
3537 just because it is const. RTX_UNCHANGING_P
3538 means no change in the function,
3539 not merely no change in the variable's scope.
3540 It is correct to set RTX_UNCHANGING_P if the variable's scope
3541 is the whole function. There's no convenient way to test that. */
3542 if (TREE_READONLY (decl))
3543 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
3546 /* If doing stupid register allocation, make sure life of any
3547 register variable starts here, at the start of its scope. */
3550 use_variable (DECL_RTL (decl));
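/* For example (illustrative only): for

	void f (int n) { char buf[n]; ... }

   `buf' takes the dynamic-size path above: the block's entry stack
   pointer is saved with emit_stack_save, the size in bytes is computed,
   allocate_dynamic_stack_space supplies the address, and DECL_RTL (buf)
   becomes a MEM referencing it.  The saved stack level is restored by
   expand_end_bindings when the block ends.  */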
3554 /* Generate code for the automatic variable declaration DECL. For
3555 most variables this just means we give it a stack offset. The
3556 compiler sometimes emits cleanups without variables and we will
3557 have to deal with those too. */
3560 bc_expand_decl (decl, cleanup)
3568 /* A cleanup with no variable. */
3575 /* Only auto variables need any work. */
3576 if (TREE_CODE (decl) != VAR_DECL || TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3579 type = TREE_TYPE (decl);
3581 if (type == error_mark_node)
3582 DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
3584 else if (DECL_SIZE (decl) == 0)
3586 /* Variable with incomplete type. The stack offset herein will be
3587 fixed later in expand_decl_init (). */
3588 DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
3590 else if (TREE_CONSTANT (DECL_SIZE (decl)))
3592 DECL_RTL (decl) = bc_allocate_local (TREE_INT_CST_LOW (DECL_SIZE (decl)) / BITS_PER_UNIT,
3596 DECL_RTL (decl) = bc_allocate_variable_array (DECL_SIZE (decl));
3599 /* Emit code to perform the initialization of a declaration DECL. */
3602 expand_decl_init (decl)
3605 int was_used = TREE_USED (decl);
3607 /* If this is a CONST_DECL, we don't have to generate any code, but
3608 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3609 to be set while in the obstack containing the constant. If we don't
3610 do this, we can lose if we have functions nested three deep and the middle
3611 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3612 the innermost function is the first to expand that STRING_CST. */
3613 if (TREE_CODE (decl) == CONST_DECL)
3615 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3616 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3617 EXPAND_INITIALIZER);
3621 if (TREE_STATIC (decl))
3624 /* Compute and store the initial value now. */
3626 if (DECL_INITIAL (decl) == error_mark_node)
3628 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3629 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3630 || code == POINTER_TYPE)
3631 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3635 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3637 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3638 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3642 /* Don't let the initialization count as "using" the variable. */
3643 TREE_USED (decl) = was_used;
3645 /* Free any temporaries we made while initializing the decl. */
3649 /* Expand initialization for variable-sized types. Allocate array
3650 using newlocalSI and set local variable, which is a pointer to the
3653 bc_expand_variable_local_init (decl)
3656 /* Evaluate size expression and coerce to SI */
3657 bc_expand_expr (DECL_SIZE (decl));
3659 /* Type sizes are always (?) of TREE_CODE INTEGER_CST, so
3660 no coercion is necessary (?) */
3662 /* emit_typecode_conversion (preferred_typecode (TYPE_MODE (DECL_SIZE (decl)),
3663 TREE_UNSIGNED (DECL_SIZE (decl))), SIcode); */
3665 /* Emit code to allocate array */
3666 bc_emit_instruction (newlocalSI);
3668 /* Store array pointer in local variable. This is the only instance
3669 where we actually want the address of the pointer to the
3670 variable-size block, rather than the pointer itself. We avoid
3671 using expand_address() since that would cause the pointer to be
3672 pushed rather than its address. Hence the hard-coded reference;
3673 notice also that the variable is always local (no global
3674 variable-size type variables). */
3676 bc_load_localaddr (DECL_RTL (decl));
3677 bc_emit_instruction (storeP);
3681 /* Emit code to initialize a declaration. */
3683 bc_expand_decl_init (decl)
3686 int org_stack_depth;
3688 /* Static initializers are handled elsewhere. */
3690 if (TREE_STATIC (decl))
3693 /* Remember the original stack depth. */
3694 org_stack_depth = stack_depth;
3696 /* If the type is variable-size, we first create its space (we ASSUME
3697 it CAN'T be static). We do this regardless of whether there's an
3698 initializer assignment or not. */
3700 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
3701 bc_expand_variable_local_init (decl);
3703 /* Expand initializer assignment */
3704 if (DECL_INITIAL (decl) == error_mark_node)
3706 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3708 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3709 || code == POINTER_TYPE)
3711 expand_assignment (TREE_TYPE (decl), decl, 0, 0);
3713 else if (DECL_INITIAL (decl))
3714 expand_assignment (TREE_TYPE (decl), decl, 0, 0);
3716 /* Restore stack depth */
3717 if (org_stack_depth > stack_depth)
3720 bc_adjust_stack (stack_depth - org_stack_depth);
3724 /* CLEANUP is an expression to be executed at exit from this binding contour;
3725 for example, in C++, it might call the destructor for this variable.
3727 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
3728 either before or after calling `expand_decl' but before compiling
3729 any subsequent expressions. This is because CLEANUP may be expanded
3730 more than once, on different branches of execution.
3731 For the same reason, CLEANUP may not contain a CALL_EXPR
3732 except as its topmost node--else `preexpand_calls' would get confused.
3734 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3735 that is not associated with any particular variable. */
3738 expand_decl_cleanup (decl, cleanup)
3741 struct nesting *thisblock = block_stack;
3743 /* Error if we are not in any block. */
3747 /* Record the cleanup if there is one. */
3751 thisblock->data.block.cleanups
3752 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3753 /* If this block has a cleanup, it belongs in stack_block_stack. */
3754 stack_block_stack = thisblock;
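/* Illustrative sketch (not part of the original sources): a C++ front end
   giving a block-scope object a destructor might, after expanding the
   declaration itself, register the destructor call as a cleanup:

	expand_decl_cleanup (obj_decl, destructor_call_expr);

   so that expand_end_bindings, and any goto or return that leaves the
   block, expands destructor_call_expr via expand_cleanups.  Both trees
   here are placeholders for what the front end builds.  */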
3759 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
3760 DECL_ELTS is the list of elements that belong to DECL's type.
3761 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
3764 expand_anon_union_decl (decl, cleanup, decl_elts)
3765 tree decl, cleanup, decl_elts;
3767 struct nesting *thisblock = block_stack;
3770 expand_decl (decl, cleanup);
3771 x = DECL_RTL (decl);
3775 tree decl_elt = TREE_VALUE (decl_elts);
3776 tree cleanup_elt = TREE_PURPOSE (decl_elts);
3777 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
3779 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
3780 instead create a new MEM rtx with the proper mode. */
3781 if (GET_CODE (x) == MEM)
3783 if (mode == GET_MODE (x))
3784 DECL_RTL (decl_elt) = x;
3787 DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0)));
3788 MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
3789 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
3792 else if (GET_CODE (x) == REG)
3794 if (mode == GET_MODE (x))
3795 DECL_RTL (decl_elt) = x;
3797 DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0);
3802 /* Record the cleanup if there is one. */
3805 thisblock->data.block.cleanups
3806 = temp_tree_cons (decl_elt, cleanup_elt,
3807 thisblock->data.block.cleanups);
3809 decl_elts = TREE_CHAIN (decl_elts);
3813 /* Expand a list of cleanups LIST.
3814 Elements may be expressions or may be nested lists.
3816 If DONT_DO is nonnull, then any list-element
3817 whose TREE_PURPOSE matches DONT_DO is omitted.
3818 This is sometimes used to avoid a cleanup associated with
3819 a value that is being returned out of the scope. */
3822 expand_cleanups (list, dont_do)
3827 for (tail = list; tail; tail = TREE_CHAIN (tail))
3828 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
3830 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3831 expand_cleanups (TREE_VALUE (tail), dont_do);
3834 /* Cleanups may be run multiple times. For example,
3835 when exiting a binding contour, we expand the
3836 cleanups associated with that contour. When a goto
3837 within that binding contour has a target outside that
3838 contour, it will expand all cleanups from its scope to
3839 the target. Though the cleanups are expanded multiple
3840 times, the control paths are non-overlapping so the
3841 cleanups will not be executed twice. */
3842 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
3848 /* Move all cleanups from the current block_stack
3849 to the containing block_stack, where they are assumed to
3850 have been created. If anything can cause a temporary to
3851 be created, but not expanded for more than one level of
3852 block_stacks, then this code will have to change. */
3857 struct nesting *block = block_stack;
3858 struct nesting *outer = block->next;
3860 outer->data.block.cleanups
3861 = chainon (block->data.block.cleanups,
3862 outer->data.block.cleanups);
3863 block->data.block.cleanups = 0;
3867 last_cleanup_this_contour ()
3869 if (block_stack == 0)
3872 return block_stack->data.block.cleanups;
3875 /* Return 1 if there are any pending cleanups at this point.
3876 If THIS_CONTOUR is nonzero, check the current contour as well.
3877 Otherwise, look only at the contours that enclose this one. */
3880 any_pending_cleanups (this_contour)
3883 struct nesting *block;
3885 if (block_stack == 0)
3888 if (this_contour && block_stack->data.block.cleanups != NULL)
3890 if (block_stack->data.block.cleanups == 0
3891 && (block_stack->data.block.outer_cleanups == 0
3893 || block_stack->data.block.outer_cleanups == empty_cleanup_list
3898 for (block = block_stack->next; block; block = block->next)
3899 if (block->data.block.cleanups != 0)
3905 /* Enter a case (Pascal) or switch (C) statement.
3906 Push a block onto case_stack and nesting_stack
3907 to accumulate the case-labels that are seen
3908 and to record the labels generated for the statement.
3910 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
3911 Otherwise, this construct is transparent for `exit_something'.
3913 EXPR is the index-expression to be dispatched on.
3914 TYPE is its nominal type. We could simply convert EXPR to this type,
3915 but instead we take short cuts. */
3918 expand_start_case (exit_flag, expr, type, printname)
3924 register struct nesting *thiscase = ALLOC_NESTING ();
3926 /* Make an entry on case_stack for the case we are entering. */
3928 thiscase->next = case_stack;
3929 thiscase->all = nesting_stack;
3930 thiscase->depth = ++nesting_depth;
3931 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
3932 thiscase->data.case_stmt.case_list = 0;
3933 thiscase->data.case_stmt.index_expr = expr;
3934 thiscase->data.case_stmt.nominal_type = type;
3935 thiscase->data.case_stmt.default_label = 0;
3936 thiscase->data.case_stmt.num_ranges = 0;
3937 thiscase->data.case_stmt.printname = printname;
3938 thiscase->data.case_stmt.seenlabel = 0;
3939 case_stack = thiscase;
3940 nesting_stack = thiscase;
3942 if (output_bytecode)
3944 bc_expand_start_case (thiscase, expr, type, printname);
3948 do_pending_stack_adjust ();
3950 /* Make sure case_stmt.start points to something that won't
3951 need any transformation before expand_end_case. */
3952 if (GET_CODE (get_last_insn ()) != NOTE)
3953 emit_note (NULL_PTR, NOTE_INSN_DELETED);
3955 thiscase->data.case_stmt.start = get_last_insn ();
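/* Illustrative call sequence (a sketch, not part of the original sources):
   a C front end expanding `switch (e) { case 1: ...; default: ...; }'
   would do roughly

	expand_start_case (1, e, TREE_TYPE (e), "switch statement");
	pushcase (case1_value, convert, case1_label, &duplicate);
	  ... expand the statements after `case 1:' ...
	pushcase (NULL_TREE, convert, default_label, &duplicate);
	  ... expand the statements after `default:' ...
	expand_end_case (e);

   expand_end_case appears later in this file; the value, label and
   `duplicate' variables stand for what the front end supplies, and
   `convert' is the front end's tree conversion function.  */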
3959 /* Enter a case statement. It is assumed that the caller has pushed
3960 the current context onto the case stack. */
3962 bc_expand_start_case (thiscase, expr, type, printname)
3963 struct nesting *thiscase;
3968 bc_expand_expr (expr);
3969 bc_expand_conversion (TREE_TYPE (expr), type);
3971 /* For cases, the skip is a place we jump to that's emitted after
3972 the size of the jump table is known. */
3974 thiscase->data.case_stmt.skip_label = gen_label_rtx ();
3975 bc_emit_bytecode (jump);
3976 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));
3978 #ifdef DEBUG_PRINT_CODE
3979 fputc ('\n', stderr);
3984 /* Start a "dummy case statement" within which case labels are invalid
3985 and are not connected to any larger real case statement.
3986 This can be used if you don't want to let a case statement jump
3987 into the middle of certain kinds of constructs. */
3990 expand_start_case_dummy ()
3992 register struct nesting *thiscase = ALLOC_NESTING ();
3994 /* Make an entry on case_stack for the dummy. */
3996 thiscase->next = case_stack;
3997 thiscase->all = nesting_stack;
3998 thiscase->depth = ++nesting_depth;
3999 thiscase->exit_label = 0;
4000 thiscase->data.case_stmt.case_list = 0;
4001 thiscase->data.case_stmt.start = 0;
4002 thiscase->data.case_stmt.nominal_type = 0;
4003 thiscase->data.case_stmt.default_label = 0;
4004 thiscase->data.case_stmt.num_ranges = 0;
4005 case_stack = thiscase;
4006 nesting_stack = thiscase;
4009 /* End a dummy case statement. */
4012 expand_end_case_dummy ()
4014 POPSTACK (case_stack);
4017 /* Return the data type of the index-expression
4018 of the innermost case statement, or null if none. */
4021 case_index_expr_type ()
4024 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4028 /* Accumulate one case or default label inside a case or switch statement.
4029 VALUE is the value of the case (a null pointer, for a default label).
4030 The function CONVERTER, when applied to arguments T and V,
4031 converts the value V to the type T.
4033 If not currently inside a case or switch statement, return 1 and do
4034 nothing. The caller will print a language-specific error message.
4035 If VALUE is a duplicate or overlaps, return 2 and do nothing
4036 except store the (first) duplicate node in *DUPLICATE.
4037 If VALUE is out of range, return 3 and do nothing.
4038 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4039 Return 0 on success.
4041 Extended to handle range statements. */
4044 pushcase (value, converter, label, duplicate)
4045 register tree value;
4046 tree (*converter) PROTO((tree, tree));
4047 register tree label;
4050 register struct case_node **l;
4051 register struct case_node *n;
4055 if (output_bytecode)
4056 return bc_pushcase (value, label);
4058 /* Fail if not inside a real case statement. */
4059 if (! (case_stack && case_stack->data.case_stmt.start))
4062 if (stack_block_stack
4063 && stack_block_stack->depth > case_stack->depth)
4066 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4067 nominal_type = case_stack->data.case_stmt.nominal_type;
4069 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4070 if (index_type == error_mark_node)
4073 /* Convert VALUE to the type in which the comparisons are nominally done. */
4075 value = (*converter) (nominal_type, value);
4077 /* If this is the first label, warn if any insns have been emitted. */
4078 if (case_stack->data.case_stmt.seenlabel == 0)
4081 for (insn = case_stack->data.case_stmt.start;
4083 insn = NEXT_INSN (insn))
4085 if (GET_CODE (insn) == CODE_LABEL)
4087 if (GET_CODE (insn) != NOTE
4088 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4090 warning ("unreachable code at beginning of %s",
4091 case_stack->data.case_stmt.printname);
4096 case_stack->data.case_stmt.seenlabel = 1;
4098 /* Fail if this value is out of range for the actual type of the index
4099 (which may be narrower than NOMINAL_TYPE). */
4100 if (value != 0 && ! int_fits_type_p (value, index_type))
4103 /* Fail if this is a duplicate or overlaps another entry. */
4106 if (case_stack->data.case_stmt.default_label != 0)
4108 *duplicate = case_stack->data.case_stmt.default_label;
4111 case_stack->data.case_stmt.default_label = label;
4115 /* Find the elt in the chain before which to insert the new value,
4116 to keep the chain sorted in increasing order.
4117 But report an error if this element is a duplicate. */
4118 for (l = &case_stack->data.case_stmt.case_list;
4119 /* Keep going past elements distinctly less than VALUE. */
4120 *l != 0 && tree_int_cst_lt ((*l)->high, value);
4125 /* Element we will insert before must be distinctly greater;
4126 overlap means error. */
4127 if (! tree_int_cst_lt (value, (*l)->low))
4129 *duplicate = (*l)->code_label;
4134 /* Add this label to the chain, and succeed.
4135 Copy VALUE so it is on temporary rather than momentary
4136 obstack and will thus survive till the end of the case statement. */
4137 n = (struct case_node *) oballoc (sizeof (struct case_node));
4140 n->high = n->low = copy_node (value);
4141 n->code_label = label;
4145 expand_label (label);
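/* The insertion loop above is the classic pointer-to-pointer scan of a
   sorted singly linked list.  A minimal standalone sketch of the same
   idiom, using plain integers in place of tree constants (the types and
   names here are illustrative only):  */
#if 0
#include <stdlib.h>

struct range_node { int low, high; struct range_node *right; };

/* Insert [LOW, HIGH] into the list at *HEAD, keeping it sorted by low
   bound; return nonzero if the new range overlaps an existing entry,
   mirroring the duplicate check in pushcase.  */
static int
sorted_insert (struct range_node **head, int low, int high)
{
  struct range_node **l, *n;

  /* Skip entries that lie entirely below the new range.  */
  for (l = head; *l != 0 && (*l)->high < low; l = &(*l)->right)
    ;

  /* The entry we would insert before must lie entirely above it.  */
  if (*l != 0 && (*l)->low <= high)
    return 1;

  n = (struct range_node *) malloc (sizeof *n);
  n->low = low;
  n->high = high;
  n->right = *l;
  *l = n;
  return 0;
}
#endif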
4149 /* Like pushcase but this case applies to all values
4150 between VALUE1 and VALUE2 (inclusive).
4151 The return value is the same as that of pushcase
4152 but there is one additional error code:
4153 4 means the specified range was empty. */
4156 pushcase_range (value1, value2, converter, label, duplicate)
4157 register tree value1, value2;
4158 tree (*converter) PROTO((tree, tree));
4159 register tree label;
4162 register struct case_node **l;
4163 register struct case_node *n;
4167 /* Fail if not inside a real case statement. */
4168 if (! (case_stack && case_stack->data.case_stmt.start))
4171 if (stack_block_stack
4172 && stack_block_stack->depth > case_stack->depth)
4175 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4176 nominal_type = case_stack->data.case_stmt.nominal_type;
4178 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4179 if (index_type == error_mark_node)
4182 /* If this is the first label, warn if any insns have been emitted. */
4183 if (case_stack->data.case_stmt.seenlabel == 0)
4186 for (insn = case_stack->data.case_stmt.start;
4188 insn = NEXT_INSN (insn))
4190 if (GET_CODE (insn) == CODE_LABEL)
4192 if (GET_CODE (insn) != NOTE
4193 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4195 warning ("unreachable code at beginning of %s",
4196 case_stack->data.case_stmt.printname);
4201 case_stack->data.case_stmt.seenlabel = 1;
4203 /* Convert VALUEs to type in which the comparisons are nominally done. */
4204 if (value1 == 0) /* Negative infinity. */
4205 value1 = TYPE_MIN_VALUE(index_type);
4206 value1 = (*converter) (nominal_type, value1);
4208 if (value2 == 0) /* Positive infinity. */
4209 value2 = TYPE_MAX_VALUE(index_type);
4210 value2 = (*converter) (nominal_type, value2);
4212 /* Fail if these values are out of range. */
4213 if (! int_fits_type_p (value1, index_type))
4216 if (! int_fits_type_p (value2, index_type))
4219 /* Fail if the range is empty. */
4220 if (tree_int_cst_lt (value2, value1))
4223 /* If the bounds are equal, turn this into the one-value case. */
4224 if (tree_int_cst_equal (value1, value2))
4225 return pushcase (value1, converter, label, duplicate);
4227 /* Find the elt in the chain before which to insert the new value,
4228 to keep the chain sorted in increasing order.
4229 But report an error if this element is a duplicate. */
4230 for (l = &case_stack->data.case_stmt.case_list;
4231 /* Keep going past elements distinctly less than this range. */
4232 *l != 0 && tree_int_cst_lt ((*l)->high, value1);
4237 /* Element we will insert before must be distinctly greater;
4238 overlap means error. */
4239 if (! tree_int_cst_lt (value2, (*l)->low))
4241 *duplicate = (*l)->code_label;
4246 /* Add this label to the chain, and succeed.
4247 Copy VALUE1, VALUE2 so they are on temporary rather than momentary
4248 obstack and will thus survive till the end of the case statement. */
4250 n = (struct case_node *) oballoc (sizeof (struct case_node));
4253 n->low = copy_node (value1);
4254 n->high = copy_node (value2);
4255 n->code_label = label;
4258 expand_label (label);
4260 case_stack->data.case_stmt.num_ranges++;
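/* For example, a GNU C range label such as `case 1 ... 3:' would be
   pushed with VALUE1 = 1 and VALUE2 = 3; passing a null VALUE1 or
   VALUE2 stands for the minimum or maximum value of the index type, so
   open-ended ranges can be pushed as well.  */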
4266 /* Accumulate one case or default label; VALUE is the value of the
4267 case, or nil for a default label. If not currently inside a case,
4268 return 1 and do nothing. If VALUE is a duplicate or overlaps, return
4269 2 and do nothing. If VALUE is out of range, return 3 and do nothing.
4270 Return 0 on success. This function is a leftover from the earlier
4271 bytecode compiler, which was based on gcc 1.37. It should be
4272 merged into pushcase. */
4275 bc_pushcase (value, label)
4279 struct nesting *thiscase = case_stack;
4280 struct case_node *case_label, *new_label;
4285 /* Fail if duplicate, overlap, or out of type range. */
4288 value = convert (thiscase->data.case_stmt.nominal_type, value);
4289 if (! int_fits_type_p (value, thiscase->data.case_stmt.nominal_type))
4292 for (case_label = thiscase->data.case_stmt.case_list;
4293 case_label->left; case_label = case_label->left)
4294 if (! tree_int_cst_lt (case_label->left->high, value))
4297 if (case_label != thiscase->data.case_stmt.case_list
4298 && ! tree_int_cst_lt (case_label->high, value)
4299 || case_label->left && ! tree_int_cst_lt (value, case_label->left->low))
4302 new_label = (struct case_node *) oballoc (sizeof (struct case_node));
4303 new_label->low = new_label->high = copy_node (value);
4304 new_label->code_label = label;
4305 new_label->left = case_label->left;
4307 case_label->left = new_label;
4308 thiscase->data.case_stmt.num_ranges++;
4312 if (thiscase->data.case_stmt.default_label)
4314 thiscase->data.case_stmt.default_label = label;
4317 expand_label (label);
4321 /* Called when the index of a switch statement is an enumerated type
4322 and there is no default label.
4324 Checks that all enumeration literals are covered by the case
4325 expressions of a switch. Also, warn if there are any extra
4326 switch cases that are *not* elements of the enumerated type.
4328 If all enumeration literals were covered by the case expressions,
4329 turn one of the expressions into the default expression since it should
4330 not be possible to fall through such a switch. */
4333 check_for_full_enumeration_handling (type)
4336 register struct case_node *n;
4337 register struct case_node **l;
4338 register tree chain;
4341 if (output_bytecode)
4343 bc_check_for_full_enumeration_handling (type);
4347 /* The time complexity of this loop is currently O(N * M), with
4348 N being the number of members in the enumerated type, and
4349 M being the number of case expressions in the switch. */
4351 for (chain = TYPE_VALUES (type);
4353 chain = TREE_CHAIN (chain))
4355 /* Find a match between enumeral and case expression, if possible.
4356 Quit looking when we've gone too far (since case expressions
4357 are kept sorted in ascending order). Warn about enumerators not
4358 handled in the switch statement case expression list. */
4360 for (n = case_stack->data.case_stmt.case_list;
4361 n && tree_int_cst_lt (n->high, TREE_VALUE (chain));
4365 if (!n || tree_int_cst_lt (TREE_VALUE (chain), n->low))
4368 warning ("enumeration value `%s' not handled in switch",
4369 IDENTIFIER_POINTER (TREE_PURPOSE (chain)));
4374 /* Now we go the other way around; we warn if there are case
4375 expressions that don't correspond to enumerators. This can
4376 occur since C and C++ don't enforce type-checking of
4377 assignments to enumeration variables. */
4380 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
4382 for (chain = TYPE_VALUES (type);
4383 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
4384 chain = TREE_CHAIN (chain))
4389 if (TYPE_NAME (type) == 0)
4390 warning ("case value `%d' not in enumerated type",
4391 TREE_INT_CST_LOW (n->low));
4393 warning ("case value `%d' not in enumerated type `%s'",
4394 TREE_INT_CST_LOW (n->low),
4395 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4398 : DECL_NAME (TYPE_NAME (type))));
4400 if (!tree_int_cst_equal (n->low, n->high))
4402 for (chain = TYPE_VALUES (type);
4403 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
4404 chain = TREE_CHAIN (chain))
4409 if (TYPE_NAME (type) == 0)
4410 warning ("case value `%d' not in enumerated type",
4411 TREE_INT_CST_LOW (n->high));
4413 warning ("case value `%d' not in enumerated type `%s'",
4414 TREE_INT_CST_LOW (n->high),
4415 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4418 : DECL_NAME (TYPE_NAME (type))));
4424 /* ??? This optimization is disabled because it causes valid programs to
4425 fail. ANSI C does not guarantee that an expression with enum type
4426 will have a value that is the same as one of the enumeration literals. */
4428 /* If all values were found as case labels, make one of them the default
4429 label. Thus, this switch will never fall through. We arbitrarily pick
4430 the last one to make the default since this is likely the most
4431 efficient choice. */
4435 for (l = &case_stack->data.case_stmt.case_list;
4440 case_stack->data.case_stmt.default_label = (*l)->code_label;
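/* As an example of the warnings above: given

	enum color { RED, GREEN, BLUE };

   a switch on a variable of type `enum color' with cases for RED and
   GREEN only, and no default label, draws the warning that `BLUE' is
   not handled in the switch; conversely a `case 7:' in that switch
   would draw the "not in enumerated type" warning, since 7 matches no
   enumerator.  */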
4447 /* Check that all enumeration literals are covered by the case
4448 expressions of a switch. Also warn if there are any cases
4449 that are not elements of the enumerated type. */
4451 bc_check_for_full_enumeration_handling (type)
4454 struct nesting *thiscase = case_stack;
4455 struct case_node *c;
4458 /* Check for enums not handled. */
4459 for (e = TYPE_VALUES (type); e; e = TREE_CHAIN (e))
4461 for (c = thiscase->data.case_stmt.case_list->left;
4462 c && tree_int_cst_lt (c->high, TREE_VALUE (e));
4465 if (! (c && tree_int_cst_equal (c->low, TREE_VALUE (e))))
4466 warning ("enumerated value `%s' not handled in switch",
4467 IDENTIFIER_POINTER (TREE_PURPOSE (e)));
4470 /* Check for cases not in the enumeration. */
4471 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
4473 for (e = TYPE_VALUES (type);
4474 e && !tree_int_cst_equal (c->low, TREE_VALUE (e));
4478 warning ("case value `%d' not in enumerated type `%s'",
4479 TREE_INT_CST_LOW (c->low),
4480 IDENTIFIER_POINTER (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
4482 : DECL_NAME (TYPE_NAME (type))));
4486 /* Terminate a case (Pascal) or switch (C) statement
4487 in which ORIG_INDEX is the expression to be tested.
4488 Generate the code to test it and jump to the right place. */
4491 expand_end_case (orig_index)
4494 tree minval, maxval, range, orig_minval;
4495 rtx default_label = 0;
4496 register struct case_node *n;
4504 register struct nesting *thiscase = case_stack;
4508 if (output_bytecode)
4510 bc_expand_end_case (orig_index);
4514 table_label = gen_label_rtx ();
4515 index_expr = thiscase->data.case_stmt.index_expr;
4516 unsignedp = TREE_UNSIGNED (TREE_TYPE (index_expr));
4518 do_pending_stack_adjust ();
4520 /* An ERROR_MARK occurs for various reasons including invalid data type. */
4521 if (TREE_TYPE (index_expr) != error_mark_node)
4523 /* If switch expression was an enumerated type, check that all
4524 enumeration literals are covered by the cases.
4525 No sense trying this if there's a default case, however. */
4527 if (!thiscase->data.case_stmt.default_label
4528 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
4529 && TREE_CODE (index_expr) != INTEGER_CST)
4530 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
4532 /* If this is the first label, warn if any insns have been emitted. */
4533 if (thiscase->data.case_stmt.seenlabel == 0)
4536 for (insn = get_last_insn ();
4537 insn != case_stack->data.case_stmt.start;
4538 insn = PREV_INSN (insn))
4539 if (GET_CODE (insn) != NOTE
4540 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn))!= USE))
4542 warning ("unreachable code at beginning of %s",
4543 case_stack->data.case_stmt.printname);
4548 /* If we don't have a default-label, create one here,
4549 after the body of the switch. */
4550 if (thiscase->data.case_stmt.default_label == 0)
4552 thiscase->data.case_stmt.default_label
4553 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4554 expand_label (thiscase->data.case_stmt.default_label);
4556 default_label = label_rtx (thiscase->data.case_stmt.default_label);
4558 before_case = get_last_insn ();
4560 /* Simplify the case-list before we count it. */
4561 group_case_nodes (thiscase->data.case_stmt.case_list);
4563 /* Get upper and lower bounds of case values.
4564 Also convert all the case values to the index expr's data type. */
4567 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4569 /* Check low and high label values are integers. */
4570 if (TREE_CODE (n->low) != INTEGER_CST)
4572 if (TREE_CODE (n->high) != INTEGER_CST)
4575 n->low = convert (TREE_TYPE (index_expr), n->low);
4576 n->high = convert (TREE_TYPE (index_expr), n->high);
4578 /* Count the elements and track the largest and smallest
4579 of them (treating them as signed even if they are not). */
4587 if (INT_CST_LT (n->low, minval))
4589 if (INT_CST_LT (maxval, n->high))
4592 /* A range counts double, since it requires two compares. */
4593 if (! tree_int_cst_equal (n->low, n->high))
4597 orig_minval = minval;
4599 /* Compute span of values. */
4601 range = fold (build (MINUS_EXPR, TREE_TYPE (index_expr),
4604 if (count == 0 || TREE_CODE (TREE_TYPE (index_expr)) == ERROR_MARK)
4606 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
4608 emit_jump (default_label);
4611 /* If range of values is much bigger than number of values,
4612 make a sequence of conditional branches instead of a dispatch.
4613 If the switch-index is a constant, do it this way
4614 because we can optimize it. */
4616 #ifndef CASE_VALUES_THRESHOLD
4618 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
4620 /* If machine does not have a case insn that compares the
4621 bounds, this means extra overhead for dispatch tables
4622 which raises the threshold for using them. */
4623 #define CASE_VALUES_THRESHOLD 5
4624 #endif /* HAVE_casesi */
4625 #endif /* CASE_VALUES_THRESHOLD */
4627 else if (TREE_INT_CST_HIGH (range) != 0
4628 || count < CASE_VALUES_THRESHOLD
4629 || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
4631 || TREE_CODE (index_expr) == INTEGER_CST
4632 /* These will reduce to a constant. */
4633 || (TREE_CODE (index_expr) == CALL_EXPR
4634 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
4635 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
4636 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
4637 || (TREE_CODE (index_expr) == COMPOUND_EXPR
4638 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
4640 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4642 /* If the index is a short or char for which we do not have
4643 an insn to handle comparisons directly, convert it to
4644 a full integer now, rather than letting each comparison
4645 generate the conversion. */
4647 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
4648 && (cmp_optab->handlers[(int) GET_MODE(index)].insn_code
4649 == CODE_FOR_nothing))
4651 enum machine_mode wider_mode;
4652 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
4653 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
4654 if (cmp_optab->handlers[(int) wider_mode].insn_code
4655 != CODE_FOR_nothing)
4657 index = convert_to_mode (wider_mode, index, unsignedp);
4663 do_pending_stack_adjust ();
4665 index = protect_from_queue (index, 0);
4666 if (GET_CODE (index) == MEM)
4667 index = copy_to_reg (index);
4668 if (GET_CODE (index) == CONST_INT
4669 || TREE_CODE (index_expr) == INTEGER_CST)
4671 /* Make a tree node with the proper constant value
4672 if we don't already have one. */
4673 if (TREE_CODE (index_expr) != INTEGER_CST)
4676 = build_int_2 (INTVAL (index),
4677 !unsignedp && INTVAL (index) >= 0 ? 0 : -1);
4678 index_expr = convert (TREE_TYPE (index_expr), index_expr);
4681 /* For constant index expressions we need only
4682 issue an unconditional branch to the appropriate
4683 target code. The job of removing any unreachable
4684 code is left to the optimisation phase if the
4685 "-O" option is specified. */
4686 for (n = thiscase->data.case_stmt.case_list;
4690 if (! tree_int_cst_lt (index_expr, n->low)
4691 && ! tree_int_cst_lt (n->high, index_expr))
4695 emit_jump (label_rtx (n->code_label));
4697 emit_jump (default_label);
4701 /* If the index expression is not constant we generate
4702 a binary decision tree to select the appropriate
4703 target code. This is done as follows:
4705 The list of cases is rearranged into a binary tree,
4706 nearly optimal assuming equal probability for each case.
4708 The tree is transformed into RTL, eliminating
4709 redundant test conditions at the same time.
4711 If program flow could reach the end of the
4712 decision tree an unconditional jump to the
4713 default code is emitted. */
4716 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
4717 && estimate_case_costs (thiscase->data.case_stmt.case_list));
4718 balance_case_nodes (&thiscase->data.case_stmt.case_list,
4720 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
4721 default_label, TREE_TYPE (index_expr));
4722 emit_jump_if_reachable (default_label);
4731 enum machine_mode index_mode = SImode;
4732 int index_bits = GET_MODE_BITSIZE (index_mode);
4734 /* Convert the index to SImode. */
4735 if (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (index_expr)))
4736 > GET_MODE_BITSIZE (index_mode))
4738 enum machine_mode omode = TYPE_MODE (TREE_TYPE (index_expr));
4739 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
4741 /* We must handle the endpoints in the original mode. */
4742 index_expr = build (MINUS_EXPR, TREE_TYPE (index_expr),
4743 index_expr, minval);
4744 minval = integer_zero_node;
4745 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4746 emit_cmp_insn (rangertx, index, LTU, NULL_RTX, omode, 1, 0);
4747 emit_jump_insn (gen_bltu (default_label));
4748 /* Now we can safely truncate. */
4749 index = convert_to_mode (index_mode, index, 0);
4753 if (TYPE_MODE (TREE_TYPE (index_expr)) != index_mode)
4754 index_expr = convert (type_for_size (index_bits, 0),
4756 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4759 index = protect_from_queue (index, 0);
4760 do_pending_stack_adjust ();
4762 emit_jump_insn (gen_casesi (index, expand_expr (minval, NULL_RTX,
4764 expand_expr (range, NULL_RTX,
4766 table_label, default_label));
4770 #ifdef HAVE_tablejump
4771 if (! win && HAVE_tablejump)
4773 index_expr = convert (thiscase->data.case_stmt.nominal_type,
4774 fold (build (MINUS_EXPR,
4775 TREE_TYPE (index_expr),
4776 index_expr, minval)));
4777 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4779 index = protect_from_queue (index, 0);
4780 do_pending_stack_adjust ();
4782 do_tablejump (index, TYPE_MODE (TREE_TYPE (index_expr)),
4783 expand_expr (range, NULL_RTX, VOIDmode, 0),
4784 table_label, default_label);
4791 /* Get table of labels to jump to, in order of case index. */
4793 ncases = TREE_INT_CST_LOW (range) + 1;
4794 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
4795 bzero (labelvec, ncases * sizeof (rtx));
4797 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4799 register HOST_WIDE_INT i
4800 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);
4805 = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
4806 if (i + TREE_INT_CST_LOW (orig_minval)
4807 == TREE_INT_CST_LOW (n->high))
4813 /* Fill in the gaps with the default. */
4814 for (i = 0; i < ncases; i++)
4815 if (labelvec[i] == 0)
4816 labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);
4818 /* Output the table */
4819 emit_label (table_label);
4821 /* This would be a lot nicer if CASE_VECTOR_PC_RELATIVE
4822 were an expression, instead of an #ifdef/#ifndef. */
4824 #ifdef CASE_VECTOR_PC_RELATIVE
4828 emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
4829 gen_rtx (LABEL_REF, Pmode, table_label),
4830 gen_rtvec_v (ncases, labelvec)));
4832 emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
4833 gen_rtvec_v (ncases, labelvec)));
4835 /* If the case insn drops through the table,
4836 after the table we must jump to the default-label.
4837 Otherwise record no drop-through after the table. */
4838 #ifdef CASE_DROPS_THROUGH
4839 emit_jump (default_label);
4845 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
4846 reorder_insns (before_case, get_last_insn (),
4847 thiscase->data.case_stmt.start);
4849 if (thiscase->exit_label)
4850 emit_label (thiscase->exit_label);
4852 POPSTACK (case_stack);
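/* To make the choice of strategy above concrete: a switch whose cases
   are 1, 2, 3, 5, 6, 7 and 8 has count = 7 and range = 7, so on a
   machine with a casesi or tablejump insn an 8-entry dispatch table
   indexed by `index - 1' is emitted, with the missing value 4 pointing
   at the default label.  A switch on 1, 500 and 10000, by contrast, has
   a count below the threshold and a range far larger than its count, so
   it is expanded as the binary decision tree of compares and
   conditional jumps instead.  */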
4858 /* Terminate a case statement. EXPR is the original index expression. */
4861 bc_expand_end_case (expr)
4864 struct nesting *thiscase = case_stack;
4865 enum bytecode_opcode opcode;
4866 struct bc_label *jump_label;
4867 struct case_node *c;
4869 bc_emit_bytecode (jump);
4870 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));
4872 #ifdef DEBUG_PRINT_CODE
4873 fputc ('\n', stderr);
4876 /* Now that the size of the jump table is known, emit the actual
4877 indexed jump instruction. */
4878 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));
4880 opcode = TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode
4881 ? TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseSU : caseSI
4882 : TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseDU : caseDI;
4884 bc_emit_bytecode (opcode);
4886 /* Now emit the case instruction's literal arguments, in order.
4887 In addition to the value on the stack, it uses:
4888 1. The address of the jump table.
4889 2. The size of the jump table.
4890 3. The default label. */
4892 jump_label = bc_get_bytecode_label ();
4893 bc_emit_bytecode_labelref (jump_label);
4894 bc_emit_bytecode_const ((char *) &thiscase->data.case_stmt.num_ranges,
4895 sizeof thiscase->data.case_stmt.num_ranges);
4897 if (thiscase->data.case_stmt.default_label)
4898 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (thiscase->data.case_stmt.default_label)));
4900 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));
4902 /* Output the jump table. */
4904 bc_align_bytecode (3 /* PTR_ALIGN */);
4905 bc_emit_bytecode_labeldef (jump_label);
4907 if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode)
4908 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
4910 opcode = TREE_INT_CST_LOW (c->low);
4911 bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);
4913 opcode = TREE_INT_CST_LOW (c->high);
4914 bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);
4916 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
4919 if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == DImode)
4920 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
4922 bc_emit_bytecode_DI_const (c->low);
4923 bc_emit_bytecode_DI_const (c->high);
4925 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
4932 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->exit_label));
4934 /* Possibly issue enumeration warnings. */
4936 if (!thiscase->data.case_stmt.default_label
4937 && TREE_CODE (TREE_TYPE (expr)) == ENUMERAL_TYPE
4938 && TREE_CODE (expr) != INTEGER_CST
4940 check_for_full_enumeration_handling (TREE_TYPE (expr));
4943 #ifdef DEBUG_PRINT_CODE
4944 fputc ('\n', stderr);
4947 POPSTACK (case_stack);
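/* Putting bc_expand_start_case and the code above together, the
   bytecode for a switch is laid out roughly as

	<index expression and conversion>
	jump skip_label
	<code for the switch body, with its case labels defined in place>
	jump exit_label
     skip_label:
	caseSI, caseSU, caseDI or caseDU, by mode and signedness of the index
	labelref of the jump table
	number of case ranges
	labelref of the default label, or of the exit label if there is none
     jump table (pointer-aligned):
	low value, high value and case labelref, one triple per range
     exit_label:  */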
4951 /* Return unique bytecode ID. */
4955 static int bc_uid = 0;
4960 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
4963 do_jump_if_equal (op1, op2, label, unsignedp)
4964 rtx op1, op2, label;
4967 if (GET_CODE (op1) == CONST_INT
4968 && GET_CODE (op2) == CONST_INT)
4970 if (INTVAL (op1) == INTVAL (op2))
4975 enum machine_mode mode = GET_MODE (op1);
4976 if (mode == VOIDmode)
4977 mode = GET_MODE (op2);
4978 emit_cmp_insn (op1, op2, EQ, NULL_RTX, mode, unsignedp, 0);
4979 emit_jump_insn (gen_beq (label));
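/* When both operands are CONST_INTs the comparison is decided at
   compile time: equal constants turn into an unconditional jump to
   LABEL, and unequal ones emit nothing, so callers need not
   special-case constant indices themselves.  */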
4983 /* Not all case values are encountered equally. This function
4984 uses a heuristic to weight case labels, in cases where that
4985 looks like a reasonable thing to do.
4987 Right now, all we try to guess is text, and we establish weights such as
4990 chars above space: 16, with whitespace and the common control characters given smaller weights in the table set up below.
4999 If we find any cases in the switch that are not either -1 or in the range
5000 of valid ASCII characters, or are control characters other than those
5001 commonly used with "\", don't treat this switch as scanning text.
5003 Return 1 if these nodes are suitable for cost estimation, otherwise return 0.
5007 estimate_case_costs (node)
5010 tree min_ascii = build_int_2 (-1, -1);
5011 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5015 /* If we haven't already made the cost table, make it now. Note that the
5016 lower bound of the table is -1, not zero. */
5018 if (cost_table == NULL)
5020 cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
5021 bzero (cost_table - 1, 129 * sizeof (short));
5023 for (i = 0; i < 128; i++)
5027 else if (ispunct (i))
5029 else if (iscntrl (i))
5033 cost_table[' '] = 8;
5034 cost_table['\t'] = 4;
5035 cost_table['\0'] = 4;
5036 cost_table['\n'] = 2;
5037 cost_table['\f'] = 1;
5038 cost_table['\v'] = 1;
5039 cost_table['\b'] = 1;
5042 /* See if all the case expressions look like text. It is text if the
5043 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5044 as signed arithmetic since we don't want to ever access cost_table with a
5045 value less than -1. Also check that none of the constants in a range
5046 are strange control characters. */
5048 for (n = node; n; n = n->right)
5050 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5053 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
5054 if (cost_table[i] < 0)
5058 /* All interesting values are within the range of interesting
5059 ASCII characters. */
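/* A minimal standalone sketch of the same table-building idea, with
   illustrative weights (not necessarily the exact values used above;
   control characters get a negative weight so that ranges containing
   them disqualify the text heuristic):  */
#if 0
#include <ctype.h>
#include <stdlib.h>
#include <string.h>

static short *
make_cost_table (void)
{
  /* Allocate 129 entries and shift the pointer so that index -1 is
     valid, just as the cost table above does.  */
  short *table = (short *) malloc (129 * sizeof (short)) + 1;
  int i;

  memset (table - 1, 0, 129 * sizeof (short));
  for (i = 0; i < 128; i++)
    if (isalnum (i))
      table[i] = 16;
    else if (ispunct (i))
      table[i] = 8;
    else if (iscntrl (i))
      table[i] = -1;

  /* Common whitespace and escape characters get modest weights.  */
  table[' '] = 8;
  table['\t'] = 4;
  table['\0'] = 4;
  table['\n'] = 2;
  table['\f'] = 1;
  table['\v'] = 1;
  table['\b'] = 1;
  return table;
}
#endif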
5063 /* Scan an ordered list of case nodes
5064 combining those with consecutive values or ranges.
5066 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
5069 group_case_nodes (head)
5072 case_node_ptr node = head;
5076 rtx lb = next_real_insn (label_rtx (node->code_label));
5077 case_node_ptr np = node;
5079 /* Try to group the successors of NODE with NODE. */
5080 while (((np = np->right) != 0)
5081 /* Do they jump to the same place? */
5082 && next_real_insn (label_rtx (np->code_label)) == lb
5083 /* Are their ranges consecutive? */
5084 && tree_int_cst_equal (np->low,
5085 fold (build (PLUS_EXPR,
5086 TREE_TYPE (node->high),
5089 /* An overflow is not consecutive. */
5090 && tree_int_cst_lt (node->high,
5091 fold (build (PLUS_EXPR,
5092 TREE_TYPE (node->high),
5094 integer_one_node))))
5096 node->high = np->high;
5098 /* NP is the first node after NODE which can't be grouped with it.
5099 Delete the nodes in between, and move on to that node. */
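/* A standalone sketch of the same grouping idea, with an integer
   `target' field standing in for the label comparison done above via
   next_real_insn, and with the overflow check omitted:  */
#if 0
struct range_entry { int low, high, target; struct range_entry *right; };

static void
group_ranges (struct range_entry *node)
{
  while (node)
    {
      struct range_entry *np = node;

      /* Absorb successors whose range continues NODE's and whose
	 target is the same.  */
      while ((np = np->right) != 0
	     && np->target == node->target
	     && np->low == node->high + 1)
	node->high = np->high;

      /* NP is the first entry that could not be grouped (or null);
	 unlink everything in between and continue from there.  */
      node->right = np;
      node = np;
    }
}
#endif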
5105 /* Take an ordered list of case nodes
5106 and transform them into a near optimal binary tree,
5107 on the assumption that any target code selection value is as
5108 likely as any other.
5110 The transformation is performed by splitting the ordered
5111 list into two equal sections plus a pivot. The parts are
5112 then attached to the pivot as left and right branches. Each
5113 branch is then transformed recursively. */
5116 balance_case_nodes (head, parent)
5117 case_node_ptr *head;
5118 case_node_ptr parent;
5120 register case_node_ptr np;
5128 register case_node_ptr *npp;
5131 /* Count the number of entries on branch. Also count the ranges. */
5135 if (!tree_int_cst_equal (np->low, np->high))
5139 cost += cost_table[TREE_INT_CST_LOW (np->high)];
5143 cost += cost_table[TREE_INT_CST_LOW (np->low)];
5151 /* Split this list if it is long enough for that to help. */
5156 /* Find the place in the list that bisects the list's total cost.
5157 Here I gets half the total cost. */
5162 /* Skip nodes while their cost does not reach that amount. */
5163 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5164 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
5165 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
5168 npp = &(*npp)->right;
5173 /* Leave this branch lopsided, but optimize left-hand
5174 side and fill in `parent' fields for right-hand side. */
5176 np->parent = parent;
5177 balance_case_nodes (&np->left, np);
5178 for (; np->right; np = np->right)
5179 np->right->parent = np;
5183 /* If there are just three nodes, split at the middle one. */
5185 npp = &(*npp)->right;
5188 /* Find the place in the list that bisects the list's total cost,
5189 where ranges count as 2.
5190 Here I gets half the total cost. */
5191 i = (i + ranges + 1) / 2;
5194 /* Skip nodes while their cost does not reach that amount. */
5195 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5200 npp = &(*npp)->right;
5205 np->parent = parent;
5208 /* Optimize each of the two split parts. */
5209 balance_case_nodes (&np->left, np);
5210 balance_case_nodes (&np->right, np);
5214 /* Else leave this branch as one level,
5215 but fill in `parent' fields. */
5217 np->parent = parent;
5218 for (; np->right; np = np->right)
5219 np->right->parent = np;
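/* A standalone sketch of the splitting step, bisecting by node count
   rather than by the estimated cost used above: the entry reached half
   way along the sorted chain becomes the root of this branch, the
   entries before it become its left chain, the entries after it remain
   its right chain, and both halves are then balanced recursively.  */
#if 0
struct tree_entry
{
  int value;
  struct tree_entry *left, *right, *parent;
};

static void
balance (struct tree_entry **head, struct tree_entry *parent)
{
  struct tree_entry *np, *first, **npp;
  int count = 0, i;

  for (np = *head; np; np = np->right)
    count++;

  if (count > 2)
    {
      npp = head;
      first = *head;

      /* Walk to the entry that will become the root of this branch.  */
      for (i = count / 2; i > 0; i--)
	npp = &(*npp)->right;

      np = *npp;		/* The pivot entry.  */
      *npp = 0;			/* Cut the chain just before it.  */
      *head = np;
      np->parent = parent;
      np->left = first;		/* Entries below the pivot hang to its left.  */

      balance (&np->left, np);	/* np->right already holds the upper part.  */
      balance (&np->right, np);
    }
  else if ((np = *head) != 0)
    {
      /* One or two entries: leave the chain alone, just set parent links.  */
      np->parent = parent;
      for (; np->right; np = np->right)
	np->right->parent = np;
    }
}
#endif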
5224 /* Search the parent sections of the case node tree
5225 to see if a test for the lower bound of NODE would be redundant.
5226 INDEX_TYPE is the type of the index expression.
5228 The instructions to generate the case decision tree are
5229 output in the same order as nodes are processed so it is
5230 known that if a parent node checks the range of the current
5231 node minus one, then the current node is bounded at its lower
5232 span. Thus the test would be redundant. */
5235 node_has_low_bound (node, index_type)
5240 case_node_ptr pnode;
5242 /* If the lower bound of this node is the lowest value in the index type,
5243 we need not test it. */
5245 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5248 /* If this node has a left branch, the value at the left must be less
5249 than that at this node, so it cannot be bounded at the bottom and
5250 we need not bother testing any further. */
5255 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5256 node->low, integer_one_node));
5258 /* If the subtraction above overflowed, we can't verify anything.
5259 Otherwise, look for a parent that tests our value - 1. */
5261 if (! tree_int_cst_lt (low_minus_one, node->low))
5264 for (pnode = node->parent; pnode; pnode = pnode->parent)
5265 if (tree_int_cst_equal (low_minus_one, pnode->high))
5271 /* Search the parent sections of the case node tree
5272 to see if a test for the upper bound of NODE would be redundant.
5273 INDEX_TYPE is the type of the index expression.
5275 The instructions to generate the case decision tree are
5276 output in the same order as nodes are processed so it is
5277 known that if a parent node checks the range of the current
5278 node plus one, then the current node is bounded at its upper
5279 span. Thus the test would be redundant. */
5282 node_has_high_bound (node, index_type)
5287 case_node_ptr pnode;
5289 /* If the upper bound of this node is the highest value in the type
5290 of the index expression, we need not test against it. */
5292 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5295 /* If this node has a right branch, the value at the right must be greater
5296 than that at this node, so it cannot be bounded at the top and
5297 we need not bother testing any further. */
5302 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5303 node->high, integer_one_node));
5305 /* If the addition above overflowed, we can't verify anything.
5306 Otherwise, look for a parent that tests our value + 1. */
5308 if (! tree_int_cst_lt (node->high, high_plus_one))
5311 for (pnode = node->parent; pnode; pnode = pnode->parent)
5312 if (tree_int_cst_equal (high_plus_one, pnode->low))
5318 /* Search the parent sections of the
5319 case node tree to see if both tests for the upper and lower
5320 bounds of NODE would be redundant. */
5323 node_is_bounded (node, index_type)
5327 return (node_has_low_bound (node, index_type)
5328 && node_has_high_bound (node, index_type));
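/* For example, if some parent node covers the single value 49 and the
   current node's low bound is 50, the `index >= 50' test is redundant:
   control reaches this node only after the comparison against that
   parent has already disposed of everything at or below 49.  The upper
   bound is handled symmetrically, using the value one above the node's
   high bound.  */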
5331 /* Emit an unconditional jump to LABEL unless it would be dead code. */
5334 emit_jump_if_reachable (label)
5337 if (GET_CODE (get_last_insn ()) != BARRIER)
5341 /* Emit step-by-step code to select a case for the value of INDEX.
5342 The thus generated decision tree follows the form of the
5343 case-node binary tree NODE, whose nodes represent test conditions.
5344 INDEX_TYPE is the type of the index of the switch.
5346 Care is taken to prune redundant tests from the decision tree
5347 by detecting any boundary conditions already checked by
5348 emitted rtx. (See node_has_high_bound, node_has_low_bound
5349 and node_is_bounded, above.)
5351 Where the test conditions can be shown to be redundant we emit
5352 an unconditional jump to the target code. As a further
5353 optimization, the subordinates of a tree node are examined to
5354 check for bounded nodes. In this case conditional and/or
5355 unconditional jumps as a result of the boundary check for the
5356 current node are arranged to target the subordinates' associated
5357 code for out-of-bound conditions on the current node.
5359 We can assume that when control reaches the code generated here,
5360 the index value has already been compared with the parents
5361 of this node, and determined to be on the same side of each parent
5362 as this node is. Thus, if this node tests for the value 51,
5363 and a parent tested for 52, we don't need to consider
5364 the possibility of a value greater than 51. If another parent
5365 tests for the value 50, then this node need not test anything. */
5368 emit_case_nodes (index, node, default_label, index_type)
5374 /* If INDEX has an unsigned type, we must make unsigned branches. */
5375 int unsignedp = TREE_UNSIGNED (index_type);
5376 typedef rtx rtx_function ();
5377 rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
5378 rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
5379 rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
5380 rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
5381 enum machine_mode mode = GET_MODE (index);
5383 /* See if our parents have already tested everything for us.
5384 If they have, emit an unconditional jump for this node. */
5385 if (node_is_bounded (node, index_type))
5386 emit_jump (label_rtx (node->code_label));
5388 else if (tree_int_cst_equal (node->low, node->high))
5390 /* Node is single valued. First see if the index expression matches
5391 this node and then check our children, if any. */
5393 do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
5394 label_rtx (node->code_label), unsignedp);
5396 if (node->right != 0 && node->left != 0)
5398 /* This node has children on both sides.
5399 Dispatch to one side or the other
5400 by comparing the index value with this node's value.
5401 If one subtree is bounded, check that one first,
5402 so we can avoid real branches in the tree. */
5404 if (node_is_bounded (node->right, index_type))
5406 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5408 GT, NULL_RTX, mode, unsignedp, 0);
5410 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
5411 emit_case_nodes (index, node->left, default_label, index_type);
5414 else if (node_is_bounded (node->left, index_type))
5416 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5418 LT, NULL_RTX, mode, unsignedp, 0);
5419 emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
5420 emit_case_nodes (index, node->right, default_label, index_type);
5425 /* Neither node is bounded. First distinguish the two sides;
5426 then emit the code for one side at a time. */
5429 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5431 /* See if the value is on the right. */
5432 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5434 GT, NULL_RTX, mode, unsignedp, 0);
5435 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
5437 /* Value must be on the left.
5438 Handle the left-hand subtree. */
5439 emit_case_nodes (index, node->left, default_label, index_type);
5440 /* If left-hand subtree does nothing, go to default. */
5442 emit_jump_if_reachable (default_label);
5444 /* Code branches here for the right-hand subtree. */
5445 expand_label (test_label);
5446 emit_case_nodes (index, node->right, default_label, index_type);
5450 else if (node->right != 0 && node->left == 0)
5452 /* Here we have a right child but no left so we issue conditional
5453 branch to default and process the right child.
5455 Omit the conditional branch to default if it would avoid only one
5456 right child; it costs too much space to save so little time. */
5458 if (node->right->right || node->right->left
5459 || !tree_int_cst_equal (node->right->low, node->right->high))
5461 if (!node_has_low_bound (node, index_type))
5463 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5465 LT, NULL_RTX, mode, unsignedp, 0);
5466 emit_jump_insn ((*gen_blt_pat) (default_label));
5469 emit_case_nodes (index, node->right, default_label, index_type);
5472 /* We cannot process node->right normally
5473 since we haven't ruled out the numbers less than
5474 this node's value. So handle node->right explicitly. */
5475 do_jump_if_equal (index,
5476 expand_expr (node->right->low, NULL_RTX,
5478 label_rtx (node->right->code_label), unsignedp);
5481 else if (node->right == 0 && node->left != 0)
5483 /* Just one subtree, on the left. */
5485 #if 0 /* The following code and comment were formerly part
5486 of the condition here, but they didn't work
5487 and I don't understand what the idea was. -- rms. */
5488 /* If our "most probable entry" is less probable
5489 than the default label, emit a jump to
5490 the default label using condition codes
5491 already lying around. With no right branch,
5492 a branch-greater-than will get us to the default
5495 && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
5498 if (node->left->left || node->left->right
5499 || !tree_int_cst_equal (node->left->low, node->left->high))
5501 if (!node_has_high_bound (node, index_type))
5503 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5505 GT, NULL_RTX, mode, unsignedp, 0);
5506 emit_jump_insn ((*gen_bgt_pat) (default_label));
5509 emit_case_nodes (index, node->left, default_label, index_type);
5512 /* We cannot process node->left normally
5513 since we haven't ruled out the numbers greater than
5514 this node's value. So handle node->left explicitly. */
5515 do_jump_if_equal (index,
5516 expand_expr (node->left->low, NULL_RTX,
5518 label_rtx (node->left->code_label), unsignedp);
5523 /* Node is a range. These cases are very similar to those for a single
5524 value, except that we do not start by testing whether this node
5525 is the one to branch to. */
5527 if (node->right != 0 && node->left != 0)
5529 /* Node has subtrees on both sides.
5530 If the right-hand subtree is bounded,
5531 test for it first, since we can go straight there.
5532 Otherwise, we need to make a branch in the control structure,
5533 then handle the two subtrees. */
5534 tree test_label = 0;
5536 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5538 GT, NULL_RTX, mode, unsignedp, 0);
5540 if (node_is_bounded (node->right, index_type))
5541 /* Right hand node is fully bounded so we can eliminate any
5542 testing and branch directly to the target code. */
5543 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
5546 /* Right hand node requires testing.
5547 Branch to a label where we will handle it later. */
5549 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5550 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
5553 /* Value belongs to this node or to the left-hand subtree. */
5555 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
5556 GE, NULL_RTX, mode, unsignedp, 0);
5557 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
5559 /* Handle the left-hand subtree. */
5560 emit_case_nodes (index, node->left, default_label, index_type);
5562 /* If right node had to be handled later, do that now. */
5566 /* If the left-hand subtree fell through,
5567 don't let it fall into the right-hand subtree. */
5568 emit_jump_if_reachable (default_label);
5570 expand_label (test_label);
5571 emit_case_nodes (index, node->right, default_label, index_type);
5575 else if (node->right != 0 && node->left == 0)
5577 /* Deal with values to the left of this node,
5578 if they are possible. */
5579 if (!node_has_low_bound (node, index_type))
5581 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
5583 LT, NULL_RTX, mode, unsignedp, 0);
5584 emit_jump_insn ((*gen_blt_pat) (default_label));
5587 /* Value belongs to this node or to the right-hand subtree. */
5589 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5591 LE, NULL_RTX, mode, unsignedp, 0);
5592 emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));
5594 emit_case_nodes (index, node->right, default_label, index_type);
5597 else if (node->right == 0 && node->left != 0)
5599 /* Deal with values to the right of this node,
5600 if they are possible. */
5601 if (!node_has_high_bound (node, index_type))
5603 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5605 GT, NULL_RTX, mode, unsignedp, 0);
5606 emit_jump_insn ((*gen_bgt_pat) (default_label));
5609 /* Value belongs to this node or to the left-hand subtree. */
5611 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
5612 GE, NULL_RTX, mode, unsignedp, 0);
5613 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
5615 emit_case_nodes (index, node->left, default_label, index_type);
5620 /* Node has no children so we check low and high bounds to remove
5621 redundant tests. Only one of the bounds can exist,
5622 since otherwise this node is bounded--a case tested already. */
5624 if (!node_has_high_bound (node, index_type))
5626 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5628 GT, NULL_RTX, mode, unsignedp, 0);
5629 emit_jump_insn ((*gen_bgt_pat) (default_label));
5632 if (!node_has_low_bound (node, index_type))
5634 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
5636 LT, NULL_RTX, mode, unsignedp, 0);
5637 emit_jump_insn ((*gen_blt_pat) (default_label));
5640 emit_jump (label_rtx (node->code_label));
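/* As a small worked example, a switch whose cases are the single values
   1, 5, 9, 13 and 100 is too sparse for a dispatch table and gets a
   decision tree rooted (after balancing) at 9, with {1, 5} on the left
   and {13, 100} on the right.  The code emitted for the root then looks
   roughly like

	if (index == 9) goto L9;
	if (index > 9) goto test_right;
	<compares for 1 and 5, then jump to the default label>
     test_right:
	<compares for 13 and 100, then jump to the default label>

   except that any compare already made redundant by a parent's test is
   omitted, as described above.  */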
5645 /* These routines are used by the loop unrolling code. They copy BLOCK trees
5646 so that the debugging info will be correct for the unrolled loop. */
5648 /* Indexed by block number, contains a pointer to the N'th block node. */
5650 static tree *block_vector;
5653 find_loop_tree_blocks ()
5655 tree block = DECL_INITIAL (current_function_decl);
5657 /* The first block is for the function body, and does not have
5658 corresponding block notes. Don't include it in the block vector. */
5659 block = BLOCK_SUBBLOCKS (block);
5661 block_vector = identify_blocks (block, get_insns ());
5665 unroll_block_trees ()
5667 tree block = DECL_INITIAL (current_function_decl);
5669 reorder_blocks (block_vector, block, get_insns ());