1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 88, 89, 92, 93, 94, 1995 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
21 /* This file handles the generation of rtl code from tree structure
22 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
23 It also creates the rtl expressions for parameters and auto variables
24 and has full responsibility for allocating stack slots.
26 The functions whose names start with `expand_' are called by the
27 parser to generate RTL instructions for various kinds of constructs.
29 Some control and binding constructs require calling several such
30 functions at different times. For example, a simple if-then
31 is expanded by calling `expand_start_cond' (with the condition-expression
32 as argument) before parsing the then-clause and calling `expand_end_cond'
33 after parsing the then-clause. */
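/* Illustrative sketch (not part of the original sources): for a C fragment

       if (cond) { ...then... } else { ...else... }

   a front end would typically drive the entry points defined below roughly
   as follows; only the named functions are real, the surrounding outline is
   a hypothetical caller.

       expand_start_cond (cond, 0);     test COND, jump past the then-clause
       ...expand the then-clause...
       expand_start_else ();            jump to endif, emit the else label
       ...expand the else-clause...
       expand_end_cond ();              emit the endif label, pop cond_stack
*/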
44 #include "insn-flags.h"
45 #include "insn-config.h"
46 #include "insn-codes.h"
48 #include "hard-reg-set.h"
55 #include "bc-typecd.h"
56 #include "bc-opcode.h"
60 #define obstack_chunk_alloc xmalloc
61 #define obstack_chunk_free free
62 struct obstack stmt_obstack;
64 /* Filename and line number of last line-number note,
65 whether we actually emitted it or not. */
69 /* Nonzero if within a ({...}) grouping, in which case we must
70 always compute a value for each expr-stmt in case it is the last one. */
72 int expr_stmts_for_value;
74 /* Each time we expand an expression-statement,
75 record the expr's type and its RTL value here. */
77 static tree last_expr_type;
78 static rtx last_expr_value;
80 /* Each time we expand the end of a binding contour (in `expand_end_bindings')
81 and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
82 This is used by the `remember_end_note' function to record the endpoint
83 of each generated block in its associated BLOCK node. */
85 static rtx last_block_end_note;
87 /* Number of binding contours started so far in this function. */
89 int block_start_count;
91 /* Nonzero if function being compiled needs to
92 return the address of where it has put a structure value. */
94 extern int current_function_returns_pcc_struct;
96 /* Label that will go on parm cleanup code, if any.
97 Jumping to this label runs cleanup code for parameters, if
98 such code must be run. Following this code is the logical return label. */
100 extern rtx cleanup_label;
102 /* Label that will go on function epilogue.
103 Jumping to this label serves as a "return" instruction
104 on machines which require execution of the epilogue on all returns. */
106 extern rtx return_label;
108 /* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
109    So we can mark them all live at the end of the function, if not optimizing.  */
110 extern rtx save_expr_regs;
112 /* Offset to end of allocated area of stack frame.
113 If stack grows down, this is the address of the last stack slot allocated.
114 If stack grows up, this is the address for the next slot. */
115 extern int frame_offset;
117 /* Label to jump back to for tail recursion, or 0 if we have
118 not yet needed one for this function. */
119 extern rtx tail_recursion_label;
121 /* Place after which to insert the tail_recursion_label if we need one. */
122 extern rtx tail_recursion_reentry;
124 /* Location at which to save the argument pointer if it will need to be
125 referenced. There are two cases where this is done: if nonlocal gotos
126    exist, or if vars whose address is an offset from the argument pointer will be
127 needed by inner routines. */
129 extern rtx arg_pointer_save_area;
131 /* Chain of all RTL_EXPRs that have insns in them. */
132 extern tree rtl_expr_chain;
134 #if 0 /* Turned off because 0 seems to work just as well. */
135 /* Cleanup lists are required for binding levels regardless of whether
136 that binding level has cleanups or not. This node serves as the
137 cleanup list whenever an empty list is required. */
138 static tree empty_cleanup_list;
141 extern void (*interim_eh_hook) PROTO((tree));
143 /* Functions and data structures for expanding case statements. */
145 /* Case label structure, used to hold info on labels within case
146 statements. We handle "range" labels; for a single-value label
147 as in C, the high and low limits are the same.
149 A chain of case nodes is initially maintained via the RIGHT fields
150 in the nodes. Nodes with higher case values are later in the list.
152 Switch statements can be output in one of two forms. A branch table
153 is used if there are more than a few labels and the labels are dense
154 within the range between the smallest and largest case value. If a
155    branch table is used, no further manipulations are done with the case node chain.
158 The alternative to the use of a branch table is to generate a series
159 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
160 and PARENT fields to hold a binary tree. Initially the tree is
161 totally unbalanced, with everything on the right. We balance the tree
162 with nodes on the left having lower case values than the parent
163    and nodes on the right having higher values.  We then output the tree in order.
168 struct case_node *left; /* Left son in binary tree */
169 struct case_node *right; /* Right son in binary tree; also node chain */
170 struct case_node *parent; /* Parent of node in binary tree */
171 tree low; /* Lowest index value for this label */
172 tree high; /* Highest index value for this label */
173 tree code_label; /* Label to jump to when node matches */
176 typedef struct case_node case_node;
177 typedef struct case_node *case_node_ptr;
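/* Illustrative sketch (not part of the original sources): for

       switch (i) { case 1: ...  case 5: ...  case 9: ... }

   each label yields one case_node with low == high ({1,1}, {5,5}, {9,9}),
   chained in ascending order through the `right' fields.  If no branch
   table is emitted, balance_case_nodes may turn that chain into a tree,
   e.g. {5,5} at the root with {1,1} as left son and {9,9} as right son,
   and emit_case_nodes then outputs compare-and-jump insns for it.  */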
179 /* These are used by estimate_case_costs and balance_case_nodes. */
181 /* This must be a signed type, and non-ANSI compilers lack signed char. */
182 static short *cost_table;
183 static int use_cost_table;
185 /* Stack of control and binding constructs we are currently inside.
187 These constructs begin when you call `expand_start_WHATEVER'
188 and end when you call `expand_end_WHATEVER'. This stack records
189 info about how the construct began that tells the end-function
190 what to do. It also may provide information about the construct
191 to alter the behavior of other constructs within the body.
192 For example, they may affect the behavior of C `break' and `continue'.
194 Each construct gets one `struct nesting' object.
195 All of these objects are chained through the `all' field.
196 `nesting_stack' points to the first object (innermost construct).
197 The position of an entry on `nesting_stack' is in its `depth' field.
199 Each type of construct has its own individual stack.
200 For example, loops have `loop_stack'. Each object points to the
201 next object of the same type through the `next' field.
203 Some constructs are visible to `break' exit-statements and others
204 are not. Which constructs are visible depends on the language.
205 Therefore, the data structure allows each construct to be visible
206 or not, according to the args given when the construct is started.
207 The construct is visible if the `exit_label' field is non-null.
208 In that case, the value should be a CODE_LABEL rtx. */
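/* Illustrative sketch (not part of the original sources): while expanding

       while (a) { if (b) break; }

   the stacks below hold, innermost first, a cond entry for the `if', a
   block entry for the `{...}' body and a loop entry for the `while', all
   of them also chained through `all' on nesting_stack.  A `break' is
   expanded by looking down nesting_stack for the innermost entry whose
   exit_label is set and jumping to that label.  */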
213 struct nesting *next;
218 /* For conds (if-then and if-then-else statements). */
221 /* Label for the end of the if construct.
222 There is none if EXITFLAG was not set
223 and no `else' has been seen yet. */
225 /* Label for the end of this alternative.
226 This may be the end of the if or the next else/elseif. */
232 /* Label at the top of the loop; place to loop back to. */
234 /* Label at the end of the whole construct. */
236 /* Label before a jump that branches to the end of the whole
237 construct. This is where destructors go if any. */
239 /* Label for `continue' statement to jump to;
240 this is in front of the stepper of the loop. */
243 /* For variable binding contours. */
246 /* Sequence number of this binding contour within the function,
247 in order of entry. */
248 int block_start_count;
249 /* Nonzero => value to restore stack to on exit. Complemented by
250 bc_stack_level (see below) when generating bytecodes. */
252 /* The NOTE that starts this contour.
253 Used by expand_goto to check whether the destination
254 is within each contour or not. */
256 /* Innermost containing binding contour that has a stack level. */
257 struct nesting *innermost_stack_block;
258 /* List of cleanups to be run on exit from this contour.
259 This is a list of expressions to be evaluated.
260 The TREE_PURPOSE of each link is the ..._DECL node
261 which the cleanup pertains to. */
263 /* List of cleanup-lists of blocks containing this block,
264 as they were at the locus where this block appears.
265 There is an element for each containing block,
266 ordered innermost containing block first.
267 The tail of this list can be 0 (was empty_cleanup_list),
268 if all remaining elements would be empty lists.
269 The element's TREE_VALUE is the cleanup-list of that block,
270 which may be null. */
272 /* Chain of labels defined inside this binding contour.
273 For contours that have stack levels or cleanups. */
274 struct label_chain *label_chain;
275 /* Number of function calls seen, as of start of this block. */
276 int function_call_count;
277 /* Bytecode specific: stack level to restore stack to on exit. */
280 /* For switch (C) or case (Pascal) statements,
281 and also for dummies (see `expand_start_case_dummy'). */
284 /* The insn after which the case dispatch should finally
285 be emitted. Zero for a dummy. */
287 /* For bytecodes, the case table is in-lined right in the code.
288 A label is needed for skipping over this block. It is only
289 used when generating bytecodes. */
291 /* A list of case labels, kept in ascending order by value
292 as the list is built.
293 During expand_end_case, this list may be rearranged into a
294 nearly balanced binary tree. */
295 struct case_node *case_list;
296 /* Label to jump to if no case matches. */
298 /* The expression to be dispatched on. */
300 /* Type that INDEX_EXPR should be converted to. */
302 /* Number of range exprs in case statement. */
304 /* Name of this kind of statement, for warnings. */
306 /* Nonzero if a case label has been seen in this case stmt. */
312 /* Chain of all pending binding contours. */
313 struct nesting *block_stack;
315 /* If any new stacks are added here, add them to POPSTACK too.  */
317 /* Chain of all pending binding contours that restore stack levels
    or have cleanups.  */
319 struct nesting *stack_block_stack;
321 /* Chain of all pending conditional statements. */
322 struct nesting *cond_stack;
324 /* Chain of all pending loops. */
325 struct nesting *loop_stack;
327 /* Chain of all pending case or switch statements. */
328 struct nesting *case_stack;
330 /* Separate chain including all of the above,
331 chained through the `all' field. */
332 struct nesting *nesting_stack;
334 /* Number of entries on nesting_stack now. */
337 /* Allocate and return a new `struct nesting'. */
339 #define ALLOC_NESTING() \
340 (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))
342 /* Pop the nesting stack element by element until we pop off
343 the element which is at the top of STACK.
344 Update all the other stacks, popping off elements from them
345 as we pop them from nesting_stack. */
347 #define POPSTACK(STACK) \
348 do { struct nesting *target = STACK; \
349 struct nesting *this; \
350 do { this = nesting_stack; \
351 if (loop_stack == this) \
352 loop_stack = loop_stack->next; \
353 if (cond_stack == this) \
354 cond_stack = cond_stack->next; \
355 if (block_stack == this) \
356 block_stack = block_stack->next; \
357 if (stack_block_stack == this) \
358 stack_block_stack = stack_block_stack->next; \
359 if (case_stack == this) \
360 case_stack = case_stack->next; \
361 nesting_depth = nesting_stack->depth - 1; \
362 nesting_stack = this->all; \
363 obstack_free (&stmt_obstack, this); } \
364 while (this != target); } while (0)
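/* Illustrative note: the expand_start_* functions below allocate their
   entry with ALLOC_NESTING () and push it, and the matching expand_end_*
   functions pop it again, e.g. expand_end_cond ends with
   POPSTACK (cond_stack), which also unwinds any inner entries still left
   on nesting_stack.  */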
366 /* In some cases it is impossible to generate code for a forward goto
367 until the label definition is seen. This happens when it may be necessary
368 for the goto to reset the stack pointer: we don't yet know how to do that.
369 So expand_goto puts an entry on this fixup list.
370 Each time a binding contour that resets the stack is exited,
372 If the target label has now been defined, we can insert the proper code. */
376 /* Points to following fixup. */
377 struct goto_fixup *next;
378 /* Points to the insn before the jump insn.
379 If more code must be inserted, it goes after this insn. */
381 /* The LABEL_DECL that this jump is jumping to, or 0
382 for break, continue or return. */
384 /* The BLOCK for the place where this goto was found. */
386 /* The CODE_LABEL rtx that this is jumping to. */
388 /* Number of binding contours started in current function
389 before the label reference. */
390 int block_start_count;
391 /* The outermost stack level that should be restored for this jump.
392 Each time a binding contour that resets the stack is exited,
393 if the target label is *not* yet defined, this slot is updated. */
395 /* List of lists of cleanup expressions to be run by this goto.
396 There is one element for each block that this goto is within.
397 The tail of this list can be 0 (was empty_cleanup_list),
398 if all remaining elements would be empty.
399 The TREE_VALUE contains the cleanup list of that block as of the
400 time this goto was seen.
401 The TREE_ADDRESSABLE flag is 1 for a block that has been exited. */
402 tree cleanup_list_list;
404 /* Bytecode specific members follow */
406   /* The label that this jump is jumping to, or 0 for break, continue
      or return.  */
408 struct bc_label *bc_target;
410 /* The label we use for the fixup patch */
411 struct bc_label *label;
413 /* True (non-0) if fixup has been handled */
416 /* Like stack_level above, except refers to the interpreter stack */
420 static struct goto_fixup *goto_fixup_chain;
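/* Illustrative sketch (not part of the original sources): a fixup is
   recorded for a forward goto such as

       {
         char buf[n];           dynamic size: this block has a stack level
         ...
         goto done;             label not yet defined; expand_fixup
         ...                    records a goto_fixup for this jump
       }
     done: ;

   When the inner contour is exited, fixup_gotos notes the stack level to
   restore and marks the cleanup lists of the exited block; once `done'
   is defined, the saved restore/cleanup insns are inserted just before
   the recorded jump.  */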
422 /* Within any binding contour that must restore a stack level,
423 all labels are recorded with a chain of these structures. */
427 /* Points to following fixup. */
428 struct label_chain *next;
431 static void expand_goto_internal PROTO((tree, rtx, rtx));
432 static void bc_expand_goto_internal PROTO((enum bytecode_opcode,
433 struct bc_label *, tree));
434 static int expand_fixup PROTO((tree, rtx, rtx));
435 static void bc_expand_fixup PROTO((enum bytecode_opcode,
436 struct bc_label *, int));
437 static void fixup_gotos PROTO((struct nesting *, rtx, tree,
439 static void bc_fixup_gotos PROTO((struct nesting *, int, tree,
441 static int warn_if_unused_value PROTO((tree));
442 static void bc_expand_start_cond PROTO((tree, int));
443 static void bc_expand_end_cond PROTO((void));
444 static void bc_expand_start_else PROTO((void));
445 static void bc_expand_end_loop PROTO((void));
446 static void bc_expand_end_bindings PROTO((tree, int, int));
447 static void bc_expand_decl PROTO((tree, tree));
448 static void bc_expand_variable_local_init PROTO((tree));
449 static void bc_expand_decl_init PROTO((tree));
450 static void expand_null_return_1 PROTO((rtx, int));
451 static int tail_recursion_args PROTO((tree, tree));
452 static void expand_cleanups PROTO((tree, tree, int, int));
453 static void bc_expand_start_case PROTO((struct nesting *, tree,
455 static int bc_pushcase PROTO((tree, tree));
456 static void bc_check_for_full_enumeration_handling PROTO((tree));
457 static void bc_expand_end_case PROTO((tree));
458 static void do_jump_if_equal PROTO((rtx, rtx, rtx, int));
459 static int estimate_case_costs PROTO((case_node_ptr));
460 static void group_case_nodes PROTO((case_node_ptr));
461 static void balance_case_nodes PROTO((case_node_ptr *,
463 static int node_has_low_bound PROTO((case_node_ptr, tree));
464 static int node_has_high_bound PROTO((case_node_ptr, tree));
465 static int node_is_bounded PROTO((case_node_ptr, tree));
466 static void emit_jump_if_reachable PROTO((rtx));
467 static void emit_case_nodes PROTO((rtx, case_node_ptr, rtx, tree));
469 int bc_expand_exit_loop_if_false ();
470 void bc_expand_start_cond ();
471 void bc_expand_end_cond ();
472 void bc_expand_start_else ();
473 void bc_expand_end_bindings ();
474 void bc_expand_start_case ();
475 void bc_check_for_full_enumeration_handling ();
476 void bc_expand_end_case ();
477 void bc_expand_decl ();
479 extern rtx bc_allocate_local ();
480 extern rtx bc_allocate_variable_array ();
485 gcc_obstack_init (&stmt_obstack);
487 empty_cleanup_list = build_tree_list (NULL_TREE, NULL_TREE);
492 init_stmt_for_function ()
494 /* We are not currently within any block, conditional, loop or case. */
496 stack_block_stack = 0;
503 block_start_count = 0;
505 /* No gotos have been expanded yet. */
506 goto_fixup_chain = 0;
508 /* We are not processing a ({...}) grouping. */
509 expr_stmts_for_value = 0;
517 p->block_stack = block_stack;
518 p->stack_block_stack = stack_block_stack;
519 p->cond_stack = cond_stack;
520 p->loop_stack = loop_stack;
521 p->case_stack = case_stack;
522 p->nesting_stack = nesting_stack;
523 p->nesting_depth = nesting_depth;
524 p->block_start_count = block_start_count;
525 p->last_expr_type = last_expr_type;
526 p->last_expr_value = last_expr_value;
527 p->expr_stmts_for_value = expr_stmts_for_value;
528 p->emit_filename = emit_filename;
529 p->emit_lineno = emit_lineno;
530 p->goto_fixup_chain = goto_fixup_chain;
534 restore_stmt_status (p)
537 block_stack = p->block_stack;
538 stack_block_stack = p->stack_block_stack;
539 cond_stack = p->cond_stack;
540 loop_stack = p->loop_stack;
541 case_stack = p->case_stack;
542 nesting_stack = p->nesting_stack;
543 nesting_depth = p->nesting_depth;
544 block_start_count = p->block_start_count;
545 last_expr_type = p->last_expr_type;
546 last_expr_value = p->last_expr_value;
547 expr_stmts_for_value = p->expr_stmts_for_value;
548 emit_filename = p->emit_filename;
549 emit_lineno = p->emit_lineno;
550 goto_fixup_chain = p->goto_fixup_chain;
553 /* Emit a no-op instruction. */
560 if (!output_bytecode)
562 last_insn = get_last_insn ();
564 && (GET_CODE (last_insn) == CODE_LABEL
565 || prev_real_insn (last_insn) == 0))
566 emit_insn (gen_nop ());
570 /* Return the rtx-label that corresponds to a LABEL_DECL,
571 creating it if necessary. */
577 if (TREE_CODE (label) != LABEL_DECL)
580 if (DECL_RTL (label))
581 return DECL_RTL (label);
583 return DECL_RTL (label) = gen_label_rtx ();
586 /* Add an unconditional jump to LABEL as the next sequential instruction. */
592 do_pending_stack_adjust ();
593 emit_jump_insn (gen_jump (label));
597 /* Emit code to jump to the address
598 specified by the pointer expression EXP. */
601 expand_computed_goto (exp)
606 bc_expand_expr (exp);
607 bc_emit_instruction (jumpP);
611 rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
613 emit_indirect_jump (x);
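/* Illustrative note: this is the path assumed to be used for GNU C's
   computed goto, e.g.

       void *p = &&lab;  ...  goto *p;

   where the front end hands the pointer expression to
   expand_computed_goto; for bytecode the value is pushed and a jumpP
   instruction emitted, otherwise an indirect jump is emitted.  */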
617 /* Handle goto statements and the labels that they can go to. */
619 /* Specify the location in the RTL code of a label LABEL,
620 which is a LABEL_DECL tree node.
622 This is used for the kind of label that the user can jump to with a
623 goto statement, and for alternatives of a switch or case statement.
624 RTL labels generated for loops and conditionals don't go through here;
625 they are generated directly at the RTL level, by other functions below.
627 Note that this has nothing to do with defining label *names*.
628 Languages vary in how they do that and what that even means. */
634 struct label_chain *p;
638 if (! DECL_RTL (label))
639 DECL_RTL (label) = bc_gen_rtx ((char *) 0, 0, bc_get_bytecode_label ());
640 if (! bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (DECL_RTL (label))))
641 error ("multiply defined label");
645 do_pending_stack_adjust ();
646 emit_label (label_rtx (label));
647 if (DECL_NAME (label))
648 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
650 if (stack_block_stack != 0)
652 p = (struct label_chain *) oballoc (sizeof (struct label_chain));
653 p->next = stack_block_stack->data.block.label_chain;
654 stack_block_stack->data.block.label_chain = p;
659 /* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
660 from nested functions. */
663 declare_nonlocal_label (label)
666 nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
667 LABEL_PRESERVE_P (label_rtx (label)) = 1;
668 if (nonlocal_goto_handler_slot == 0)
670 nonlocal_goto_handler_slot
671 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
672 emit_stack_save (SAVE_NONLOCAL,
673 &nonlocal_goto_stack_level,
674 PREV_INSN (tail_recursion_reentry));
678 /* Generate RTL code for a `goto' statement with target label LABEL.
679 LABEL should be a LABEL_DECL tree node that was or will later be
680 defined with `expand_label'. */
690 expand_goto_internal (label, label_rtx (label), NULL_RTX);
694 /* Check for a nonlocal goto to a containing function. */
695 context = decl_function_context (label);
696 if (context != 0 && context != current_function_decl)
698 struct function *p = find_function_data (context);
699 rtx label_ref = gen_rtx (LABEL_REF, Pmode, label_rtx (label));
702 p->has_nonlocal_label = 1;
703 current_function_has_nonlocal_goto = 1;
704 LABEL_REF_NONLOCAL_P (label_ref) = 1;
706 /* Copy the rtl for the slots so that they won't be shared in
707 case the virtual stack vars register gets instantiated differently
708 in the parent than in the child. */
710 #if HAVE_nonlocal_goto
711 if (HAVE_nonlocal_goto)
712 emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
713 copy_rtx (p->nonlocal_goto_handler_slot),
714 copy_rtx (p->nonlocal_goto_stack_level),
721 /* Restore frame pointer for containing function.
722 This sets the actual hard register used for the frame pointer
723 to the location of the function's incoming static chain info.
724 The non-local goto handler will then adjust it to contain the
725 proper value and reload the argument pointer, if needed. */
726 emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));
728       /* We have now loaded the frame pointer hardware register with
729	 the address that corresponds to the start of the virtual
730	 stack vars.  So replace virtual_stack_vars_rtx in all
731	 addresses we use with hard_frame_pointer_rtx.  */
733 /* Get addr of containing function's current nonlocal goto handler,
734 which will do any cleanups and then jump to the label. */
735 addr = copy_rtx (p->nonlocal_goto_handler_slot);
736 temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
737 hard_frame_pointer_rtx));
739 /* Restore the stack pointer. Note this uses fp just restored. */
740 addr = p->nonlocal_goto_stack_level;
742 addr = replace_rtx (copy_rtx (addr),
743 virtual_stack_vars_rtx,
744 hard_frame_pointer_rtx);
746 emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);
748 /* Put in the static chain register the nonlocal label address. */
749 emit_move_insn (static_chain_rtx, label_ref);
750       /* USE of hard_frame_pointer_rtx added for consistency; not clear if
	 really needed.  */
752 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
753 emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
754 emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
755 emit_indirect_jump (temp);
759 expand_goto_internal (label, label_rtx (label), NULL_RTX);
762 /* Generate RTL code for a `goto' statement with target label BODY.
763    LABEL should be a CODE_LABEL rtx.
764 LAST_INSN, if non-0, is the rtx we should consider as the last
765 insn emitted (for the purposes of cleaning up a return). */
768 expand_goto_internal (body, label, last_insn)
773 struct nesting *block;
776 /* NOTICE! If a bytecode instruction other than `jump' is needed,
777 then the caller has to call bc_expand_goto_internal()
778 directly. This is rather an exceptional case, and there aren't
779 that many places where this is necessary. */
782       bc_expand_goto_internal (jump, BYTECODE_BC_LABEL (label), body);
786 if (GET_CODE (label) != CODE_LABEL)
789 /* If label has already been defined, we can tell now
790 whether and how we must alter the stack level. */
792 if (PREV_INSN (label) != 0)
794 /* Find the innermost pending block that contains the label.
795 (Check containment by comparing insn-uids.)
796 Then restore the outermost stack level within that block,
797 and do cleanups of all blocks contained in it. */
798 for (block = block_stack; block; block = block->next)
800 if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
802 if (block->data.block.stack_level != 0)
803 stack_level = block->data.block.stack_level;
804 /* Execute the cleanups for blocks we are exiting. */
805 if (block->data.block.cleanups != 0)
807 expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
808 do_pending_stack_adjust ();
814 /* Ensure stack adjust isn't done by emit_jump, as this would clobber
815 the stack pointer. This one should be deleted as dead by flow. */
816 clear_pending_stack_adjust ();
817 do_pending_stack_adjust ();
818 emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
821 if (body != 0 && DECL_TOO_LATE (body))
822 error ("jump to `%s' invalidly jumps into binding contour",
823 IDENTIFIER_POINTER (DECL_NAME (body)));
825 /* Label not yet defined: may need to put this goto
826 on the fixup list. */
827 else if (! expand_fixup (body, label, last_insn))
829 /* No fixup needed. Record that the label is the target
830 of at least one goto that has no fixup. */
832 TREE_ADDRESSABLE (body) = 1;
838 /* Generate a jump with OPCODE to the given bytecode LABEL which is
839 found within BODY. */
842 bc_expand_goto_internal (opcode, label, body)
843 enum bytecode_opcode opcode;
844 struct bc_label *label;
847 struct nesting *block;
848 int stack_level = -1;
850 /* If the label is defined, adjust the stack as necessary.
851 If it's not defined, we have to push the reference on the
857 /* Find the innermost pending block that contains the label.
858 (Check containment by comparing bytecode uids.) Then restore the
859 outermost stack level within that block. */
861 for (block = block_stack; block; block = block->next)
863 if (BYTECODE_BC_LABEL (block->data.block.first_insn)->uid < label->uid)
865 if (block->data.block.bc_stack_level)
866 stack_level = block->data.block.bc_stack_level;
868 /* Execute the cleanups for blocks we are exiting. */
869 if (block->data.block.cleanups != 0)
871 expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
872 do_pending_stack_adjust ();
876 /* Restore the stack level. If we need to adjust the stack, we
877 must do so after the jump, since the jump may depend on
878 what's on the stack. Thus, any stack-modifying conditional
879 jumps (these are the only ones that rely on what's on the
880 stack) go into the fixup list. */
883 && stack_depth != stack_level
886 bc_expand_fixup (opcode, label, stack_level);
889 if (stack_level >= 0)
890 bc_adjust_stack (stack_depth - stack_level);
892 if (body && DECL_BIT_FIELD (body))
893 error ("jump to `%s' invalidly jumps into binding contour",
894 IDENTIFIER_POINTER (DECL_NAME (body)));
896 /* Emit immediate jump */
897 bc_emit_bytecode (opcode);
898 bc_emit_bytecode_labelref (label);
900 #ifdef DEBUG_PRINT_CODE
901 fputc ('\n', stderr);
906 /* Put goto in the fixup list */
907 bc_expand_fixup (opcode, label, stack_level);
910 /* Generate if necessary a fixup for a goto
911 whose target label in tree structure (if any) is TREE_LABEL
912 and whose target in rtl is RTL_LABEL.
914 If LAST_INSN is nonzero, we pretend that the jump appears
915 after insn LAST_INSN instead of at the current point in the insn stream.
917 The fixup will be used later to insert insns just before the goto.
918 Those insns will restore the stack level as appropriate for the
919 target label, and will (in the case of C++) also invoke any object
920 destructors which have to be invoked when we exit the scopes which
921 are exited by the goto.
923 Value is nonzero if a fixup is made. */
926 expand_fixup (tree_label, rtl_label, last_insn)
931 struct nesting *block, *end_block;
933 /* See if we can recognize which block the label will be output in.
934 This is possible in some very common cases.
935 If we succeed, set END_BLOCK to that block.
936 Otherwise, set it to 0. */
939 && (rtl_label == cond_stack->data.cond.endif_label
940 || rtl_label == cond_stack->data.cond.next_label))
941 end_block = cond_stack;
942 /* If we are in a loop, recognize certain labels which
943 are likely targets. This reduces the number of fixups
944 we need to create. */
946 && (rtl_label == loop_stack->data.loop.start_label
947 || rtl_label == loop_stack->data.loop.end_label
948 || rtl_label == loop_stack->data.loop.continue_label))
949 end_block = loop_stack;
953 /* Now set END_BLOCK to the binding level to which we will return. */
957 struct nesting *next_block = end_block->all;
960 /* First see if the END_BLOCK is inside the innermost binding level.
961 If so, then no cleanups or stack levels are relevant. */
962 while (next_block && next_block != block)
963 next_block = next_block->all;
968 /* Otherwise, set END_BLOCK to the innermost binding level
969 which is outside the relevant control-structure nesting. */
970 next_block = block_stack->next;
971 for (block = block_stack; block != end_block; block = block->all)
972 if (block == next_block)
973 next_block = next_block->next;
974 end_block = next_block;
977 /* Does any containing block have a stack level or cleanups?
978 If not, no fixup is needed, and that is the normal case
979 (the only case, for standard C). */
980 for (block = block_stack; block != end_block; block = block->next)
981 if (block->data.block.stack_level != 0
982 || block->data.block.cleanups != 0)
985 if (block != end_block)
987 /* Ok, a fixup is needed. Add a fixup to the list of such. */
988 struct goto_fixup *fixup
989 = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
990 /* In case an old stack level is restored, make sure that comes
991 after any pending stack adjust. */
992 /* ?? If the fixup isn't to come at the present position,
993 doing the stack adjust here isn't useful. Doing it with our
994	 settings at that location isn't useful either.  Let's hope
	 someone fixes this some day.  */
997 do_pending_stack_adjust ();
998 fixup->target = tree_label;
999 fixup->target_rtl = rtl_label;
1001 /* Create a BLOCK node and a corresponding matched set of
1002 NOTE_INSN_BEGIN_BLOCK and NOTE_INSN_END_BLOCK notes at
1003 this point. The notes will encapsulate any and all fixup
1004 code which we might later insert at this point in the insn
1005 stream. Also, the BLOCK node will be the parent (i.e. the
1006 `SUPERBLOCK') of any other BLOCK nodes which we might create
1007 later on when we are expanding the fixup code. */
1010 register rtx original_before_jump
1011 = last_insn ? last_insn : get_last_insn ();
1015 fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
1016 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
1017 fixup->context = poplevel (1, 0, 0); /* Create the BLOCK node now! */
1019 emit_insns_after (fixup->before_jump, original_before_jump);
1022 fixup->block_start_count = block_start_count;
1023 fixup->stack_level = 0;
1024 fixup->cleanup_list_list
1025 = (((block->data.block.outer_cleanups
1027 && block->data.block.outer_cleanups != empty_cleanup_list
1030 || block->data.block.cleanups)
1031 ? tree_cons (NULL_TREE, block->data.block.cleanups,
1032 block->data.block.outer_cleanups)
1034 fixup->next = goto_fixup_chain;
1035 goto_fixup_chain = fixup;
1042 /* Generate bytecode jump with OPCODE to a fixup routine that links to LABEL.
1043 Make the fixup restore the stack level to STACK_LEVEL. */
1046 bc_expand_fixup (opcode, label, stack_level)
1047 enum bytecode_opcode opcode;
1048 struct bc_label *label;
1051 struct goto_fixup *fixup
1052 = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
1054 fixup->label = bc_get_bytecode_label ();
1055 fixup->bc_target = label;
1056 fixup->bc_stack_level = stack_level;
1057 fixup->bc_handled = FALSE;
1059 fixup->next = goto_fixup_chain;
1060 goto_fixup_chain = fixup;
1062 /* Insert a jump to the fixup code */
1063 bc_emit_bytecode (opcode);
1064 bc_emit_bytecode_labelref (fixup->label);
1066 #ifdef DEBUG_PRINT_CODE
1067 fputc ('\n', stderr);
1071 /* Expand any needed fixups in the outputmost binding level of the
1072 function. FIRST_INSN is the first insn in the function. */
1075 expand_fixups (first_insn)
1078 fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
1081 /* When exiting a binding contour, process all pending gotos requiring fixups.
1082 THISBLOCK is the structure that describes the block being exited.
1083 STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
1084 CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
1085 FIRST_INSN is the insn that began this contour.
1087 Gotos that jump out of this contour must restore the
1088 stack level and do the cleanups before actually jumping.
1090    DONT_JUMP_IN nonzero means report an error if there is a jump into this
1091 contour from before the beginning of the contour.
1092 This is also done if STACK_LEVEL is nonzero. */
1095 fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
1096 struct nesting *thisblock;
1102 register struct goto_fixup *f, *prev;
1104 if (output_bytecode)
1106 /* ??? The second arg is the bc stack level, which is not the same
1107	 as STACK_LEVEL.  I have no idea what should go here, so I'll just pass 0.  */
1109 bc_fixup_gotos (thisblock, 0, cleanup_list, first_insn, dont_jump_in);
1113 /* F is the fixup we are considering; PREV is the previous one. */
1114 /* We run this loop in two passes so that cleanups of exited blocks
1115 are run first, and blocks that are exited are marked so
1118 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1120 /* Test for a fixup that is inactive because it is already handled. */
1121 if (f->before_jump == 0)
1123 /* Delete inactive fixup from the chain, if that is easy to do. */
1125 prev->next = f->next;
1127 /* Has this fixup's target label been defined?
1128 If so, we can finalize it. */
1129 else if (PREV_INSN (f->target_rtl) != 0)
1131 register rtx cleanup_insns;
1133 /* Get the first non-label after the label
1134 this goto jumps to. If that's before this scope begins,
1135 we don't have a jump into the scope. */
1136 rtx after_label = f->target_rtl;
1137 while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
1138 after_label = NEXT_INSN (after_label);
1140 /* If this fixup jumped into this contour from before the beginning
1141 of this contour, report an error. */
1142 /* ??? Bug: this does not detect jumping in through intermediate
1143 blocks that have stack levels or cleanups.
1144 It detects only a problem with the innermost block
1145 around the label. */
1147 && (dont_jump_in || stack_level || cleanup_list)
1148 /* If AFTER_LABEL is 0, it means the jump goes to the end
1149 of the rtl, which means it jumps into this scope. */
1150 && (after_label == 0
1151 || INSN_UID (first_insn) < INSN_UID (after_label))
1152 && INSN_UID (first_insn) > INSN_UID (f->before_jump)
1153 && ! DECL_REGISTER (f->target))
1155 error_with_decl (f->target,
1156 "label `%s' used before containing binding contour");
1157 /* Prevent multiple errors for one label. */
1158 DECL_REGISTER (f->target) = 1;
1161 /* We will expand the cleanups into a sequence of their own and
1162 then later on we will attach this new sequence to the insn
1163 stream just ahead of the actual jump insn. */
1167 /* Temporarily restore the lexical context where we will
1168 logically be inserting the fixup code. We do this for the
1169 sake of getting the debugging information right. */
1172 set_block (f->context);
1174 /* Expand the cleanups for blocks this jump exits. */
1175 if (f->cleanup_list_list)
1178 for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
1179 /* Marked elements correspond to blocks that have been closed.
1180 Do their cleanups. */
1181 if (TREE_ADDRESSABLE (lists)
1182 && TREE_VALUE (lists) != 0)
1184 expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
1185 /* Pop any pushes done in the cleanups,
1186 in case function is about to return. */
1187 do_pending_stack_adjust ();
1191 /* Restore stack level for the biggest contour that this
1192 jump jumps out of. */
1194 emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);
1196 /* Finish up the sequence containing the insns which implement the
1197 necessary cleanups, and then attach that whole sequence to the
1198 insn stream just ahead of the actual jump insn. Attaching it
1199	     at that point ensures that any cleanups which are in fact
1200 implicit C++ object destructions (which must be executed upon
1201 leaving the block) appear (to the debugger) to be taking place
1202 in an area of the generated code where the object(s) being
1203 destructed are still "in scope". */
1205 cleanup_insns = get_insns ();
1209 emit_insns_after (cleanup_insns, f->before_jump);
1216 /* Mark the cleanups of exited blocks so that they are executed
1217 by the code above. */
1218 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1219 if (f->before_jump != 0
1220 && PREV_INSN (f->target_rtl) == 0
1221 /* Label has still not appeared. If we are exiting a block with
1222 a stack level to restore, that started before the fixup,
1223 mark this stack level as needing restoration
1224 when the fixup is later finalized.
1225 Also mark the cleanup_list_list element for F
1226 that corresponds to this block, so that ultimately
1227 this block's cleanups will be executed by the code above. */
1229 /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared,
1230 it means the label is undefined. That's erroneous, but possible. */
1231 && (thisblock->data.block.block_start_count
1232 <= f->block_start_count))
1234 tree lists = f->cleanup_list_list;
1235 for (; lists; lists = TREE_CHAIN (lists))
1236 /* If the following elt. corresponds to our containing block
1237 then the elt. must be for this block. */
1238 if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
1239 TREE_ADDRESSABLE (lists) = 1;
1242 f->stack_level = stack_level;
1247 /* When exiting a binding contour, process all pending gotos requiring fixups.
1248 Note: STACK_DEPTH is not altered.
1250 The arguments are currently not used in the bytecode compiler, but we may
1251 need them one day for languages other than C.
1253 THISBLOCK is the structure that describes the block being exited.
1254 STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
1255 CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
1256 FIRST_INSN is the insn that began this contour.
1258 Gotos that jump out of this contour must restore the
1259 stack level and do the cleanups before actually jumping.
1261    DONT_JUMP_IN nonzero means report an error if there is a jump into this
1262 contour from before the beginning of the contour.
1263 This is also done if STACK_LEVEL is nonzero. */
1266 bc_fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
1267 struct nesting *thisblock;
1273 register struct goto_fixup *f, *prev;
1274 int saved_stack_depth;
1276 /* F is the fixup we are considering; PREV is the previous one. */
1278 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1280 /* Test for a fixup that is inactive because it is already handled. */
1281 if (f->before_jump == 0)
1283 /* Delete inactive fixup from the chain, if that is easy to do. */
1285 prev->next = f->next;
1288 /* Emit code to restore the stack and continue */
1289 bc_emit_bytecode_labeldef (f->label);
1291 /* Save stack_depth across call, since bc_adjust_stack () will alter
1292 the perceived stack depth via the instructions generated. */
1294 if (f->bc_stack_level >= 0)
1296 saved_stack_depth = stack_depth;
1297 bc_adjust_stack (stack_depth - f->bc_stack_level);
1298 stack_depth = saved_stack_depth;
1301 bc_emit_bytecode (jump);
1302 bc_emit_bytecode_labelref (f->bc_target);
1304 #ifdef DEBUG_PRINT_CODE
1305 fputc ('\n', stderr);
1309 goto_fixup_chain = NULL;
1312 /* Generate RTL for an asm statement (explicit assembler code).
1313 BODY is a STRING_CST node containing the assembler code text,
1314 or an ADDR_EXPR containing a STRING_CST. */
1320 if (output_bytecode)
1322 error ("`asm' is invalid when generating bytecode");
1326 if (TREE_CODE (body) == ADDR_EXPR)
1327 body = TREE_OPERAND (body, 0);
1329 emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
1330 TREE_STRING_POINTER (body)));
1334 /* Generate RTL for an asm statement with arguments.
1335 STRING is the instruction template.
1336 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
1337 Each output or input has an expression in the TREE_VALUE and
1338 a constraint-string in the TREE_PURPOSE.
1339 CLOBBERS is a list of STRING_CST nodes each naming a hard register
1340 that is clobbered by this insn.
1342 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
1343 Some elements of OUTPUTS may be replaced with trees representing temporary
1344    values.  The caller should copy those temporary values to the originally
    specified lvalues.
1347 VOL nonzero means the insn is volatile; don't optimize it. */
1350 expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
1351 tree string, outputs, inputs, clobbers;
1356 rtvec argvec, constraints;
1358 int ninputs = list_length (inputs);
1359 int noutputs = list_length (outputs);
1363 /* Vector of RTX's of evaluated output operands. */
1364 rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1365 /* The insn we have emitted. */
1368 if (output_bytecode)
1370 error ("`asm' is invalid when generating bytecode");
1374 /* Count the number of meaningful clobbered registers, ignoring what
1375 we would ignore later. */
1377 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1379 char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1380 i = decode_reg_name (regname);
1381 if (i >= 0 || i == -4)
1384 error ("unknown register name `%s' in `asm'", regname);
1389 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1391 tree val = TREE_VALUE (tail);
1396 /* If there's an erroneous arg, emit no insn. */
1397 if (TREE_TYPE (val) == error_mark_node)
1400 /* Make sure constraint has `=' and does not have `+'. */
1403 for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
1405 if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
1407 error ("output operand constraint contains `+'");
1410 if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '=')
1415 error ("output operand constraint lacks `='");
1419 /* If an output operand is not a variable or indirect ref,
1421 create a SAVE_EXPR which is a pseudo-reg
1422 to act as an intermediate temporary.
1423 Make the asm insn write into that, then copy it to
1424 the real output operand. */
1426 while (TREE_CODE (val) == COMPONENT_REF
1427 || TREE_CODE (val) == ARRAY_REF)
1428 val = TREE_OPERAND (val, 0);
1430 if (TREE_CODE (val) != VAR_DECL
1431 && TREE_CODE (val) != PARM_DECL
1432 && TREE_CODE (val) != INDIRECT_REF)
1434 TREE_VALUE (tail) = save_expr (TREE_VALUE (tail));
1435	  /* If it's a constant, print the error now so we don't crash later.  */
1436 if (TREE_CODE (TREE_VALUE (tail)) != SAVE_EXPR)
1438 error ("invalid output in `asm'");
1443 output_rtx[i] = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
1446 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1448 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1452 /* Make vectors for the expression-rtx and constraint strings. */
1454 argvec = rtvec_alloc (ninputs);
1455 constraints = rtvec_alloc (ninputs);
1457 body = gen_rtx (ASM_OPERANDS, VOIDmode,
1458 TREE_STRING_POINTER (string), "", 0, argvec, constraints,
1460 MEM_VOLATILE_P (body) = vol;
1462 /* Eval the inputs and put them into ARGVEC.
1463 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1466 for (tail = inputs; tail; tail = TREE_CHAIN (tail))
1470 /* If there's an erroneous arg, emit no insn,
1471 because the ASM_INPUT would get VOIDmode
1472 and that could cause a crash in reload. */
1473 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1475 if (TREE_PURPOSE (tail) == NULL_TREE)
1477 error ("hard register `%s' listed as input operand to `asm'",
1478 TREE_STRING_POINTER (TREE_VALUE (tail)) );
1482 /* Make sure constraint has neither `=' nor `+'. */
1484 for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
1485 if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '='
1486 || TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
1488 error ("input operand constraint contains `%c'",
1489 TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
1493 XVECEXP (body, 3, i) /* argvec */
1494 = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
1495 if (CONSTANT_P (XVECEXP (body, 3, i))
1496 && ! general_operand (XVECEXP (body, 3, i),
1497 TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)))))
1498 XVECEXP (body, 3, i)
1499 = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1500 XVECEXP (body, 3, i));
1501 XVECEXP (body, 4, i) /* constraints */
1502 = gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1503 TREE_STRING_POINTER (TREE_PURPOSE (tail)));
1507 /* Protect all the operands from the queue,
1508 now that they have all been evaluated. */
1510 for (i = 0; i < ninputs; i++)
1511 XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);
1513 for (i = 0; i < noutputs; i++)
1514 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1516 /* Now, for each output, construct an rtx
1517 (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
1518 ARGVEC CONSTRAINTS))
1519 If there is more than one, put them inside a PARALLEL. */
1521 if (noutputs == 1 && nclobbers == 0)
1523 XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
1524 insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));
1526 else if (noutputs == 0 && nclobbers == 0)
1528 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1529 insn = emit_insn (body);
1535 if (num == 0) num = 1;
1536 body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));
1538 /* For each output operand, store a SET. */
1540 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1542 XVECEXP (body, 0, i)
1543 = gen_rtx (SET, VOIDmode,
1545 gen_rtx (ASM_OPERANDS, VOIDmode,
1546 TREE_STRING_POINTER (string),
1547 TREE_STRING_POINTER (TREE_PURPOSE (tail)),
1548 i, argvec, constraints,
1550 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1553 /* If there are no outputs (but there are some clobbers)
1554 store the bare ASM_OPERANDS into the PARALLEL. */
1557 XVECEXP (body, 0, i++) = obody;
1559 /* Store (clobber REG) for each clobbered register specified. */
1561 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1563 char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1564 int j = decode_reg_name (regname);
1568 if (j == -3) /* `cc', which is not a register */
1571 if (j == -4) /* `memory', don't cache memory across asm */
1573 XVECEXP (body, 0, i++)
1574 = gen_rtx (CLOBBER, VOIDmode,
1575 gen_rtx (MEM, BLKmode,
1576 gen_rtx (SCRATCH, VOIDmode, 0)));
1580 /* Ignore unknown register, error already signalled. */
1583 /* Use QImode since that's guaranteed to clobber just one reg. */
1584 XVECEXP (body, 0, i++)
1585 = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));
1588 insn = emit_insn (body);
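/* Illustrative note: a C-level extended asm such as

       asm volatile ("swap %0,%1" : "=r" (x) : "r" (y) : "memory");

   (the template string here is made up) is assumed to reach
   expand_asm_operands with STRING = "swap %0,%1", OUTPUTS holding one
   "=r" lvalue, INPUTS holding one "r" value, CLOBBERS = ("memory") and
   VOL = 1; the code above builds the ASM_OPERANDS body, wraps it in SETs
   for the outputs and CLOBBERs for the clobbered registers, and emits
   the result.  */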
1594 /* Generate RTL to evaluate the expression EXP
1595 and remember it in case this is the VALUE in a ({... VALUE; }) constr. */
1598 expand_expr_stmt (exp)
1601 if (output_bytecode)
1603 int org_stack_depth = stack_depth;
1605 bc_expand_expr (exp);
1607 /* Restore stack depth */
1608 if (stack_depth < org_stack_depth)
1611 bc_emit_instruction (drop);
1613 last_expr_type = TREE_TYPE (exp);
1617 /* If -W, warn about statements with no side effects,
1618 except for an explicit cast to void (e.g. for assert()), and
1619 except inside a ({...}) where they may be useful. */
1620 if (expr_stmts_for_value == 0 && exp != error_mark_node)
1622 if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
1623 && !(TREE_CODE (exp) == CONVERT_EXPR
1624 && TREE_TYPE (exp) == void_type_node))
1625 warning_with_file_and_line (emit_filename, emit_lineno,
1626 "statement with no effect");
1627 else if (warn_unused)
1628 warn_if_unused_value (exp);
1631 /* If EXP is of function type and we are expanding statements for
1632 value, convert it to pointer-to-function. */
1633 if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
1634 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
1636 last_expr_type = TREE_TYPE (exp);
1637 if (! flag_syntax_only)
1638 last_expr_value = expand_expr (exp,
1639 (expr_stmts_for_value
1640 ? NULL_RTX : const0_rtx),
1643 /* If all we do is reference a volatile value in memory,
1644 copy it to a register to be sure it is actually touched. */
1645 if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
1646 && TREE_THIS_VOLATILE (exp))
1648 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
1650 else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
1651 copy_to_reg (last_expr_value);
1654 rtx lab = gen_label_rtx ();
1656 /* Compare the value with itself to reference it. */
1657 emit_cmp_insn (last_expr_value, last_expr_value, EQ,
1658 expand_expr (TYPE_SIZE (last_expr_type),
1659 NULL_RTX, VOIDmode, 0),
1661 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
1662 emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
1667 /* If this expression is part of a ({...}) and is in memory, we may have
1668 to preserve temporaries. */
1669 preserve_temp_slots (last_expr_value);
1671 /* Free any temporaries used to evaluate this expression. Any temporary
1672     used as a result of this expression will already have been preserved.  */
1679 /* Warn if EXP contains any computations whose results are not used.
1680 Return 1 if a warning is printed; 0 otherwise. */
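/* Illustrative note: under -Wunused, a statement such as `f (), x + 1;'
   draws "value computed is not used" for the `x + 1' operand via the
   COMPOUND_EXPR case below, while `i++;', a cast to void, or a reference
   to a volatile object is deliberately left alone by the cases handled
   here.  */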
1683 warn_if_unused_value (exp)
1686 if (TREE_USED (exp))
1689 switch (TREE_CODE (exp))
1691 case PREINCREMENT_EXPR:
1692 case POSTINCREMENT_EXPR:
1693 case PREDECREMENT_EXPR:
1694 case POSTDECREMENT_EXPR:
1699 case METHOD_CALL_EXPR:
1701 case WITH_CLEANUP_EXPR:
1703 /* We don't warn about COND_EXPR because it may be a useful
1704 construct if either arm contains a side effect. */
1709 /* For a binding, warn if no side effect within it. */
1710 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1712 case TRUTH_ORIF_EXPR:
1713 case TRUTH_ANDIF_EXPR:
1714 /* In && or ||, warn if 2nd operand has no side effect. */
1715 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1718 if (TREE_NO_UNUSED_WARNING (exp))
1720 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
1722 /* Let people do `(foo (), 0)' without a warning. */
1723 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1725 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1729 case NON_LVALUE_EXPR:
1730 /* Don't warn about values cast to void. */
1731 if (TREE_TYPE (exp) == void_type_node)
1733 /* Don't warn about conversions not explicit in the user's program. */
1734 if (TREE_NO_UNUSED_WARNING (exp))
1736 /* Assignment to a cast usually results in a cast of a modify.
1737 Don't complain about that. There can be an arbitrary number of
1738 casts before the modify, so we must loop until we find the first
1739 non-cast expression and then test to see if that is a modify. */
1741 tree tem = TREE_OPERAND (exp, 0);
1743 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
1744 tem = TREE_OPERAND (tem, 0);
1746 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR)
1752 /* Don't warn about automatic dereferencing of references, since
1753 the user cannot control it. */
1754 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1755 return warn_if_unused_value (TREE_OPERAND (exp, 0));
1756 /* ... fall through ... */
1759 /* Referencing a volatile value is a side effect, so don't warn. */
1760 if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
1761 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1762 && TREE_THIS_VOLATILE (exp))
1765 warning_with_file_and_line (emit_filename, emit_lineno,
1766 "value computed is not used");
1771 /* Clear out the memory of the last expression evaluated. */
1779 /* Begin a statement which will return a value.
1780 Return the RTL_EXPR for this statement expr.
1781 The caller must save that value and pass it to expand_end_stmt_expr. */
1784 expand_start_stmt_expr ()
1789 /* When generating bytecode just note down the stack depth */
1790 if (output_bytecode)
1791 return (build_int_2 (stack_depth, 0));
1793 /* Make the RTL_EXPR node temporary, not momentary,
1794 so that rtl_expr_chain doesn't become garbage. */
1795 momentary = suspend_momentary ();
1796 t = make_node (RTL_EXPR);
1797 resume_momentary (momentary);
1798 start_sequence_for_rtl_expr (t);
1800 expr_stmts_for_value++;
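/* Illustrative sketch (not part of the original sources): for a GNU C
   statement expression such as

       z = ({ int tmp = f (x); tmp + 1; });

   a front end is assumed to bracket the braces with these two routines:

       t = expand_start_stmt_expr ();
       ...expand_expr_stmt () for each statement inside...
       t = expand_end_stmt_expr (t);    value is that of the last expr-stmt

   expr_stmts_for_value is incremented above so that every expr-stmt keeps
   its value, since any one of them may turn out to be the last.  */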
1804 /* Restore the previous state at the end of a statement that returns a value.
1805 Returns a tree node representing the statement's value and the
1806 insns to compute the value.
1808 The nodes of that expression have been freed by now, so we cannot use them.
1809 But we don't want to do that anyway; the expression has already been
1810 evaluated and now we just want to use the value. So generate a RTL_EXPR
1811 with the proper type and RTL value.
1813 If the last substatement was not an expression,
1814 return something with type `void'. */
1817 expand_end_stmt_expr (t)
1820 if (output_bytecode)
1826 /* At this point, all expressions have been evaluated in order.
1827 However, all expression values have been popped when evaluated,
1828 which means we have to recover the last expression value. This is
1829 the last value removed by means of a `drop' instruction. Instead
1830 of adding code to inhibit dropping the last expression value, it
1831 is here recovered by undoing the `drop'. Since `drop' is
1832	 equivalent to `adjstackSI [1]', it can be undone with `adjstackSI [-1]'.  */
1835 bc_adjust_stack (-1);
1837 if (!last_expr_type)
1838 last_expr_type = void_type_node;
1840 t = make_node (RTL_EXPR);
1841 TREE_TYPE (t) = last_expr_type;
1842 RTL_EXPR_RTL (t) = NULL;
1843 RTL_EXPR_SEQUENCE (t) = NULL;
1845 /* Don't consider deleting this expr or containing exprs at tree level. */
1846 TREE_THIS_VOLATILE (t) = 1;
1854 if (last_expr_type == 0)
1856 last_expr_type = void_type_node;
1857 last_expr_value = const0_rtx;
1859 else if (last_expr_value == 0)
1860 /* There are some cases where this can happen, such as when the
1861 statement is void type. */
1862 last_expr_value = const0_rtx;
1863 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
1864 /* Remove any possible QUEUED. */
1865 last_expr_value = protect_from_queue (last_expr_value, 0);
1869 TREE_TYPE (t) = last_expr_type;
1870 RTL_EXPR_RTL (t) = last_expr_value;
1871 RTL_EXPR_SEQUENCE (t) = get_insns ();
1873 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
1877 /* Don't consider deleting this expr or containing exprs at tree level. */
1878 TREE_SIDE_EFFECTS (t) = 1;
1879 /* Propagate volatility of the actual RTL expr. */
1880 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
1883 expr_stmts_for_value--;
1888 /* Generate RTL for the start of an if-then. COND is the expression
1889 whose truth should be tested.
1891 If EXITFLAG is nonzero, this conditional is visible to
1892 `exit_something'. */
1895 expand_start_cond (cond, exitflag)
1899 struct nesting *thiscond = ALLOC_NESTING ();
1901 /* Make an entry on cond_stack for the cond we are entering. */
1903 thiscond->next = cond_stack;
1904 thiscond->all = nesting_stack;
1905 thiscond->depth = ++nesting_depth;
1906 thiscond->data.cond.next_label = gen_label_rtx ();
1907 /* Before we encounter an `else', we don't need a separate exit label
1908 unless there are supposed to be exit statements
1909 to exit this conditional. */
1910 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
1911 thiscond->data.cond.endif_label = thiscond->exit_label;
1912 cond_stack = thiscond;
1913 nesting_stack = thiscond;
1915 if (output_bytecode)
1916 bc_expand_start_cond (cond, exitflag);
1918 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
1921 /* Generate RTL between the then-clause and the elseif-clause
1922 of an if-then-elseif-.... */
1925 expand_start_elseif (cond)
1928 if (cond_stack->data.cond.endif_label == 0)
1929 cond_stack->data.cond.endif_label = gen_label_rtx ();
1930 emit_jump (cond_stack->data.cond.endif_label);
1931 emit_label (cond_stack->data.cond.next_label);
1932 cond_stack->data.cond.next_label = gen_label_rtx ();
1933 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1936 /* Generate RTL between the then-clause and the else-clause
1937 of an if-then-else. */
1940 expand_start_else ()
1942 if (cond_stack->data.cond.endif_label == 0)
1943 cond_stack->data.cond.endif_label = gen_label_rtx ();
1945 if (output_bytecode)
1947 bc_expand_start_else ();
1951 emit_jump (cond_stack->data.cond.endif_label);
1952 emit_label (cond_stack->data.cond.next_label);
1953 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
1956 /* After calling expand_start_else, turn this "else" into an "else if"
1957 by providing another condition. */
1960 expand_elseif (cond)
1963 cond_stack->data.cond.next_label = gen_label_rtx ();
1964 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1967 /* Generate RTL for the end of an if-then.
1968 Pop the record for it off of cond_stack. */
1973 struct nesting *thiscond = cond_stack;
1975 if (output_bytecode)
1976 bc_expand_end_cond ();
1979 do_pending_stack_adjust ();
1980 if (thiscond->data.cond.next_label)
1981 emit_label (thiscond->data.cond.next_label);
1982 if (thiscond->data.cond.endif_label)
1983 emit_label (thiscond->data.cond.endif_label);
1986 POPSTACK (cond_stack);
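/* Illustration (a sketch, not compiler code): for a source-level chain such as

       if (a) s1; else if (b) s2; else s3;

   a front end might drive the routines above roughly as follows, where
   cond_a and cond_b stand in for the condition trees and the statements are
   expanded in between:

       expand_start_cond (cond_a, 0);
       ... expand s1 ...
       expand_start_elseif (cond_b);
       ... expand s2 ...
       expand_start_else ();
       ... expand s3 ...
       expand_end_cond ();
*/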
1991 /* Generate code for the start of an if-then. COND is the expression
1992 whose truth is to be tested; if EXITFLAG is nonzero this conditional
1993 is to be visible to exit_something. It is assumed that the caller
1994 has pushed the previous context on the cond stack. */
1997 bc_expand_start_cond (cond, exitflag)
2001 struct nesting *thiscond = cond_stack;
2003 thiscond->data.case_stmt.nominal_type = cond;
2005 thiscond->exit_label = gen_label_rtx ();
2006 bc_expand_expr (cond);
2007 bc_emit_bytecode (xjumpifnot);
2008 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));
2010 #ifdef DEBUG_PRINT_CODE
2011 fputc ('\n', stderr);
2015 /* Generate the label for the end of an if with an else. */
2019 bc_expand_end_cond ()
2021 struct nesting *thiscond = cond_stack;
2023 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->exit_label));
2026 /* Generate code for the start of the else-clause of an if-then-else. */
2030 bc_expand_start_else ()
2032 struct nesting *thiscond = cond_stack;
2034 thiscond->data.cond.endif_label = thiscond->exit_label;
2035 thiscond->exit_label = gen_label_rtx ();
2036 bc_emit_bytecode (jump);
2037 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));
2039 #ifdef DEBUG_PRINT_CODE
2040 fputc ('\n', stderr);
2043 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->data.cond.endif_label));
2046 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2047 loop should be exited by `exit_something'. This is a loop for which
2048 `expand_continue' will jump to the top of the loop.
2050 Make an entry on loop_stack to record the labels associated with this loop. */
2054 expand_start_loop (exit_flag)
2057 register struct nesting *thisloop = ALLOC_NESTING ();
2059 /* Make an entry on loop_stack for the loop we are entering. */
2061 thisloop->next = loop_stack;
2062 thisloop->all = nesting_stack;
2063 thisloop->depth = ++nesting_depth;
2064 thisloop->data.loop.start_label = gen_label_rtx ();
2065 thisloop->data.loop.end_label = gen_label_rtx ();
2066 thisloop->data.loop.alt_end_label = 0;
2067 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2068 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2069 loop_stack = thisloop;
2070 nesting_stack = thisloop;
2072 if (output_bytecode)
2074 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));
2078 do_pending_stack_adjust ();
2080 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
2081 emit_label (thisloop->data.loop.start_label);
2086 /* Like expand_start_loop but for a loop where the continuation point
2087 (for expand_continue_loop) will be specified explicitly. */
2090 expand_start_loop_continue_elsewhere (exit_flag)
2093 struct nesting *thisloop = expand_start_loop (exit_flag);
2094 loop_stack->data.loop.continue_label = gen_label_rtx ();
2098 /* Specify the continuation point for a loop started with
2099 expand_start_loop_continue_elsewhere.
2100 Use this at the point in the code to which a continue statement
2104 expand_loop_continue_here ()
2106 if (output_bytecode)
2108 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (loop_stack->data.loop.continue_label));
2111 do_pending_stack_adjust ();
2112 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
2113 emit_label (loop_stack->data.loop.continue_label);
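/* Illustration (a sketch, not compiler code): a front end expanding

       for (init; cond; incr) body;

   might use the continue-elsewhere variant so that `continue' lands on the
   increment, roughly as follows, where cond_tree stands in for the condition
   tree and 0 as the first argument of expand_exit_loop_if_false means the
   innermost loop:

       ... expand init ...
       expand_start_loop_continue_elsewhere (1);
       expand_exit_loop_if_false (0, cond_tree);
       ... expand body ...
       expand_loop_continue_here ();
       ... expand incr ...
       expand_end_loop ();
*/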
2119 bc_expand_end_loop ()
2121 struct nesting *thisloop = loop_stack;
2123 bc_emit_bytecode (jump);
2124 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));
2126 #ifdef DEBUG_PRINT_CODE
2127 fputc ('\n', stderr);
2130 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->exit_label));
2131 POPSTACK (loop_stack);
2136 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2137 Pop the block off of loop_stack. */
2143 register rtx start_label;
2144 rtx last_test_insn = 0;
2147 if (output_bytecode)
2149 bc_expand_end_loop ();
2153 insn = get_last_insn ();
2154 start_label = loop_stack->data.loop.start_label;
2156 /* Mark the continue-point at the top of the loop if none elsewhere. */
2157 if (start_label == loop_stack->data.loop.continue_label)
2158 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2160 do_pending_stack_adjust ();
2162 /* If optimizing, perhaps reorder the loop. If the loop
2163 starts with a conditional exit, roll that to the end
2164 where it will optimize together with the jump back.
2166 We look for the last conditional branch to the exit that we encounter
2167 before hitting 30 insns or a CALL_INSN. If we see an unconditional
2168 branch to the exit first, use it.
2170 We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
2171 because moving them is not valid. */
2175 ! (GET_CODE (insn) == JUMP_INSN
2176 && GET_CODE (PATTERN (insn)) == SET
2177 && SET_DEST (PATTERN (insn)) == pc_rtx
2178 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2180 /* Scan insns from the top of the loop looking for a qualified
2181 conditional exit. */
2182 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2183 insn = NEXT_INSN (insn))
2185 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
2188 if (GET_CODE (insn) == NOTE
2189 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2190 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2193 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2196 if (last_test_insn && num_insns > 30)
2199 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
2200 && SET_DEST (PATTERN (insn)) == pc_rtx
2201 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
2202 && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
2203 && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
2204 == loop_stack->data.loop.end_label)
2205 || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
2206 == loop_stack->data.loop.alt_end_label)))
2207 || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
2208 && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
2209 == loop_stack->data.loop.end_label)
2210 || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
2211 == loop_stack->data.loop.alt_end_label)))))
2212 last_test_insn = insn;
2214 if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
2215 && GET_CODE (PATTERN (insn)) == SET
2216 && SET_DEST (PATTERN (insn)) == pc_rtx
2217 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
2218 && ((XEXP (SET_SRC (PATTERN (insn)), 0)
2219 == loop_stack->data.loop.end_label)
2220 || (XEXP (SET_SRC (PATTERN (insn)), 0)
2221 == loop_stack->data.loop.alt_end_label)))
2222 /* Include BARRIER. */
2223 last_test_insn = NEXT_INSN (insn);
2226 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2228 /* We found one. Move everything from there up
2229 to the end of the loop, and add a jump into the loop
2230 to jump to there. */
2231 register rtx newstart_label = gen_label_rtx ();
2232 register rtx start_move = start_label;
2234 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2235 then we want to move this note also. */
2236 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2237 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2238 == NOTE_INSN_LOOP_CONT))
2239 start_move = PREV_INSN (start_move);
2241 emit_label_after (newstart_label, PREV_INSN (start_move));
2242 reorder_insns (start_move, last_test_insn, get_last_insn ());
2243 emit_jump_insn_after (gen_jump (start_label),
2244 PREV_INSN (newstart_label));
2245 emit_barrier_after (PREV_INSN (newstart_label));
2246 start_label = newstart_label;
2250 emit_jump (start_label);
2251 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2252 emit_label (loop_stack->data.loop.end_label);
2254 POPSTACK (loop_stack);
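/* Illustration of the reordering above (a sketch, not compiler code).
   A loop whose exit test sits at the top,

       start:  if (!cond) goto end;
               body;
               goto start;
       end:

   is rotated so the test sits next to the backward jump,

               goto test;
       top:    body;
       test:   if (!cond) goto end;
               goto top;
       end:

   which later jump optimization can collapse into a single
   `if (cond) goto top' at the bottom of the loop.  */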
2259 /* Generate a jump to the current loop's continue-point.
2260 This is usually the top of the loop, but may be specified
2261 explicitly elsewhere. If not currently inside a loop,
2262 return 0 and do nothing; caller will print an error message. */
2265 expand_continue_loop (whichloop)
2266 struct nesting *whichloop;
2270 whichloop = loop_stack;
2273 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2278 /* Generate a jump to exit the current loop. If not currently inside a loop,
2279 return 0 and do nothing; caller will print an error message. */
2282 expand_exit_loop (whichloop)
2283 struct nesting *whichloop;
2287 whichloop = loop_stack;
2290 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2294 /* Generate a conditional jump to exit the current loop if COND
2295 evaluates to zero. If not currently inside a loop,
2296 return 0 and do nothing; caller will print an error message. */
2299 expand_exit_loop_if_false (whichloop, cond)
2300 struct nesting *whichloop;
2305 whichloop = loop_stack;
2308 if (output_bytecode)
2310 bc_expand_expr (cond);
2311 bc_expand_goto_internal (xjumpifnot,
2312 BYTECODE_BC_LABEL (whichloop->exit_label),
2317 /* In order to handle fixups, we actually create a conditional jump
2318 around an unconditional branch to exit the loop. If fixups are
2319 necessary, they go before the unconditional branch. */
2321 rtx label = gen_label_rtx ();
2324 do_jump (cond, NULL_RTX, label);
2325 last_insn = get_last_insn ();
2326 if (GET_CODE (last_insn) == CODE_LABEL)
2327 whichloop->data.loop.alt_end_label = last_insn;
2328 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2336 /* Return non-zero if we should preserve sub-expressions as separate
2337 pseudos. We never do so if we aren't optimizing. We always do so
2338 if -fexpensive-optimizations.
2340 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2341 the loop may still be a small one. */
2344 preserve_subexpressions_p ()
2348 if (flag_expensive_optimizations)
2351 if (optimize == 0 || loop_stack == 0)
2354 insn = get_last_insn_anywhere ();
2357 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2358 < n_non_fixed_regs * 3));
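/* Illustration (a sketch): the point of keeping subexpressions in separate
   pseudos is to give later passes something to work with. In

       for (i = 0; i < n; i++)
         a[i] = x * y + i;

   keeping `x * y' in its own pseudo while we are still near the top of the
   loop lets cse and loop optimization reuse or hoist it instead of
   recomputing it on every iteration.  */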
2362 /* Generate a jump to exit the current loop, conditional, binding contour
2363 or case statement. Not all such constructs are visible to this function,
2364 only those started with EXIT_FLAG nonzero. Individual languages use
2365 the EXIT_FLAG parameter to control which kinds of constructs you can
2368 If not currently inside anything that can be exited,
2369 return 0 and do nothing; caller will print an error message. */
2372 expand_exit_something ()
2376 for (n = nesting_stack; n; n = n->all)
2377 if (n->exit_label != 0)
2379 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2386 /* Generate RTL to return from the current function, with no value.
2387 (That is, we do not do anything about returning any value.) */
2390 expand_null_return ()
2392 struct nesting *block = block_stack;
2395 if (output_bytecode)
2397 bc_emit_instruction (ret);
2401 /* Does any pending block have cleanups? */
2403 while (block && block->data.block.cleanups == 0)
2404 block = block->next;
2406 /* If yes, use a goto to return, since that runs cleanups. */
2408 expand_null_return_1 (last_insn, block != 0);
2411 /* Generate RTL to return from the current function, with value VAL. */
2414 expand_value_return (val)
2417 struct nesting *block = block_stack;
2418 rtx last_insn = get_last_insn ();
2419 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2421 /* Copy the value to the return location
2422 unless it's already there. */
2424 if (return_reg != val)
2426 #ifdef PROMOTE_FUNCTION_RETURN
2427 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2428 int unsignedp = TREE_UNSIGNED (type);
2429 enum machine_mode mode
2430 = promote_mode (type, DECL_MODE (DECL_RESULT (current_function_decl)),
2433 if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
2434 convert_move (return_reg, val, unsignedp);
2437 emit_move_insn (return_reg, val);
2439 if (GET_CODE (return_reg) == REG
2440 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2441 emit_insn (gen_rtx (USE, VOIDmode, return_reg));
2443 /* Does any pending block have cleanups? */
2445 while (block && block->data.block.cleanups == 0)
2446 block = block->next;
2448 /* If yes, use a goto to return, since that runs cleanups.
2449 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2451 expand_null_return_1 (last_insn, block != 0);
2454 /* Output a return with no value. If LAST_INSN is nonzero,
2455 pretend that the return takes place after LAST_INSN.
2456 If USE_GOTO is nonzero then don't use a return instruction;
2457 go to the return label instead. This causes any cleanups
2458 of pending blocks to be executed normally. */
2461 expand_null_return_1 (last_insn, use_goto)
2465 rtx end_label = cleanup_label ? cleanup_label : return_label;
2467 clear_pending_stack_adjust ();
2468 do_pending_stack_adjust ();
2471 /* PCC-struct return always uses an epilogue. */
2472 if (current_function_returns_pcc_struct || use_goto)
2475 end_label = return_label = gen_label_rtx ();
2476 expand_goto_internal (NULL_TREE, end_label, last_insn);
2480 /* Otherwise output a simple return-insn if one is available,
2481 unless it won't do the job. */
2483 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2485 emit_jump_insn (gen_return ());
2491 /* Otherwise jump to the epilogue. */
2492 expand_goto_internal (NULL_TREE, end_label, last_insn);
2495 /* Generate RTL to evaluate the expression RETVAL and return it
2496 from the current function. */
2499 expand_return (retval)
2502 /* If there are any cleanups to be performed, then they will
2503 be inserted following LAST_INSN. It is desirable
2504 that the last_insn, for such purposes, should be the
2505 last insn before computing the return value. Otherwise, cleanups
2506 which call functions can clobber the return value. */
2507 /* ??? rms: I think that is erroneous, because in C++ it would
2508 run destructors on variables that might be used in the subsequent
2509 computation of the return value. */
2511 register rtx val = 0;
2515 struct nesting *block;
2517 /* Bytecode returns are quite simple, just leave the result on the
2518 arithmetic stack. */
2519 if (output_bytecode)
2521 bc_expand_expr (retval);
2522 bc_emit_instruction (ret);
2526 /* If function wants no value, give it none. */
2527 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2529 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2531 expand_null_return ();
2535 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2536 /* This is not sufficient. We also need to watch for cleanups of the
2537 expression we are about to expand. Unfortunately, we cannot know
2538 if it has cleanups until we expand it, and we want to change how we
2539 expand it depending upon if we need cleanups. We can't win. */
2541 cleanups = any_pending_cleanups (1);
2546 if (TREE_CODE (retval) == RESULT_DECL)
2547 retval_rhs = retval;
2548 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2549 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2550 retval_rhs = TREE_OPERAND (retval, 1);
2551 else if (TREE_TYPE (retval) == void_type_node)
2552 /* Recognize tail-recursive call to void function. */
2553 retval_rhs = retval;
2555 retval_rhs = NULL_TREE;
2557 /* Only use `last_insn' if there are cleanups which must be run. */
2558 if (cleanups || cleanup_label != 0)
2559 last_insn = get_last_insn ();
2561 /* Distribute return down conditional expr if either of the sides
2562 may involve tail recursion (see test below). This enhances the number
2563 of tail recursions we see. Don't do this always since it can produce
2564 sub-optimal code in some cases and we distribute assignments into
2565 conditional expressions when it would help. */
2567 if (optimize && retval_rhs != 0
2568 && frame_offset == 0
2569 && TREE_CODE (retval_rhs) == COND_EXPR
2570 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2571 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2573 rtx label = gen_label_rtx ();
2576 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2577 expr = build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2578 DECL_RESULT (current_function_decl),
2579 TREE_OPERAND (retval_rhs, 1));
2580 TREE_SIDE_EFFECTS (expr) = 1;
2581 expand_return (expr);
2584 expr = build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2585 DECL_RESULT (current_function_decl),
2586 TREE_OPERAND (retval_rhs, 2));
2587 TREE_SIDE_EFFECTS (expr) = 1;
2588 expand_return (expr);
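/* Illustration of the distribution above (source level, a sketch):

       return p ? f (x) : g (x);

   is effectively rewritten as

       if (p) return f (x); else return g (x);

   so that each arm becomes its own return and can be recognized as a
   tail-recursive call by the code below.  */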
2592 /* For tail-recursive call to current function,
2593 just jump back to the beginning.
2594 It's unsafe if any auto variable in this function
2595 has its address taken; for simplicity,
2596 require stack frame to be empty. */
2597 if (optimize && retval_rhs != 0
2598 && frame_offset == 0
2599 && TREE_CODE (retval_rhs) == CALL_EXPR
2600 && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
2601 && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
2602 /* Finish checking validity, and if valid emit code
2603 to set the argument variables for the new call. */
2604 && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
2605 DECL_ARGUMENTS (current_function_decl)))
2607 if (tail_recursion_label == 0)
2609 tail_recursion_label = gen_label_rtx ();
2610 emit_label_after (tail_recursion_label,
2611 tail_recursion_reentry);
2614 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
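/* Illustration (source level, a sketch): in

       int
       fact (int n, int acc)
       {
         if (n == 0)
           return acc;
         return fact (n - 1, n * acc);
       }

   the second return is a call to the current function with an empty frame,
   so tail_recursion_args can store the new actuals into `n' and `acc' and
   the return becomes a jump back to tail_recursion_label instead of a real
   call.  */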
2619 /* This optimization is safe if there are local cleanups
2620 because expand_null_return takes care of them.
2621 ??? I think it should also be safe when there is a cleanup label,
2622 because expand_null_return takes care of them, too.
2623 Any reason why not? */
2624 if (HAVE_return && cleanup_label == 0
2625 && ! current_function_returns_pcc_struct
2626 && BRANCH_COST <= 1)
2628 /* If this is return x == y; then generate
2629 if (x == y) return 1; else return 0;
2630 if we can do it with explicit return insns and
2631 branches are cheap. */
2633 switch (TREE_CODE (retval_rhs))
2641 case TRUTH_ANDIF_EXPR:
2642 case TRUTH_ORIF_EXPR:
2643 case TRUTH_AND_EXPR:
2645 case TRUTH_NOT_EXPR:
2646 case TRUTH_XOR_EXPR:
2647 op0 = gen_label_rtx ();
2648 jumpifnot (retval_rhs, op0);
2649 expand_value_return (const1_rtx);
2651 expand_value_return (const0_rtx);
2655 #endif /* HAVE_return */
2657 /* If the result is an aggregate that is being returned in one (or more)
2658 registers, load the registers here. The compiler currently can't handle
2659 copying a BLKmode value into registers. We could put this code in a
2660 more general area (for use by everyone instead of just function
2661 call/return), but until this feature is generally usable it is kept here
2662 (and in expand_call). */
2665 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2666 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2669 int big_endian_correction = 0;
2670 int bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2671 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2672 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
2673 rtx result_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2674 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2675 enum machine_mode tmpmode;
2677 /* Structures smaller than a word are aligned to the least significant
2678 byte (to the right). On a BYTES_BIG_ENDIAN machine, this means we
2679 must skip the empty high order bytes when calculating the bit offset. */
2681 if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
2682 big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
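/* Worked example (illustrative): with 8-bit units and a 32-bit word, a
   3-byte structure gives a correction of 32 - 3 * 8 = 8 bits, so the copy
   below skips the 8 empty high-order bits of the word.  */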
2684 for (i = 0; i < n_regs; i++)
2686 rtx reg = gen_reg_rtx (word_mode);
2687 rtx word = operand_subword_force (result_val, i, BLKmode);
2688 int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)),BITS_PER_WORD);
2691 result_pseudos[i] = reg;
2693 /* Clobber REG and move each partword into it. Ensure we don't
2694 go past the end of the structure. Note that the loop below
2695 works because we've already verified that padding and
2696 endianness are compatible. */
2697 emit_insn (gen_rtx (CLOBBER, VOIDmode, reg));
2700 bitpos < BITS_PER_WORD && bytes > 0;
2701 bitpos += bitsize, bytes -= bitsize / BITS_PER_UNIT)
2703 int xbitpos = bitpos + big_endian_correction;
2705 store_bit_field (reg, bitsize, xbitpos, word_mode,
2706 extract_bit_field (word, bitsize, bitpos, 1,
2707 NULL_RTX, word_mode,
2709 bitsize / BITS_PER_UNIT,
2711 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2715 /* Now that the value is in pseudos, copy it to the result reg(s). */
2718 for (i = 0; i < n_regs; i++)
2719 emit_move_insn (gen_rtx (REG, word_mode, REGNO (result_reg) + i),
2722 /* Find the smallest integer mode large enough to hold the
2723 entire structure and use that mode instead of BLKmode
2724 on the USE insn for the return register. */
2725 bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2726 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2727 tmpmode != MAX_MACHINE_MODE;
2728 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
2730 /* Have we found a large enough mode? */
2731 if (GET_MODE_SIZE (tmpmode) >= bytes)
2735 /* No suitable mode found. */
2736 if (tmpmode == MAX_MACHINE_MODE)
2739 PUT_MODE (result_reg, tmpmode);
2741 expand_value_return (result_reg);
2745 && TREE_TYPE (retval_rhs) != void_type_node
2746 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2748 /* Calculate the return value into a pseudo reg. */
2749 val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2751 /* All temporaries have now been used. */
2753 /* Return the calculated value, doing cleanups first. */
2754 expand_value_return (val);
2758 /* No cleanups or no hard reg used;
2759 calculate value into hard return reg. */
2760 expand_expr (retval, const0_rtx, VOIDmode, 0);
2763 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2767 /* Return 1 if the end of the generated RTX is not a barrier.
2768 This means code already compiled can drop through. */
2771 drop_through_at_end_p ()
2773 rtx insn = get_last_insn ();
2774 while (insn && GET_CODE (insn) == NOTE)
2775 insn = PREV_INSN (insn);
2776 return insn && GET_CODE (insn) != BARRIER;
2779 /* Emit code to alter this function's formal parms for a tail-recursive call.
2780 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2781 FORMALS is the chain of decls of formals.
2782 Return 1 if this can be done;
2783 otherwise return 0 and do not emit any code. */
2786 tail_recursion_args (actuals, formals)
2787 tree actuals, formals;
2789 register tree a = actuals, f = formals;
2791 register rtx *argvec;
2793 /* Check that number and types of actuals are compatible
2794 with the formals. This is not always true in valid C code.
2795 Also check that no formal needs to be addressable
2796 and that all formals are scalars. */
2798 /* Also count the args. */
2800 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2802 if (TREE_TYPE (TREE_VALUE (a)) != TREE_TYPE (f))
2804 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2807 if (a != 0 || f != 0)
2810 /* Compute all the actuals. */
2812 argvec = (rtx *) alloca (i * sizeof (rtx));
2814 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2815 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
2817 /* Find which actual values refer to current values of previous formals.
2818 Copy each of them now, before any formal is changed. */
2820 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2824 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2825 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2826 { copy = 1; break; }
2828 argvec[i] = copy_to_reg (argvec[i]);
2831 /* Store the values of the actuals into the formals. */
2833 for (f = formals, a = actuals, i = 0; f;
2834 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2836 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
2837 emit_move_insn (DECL_RTL (f), argvec[i]);
2839 convert_move (DECL_RTL (f), argvec[i],
2840 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
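/* Illustration (source level, a sketch): for a tail call that swaps its own
   arguments,

       int f (int a, int b) { ... return f (b, a); ... }

   the actual for the first formal mentions the second formal and vice versa,
   so the loop above copies such actuals to fresh registers before any formal
   is overwritten; otherwise storing `b' into `a' would clobber the value
   still needed for the second store.  */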
2847 /* Generate the RTL code for entering a binding contour.
2848 The variables are declared one by one, by calls to `expand_decl'.
2850 EXIT_FLAG is nonzero if this construct should be visible to
2851 `exit_something'. */
2854 expand_start_bindings (exit_flag)
2857 struct nesting *thisblock = ALLOC_NESTING ();
2858 rtx note = output_bytecode ? 0 : emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
2860 /* Make an entry on block_stack for the block we are entering. */
2862 thisblock->next = block_stack;
2863 thisblock->all = nesting_stack;
2864 thisblock->depth = ++nesting_depth;
2865 thisblock->data.block.stack_level = 0;
2866 thisblock->data.block.cleanups = 0;
2867 thisblock->data.block.function_call_count = 0;
2871 if (block_stack->data.block.cleanups == NULL_TREE
2872 && (block_stack->data.block.outer_cleanups == NULL_TREE
2873 || block_stack->data.block.outer_cleanups == empty_cleanup_list))
2874 thisblock->data.block.outer_cleanups = empty_cleanup_list;
2876 thisblock->data.block.outer_cleanups
2877 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2878 block_stack->data.block.outer_cleanups);
2881 thisblock->data.block.outer_cleanups = 0;
2885 && !(block_stack->data.block.cleanups == NULL_TREE
2886 && block_stack->data.block.outer_cleanups == NULL_TREE))
2887 thisblock->data.block.outer_cleanups
2888 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2889 block_stack->data.block.outer_cleanups);
2891 thisblock->data.block.outer_cleanups = 0;
2893 thisblock->data.block.label_chain = 0;
2894 thisblock->data.block.innermost_stack_block = stack_block_stack;
2895 thisblock->data.block.first_insn = note;
2896 thisblock->data.block.block_start_count = ++block_start_count;
2897 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
2898 block_stack = thisblock;
2899 nesting_stack = thisblock;
2901 if (!output_bytecode)
2903 /* Make a new level for allocating stack slots. */
2908 /* Given a pointer to a BLOCK node, save a pointer to the most recently
2909 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
2913 remember_end_note (block)
2914 register tree block;
2916 BLOCK_END_NOTE (block) = last_block_end_note;
2917 last_block_end_note = NULL_RTX;
2920 /* Generate RTL code to terminate a binding contour.
2921 VARS is the chain of VAR_DECL nodes
2922 for the variables bound in this contour.
2923 MARK_ENDS is nonzero if we should put a note at the beginning
2924 and end of this binding contour.
2926 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
2927 (That is true automatically if the contour has a saved stack level.) */
2930 expand_end_bindings (vars, mark_ends, dont_jump_in)
2935 register struct nesting *thisblock = block_stack;
2938 if (output_bytecode)
2940 bc_expand_end_bindings (vars, mark_ends, dont_jump_in);
2945 for (decl = vars; decl; decl = TREE_CHAIN (decl))
2946 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL
2947 && ! DECL_IN_SYSTEM_HEADER (decl))
2948 warning_with_decl (decl, "unused variable `%s'");
2950 if (thisblock->exit_label)
2952 do_pending_stack_adjust ();
2953 emit_label (thisblock->exit_label);
2956 /* If necessary, make a handler for nonlocal gotos taking
2957 place in the function calls in this block. */
2958 if (function_call_count != thisblock->data.block.function_call_count
2960 /* Make handler for outermost block
2961 if there were any nonlocal gotos to this function. */
2962 && (thisblock->next == 0 ? current_function_has_nonlocal_label
2963 /* Make handler for inner block if it has something
2964 special to do when you jump out of it. */
2965 : (thisblock->data.block.cleanups != 0
2966 || thisblock->data.block.stack_level != 0)))
2969 rtx afterward = gen_label_rtx ();
2970 rtx handler_label = gen_label_rtx ();
2971 rtx save_receiver = gen_reg_rtx (Pmode);
2974 /* Don't let jump_optimize delete the handler. */
2975 LABEL_PRESERVE_P (handler_label) = 1;
2977 /* Record the handler address in the stack slot for that purpose,
2978 during this block, saving and restoring the outer value. */
2979 if (thisblock->next != 0)
2981 emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
2984 emit_move_insn (save_receiver, nonlocal_goto_handler_slot);
2985 insns = get_insns ();
2987 emit_insns_before (insns, thisblock->data.block.first_insn);
2991 emit_move_insn (nonlocal_goto_handler_slot,
2992 gen_rtx (LABEL_REF, Pmode, handler_label));
2993 insns = get_insns ();
2995 emit_insns_before (insns, thisblock->data.block.first_insn);
2997 /* Jump around the handler; it runs only when specially invoked. */
2998 emit_jump (afterward);
2999 emit_label (handler_label);
3001 #ifdef HAVE_nonlocal_goto
3002 if (! HAVE_nonlocal_goto)
3004 /* First adjust our frame pointer to its actual value. It was
3005 previously set to the start of the virtual area corresponding to
3006 the stacked variables when we branched here and now needs to be
3007 adjusted to the actual hardware fp value.
3009 Assignments to virtual registers are converted by
3010 instantiate_virtual_regs into the corresponding assignment
3011 to the underlying register (fp in this case) that makes
3012 the original assignment true.
3013 So the following insn will actually be
3014 decrementing fp by STARTING_FRAME_OFFSET. */
3015 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3017 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3018 if (fixed_regs[ARG_POINTER_REGNUM])
3020 #ifdef ELIMINABLE_REGS
3021 /* If the argument pointer can be eliminated in favor of the
3022 frame pointer, we don't need to restore it. We assume here
3023 that if such an elimination is present, it can always be used.
3024 This is the case on all known machines; if we don't make this
3025 assumption, we do unnecessary saving on many machines. */
3026 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3029 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
3030 if (elim_regs[i].from == ARG_POINTER_REGNUM
3031 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3034 if (i == sizeof elim_regs / sizeof elim_regs [0])
3037 /* Now restore our arg pointer from the address at which it
3038 was saved in our stack frame.
3039 If there hasn't been space allocated for it yet, make some now. */
3041 if (arg_pointer_save_area == 0)
3042 arg_pointer_save_area
3043 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3044 emit_move_insn (virtual_incoming_args_rtx,
3045 /* We need a pseudo here, or else
3046 instantiate_virtual_regs_1 complains. */
3047 copy_to_reg (arg_pointer_save_area));
3052 /* The handler expects the desired label address in the static chain
3053 register. It tests the address and does an appropriate jump
3054 to whatever label is desired. */
3055 for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
3056 /* Skip any labels we shouldn't be able to jump to from here. */
3057 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3059 rtx not_this = gen_label_rtx ();
3060 rtx this = gen_label_rtx ();
3061 do_jump_if_equal (static_chain_rtx,
3062 gen_rtx (LABEL_REF, Pmode, DECL_RTL (TREE_VALUE (link))),
3064 emit_jump (not_this);
3066 expand_goto (TREE_VALUE (link));
3067 emit_label (not_this);
3069 /* If label is not recognized, abort. */
3070 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0,
3072 emit_label (afterward);
3075 /* Don't allow jumping into a block that has cleanups or a stack level. */
3077 || thisblock->data.block.stack_level != 0
3078 || thisblock->data.block.cleanups != 0)
3080 struct label_chain *chain;
3082 /* Any labels in this block are no longer valid to go to.
3083 Mark them to cause an error message. */
3084 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3086 DECL_TOO_LATE (chain->label) = 1;
3087 /* If any goto without a fixup came to this label,
3088 that must be an error, because gotos without fixups
3089 come from outside all saved stack-levels and all cleanups. */
3090 if (TREE_ADDRESSABLE (chain->label))
3091 error_with_decl (chain->label,
3092 "label `%s' used before containing binding contour");
3096 /* Restore stack level in effect before the block
3097 (only if variable-size objects allocated). */
3098 /* Perform any cleanups associated with the block. */
3100 if (thisblock->data.block.stack_level != 0
3101 || thisblock->data.block.cleanups != 0)
3103 /* Only clean up here if this point can actually be reached. */
3104 int reachable = GET_CODE (get_last_insn ()) != BARRIER;
3106 /* Don't let cleanups affect ({...}) constructs. */
3107 int old_expr_stmts_for_value = expr_stmts_for_value;
3108 rtx old_last_expr_value = last_expr_value;
3109 tree old_last_expr_type = last_expr_type;
3110 expr_stmts_for_value = 0;
3112 /* Do the cleanups. */
3113 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3115 do_pending_stack_adjust ();
3117 expr_stmts_for_value = old_expr_stmts_for_value;
3118 last_expr_value = old_last_expr_value;
3119 last_expr_type = old_last_expr_type;
3121 /* Restore the stack level. */
3123 if (reachable && thisblock->data.block.stack_level != 0)
3125 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3126 thisblock->data.block.stack_level, NULL_RTX);
3127 if (nonlocal_goto_handler_slot != 0)
3128 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3132 /* Any gotos out of this block must also do these things.
3133 Also report any gotos with fixups that came to labels in this
3135 fixup_gotos (thisblock,
3136 thisblock->data.block.stack_level,
3137 thisblock->data.block.cleanups,
3138 thisblock->data.block.first_insn,
3142 /* Mark the beginning and end of the scope if requested.
3143 We do this now, after running cleanups on the variables
3144 just going out of scope, so they are in scope for their cleanups. */
3147 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3149 /* Get rid of the beginning-mark if we don't make an end-mark. */
3150 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3152 /* If doing stupid register allocation, make sure lives of all
3153 register variables declared here extend thru end of scope. */
3156 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3158 rtx rtl = DECL_RTL (decl);
3159 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
3163 /* Restore block_stack level for containing block. */
3165 stack_block_stack = thisblock->data.block.innermost_stack_block;
3166 POPSTACK (block_stack);
3168 /* Pop the stack slot nesting and free any slots at this level. */
3173 /* End a binding contour.
3174 VARS is the chain of VAR_DECL nodes for the variables bound
3175 in this contour. MARK_ENDS is nonzero if we should put a note
3176 at the beginning and end of this binding contour.
3177 DONT_JUMP_IN is nonzero if it is not valid to jump into this
3181 bc_expand_end_bindings (vars, mark_ends, dont_jump_in)
3186 struct nesting *thisbind = nesting_stack;
3190 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3191 if (! TREE_USED (TREE_VALUE (decl)) && TREE_CODE (TREE_VALUE (decl)) == VAR_DECL)
3192 warning_with_decl (decl, "unused variable `%s'");
3194 if (thisbind->exit_label)
3195 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisbind->exit_label));
3197 /* Pop block/bindings off stack */
3198 POPSTACK (block_stack);
3201 /* Generate RTL for the automatic variable declaration DECL.
3202 (Other kinds of declarations are simply ignored if seen here.)
3203 CLEANUP is an expression to be executed at exit from this binding contour;
3204 for example, in C++, it might call the destructor for this variable.
3206 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
3207 either before or after calling `expand_decl' but before compiling
3208 any subsequent expressions. This is because CLEANUP may be expanded
3209 more than once, on different branches of execution.
3210 For the same reason, CLEANUP may not contain a CALL_EXPR
3211 except as its topmost node--else `preexpand_calls' would get confused.
3213 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3214 that is not associated with any particular variable.
3216 There is no special support here for C++ constructors.
3217 They should be handled by the proper code in DECL_INITIAL. */
3223 struct nesting *thisblock = block_stack;
3226 if (output_bytecode)
3228 bc_expand_decl (decl, 0);
3232 type = TREE_TYPE (decl);
3234 /* Only automatic variables need any expansion done.
3235 Static and external variables, and external functions,
3236 will be handled by `assemble_variable' (called from finish_decl).
3237 TYPE_DECL and CONST_DECL require nothing.
3238 PARM_DECLs are handled in `assign_parms'. */
3240 if (TREE_CODE (decl) != VAR_DECL)
3242 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3245 /* Create the RTL representation for the variable. */
3247 if (type == error_mark_node)
3248 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
3249 else if (DECL_SIZE (decl) == 0)
3250 /* Variable with incomplete type. */
3252 if (DECL_INITIAL (decl) == 0)
3253 /* Error message was already done; now avoid a crash. */
3254 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
3256 /* An initializer is going to decide the size of this array.
3257 Until we know the size, represent its address with a reg. */
3258 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
3259 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (type);
3261 else if (DECL_MODE (decl) != BLKmode
3262 /* If -ffloat-store, don't put explicit float vars
3264 && !(flag_float_store
3265 && TREE_CODE (type) == REAL_TYPE)
3266 && ! TREE_THIS_VOLATILE (decl)
3267 && ! TREE_ADDRESSABLE (decl)
3268 && (DECL_REGISTER (decl) || ! obey_regdecls))
3270 /* Automatic variable that can go in a register. */
3271 int unsignedp = TREE_UNSIGNED (type);
3272 enum machine_mode reg_mode
3273 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3275 if (TREE_CODE (type) == COMPLEX_TYPE)
3277 rtx realpart, imagpart;
3278 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (type));
3280 /* For a complex type variable, make a CONCAT of two pseudos
3281 so that the real and imaginary parts
3282 can be allocated separately. */
3283 realpart = gen_reg_rtx (partmode);
3284 REG_USERVAR_P (realpart) = 1;
3285 imagpart = gen_reg_rtx (partmode);
3286 REG_USERVAR_P (imagpart) = 1;
3287 DECL_RTL (decl) = gen_rtx (CONCAT, reg_mode, realpart, imagpart);
3291 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3292 if (TREE_CODE (type) == POINTER_TYPE)
3293 mark_reg_pointer (DECL_RTL (decl));
3294 REG_USERVAR_P (DECL_RTL (decl)) = 1;
3297 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST)
3299 /* Variable of fixed size that goes on the stack. */
3303 /* If we previously made RTL for this decl, it must be an array
3304 whose size was determined by the initializer.
3305 The old address was a register; set that register now
3306 to the proper address. */
3307 if (DECL_RTL (decl) != 0)
3309 if (GET_CODE (DECL_RTL (decl)) != MEM
3310 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3312 oldaddr = XEXP (DECL_RTL (decl), 0);
3316 = assign_stack_temp (DECL_MODE (decl),
3317 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
3318 + BITS_PER_UNIT - 1)
3321 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3323 /* Set alignment we actually gave this decl. */
3324 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3325 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3329 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3330 if (addr != oldaddr)
3331 emit_move_insn (oldaddr, addr);
3334 /* If this is a memory ref that contains aggregate components,
3335 mark it as such for cse and loop optimize. */
3336 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3338 /* If this is in memory because of -ffloat-store,
3339 set the volatile bit, to prevent optimizations from
3340 undoing the effects. */
3341 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
3342 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3346 /* Dynamic-size object: must push space on the stack. */
3350 /* Record the stack pointer on entry to block, if have
3351 not already done so. */
3352 if (thisblock->data.block.stack_level == 0)
3354 do_pending_stack_adjust ();
3355 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3356 &thisblock->data.block.stack_level,
3357 thisblock->data.block.first_insn);
3358 stack_block_stack = thisblock;
3361 /* Compute the variable's size, in bytes. */
3362 size = expand_expr (size_binop (CEIL_DIV_EXPR,
3364 size_int (BITS_PER_UNIT)),
3365 NULL_RTX, VOIDmode, 0);
3368 /* Allocate space on the stack for the variable. */
3369 address = allocate_dynamic_stack_space (size, NULL_RTX,
3372 /* Reference the variable indirectly through that rtx. */
3373 DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);
3375 /* If this is a memory ref that contains aggregate components,
3376 mark it as such for cse and loop optimize. */
3377 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3379 /* Indicate the alignment we actually gave this variable. */
3380 #ifdef STACK_BOUNDARY
3381 DECL_ALIGN (decl) = STACK_BOUNDARY;
3383 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3387 if (TREE_THIS_VOLATILE (decl))
3388 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3389 #if 0 /* A variable is not necessarily unchanging
3390 just because it is const. RTX_UNCHANGING_P
3391 means no change in the function,
3392 not merely no change in the variable's scope.
3393 It is correct to set RTX_UNCHANGING_P if the variable's scope
3394 is the whole function. There's no convenient way to test that. */
3395 if (TREE_READONLY (decl))
3396 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
3399 /* If doing stupid register allocation, make sure life of any
3400 register variable starts here, at the start of its scope. */
3403 use_variable (DECL_RTL (decl));
3407 /* Generate code for the automatic variable declaration DECL. For
3408 most variables this just means we give it a stack offset. The
3409 compiler sometimes emits cleanups without variables and we will
3410 have to deal with those too. */
3413 bc_expand_decl (decl, cleanup)
3421 /* A cleanup with no variable. */
3428 /* Only auto variables need any work. */
3429 if (TREE_CODE (decl) != VAR_DECL || TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3432 type = TREE_TYPE (decl);
3434 if (type == error_mark_node)
3435 DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
3437 else if (DECL_SIZE (decl) == 0)
3439 /* Variable with incomplete type. The stack offset herein will be
3440 fixed later in expand_decl_init (). */
3441 DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
3443 else if (TREE_CONSTANT (DECL_SIZE (decl)))
3445 DECL_RTL (decl) = bc_allocate_local (TREE_INT_CST_LOW (DECL_SIZE (decl)) / BITS_PER_UNIT,
3449 DECL_RTL (decl) = bc_allocate_variable_array (DECL_SIZE (decl));
3452 /* Emit code to perform the initialization of a declaration DECL. */
3455 expand_decl_init (decl)
3458 int was_used = TREE_USED (decl);
3460 if (output_bytecode)
3462 bc_expand_decl_init (decl);
3466 /* If this is a CONST_DECL, we don't have to generate any code, but
3467 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3468 to be set while in the obstack containing the constant. If we don't
3469 do this, we can lose if we have functions nested three deep and the middle
3470 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3471 the innermost function is the first to expand that STRING_CST. */
3472 if (TREE_CODE (decl) == CONST_DECL)
3474 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3475 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3476 EXPAND_INITIALIZER);
3480 if (TREE_STATIC (decl))
3483 /* Compute and store the initial value now. */
3485 if (DECL_INITIAL (decl) == error_mark_node)
3487 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3488 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3489 || code == POINTER_TYPE)
3490 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3494 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3496 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3497 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3501 /* Don't let the initialization count as "using" the variable. */
3502 TREE_USED (decl) = was_used;
3504 /* Free any temporaries we made while initializing the decl. */
3508 /* Expand initialization for variable-sized types. Allocate array
3509 using newlocalSI and set local variable, which is a pointer to the
3513 bc_expand_variable_local_init (decl)
3516 /* Evaluate size expression and coerce to SI */
3517 bc_expand_expr (DECL_SIZE (decl));
3519 /* Type sizes are always (?) of TREE_CODE INTEGER_CST, so
3520 no coercion is necessary (?) */
3522 /* emit_typecode_conversion (preferred_typecode (TYPE_MODE (DECL_SIZE (decl)),
3523 TREE_UNSIGNED (DECL_SIZE (decl))), SIcode); */
3525 /* Emit code to allocate array */
3526 bc_emit_instruction (newlocalSI);
3528 /* Store array pointer in local variable. This is the only instance
3529 where we actually want the address of the pointer to the
3530 variable-size block, rather than the pointer itself. We avoid
3531 using expand_address() since that would cause the pointer to be
3532 pushed rather than its address. Hence the hard-coded reference;
3533 notice also that the variable is always local (no global
3534 variable-size type variables). */
3536 bc_load_localaddr (DECL_RTL (decl));
3537 bc_emit_instruction (storeP);
3541 /* Emit code to initialize a declaration. */
3544 bc_expand_decl_init (decl)
3547 int org_stack_depth;
3549 /* Static initializers are handled elsewhere. */
3551 if (TREE_STATIC (decl))
3554 /* Remember the original stack depth. */
3555 org_stack_depth = stack_depth;
3557 /* If the type is variable-size, we first create its space (we ASSUME
3558 it CAN'T be static). We do this regardless of whether there's an
3559 initializer assignment or not. */
3561 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
3562 bc_expand_variable_local_init (decl);
3564 /* Expand initializer assignment */
3565 if (DECL_INITIAL (decl) == error_mark_node)
3567 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3569 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3570 || code == POINTER_TYPE)
3572 expand_assignment (TREE_TYPE (decl), decl, 0, 0);
3574 else if (DECL_INITIAL (decl))
3575 expand_assignment (TREE_TYPE (decl), decl, 0, 0);
3577 /* Restore stack depth */
3578 if (org_stack_depth > stack_depth)
3581 bc_adjust_stack (stack_depth - org_stack_depth);
3585 /* CLEANUP is an expression to be executed at exit from this binding contour;
3586 for example, in C++, it might call the destructor for this variable.
3588 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
3589 either before or after calling `expand_decl' but before compiling
3590 any subsequent expressions. This is because CLEANUP may be expanded
3591 more than once, on different branches of execution.
3592 For the same reason, CLEANUP may not contain a CALL_EXPR
3593 except as its topmost node--else `preexpand_calls' would get confused.
3595 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3596 that is not associated with any particular variable. */
3599 expand_decl_cleanup (decl, cleanup)
3602 struct nesting *thisblock = block_stack;
3604 /* Error if we are not in any block. */
3608 /* Record the cleanup if there is one. */
3612 thisblock->data.block.cleanups
3613 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3614 /* If this block has a cleanup, it belongs in stack_block_stack. */
3615 stack_block_stack = thisblock;
3616 (*interim_eh_hook) (NULL_TREE);
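/* Illustration (a sketch, not compiler code): a C++ front end expanding a
   block such as

       { T x; use (x); }

   would record the destructor call as the cleanup for `x', e.g.

       expand_decl_cleanup (x_decl, build_destructor_call (x_decl));

   where x_decl and build_destructor_call stand in for whatever the front end
   has built; expand_end_bindings later expands that cleanup when the block
   is left.  */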
3621 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
3622 DECL_ELTS is the list of elements that belong to DECL's type.
3623 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
3626 expand_anon_union_decl (decl, cleanup, decl_elts)
3627 tree decl, cleanup, decl_elts;
3629 struct nesting *thisblock = block_stack;
3632 expand_decl (decl, cleanup);
3633 x = DECL_RTL (decl);
3637 tree decl_elt = TREE_VALUE (decl_elts);
3638 tree cleanup_elt = TREE_PURPOSE (decl_elts);
3639 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
3641 /* Propagate the union's alignment to the elements. */
3642 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
3644 /* If the element has BLKmode and the union doesn't, the union is
3645 aligned such that the element doesn't need to have BLKmode, so
3646 change the element's mode to the appropriate one for its size. */
3647 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
3648 DECL_MODE (decl_elt) = mode
3649 = mode_for_size (TREE_INT_CST_LOW (DECL_SIZE (decl_elt)),
3652 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
3653 instead create a new MEM rtx with the proper mode. */
3654 if (GET_CODE (x) == MEM)
3656 if (mode == GET_MODE (x))
3657 DECL_RTL (decl_elt) = x;
3660 DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0)));
3661 MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
3662 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
3665 else if (GET_CODE (x) == REG)
3667 if (mode == GET_MODE (x))
3668 DECL_RTL (decl_elt) = x;
3670 DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0);
3675 /* Record the cleanup if there is one. */
3678 thisblock->data.block.cleanups
3679 = temp_tree_cons (decl_elt, cleanup_elt,
3680 thisblock->data.block.cleanups);
3682 decl_elts = TREE_CHAIN (decl_elts);
3686 /* Expand a list of cleanups LIST.
3687 Elements may be expressions or may be nested lists.
3689 If DONT_DO is nonnull, then any list-element
3690 whose TREE_PURPOSE matches DONT_DO is omitted.
3691 This is sometimes used to avoid a cleanup associated with
3692 a value that is being returned out of the scope.
3694 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
3695 goto and handle protection regions specially in that case.
3697 If REACHABLE, we emit code, otherwise just inform the exception handling
3698 code about this finalization. */
3701 expand_cleanups (list, dont_do, in_fixup, reachable)
3708 for (tail = list; tail; tail = TREE_CHAIN (tail))
3709 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
3711 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3712 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
3716 (*interim_eh_hook) (TREE_VALUE (tail));
3720 /* Cleanups may be run multiple times. For example,
3721 when exiting a binding contour, we expand the
3722 cleanups associated with that contour. When a goto
3723 within that binding contour has a target outside that
3724 contour, it will expand all cleanups from its scope to
3725 the target. Though the cleanups are expanded multiple
3726 times, the control paths are non-overlapping so the
3727 cleanups will not be executed twice. */
3728 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
3735 /* Move all cleanups from the current block_stack
3736 to the containing block_stack, where they are assumed to
3737 have been created. If anything can cause a temporary to
3738 be created, but not expanded for more than one level of
3739 block_stacks, then this code will have to change. */
3744 struct nesting *block = block_stack;
3745 struct nesting *outer = block->next;
3747 outer->data.block.cleanups
3748 = chainon (block->data.block.cleanups,
3749 outer->data.block.cleanups);
3750 block->data.block.cleanups = 0;
3754 last_cleanup_this_contour ()
3756 if (block_stack == 0)
3759 return block_stack->data.block.cleanups;
3762 /* Return 1 if there are any pending cleanups at this point.
3763 If THIS_CONTOUR is nonzero, check the current contour as well.
3764 Otherwise, look only at the contours that enclose this one. */
3767 any_pending_cleanups (this_contour)
3770 struct nesting *block;
3772 if (block_stack == 0)
3775 if (this_contour && block_stack->data.block.cleanups != NULL)
3777 if (block_stack->data.block.cleanups == 0
3778 && (block_stack->data.block.outer_cleanups == 0
3780 || block_stack->data.block.outer_cleanups == empty_cleanup_list
3785 for (block = block_stack->next; block; block = block->next)
3786 if (block->data.block.cleanups != 0)
3792 /* Enter a case (Pascal) or switch (C) statement.
3793 Push a block onto case_stack and nesting_stack
3794 to accumulate the case-labels that are seen
3795 and to record the labels generated for the statement.
3797 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
3798 Otherwise, this construct is transparent for `exit_something'.
3800 EXPR is the index-expression to be dispatched on.
3801 TYPE is its nominal type. We could simply convert EXPR to this type,
3802 but instead we take short cuts. */
3805 expand_start_case (exit_flag, expr, type, printname)
3811 register struct nesting *thiscase = ALLOC_NESTING ();
3813 /* Make an entry on case_stack for the case we are entering. */
3815 thiscase->next = case_stack;
3816 thiscase->all = nesting_stack;
3817 thiscase->depth = ++nesting_depth;
3818 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
3819 thiscase->data.case_stmt.case_list = 0;
3820 thiscase->data.case_stmt.index_expr = expr;
3821 thiscase->data.case_stmt.nominal_type = type;
3822 thiscase->data.case_stmt.default_label = 0;
3823 thiscase->data.case_stmt.num_ranges = 0;
3824 thiscase->data.case_stmt.printname = printname;
3825 thiscase->data.case_stmt.seenlabel = 0;
3826 case_stack = thiscase;
3827 nesting_stack = thiscase;
3829 if (output_bytecode)
3831 bc_expand_start_case (thiscase, expr, type, printname);
3835 do_pending_stack_adjust ();
3837 /* Make sure case_stmt.start points to something that won't
3838 need any transformation before expand_end_case. */
3839 if (GET_CODE (get_last_insn ()) != NOTE)
3840 emit_note (NULL_PTR, NOTE_INSN_DELETED);
3842 thiscase->data.case_stmt.start = get_last_insn ();
3846 /* Enter a case statement. It is assumed that the caller has pushed
3847 the current context onto the case stack. */
3850 bc_expand_start_case (thiscase, expr, type, printname)
3851 struct nesting *thiscase;
3856 bc_expand_expr (expr);
3857 bc_expand_conversion (TREE_TYPE (expr), type);
3859 /* For cases, the skip is a place we jump to that's emitted after
3860 the size of the jump table is known. */
3862 thiscase->data.case_stmt.skip_label = gen_label_rtx ();
3863 bc_emit_bytecode (jump);
3864 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));
3866 #ifdef DEBUG_PRINT_CODE
3867 fputc ('\n', stderr);
3872 /* Start a "dummy case statement" within which case labels are invalid
3873 and are not connected to any larger real case statement.
3874 This can be used if you don't want to let a case statement jump
3875 into the middle of certain kinds of constructs. */
3878 expand_start_case_dummy ()
3880 register struct nesting *thiscase = ALLOC_NESTING ();
3882 /* Make an entry on case_stack for the dummy. */
3884 thiscase->next = case_stack;
3885 thiscase->all = nesting_stack;
3886 thiscase->depth = ++nesting_depth;
3887 thiscase->exit_label = 0;
3888 thiscase->data.case_stmt.case_list = 0;
3889 thiscase->data.case_stmt.start = 0;
3890 thiscase->data.case_stmt.nominal_type = 0;
3891 thiscase->data.case_stmt.default_label = 0;
3892 thiscase->data.case_stmt.num_ranges = 0;
3893 case_stack = thiscase;
3894 nesting_stack = thiscase;
3897 /* End a dummy case statement. */
3900 expand_end_case_dummy ()
3902 POPSTACK (case_stack);
3905 /* Return the data type of the index-expression
3906 of the innermost case statement, or null if none. */
3909 case_index_expr_type ()
3912 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
3916 /* Accumulate one case or default label inside a case or switch statement.
3917 VALUE is the value of the case (a null pointer, for a default label).
3918 The function CONVERTER, when applied to arguments T and V,
3919 converts the value V to the type T.
3921 If not currently inside a case or switch statement, return 1 and do
3922 nothing. The caller will print a language-specific error message.
3923 If VALUE is a duplicate or overlaps, return 2 and do nothing
3924 except store the (first) duplicate node in *DUPLICATE.
3925 If VALUE is out of range, return 3 and do nothing.
3926 If we are jumping into the scope of a cleanup or var-sized array, return 5.
3927 Return 0 on success.
3929 Extended to handle range statements. */
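/* A hedged sketch of how a front end might check the result (the
   diagnostic texts here are invented for illustration):

	tree duplicate;
	int failure = pushcase (value, convert, label, &duplicate);

	if (failure == 1)
	  error ("case label not within a switch statement");
	else if (failure == 2)
	  error ("duplicate case value");
	else if (failure == 3)
	  error ("case value out of range");
	else if (failure == 5)
	  error ("case label within scope of cleanup or variable array");

   using `convert' as the CONVERTER and the usual `error' routine for
   the messages.  */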
3932 pushcase (value, converter, label, duplicate)
3933 register tree value;
3934 tree (*converter) PROTO((tree, tree));
3935 register tree label;
3938 register struct case_node **l;
3939 register struct case_node *n;
3943 if (output_bytecode)
3944 return bc_pushcase (value, label);
3946 /* Fail if not inside a real case statement. */
3947 if (! (case_stack && case_stack->data.case_stmt.start))
3950 if (stack_block_stack
3951 && stack_block_stack->depth > case_stack->depth)
3954 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3955 nominal_type = case_stack->data.case_stmt.nominal_type;
3957 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3958 if (index_type == error_mark_node)
3961 /* Convert VALUE to the type in which the comparisons are nominally done. */
3963 value = (*converter) (nominal_type, value);
3965 /* If this is the first label, warn if any insns have been emitted. */
3966 if (case_stack->data.case_stmt.seenlabel == 0)
3969 for (insn = case_stack->data.case_stmt.start;
3971 insn = NEXT_INSN (insn))
3973 if (GET_CODE (insn) == CODE_LABEL)
3975 if (GET_CODE (insn) != NOTE
3976 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3978 warning ("unreachable code at beginning of %s",
3979 case_stack->data.case_stmt.printname);
3984 case_stack->data.case_stmt.seenlabel = 1;
3986 /* Fail if this value is out of range for the actual type of the index
3987 (which may be narrower than NOMINAL_TYPE). */
3988 if (value != 0 && ! int_fits_type_p (value, index_type))
3991 /* Fail if this is a duplicate or overlaps another entry. */
3994 if (case_stack->data.case_stmt.default_label != 0)
3996 *duplicate = case_stack->data.case_stmt.default_label;
3999 case_stack->data.case_stmt.default_label = label;
4003 /* Find the elt in the chain before which to insert the new value,
4004 to keep the chain sorted in increasing order.
4005 But report an error if this element is a duplicate. */
4006 for (l = &case_stack->data.case_stmt.case_list;
4007 /* Keep going past elements distinctly less than VALUE. */
4008 *l != 0 && tree_int_cst_lt ((*l)->high, value);
4013 /* Element we will insert before must be distinctly greater;
4014 overlap means error. */
4015 if (! tree_int_cst_lt (value, (*l)->low))
4017 *duplicate = (*l)->code_label;
4022 /* Add this label to the chain, and succeed.
4023 Copy VALUE so it is on temporary rather than momentary
4024 obstack and will thus survive till the end of the case statement. */
4025 n = (struct case_node *) oballoc (sizeof (struct case_node));
4028 n->high = n->low = copy_node (value);
4029 n->code_label = label;
4033 expand_label (label);
4037 /* Like pushcase but this case applies to all values
4038 between VALUE1 and VALUE2 (inclusive).
4039 The return value is the same as that of pushcase
4040 but there is one additional error code:
4041 4 means the specified range was empty. */
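/* For illustration: the GNU C case-range extension

	case 1 ... 3:

   is the kind of construct a front end would hand to this function,
   with VALUE1 == 1 and VALUE2 == 3.  A null VALUE1 or VALUE2 stands
   for the minimum or maximum of the index type, as handled below.  */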
4044 pushcase_range (value1, value2, converter, label, duplicate)
4045 register tree value1, value2;
4046 tree (*converter) PROTO((tree, tree));
4047 register tree label;
4050 register struct case_node **l;
4051 register struct case_node *n;
4055 /* Fail if not inside a real case statement. */
4056 if (! (case_stack && case_stack->data.case_stmt.start))
4059 if (stack_block_stack
4060 && stack_block_stack->depth > case_stack->depth)
4063 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4064 nominal_type = case_stack->data.case_stmt.nominal_type;
4066 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4067 if (index_type == error_mark_node)
4070 /* If this is the first label, warn if any insns have been emitted. */
4071 if (case_stack->data.case_stmt.seenlabel == 0)
4074 for (insn = case_stack->data.case_stmt.start;
4076 insn = NEXT_INSN (insn))
4078 if (GET_CODE (insn) == CODE_LABEL)
4080 if (GET_CODE (insn) != NOTE
4081 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4083 warning ("unreachable code at beginning of %s",
4084 case_stack->data.case_stmt.printname);
4089 case_stack->data.case_stmt.seenlabel = 1;
4091 /* Convert VALUEs to type in which the comparisons are nominally done. */
4092 if (value1 == 0) /* Negative infinity. */
4093 value1 = TYPE_MIN_VALUE (index_type);
4094 value1 = (*converter) (nominal_type, value1);
4096 if (value2 == 0) /* Positive infinity. */
4097 value2 = TYPE_MAX_VALUE (index_type);
4098 value2 = (*converter) (nominal_type, value2);
4100 /* Fail if these values are out of range. */
4101 if (! int_fits_type_p (value1, index_type))
4104 if (! int_fits_type_p (value2, index_type))
4107 /* Fail if the range is empty. */
4108 if (tree_int_cst_lt (value2, value1))
4111 /* If the bounds are equal, turn this into the one-value case. */
4112 if (tree_int_cst_equal (value1, value2))
4113 return pushcase (value1, converter, label, duplicate);
4115 /* Find the elt in the chain before which to insert the new value,
4116 to keep the chain sorted in increasing order.
4117 But report an error if this element is a duplicate. */
4118 for (l = &case_stack->data.case_stmt.case_list;
4119 /* Keep going past elements distinctly less than this range. */
4120 *l != 0 && tree_int_cst_lt ((*l)->high, value1);
4125 /* Element we will insert before must be distinctly greater;
4126 overlap means error. */
4127 if (! tree_int_cst_lt (value2, (*l)->low))
4129 *duplicate = (*l)->code_label;
4134 /* Add this label to the chain, and succeed.
4135 Copy VALUE1, VALUE2 so they are on temporary rather than momentary
4136 obstack and will thus survive till the end of the case statement. */
4138 n = (struct case_node *) oballoc (sizeof (struct case_node));
4141 n->low = copy_node (value1);
4142 n->high = copy_node (value2);
4143 n->code_label = label;
4146 expand_label (label);
4148 case_stack->data.case_stmt.num_ranges++;
4154 /* Accumulate one case or default label; VALUE is the value of the
4155 case, or nil for a default label. If not currently inside a case,
4156 return 1 and do nothing. If VALUE is a duplicate or overlaps, return
4157 2 and do nothing. If VALUE is out of range, return 3 and do nothing.
4158 Return 0 on success. This function is a leftover from the earlier
4159 bytecode compiler, which was based on gcc 1.37. It should be
4160 merged into pushcase. */
4163 bc_pushcase (value, label)
4167 struct nesting *thiscase = case_stack;
4168 struct case_node *case_label, *new_label;
4173 /* Fail if duplicate, overlap, or out of type range. */
4176 value = convert (thiscase->data.case_stmt.nominal_type, value);
4177 if (! int_fits_type_p (value, thiscase->data.case_stmt.nominal_type))
4180 for (case_label = thiscase->data.case_stmt.case_list;
4181 case_label->left; case_label = case_label->left)
4182 if (! tree_int_cst_lt (case_label->left->high, value))
4185 if (case_label != thiscase->data.case_stmt.case_list
4186 && ! tree_int_cst_lt (case_label->high, value)
4187 || case_label->left && ! tree_int_cst_lt (value, case_label->left->low))
4190 new_label = (struct case_node *) oballoc (sizeof (struct case_node));
4191 new_label->low = new_label->high = copy_node (value);
4192 new_label->code_label = label;
4193 new_label->left = case_label->left;
4195 case_label->left = new_label;
4196 thiscase->data.case_stmt.num_ranges++;
4200 if (thiscase->data.case_stmt.default_label)
4202 thiscase->data.case_stmt.default_label = label;
4205 expand_label (label);
4209 /* Returns the number of possible values of TYPE.
4210 Returns -1 if the number is unknown or variable.
4211 Returns -2 if the number does not fit in a HOST_WIDE_INT.
4212 Sets *SPARENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4213 do not increase monotonically (there may be duplicates);
4214 to 1 if the values increase monotonically, but not always by 1;
4215 otherwise sets it to 0. */
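/* For example (a sketch, not drawn from a real test case):

	enum dense { A, B, C };			count 3, *SPARENESS 0
	enum gappy { D = 0, E = 4, F = 9 };	count 3, *SPARENESS 1
	enum dup   { G = 1, H = 1, I = 0 };	count 3, *SPARENESS 2

   For an INTEGER_TYPE the count is TYPE_MAX_VALUE - TYPE_MIN_VALUE + 1,
   computed below with overflow checking, and -2 is returned when that
   difference does not fit in a HOST_WIDE_INT.  */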
4218 all_cases_count (type, spareness)
4222 HOST_WIDE_INT count, count_high = 0;
4225 switch (TREE_CODE (type))
4232 count = 1 << BITS_PER_UNIT;
4236 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4237 || TREE_CODE (TYPE_MAX_VALUE (type)) != INTEGER_CST)
4242 = TREE_INT_CST_LOW (TYPE_MAX_VALUE (type))
4243 - TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + 1
4244 but with overflow checking. */
4245 tree mint = TYPE_MIN_VALUE (type);
4246 tree maxt = TYPE_MAX_VALUE (type);
4247 HOST_WIDE_INT lo, hi;
4248 neg_double(TREE_INT_CST_LOW (mint), TREE_INT_CST_HIGH (mint),
4250 add_double(TREE_INT_CST_LOW (maxt), TREE_INT_CST_HIGH (maxt),
4252 add_double (lo, hi, 1, 0, &lo, &hi);
4253 if (hi != 0 || lo < 0)
4260 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4262 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4263 || TREE_CODE (TREE_VALUE (t)) != INTEGER_CST
4264 || TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + count
4265 != TREE_INT_CST_LOW (TREE_VALUE (t)))
4269 if (*spareness == 1)
4271 tree prev = TREE_VALUE (TYPE_VALUES (type));
4272 for (t = TYPE_VALUES (type); t = TREE_CHAIN (t), t != NULL_TREE; )
4274 if (! tree_int_cst_lt (prev, TREE_VALUE (t)))
4279 prev = TREE_VALUE (t);
4288 #define BITARRAY_TEST(ARRAY, INDEX) \
4289 ((ARRAY)[(unsigned)(INDEX) / HOST_BITS_PER_CHAR]\
4290 & (1 << ((unsigned)(INDEX) % HOST_BITS_PER_CHAR)))
4291 #define BITARRAY_SET(ARRAY, INDEX) \
4292 ((ARRAY)[(unsigned)(INDEX) / HOST_BITS_PER_CHAR]\
4293 |= 1 << ((unsigned)(INDEX) % HOST_BITS_PER_CHAR))
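/* A minimal usage sketch of the two macros above (the array name and
   sizing here are illustrative; the real caller is
   check_for_full_enumeration_handling below):

	unsigned char *seen = (unsigned char *) malloc (bytes_needed);
	bzero ((char *) seen, bytes_needed);
	BITARRAY_SET (seen, i);
	if (BITARRAY_TEST (seen, i))
	  ...

   Each INDEX selects one bit: bit INDEX % HOST_BITS_PER_CHAR of char
   INDEX / HOST_BITS_PER_CHAR.  */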
4295 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
4296 with the case values we have seen, assuming the case expression
4298 SPARSENESS is as determined by all_cases_count.
4300 The time needed is proportional to COUNT, unless
4301 SPARSENESS is 2, in which case quadratic time is needed. */
4304 mark_seen_cases (type, cases_seen, count, sparseness)
4306 unsigned char *cases_seen;
4312 tree next_node_to_try = NULL_TREE;
4313 long next_node_offset = 0;
4315 register struct case_node *n;
4316 tree val = make_node (INTEGER_CST);
4317 TREE_TYPE (val) = type;
4318 for (n = case_stack->data.case_stmt.case_list; n;
4321 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
4322 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
4323 while ( ! tree_int_cst_lt (n->high, val))
4325 /* Calculate (into xlo) the "offset" of the integer (val).
4326 The element with lowest value has offset 0, the next smallest
4327 element has offset 1, etc. */
4329 HOST_WIDE_INT xlo, xhi;
4331 if (sparseness == 2)
4333 /* This less efficient loop is only needed to handle
4334 duplicate case values (multiple enum constants
4335 with the same value). */
4336 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
4337 t = TREE_CHAIN (t), xlo++)
4339 if (tree_int_cst_equal (val, TREE_VALUE (t)))
4340 BITARRAY_SET (cases_seen, xlo);
4345 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
4347 /* The TYPE_VALUES will be in increasing order, so
4348 start searching where we last ended. */
4349 t = next_node_to_try;
4350 xlo = next_node_offset;
4356 t = TYPE_VALUES (type);
4359 if (tree_int_cst_equal (val, TREE_VALUE (t)))
4361 next_node_to_try = TREE_CHAIN (t);
4362 next_node_offset = xlo + 1;
4367 if (t == next_node_to_try)
4373 t = TYPE_MIN_VALUE (type);
4375 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
4379 add_double (xlo, xhi,
4380 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4384 if (xhi == 0 && xlo >= 0 && xlo < count)
4385 BITARRAY_SET (cases_seen, xlo);
4387 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4389 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
4394 /* Called when the index of a switch statement is an enumerated type
4395 and there is no default label.
4397 Checks that all enumeration literals are covered by the case
4398 expressions of a switch. Also, warn if there are any extra
4399 switch cases that are *not* elements of the enumerated type.
4401 If all enumeration literals were covered by the case expressions,
4402 turn one of the expressions into the default expression since it should
4403 not be possible to fall through such a switch. */
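/* For example (a sketch only): given

	enum color { RED, GREEN, BLUE };

	switch (c)
	  {
	  case RED:   ...
	  case GREEN: ...
	  }

   with no default label, this function warns that `BLUE' is not
   handled; and a `case 7:' in the same switch would draw the "not in
   enumerated type" warning, since 7 matches no enumerator of color.  */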
4406 check_for_full_enumeration_handling (type)
4409 register struct case_node *n;
4410 register struct case_node **l;
4411 register tree chain;
4414 /* True iff the selector type is a numbered set mode. */
4417 /* The number of possible selector values. */
4420 /* For each possible selector value, a one iff it has been matched
4421 by a case value alternative. */
4422 unsigned char *cases_seen;
4424 /* The allocated size of cases_seen, in chars. */
4428 if (output_bytecode)
4430 bc_check_for_full_enumeration_handling (type);
4437 size = all_cases_count (type, &sparseness);
4438 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
4440 if (size > 0 && size < 600000
4441 /* We deliberately use malloc here - not xmalloc. */
4442 && (cases_seen = (unsigned char *) malloc (bytes_needed)) != NULL)
4445 tree v = TYPE_VALUES (type);
4446 bzero (cases_seen, bytes_needed);
4448 /* The time complexity of this code is normally O(N), where
4449 N is the number of members in the enumerated type.
4450 However, if type is an ENUMERAL_TYPE whose values do not
4451 increase monotonically, quadratic time may be needed. */
4453 mark_seen_cases (type, cases_seen, size, sparseness);
4455 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
4457 if (BITARRAY_TEST(cases_seen, i) == 0)
4458 warning ("enumeration value `%s' not handled in switch",
4459 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
4465 /* Now we go the other way around; we warn if there are case
4466 expressions that don't correspond to enumerators. This can
4467 occur since C and C++ don't enforce type-checking of
4468 assignments to enumeration variables. */
4471 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
4473 for (chain = TYPE_VALUES (type);
4474 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
4475 chain = TREE_CHAIN (chain))
4480 if (TYPE_NAME (type) == 0)
4481 warning ("case value `%d' not in enumerated type",
4482 TREE_INT_CST_LOW (n->low));
4484 warning ("case value `%d' not in enumerated type `%s'",
4485 TREE_INT_CST_LOW (n->low),
4486 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4489 : DECL_NAME (TYPE_NAME (type))));
4491 if (!tree_int_cst_equal (n->low, n->high))
4493 for (chain = TYPE_VALUES (type);
4494 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
4495 chain = TREE_CHAIN (chain))
4500 if (TYPE_NAME (type) == 0)
4501 warning ("case value `%d' not in enumerated type",
4502 TREE_INT_CST_LOW (n->high));
4504 warning ("case value `%d' not in enumerated type `%s'",
4505 TREE_INT_CST_LOW (n->high),
4506 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4509 : DECL_NAME (TYPE_NAME (type))));
4515 /* ??? This optimization is disabled because it causes valid programs to
4516 fail. ANSI C does not guarantee that an expression with enum type
4517 will have a value that is the same as one of the enumeration literals. */
4519 /* If all values were found as case labels, make one of them the default
4520 label. Thus, this switch will never fall through. We arbitrarily pick
4521 the last one to make the default since this is likely the most
4522 efficient choice. */
4526 for (l = &case_stack->data.case_stmt.case_list;
4531 case_stack->data.case_stmt.default_label = (*l)->code_label;
4538 /* Check that all enumeration literals are covered by the case
4539 expressions of a switch. Also warn if there are any cases
4540 that are not elements of the enumerated type. */
4543 bc_check_for_full_enumeration_handling (type)
4546 struct nesting *thiscase = case_stack;
4547 struct case_node *c;
4550 /* Check for enums not handled. */
4551 for (e = TYPE_VALUES (type); e; e = TREE_CHAIN (e))
4553 for (c = thiscase->data.case_stmt.case_list->left;
4554 c && tree_int_cst_lt (c->high, TREE_VALUE (e));
4557 if (! (c && tree_int_cst_equal (c->low, TREE_VALUE (e))))
4558 warning ("enumerated value `%s' not handled in switch",
4559 IDENTIFIER_POINTER (TREE_PURPOSE (e)));
4562 /* Check for cases not in the enumeration. */
4563 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
4565 for (e = TYPE_VALUES (type);
4566 e && !tree_int_cst_equal (c->low, TREE_VALUE (e));
4570 warning ("case value `%d' not in enumerated type `%s'",
4571 TREE_INT_CST_LOW (c->low),
4572 IDENTIFIER_POINTER (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
4574 : DECL_NAME (TYPE_NAME (type))));
4578 /* Terminate a case (Pascal) or switch (C) statement
4579 in which ORIG_INDEX is the expression to be tested.
4580 Generate the code to test it and jump to the right place. */
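/* In outline (a rough summary of the strategy below, not a
   specification):

	switch (x) { case 1: case 2: case 3: case 4: case 5: ... }

   is dense enough to be dispatched through a casesi insn or an explicit
   jump table, while

	switch (x) { case 1: case 1000000: ... }

   has a value range far larger than its case count and is expanded as
   a tree of compares and conditional jumps (emit_case_nodes); an index
   expression that is a constant reduces to a single unconditional
   jump.  */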
4583 expand_end_case (orig_index)
4586 tree minval, maxval, range, orig_minval;
4587 rtx default_label = 0;
4588 register struct case_node *n;
4596 register struct nesting *thiscase = case_stack;
4597 tree index_expr, index_type;
4600 if (output_bytecode)
4602 bc_expand_end_case (orig_index);
4606 table_label = gen_label_rtx ();
4607 index_expr = thiscase->data.case_stmt.index_expr;
4608 index_type = TREE_TYPE (index_expr);
4609 unsignedp = TREE_UNSIGNED (index_type);
4611 do_pending_stack_adjust ();
4613 /* An ERROR_MARK occurs for various reasons including invalid data type. */
4614 if (index_type != error_mark_node)
4616 /* If switch expression was an enumerated type, check that all
4617 enumeration literals are covered by the cases.
4618 No sense trying this if there's a default case, however. */
4620 if (!thiscase->data.case_stmt.default_label
4621 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
4622 && TREE_CODE (index_expr) != INTEGER_CST)
4623 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
4625 /* If this is the first label, warn if any insns have been emitted. */
4626 if (thiscase->data.case_stmt.seenlabel == 0)
4629 for (insn = get_last_insn ();
4630 insn != case_stack->data.case_stmt.start;
4631 insn = PREV_INSN (insn))
4632 if (GET_CODE (insn) != NOTE
4633 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4635 warning ("unreachable code at beginning of %s",
4636 case_stack->data.case_stmt.printname);
4641 /* If we don't have a default-label, create one here,
4642 after the body of the switch. */
4643 if (thiscase->data.case_stmt.default_label == 0)
4645 thiscase->data.case_stmt.default_label
4646 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4647 expand_label (thiscase->data.case_stmt.default_label);
4649 default_label = label_rtx (thiscase->data.case_stmt.default_label);
4651 before_case = get_last_insn ();
4653 /* Simplify the case-list before we count it. */
4654 group_case_nodes (thiscase->data.case_stmt.case_list);
4656 /* Get upper and lower bounds of case values.
4657 Also convert all the case values to the index expr's data type. */
4660 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4662 /* Check low and high label values are integers. */
4663 if (TREE_CODE (n->low) != INTEGER_CST)
4665 if (TREE_CODE (n->high) != INTEGER_CST)
4668 n->low = convert (index_type, n->low);
4669 n->high = convert (index_type, n->high);
4671 /* Count the elements and track the largest and smallest
4672 of them (treating them as signed even if they are not). */
4680 if (INT_CST_LT (n->low, minval))
4682 if (INT_CST_LT (maxval, n->high))
4685 /* A range counts double, since it requires two compares. */
4686 if (! tree_int_cst_equal (n->low, n->high))
4690 orig_minval = minval;
4692 /* Compute span of values. */
4694 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
4698 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
4700 emit_jump (default_label);
4703 /* If range of values is much bigger than number of values,
4704 make a sequence of conditional branches instead of a dispatch.
4705 If the switch-index is a constant, do it this way
4706 because we can optimize it. */
4708 #ifndef CASE_VALUES_THRESHOLD
4710 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
4712 /* If machine does not have a case insn that compares the
4713 bounds, this means extra overhead for dispatch tables
4714 which raises the threshold for using them. */
4715 #define CASE_VALUES_THRESHOLD 5
4716 #endif /* HAVE_casesi */
4717 #endif /* CASE_VALUES_THRESHOLD */
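/* A rough illustration of the threshold's effect: with a casesi insn
   the cutoff is 4 cases, otherwise 5, so a two- or three-case switch
   is always expanded as compares and branches rather than as a
   dispatch table, whatever its value range.  */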
4719 else if (TREE_INT_CST_HIGH (range) != 0
4720 || count < CASE_VALUES_THRESHOLD
4721 || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
4723 || TREE_CODE (index_expr) == INTEGER_CST
4724 /* These will reduce to a constant. */
4725 || (TREE_CODE (index_expr) == CALL_EXPR
4726 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
4727 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
4728 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
4729 || (TREE_CODE (index_expr) == COMPOUND_EXPR
4730 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
4732 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4734 /* If the index is a short or char that we do not have
4735 an insn to handle comparisons directly, convert it to
4736 a full integer now, rather than letting each comparison
4737 generate the conversion. */
4739 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
4740 && (cmp_optab->handlers[(int) GET_MODE(index)].insn_code
4741 == CODE_FOR_nothing))
4743 enum machine_mode wider_mode;
4744 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
4745 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
4746 if (cmp_optab->handlers[(int) wider_mode].insn_code
4747 != CODE_FOR_nothing)
4749 index = convert_to_mode (wider_mode, index, unsignedp);
4755 do_pending_stack_adjust ();
4757 index = protect_from_queue (index, 0);
4758 if (GET_CODE (index) == MEM)
4759 index = copy_to_reg (index);
4760 if (GET_CODE (index) == CONST_INT
4761 || TREE_CODE (index_expr) == INTEGER_CST)
4763 /* Make a tree node with the proper constant value
4764 if we don't already have one. */
4765 if (TREE_CODE (index_expr) != INTEGER_CST)
4768 = build_int_2 (INTVAL (index),
4769 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
4770 index_expr = convert (index_type, index_expr);
4773 /* For constant index expressions we need only
4774 issue an unconditional branch to the appropriate
4775 target code. The job of removing any unreachable
4776 code is left to the optimization phase if the
4777 "-O" option is specified. */
4778 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4779 if (! tree_int_cst_lt (index_expr, n->low)
4780 && ! tree_int_cst_lt (n->high, index_expr))
4784 emit_jump (label_rtx (n->code_label));
4786 emit_jump (default_label);
4790 /* If the index expression is not constant we generate
4791 a binary decision tree to select the appropriate
4792 target code. This is done as follows:
4794 The list of cases is rearranged into a binary tree,
4795 nearly optimal assuming equal probability for each case.
4797 The tree is transformed into RTL, eliminating
4798 redundant test conditions at the same time.
4800 If program flow could reach the end of the
4801 decision tree an unconditional jump to the
4802 default code is emitted. */
4805 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
4806 && estimate_case_costs (thiscase->data.case_stmt.case_list));
4807 balance_case_nodes (&thiscase->data.case_stmt.case_list,
4809 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
4810 default_label, index_type);
4811 emit_jump_if_reachable (default_label);
4820 enum machine_mode index_mode = SImode;
4821 int index_bits = GET_MODE_BITSIZE (index_mode);
4823 enum machine_mode op_mode;
4825 /* Convert the index to SImode. */
4826 if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
4827 > GET_MODE_BITSIZE (index_mode))
4829 enum machine_mode omode = TYPE_MODE (index_type);
4830 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
4832 /* We must handle the endpoints in the original mode. */
4833 index_expr = build (MINUS_EXPR, index_type,
4834 index_expr, minval);
4835 minval = integer_zero_node;
4836 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4837 emit_cmp_insn (rangertx, index, LTU, NULL_RTX, omode, 1, 0);
4838 emit_jump_insn (gen_bltu (default_label));
4839 /* Now we can safely truncate. */
4840 index = convert_to_mode (index_mode, index, 0);
4844 if (TYPE_MODE (index_type) != index_mode)
4846 index_expr = convert (type_for_size (index_bits, 0),
4848 index_type = TREE_TYPE (index_expr);
4851 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4854 index = protect_from_queue (index, 0);
4855 do_pending_stack_adjust ();
4857 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][0];
4858 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][0])
4860 index = copy_to_mode_reg (op_mode, index);
4862 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
4864 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][1];
4865 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][1])
4867 op1 = copy_to_mode_reg (op_mode, op1);
4869 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
4871 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][2];
4872 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][2])
4874 op2 = copy_to_mode_reg (op_mode, op2);
4876 emit_jump_insn (gen_casesi (index, op1, op2,
4877 table_label, default_label));
4881 #ifdef HAVE_tablejump
4882 if (! win && HAVE_tablejump)
4884 index_expr = convert (thiscase->data.case_stmt.nominal_type,
4885 fold (build (MINUS_EXPR, index_type,
4886 index_expr, minval)));
4887 index_type = TREE_TYPE (index_expr);
4888 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4890 index = protect_from_queue (index, 0);
4891 do_pending_stack_adjust ();
4893 do_tablejump (index, TYPE_MODE (index_type),
4894 expand_expr (range, NULL_RTX, VOIDmode, 0),
4895 table_label, default_label);
4902 /* Get table of labels to jump to, in order of case index. */
4904 ncases = TREE_INT_CST_LOW (range) + 1;
4905 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
4906 bzero ((char *) labelvec, ncases * sizeof (rtx));
4908 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4910 register HOST_WIDE_INT i
4911 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);
4916 = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
4917 if (i + TREE_INT_CST_LOW (orig_minval)
4918 == TREE_INT_CST_LOW (n->high))
4924 /* Fill in the gaps with the default. */
4925 for (i = 0; i < ncases; i++)
4926 if (labelvec[i] == 0)
4927 labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);
4929 /* Output the table */
4930 emit_label (table_label);
4932 /* This would be a lot nicer if CASE_VECTOR_PC_RELATIVE
4933 were an expression, instead of an #ifdef/#ifndef. */
4935 #ifdef CASE_VECTOR_PC_RELATIVE
4939 emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
4940 gen_rtx (LABEL_REF, Pmode, table_label),
4941 gen_rtvec_v (ncases, labelvec)));
4943 emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
4944 gen_rtvec_v (ncases, labelvec)));
4946 /* If the case insn drops through the table,
4947 after the table we must jump to the default-label.
4948 Otherwise record no drop-through after the table. */
4949 #ifdef CASE_DROPS_THROUGH
4950 emit_jump (default_label);
4956 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
4957 reorder_insns (before_case, get_last_insn (),
4958 thiscase->data.case_stmt.start);
4961 if (thiscase->exit_label)
4962 emit_label (thiscase->exit_label);
4964 POPSTACK (case_stack);
4970 /* Terminate a case statement. EXPR is the original index
4974 bc_expand_end_case (expr)
4977 struct nesting *thiscase = case_stack;
4978 enum bytecode_opcode opcode;
4979 struct bc_label *jump_label;
4980 struct case_node *c;
4982 bc_emit_bytecode (jump);
4983 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));
4985 #ifdef DEBUG_PRINT_CODE
4986 fputc ('\n', stderr);
4989 /* Now that the size of the jump table is known, emit the actual
4990 indexed jump instruction. */
4991 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));
4993 opcode = TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode
4994 ? TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseSU : caseSI
4995 : TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseDU : caseDI;
4997 bc_emit_bytecode (opcode);
4999 /* Now emit the case instruction's literal arguments, in order.
5000 In addition to the value on the stack, it uses:
5001 1. The address of the jump table.
5002 2. The size of the jump table.
5003 3. The default label. */
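/* Concretely, the code below emits: a reference to the label of the
   jump table, the number of (low, high) ranges, the default label (or
   the exit label when there is no default), and then the aligned table
   itself, one (low, high, code-label) record per case range.  */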
5005 jump_label = bc_get_bytecode_label ();
5006 bc_emit_bytecode_labelref (jump_label);
5007 bc_emit_bytecode_const ((char *) &thiscase->data.case_stmt.num_ranges,
5008 sizeof thiscase->data.case_stmt.num_ranges);
5010 if (thiscase->data.case_stmt.default_label)
5011 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (thiscase->data.case_stmt.default_label)));
5013 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));
5015 /* Output the jump table. */
5017 bc_align_bytecode (3 /* PTR_ALIGN */);
5018 bc_emit_bytecode_labeldef (jump_label);
5020 if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode)
5021 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
5023 opcode = TREE_INT_CST_LOW (c->low);
5024 bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);
5026 opcode = TREE_INT_CST_LOW (c->high);
5027 bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);
5029 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
5032 if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == DImode)
5033 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
5035 bc_emit_bytecode_DI_const (c->low);
5036 bc_emit_bytecode_DI_const (c->high);
5038 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
5045 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->exit_label));
5047 /* Possibly issue enumeration warnings. */
5049 if (!thiscase->data.case_stmt.default_label
5050 && TREE_CODE (TREE_TYPE (expr)) == ENUMERAL_TYPE
5051 && TREE_CODE (expr) != INTEGER_CST
5053 check_for_full_enumeration_handling (TREE_TYPE (expr));
5056 #ifdef DEBUG_PRINT_CODE
5057 fputc ('\n', stderr);
5060 POPSTACK (case_stack);
5064 /* Return unique bytecode ID. */
5069 static int bc_uid = 0;
5074 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5077 do_jump_if_equal (op1, op2, label, unsignedp)
5078 rtx op1, op2, label;
5081 if (GET_CODE (op1) == CONST_INT
5082 && GET_CODE (op2) == CONST_INT)
5084 if (INTVAL (op1) == INTVAL (op2))
5089 enum machine_mode mode = GET_MODE (op1);
5090 if (mode == VOIDmode)
5091 mode = GET_MODE (op2);
5092 emit_cmp_insn (op1, op2, EQ, NULL_RTX, mode, unsignedp, 0);
5093 emit_jump_insn (gen_beq (label));
5097 /* Not all case values are encountered equally. This function
5098 uses a heuristic to weight case labels, in cases where that
5099 looks like a reasonable thing to do.
5101 Right now, all we try to guess is text, and we establish the
5104 chars above space: 16
5113 If we find any cases in the switch that are not either -1 or in the range
5114 of valid ASCII characters, or are control characters other than those
5115 commonly used with "\", don't treat this switch as scanning text.
5117 Return 1 if these nodes are suitable for cost estimation, otherwise
5121 estimate_case_costs (node)
5124 tree min_ascii = build_int_2 (-1, -1);
5125 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5129 /* If we haven't already made the cost table, make it now. Note that the
5130 lower bound of the table is -1, not zero. */
5132 if (cost_table == NULL)
5134 cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
5135 bzero ((char *) (cost_table - 1), 129 * sizeof (short));
5137 for (i = 0; i < 128; i++)
5141 else if (ispunct (i))
5143 else if (iscntrl (i))
5147 cost_table[' '] = 8;
5148 cost_table['\t'] = 4;
5149 cost_table['\0'] = 4;
5150 cost_table['\n'] = 2;
5151 cost_table['\f'] = 1;
5152 cost_table['\v'] = 1;
5153 cost_table['\b'] = 1;
5156 /* See if all the case expressions look like text. It is text if the
5157 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5158 as signed arithmetic since we don't want to ever access cost_table with a
5159 value less than -1. Also check that none of the constants in a range
5160 are strange control characters. */
5162 for (n = node; n; n = n->right)
5164 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5167 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
5168 if (cost_table[i] < 0)
5172 /* All interesting values are within the range of interesting
5173 ASCII characters. */
5177 /* Scan an ordered list of case nodes
5178 combining those with consecutive values or ranges.
5180 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
5183 group_case_nodes (head)
5186 case_node_ptr node = head;
5190 rtx lb = next_real_insn (label_rtx (node->code_label));
5191 case_node_ptr np = node;
5193 /* Try to group the successors of NODE with NODE. */
5194 while (((np = np->right) != 0)
5195 /* Do they jump to the same place? */
5196 && next_real_insn (label_rtx (np->code_label)) == lb
5197 /* Are their ranges consecutive? */
5198 && tree_int_cst_equal (np->low,
5199 fold (build (PLUS_EXPR,
5200 TREE_TYPE (node->high),
5203 /* An overflow is not consecutive. */
5204 && tree_int_cst_lt (node->high,
5205 fold (build (PLUS_EXPR,
5206 TREE_TYPE (node->high),
5208 integer_one_node))))
5210 node->high = np->high;
5212 /* NP is the first node after NODE which can't be grouped with it.
5213 Delete the nodes in between, and move on to that node. */
5219 /* Take an ordered list of case nodes
5220 and transform them into a near optimal binary tree,
5221 on the assumption that any target code selection value is as
5222 likely as any other.
5224 The transformation is performed by splitting the ordered
5225 list into two equal sections plus a pivot. The parts are
5226 then attached to the pivot as left and right branches. Each
5227 branch is then transformed recursively. */
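/* For instance (an illustration assuming equal weights for every
   value), the ordered chain

	1 -> 2 -> 3 -> 4 -> 5 -> 6 -> 7

   becomes a tree rooted at 4, with {1, 2, 3} hung to its left (rooted
   at 2) and {5, 6, 7} hung to its right (rooted at 6), so the decision
   code emitted from it has logarithmic rather than linear depth.  */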
5230 balance_case_nodes (head, parent)
5231 case_node_ptr *head;
5232 case_node_ptr parent;
5234 register case_node_ptr np;
5242 register case_node_ptr *npp;
5245 /* Count the number of entries on branch. Also count the ranges. */
5249 if (!tree_int_cst_equal (np->low, np->high))
5253 cost += cost_table[TREE_INT_CST_LOW (np->high)];
5257 cost += cost_table[TREE_INT_CST_LOW (np->low)];
5265 /* Split this list if it is long enough for that to help. */
5270 /* Find the place in the list that bisects the list's total cost;
5271 here I gets half the total cost. */
5276 /* Skip nodes while their cost does not reach that amount. */
5277 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5278 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
5279 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
5282 npp = &(*npp)->right;
5287 /* Leave this branch lopsided, but optimize left-hand
5288 side and fill in `parent' fields for right-hand side. */
5290 np->parent = parent;
5291 balance_case_nodes (&np->left, np);
5292 for (; np->right; np = np->right)
5293 np->right->parent = np;
5297 /* If there are just three nodes, split at the middle one. */
5299 npp = &(*npp)->right;
5302 /* Find the place in the list that bisects the list's total cost,
5303 where ranges count as 2.
5304 Here I gets half the total cost. */
5305 i = (i + ranges + 1) / 2;
5308 /* Skip nodes while their cost does not reach that amount. */
5309 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5314 npp = &(*npp)->right;
5319 np->parent = parent;
5322 /* Optimize each of the two split parts. */
5323 balance_case_nodes (&np->left, np);
5324 balance_case_nodes (&np->right, np);
5328 /* Else leave this branch as one level,
5329 but fill in `parent' fields. */
5331 np->parent = parent;
5332 for (; np->right; np = np->right)
5333 np->right->parent = np;
5338 /* Search the parent sections of the case node tree
5339 to see if a test for the lower bound of NODE would be redundant.
5340 INDEX_TYPE is the type of the index expression.
5342 The instructions to generate the case decision tree are
5343 output in the same order as nodes are processed so it is
5344 known that if a parent node checks the range of the current
5345 node minus one, then the current node is bounded at its lower
5346 span. Thus the test would be redundant. */
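/* For example: if some parent node dispatched on `value > 50' to reach
   the current node, and the current node's low bound is 51, the lower
   bound needs no new test; the loop below recognizes this by finding a
   parent whose high value equals our low value minus one.
   (node_has_high_bound below is the symmetric check.)  */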
5349 node_has_low_bound (node, index_type)
5354 case_node_ptr pnode;
5356 /* If the lower bound of this node is the lowest value in the index type,
5357 we need not test it. */
5359 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5362 /* If this node has a left branch, the value at the left must be less
5363 than that at this node, so it cannot be bounded at the bottom and
5364 we need not bother testing any further. */
5369 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5370 node->low, integer_one_node));
5372 /* If the subtraction above overflowed, we can't verify anything.
5373 Otherwise, look for a parent that tests our value - 1. */
5375 if (! tree_int_cst_lt (low_minus_one, node->low))
5378 for (pnode = node->parent; pnode; pnode = pnode->parent)
5379 if (tree_int_cst_equal (low_minus_one, pnode->high))
5385 /* Search the parent sections of the case node tree
5386 to see if a test for the upper bound of NODE would be redundant.
5387 INDEX_TYPE is the type of the index expression.
5389 The instructions to generate the case decision tree are
5390 output in the same order as nodes are processed so it is
5391 known that if a parent node checks the range of the current
5392 node plus one, then the current node is bounded at its upper
5393 span. Thus the test would be redundant. */
5396 node_has_high_bound (node, index_type)
5401 case_node_ptr pnode;
5403 /* If the upper bound of this node is the highest value in the type
5404 of the index expression, we need not test against it. */
5406 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5409 /* If this node has a right branch, the value at the right must be greater
5410 than that at this node, so it cannot be bounded at the top and
5411 we need not bother testing any further. */
5416 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5417 node->high, integer_one_node));
5419 /* If the addition above overflowed, we can't verify anything.
5420 Otherwise, look for a parent that tests our value + 1. */
5422 if (! tree_int_cst_lt (node->high, high_plus_one))
5425 for (pnode = node->parent; pnode; pnode = pnode->parent)
5426 if (tree_int_cst_equal (high_plus_one, pnode->low))
5432 /* Search the parent sections of the
5433 case node tree to see if both tests for the upper and lower
5434 bounds of NODE would be redundant. */
5437 node_is_bounded (node, index_type)
5441 return (node_has_low_bound (node, index_type)
5442 && node_has_high_bound (node, index_type));
5445 /* Emit an unconditional jump to LABEL unless it would be dead code. */
5448 emit_jump_if_reachable (label)
5451 if (GET_CODE (get_last_insn ()) != BARRIER)
5455 /* Emit step-by-step code to select a case for the value of INDEX.
5456 The thus generated decision tree follows the form of the
5457 case-node binary tree NODE, whose nodes represent test conditions.
5458 INDEX_TYPE is the type of the index of the switch.
5460 Care is taken to prune redundant tests from the decision tree
5461 by detecting any boundary conditions already checked by
5462 emitted rtx. (See node_has_high_bound, node_has_low_bound
5463 and node_is_bounded, above.)
5465 Where the test conditions can be shown to be redundant we emit
5466 an unconditional jump to the target code. As a further
5467 optimization, the subordinates of a tree node are examined to
5468 check for bounded nodes. In this case conditional and/or
5469 unconditional jumps as a result of the boundary check for the
5470 current node are arranged to target the subordinates' associated
5471 code for out-of-bound conditions on the current node.
5473 We can assume that when control reaches the code generated here,
5474 the index value has already been compared with the parents
5475 of this node, and determined to be on the same side of each parent
5476 as this node is. Thus, if this node tests for the value 51,
5477 and a parent tested for 52, we don't need to consider
5478 the possibility of a value greater than 51. If another parent
5479 tests for the value 50, then this node need not test anything. */
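/* A small worked sketch (values invented for illustration): for the
   balanced tree

		 3
		/ \
	       1   5

   where all three nodes are single-valued and none is already bounded,
   the code emitted is roughly

	jump to L3 if INDEX == 3
	compare INDEX with 3; branch-if-greater to T
	jump to L1 if INDEX == 1
	jump to DEFAULT
     T:	jump to L5 if INDEX == 5
	(falls through; the caller emits the final jump to DEFAULT)

   Each single-valued leaf is handled by its equality test alone, so
   values matching no case simply reach DEFAULT.  */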
5482 emit_case_nodes (index, node, default_label, index_type)
5488 /* If INDEX has an unsigned type, we must make unsigned branches. */
5489 int unsignedp = TREE_UNSIGNED (index_type);
5490 typedef rtx rtx_function ();
5491 rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
5492 rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
5493 rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
5494 rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
5495 enum machine_mode mode = GET_MODE (index);
5497 /* See if our parents have already tested everything for us.
5498 If they have, emit an unconditional jump for this node. */
5499 if (node_is_bounded (node, index_type))
5500 emit_jump (label_rtx (node->code_label));
5502 else if (tree_int_cst_equal (node->low, node->high))
5504 /* Node is single valued. First see if the index expression matches
5505 this node and then check our children, if any. */
5507 do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
5508 label_rtx (node->code_label), unsignedp);
5510 if (node->right != 0 && node->left != 0)
5512 /* This node has children on both sides.
5513 Dispatch to one side or the other
5514 by comparing the index value with this node's value.
5515 If one subtree is bounded, check that one first,
5516 so we can avoid real branches in the tree. */
5518 if (node_is_bounded (node->right, index_type))
5520 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5522 GT, NULL_RTX, mode, unsignedp, 0);
5524 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
5525 emit_case_nodes (index, node->left, default_label, index_type);
5528 else if (node_is_bounded (node->left, index_type))
5530 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5532 LT, NULL_RTX, mode, unsignedp, 0);
5533 emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
5534 emit_case_nodes (index, node->right, default_label, index_type);
5539 /* Neither node is bounded. First distinguish the two sides;
5540 then emit the code for one side at a time. */
5543 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5545 /* See if the value is on the right. */
5546 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5548 GT, NULL_RTX, mode, unsignedp, 0);
5549 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
5551 /* Value must be on the left.
5552 Handle the left-hand subtree. */
5553 emit_case_nodes (index, node->left, default_label, index_type);
5554 /* If left-hand subtree does nothing,
5556 emit_jump_if_reachable (default_label);
5558 /* Code branches here for the right-hand subtree. */
5559 expand_label (test_label);
5560 emit_case_nodes (index, node->right, default_label, index_type);
5564 else if (node->right != 0 && node->left == 0)
5566 /* Here we have a right child but no left, so we issue a conditional
5567 branch to default and process the right child.
5569 Omit the conditional branch to default if it would avoid only one
5570 right child; it costs too much space to save so little time. */
5572 if (node->right->right || node->right->left
5573 || !tree_int_cst_equal (node->right->low, node->right->high))
5575 if (!node_has_low_bound (node, index_type))
5577 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5579 LT, NULL_RTX, mode, unsignedp, 0);
5580 emit_jump_insn ((*gen_blt_pat) (default_label));
5583 emit_case_nodes (index, node->right, default_label, index_type);
5586 /* We cannot process node->right normally
5587 since we haven't ruled out the numbers less than
5588 this node's value. So handle node->right explicitly. */
5589 do_jump_if_equal (index,
5590 expand_expr (node->right->low, NULL_RTX,
5592 label_rtx (node->right->code_label), unsignedp);
5595 else if (node->right == 0 && node->left != 0)
5597 /* Just one subtree, on the left. */
5599 #if 0 /* The following code and comment were formerly part
5600 of the condition here, but they didn't work
5601 and I don't understand what the idea was. -- rms. */
5602 /* If our "most probable entry" is less probable
5603 than the default label, emit a jump to
5604 the default label using condition codes
5605 already lying around. With no right branch,
5606 a branch-greater-than will get us to the default
5609 && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
5612 if (node->left->left || node->left->right
5613 || !tree_int_cst_equal (node->left->low, node->left->high))
5615 if (!node_has_high_bound (node, index_type))
5617 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5619 GT, NULL_RTX, mode, unsignedp, 0);
5620 emit_jump_insn ((*gen_bgt_pat) (default_label));
5623 emit_case_nodes (index, node->left, default_label, index_type);
5626 /* We cannot process node->left normally
5627 since we haven't ruled out the numbers less than
5628 this node's value. So handle node->left explicitly. */
5629 do_jump_if_equal (index,
5630 expand_expr (node->left->low, NULL_RTX,
5632 label_rtx (node->left->code_label), unsignedp);
5637 /* Node is a range. These cases are very similar to those for a single
5638 value, except that we do not start by testing whether this node
5639 is the one to branch to. */
5641 if (node->right != 0 && node->left != 0)
5643 /* Node has subtrees on both sides.
5644 If the right-hand subtree is bounded,
5645 test for it first, since we can go straight there.
5646 Otherwise, we need to make a branch in the control structure,
5647 then handle the two subtrees. */
5648 tree test_label = 0;
5650 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5652 GT, NULL_RTX, mode, unsignedp, 0);
5654 if (node_is_bounded (node->right, index_type))
5655 /* Right hand node is fully bounded so we can eliminate any
5656 testing and branch directly to the target code. */
5657 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
5660 /* Right hand node requires testing.
5661 Branch to a label where we will handle it later. */
5663 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5664 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
5667 /* Value belongs to this node or to the left-hand subtree. */
5669 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
5670 GE, NULL_RTX, mode, unsignedp, 0);
5671 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
5673 /* Handle the left-hand subtree. */
5674 emit_case_nodes (index, node->left, default_label, index_type);
5676 /* If right node had to be handled later, do that now. */
5680 /* If the left-hand subtree fell through,
5681 don't let it fall into the right-hand subtree. */
5682 emit_jump_if_reachable (default_label);
5684 expand_label (test_label);
5685 emit_case_nodes (index, node->right, default_label, index_type);
5689 else if (node->right != 0 && node->left == 0)
5691 /* Deal with values to the left of this node,
5692 if they are possible. */
5693 if (!node_has_low_bound (node, index_type))
5695 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
5697 LT, NULL_RTX, mode, unsignedp, 0);
5698 emit_jump_insn ((*gen_blt_pat) (default_label));
5701 /* Value belongs to this node or to the right-hand subtree. */
5703 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5705 LE, NULL_RTX, mode, unsignedp, 0);
5706 emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));
5708 emit_case_nodes (index, node->right, default_label, index_type);
5711 else if (node->right == 0 && node->left != 0)
5713 /* Deal with values to the right of this node,
5714 if they are possible. */
5715 if (!node_has_high_bound (node, index_type))
5717 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5719 GT, NULL_RTX, mode, unsignedp, 0);
5720 emit_jump_insn ((*gen_bgt_pat) (default_label));
5723 /* Value belongs to this node or to the left-hand subtree. */
5725 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
5726 GE, NULL_RTX, mode, unsignedp, 0);
5727 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
5729 emit_case_nodes (index, node->left, default_label, index_type);
5734 /* Node has no children so we check low and high bounds to remove
5735 redundant tests. Only one of the bounds can exist,
5736 since otherwise this node is bounded--a case tested already. */
5738 if (!node_has_high_bound (node, index_type))
5740 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5742 GT, NULL_RTX, mode, unsignedp, 0);
5743 emit_jump_insn ((*gen_bgt_pat) (default_label));
5746 if (!node_has_low_bound (node, index_type))
5748 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
5750 LT, NULL_RTX, mode, unsignedp, 0);
5751 emit_jump_insn ((*gen_blt_pat) (default_label));
5754 emit_jump (label_rtx (node->code_label));
5759 /* These routines are used by the loop unrolling code. They copy BLOCK trees
5760 so that the debugging info will be correct for the unrolled loop. */
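/* A sketch of the expected calling sequence (the unroller itself is
   outside this file): find_loop_tree_blocks is called before the
   loop's insns are copied, so block_vector records which BLOCK goes
   with each block-number note, and unroll_block_trees is called
   afterward so reorder_blocks can rebuild the BLOCK tree to match the
   duplicated notes.  */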
5762 /* Indexed by block number, contains a pointer to the N'th block node. */
5764 static tree *block_vector;
5767 find_loop_tree_blocks ()
5769 tree block = DECL_INITIAL (current_function_decl);
5771 /* The first block is for the function body, and does not have
5772 corresponding block notes. Don't include it in the block vector. */
5773 block = BLOCK_SUBBLOCKS (block);
5775 block_vector = identify_blocks (block, get_insns ());
5779 unroll_block_trees ()
5781 tree block = DECL_INITIAL (current_function_decl);
5783 reorder_blocks (block_vector, block, get_insns ());