/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 92, 93, 1994 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */
/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
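/* A minimal, hypothetical sketch of that calling pattern, as a front
   end might expand `if (cond) then_stmt;'.  The helper expand_statement
   is an assumption (each front end has its own statement walker); the
   block is disabled with #if 0 so it documents usage without being
   compiled.  */
#if 0
static void
example_expand_if (cond, then_stmt)
     tree cond, then_stmt;
{
  expand_start_cond (cond, 0);	/* Emit the test; jump past body if false.  */
  expand_statement (then_stmt);	/* Hypothetical front-end statement walker.  */
  expand_end_cond ();		/* Emit the label that ends the construct.  */
}
#endif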
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "hard-reg-set.h"
#include "bc-typecd.h"
#include "bc-opcode.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;
/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;
/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Each time we expand the end of a binding contour (in `expand_end_bindings')
   and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
   This is used by the `remember_end_note' function to record the endpoint
   of each generated block in its associated BLOCK node.  */

static rtx last_block_end_note;

/* Number of binding contours started so far in this function.  */

int block_start_count;
/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
extern rtx save_expr_regs;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
extern int frame_offset;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
extern rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
extern rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

extern rtx arg_pointer_save_area;

/* Chain of all RTL_EXPRs that have insns in them.  */
extern tree rtl_expr_chain;
#if 0  /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif

extern void (*interim_eh_hook) PROTO((tree));
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   A chain of case nodes is initially maintained via the RIGHT fields
   in the nodes.  Nodes with higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
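/* For example, `case 1: ... case 3: ... case 5: ... case 9:' is first
   chained as 1 -> 3 -> 5 -> 9 through the RIGHT fields; when compare
   and jump insns are used, balance_case_nodes may rearrange it into

		5
	       / \
	      3   9
	     /
	    1

   (an illustrative sketch, not compiler output) before emit_case_nodes
   walks the tree emitting the comparisons.  */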
struct case_node
{
  struct case_node *left;	/* Left son in binary tree */
  struct case_node *right;	/* Right son in binary tree; also node chain */
  struct case_node *parent;	/* Parent of node in binary tree */
  tree low;			/* Lowest index value for this label */
  tree high;			/* Highest index value for this label */
  tree code_label;		/* Label to jump to when node matches */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} cond;
      /* For loops.  */
      struct
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label before a jump that branches to the end of the whole
	     construct.  This is where destructors go if any.  */
	  rtx alt_end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} loop;
      /* For variable binding contours.  */
      struct
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  Complemented by
	     bc_stack_level (see below) when generating bytecodes.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0 (was empty_cleanup_list),
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int function_call_count;
	  /* Bytecode specific: stack level to restore stack to on exit.  */
	  int bc_stack_level;
	} block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* For bytecodes, the case table is in-lined right in the code.
	     A label is needed for skipping over this block.  It is only
	     used when generating bytecodes.  */
	  rtx skip_label;
	  /* A list of case labels, kept in ascending order by value
	     as the list is built.
	     During expand_end_case, this list may be rearranged into a
	     nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Number of range exprs in case statement.  */
	  int num_ranges;
	  /* Name of this kind of statement, for warnings.  */
	  char *printname;
	  /* Nonzero if a case label has been seen in this case stmt.  */
	  char seenlabel;
	} case_stmt;
    } data;
};
/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* If any new stacks are added here, add them to POPSTACKS too.  */

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))
/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)					\
do { struct nesting *target = STACK;			\
     struct nesting *this;				\
     do { this = nesting_stack;				\
	  if (loop_stack == this)			\
	    loop_stack = loop_stack->next;		\
	  if (cond_stack == this)			\
	    cond_stack = cond_stack->next;		\
	  if (block_stack == this)			\
	    block_stack = block_stack->next;		\
	  if (stack_block_stack == this)		\
	    stack_block_stack = stack_block_stack->next; \
	  if (case_stack == this)			\
	    case_stack = case_stack->next;		\
	  nesting_depth = nesting_stack->depth - 1;	\
	  nesting_stack = this->all;			\
	  obstack_free (&stmt_obstack, this); }		\
   while (this != target); } while (0)
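/* A hedged usage sketch: each `expand_end_WHATEVER' pops its own
   entry, as in `POPSTACK (cond_stack);' in expand_end_cond below.
   Since POPSTACK unwinds nesting_stack down to and including the
   target, any inner entries left over (for instance after an error)
   are discarded as well, keeping all the per-type chains in sync.  */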
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */
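/* For example, in GNU C:

	{ char buf[n]; ... goto out; ... }	(n not constant)
      out: ;

   exiting the block must restore the stack level saved on entry, but
   until `out' is seen we cannot know whether such a restore is needed;
   expand_goto therefore queues a fixup here.  (Illustrative sketch,
   not code from this file.)  */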
struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0 (was empty_cleanup_list),
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;

  /* Bytecode specific members follow */

  /* The label that this jump is jumping to, or 0 for break, continue
     or return.  */
  struct bc_label *bc_target;

  /* The label we use for the fixup patch */
  struct bc_label *label;

  /* True (non-0) if fixup has been handled */
  int bc_handled;

  /* Like stack_level above, except refers to the interpreter stack */
  int bc_stack_level;
};

static struct goto_fixup *goto_fixup_chain;
/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};
static void expand_goto_internal	PROTO((tree, rtx, rtx));
static void bc_expand_goto_internal	PROTO((enum bytecode_opcode,
					       struct bc_label *, tree));
static int expand_fixup			PROTO((tree, rtx, rtx));
static void bc_expand_fixup		PROTO((enum bytecode_opcode,
					       struct bc_label *, int));
static void fixup_gotos			PROTO((struct nesting *, rtx, tree,
					       rtx, int));
static void bc_fixup_gotos		PROTO((struct nesting *, int, tree,
					       rtx, int));
static int warn_if_unused_value		PROTO((tree));
static void bc_expand_start_cond	PROTO((tree, int));
static void bc_expand_end_cond		PROTO((void));
static void bc_expand_start_else	PROTO((void));
static void bc_expand_end_loop		PROTO((void));
static void bc_expand_end_bindings	PROTO((tree, int, int));
static void bc_expand_decl		PROTO((tree, tree));
static void bc_expand_variable_local_init PROTO((tree));
static void bc_expand_decl_init		PROTO((tree));
static void expand_null_return_1	PROTO((rtx, int));
static int tail_recursion_args		PROTO((tree, tree));
static void expand_cleanups		PROTO((tree, tree));
static void bc_expand_start_case	PROTO((struct nesting *, tree,
					       tree, char *));
static int bc_pushcase			PROTO((tree, tree));
static void bc_check_for_full_enumeration_handling PROTO((tree));
static void bc_expand_end_case		PROTO((tree));
static void do_jump_if_equal		PROTO((rtx, rtx, rtx, int));
static int estimate_case_costs		PROTO((case_node_ptr));
static void group_case_nodes		PROTO((case_node_ptr));
static void balance_case_nodes		PROTO((case_node_ptr *,
					       case_node_ptr));
static int node_has_low_bound		PROTO((case_node_ptr, tree));
static int node_has_high_bound		PROTO((case_node_ptr, tree));
static int node_is_bounded		PROTO((case_node_ptr, tree));
static void emit_jump_if_reachable	PROTO((rtx));
static void emit_case_nodes		PROTO((rtx, case_node_ptr, rtx, tree));
int bc_expand_exit_loop_if_false ();
void bc_expand_start_cond ();
void bc_expand_end_cond ();
void bc_expand_start_else ();
void bc_expand_end_bindings ();
void bc_expand_start_case ();
void bc_check_for_full_enumeration_handling ();
void bc_expand_end_case ();
void bc_expand_decl ();

extern rtx bc_allocate_local ();
extern rtx bc_allocate_variable_array ();
void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
#if 0
  empty_cleanup_list = build_tree_list (NULL_TREE, NULL_TREE);
#endif
}

void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;
}
void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
}
void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
}
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  if (!output_bytecode)
    {
      last_insn = get_last_insn ();
      if (!optimize
	  && (GET_CODE (last_insn) == CODE_LABEL
	      || prev_real_insn (last_insn) == 0))
	emit_insn (gen_nop ());
    }
}
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}
/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}
/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  if (output_bytecode)
    {
      bc_expand_expr (exp);
      bc_emit_instruction (jumpP);
    }
  else
    {
      rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
      emit_queue ();
      emit_indirect_jump (x);
    }
}
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  if (output_bytecode)
    {
      if (! DECL_RTL (label))
	DECL_RTL (label) = bc_gen_rtx ((char *) 0, 0, bc_get_bytecode_label ());
      if (! bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (DECL_RTL (label))))
	error ("multiply defined label");
      return;
    }

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}
/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slot == 0)
    {
      nonlocal_goto_handler_slot
	= assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
}
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  if (output_bytecode)
    {
      expand_goto_internal (label, label_rtx (label), NULL_RTX);
      return;
    }

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx (LABEL_REF, Pmode, label_rtx (label));
      rtx temp;

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
				      copy_rtx (p->nonlocal_goto_handler_slot),
				      copy_rtx (p->nonlocal_goto_stack_level),
				      label_ref));
      else
#endif
	{
	  rtx addr;

	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));

	  /* We have now loaded the frame pointer hardware register with
	     the address that corresponds to the start of the virtual
	     stack vars.  So replace virtual_stack_vars_rtx in all
	     addresses we use with hard_frame_pointer_rtx.  */

	  /* Get addr of containing function's current nonlocal goto handler,
	     which will do any cleanups and then jump to the label.  */
	  addr = copy_rtx (p->nonlocal_goto_handler_slot);
	  temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
					   hard_frame_pointer_rtx));

	  /* Restore the stack pointer.  Note this uses fp just restored.  */
	  addr = p->nonlocal_goto_stack_level;
	  if (addr)
	    addr = replace_rtx (copy_rtx (addr),
				virtual_stack_vars_rtx,
				hard_frame_pointer_rtx);

	  emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

	  /* Put in the static chain register the nonlocal label address.  */
	  emit_move_insn (static_chain_rtx, label_ref);
	  /* USE of hard_frame_pointer_rtx added for consistency; not clear if
	     really needed.  */
	  emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
	  emit_indirect_jump (temp);
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  /* NOTICE!  If a bytecode instruction other than `jump' is needed,
     then the caller has to call bc_expand_goto_internal()
     directly.  This is rather an exceptional case, and there aren't
     that many places where this is necessary.  */
  if (output_bytecode)
    {
      bc_expand_goto_internal (jump, BYTECODE_BC_LABEL (label), body);
      return;
    }
  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this would clobber
	     the stack pointer.  This one should be deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();
	  emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
/* Generate a jump with OPCODE to the given bytecode LABEL which is
   found within BODY.  */

static void
bc_expand_goto_internal (opcode, label, body)
     enum bytecode_opcode opcode;
     struct bc_label *label;
     tree body;
{
  struct nesting *block;
  int stack_level = -1;

  /* If the label is defined, adjust the stack as necessary.
     If it's not defined, we have to push the reference on the
     fixup list.  */

  if (label->defined)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing bytecode uids.)  Then restore the
	 outermost stack level within that block.  */

      for (block = block_stack; block; block = block->next)
	{
	  if (BYTECODE_BC_LABEL (block->data.block.first_insn)->uid < label->uid)
	    break;
	  if (block->data.block.bc_stack_level)
	    stack_level = block->data.block.bc_stack_level;

	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE);
	      do_pending_stack_adjust ();
	    }
	}

      /* Restore the stack level.  If we need to adjust the stack, we
	 must do so after the jump, since the jump may depend on
	 what's on the stack.  Thus, any stack-modifying conditional
	 jumps (these are the only ones that rely on what's on the
	 stack) go into the fixup list.  */

      if (stack_level >= 0
	  && stack_depth != stack_level
	  && opcode != jump)
	bc_expand_fixup (opcode, label, stack_level);
      else
	{
	  if (stack_level >= 0)
	    bc_adjust_stack (stack_depth - stack_level);

	  if (body && DECL_BIT_FIELD (body))
	    error ("jump to `%s' invalidly jumps into binding contour",
		   IDENTIFIER_POINTER (DECL_NAME (body)));

	  /* Emit immediate jump */
	  bc_emit_bytecode (opcode);
	  bc_emit_bytecode_labelref (label);

#ifdef DEBUG_PRINT_CODE
	  fputc ('\n', stderr);
#endif
	}
    }
  else
    /* Put goto in the fixup list */
    bc_expand_fixup (opcode, label, stack_level);
}
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */
static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  if (end_block)
    {
      /* Now set END_BLOCK to the binding level to which we will return.  */
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BEGIN_BLOCK and NOTE_INSN_END_BLOCK notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.  */

      {
	register rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();

	start_sequence ();
	pushlevel (0);
	fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
	last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
	fixup->context = poplevel (1, 0, 0);  /* Create the BLOCK node now!  */
	end_sequence ();
	emit_insns_after (fixup->before_jump, original_before_jump);
      }

      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= (((block->data.block.outer_cleanups
#if 0
	     && block->data.block.outer_cleanups != empty_cleanup_list
#endif
	     )
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}
/* Generate bytecode jump with OPCODE to a fixup routine that links to LABEL.
   Make the fixup restore the stack level to STACK_LEVEL.  */

static void
bc_expand_fixup (opcode, label, stack_level)
     enum bytecode_opcode opcode;
     struct bc_label *label;
     int stack_level;
{
  struct goto_fixup *fixup
    = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));

  fixup->label = bc_get_bytecode_label ();
  fixup->bc_target = label;
  fixup->bc_stack_level = stack_level;
  fixup->bc_handled = FALSE;

  fixup->next = goto_fixup_chain;
  goto_fixup_chain = fixup;

  /* Insert a jump to the fixup code */
  bc_emit_bytecode (opcode);
  bc_emit_bytecode_labelref (fixup->label);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
}
/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */
static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  if (output_bytecode)
    {
      /* ??? The second arg is the bc stack level, which is not the same
	 as STACK_LEVEL.  I have no idea what should go here, so I'll
	 pass 0.  */
      bc_fixup_gotos (thisblock, 0, cleanup_list, first_insn, dont_jump_in);
      return;
    }
  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  register rtx cleanup_insns;

	  /* Get the first non-label after the label
	     this goto jumps to.  If that's before this scope begins,
	     we don't have a jump into the scope.  */
	  rtx after_label = f->target_rtl;
	  while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
	    after_label = NEXT_INSN (after_label);

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      /* If AFTER_LABEL is 0, it means the jump goes to the end
		 of the rtl, which means it jumps into this scope.  */
	      && (after_label == 0
		  || INSN_UID (first_insn) < INSN_UID (after_label))
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_REGISTER (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_REGISTER (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  pushlevel (0);
	  set_block (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), 0);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level)
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point insures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  poplevel (1, 0, 0);

	  end_sequence ();
	  emit_insns_after (cleanup_insns, f->before_jump);

	  f->before_jump = 0;
	}
    }

  /* Mark the cleanups of exited blocks so that they are executed
     by the code above.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.
	   Also mark the cleanup_list_list element for F
	   that corresponds to this block, so that ultimately
	   this block's cleanups will be executed by the code above.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared,
	   it means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    TREE_ADDRESSABLE (lists) = 1;

	if (stack_level)
	  f->stack_level = stack_level;
      }
}
/* When exiting a binding contour, process all pending gotos requiring fixups.
   Note: STACK_DEPTH is not altered.

   The arguments are currently not used in the bytecode compiler, but we may
   need them one day for languages other than C.

   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */
static void
bc_fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     int stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;
  int saved_stack_depth;

  /* F is the fixup we are considering; PREV is the previous one.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev)
	    prev->next = f->next;
	  continue;
	}

      /* Emit code to restore the stack and continue */
      bc_emit_bytecode_labeldef (f->label);

      /* Save stack_depth across call, since bc_adjust_stack () will alter
	 the perceived stack depth via the instructions generated.  */

      if (f->bc_stack_level >= 0)
	{
	  saved_stack_depth = stack_depth;
	  bc_adjust_stack (stack_depth - f->bc_stack_level);
	  stack_depth = saved_stack_depth;
	}

      bc_emit_bytecode (jump);
      bc_emit_bytecode_labelref (f->bc_target);

#ifdef DEBUG_PRINT_CODE
      fputc ('\n', stderr);
#endif
    }

  goto_fixup_chain = NULL;
}
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (output_bytecode)
    {
      error ("`asm' is illegal when generating bytecode");
      return;
    }

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
		      TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
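/* For example (an illustrative sketch; the operand names are
   hypothetical), the GNU C statement

	asm volatile ("fsinx %1,%0" : "=f" (result) : "f" (angle));

   arrives here with STRING containing "fsinx %1,%0", OUTPUTS a
   one-element list whose TREE_PURPOSE is the constraint "=f" and whose
   TREE_VALUE is the lvalue `result', INPUTS likewise holding "f" and
   `angle', CLOBBERS empty, and VOL nonzero because of `volatile'.  */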
void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  /* The insn we have emitted.  */
  rtx insn;

  if (output_bytecode)
    {
      error ("`asm' is illegal when generating bytecode");
      return;
    }
  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
    }

  last_expr_type = 0;

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      int j;
      int found_equal;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
	return;

      /* Make sure constraint has `=' and does not have `+'.  */

      found_equal = 0;
      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	{
	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	    {
	      error ("output operand constraint contains `+'");
	      return;
	    }
	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '=')
	    found_equal = 1;
	}
      if (! found_equal)
	{
	  error ("output operand constraint lacks `='");
	  return;
	}

      /* If an output operand is not a variable or indirect ref,
	 or a part of one,
	 create a SAVE_EXPR which is a pseudo-reg
	 to act as an intermediate temporary.
	 Make the asm insn write into that, then copy it to
	 the real output operand.  */

      while (TREE_CODE (val) == COMPONENT_REF
	     || TREE_CODE (val) == ARRAY_REF)
	val = TREE_OPERAND (val, 0);

      if (TREE_CODE (val) != VAR_DECL
	  && TREE_CODE (val) != PARM_DECL
	  && TREE_CODE (val) != INDIRECT_REF)
	{
	  TREE_VALUE (tail) = save_expr (TREE_VALUE (tail));
	  /* If it's a constant, print error now so don't crash later.  */
	  if (TREE_CODE (TREE_VALUE (tail)) != SAVE_EXPR)
	    {
	      error ("invalid output in `asm'");
	      return;
	    }
	}

      output_rtx[i] = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
    }

  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx (ASM_OPERANDS, VOIDmode,
		  TREE_STRING_POINTER (string), "", 0, argvec, constraints,
		  filename, line);
  MEM_VOLATILE_P (body) = vol;
  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;

      /* If there's an erroneous arg, emit no insn,
	 because the ASM_INPUT would get VOIDmode
	 and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;
      if (TREE_PURPOSE (tail) == NULL_TREE)
	{
	  error ("hard register `%s' listed as input operand to `asm'",
		 TREE_STRING_POINTER (TREE_VALUE (tail)) );
	  return;
	}

      /* Make sure constraint has neither `=' nor `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '='
	    || TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	  {
	    error ("input operand constraint contains `%c'",
		   TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
	    return;
	  }

      XVECEXP (body, 3, i)	/* argvec */
	= expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
      XVECEXP (body, 4, i)	/* constraints */
	= gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
		   TREE_STRING_POINTER (TREE_PURPOSE (tail)));
      i++;
    }

  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
			       ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */
  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;
      if (num == 0) num = 1;
      body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx (SET, VOIDmode,
		       output_rtx[i],
		       gen_rtx (ASM_OPERANDS, VOIDmode,
				TREE_STRING_POINTER (string),
				TREE_STRING_POINTER (TREE_PURPOSE (tail)),
				i, argvec, constraints,
				filename, line));
	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
	{
	  char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int j = decode_reg_name (regname);

	  if (j < 0)
	    {
	      if (j == -3)	/* `cc', which is not a register */
		continue;

	      if (j == -4)	/* `memory', don't cache memory across asm */
		{
		  XVECEXP (body, 0, i++)
		    = gen_rtx (CLOBBER, VOIDmode,
			       gen_rtx (MEM, BLKmode,
					gen_rtx (SCRATCH, VOIDmode, 0)));
		  continue;
		}

	      error ("unknown register name `%s' in `asm'", regname);
	      return;
	    }

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  XVECEXP (body, 0, i++)
	    = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));
	}

      insn = emit_insn (body);
    }

  free_temp_slots ();
}
/* Generate RTL to evaluate the expression EXP
   and remember it in case this is the VALUE in a ({... VALUE; }) constr.  */

void
expand_expr_stmt (exp)
     tree exp;
{
  if (output_bytecode)
    {
      int org_stack_depth = stack_depth;

      bc_expand_expr (exp);

      /* Restore stack depth */
      if (stack_depth < org_stack_depth)
	abort ();

      bc_emit_instruction (drop);

      last_expr_type = TREE_TYPE (exp);
      return;
    }
  /* If -W, warn about statements with no side effects,
     except for an explicit cast to void (e.g. for assert()), and
     except inside a ({...}) where they may be useful.  */
  if (expr_stmts_for_value == 0 && exp != error_mark_node)
    {
      if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
	  && !(TREE_CODE (exp) == CONVERT_EXPR
	       && TREE_TYPE (exp) == void_type_node))
	warning_with_file_and_line (emit_filename, emit_lineno,
				    "statement with no effect");
      else if (warn_unused)
	warn_if_unused_value (exp);
    }

  last_expr_type = TREE_TYPE (exp);
  if (! flag_syntax_only)
    last_expr_value = expand_expr (exp,
				   (expr_stmts_for_value
				    ? NULL_RTX : const0_rtx),
				   VOIDmode, 0);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
      && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
	;
      else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
	copy_to_reg (last_expr_value);
      else
	{
	  rtx lab = gen_label_rtx ();

	  /* Compare the value with itself to reference it.  */
	  emit_cmp_insn (last_expr_value, last_expr_value, EQ,
			 expand_expr (TYPE_SIZE (last_expr_type),
				      NULL_RTX, VOIDmode, 0),
			 BLKmode, 0,
			 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
	  emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
	  emit_label (lab);
	}
    }

  /* If this expression is part of a ({...}) and is in memory, we may have
     to preserve temporaries.  */
  preserve_temp_slots (last_expr_value);

  /* Free any temporaries used to evaluate this expression.  Any temporary
     used as a result of this expression will already have been preserved
     above.  */
  free_temp_slots ();

  emit_queue ();
}
/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  */

static int
warn_if_unused_value (exp)
     tree exp;
{
  if (TREE_USED (exp))
    return 0;

  switch (TREE_CODE (exp))
    {
    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case MODIFY_EXPR:
    case INIT_EXPR:
    case TARGET_EXPR:
    case CALL_EXPR:
    case METHOD_CALL_EXPR:
    case RTL_EXPR:
    case WITH_CLEANUP_EXPR:
    case EXIT_EXPR:
      /* We don't warn about COND_EXPR because it may be a useful
	 construct if either arm contains a side effect.  */
    case COND_EXPR:
      return 0;

    case BIND_EXPR:
      /* For a binding, warn if no side effect within it.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case TRUTH_ORIF_EXPR:
    case TRUTH_ANDIF_EXPR:
      /* In && or ||, warn if 2nd operand has no side effect.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case COMPOUND_EXPR:
      if (TREE_NO_UNUSED_WARNING (exp))
	return 0;
      if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
	return 1;
      /* Let people do `(foo (), 0)' without a warning.  */
      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	return 0;
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case NOP_EXPR:
    case CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Don't warn about values cast to void.  */
      if (TREE_TYPE (exp) == void_type_node)
	return 0;
      /* Don't warn about conversions not explicit in the user's program.  */
      if (TREE_NO_UNUSED_WARNING (exp))
	return 0;
      /* Assignment to a cast usually results in a cast of a modify.
	 Don't complain about that.  There can be an arbitrary number of
	 casts before the modify, so we must loop until we find the first
	 non-cast expression and then test to see if that is a modify.  */
      {
	tree tem = TREE_OPERAND (exp, 0);

	while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
	  tem = TREE_OPERAND (tem, 0);

	if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR)
	  return 0;
      }
      /* ... fall through ...  */

    default:
      /* Referencing a volatile value is a side effect, so don't warn.  */
      if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
	   || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
	  && TREE_THIS_VOLATILE (exp))
	return 0;
      warning_with_file_and_line (emit_filename, emit_lineno,
				  "value computed is not used");
      return 1;
    }
}
/* Clear out the memory of the last expression evaluated.  */

void
clear_last_expr ()
{
  last_expr_type = 0;
}
/* Begin a statement which will return a value.
   Return the RTL_EXPR for this statement expr.
   The caller must save that value and pass it to expand_end_stmt_expr.  */

tree
expand_start_stmt_expr ()
{
  int momentary;
  tree t;

  /* When generating bytecode just note down the stack depth */
  if (output_bytecode)
    return (build_int_2 (stack_depth, 0));

  /* Make the RTL_EXPR node temporary, not momentary,
     so that rtl_expr_chain doesn't become garbage.  */
  momentary = suspend_momentary ();
  t = make_node (RTL_EXPR);
  resume_momentary (momentary);
  start_sequence_for_rtl_expr (t);

  expr_stmts_for_value++;
  return t;
}
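/* A hedged sketch of the caller's protocol for a GNU C statement
   expression such as `({ int i = f (); i + 1; })' (illustrative; the
   real callers live in the front ends):

	t = expand_start_stmt_expr ();
	...expand each statement; expr_stmts_for_value is now nonzero,
	   so expand_expr_stmt records the last value and type...
	t = expand_end_stmt_expr (t);

   The returned RTL_EXPR carries the insns and the value of the last
   expression statement.  */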
/* Restore the previous state at the end of a statement that returns a value.
   Returns a tree node representing the statement's value and the
   insns to compute the value.

   The nodes of that expression have been freed by now, so we cannot use them.
   But we don't want to do that anyway; the expression has already been
   evaluated and now we just want to use the value.  So generate a RTL_EXPR
   with the proper type and RTL value.

   If the last substatement was not an expression,
   return something with type `void'.  */
tree
expand_end_stmt_expr (t)
     tree t;
{
  if (output_bytecode)
    {
      /* At this point, all expressions have been evaluated in order.
	 However, all expression values have been popped when evaluated,
	 which means we have to recover the last expression value.  This is
	 the last value removed by means of a `drop' instruction.  Instead
	 of adding code to inhibit dropping the last expression value, it
	 is here recovered by undoing the `drop'.  Since `drop' is
	 equivalent to `adjustackSI [1]', it can be undone with `adjstackSI
	 [-1]'.  */

      bc_adjust_stack (-1);

      if (!last_expr_type)
	last_expr_type = void_type_node;

      t = make_node (RTL_EXPR);
      TREE_TYPE (t) = last_expr_type;
      RTL_EXPR_RTL (t) = NULL;
      RTL_EXPR_SEQUENCE (t) = NULL;

      /* Don't consider deleting this expr or containing exprs at tree level.  */
      TREE_THIS_VOLATILE (t) = 1;

      last_expr_type = 0;
      return t;
    }

  OK_DEFER_POP;

  if (last_expr_type == 0)
    {
      last_expr_type = void_type_node;
      last_expr_value = const0_rtx;
    }
  else if (last_expr_value == 0)
    /* There are some cases where this can happen, such as when the
       statement is void type.  */
    last_expr_value = const0_rtx;
  else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
    /* Remove any possible QUEUED.  */
    last_expr_value = protect_from_queue (last_expr_value, 0);

  emit_queue ();

  TREE_TYPE (t) = last_expr_type;
  RTL_EXPR_RTL (t) = last_expr_value;
  RTL_EXPR_SEQUENCE (t) = get_insns ();

  rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);

  end_sequence ();

  /* Don't consider deleting this expr or containing exprs at tree level.  */
  TREE_SIDE_EFFECTS (t) = 1;
  /* Propagate volatility of the actual RTL expr.  */
  TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);

  last_expr_type = 0;
  expr_stmts_for_value--;

  return t;
}
/* Generate RTL for the start of an if-then.  COND is the expression
   whose truth should be tested.

   If EXITFLAG is nonzero, this conditional is visible to
   `exit_something'.  */
void
expand_start_cond (cond, exitflag)
     tree cond;
     int exitflag;
{
  struct nesting *thiscond = ALLOC_NESTING ();

  /* Make an entry on cond_stack for the cond we are entering.  */

  thiscond->next = cond_stack;
  thiscond->all = nesting_stack;
  thiscond->depth = ++nesting_depth;
  thiscond->data.cond.next_label = gen_label_rtx ();
  /* Before we encounter an `else', we don't need a separate exit label
     unless there are supposed to be exit statements
     to exit this conditional.  */
  thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
  thiscond->data.cond.endif_label = thiscond->exit_label;
  cond_stack = thiscond;
  nesting_stack = thiscond;

  if (output_bytecode)
    bc_expand_start_cond (cond, exitflag);
  else
    do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
}
/* Generate RTL between then-clause and the elseif-clause
   of an if-then-elseif-....  */

void
expand_start_elseif (cond)
     tree cond;
{
  if (cond_stack->data.cond.endif_label == 0)
    cond_stack->data.cond.endif_label = gen_label_rtx ();
  emit_jump (cond_stack->data.cond.endif_label);
  emit_label (cond_stack->data.cond.next_label);
  cond_stack->data.cond.next_label = gen_label_rtx ();
  do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
}
/* Generate RTL between the then-clause and the else-clause
   of an if-then-else.  */

void
expand_start_else ()
{
  if (cond_stack->data.cond.endif_label == 0)
    cond_stack->data.cond.endif_label = gen_label_rtx ();

  if (output_bytecode)
    {
      bc_expand_start_else ();
      return;
    }

  emit_jump (cond_stack->data.cond.endif_label);
  emit_label (cond_stack->data.cond.next_label);
  cond_stack->data.cond.next_label = 0;  /* No more _else or _elseif calls.  */
}
/* After calling expand_start_else, turn this "else" into an "else if"
   by providing another condition.  */

void
expand_elseif (cond)
     tree cond;
{
  cond_stack->data.cond.next_label = gen_label_rtx ();
  do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
}
1946 /* Generate RTL for the end of an if-then.
1947 Pop the record for it off of cond_stack. */
1952 struct nesting *thiscond = cond_stack;
1954 if (output_bytecode)
1955 bc_expand_end_cond ();
1958 do_pending_stack_adjust ();
1959 if (thiscond->data.cond.next_label)
1960 emit_label (thiscond->data.cond.next_label);
1961 if (thiscond->data.cond.endif_label)
1962 emit_label (thiscond->data.cond.endif_label);
1965 POPSTACK (cond_stack);
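/* A sketch (hypothetical caller, not part of GCC) of the calling
   protocol for the functions above, as a front end might expand
   `if (c) A; else if (d) B; else C;':  */
#if 0
static void
example_expand_if (c, d)
     tree c, d;
{
  expand_start_cond (c, 0);
  /* ... expand RTL for statement A here ... */
  expand_start_elseif (d);
  /* ... expand RTL for statement B here ... */
  expand_start_else ();
  /* ... expand RTL for statement C here ... */
  expand_end_cond ();
}
#endif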
1970 /* Generate code for the start of an if-then. COND is the expression
1971 whose truth is to be tested; if EXITFLAG is nonzero this conditional
1972 is to be visible to exit_something. It is assumed that the caller
1973 has pushed the previous context on the cond stack. */
1976 bc_expand_start_cond (cond, exitflag)
1980 struct nesting *thiscond = cond_stack;
1982 thiscond->data.case_stmt.nominal_type = cond;
1984 thiscond->exit_label = gen_label_rtx ();
1985 bc_expand_expr (cond);
1986 bc_emit_bytecode (xjumpifnot);
1987 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));
1989 #ifdef DEBUG_PRINT_CODE
1990 fputc ('\n', stderr);
/* Generate the label for the end of an if with
   no else- clause.  */
1998 bc_expand_end_cond ()
2000 struct nesting *thiscond = cond_stack;
2002 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->exit_label));
/* Generate code for the start of the else- clause of
   an if-then-else.  */
2009 bc_expand_start_else ()
2011 struct nesting *thiscond = cond_stack;
2013 thiscond->data.cond.endif_label = thiscond->exit_label;
2014 thiscond->exit_label = gen_label_rtx ();
2015 bc_emit_bytecode (jump);
2016 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));
2018 #ifdef DEBUG_PRINT_CODE
2019 fputc ('\n', stderr);
2022 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->data.cond.endif_label));
2025 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2026 loop should be exited by `exit_something'. This is a loop for which
2027 `expand_continue' will jump to the top of the loop.
Make an entry on loop_stack to record the labels associated with
   this loop.  */
2033 expand_start_loop (exit_flag)
2036 register struct nesting *thisloop = ALLOC_NESTING ();
2038 /* Make an entry on loop_stack for the loop we are entering. */
2040 thisloop->next = loop_stack;
2041 thisloop->all = nesting_stack;
2042 thisloop->depth = ++nesting_depth;
2043 thisloop->data.loop.start_label = gen_label_rtx ();
2044 thisloop->data.loop.end_label = gen_label_rtx ();
2045 thisloop->data.loop.alt_end_label = 0;
2046 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2047 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2048 loop_stack = thisloop;
2049 nesting_stack = thisloop;
2051 if (output_bytecode)
2053 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));
2057 do_pending_stack_adjust ();
2059 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
2060 emit_label (thisloop->data.loop.start_label);
2065 /* Like expand_start_loop but for a loop where the continuation point
2066 (for expand_continue_loop) will be specified explicitly. */
2069 expand_start_loop_continue_elsewhere (exit_flag)
2072 struct nesting *thisloop = expand_start_loop (exit_flag);
2073 loop_stack->data.loop.continue_label = gen_label_rtx ();
2077 /* Specify the continuation point for a loop started with
2078 expand_start_loop_continue_elsewhere.
Use this at the point in the code to which a continue statement
   should jump.  */
2083 expand_loop_continue_here ()
2085 if (output_bytecode)
2087 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (loop_stack->data.loop.continue_label));
2090 do_pending_stack_adjust ();
2091 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
2092 emit_label (loop_stack->data.loop.continue_label);
2098 bc_expand_end_loop ()
2100 struct nesting *thisloop = loop_stack;
2102 bc_emit_bytecode (jump);
2103 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));
2105 #ifdef DEBUG_PRINT_CODE
2106 fputc ('\n', stderr);
2109 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->exit_label));
2110 POPSTACK (loop_stack);
2115 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2116 Pop the block off of loop_stack. */
2122 register rtx start_label;
2123 rtx last_test_insn = 0;
2126 if (output_bytecode)
2128 bc_expand_end_loop ();
2132 insn = get_last_insn ();
2133 start_label = loop_stack->data.loop.start_label;
2135 /* Mark the continue-point at the top of the loop if none elsewhere. */
2136 if (start_label == loop_stack->data.loop.continue_label)
2137 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2139 do_pending_stack_adjust ();
2141 /* If optimizing, perhaps reorder the loop. If the loop
2142 starts with a conditional exit, roll that to the end
2143 where it will optimize together with the jump back.
2145 We look for the last conditional branch to the exit that we encounter
2146 before hitting 30 insns or a CALL_INSN. If we see an unconditional
2147 branch to the exit first, use it.
2149 We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
2150 because moving them is not valid. */
2154 ! (GET_CODE (insn) == JUMP_INSN
2155 && GET_CODE (PATTERN (insn)) == SET
2156 && SET_DEST (PATTERN (insn)) == pc_rtx
2157 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2159 /* Scan insns from the top of the loop looking for a qualified
2160 conditional exit. */
2161 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2162 insn = NEXT_INSN (insn))
2164 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
2167 if (GET_CODE (insn) == NOTE
2168 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2169 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2172 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2175 if (last_test_insn && num_insns > 30)
2178 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
2179 && SET_DEST (PATTERN (insn)) == pc_rtx
2180 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
2181 && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
2182 && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
2183 == loop_stack->data.loop.end_label)
2184 || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
2185 == loop_stack->data.loop.alt_end_label)))
2186 || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
2187 && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
2188 == loop_stack->data.loop.end_label)
2189 || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
2190 == loop_stack->data.loop.alt_end_label)))))
2191 last_test_insn = insn;
2193 if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
2194 && GET_CODE (PATTERN (insn)) == SET
2195 && SET_DEST (PATTERN (insn)) == pc_rtx
2196 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
2197 && ((XEXP (SET_SRC (PATTERN (insn)), 0)
2198 == loop_stack->data.loop.end_label)
2199 || (XEXP (SET_SRC (PATTERN (insn)), 0)
2200 == loop_stack->data.loop.alt_end_label)))
2201 /* Include BARRIER. */
2202 last_test_insn = NEXT_INSN (insn);
2205 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2207 /* We found one. Move everything from there up
2208 to the end of the loop, and add a jump into the loop
2209 to jump to there. */
2210 register rtx newstart_label = gen_label_rtx ();
2211 register rtx start_move = start_label;
2213 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2214 then we want to move this note also. */
2215 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2216 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2217 == NOTE_INSN_LOOP_CONT))
2218 start_move = PREV_INSN (start_move);
2220 emit_label_after (newstart_label, PREV_INSN (start_move));
2221 reorder_insns (start_move, last_test_insn, get_last_insn ());
2222 emit_jump_insn_after (gen_jump (start_label),
2223 PREV_INSN (newstart_label));
2224 emit_barrier_after (PREV_INSN (newstart_label));
2225 start_label = newstart_label;
2229 emit_jump (start_label);
2230 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2231 emit_label (loop_stack->data.loop.end_label);
2233 POPSTACK (loop_stack);
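/* A sketch (hypothetical caller, not part of GCC) of how a front end
   might expand a C `for (; COND; INCR) BODY' loop with the functions
   above; `continue' statements jump to the point marked by
   expand_loop_continue_here.  */
#if 0
static void
example_expand_for (cond, incr)
     tree cond, incr;
{
  expand_start_loop_continue_elsewhere (1);
  expand_exit_loop_if_false (0, cond);	/* 0 means the innermost loop */
  /* ... expand RTL for BODY here ... */
  expand_loop_continue_here ();
  expand_expr_stmt (incr);
  expand_end_loop ();
}
#endif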
2238 /* Generate a jump to the current loop's continue-point.
2239 This is usually the top of the loop, but may be specified
2240 explicitly elsewhere. If not currently inside a loop,
2241 return 0 and do nothing; caller will print an error message. */
2244 expand_continue_loop (whichloop)
2245 struct nesting *whichloop;
2249 whichloop = loop_stack;
2252 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2257 /* Generate a jump to exit the current loop. If not currently inside a loop,
2258 return 0 and do nothing; caller will print an error message. */
2261 expand_exit_loop (whichloop)
2262 struct nesting *whichloop;
2266 whichloop = loop_stack;
2269 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2273 /* Generate a conditional jump to exit the current loop if COND
2274 evaluates to zero. If not currently inside a loop,
2275 return 0 and do nothing; caller will print an error message. */
2278 expand_exit_loop_if_false (whichloop, cond)
2279 struct nesting *whichloop;
2284 whichloop = loop_stack;
2287 if (output_bytecode)
2289 bc_expand_expr (cond);
2290 bc_expand_goto_internal (xjumpifnot,
2291 BYTECODE_BC_LABEL (whichloop->exit_label),
2296 /* In order to handle fixups, we actually create a conditional jump
around an unconditional branch to exit the loop.  If fixups are
2298 necessary, they go before the unconditional branch. */
2300 rtx label = gen_label_rtx ();
2303 do_jump (cond, NULL_RTX, label);
2304 last_insn = get_last_insn ();
2305 if (GET_CODE (last_insn) == CODE_LABEL)
2306 whichloop->data.loop.alt_end_label = last_insn;
2307 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2315 /* Return non-zero if we should preserve sub-expressions as separate
2316 pseudos. We never do so if we aren't optimizing. We always do so
2317 if -fexpensive-optimizations.
2319 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2320 the loop may still be a small one. */
2323 preserve_subexpressions_p ()
2327 if (flag_expensive_optimizations)
2330 if (optimize == 0 || loop_stack == 0)
2333 insn = get_last_insn_anywhere ();
2336 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2337 < n_non_fixed_regs * 3));
2341 /* Generate a jump to exit the current loop, conditional, binding contour
2342 or case statement. Not all such constructs are visible to this function,
2343 only those started with EXIT_FLAG nonzero. Individual languages use
the EXIT_FLAG parameter to control which kinds of constructs you can
   exit this way.
2347 If not currently inside anything that can be exited,
2348 return 0 and do nothing; caller will print an error message. */
2351 expand_exit_something ()
2355 for (n = nesting_stack; n; n = n->all)
2356 if (n->exit_label != 0)
2358 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2365 /* Generate RTL to return from the current function, with no value.
2366 (That is, we do not do anything about returning any value.) */
2369 expand_null_return ()
2371 struct nesting *block = block_stack;
2374 if (output_bytecode)
2376 bc_emit_instruction (ret);
2380 /* Does any pending block have cleanups? */
2382 while (block && block->data.block.cleanups == 0)
2383 block = block->next;
2385 /* If yes, use a goto to return, since that runs cleanups. */
2387 expand_null_return_1 (last_insn, block != 0);
2390 /* Generate RTL to return from the current function, with value VAL. */
2393 expand_value_return (val)
2396 struct nesting *block = block_stack;
2397 rtx last_insn = get_last_insn ();
2398 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2400 /* Copy the value to the return location
2401 unless it's already there. */
2403 if (return_reg != val)
2405 #ifdef PROMOTE_FUNCTION_RETURN
2406 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2407 int unsignedp = TREE_UNSIGNED (type);
2408 enum machine_mode mode
2409 = promote_mode (type, DECL_MODE (DECL_RESULT (current_function_decl)),
2412 if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
2413 convert_move (return_reg, val, unsignedp);
2416 emit_move_insn (return_reg, val);
2418 if (GET_CODE (return_reg) == REG
2419 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2420 emit_insn (gen_rtx (USE, VOIDmode, return_reg));
2422 /* Does any pending block have cleanups? */
2424 while (block && block->data.block.cleanups == 0)
2425 block = block->next;
2427 /* If yes, use a goto to return, since that runs cleanups.
2428 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2430 expand_null_return_1 (last_insn, block != 0);
2433 /* Output a return with no value. If LAST_INSN is nonzero,
2434 pretend that the return takes place after LAST_INSN.
2435 If USE_GOTO is nonzero then don't use a return instruction;
2436 go to the return label instead. This causes any cleanups
2437 of pending blocks to be executed normally. */
2440 expand_null_return_1 (last_insn, use_goto)
2444 rtx end_label = cleanup_label ? cleanup_label : return_label;
2446 clear_pending_stack_adjust ();
2447 do_pending_stack_adjust ();
2450 /* PCC-struct return always uses an epilogue. */
2451 if (current_function_returns_pcc_struct || use_goto)
2454 end_label = return_label = gen_label_rtx ();
2455 expand_goto_internal (NULL_TREE, end_label, last_insn);
2459 /* Otherwise output a simple return-insn if one is available,
2460 unless it won't do the job. */
2462 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2464 emit_jump_insn (gen_return ());
2470 /* Otherwise jump to the epilogue. */
2471 expand_goto_internal (NULL_TREE, end_label, last_insn);
2474 /* Generate RTL to evaluate the expression RETVAL and return it
2475 from the current function. */
2478 expand_return (retval)
2481 /* If there are any cleanups to be performed, then they will
2482 be inserted following LAST_INSN. It is desirable
2483 that the last_insn, for such purposes, should be the
2484 last insn before computing the return value. Otherwise, cleanups
2485 which call functions can clobber the return value. */
2486 /* ??? rms: I think that is erroneous, because in C++ it would
2487 run destructors on variables that might be used in the subsequent
2488 computation of the return value. */
2490 register rtx val = 0;
2494 struct nesting *block;
2496 /* Bytecode returns are quite simple, just leave the result on the
2497 arithmetic stack. */
2498 if (output_bytecode)
2500 bc_expand_expr (retval);
2501 bc_emit_instruction (ret);
2505 /* If function wants no value, give it none. */
2506 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2508 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2510 expand_null_return ();
2514 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2515 cleanups = any_pending_cleanups (1);
2517 if (TREE_CODE (retval) == RESULT_DECL)
2518 retval_rhs = retval;
2519 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2520 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2521 retval_rhs = TREE_OPERAND (retval, 1);
2522 else if (TREE_TYPE (retval) == void_type_node)
2523 /* Recognize tail-recursive call to void function. */
2524 retval_rhs = retval;
2526 retval_rhs = NULL_TREE;
2528 /* Only use `last_insn' if there are cleanups which must be run. */
2529 if (cleanups || cleanup_label != 0)
2530 last_insn = get_last_insn ();
2532 /* Distribute return down conditional expr if either of the sides
2533 may involve tail recursion (see test below). This enhances the number
2534 of tail recursions we see. Don't do this always since it can produce
2535 sub-optimal code in some cases and we distribute assignments into
2536 conditional expressions when it would help. */
2538 if (optimize && retval_rhs != 0
2539 && frame_offset == 0
2540 && TREE_CODE (retval_rhs) == COND_EXPR
2541 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2542 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2544 rtx label = gen_label_rtx ();
2547 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2548 expr = build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2549 DECL_RESULT (current_function_decl),
2550 TREE_OPERAND (retval_rhs, 1));
2551 TREE_SIDE_EFFECTS (expr) = 1;
2552 expand_return (expr);
2555 expr = build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2556 DECL_RESULT (current_function_decl),
2557 TREE_OPERAND (retval_rhs, 2));
2558 TREE_SIDE_EFFECTS (expr) = 1;
2559 expand_return (expr);
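  /* For example (illustrative only): with the distribution above,

	return c ? f (x) : g (x);

     is compiled as if it read

	if (c) return f (x); else return g (x);

     so that each arm can be recognized below as a possible
     tail-recursive call in its own right.  */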
2563 /* For tail-recursive call to current function,
2564 just jump back to the beginning.
2565 It's unsafe if any auto variable in this function
2566 has its address taken; for simplicity,
2567 require stack frame to be empty. */
2568 if (optimize && retval_rhs != 0
2569 && frame_offset == 0
2570 && TREE_CODE (retval_rhs) == CALL_EXPR
2571 && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
2572 && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
2573 /* Finish checking validity, and if valid emit code
2574 to set the argument variables for the new call. */
2575 && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
2576 DECL_ARGUMENTS (current_function_decl)))
2578 if (tail_recursion_label == 0)
2580 tail_recursion_label = gen_label_rtx ();
2581 emit_label_after (tail_recursion_label,
2582 tail_recursion_reentry);
2585 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
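  /* For example (illustrative only): with optimization, in

	int countdown (n)
	     int n;
	{ if (n > 0) return countdown (n - 1); return 0; }

     the frame is empty and `n' lives in a register, so the code above
     stores the new argument value and jumps back to
     tail_recursion_label instead of emitting a real call.  */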
2590 /* This optimization is safe if there are local cleanups
2591 because expand_null_return takes care of them.
2592 ??? I think it should also be safe when there is a cleanup label,
2593 because expand_null_return takes care of them, too.
2594 Any reason why not? */
2595 if (HAVE_return && cleanup_label == 0
2596 && ! current_function_returns_pcc_struct
2597 && BRANCH_COST <= 1)
2599 /* If this is return x == y; then generate
2600 if (x == y) return 1; else return 0;
2601 if we can do it with explicit return insns and
2602 branches are cheap. */
2604 switch (TREE_CODE (retval_rhs))
2612 case TRUTH_ANDIF_EXPR:
2613 case TRUTH_ORIF_EXPR:
2614 case TRUTH_AND_EXPR:
2616 case TRUTH_NOT_EXPR:
2617 case TRUTH_XOR_EXPR:
2618 op0 = gen_label_rtx ();
2619 jumpifnot (retval_rhs, op0);
2620 expand_value_return (const1_rtx);
2622 expand_value_return (const0_rtx);
2626 #endif /* HAVE_return */
2630 && TREE_TYPE (retval_rhs) != void_type_node
2631 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2633 /* Calculate the return value into a pseudo reg. */
2634 val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2636 /* All temporaries have now been used. */
2638 /* Return the calculated value, doing cleanups first. */
2639 expand_value_return (val);
2643 /* No cleanups or no hard reg used;
2644 calculate value into hard return reg. */
2645 expand_expr (retval, const0_rtx, VOIDmode, 0);
2648 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2652 /* Return 1 if the end of the generated RTX is not a barrier.
2653 This means code already compiled can drop through. */
2656 drop_through_at_end_p ()
2658 rtx insn = get_last_insn ();
2659 while (insn && GET_CODE (insn) == NOTE)
2660 insn = PREV_INSN (insn);
2661 return insn && GET_CODE (insn) != BARRIER;
2664 /* Emit code to alter this function's formal parms for a tail-recursive call.
2665 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2666 FORMALS is the chain of decls of formals.
2667 Return 1 if this can be done;
2668 otherwise return 0 and do not emit any code. */
2671 tail_recursion_args (actuals, formals)
2672 tree actuals, formals;
2674 register tree a = actuals, f = formals;
2676 register rtx *argvec;
2678 /* Check that number and types of actuals are compatible
2679 with the formals. This is not always true in valid C code.
2680 Also check that no formal needs to be addressable
2681 and that all formals are scalars. */
2683 /* Also count the args. */
2685 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2687 if (TREE_TYPE (TREE_VALUE (a)) != TREE_TYPE (f))
2689 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2692 if (a != 0 || f != 0)
2695 /* Compute all the actuals. */
2697 argvec = (rtx *) alloca (i * sizeof (rtx));
2699 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2700 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
2702 /* Find which actual values refer to current values of previous formals.
2703 Copy each of them now, before any formal is changed. */
2705 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2709 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2710 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2711 { copy = 1; break; }
2713 argvec[i] = copy_to_reg (argvec[i]);
2716 /* Store the values of the actuals into the formals. */
2718 for (f = formals, a = actuals, i = 0; f;
2719 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2721 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
2722 emit_move_insn (DECL_RTL (f), argvec[i]);
2724 convert_move (DECL_RTL (f), argvec[i],
2725 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
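  /* Why the copies above matter (illustrative example): for a tail
     call such as `f (b, a)' where the formals are `a' and `b',
     storing into `a' first would clobber the value still needed for
     `b'; computing the actuals into temporaries first makes the
     parallel assignment safe.  */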
2732 /* Generate the RTL code for entering a binding contour.
2733 The variables are declared one by one, by calls to `expand_decl'.
2735 EXIT_FLAG is nonzero if this construct should be visible to
2736 `exit_something'. */
2739 expand_start_bindings (exit_flag)
2742 struct nesting *thisblock = ALLOC_NESTING ();
2743 rtx note = output_bytecode ? 0 : emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
2745 /* Make an entry on block_stack for the block we are entering. */
2747 thisblock->next = block_stack;
2748 thisblock->all = nesting_stack;
2749 thisblock->depth = ++nesting_depth;
2750 thisblock->data.block.stack_level = 0;
2751 thisblock->data.block.cleanups = 0;
2752 thisblock->data.block.function_call_count = 0;
2756 if (block_stack->data.block.cleanups == NULL_TREE
2757 && (block_stack->data.block.outer_cleanups == NULL_TREE
2758 || block_stack->data.block.outer_cleanups == empty_cleanup_list))
2759 thisblock->data.block.outer_cleanups = empty_cleanup_list;
2761 thisblock->data.block.outer_cleanups
2762 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2763 block_stack->data.block.outer_cleanups);
2766 thisblock->data.block.outer_cleanups = 0;
2770 && !(block_stack->data.block.cleanups == NULL_TREE
2771 && block_stack->data.block.outer_cleanups == NULL_TREE))
2772 thisblock->data.block.outer_cleanups
2773 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2774 block_stack->data.block.outer_cleanups);
2776 thisblock->data.block.outer_cleanups = 0;
2778 thisblock->data.block.label_chain = 0;
2779 thisblock->data.block.innermost_stack_block = stack_block_stack;
2780 thisblock->data.block.first_insn = note;
2781 thisblock->data.block.block_start_count = ++block_start_count;
2782 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
2783 block_stack = thisblock;
2784 nesting_stack = thisblock;
2786 if (!output_bytecode)
2788 /* Make a new level for allocating stack slots. */
2793 /* Given a pointer to a BLOCK node, save a pointer to the most recently
2794 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
2798 remember_end_note (block)
2799 register tree block;
2801 BLOCK_END_NOTE (block) = last_block_end_note;
2802 last_block_end_note = NULL_RTX;
2805 /* Generate RTL code to terminate a binding contour.
2806 VARS is the chain of VAR_DECL nodes
2807 for the variables bound in this contour.
2808 MARK_ENDS is nonzero if we should put a note at the beginning
2809 and end of this binding contour.
2811 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
2812 (That is true automatically if the contour has a saved stack level.) */
2815 expand_end_bindings (vars, mark_ends, dont_jump_in)
2820 register struct nesting *thisblock = block_stack;
2823 if (output_bytecode)
2825 bc_expand_end_bindings (vars, mark_ends, dont_jump_in);
2830 for (decl = vars; decl; decl = TREE_CHAIN (decl))
2831 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL
2832 && ! DECL_IN_SYSTEM_HEADER (decl))
2833 warning_with_decl (decl, "unused variable `%s'");
2835 if (thisblock->exit_label)
2837 do_pending_stack_adjust ();
2838 emit_label (thisblock->exit_label);
2841 /* If necessary, make a handler for nonlocal gotos taking
2842 place in the function calls in this block. */
2843 if (function_call_count != thisblock->data.block.function_call_count
2845 /* Make handler for outermost block
2846 if there were any nonlocal gotos to this function. */
2847 && (thisblock->next == 0 ? current_function_has_nonlocal_label
2848 /* Make handler for inner block if it has something
2849 special to do when you jump out of it. */
2850 : (thisblock->data.block.cleanups != 0
2851 || thisblock->data.block.stack_level != 0)))
2854 rtx afterward = gen_label_rtx ();
2855 rtx handler_label = gen_label_rtx ();
2856 rtx save_receiver = gen_reg_rtx (Pmode);
2859 /* Don't let jump_optimize delete the handler. */
2860 LABEL_PRESERVE_P (handler_label) = 1;
2862 /* Record the handler address in the stack slot for that purpose,
2863 during this block, saving and restoring the outer value. */
2864 if (thisblock->next != 0)
2866 emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
2869 emit_move_insn (save_receiver, nonlocal_goto_handler_slot);
2870 insns = get_insns ();
2872 emit_insns_before (insns, thisblock->data.block.first_insn);
2876 emit_move_insn (nonlocal_goto_handler_slot,
2877 gen_rtx (LABEL_REF, Pmode, handler_label));
2878 insns = get_insns ();
2880 emit_insns_before (insns, thisblock->data.block.first_insn);
2882 /* Jump around the handler; it runs only when specially invoked. */
2883 emit_jump (afterward);
2884 emit_label (handler_label);
2886 #ifdef HAVE_nonlocal_goto
2887 if (! HAVE_nonlocal_goto)
2889 /* First adjust our frame pointer to its actual value. It was
2890 previously set to the start of the virtual area corresponding to
2891 the stacked variables when we branched here and now needs to be
2892 adjusted to the actual hardware fp value.
Assignments to virtual registers are converted by
2895 instantiate_virtual_regs into the corresponding assignment
2896 to the underlying register (fp in this case) that makes
2897 the original assignment true.
2898 So the following insn will actually be
2899 decrementing fp by STARTING_FRAME_OFFSET. */
2900 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
2902 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2903 if (fixed_regs[ARG_POINTER_REGNUM])
2905 #ifdef ELIMINABLE_REGS
2906 /* If the argument pointer can be eliminated in favor of the
2907 frame pointer, we don't need to restore it. We assume here
2908 that if such an elimination is present, it can always be used.
2909 This is the case on all known machines; if we don't make this
2910 assumption, we do unnecessary saving on many machines. */
2911 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
2914 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
2915 if (elim_regs[i].from == ARG_POINTER_REGNUM
2916 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
2919 if (i == sizeof elim_regs / sizeof elim_regs [0])
2922 /* Now restore our arg pointer from the address at which it
2923 was saved in our stack frame.
If there hasn't been space allocated for it yet, make
	 some now.  */
2926 if (arg_pointer_save_area == 0)
2927 arg_pointer_save_area
2928 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
2929 emit_move_insn (virtual_incoming_args_rtx,
2930 /* We need a pseudo here, or else
2931 instantiate_virtual_regs_1 complains. */
2932 copy_to_reg (arg_pointer_save_area));
2937 /* The handler expects the desired label address in the static chain
2938 register. It tests the address and does an appropriate jump
2939 to whatever label is desired. */
2940 for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
2941 /* Skip any labels we shouldn't be able to jump to from here. */
2942 if (! DECL_TOO_LATE (TREE_VALUE (link)))
2944 rtx not_this = gen_label_rtx ();
2945 rtx this = gen_label_rtx ();
2946 do_jump_if_equal (static_chain_rtx,
2947 gen_rtx (LABEL_REF, Pmode, DECL_RTL (TREE_VALUE (link))),
2949 emit_jump (not_this);
2951 expand_goto (TREE_VALUE (link));
2952 emit_label (not_this);
2954 /* If label is not recognized, abort. */
2955 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0,
2957 emit_label (afterward);
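  /* Illustrative GNU C source (hypothetical, not from the compiler)
     that would exercise the handler emitted above: the `goto bad' in
     the nested function is a nonlocal goto into this contour, and
     arrives with the desired label address in the static chain
     register.

	int outer (n)
	     int n;
	{
	  void inner () { if (n < 0) goto bad; }
	  inner ();
	  return 0;
	bad:
	  return -1;
	}
     */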
2960 /* Don't allow jumping into a block that has cleanups or a stack level. */
2962 || thisblock->data.block.stack_level != 0
2963 || thisblock->data.block.cleanups != 0)
2965 struct label_chain *chain;
2967 /* Any labels in this block are no longer valid to go to.
2968 Mark them to cause an error message. */
2969 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
2971 DECL_TOO_LATE (chain->label) = 1;
2972 /* If any goto without a fixup came to this label,
2973 that must be an error, because gotos without fixups
2974 come from outside all saved stack-levels and all cleanups. */
2975 if (TREE_ADDRESSABLE (chain->label))
2976 error_with_decl (chain->label,
2977 "label `%s' used before containing binding contour");
2981 /* Restore stack level in effect before the block
2982 (only if variable-size objects allocated). */
2983 /* Perform any cleanups associated with the block. */
2985 if (thisblock->data.block.stack_level != 0
2986 || thisblock->data.block.cleanups != 0)
2988 /* Only clean up here if this point can actually be reached. */
2989 if (GET_CODE (get_last_insn ()) != BARRIER)
2991 /* Don't let cleanups affect ({...}) constructs. */
2992 int old_expr_stmts_for_value = expr_stmts_for_value;
2993 rtx old_last_expr_value = last_expr_value;
2994 tree old_last_expr_type = last_expr_type;
2995 expr_stmts_for_value = 0;
2997 /* Do the cleanups. */
2998 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE);
2999 do_pending_stack_adjust ();
3001 expr_stmts_for_value = old_expr_stmts_for_value;
3002 last_expr_value = old_last_expr_value;
3003 last_expr_type = old_last_expr_type;
3005 /* Restore the stack level. */
3007 if (thisblock->data.block.stack_level != 0)
3009 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3010 thisblock->data.block.stack_level, NULL_RTX);
3011 if (nonlocal_goto_handler_slot != 0)
3012 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3017 /* Any gotos out of this block must also do these things.
3018 Also report any gotos with fixups that came to labels in this
3020 fixup_gotos (thisblock,
3021 thisblock->data.block.stack_level,
3022 thisblock->data.block.cleanups,
3023 thisblock->data.block.first_insn,
3027 /* Mark the beginning and end of the scope if requested.
3028 We do this now, after running cleanups on the variables
3029 just going out of scope, so they are in scope for their cleanups. */
3032 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3034 /* Get rid of the beginning-mark if we don't make an end-mark. */
3035 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3037 /* If doing stupid register allocation, make sure lives of all
3038 register variables declared here extend thru end of scope. */
3041 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3043 rtx rtl = DECL_RTL (decl);
3044 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
3048 /* Restore block_stack level for containing block. */
3050 stack_block_stack = thisblock->data.block.innermost_stack_block;
3051 POPSTACK (block_stack);
3053 /* Pop the stack slot nesting and free any slots at this level. */
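/* A sketch (hypothetical caller, not part of GCC) of the
   binding-contour protocol using the functions above; the argument
   conventions follow the calls made elsewhere in this file.  A front
   end expanding `{ int v = ...; }' might do:  */
#if 0
static void
example_expand_block (v)
     tree v;
{
  expand_start_bindings (0);
  expand_decl (v, NULL_TREE);	/* declare V, with no cleanup */
  expand_decl_init (v);
  /* ... expand RTL for the statements of the block ... */
  expand_end_bindings (v, 1, 0);
}
#endif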
3058 /* End a binding contour.
3059 VARS is the chain of VAR_DECL nodes for the variables bound
in this contour.  MARK_ENDS is nonzero if we should put a note
3061 at the beginning and end of this binding contour.
DONT_JUMP_IN is nonzero if it is not valid to jump into this
   contour.  */
3066 bc_expand_end_bindings (vars, mark_ends, dont_jump_in)
3071 struct nesting *thisbind = nesting_stack;
3075 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3076 if (! TREE_USED (TREE_VALUE (decl)) && TREE_CODE (TREE_VALUE (decl)) == VAR_DECL)
3077 warning_with_decl (decl, "unused variable `%s'");
3079 if (thisbind->exit_label)
3080 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisbind->exit_label));
3082 /* Pop block/bindings off stack */
3083 POPSTACK (block_stack);
3086 /* Generate RTL for the automatic variable declaration DECL.
3087 (Other kinds of declarations are simply ignored if seen here.)
3088 CLEANUP is an expression to be executed at exit from this binding contour;
3089 for example, in C++, it might call the destructor for this variable.
3091 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
3092 either before or after calling `expand_decl' but before compiling
3093 any subsequent expressions. This is because CLEANUP may be expanded
3094 more than once, on different branches of execution.
3095 For the same reason, CLEANUP may not contain a CALL_EXPR
3096 except as its topmost node--else `preexpand_calls' would get confused.
3098 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3099 that is not associated with any particular variable.
3101 There is no special support here for C++ constructors.
3102 They should be handled by the proper code in DECL_INITIAL. */
3108 struct nesting *thisblock = block_stack;
3111 if (output_bytecode)
3113 bc_expand_decl (decl, 0);
3117 type = TREE_TYPE (decl);
3119 /* Only automatic variables need any expansion done.
3120 Static and external variables, and external functions,
3121 will be handled by `assemble_variable' (called from finish_decl).
3122 TYPE_DECL and CONST_DECL require nothing.
3123 PARM_DECLs are handled in `assign_parms'. */
3125 if (TREE_CODE (decl) != VAR_DECL)
3127 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3130 /* Create the RTL representation for the variable. */
3132 if (type == error_mark_node)
3133 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
3134 else if (DECL_SIZE (decl) == 0)
3135 /* Variable with incomplete type. */
3137 if (DECL_INITIAL (decl) == 0)
3138 /* Error message was already done; now avoid a crash. */
3139 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
3141 /* An initializer is going to decide the size of this array.
3142 Until we know the size, represent its address with a reg. */
3143 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
3145 else if (DECL_MODE (decl) != BLKmode
/* If -ffloat-store, don't put explicit float vars
	      into regs.  */
3148 && !(flag_float_store
3149 && TREE_CODE (type) == REAL_TYPE)
3150 && ! TREE_THIS_VOLATILE (decl)
3151 && ! TREE_ADDRESSABLE (decl)
3152 && (DECL_REGISTER (decl) || ! obey_regdecls))
3154 /* Automatic variable that can go in a register. */
3155 int unsignedp = TREE_UNSIGNED (type);
3156 enum machine_mode reg_mode
3157 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3159 if (TREE_CODE (type) == COMPLEX_TYPE)
3161 rtx realpart, imagpart;
3162 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (type));
3164 /* For a complex type variable, make a CONCAT of two pseudos
3165 so that the real and imaginary parts
3166 can be allocated separately. */
3167 realpart = gen_reg_rtx (partmode);
3168 REG_USERVAR_P (realpart) = 1;
3169 imagpart = gen_reg_rtx (partmode);
3170 REG_USERVAR_P (imagpart) = 1;
3171 DECL_RTL (decl) = gen_rtx (CONCAT, reg_mode, realpart, imagpart);
3175 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3176 if (TREE_CODE (type) == POINTER_TYPE)
3177 mark_reg_pointer (DECL_RTL (decl));
3178 REG_USERVAR_P (DECL_RTL (decl)) = 1;
3181 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST)
3183 /* Variable of fixed size that goes on the stack. */
3187 /* If we previously made RTL for this decl, it must be an array
3188 whose size was determined by the initializer.
3189 The old address was a register; set that register now
3190 to the proper address. */
3191 if (DECL_RTL (decl) != 0)
3193 if (GET_CODE (DECL_RTL (decl)) != MEM
3194 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3196 oldaddr = XEXP (DECL_RTL (decl), 0);
3200 = assign_stack_temp (DECL_MODE (decl),
3201 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
3202 + BITS_PER_UNIT - 1)
3206 /* Set alignment we actually gave this decl. */
3207 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3208 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3212 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3213 if (addr != oldaddr)
3214 emit_move_insn (oldaddr, addr);
3217 /* If this is a memory ref that contains aggregate components,
3218 mark it as such for cse and loop optimize. */
3219 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3221 /* If this is in memory because of -ffloat-store,
3222 set the volatile bit, to prevent optimizations from
3223 undoing the effects. */
3224 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
3225 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3229 /* Dynamic-size object: must push space on the stack. */
/* Record the stack pointer on entry to block, if we have
	 not already done so.  */
3235 if (thisblock->data.block.stack_level == 0)
3237 do_pending_stack_adjust ();
3238 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3239 &thisblock->data.block.stack_level,
3240 thisblock->data.block.first_insn);
3241 stack_block_stack = thisblock;
3244 /* Compute the variable's size, in bytes. */
3245 size = expand_expr (size_binop (CEIL_DIV_EXPR,
3247 size_int (BITS_PER_UNIT)),
3248 NULL_RTX, VOIDmode, 0);
3251 /* Allocate space on the stack for the variable. */
3252 address = allocate_dynamic_stack_space (size, NULL_RTX,
3255 /* Reference the variable indirect through that rtx. */
3256 DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);
3258 /* If this is a memory ref that contains aggregate components,
3259 mark it as such for cse and loop optimize. */
3260 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3262 /* Indicate the alignment we actually gave this variable. */
3263 #ifdef STACK_BOUNDARY
3264 DECL_ALIGN (decl) = STACK_BOUNDARY;
3266 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3270 if (TREE_THIS_VOLATILE (decl))
3271 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3272 #if 0 /* A variable is not necessarily unchanging
3273 just because it is const. RTX_UNCHANGING_P
3274 means no change in the function,
3275 not merely no change in the variable's scope.
3276 It is correct to set RTX_UNCHANGING_P if the variable's scope
3277 is the whole function. There's no convenient way to test that. */
3278 if (TREE_READONLY (decl))
3279 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
3282 /* If doing stupid register allocation, make sure life of any
3283 register variable starts here, at the start of its scope. */
3286 use_variable (DECL_RTL (decl));
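/* To summarize the cases above: a scalar such as `int i;' typically
   ends up in a pseudo REG; a fixed-size object that must live in
   memory gets a stack slot from assign_stack_temp; and a
   variable-sized object gets a MEM addressed through the pointer
   returned by allocate_dynamic_stack_space.  */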
3290 /* Generate code for the automatic variable declaration DECL. For
3291 most variables this just means we give it a stack offset. The
3292 compiler sometimes emits cleanups without variables and we will
3293 have to deal with those too. */
3296 bc_expand_decl (decl, cleanup)
3304 /* A cleanup with no variable. */
3311 /* Only auto variables need any work. */
3312 if (TREE_CODE (decl) != VAR_DECL || TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3315 type = TREE_TYPE (decl);
3317 if (type == error_mark_node)
3318 DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
3320 else if (DECL_SIZE (decl) == 0)
3322 /* Variable with incomplete type. The stack offset herein will be
3323 fixed later in expand_decl_init (). */
3324 DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
3326 else if (TREE_CONSTANT (DECL_SIZE (decl)))
3328 DECL_RTL (decl) = bc_allocate_local (TREE_INT_CST_LOW (DECL_SIZE (decl)) / BITS_PER_UNIT,
3332 DECL_RTL (decl) = bc_allocate_variable_array (DECL_SIZE (decl));
3335 /* Emit code to perform the initialization of a declaration DECL. */
3338 expand_decl_init (decl)
3341 int was_used = TREE_USED (decl);
3343 if (output_bytecode)
3345 bc_expand_decl_init (decl);
3349 /* If this is a CONST_DECL, we don't have to generate any code, but
3350 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3351 to be set while in the obstack containing the constant. If we don't
3352 do this, we can lose if we have functions nested three deep and the middle
3353 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3354 the innermost function is the first to expand that STRING_CST. */
3355 if (TREE_CODE (decl) == CONST_DECL)
3357 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3358 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3359 EXPAND_INITIALIZER);
3363 if (TREE_STATIC (decl))
3366 /* Compute and store the initial value now. */
3368 if (DECL_INITIAL (decl) == error_mark_node)
3370 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3371 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3372 || code == POINTER_TYPE)
3373 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3377 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3379 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3380 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3384 /* Don't let the initialization count as "using" the variable. */
3385 TREE_USED (decl) = was_used;
3387 /* Free any temporaries we made while initializing the decl. */
3391 /* Expand initialization for variable-sized types. Allocate array
using newlocalSI and set local variable, which is a pointer to the
   allocated space.  */
3396 bc_expand_variable_local_init (decl)
3399 /* Evaluate size expression and coerce to SI */
3400 bc_expand_expr (DECL_SIZE (decl));
3402 /* Type sizes are always (?) of TREE_CODE INTEGER_CST, so
3403 no coercion is necessary (?) */
3405 /* emit_typecode_conversion (preferred_typecode (TYPE_MODE (DECL_SIZE (decl)),
3406 TREE_UNSIGNED (DECL_SIZE (decl))), SIcode); */
3408 /* Emit code to allocate array */
3409 bc_emit_instruction (newlocalSI);
3411 /* Store array pointer in local variable. This is the only instance
3412 where we actually want the address of the pointer to the
3413 variable-size block, rather than the pointer itself. We avoid
3414 using expand_address() since that would cause the pointer to be
3415 pushed rather than its address. Hence the hard-coded reference;
3416 notice also that the variable is always local (no global
3417 variable-size type variables). */
3419 bc_load_localaddr (DECL_RTL (decl));
3420 bc_emit_instruction (storeP);
3424 /* Emit code to initialize a declaration. */
3427 bc_expand_decl_init (decl)
3430 int org_stack_depth;
/* Static initializers are handled elsewhere.  */
3434 if (TREE_STATIC (decl))
/* Remember the original stack depth.  */
3438 org_stack_depth = stack_depth;
3440 /* If the type is variable-size, we first create its space (we ASSUME
3441 it CAN'T be static). We do this regardless of whether there's an
3442 initializer assignment or not. */
3444 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
3445 bc_expand_variable_local_init (decl);
3447 /* Expand initializer assignment */
3448 if (DECL_INITIAL (decl) == error_mark_node)
3450 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3452 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3453 || code == POINTER_TYPE)
3455 expand_assignment (TREE_TYPE (decl), decl, 0, 0);
3457 else if (DECL_INITIAL (decl))
3458 expand_assignment (TREE_TYPE (decl), decl, 0, 0);
3460 /* Restore stack depth */
3461 if (org_stack_depth > stack_depth)
3464 bc_adjust_stack (stack_depth - org_stack_depth);
3468 /* CLEANUP is an expression to be executed at exit from this binding contour;
3469 for example, in C++, it might call the destructor for this variable.
3471 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
3472 either before or after calling `expand_decl' but before compiling
3473 any subsequent expressions. This is because CLEANUP may be expanded
3474 more than once, on different branches of execution.
3475 For the same reason, CLEANUP may not contain a CALL_EXPR
3476 except as its topmost node--else `preexpand_calls' would get confused.
3478 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3479 that is not associated with any particular variable. */
3482 expand_decl_cleanup (decl, cleanup)
3485 struct nesting *thisblock = block_stack;
3487 /* Error if we are not in any block. */
3491 /* Record the cleanup if there is one. */
3495 thisblock->data.block.cleanups
3496 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3497 /* If this block has a cleanup, it belongs in stack_block_stack. */
3498 stack_block_stack = thisblock;
3499 (*interim_eh_hook) (NULL_TREE);
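/* A sketch (hypothetical caller, not part of GCC): recording a
   cleanup not associated with any particular variable, as the comment
   above permits (DECL == 0).  The return value says whether we were
   inside any binding contour.  */
#if 0
static void
example_record_cleanup (cleanup_expr)
     tree cleanup_expr;
{
  if (! expand_decl_cleanup (NULL_TREE, cleanup_expr))
    error ("cleanup outside any binding contour");	/* hypothetical diagnostic */
}
#endif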
3504 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
3505 DECL_ELTS is the list of elements that belong to DECL's type.
3506 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
3509 expand_anon_union_decl (decl, cleanup, decl_elts)
3510 tree decl, cleanup, decl_elts;
3512 struct nesting *thisblock = block_stack;
3515 expand_decl (decl, cleanup);
3516 x = DECL_RTL (decl);
3520 tree decl_elt = TREE_VALUE (decl_elts);
3521 tree cleanup_elt = TREE_PURPOSE (decl_elts);
3522 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
3524 /* Propagate the union's alignment to the elements. */
3525 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
3527 /* If the element has BLKmode and the union doesn't, the union is
3528 aligned such that the element doesn't need to have BLKmode, so
3529 change the element's mode to the appropriate one for its size. */
3530 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
3531 DECL_MODE (decl_elt) = mode
3532 = mode_for_size (TREE_INT_CST_LOW (DECL_SIZE (decl_elt)),
3535 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
3536 instead create a new MEM rtx with the proper mode. */
3537 if (GET_CODE (x) == MEM)
3539 if (mode == GET_MODE (x))
3540 DECL_RTL (decl_elt) = x;
3543 DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0)));
3544 MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
3545 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
3548 else if (GET_CODE (x) == REG)
3550 if (mode == GET_MODE (x))
3551 DECL_RTL (decl_elt) = x;
3553 DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0);
3558 /* Record the cleanup if there is one. */
3561 thisblock->data.block.cleanups
3562 = temp_tree_cons (decl_elt, cleanup_elt,
3563 thisblock->data.block.cleanups);
3565 decl_elts = TREE_CHAIN (decl_elts);
3569 /* Expand a list of cleanups LIST.
3570 Elements may be expressions or may be nested lists.
3572 If DONT_DO is nonnull, then any list-element
3573 whose TREE_PURPOSE matches DONT_DO is omitted.
3574 This is sometimes used to avoid a cleanup associated with
3575 a value that is being returned out of the scope. */
3578 expand_cleanups (list, dont_do)
3583 for (tail = list; tail; tail = TREE_CHAIN (tail))
3584 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
3586 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3587 expand_cleanups (TREE_VALUE (tail), dont_do);
3590 (*interim_eh_hook) (TREE_VALUE (tail));
3592 /* Cleanups may be run multiple times. For example,
3593 when exiting a binding contour, we expand the
3594 cleanups associated with that contour. When a goto
3595 within that binding contour has a target outside that
3596 contour, it will expand all cleanups from its scope to
3597 the target. Though the cleanups are expanded multiple
3598 times, the control paths are non-overlapping so the
3599 cleanups will not be executed twice. */
3600 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
3606 /* Move all cleanups from the current block_stack
3607 to the containing block_stack, where they are assumed to
3608 have been created. If anything can cause a temporary to
3609 be created, but not expanded for more than one level of
3610 block_stacks, then this code will have to change. */
3615 struct nesting *block = block_stack;
3616 struct nesting *outer = block->next;
3618 outer->data.block.cleanups
3619 = chainon (block->data.block.cleanups,
3620 outer->data.block.cleanups);
3621 block->data.block.cleanups = 0;
3625 last_cleanup_this_contour ()
3627 if (block_stack == 0)
3630 return block_stack->data.block.cleanups;
3633 /* Return 1 if there are any pending cleanups at this point.
3634 If THIS_CONTOUR is nonzero, check the current contour as well.
3635 Otherwise, look only at the contours that enclose this one. */
3638 any_pending_cleanups (this_contour)
3641 struct nesting *block;
3643 if (block_stack == 0)
3646 if (this_contour && block_stack->data.block.cleanups != NULL)
3648 if (block_stack->data.block.cleanups == 0
3649 && (block_stack->data.block.outer_cleanups == 0
3651 || block_stack->data.block.outer_cleanups == empty_cleanup_list
3656 for (block = block_stack->next; block; block = block->next)
3657 if (block->data.block.cleanups != 0)
3663 /* Enter a case (Pascal) or switch (C) statement.
3664 Push a block onto case_stack and nesting_stack
3665 to accumulate the case-labels that are seen
3666 and to record the labels generated for the statement.
3668 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
3669 Otherwise, this construct is transparent for `exit_something'.
3671 EXPR is the index-expression to be dispatched on.
3672 TYPE is its nominal type. We could simply convert EXPR to this type,
3673 but instead we take short cuts. */
3676 expand_start_case (exit_flag, expr, type, printname)
3682 register struct nesting *thiscase = ALLOC_NESTING ();
3684 /* Make an entry on case_stack for the case we are entering. */
3686 thiscase->next = case_stack;
3687 thiscase->all = nesting_stack;
3688 thiscase->depth = ++nesting_depth;
3689 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
3690 thiscase->data.case_stmt.case_list = 0;
3691 thiscase->data.case_stmt.index_expr = expr;
3692 thiscase->data.case_stmt.nominal_type = type;
3693 thiscase->data.case_stmt.default_label = 0;
3694 thiscase->data.case_stmt.num_ranges = 0;
3695 thiscase->data.case_stmt.printname = printname;
3696 thiscase->data.case_stmt.seenlabel = 0;
3697 case_stack = thiscase;
3698 nesting_stack = thiscase;
3700 if (output_bytecode)
3702 bc_expand_start_case (thiscase, expr, type, printname);
3706 do_pending_stack_adjust ();
3708 /* Make sure case_stmt.start points to something that won't
3709 need any transformation before expand_end_case. */
3710 if (GET_CODE (get_last_insn ()) != NOTE)
3711 emit_note (NULL_PTR, NOTE_INSN_DELETED);
3713 thiscase->data.case_stmt.start = get_last_insn ();
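/* A sketch (hypothetical caller, not part of GCC) of the
   case-statement protocol, as a C front end might expand
   `switch (e) { case 1: ... default: ... }'.  `convert' is the usual
   tree conversion routine; expand_end_case is defined later in this
   file.  */
#if 0
static void
example_expand_switch (e, case_lab, def_lab)
     tree e, case_lab, def_lab;
{
  tree duplicate;

  expand_start_case (1, e, TREE_TYPE (e), "switch statement");
  pushcase (build_int_2 (1, 0), convert, case_lab, &duplicate);
  /* ... expand RTL for this case's statements ... */
  pushcase (NULL_TREE, convert, def_lab, &duplicate);	/* default label */
  /* ... */
  expand_end_case (e);
}
#endif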
3717 /* Enter a case statement. It is assumed that the caller has pushed
3718 the current context onto the case stack. */
3721 bc_expand_start_case (thiscase, expr, type, printname)
3722 struct nesting *thiscase;
3727 bc_expand_expr (expr);
3728 bc_expand_conversion (TREE_TYPE (expr), type);
3730 /* For cases, the skip is a place we jump to that's emitted after
3731 the size of the jump table is known. */
3733 thiscase->data.case_stmt.skip_label = gen_label_rtx ();
3734 bc_emit_bytecode (jump);
3735 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));
3737 #ifdef DEBUG_PRINT_CODE
3738 fputc ('\n', stderr);
3743 /* Start a "dummy case statement" within which case labels are invalid
3744 and are not connected to any larger real case statement.
3745 This can be used if you don't want to let a case statement jump
3746 into the middle of certain kinds of constructs. */
3749 expand_start_case_dummy ()
3751 register struct nesting *thiscase = ALLOC_NESTING ();
3753 /* Make an entry on case_stack for the dummy. */
3755 thiscase->next = case_stack;
3756 thiscase->all = nesting_stack;
3757 thiscase->depth = ++nesting_depth;
3758 thiscase->exit_label = 0;
3759 thiscase->data.case_stmt.case_list = 0;
3760 thiscase->data.case_stmt.start = 0;
3761 thiscase->data.case_stmt.nominal_type = 0;
3762 thiscase->data.case_stmt.default_label = 0;
3763 thiscase->data.case_stmt.num_ranges = 0;
3764 case_stack = thiscase;
3765 nesting_stack = thiscase;
3768 /* End a dummy case statement. */
3771 expand_end_case_dummy ()
3773 POPSTACK (case_stack);
3776 /* Return the data type of the index-expression
3777 of the innermost case statement, or null if none. */
3780 case_index_expr_type ()
3783 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
3787 /* Accumulate one case or default label inside a case or switch statement.
3788 VALUE is the value of the case (a null pointer, for a default label).
3789 The function CONVERTER, when applied to arguments T and V,
3790 converts the value V to the type T.
3792 If not currently inside a case or switch statement, return 1 and do
3793 nothing. The caller will print a language-specific error message.
3794 If VALUE is a duplicate or overlaps, return 2 and do nothing
3795 except store the (first) duplicate node in *DUPLICATE.
3796 If VALUE is out of range, return 3 and do nothing.
If we are jumping into the scope of a cleanup or var-sized array, return 5.
3798 Return 0 on success.
3800 Extended to handle range statements. */
3803 pushcase (value, converter, label, duplicate)
3804 register tree value;
3805 tree (*converter) PROTO((tree, tree));
3806 register tree label;
3809 register struct case_node **l;
3810 register struct case_node *n;
3814 if (output_bytecode)
3815 return bc_pushcase (value, label);
3817 /* Fail if not inside a real case statement. */
3818 if (! (case_stack && case_stack->data.case_stmt.start))
3821 if (stack_block_stack
3822 && stack_block_stack->depth > case_stack->depth)
3825 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3826 nominal_type = case_stack->data.case_stmt.nominal_type;
3828 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3829 if (index_type == error_mark_node)
3832 /* Convert VALUE to the type in which the comparisons are nominally done. */
3834 value = (*converter) (nominal_type, value);
3836 /* If this is the first label, warn if any insns have been emitted. */
3837 if (case_stack->data.case_stmt.seenlabel == 0)
3840 for (insn = case_stack->data.case_stmt.start;
3842 insn = NEXT_INSN (insn))
3844 if (GET_CODE (insn) == CODE_LABEL)
3846 if (GET_CODE (insn) != NOTE
3847 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3849 warning ("unreachable code at beginning of %s",
3850 case_stack->data.case_stmt.printname);
3855 case_stack->data.case_stmt.seenlabel = 1;
3857 /* Fail if this value is out of range for the actual type of the index
3858 (which may be narrower than NOMINAL_TYPE). */
3859 if (value != 0 && ! int_fits_type_p (value, index_type))
3862 /* Fail if this is a duplicate or overlaps another entry. */
3865 if (case_stack->data.case_stmt.default_label != 0)
3867 *duplicate = case_stack->data.case_stmt.default_label;
3870 case_stack->data.case_stmt.default_label = label;
3874 /* Find the elt in the chain before which to insert the new value,
3875 to keep the chain sorted in increasing order.
3876 But report an error if this element is a duplicate. */
3877 for (l = &case_stack->data.case_stmt.case_list;
3878 /* Keep going past elements distinctly less than VALUE. */
3879 *l != 0 && tree_int_cst_lt ((*l)->high, value);
3884 /* Element we will insert before must be distinctly greater;
3885 overlap means error. */
3886 if (! tree_int_cst_lt (value, (*l)->low))
3888 *duplicate = (*l)->code_label;
3893 /* Add this label to the chain, and succeed.
3894 Copy VALUE so it is on temporary rather than momentary
3895 obstack and will thus survive till the end of the case statement. */
3896 n = (struct case_node *) oballoc (sizeof (struct case_node));
3899 n->high = n->low = copy_node (value);
3900 n->code_label = label;
3904 expand_label (label);
3908 /* Like pushcase but this case applies to all values
3909 between VALUE1 and VALUE2 (inclusive).
3910 The return value is the same as that of pushcase
3911 but there is one additional error code:
3912 4 means the specified range was empty. */
3915 pushcase_range (value1, value2, converter, label, duplicate)
3916 register tree value1, value2;
3917 tree (*converter) PROTO((tree, tree));
3918 register tree label;
3921 register struct case_node **l;
3922 register struct case_node *n;
3926 /* Fail if not inside a real case statement. */
3927 if (! (case_stack && case_stack->data.case_stmt.start))
3930 if (stack_block_stack
3931 && stack_block_stack->depth > case_stack->depth)
3934 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3935 nominal_type = case_stack->data.case_stmt.nominal_type;
3937 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3938 if (index_type == error_mark_node)
3941 /* If this is the first label, warn if any insns have been emitted. */
3942 if (case_stack->data.case_stmt.seenlabel == 0)
3945 for (insn = case_stack->data.case_stmt.start;
3947 insn = NEXT_INSN (insn))
3949 if (GET_CODE (insn) == CODE_LABEL)
3951 if (GET_CODE (insn) != NOTE
3952 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3954 warning ("unreachable code at beginning of %s",
3955 case_stack->data.case_stmt.printname);
3960 case_stack->data.case_stmt.seenlabel = 1;
3962 /* Convert VALUEs to type in which the comparisons are nominally done. */
if (value1 == 0)		/* Negative infinity.  */
  value1 = TYPE_MIN_VALUE (index_type);
value1 = (*converter) (nominal_type, value1);

if (value2 == 0)		/* Positive infinity.  */
  value2 = TYPE_MAX_VALUE (index_type);
value2 = (*converter) (nominal_type, value2);
3971 /* Fail if these values are out of range. */
3972 if (! int_fits_type_p (value1, index_type))
3975 if (! int_fits_type_p (value2, index_type))
3978 /* Fail if the range is empty. */
3979 if (tree_int_cst_lt (value2, value1))
3982 /* If the bounds are equal, turn this into the one-value case. */
3983 if (tree_int_cst_equal (value1, value2))
3984 return pushcase (value1, converter, label, duplicate);
3986 /* Find the elt in the chain before which to insert the new value,
3987 to keep the chain sorted in increasing order.
3988 But report an error if this element is a duplicate. */
3989 for (l = &case_stack->data.case_stmt.case_list;
3990 /* Keep going past elements distinctly less than this range. */
3991 *l != 0 && tree_int_cst_lt ((*l)->high, value1);
3996 /* Element we will insert before must be distinctly greater;
3997 overlap means error. */
3998 if (! tree_int_cst_lt (value2, (*l)->low))
4000 *duplicate = (*l)->code_label;
4005 /* Add this label to the chain, and succeed.
4006 Copy VALUE1, VALUE2 so they are on temporary rather than momentary
4007 obstack and will thus survive till the end of the case statement. */
4009 n = (struct case_node *) oballoc (sizeof (struct case_node));
4012 n->low = copy_node (value1);
4013 n->high = copy_node (value2);
4014 n->code_label = label;
4017 expand_label (label);
4019 case_stack->data.case_stmt.num_ranges++;
4025 /* Accumulate one case or default label; VALUE is the value of the
4026 case, or nil for a default label. If not currently inside a case,
4027 return 1 and do nothing. If VALUE is a duplicate or overlaps, return
4028 2 and do nothing. If VALUE is out of range, return 3 and do nothing.
4029 Return 0 on success. This function is a leftover from the earlier
4030 bytecode compiler, which was based on gcc 1.37. It should be
4031 merged into pushcase. */
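/* A hypothetical caller would dispatch on the return code roughly
   like this (the real callers live in the language front ends, and
   their diagnostics may differ):

       switch (bc_pushcase (value, label))
         {
         case 1: error ("case label not within a switch statement"); break;
         case 2: error ("duplicate case value"); break;
         case 3: error ("case value out of range"); break;
         }  */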
4034 bc_pushcase (value, label)
4038 struct nesting *thiscase = case_stack;
4039 struct case_node *case_label, *new_label;
4044 /* Fail if duplicate, overlap, or out of type range. */
4047 value = convert (thiscase->data.case_stmt.nominal_type, value);
4048 if (! int_fits_type_p (value, thiscase->data.case_stmt.nominal_type))
4051 for (case_label = thiscase->data.case_stmt.case_list;
4052 case_label->left; case_label = case_label->left)
4053 if (! tree_int_cst_lt (case_label->left->high, value))
if ((case_label != thiscase->data.case_stmt.case_list
     && ! tree_int_cst_lt (case_label->high, value))
    || (case_label->left && ! tree_int_cst_lt (value, case_label->left->low)))
4061 new_label = (struct case_node *) oballoc (sizeof (struct case_node));
4062 new_label->low = new_label->high = copy_node (value);
4063 new_label->code_label = label;
4064 new_label->left = case_label->left;
4066 case_label->left = new_label;
4067 thiscase->data.case_stmt.num_ranges++;
4071 if (thiscase->data.case_stmt.default_label)
4073 thiscase->data.case_stmt.default_label = label;
4076 expand_label (label);
4080 /* Called when the index of a switch statement is an enumerated type
4081 and there is no default label.
4083 Checks that all enumeration literals are covered by the case
4084 expressions of a switch. Also, warn if there are any extra
4085 switch cases that are *not* elements of the enumerated type.
4087 If all enumeration literals were covered by the case expressions,
4088 turn one of the expressions into the default expression since it should
4089 not be possible to fall through such a switch. */
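/* For example, given

       enum color { RED, GREEN, BLUE } c;
       switch (c)
         {
         case RED: ...
         case GREEN: ...
         }

   this function warns that `BLUE' is not handled in the switch.  */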
4092 check_for_full_enumeration_handling (type)
4095 register struct case_node *n;
4096 register struct case_node **l;
4097 register tree chain;
4100 if (output_bytecode)
4102 bc_check_for_full_enumeration_handling (type);
4106 /* The time complexity of this loop is currently O(N * M), with
4107 N being the number of members in the enumerated type, and
4108 M being the number of case expressions in the switch. */
4110 for (chain = TYPE_VALUES (type);
4112 chain = TREE_CHAIN (chain))
4114 /* Find a match between enumeral and case expression, if possible.
4115 Quit looking when we've gone too far (since case expressions
4116 are kept sorted in ascending order). Warn about enumerators not
4117 handled in the switch statement case expression list. */
4119 for (n = case_stack->data.case_stmt.case_list;
4120 n && tree_int_cst_lt (n->high, TREE_VALUE (chain));
4124 if (!n || tree_int_cst_lt (TREE_VALUE (chain), n->low))
4127 warning ("enumeration value `%s' not handled in switch",
4128 IDENTIFIER_POINTER (TREE_PURPOSE (chain)));
4133 /* Now we go the other way around; we warn if there are case
4134 expressions that don't correspond to enumerators. This can
4135 occur since C and C++ don't enforce type-checking of
4136 assignments to enumeration variables. */
4139 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
4141 for (chain = TYPE_VALUES (type);
4142 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
4143 chain = TREE_CHAIN (chain))
4148 if (TYPE_NAME (type) == 0)
4149 warning ("case value `%d' not in enumerated type",
4150 TREE_INT_CST_LOW (n->low));
4152 warning ("case value `%d' not in enumerated type `%s'",
4153 TREE_INT_CST_LOW (n->low),
4154 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4157 : DECL_NAME (TYPE_NAME (type))));
4159 if (!tree_int_cst_equal (n->low, n->high))
4161 for (chain = TYPE_VALUES (type);
4162 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
4163 chain = TREE_CHAIN (chain))
4168 if (TYPE_NAME (type) == 0)
4169 warning ("case value `%d' not in enumerated type",
4170 TREE_INT_CST_LOW (n->high));
4172 warning ("case value `%d' not in enumerated type `%s'",
4173 TREE_INT_CST_LOW (n->high),
4174 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4177 : DECL_NAME (TYPE_NAME (type))));
/* ??? This optimization is disabled because it causes valid programs to
   fail.  ANSI C does not guarantee that an expression with enum type
   will have a value that is the same as one of the enumeration literals.  */
4187 /* If all values were found as case labels, make one of them the default
4188 label. Thus, this switch will never fall through. We arbitrarily pick
4189 the last one to make the default since this is likely the most
4190 efficient choice. */
4194 for (l = &case_stack->data.case_stmt.case_list;
4199 case_stack->data.case_stmt.default_label = (*l)->code_label;
4206 /* Check that all enumeration literals are covered by the case
4207 expressions of a switch. Also warn if there are any cases
4208 that are not elements of the enumerated type. */
4211 bc_check_for_full_enumeration_handling (type)
4214 struct nesting *thiscase = case_stack;
4215 struct case_node *c;
4218 /* Check for enums not handled. */
4219 for (e = TYPE_VALUES (type); e; e = TREE_CHAIN (e))
4221 for (c = thiscase->data.case_stmt.case_list->left;
4222 c && tree_int_cst_lt (c->high, TREE_VALUE (e));
4225 if (! (c && tree_int_cst_equal (c->low, TREE_VALUE (e))))
4226 warning ("enumerated value `%s' not handled in switch",
4227 IDENTIFIER_POINTER (TREE_PURPOSE (e)));
4230 /* Check for cases not in the enumeration. */
4231 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
4233 for (e = TYPE_VALUES (type);
4234 e && !tree_int_cst_equal (c->low, TREE_VALUE (e));
4238 warning ("case value `%d' not in enumerated type `%s'",
4239 TREE_INT_CST_LOW (c->low),
4240 IDENTIFIER_POINTER (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
4242 : DECL_NAME (TYPE_NAME (type))));
4246 /* Terminate a case (Pascal) or switch (C) statement
4247 in which ORIG_INDEX is the expression to be tested.
4248 Generate the code to test it and jump to the right place. */
4251 expand_end_case (orig_index)
4254 tree minval, maxval, range, orig_minval;
4255 rtx default_label = 0;
4256 register struct case_node *n;
4264 register struct nesting *thiscase = case_stack;
4265 tree index_expr, index_type;
4268 if (output_bytecode)
4270 bc_expand_end_case (orig_index);
4274 table_label = gen_label_rtx ();
4275 index_expr = thiscase->data.case_stmt.index_expr;
4276 index_type = TREE_TYPE (index_expr);
4277 unsignedp = TREE_UNSIGNED (index_type);
4279 do_pending_stack_adjust ();
4281 /* An ERROR_MARK occurs for various reasons including invalid data type. */
4282 if (index_type != error_mark_node)
4284 /* If switch expression was an enumerated type, check that all
4285 enumeration literals are covered by the cases.
4286 No sense trying this if there's a default case, however. */
4288 if (!thiscase->data.case_stmt.default_label
4289 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
4290 && TREE_CODE (index_expr) != INTEGER_CST)
4291 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
4293 /* If this is the first label, warn if any insns have been emitted. */
4294 if (thiscase->data.case_stmt.seenlabel == 0)
4297 for (insn = get_last_insn ();
4298 insn != case_stack->data.case_stmt.start;
4299 insn = PREV_INSN (insn))
if (GET_CODE (insn) != NOTE
    && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4303 warning ("unreachable code at beginning of %s",
4304 case_stack->data.case_stmt.printname);
4309 /* If we don't have a default-label, create one here,
4310 after the body of the switch. */
4311 if (thiscase->data.case_stmt.default_label == 0)
4313 thiscase->data.case_stmt.default_label
4314 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4315 expand_label (thiscase->data.case_stmt.default_label);
4317 default_label = label_rtx (thiscase->data.case_stmt.default_label);
4319 before_case = get_last_insn ();
4321 /* Simplify the case-list before we count it. */
4322 group_case_nodes (thiscase->data.case_stmt.case_list);
4324 /* Get upper and lower bounds of case values.
4325 Also convert all the case values to the index expr's data type. */
4328 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4330 /* Check low and high label values are integers. */
4331 if (TREE_CODE (n->low) != INTEGER_CST)
4333 if (TREE_CODE (n->high) != INTEGER_CST)
4336 n->low = convert (index_type, n->low);
4337 n->high = convert (index_type, n->high);
4339 /* Count the elements and track the largest and smallest
4340 of them (treating them as signed even if they are not). */
4348 if (INT_CST_LT (n->low, minval))
4350 if (INT_CST_LT (maxval, n->high))
4353 /* A range counts double, since it requires two compares. */
4354 if (! tree_int_cst_equal (n->low, n->high))
4358 orig_minval = minval;
4360 /* Compute span of values. */
4362 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
4366 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
4368 emit_jump (default_label);
/* If the range of values is much bigger than the number of values,
   make a sequence of conditional branches instead of a dispatch table.
   If the switch-index is a constant, do it this way
   because we can optimize it.  */
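  /* E.g. a switch whose only cases are 0, 1000 and 1000000 spans a huge
     range with just three values, so it becomes compare-and-branch code;
     a switch over the dense range 0..7 is better served by a table.  */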
4376 #ifndef CASE_VALUES_THRESHOLD
4378 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
4380 /* If machine does not have a case insn that compares the
4381 bounds, this means extra overhead for dispatch tables
4382 which raises the threshold for using them. */
4383 #define CASE_VALUES_THRESHOLD 5
4384 #endif /* HAVE_casesi */
4385 #endif /* CASE_VALUES_THRESHOLD */
4387 else if (TREE_INT_CST_HIGH (range) != 0
4388 || count < CASE_VALUES_THRESHOLD
4389 || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
4391 || TREE_CODE (index_expr) == INTEGER_CST
4392 /* These will reduce to a constant. */
4393 || (TREE_CODE (index_expr) == CALL_EXPR
4394 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
4395 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
4396 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
4397 || (TREE_CODE (index_expr) == COMPOUND_EXPR
4398 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
4400 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4402 /* If the index is a short or char that we do not have
4403 an insn to handle comparisons directly, convert it to
4404 a full integer now, rather than letting each comparison
4405 generate the conversion. */
if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
    && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
	== CODE_FOR_nothing))
4411 enum machine_mode wider_mode;
4412 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
4413 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
4414 if (cmp_optab->handlers[(int) wider_mode].insn_code
4415 != CODE_FOR_nothing)
4417 index = convert_to_mode (wider_mode, index, unsignedp);
4423 do_pending_stack_adjust ();
4425 index = protect_from_queue (index, 0);
4426 if (GET_CODE (index) == MEM)
4427 index = copy_to_reg (index);
4428 if (GET_CODE (index) == CONST_INT
4429 || TREE_CODE (index_expr) == INTEGER_CST)
4431 /* Make a tree node with the proper constant value
4432 if we don't already have one. */
4433 if (TREE_CODE (index_expr) != INTEGER_CST)
4436 = build_int_2 (INTVAL (index),
4437 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
4438 index_expr = convert (index_type, index_expr);
/* For constant index expressions we need only
   issue an unconditional branch to the appropriate
   target code.  The job of removing any unreachable
   code is left to the optimization phase if the
   "-O" option is specified.  */
4446 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4447 if (! tree_int_cst_lt (index_expr, n->low)
4448 && ! tree_int_cst_lt (n->high, index_expr))
4452 emit_jump (label_rtx (n->code_label));
4454 emit_jump (default_label);
4458 /* If the index expression is not constant we generate
4459 a binary decision tree to select the appropriate
4460 target code. This is done as follows:
4462 The list of cases is rearranged into a binary tree,
4463 nearly optimal assuming equal probability for each case.
4465 The tree is transformed into RTL, eliminating
4466 redundant test conditions at the same time.
4468 If program flow could reach the end of the
4469 decision tree an unconditional jump to the
4470 default code is emitted. */
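	  /* E.g. for the three single values 1, 5 and 9, the balanced
	     tree is rooted at 5; the emitted RTL compares INDEX against
	     5 first and only then, on the appropriate side, against 1
	     or 9.  */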
4473 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
4474 && estimate_case_costs (thiscase->data.case_stmt.case_list));
4475 balance_case_nodes (&thiscase->data.case_stmt.case_list,
4477 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
4478 default_label, index_type);
4479 emit_jump_if_reachable (default_label);
4488 enum machine_mode index_mode = SImode;
4489 int index_bits = GET_MODE_BITSIZE (index_mode);
4491 enum machine_mode op_mode;
4493 /* Convert the index to SImode. */
4494 if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
4495 > GET_MODE_BITSIZE (index_mode))
4497 enum machine_mode omode = TYPE_MODE (index_type);
4498 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
4500 /* We must handle the endpoints in the original mode. */
4501 index_expr = build (MINUS_EXPR, index_type,
4502 index_expr, minval);
4503 minval = integer_zero_node;
4504 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4505 emit_cmp_insn (rangertx, index, LTU, NULL_RTX, omode, 1, 0);
4506 emit_jump_insn (gen_bltu (default_label));
4507 /* Now we can safely truncate. */
4508 index = convert_to_mode (index_mode, index, 0);
4512 if (TYPE_MODE (index_type) != index_mode)
4514 index_expr = convert (type_for_size (index_bits, 0),
4516 index_type = TREE_TYPE (index_expr);
4519 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4522 index = protect_from_queue (index, 0);
4523 do_pending_stack_adjust ();
4525 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][0];
4526 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][0])
4528 index = copy_to_mode_reg (op_mode, index);
4530 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
4532 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][1];
4533 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][1])
4535 op1 = copy_to_mode_reg (op_mode, op1);
4537 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
4539 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][2];
4540 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][2])
4542 op2 = copy_to_mode_reg (op_mode, op2);
4544 emit_jump_insn (gen_casesi (index, op1, op2,
4545 table_label, default_label));
4549 #ifdef HAVE_tablejump
4550 if (! win && HAVE_tablejump)
4552 index_expr = convert (thiscase->data.case_stmt.nominal_type,
4553 fold (build (MINUS_EXPR, index_type,
4554 index_expr, minval)));
4555 index_type = TREE_TYPE (index_expr);
4556 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4558 index = protect_from_queue (index, 0);
4559 do_pending_stack_adjust ();
4561 do_tablejump (index, TYPE_MODE (index_type),
4562 expand_expr (range, NULL_RTX, VOIDmode, 0),
4563 table_label, default_label);
4570 /* Get table of labels to jump to, in order of case index. */
4572 ncases = TREE_INT_CST_LOW (range) + 1;
4573 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
4574 bzero ((char *) labelvec, ncases * sizeof (rtx));
4576 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4578 register HOST_WIDE_INT i
4579 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);
4584 = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
4585 if (i + TREE_INT_CST_LOW (orig_minval)
4586 == TREE_INT_CST_LOW (n->high))
4592 /* Fill in the gaps with the default. */
4593 for (i = 0; i < ncases; i++)
4594 if (labelvec[i] == 0)
4595 labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);
/* Output the table.  */
4598 emit_label (table_label);
4600 /* This would be a lot nicer if CASE_VECTOR_PC_RELATIVE
4601 were an expression, instead of an #ifdef/#ifndef. */
4603 #ifdef CASE_VECTOR_PC_RELATIVE
4607 emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
4608 gen_rtx (LABEL_REF, Pmode, table_label),
4609 gen_rtvec_v (ncases, labelvec)));
4611 emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
4612 gen_rtvec_v (ncases, labelvec)));
4614 /* If the case insn drops through the table,
4615 after the table we must jump to the default-label.
4616 Otherwise record no drop-through after the table. */
4617 #ifdef CASE_DROPS_THROUGH
4618 emit_jump (default_label);
4624 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
4625 reorder_insns (before_case, get_last_insn (),
4626 thiscase->data.case_stmt.start);
4629 if (thiscase->exit_label)
4630 emit_label (thiscase->exit_label);
4632 POPSTACK (case_stack);
/* Terminate a case statement.  EXPR is the original index
   expression.  */
4642 bc_expand_end_case (expr)
4645 struct nesting *thiscase = case_stack;
4646 enum bytecode_opcode opcode;
4647 struct bc_label *jump_label;
4648 struct case_node *c;
4650 bc_emit_bytecode (jump);
4651 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));
4653 #ifdef DEBUG_PRINT_CODE
4654 fputc ('\n', stderr);
4657 /* Now that the size of the jump table is known, emit the actual
4658 indexed jump instruction. */
4659 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));
4661 opcode = TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode
4662 ? TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseSU : caseSI
4663 : TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseDU : caseDI;
4665 bc_emit_bytecode (opcode);
/* Now emit the case instruction's literal arguments, in order.
   In addition to the value on the stack, it uses:
   1. The address of the jump table.
   2. The size of the jump table.
   3. The default label.  */
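  /* Schematically (a sketch, not a literal disassembly) the stream
     emitted from here on looks like

	 caseSI  <jump table address>  <table size>  <default label>

     followed, after pointer alignment, by the jump table itself as
     <low, high, label> triples.  */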
4673 jump_label = bc_get_bytecode_label ();
4674 bc_emit_bytecode_labelref (jump_label);
4675 bc_emit_bytecode_const ((char *) &thiscase->data.case_stmt.num_ranges,
4676 sizeof thiscase->data.case_stmt.num_ranges);
4678 if (thiscase->data.case_stmt.default_label)
4679 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (thiscase->data.case_stmt.default_label)));
4681 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));
4683 /* Output the jump table. */
4685 bc_align_bytecode (3 /* PTR_ALIGN */);
4686 bc_emit_bytecode_labeldef (jump_label);
4688 if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode)
4689 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
4691 opcode = TREE_INT_CST_LOW (c->low);
4692 bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);
4694 opcode = TREE_INT_CST_LOW (c->high);
4695 bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);
4697 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
4700 if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == DImode)
4701 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
4703 bc_emit_bytecode_DI_const (c->low);
4704 bc_emit_bytecode_DI_const (c->high);
4706 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
4713 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->exit_label));
4715 /* Possibly issue enumeration warnings. */
4717 if (!thiscase->data.case_stmt.default_label
4718 && TREE_CODE (TREE_TYPE (expr)) == ENUMERAL_TYPE
4719 && TREE_CODE (expr) != INTEGER_CST
4721 check_for_full_enumeration_handling (TREE_TYPE (expr));
4724 #ifdef DEBUG_PRINT_CODE
4725 fputc ('\n', stderr);
4728 POPSTACK (case_stack);
4732 /* Return unique bytecode ID. */
4737 static int bc_uid = 0;
4742 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
4745 do_jump_if_equal (op1, op2, label, unsignedp)
4746 rtx op1, op2, label;
4749 if (GET_CODE (op1) == CONST_INT
4750 && GET_CODE (op2) == CONST_INT)
4752 if (INTVAL (op1) == INTVAL (op2))
4757 enum machine_mode mode = GET_MODE (op1);
4758 if (mode == VOIDmode)
4759 mode = GET_MODE (op2);
4760 emit_cmp_insn (op1, op2, EQ, NULL_RTX, mode, unsignedp, 0);
4761 emit_jump_insn (gen_beq (label));
4765 /* Not all case values are encountered equally. This function
4766 uses a heuristic to weight case labels, in cases where that
4767 looks like a reasonable thing to do.
Right now, all we try to guess is text, and we establish
per-character weights, e.g. chars above space: 16.
4781 If we find any cases in the switch that are not either -1 or in the range
4782 of valid ASCII characters, or are control characters other than those
commonly used with "\", don't treat this switch as scanning text.

Return 1 if these nodes are suitable for cost estimation;
otherwise return 0.  */
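/* E.g. a switch over 'a', 'e', 'i', 'o', 'u' and '\n' looks like text
   scanning: the letters get weight 16 and the newline weight 2, so
   balance_case_nodes will place the letters nearer the root of the
   decision tree.  */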
4789 estimate_case_costs (node)
4792 tree min_ascii = build_int_2 (-1, -1);
4793 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
4797 /* If we haven't already made the cost table, make it now. Note that the
4798 lower bound of the table is -1, not zero. */
4800 if (cost_table == NULL)
4802 cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
4803 bzero ((char *) (cost_table - 1), 129 * sizeof (short));
4805 for (i = 0; i < 128; i++)
4809 else if (ispunct (i))
4811 else if (iscntrl (i))
4815 cost_table[' '] = 8;
4816 cost_table['\t'] = 4;
4817 cost_table['\0'] = 4;
4818 cost_table['\n'] = 2;
4819 cost_table['\f'] = 1;
4820 cost_table['\v'] = 1;
4821 cost_table['\b'] = 1;
4824 /* See if all the case expressions look like text. It is text if the
4825 constant is >= -1 and the highest constant is <= 127. Do all comparisons
4826 as signed arithmetic since we don't want to ever access cost_table with a
4827 value less than -1. Also check that none of the constants in a range
4828 are strange control characters. */
4830 for (n = node; n; n = n->right)
4832 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
4835 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
4836 if (cost_table[i] < 0)
4840 /* All interesting values are within the range of interesting
4841 ASCII characters. */
4845 /* Scan an ordered list of case nodes
4846 combining those with consecutive values or ranges.
4848 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
4851 group_case_nodes (head)
4854 case_node_ptr node = head;
4858 rtx lb = next_real_insn (label_rtx (node->code_label));
4859 case_node_ptr np = node;
4861 /* Try to group the successors of NODE with NODE. */
4862 while (((np = np->right) != 0)
4863 /* Do they jump to the same place? */
4864 && next_real_insn (label_rtx (np->code_label)) == lb
4865 /* Are their ranges consecutive? */
4866 && tree_int_cst_equal (np->low,
4867 fold (build (PLUS_EXPR,
4868 TREE_TYPE (node->high),
4871 /* An overflow is not consecutive. */
4872 && tree_int_cst_lt (node->high,
4873 fold (build (PLUS_EXPR,
4874 TREE_TYPE (node->high),
4876 integer_one_node))))
4878 node->high = np->high;
4880 /* NP is the first node after NODE which can't be grouped with it.
4881 Delete the nodes in between, and move on to that node. */
4887 /* Take an ordered list of case nodes
4888 and transform them into a near optimal binary tree,
4889 on the assumption that any target code selection value is as
4890 likely as any other.
The transformation is performed by splitting the ordered
list into two equal sections plus a pivot.  The parts are
then attached to the pivot as left and right branches.  Each
branch is then transformed recursively.  */
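/* E.g. the ordered chain 1 2 3 4 5 splits at the pivot 3, leaving
   {1 2} as the left branch and {4 5} as the right; each branch is
   then balanced the same way.  */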
4898 balance_case_nodes (head, parent)
4899 case_node_ptr *head;
4900 case_node_ptr parent;
4902 register case_node_ptr np;
4910 register case_node_ptr *npp;
4913 /* Count the number of entries on branch. Also count the ranges. */
4917 if (!tree_int_cst_equal (np->low, np->high))
4921 cost += cost_table[TREE_INT_CST_LOW (np->high)];
4925 cost += cost_table[TREE_INT_CST_LOW (np->low)];
4933 /* Split this list if it is long enough for that to help. */
/* Find the place in the list that bisects the list's total cost;
   here I gets half the total cost.  */
4944 /* Skip nodes while their cost does not reach that amount. */
4945 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
4946 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
4947 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
4950 npp = &(*npp)->right;
4955 /* Leave this branch lopsided, but optimize left-hand
4956 side and fill in `parent' fields for right-hand side. */
4958 np->parent = parent;
4959 balance_case_nodes (&np->left, np);
4960 for (; np->right; np = np->right)
4961 np->right->parent = np;
4965 /* If there are just three nodes, split at the middle one. */
4967 npp = &(*npp)->right;
4970 /* Find the place in the list that bisects the list's total cost,
4971 where ranges count as 2.
4972 Here I gets half the total cost. */
4973 i = (i + ranges + 1) / 2;
4976 /* Skip nodes while their cost does not reach that amount. */
4977 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
4982 npp = &(*npp)->right;
4987 np->parent = parent;
4990 /* Optimize each of the two split parts. */
4991 balance_case_nodes (&np->left, np);
4992 balance_case_nodes (&np->right, np);
4996 /* Else leave this branch as one level,
4997 but fill in `parent' fields. */
4999 np->parent = parent;
5000 for (; np->right; np = np->right)
5001 np->right->parent = np;
5006 /* Search the parent sections of the case node tree
5007 to see if a test for the lower bound of NODE would be redundant.
5008 INDEX_TYPE is the type of the index expression.
The instructions to generate the case decision tree are
output in the same order as nodes are processed, so it is
known that if a parent node checks the range of the current
node minus one, the current node is bounded at its lower
span.  Thus the test would be redundant.  */
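/* E.g. if a parent node tested `index <= 50' and control reached the
   current node on the branch where that test failed, a node whose low
   bound is 51 needs no lower-bound test of its own.  */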
5017 node_has_low_bound (node, index_type)
5022 case_node_ptr pnode;
5024 /* If the lower bound of this node is the lowest value in the index type,
5025 we need not test it. */
5027 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5030 /* If this node has a left branch, the value at the left must be less
5031 than that at this node, so it cannot be bounded at the bottom and
5032 we need not bother testing any further. */
5037 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5038 node->low, integer_one_node));
5040 /* If the subtraction above overflowed, we can't verify anything.
5041 Otherwise, look for a parent that tests our value - 1. */
5043 if (! tree_int_cst_lt (low_minus_one, node->low))
5046 for (pnode = node->parent; pnode; pnode = pnode->parent)
5047 if (tree_int_cst_equal (low_minus_one, pnode->high))
5053 /* Search the parent sections of the case node tree
5054 to see if a test for the upper bound of NODE would be redundant.
5055 INDEX_TYPE is the type of the index expression.
The instructions to generate the case decision tree are
output in the same order as nodes are processed, so it is
known that if a parent node checks the range of the current
node plus one, the current node is bounded at its upper
span.  Thus the test would be redundant.  */
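/* Symmetrically to node_has_low_bound: if a parent tested `index >= 52'
   and control arrived here because that test failed, a node whose high
   bound is 51 needs no upper-bound test.  */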
5064 node_has_high_bound (node, index_type)
5069 case_node_ptr pnode;
5071 /* If the upper bound of this node is the highest value in the type
5072 of the index expression, we need not test against it. */
5074 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5077 /* If this node has a right branch, the value at the right must be greater
5078 than that at this node, so it cannot be bounded at the top and
5079 we need not bother testing any further. */
5084 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5085 node->high, integer_one_node));
5087 /* If the addition above overflowed, we can't verify anything.
5088 Otherwise, look for a parent that tests our value + 1. */
5090 if (! tree_int_cst_lt (node->high, high_plus_one))
5093 for (pnode = node->parent; pnode; pnode = pnode->parent)
5094 if (tree_int_cst_equal (high_plus_one, pnode->low))
5100 /* Search the parent sections of the
5101 case node tree to see if both tests for the upper and lower
5102 bounds of NODE would be redundant. */
5105 node_is_bounded (node, index_type)
5109 return (node_has_low_bound (node, index_type)
5110 && node_has_high_bound (node, index_type));
5113 /* Emit an unconditional jump to LABEL unless it would be dead code. */
5116 emit_jump_if_reachable (label)
5119 if (GET_CODE (get_last_insn ()) != BARRIER)
5123 /* Emit step-by-step code to select a case for the value of INDEX.
5124 The thus generated decision tree follows the form of the
5125 case-node binary tree NODE, whose nodes represent test conditions.
5126 INDEX_TYPE is the type of the index of the switch.
5128 Care is taken to prune redundant tests from the decision tree
5129 by detecting any boundary conditions already checked by
5130 emitted rtx. (See node_has_high_bound, node_has_low_bound
5131 and node_is_bounded, above.)
5133 Where the test conditions can be shown to be redundant we emit
5134 an unconditional jump to the target code. As a further
5135 optimization, the subordinates of a tree node are examined to
5136 check for bounded nodes. In this case conditional and/or
5137 unconditional jumps as a result of the boundary check for the
current node are arranged to target the subordinates' associated
code for out-of-bound conditions on the current node.
5141 We can assume that when control reaches the code generated here,
5142 the index value has already been compared with the parents
5143 of this node, and determined to be on the same side of each parent
5144 as this node is. Thus, if this node tests for the value 51,
5145 and a parent tested for 52, we don't need to consider
5146 the possibility of a value greater than 51. If another parent
5147 tests for the value 50, then this node need not test anything. */
5150 emit_case_nodes (index, node, default_label, index_type)
5156 /* If INDEX has an unsigned type, we must make unsigned branches. */
5157 int unsignedp = TREE_UNSIGNED (index_type);
5158 typedef rtx rtx_function ();
5159 rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
5160 rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
5161 rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
5162 rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
5163 enum machine_mode mode = GET_MODE (index);
5165 /* See if our parents have already tested everything for us.
5166 If they have, emit an unconditional jump for this node. */
5167 if (node_is_bounded (node, index_type))
5168 emit_jump (label_rtx (node->code_label));
5170 else if (tree_int_cst_equal (node->low, node->high))
5172 /* Node is single valued. First see if the index expression matches
5173 this node and then check our children, if any. */
5175 do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
5176 label_rtx (node->code_label), unsignedp);
5178 if (node->right != 0 && node->left != 0)
5180 /* This node has children on both sides.
5181 Dispatch to one side or the other
5182 by comparing the index value with this node's value.
5183 If one subtree is bounded, check that one first,
5184 so we can avoid real branches in the tree. */
5186 if (node_is_bounded (node->right, index_type))
5188 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5190 GT, NULL_RTX, mode, unsignedp, 0);
5192 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
5193 emit_case_nodes (index, node->left, default_label, index_type);
5196 else if (node_is_bounded (node->left, index_type))
5198 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5200 LT, NULL_RTX, mode, unsignedp, 0);
5201 emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
5202 emit_case_nodes (index, node->right, default_label, index_type);
5207 /* Neither node is bounded. First distinguish the two sides;
5208 then emit the code for one side at a time. */
5211 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5213 /* See if the value is on the right. */
5214 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5216 GT, NULL_RTX, mode, unsignedp, 0);
5217 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
5219 /* Value must be on the left.
5220 Handle the left-hand subtree. */
5221 emit_case_nodes (index, node->left, default_label, index_type);
5222 /* If left-hand subtree does nothing,
5224 emit_jump_if_reachable (default_label);
5226 /* Code branches here for the right-hand subtree. */
5227 expand_label (test_label);
5228 emit_case_nodes (index, node->right, default_label, index_type);
5232 else if (node->right != 0 && node->left == 0)
/* Here we have a right child but no left, so we issue a conditional
   branch to default and process the right child.

   Omit the conditional branch to default if it would avoid only one
   right child; it costs too much space to save so little time.  */
5240 if (node->right->right || node->right->left
5241 || !tree_int_cst_equal (node->right->low, node->right->high))
5243 if (!node_has_low_bound (node, index_type))
5245 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5247 LT, NULL_RTX, mode, unsignedp, 0);
5248 emit_jump_insn ((*gen_blt_pat) (default_label));
5251 emit_case_nodes (index, node->right, default_label, index_type);
5254 /* We cannot process node->right normally
5255 since we haven't ruled out the numbers less than
5256 this node's value. So handle node->right explicitly. */
5257 do_jump_if_equal (index,
5258 expand_expr (node->right->low, NULL_RTX,
5260 label_rtx (node->right->code_label), unsignedp);
5263 else if (node->right == 0 && node->left != 0)
5265 /* Just one subtree, on the left. */
5267 #if 0 /* The following code and comment were formerly part
5268 of the condition here, but they didn't work
5269 and I don't understand what the idea was. -- rms. */
5270 /* If our "most probable entry" is less probable
5271 than the default label, emit a jump to
5272 the default label using condition codes
5273 already lying around. With no right branch,
5274 a branch-greater-than will get us to the default
5277 && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
5280 if (node->left->left || node->left->right
5281 || !tree_int_cst_equal (node->left->low, node->left->high))
5283 if (!node_has_high_bound (node, index_type))
5285 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5287 GT, NULL_RTX, mode, unsignedp, 0);
5288 emit_jump_insn ((*gen_bgt_pat) (default_label));
5291 emit_case_nodes (index, node->left, default_label, index_type);
/* We cannot process node->left normally
   since we haven't ruled out the numbers greater than
   this node's value.  So handle node->left explicitly.  */
5297 do_jump_if_equal (index,
5298 expand_expr (node->left->low, NULL_RTX,
5300 label_rtx (node->left->code_label), unsignedp);
5305 /* Node is a range. These cases are very similar to those for a single
5306 value, except that we do not start by testing whether this node
5307 is the one to branch to. */
5309 if (node->right != 0 && node->left != 0)
5311 /* Node has subtrees on both sides.
5312 If the right-hand subtree is bounded,
5313 test for it first, since we can go straight there.
5314 Otherwise, we need to make a branch in the control structure,
5315 then handle the two subtrees. */
5316 tree test_label = 0;
5318 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5320 GT, NULL_RTX, mode, unsignedp, 0);
5322 if (node_is_bounded (node->right, index_type))
5323 /* Right hand node is fully bounded so we can eliminate any
5324 testing and branch directly to the target code. */
5325 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
5328 /* Right hand node requires testing.
5329 Branch to a label where we will handle it later. */
5331 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5332 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
5335 /* Value belongs to this node or to the left-hand subtree. */
5337 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
5338 GE, NULL_RTX, mode, unsignedp, 0);
5339 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
5341 /* Handle the left-hand subtree. */
5342 emit_case_nodes (index, node->left, default_label, index_type);
5344 /* If right node had to be handled later, do that now. */
5348 /* If the left-hand subtree fell through,
5349 don't let it fall into the right-hand subtree. */
5350 emit_jump_if_reachable (default_label);
5352 expand_label (test_label);
5353 emit_case_nodes (index, node->right, default_label, index_type);
5357 else if (node->right != 0 && node->left == 0)
5359 /* Deal with values to the left of this node,
5360 if they are possible. */
5361 if (!node_has_low_bound (node, index_type))
5363 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
5365 LT, NULL_RTX, mode, unsignedp, 0);
5366 emit_jump_insn ((*gen_blt_pat) (default_label));
5369 /* Value belongs to this node or to the right-hand subtree. */
5371 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5373 LE, NULL_RTX, mode, unsignedp, 0);
5374 emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));
5376 emit_case_nodes (index, node->right, default_label, index_type);
5379 else if (node->right == 0 && node->left != 0)
5381 /* Deal with values to the right of this node,
5382 if they are possible. */
5383 if (!node_has_high_bound (node, index_type))
5385 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5387 GT, NULL_RTX, mode, unsignedp, 0);
5388 emit_jump_insn ((*gen_bgt_pat) (default_label));
5391 /* Value belongs to this node or to the left-hand subtree. */
5393 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
5394 GE, NULL_RTX, mode, unsignedp, 0);
5395 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
5397 emit_case_nodes (index, node->left, default_label, index_type);
5402 /* Node has no children so we check low and high bounds to remove
5403 redundant tests. Only one of the bounds can exist,
5404 since otherwise this node is bounded--a case tested already. */
5406 if (!node_has_high_bound (node, index_type))
5408 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5410 GT, NULL_RTX, mode, unsignedp, 0);
5411 emit_jump_insn ((*gen_bgt_pat) (default_label));
5414 if (!node_has_low_bound (node, index_type))
5416 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
5418 LT, NULL_RTX, mode, unsignedp, 0);
5419 emit_jump_insn ((*gen_blt_pat) (default_label));
5422 emit_jump (label_rtx (node->code_label));
5427 /* These routines are used by the loop unrolling code. They copy BLOCK trees
5428 so that the debugging info will be correct for the unrolled loop. */
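/* The expected calling sequence in the unroller is, schematically:

       find_loop_tree_blocks ();
       ... copy the loop body, duplicating its block notes ...
       unroll_block_trees ();

   so that the copied notes can be matched back to BLOCK nodes.  */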
5430 /* Indexed by block number, contains a pointer to the N'th block node. */
5432 static tree *block_vector;
5435 find_loop_tree_blocks ()
5437 tree block = DECL_INITIAL (current_function_decl);
/* The first block is for the function body, and does not have
   corresponding block notes.  Don't include it in the block vector.  */
5441 block = BLOCK_SUBBLOCKS (block);
5443 block_vector = identify_blocks (block, get_insns ());
5447 unroll_block_trees ()
5449 tree block = DECL_INITIAL (current_function_decl);
5451 reorder_blocks (block_vector, block, get_insns ());