/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1992, 1993 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */
/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
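
/* For illustration only (hypothetical front-end code; the real call
   sites live in each language's parser): `if (cond) stmt;' might be
   expanded with a call sequence roughly like

	expand_start_cond (cond, 0);
	... generate RTL for STMT ...
	expand_end_cond ();

   with the condition expanded first and the join label emitted by
   `expand_end_cond'.  */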
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "hard-reg-set.h"
#include "bc-typecd.h"
#include "bc-opcode.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;
/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;
/* Each time we expand the end of a binding contour (in `expand_end_bindings')
   and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
   This is used by the `remember_end_note' function to record the endpoint
   of each generated block in its associated BLOCK node.  */

static rtx last_block_end_note;

/* Number of binding contours started so far in this function.  */

int block_start_count;
/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
extern rtx save_expr_regs;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
extern int frame_offset;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
extern rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
extern rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

extern rtx arg_pointer_save_area;

/* Chain of all RTL_EXPRs that have insns in them.  */
extern tree rtl_expr_chain;
#if 0  /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   A chain of case nodes is initially maintained via the RIGHT fields
   in the nodes.  Nodes with higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
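
/* For illustration only (hypothetical input): a switch with the dense
   labels 1, 2, 3, 4 would normally get a branch table, while sparse
   labels such as 1, 3, 4, 9 are first chained 1 -> 3 -> 4 -> 9 through
   the RIGHT fields and may then be balanced into a tree, e.g. with 4
   at the root and 1/3 in its left subtree, so that the emitted
   compare-and-jump insns form a binary search.  */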
struct case_node
{
  struct case_node *left;	/* Left son in binary tree */
  struct case_node *right;	/* Right son in binary tree; also node chain */
  struct case_node *parent;	/* Parent of node in binary tree */
  tree low;			/* Lowest index value for this label */
  tree high;			/* Highest index value for this label */
  tree code_label;		/* Label to jump to when node matches */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;

static int estimate_case_costs ();
static void balance_case_nodes ();
static void emit_case_nodes ();
static void group_case_nodes ();
static void emit_jump_if_reachable ();

static int warn_if_unused_value ();
static void expand_goto_internal ();
static void bc_expand_goto_internal ();
static int expand_fixup ();
static void bc_expand_fixup ();
static void bc_fixup_gotos ();
void free_temp_slots ();
static void expand_cleanups ();
static void expand_null_return_1 ();
static int tail_recursion_args ();
static void do_jump_if_equal ();
int bc_expand_exit_loop_if_false ();
void bc_expand_start_cond ();
void bc_expand_end_cond ();
void bc_expand_start_else ();
void bc_expand_end_bindings ();
void bc_expand_start_case ();
void bc_check_for_full_enumeration_handling ();
void bc_expand_end_case ();
void bc_expand_decl ();

extern rtx bc_allocate_local ();
extern rtx bc_allocate_variable_array ();
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
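
/* For illustration only: a loop nested inside a conditional would be
   tracked like this (hypothetical sequence):

	expand_start_cond  -> pushes on cond_stack (and nesting_stack)
	expand_start_loop  -> pushes on loop_stack (and nesting_stack)
	expand_end_loop    -> pops loop_stack
	expand_end_cond    -> pops cond_stack

   A `break' inside the loop exits through the innermost construct
   whose `exit_label' field is non-null.  */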
struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
        {
          /* Label for the end of the if construct.
             There is none if EXITFLAG was not set
             and no `else' has been seen yet.  */
          rtx endif_label;
          /* Label for the end of this alternative.
             This may be the end of the if or the next else/elseif.  */
          rtx next_label;
        } cond;
      /* For loops.  */
      struct
        {
          /* Label at the top of the loop; place to loop back to.  */
          rtx start_label;
          /* Label at the end of the whole construct.  */
          rtx end_label;
          /* Label for `continue' statement to jump to;
             this is in front of the stepper of the loop.  */
          rtx continue_label;
        } loop;
      /* For variable binding contours.  */
      struct
        {
          /* Sequence number of this binding contour within the function,
             in order of entry.  */
          int block_start_count;
          /* Nonzero => value to restore stack to on exit.  Complemented by
             bc_stack_level (see below) when generating bytecodes.  */
          rtx stack_level;
          /* The NOTE that starts this contour.
             Used by expand_goto to check whether the destination
             is within each contour or not.  */
          rtx first_insn;
          /* Innermost containing binding contour that has a stack level.  */
          struct nesting *innermost_stack_block;
          /* List of cleanups to be run on exit from this contour.
             This is a list of expressions to be evaluated.
             The TREE_PURPOSE of each link is the ..._DECL node
             which the cleanup pertains to.  */
          tree cleanups;
          /* List of cleanup-lists of blocks containing this block,
             as they were at the locus where this block appears.
             There is an element for each containing block,
             ordered innermost containing block first.
             The tail of this list can be 0 (was empty_cleanup_list),
             if all remaining elements would be empty lists.
             The element's TREE_VALUE is the cleanup-list of that block,
             which may be null.  */
          tree outer_cleanups;
          /* Chain of labels defined inside this binding contour.
             For contours that have stack levels or cleanups.  */
          struct label_chain *label_chain;
          /* Number of function calls seen, as of start of this block.  */
          int function_call_count;
          /* Bytecode specific: stack level to restore stack to on exit.  */
          int bc_stack_level;
        } block;
      /* For switch (C) or case (Pascal) statements,
         and also for dummies (see `expand_start_case_dummy').  */
      struct
        {
          /* The insn after which the case dispatch should finally
             be emitted.  Zero for a dummy.  */
          rtx start;
          /* For bytecodes, the case table is in-lined right in the code.
             A label is needed for skipping over this block.  It is only
             used when generating bytecodes.  */
          rtx skip_label;
          /* A list of case labels, kept in ascending order by value
             as the list is built.
             During expand_end_case, this list may be rearranged into a
             nearly balanced binary tree.  */
          struct case_node *case_list;
          /* Label to jump to if no case matches.  */
          tree default_label;
          /* The expression to be dispatched on.  */
          tree index_expr;
          /* Type that INDEX_EXPR should be converted to.  */
          tree nominal_type;
          /* Number of range exprs in case statement.  */
          int num_ranges;
          /* Name of this kind of statement, for warnings.  */
          char *printname;
          /* Nonzero if a case label has been seen in this case stmt.  */
          char seenlabel;
        } case_stmt;
      /* For exception contours.  */
      struct
        {
          /* List of exceptions raised.  This is a TREE_LIST
             of whatever you want.  */
          tree raised;
          /* List of exceptions caught.  This is also a TREE_LIST
             of whatever you want.  As a special case, it has the
             value `void_type_node' if it handles default exceptions.  */
          tree handled;

          /* First insn of TRY block, in case resumptive model is needed.  */
          rtx first_insn;
          /* Label for the catch clauses.  */
          rtx except_label;
          /* Label for unhandled exceptions.  */
          rtx unhandled_label;
          /* Label at the end of whole construct.  */
          rtx after_label;
          /* Label which "escapes" the exception construct.
             Like EXIT_LABEL for BREAK construct, but for exceptions.  */
          rtx escape_label;
        } except_stmt;
    } data;
};
/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* If any new stacks are added here, add them to POPSTACKS too.  */

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Chain of all pending exception contours.  */
struct nesting *except_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))
/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)					\
do { struct nesting *target = STACK;			\
     struct nesting *this;				\
     do { this = nesting_stack;				\
	  if (loop_stack == this)			\
	    loop_stack = loop_stack->next;		\
	  if (cond_stack == this)			\
	    cond_stack = cond_stack->next;		\
	  if (block_stack == this)			\
	    block_stack = block_stack->next;		\
	  if (stack_block_stack == this)		\
	    stack_block_stack = stack_block_stack->next; \
	  if (case_stack == this)			\
	    case_stack = case_stack->next;		\
	  if (except_stack == this)			\
	    except_stack = except_stack->next;		\
	  nesting_depth = nesting_stack->depth - 1;	\
	  nesting_stack = this->all;			\
	  obstack_free (&stmt_obstack, this); }		\
     while (this != target); } while (0)
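
/* For illustration only: an `expand_end_WHATEVER' function typically
   unwinds with a single call such as

	POPSTACK (loop_stack);

   which frees every nesting object pushed since the matching
   `expand_start_WHATEVER' and keeps all the per-construct chains
   above consistent (hypothetical usage; the real calls appear in the
   expand_end_* functions below).  */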
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0 (was empty_cleanup_list),
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;

  /* Bytecode specific members follow.  */

  /* The label that this jump is jumping to, or 0 for break, continue
     or return.  */
  struct bc_label *bc_target;

  /* The label we use for the fixup patch.  */
  struct bc_label *label;

  /* True (non-0) if fixup has been handled.  */
  int bc_handled;

  /* Like stack_level above, except refers to the interpreter stack.  */
  int bc_stack_level;
};

static struct goto_fixup *goto_fixup_chain;

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};
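
/* For illustration only (hypothetical source): in

	goto out;
	{ ... a contour that restores a stack level ... }
     out: ;

   the forward `goto' cannot be finished when first seen, so
   expand_fixup records a `struct goto_fixup'; when the contour is
   exited, fixup_gotos inserts the stack-restore and cleanup insns
   just before the recorded jump.  */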
void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
#if 0
  empty_cleanup_list = build_tree_list (NULL_TREE, NULL_TREE);
#endif
}

void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;
}
void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
}

void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
}
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  if (!output_bytecode)
    {
      last_insn = get_last_insn ();
      if (!optimize
          && (GET_CODE (last_insn) == CODE_LABEL
              || prev_real_insn (last_insn) == 0))
        emit_insn (gen_nop ());
    }
}
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}
/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}
/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  if (output_bytecode)
    {
      bc_expand_expr (exp);
      bc_emit_instruction (jumpP);
    }
  else
    {
      rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
      emit_queue ();
      emit_indirect_jump (x);
    }
}
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  if (output_bytecode)
    {
      if (! DECL_RTL (label))
        DECL_RTL (label) = bc_gen_rtx ((char *) 0, 0, bc_get_bytecode_label ());
      if (! bc_emit_bytecode_labeldef (DECL_RTL (label)->bc_label))
        error ("multiply defined label");
      return;
    }

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}
/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slot == 0)
    {
      nonlocal_goto_handler_slot
        = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_stack_save (SAVE_NONLOCAL,
                       &nonlocal_goto_stack_level,
                       PREV_INSN (tail_recursion_reentry));
    }
}
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  if (output_bytecode)
    {
      expand_goto_internal (label, label_rtx (label), NULL_RTX);
      return;
    }

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx (LABEL_REF, Pmode, label_rtx (label));
      rtx temp;

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
         case the virtual stack vars register gets instantiated differently
         in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
        emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
                                      copy_rtx (p->nonlocal_goto_handler_slot),
                                      copy_rtx (p->nonlocal_goto_stack_level),
                                      label_ref));
      else
#endif
        {
          rtx addr;

          /* Restore frame pointer for containing function.
             This sets the actual hard register used for the frame pointer
             to the location of the function's incoming static chain info.
             The non-local goto handler will then adjust it to contain the
             proper value and reload the argument pointer, if needed.  */
          emit_move_insn (frame_pointer_rtx, lookup_static_chain (label));

          /* We have now loaded the frame pointer hardware register with
             the address that corresponds to the start of the virtual
             stack vars.  So replace virtual_stack_vars_rtx in all
             addresses we use with stack_pointer_rtx.  */

          /* Get addr of containing function's current nonlocal goto handler,
             which will do any cleanups and then jump to the label.  */
          addr = copy_rtx (p->nonlocal_goto_handler_slot);
          temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
                                           frame_pointer_rtx));

          /* Restore the stack pointer.  Note this uses fp just restored.  */
          addr = p->nonlocal_goto_stack_level;
          if (addr)
            addr = replace_rtx (copy_rtx (addr),
                                virtual_stack_vars_rtx, frame_pointer_rtx);

          emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

          /* Put in the static chain register the nonlocal label address.  */
          emit_move_insn (static_chain_rtx, label_ref);
          /* USE of frame_pointer_rtx added for consistency; not clear if
             really needed.  */
          emit_insn (gen_rtx (USE, VOIDmode, frame_pointer_rtx));
          emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
          emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
          emit_indirect_jump (temp);
        }
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  /* NOTICE!  If a bytecode instruction other than `jump' is needed,
     then the caller has to call bc_expand_goto_internal()
     directly.  This is rather an exceptional case, and there aren't
     that many places where this is necessary.  */
  if (output_bytecode)
    {
      bc_expand_goto_internal (jump, label->bc_label, body);
      return;
    }
  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
         (Check containment by comparing insn-uids.)
         Then restore the outermost stack level within that block,
         and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
        {
          if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
            break;
          if (block->data.block.stack_level != 0)
            stack_level = block->data.block.stack_level;
          /* Execute the cleanups for blocks we are exiting.  */
          if (block->data.block.cleanups != 0)
            {
              expand_cleanups (block->data.block.cleanups, NULL_TREE);
              do_pending_stack_adjust ();
            }
        }

      if (stack_level)
        {
          /* Ensure stack adjust isn't done by emit_jump, as this would clobber
             the stack pointer.  This one should be deleted as dead by flow.  */
          clear_pending_stack_adjust ();
          do_pending_stack_adjust ();
          emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
        }

      if (body != 0 && DECL_TOO_LATE (body))
        error ("jump to `%s' invalidly jumps into binding contour",
               IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
         of at least one goto that has no fixup.  */
      if (body != 0)
        TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
/* Generate a jump with OPCODE to the given bytecode LABEL which is
   found within BODY.  */

static void
bc_expand_goto_internal (opcode, label, body)
     enum bytecode_opcode opcode;
     struct bc_label *label;
     tree body;
{
  struct nesting *block;
  int stack_level = -1;

  /* If the label is defined, adjust the stack as necessary.
     If it's not defined, we have to push the reference on the
     fixup list.  */

  if (label->defined)
    {
      /* Find the innermost pending block that contains the label.
         (Check containment by comparing bytecode uids.)  Then restore the
         outermost stack level within that block.  */

      for (block = block_stack; block; block = block->next)
        {
          if (block->data.block.first_insn->uid < label->uid)
            break;
          if (block->data.block.bc_stack_level)
            stack_level = block->data.block.bc_stack_level;

          /* Execute the cleanups for blocks we are exiting.  */
          if (block->data.block.cleanups != 0)
            {
              expand_cleanups (block->data.block.cleanups, NULL_TREE);
              do_pending_stack_adjust ();
            }
        }

      /* Restore the stack level.  If we need to adjust the stack, we
         must do so after the jump, since the jump may depend on
         what's on the stack.  Thus, any stack-modifying conditional
         jumps (these are the only ones that rely on what's on the
         stack) go into the fixup list.  */

      if (stack_level >= 0
          && stack_depth != stack_level
          && opcode != jump)

        bc_expand_fixup (opcode, label, stack_level);
      else
        {
          if (stack_level >= 0)
            bc_adjust_stack (stack_depth - stack_level);

          if (body && DECL_BIT_FIELD (body))
            error ("jump to `%s' invalidly jumps into binding contour",
                   IDENTIFIER_POINTER (DECL_NAME (body)));

          /* Emit immediate jump.  */
          bc_emit_bytecode (opcode);
          bc_emit_bytecode_labelref (label);

#ifdef DEBUG_PRINT_CODE
          fputc ('\n', stderr);
#endif
        }
    }
  else
    /* Put goto in the fixup list.  */
    bc_expand_fixup (opcode, label, stack_level);
}
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */
static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
          || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
           && (rtl_label == loop_stack->data.loop.start_label
               || rtl_label == loop_stack->data.loop.end_label
               || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
         If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
        next_block = next_block->all;

      if (next_block)
        return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
         which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
        if (block == next_block)
          next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
        || block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
        = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
         after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
         doing the stack adjust here isn't useful.  Doing it with our
         settings at that location isn't useful either.  Let's hope
         someone does it right.  */
      if (last_insn == 0)
        do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
         NOTE_INSN_BEGIN_BLOCK and NOTE_INSN_END_BLOCK notes at
         this point.  The notes will encapsulate any and all fixup
         code which we might later insert at this point in the insn
         stream.  Also, the BLOCK node will be the parent (i.e. the
         `SUPERBLOCK') of any other BLOCK nodes which we might create
         later on when we are expanding the fixup code.  */

      {
        register rtx original_before_jump
          = last_insn ? last_insn : get_last_insn ();

        start_sequence ();
        pushlevel (0);
        fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
        last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
        fixup->context = poplevel (1, 0, 0);  /* Create the BLOCK node now! */
        end_sequence ();
        emit_insns_after (fixup->before_jump, original_before_jump);
      }

      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
        = (((block->data.block.outer_cleanups
#if 0
             && block->data.block.outer_cleanups != empty_cleanup_list
#endif
             )
            || block->data.block.cleanups)
           ? tree_cons (NULL_TREE, block->data.block.cleanups,
                        block->data.block.outer_cleanups)
           : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}
/* Generate bytecode jump with OPCODE to a fixup routine that links to LABEL.
   Make the fixup restore the stack level to STACK_LEVEL.  */

static void
bc_expand_fixup (opcode, label, stack_level)
     enum bytecode_opcode opcode;
     struct bc_label *label;
     int stack_level;
{
  struct goto_fixup *fixup
    = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));

  fixup->label = bc_get_bytecode_label ();
  fixup->bc_target = label;
  fixup->bc_stack_level = stack_level;
  fixup->bc_handled = FALSE;

  fixup->next = goto_fixup_chain;
  goto_fixup_chain = fixup;

  /* Insert a jump to the fixup code.  */
  bc_emit_bytecode (opcode);
  bc_emit_bytecode_labelref (fixup->label);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}
/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */
void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  if (output_bytecode)
    {
      bc_fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in);
      return;
    }

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
        {
          /* Delete inactive fixup from the chain, if that is easy to do.  */
          if (prev != 0)
            prev->next = f->next;
        }
      /* Has this fixup's target label been defined?
         If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
        {
          register rtx cleanup_insns;

          /* Get the first non-label after the label
             this goto jumps to.  If that's before this scope begins,
             we don't have a jump into the scope.  */
          rtx after_label = f->target_rtl;
          while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
            after_label = NEXT_INSN (after_label);

          /* If this fixup jumped into this contour from before the beginning
             of this contour, report an error.  */
          /* ??? Bug: this does not detect jumping in through intermediate
             blocks that have stack levels or cleanups.
             It detects only a problem with the innermost block
             around the label.  */
          if (f->target != 0
              && (dont_jump_in || stack_level || cleanup_list)
              /* If AFTER_LABEL is 0, it means the jump goes to the end
                 of the rtl, which means it jumps into this scope.  */
              && (after_label == 0
                  || INSN_UID (first_insn) < INSN_UID (after_label))
              && INSN_UID (first_insn) > INSN_UID (f->before_jump)
              && ! DECL_REGISTER (f->target))
            {
              error_with_decl (f->target,
                               "label `%s' used before containing binding contour");
              /* Prevent multiple errors for one label.  */
              DECL_REGISTER (f->target) = 1;
            }

          /* We will expand the cleanups into a sequence of their own and
             then later on we will attach this new sequence to the insn
             stream just ahead of the actual jump insn.  */

          start_sequence ();

          /* Temporarily restore the lexical context where we will
             logically be inserting the fixup code.  We do this for the
             sake of getting the debugging information right.  */

          pushlevel (0);
          set_block (f->context);

          /* Expand the cleanups for blocks this jump exits.  */
          if (f->cleanup_list_list)
            {
              tree lists;
              for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
                /* Marked elements correspond to blocks that have been closed.
                   Do their cleanups.  */
                if (TREE_ADDRESSABLE (lists)
                    && TREE_VALUE (lists) != 0)
                  {
                    expand_cleanups (TREE_VALUE (lists), 0);
                    /* Pop any pushes done in the cleanups,
                       in case function is about to return.  */
                    do_pending_stack_adjust ();
                  }
            }

          /* Restore stack level for the biggest contour that this
             jump jumps out of.  */
          if (f->stack_level)
            emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

          /* Finish up the sequence containing the insns which implement the
             necessary cleanups, and then attach that whole sequence to the
             insn stream just ahead of the actual jump insn.  Attaching it
             at that point insures that any cleanups which are in fact
             implicit C++ object destructions (which must be executed upon
             leaving the block) appear (to the debugger) to be taking place
             in an area of the generated code where the object(s) being
             destructed are still "in scope".  */

          cleanup_insns = get_insns ();
          poplevel (1, 0, 0);

          end_sequence ();
          emit_insns_after (cleanup_insns, f->before_jump);

          f->before_jump = 0;
        }
    }

  /* Mark the cleanups of exited blocks so that they are executed
     by the code above.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
        && PREV_INSN (f->target_rtl) == 0
        /* Label has still not appeared.  If we are exiting a block with
           a stack level to restore, that started before the fixup,
           mark this stack level as needing restoration
           when the fixup is later finalized.
           Also mark the cleanup_list_list element for F
           that corresponds to this block, so that ultimately
           this block's cleanups will be executed by the code above.  */
        && thisblock != 0
        /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared,
           it means the label is undefined.  That's erroneous, but possible.  */
        && (thisblock->data.block.block_start_count
            <= f->block_start_count))
      {
        tree lists = f->cleanup_list_list;
        for (; lists; lists = TREE_CHAIN (lists))
          /* If the following elt. corresponds to our containing block
             then the elt. must be for this block.  */
          if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
            TREE_ADDRESSABLE (lists) = 1;

        if (stack_level)
          f->stack_level = stack_level;
      }
}
/* When exiting a binding contour, process all pending gotos requiring fixups.
   Note: STACK_DEPTH is not altered.

   The arguments are currently not used in the bytecode compiler, but we may
   need them one day for languages other than C.

   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */
static void
bc_fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;
  int saved_stack_depth;

  /* F is the fixup we are considering; PREV is the previous one.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
        {
          /* Delete inactive fixup from the chain, if that is easy to do.  */
          if (prev != 0)
            prev->next = f->next;
        }

      /* Emit code to restore the stack and continue.  */
      bc_emit_bytecode_labeldef (f->label);

      /* Save stack_depth across call, since bc_adjust_stack () will alter
         the perceived stack depth via the instructions generated.  */

      if (f->bc_stack_level >= 0)
        {
          saved_stack_depth = stack_depth;
          bc_adjust_stack (stack_depth - f->bc_stack_level);
          stack_depth = saved_stack_depth;
        }

      bc_emit_bytecode (jump);
      bc_emit_bytecode_labelref (f->bc_target);

#ifdef DEBUG_PRINT_CODE
      fputc ('\n', stderr);
#endif
    }

  goto_fixup_chain = NULL;
}
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (output_bytecode)
    {
      error ("`asm' is illegal when generating bytecode");
      return;
    }

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
                      TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}
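
/* For illustration only: a plain statement such as `asm ("nop");'
   (hypothetical input) arrives here as a STRING_CST and is emitted as
   a single ASM_INPUT rtx containing the literal text.  */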
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
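
/* For illustration only: an extended asm such as (hypothetical
   operands)

	asm volatile ("add %1,%2,%0" : "=r" (x) : "r" (y), "r" (z) : "cc");

   reaches this function with STRING == "add %1,%2,%0", one output with
   constraint "=r", two inputs with constraint "r", a clobber list
   naming `cc', and VOL == 1.  */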
void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  /* The insn we have emitted.  */
  rtx insn;

  if (output_bytecode)
    {
      error ("`asm' is illegal when generating bytecode");
      return;
    }

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
        ++nclobbers;
    }

  last_expr_type = 0;
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      int j;
      int found_equal = 0;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
        return;

      /* Make sure constraint has `=' and does not have `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
        {
          if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
            {
              error ("output operand constraint contains `+'");
              return;
            }
          if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '=')
            found_equal = 1;
        }
      if (! found_equal)
        {
          error ("output operand constraint lacks `='");
          return;
        }

      /* If an output operand is not a variable or indirect ref,
         create a SAVE_EXPR which is a pseudo-reg
         to act as an intermediate temporary.
         Make the asm insn write into that, then copy it to
         the real output operand.  */

      while (TREE_CODE (val) == COMPONENT_REF
             || TREE_CODE (val) == ARRAY_REF)
        val = TREE_OPERAND (val, 0);

      if (TREE_CODE (val) != VAR_DECL
          && TREE_CODE (val) != PARM_DECL
          && TREE_CODE (val) != INDIRECT_REF)
        {
          TREE_VALUE (tail) = save_expr (TREE_VALUE (tail));
          /* If it's a constant, print error now so don't crash later.  */
          if (TREE_CODE (TREE_VALUE (tail)) != SAVE_EXPR)
            {
              error ("invalid output in `asm'");
              return;
            }
        }

      output_rtx[i] = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
    }
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx (ASM_OPERANDS, VOIDmode,
                  TREE_STRING_POINTER (string), "", 0, argvec, constraints,
                  filename, line);
  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;

      /* If there's an erroneous arg, emit no insn,
         because the ASM_INPUT would get VOIDmode
         and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
        return;
      if (TREE_PURPOSE (tail) == NULL_TREE)
        {
          error ("hard register `%s' listed as input operand to `asm'",
                 TREE_STRING_POINTER (TREE_VALUE (tail)) );
          return;
        }

      /* Make sure constraint has neither `=' nor `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
        if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '='
            || TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
          {
            error ("input operand constraint contains `%c'",
                   TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
            return;
          }

      XVECEXP (body, 3, i)      /* argvec */
        = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
      XVECEXP (body, 4, i)      /* constraints */
        = gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
                   TREE_STRING_POINTER (TREE_PURPOSE (tail)));
      i++;
    }

  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);
  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
                               ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;
      if (num == 0) num = 1;
      body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
        {
          XVECEXP (body, 0, i)
            = gen_rtx (SET, VOIDmode,
                       output_rtx[i],
                       gen_rtx (ASM_OPERANDS, VOIDmode,
                                TREE_STRING_POINTER (string),
                                TREE_STRING_POINTER (TREE_PURPOSE (tail)),
                                i, argvec, constraints,
                                filename, line));
          MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
        }

      /* If there are no outputs (but there are some clobbers)
         store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
        XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
        {
          char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
          int j = decode_reg_name (regname);

          if (j < 0)
            {
              if (j == -3)      /* `cc', which is not a register */
                continue;

              if (j == -4)      /* `memory', don't cache memory across asm */
                {
                  XVECEXP (body, 0, i++)
                    = gen_rtx (CLOBBER, VOIDmode,
                               gen_rtx (MEM, QImode,
                                        gen_rtx (SCRATCH, VOIDmode, 0)));
                  continue;
                }

              error ("unknown register name `%s' in `asm'", regname);
              return;
            }

          /* Use QImode since that's guaranteed to clobber just one reg.  */
          XVECEXP (body, 0, i++)
            = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));
        }

      insn = emit_insn (body);
    }

  free_temp_slots ();
}
/* Generate RTL to evaluate the expression EXP
   and remember it in case this is the VALUE in a ({... VALUE; }) constr.  */

void
expand_expr_stmt (exp)
     tree exp;
{
  if (output_bytecode)
    {
      int org_stack_depth = stack_depth;

      bc_expand_expr (exp);

      /* Restore stack depth.  */
      if (stack_depth < org_stack_depth)
        abort ();

      bc_emit_instruction (drop);

      last_expr_type = TREE_TYPE (exp);
      return;
    }

  /* If -W, warn about statements with no side effects,
     except for an explicit cast to void (e.g. for assert()), and
     except inside a ({...}) where they may be useful.  */
  if (expr_stmts_for_value == 0 && exp != error_mark_node)
    {
      if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
          && !(TREE_CODE (exp) == CONVERT_EXPR
               && TREE_TYPE (exp) == void_type_node))
        warning_with_file_and_line (emit_filename, emit_lineno,
                                    "statement with no effect");
      else if (warn_unused)
        warn_if_unused_value (exp);
    }
  last_expr_type = TREE_TYPE (exp);
  if (! flag_syntax_only)
    last_expr_value = expand_expr (exp,
                                   (expr_stmts_for_value
                                    ? NULL_RTX : const0_rtx),
                                   VOIDmode, 0);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
      && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
        ;
      else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
        copy_to_reg (last_expr_value);
      else
        {
          rtx lab = gen_label_rtx ();

          /* Compare the value with itself to reference it.  */
          emit_cmp_insn (last_expr_value, last_expr_value, EQ,
                         expand_expr (TYPE_SIZE (last_expr_type),
                                      NULL_RTX, VOIDmode, 0),
                         BLKmode, 0,
                         TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
          emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
          emit_label (lab);
        }
    }

  /* If this expression is part of a ({...}) and is in memory, we may have
     to preserve temporaries.  */
  preserve_temp_slots (last_expr_value);

  /* Free any temporaries used to evaluate this expression.  Any temporary
     used as a result of this expression will already have been preserved
     above.  */
  free_temp_slots ();

  emit_queue ();
}
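
/* For illustration only: in the GNU C statement-expression extension
   (hypothetical example)

	y = ({ int t = f (x); t * 2; });

   each inner statement reaches expand_expr_stmt with
   expr_stmts_for_value nonzero, so the value of the last expression,
   `t * 2', is left in last_expr_value/last_expr_type for
   expand_end_stmt_expr to package into an RTL_EXPR.  */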
/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  */

static int
warn_if_unused_value (exp)
     tree exp;
{
  if (TREE_USED (exp))
    return 0;

  switch (TREE_CODE (exp))
    {
    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case MODIFY_EXPR:
    case INIT_EXPR:
    case TARGET_EXPR:
    case CALL_EXPR:
    case METHOD_CALL_EXPR:
    case RTL_EXPR:
    case WITH_CLEANUP_EXPR:
    case EXIT_EXPR:
      /* We don't warn about COND_EXPR because it may be a useful
         construct if either arm contains a side effect.  */
    case COND_EXPR:
      return 0;

    case BIND_EXPR:
      /* For a binding, warn if no side effect within it.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case TRUTH_ORIF_EXPR:
    case TRUTH_ANDIF_EXPR:
      /* In && or ||, warn if 2nd operand has no side effect.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case COMPOUND_EXPR:
      if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
        return 1;
      /* Let people do `(foo (), 0)' without a warning.  */
      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
        return 0;
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case NOP_EXPR:
    case CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Don't warn about values cast to void.  */
      if (TREE_TYPE (exp) == void_type_node)
        return 0;
      /* Don't warn about conversions not explicit in the user's program.  */
      if (TREE_NO_UNUSED_WARNING (exp))
        return 0;
      /* Assignment to a cast usually results in a cast of a modify.
         Don't complain about that.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == MODIFY_EXPR)
        return 0;
      /* Sometimes it results in a cast of a cast of a modify.
         Don't complain about that.  */
      if ((TREE_CODE (TREE_OPERAND (exp, 0)) == CONVERT_EXPR
           || TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR)
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == MODIFY_EXPR)
        return 0;

    default:
      /* Referencing a volatile value is a side effect, so don't warn.  */
      if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
           || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
          && TREE_THIS_VOLATILE (exp))
        return 0;
      warning_with_file_and_line (emit_filename, emit_lineno,
                                  "value computed is not used");
      return 1;
    }
}
/* Clear out the memory of the last expression evaluated.  */

void
clear_last_expr ()
{
  last_expr_type = 0;
}

/* Begin a statement which will return a value.
   Return the RTL_EXPR for this statement expr.
   The caller must save that value and pass it to expand_end_stmt_expr.  */

tree
expand_start_stmt_expr ()
{
  int momentary;
  tree t;

  /* When generating bytecode just note down the stack depth.  */
  if (output_bytecode)
    return (build_int_2 (stack_depth, 0));

  /* Make the RTL_EXPR node temporary, not momentary,
     so that rtl_expr_chain doesn't become garbage.  */
  momentary = suspend_momentary ();
  t = make_node (RTL_EXPR);
  resume_momentary (momentary);
  start_sequence ();
  NO_DEFER_POP;
  expr_stmts_for_value++;
  return t;
}
/* Restore the previous state at the end of a statement that returns a value.
   Returns a tree node representing the statement's value and the
   insns to compute the value.

   The nodes of that expression have been freed by now, so we cannot use them.
   But we don't want to do that anyway; the expression has already been
   evaluated and now we just want to use the value.  So generate a RTL_EXPR
   with the proper type and RTL value.

   If the last substatement was not an expression,
   return something with type `void'.  */

tree
expand_end_stmt_expr (t)
     tree t;
{
  if (output_bytecode)
    {
      /* At this point, all expressions have been evaluated in order.
         However, all expression values have been popped when evaluated,
         which means we have to recover the last expression value.  This is
         the last value removed by means of a `drop' instruction.  Instead
         of adding code to inhibit dropping the last expression value, it
         is here recovered by undoing the `drop'.  Since `drop' is
         equivalent to `adjstackSI [1]', it can be undone with `adjstackSI
         [-1]'.  */

      bc_adjust_stack (-1);

      if (!last_expr_type)
        last_expr_type = void_type_node;

      t = make_node (RTL_EXPR);
      TREE_TYPE (t) = last_expr_type;
      RTL_EXPR_RTL (t) = NULL;
      RTL_EXPR_SEQUENCE (t) = NULL;

      /* Don't consider deleting this expr or containing exprs at tree level.  */
      TREE_THIS_VOLATILE (t) = 1;

      last_expr_type = 0;
      return t;
    }

  OK_DEFER_POP;

  if (last_expr_type == 0)
    {
      last_expr_type = void_type_node;
      last_expr_value = const0_rtx;
    }
  else if (last_expr_value == 0)
    /* There are some cases where this can happen, such as when the
       statement is void type.  */
    last_expr_value = const0_rtx;
  else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
    /* Remove any possible QUEUED.  */
    last_expr_value = protect_from_queue (last_expr_value, 0);

  emit_queue ();

  TREE_TYPE (t) = last_expr_type;
  RTL_EXPR_RTL (t) = last_expr_value;
  RTL_EXPR_SEQUENCE (t) = get_insns ();

  rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);

  end_sequence ();

  /* Don't consider deleting this expr or containing exprs at tree level.  */
  TREE_SIDE_EFFECTS (t) = 1;
  /* Propagate volatility of the actual RTL expr.  */
  TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);

  last_expr_type = 0;
  expr_stmts_for_value--;

  return t;
}
/* The exception handling nesting looks like this:

	{	<-- exception handler block
	  ...		<-- in an exception handler
	  {	<-- TRY block
	    :		<-- in a TRY block
	    :		<-- in an exception handler
	    ...
	  }
	  {	<-- except block
	    :		<-- in an except block
	    :		<-- in an exception handler
	    ...
	  }
	}  */
/* Return nonzero iff in a try block at level LEVEL.  */

int
in_try_block (level)
     int level;
{
  struct nesting *n = except_stack;
  while (1)
    {
      while (n && n->data.except_stmt.after_label != 0)
        n = n->next;
      if (n == 0)
        return 0;
      if (level == 0)
        return 1;
      level--;
      n = n->next;
    }
}
/* Return nonzero iff in an except block at level LEVEL.  */

int
in_except_block (level)
     int level;
{
  struct nesting *n = except_stack;
  while (1)
    {
      while (n && n->data.except_stmt.after_label == 0)
        n = n->next;
      if (n == 0)
        return 0;
      if (level == 0)
        return 1;
      level--;
      n = n->next;
    }
}
/* Return nonzero iff in an exception handler at level LEVEL.  */

int
in_exception_handler (level)
     int level;
{
  struct nesting *n = except_stack;
  while (n && level--)
    n = n->next;

  return n != 0;
}
/* Record the fact that the current exception nesting raises
   exception EX.  If not in an exception handler, return 0.  */

int
expand_raises (ex)
     tree ex;
{
  tree *raises_ptr;

  if (except_stack == 0)
    return 0;
  raises_ptr = &except_stack->data.except_stmt.raised;
  if (! value_member (ex, *raises_ptr))
    *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
  return 1;
}
/* Generate RTL for the start of a try block.

   TRY_CLAUSE is the condition to test to enter the try block.  */

void
expand_start_try (try_clause, exitflag, escapeflag)
     tree try_clause;
     int exitflag;
     int escapeflag;
{
  struct nesting *thishandler = ALLOC_NESTING ();

  /* Make an entry on cond_stack for the cond we are entering.  */

  thishandler->next = except_stack;
  thishandler->all = nesting_stack;
  thishandler->depth = ++nesting_depth;
  thishandler->data.except_stmt.raised = 0;
  thishandler->data.except_stmt.handled = 0;
  thishandler->data.except_stmt.first_insn = get_insns ();
  thishandler->data.except_stmt.except_label = gen_label_rtx ();
  thishandler->data.except_stmt.unhandled_label = 0;
  thishandler->data.except_stmt.after_label = 0;
  thishandler->data.except_stmt.escape_label
    = escapeflag ? thishandler->data.except_stmt.except_label : 0;
  thishandler->exit_label = exitflag ? gen_label_rtx () : 0;
  except_stack = thishandler;
  nesting_stack = thishandler;

  do_jump (try_clause, thishandler->data.except_stmt.except_label, NULL_RTX);
}
/* End of a TRY block.  Nothing to do for now.  */

void
expand_end_try ()
{
  except_stack->data.except_stmt.after_label = gen_label_rtx ();
  expand_goto_internal (NULL_TREE, except_stack->data.except_stmt.after_label,
                        NULL_RTX);
}
/* Start an `except' nesting contour.
   EXITFLAG says whether this contour should be able to `exit' something.
   ESCAPEFLAG says whether this contour should be escapable.  */

void
expand_start_except (exitflag, escapeflag)
     int exitflag, escapeflag;
{
  if (exitflag)
    {
      struct nesting *n;

      /* An `exit' from catch clauses goes out to next exit level,
         if there is one.  Otherwise, it just goes to the end
         of the construct.  */
      for (n = except_stack->next; n; n = n->next)
        if (n->exit_label != 0)
          {
            except_stack->exit_label = n->exit_label;
            break;
          }
      if (n == 0)
        except_stack->exit_label = except_stack->data.except_stmt.after_label;
    }

  if (escapeflag)
    {
      struct nesting *n;

      /* An `escape' from catch clauses goes out to next escape level,
         if there is one.  Otherwise, it just goes to the end
         of the construct.  */
      for (n = except_stack->next; n; n = n->next)
        if (n->data.except_stmt.escape_label != 0)
          {
            except_stack->data.except_stmt.escape_label
              = n->data.except_stmt.escape_label;
            break;
          }
      if (n == 0)
        except_stack->data.except_stmt.escape_label
          = except_stack->data.except_stmt.after_label;
    }

  do_pending_stack_adjust ();
  emit_label (except_stack->data.except_stmt.except_label);
}
2026 /* Generate code to `escape' from an exception contour. This
is like `exiting', but does not conflict with constructs which
use `exit_label'.
2030 Return nonzero if this contour is escapable, otherwise
2031 return zero, and language-specific code will emit the
2032 appropriate error message. */
2034 expand_escape_except ()
2038 for (n = except_stack; n; n = n->next)
2039 if (n->data.except_stmt.escape_label != 0)
2041 expand_goto_internal (NULL_TREE,
2042 n->data.except_stmt.escape_label, NULL_RTX);
/* Finish processing an `except' contour.
   Culls out all exceptions which might be raised but not
   handled, and returns the list to the caller.
   Language-specific code is responsible for dealing with these
   exceptions.  */
2056 expand_end_except ()
2059 tree raised = NULL_TREE;
2061 do_pending_stack_adjust ();
2062 emit_label (except_stack->data.except_stmt.after_label);
2064 n = except_stack->next;
2067 /* Propagate exceptions raised but not handled to next
2069 tree handled = except_stack->data.except_stmt.raised;
2070 if (handled != void_type_node)
2072 tree prev = NULL_TREE;
2073 raised = except_stack->data.except_stmt.raised;
2077 for (this_raise = raised, prev = 0; this_raise;
2078 this_raise = TREE_CHAIN (this_raise))
2080 if (value_member (TREE_VALUE (this_raise), handled))
2083 TREE_CHAIN (prev) = TREE_CHAIN (this_raise);
2086 raised = TREE_CHAIN (raised);
2087 if (raised == NULL_TREE)
2094 handled = TREE_CHAIN (handled);
2096 if (prev == NULL_TREE)
2099 TREE_CHAIN (prev) = n->data.except_stmt.raised;
2101 n->data.except_stmt.raised = raised;
2105 POPSTACK (except_stack);
2110 /* Record that exception EX is caught by this exception handler.
2111 Return nonzero if in exception handling construct, otherwise return 0. */
2118 if (except_stack == 0)
2120 raises_ptr = &except_stack->data.except_stmt.handled;
2121 if (*raises_ptr != void_type_node
2123 && ! value_member (ex, *raises_ptr))
2124 *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
2128 /* Record that this exception handler catches all exceptions.
2129 Return nonzero if in exception handling construct, otherwise return 0. */
2132 expand_catch_default ()
2134 if (except_stack == 0)
2136 except_stack->data.except_stmt.handled = void_type_node;
2143 if (except_stack == 0 || except_stack->data.except_stmt.after_label == 0)
2145 expand_goto_internal (NULL_TREE, except_stack->data.except_stmt.after_label,
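/* For illustration only -- the actual sequence of calls is chosen by
   the front end, not by this file: a construct of the form

	try { BODY } except { HANDLERS }

   might be expanded by calls roughly like

	expand_start_try (COND, 0, 1);
	... expand BODY ...
	expand_end_try ();
	expand_start_except (0, 1);
	... expand_catch (EX) or expand_catch_default (),
	    then expand each handler ...
	raised = expand_end_except ();

   after which the front end is responsible for the list of
   exceptions that may be raised here but not handled.  */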
2150 /* Generate RTL for the start of an if-then. COND is the expression
2151 whose truth should be tested.
2153 If EXITFLAG is nonzero, this conditional is visible to
2154 `exit_something'. */
2157 expand_start_cond (cond, exitflag)
2161 struct nesting *thiscond = ALLOC_NESTING ();
2163 /* Make an entry on cond_stack for the cond we are entering. */
2165 thiscond->next = cond_stack;
2166 thiscond->all = nesting_stack;
2167 thiscond->depth = ++nesting_depth;
2168 thiscond->data.cond.next_label = gen_label_rtx ();
2169 /* Before we encounter an `else', we don't need a separate exit label
2170 unless there are supposed to be exit statements
2171 to exit this conditional. */
2172 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2173 thiscond->data.cond.endif_label = thiscond->exit_label;
2174 cond_stack = thiscond;
2175 nesting_stack = thiscond;
2177 if (output_bytecode)
2178 bc_expand_start_cond (cond, exitflag);
2180 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
/* Generate RTL between the then-clause and the elseif-clause
2184 of an if-then-elseif-.... */
2187 expand_start_elseif (cond)
2190 if (cond_stack->data.cond.endif_label == 0)
2191 cond_stack->data.cond.endif_label = gen_label_rtx ();
2192 emit_jump (cond_stack->data.cond.endif_label);
2193 emit_label (cond_stack->data.cond.next_label);
2194 cond_stack->data.cond.next_label = gen_label_rtx ();
2195 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2198 /* Generate RTL between the then-clause and the else-clause
2199 of an if-then-else. */
2202 expand_start_else ()
2204 if (cond_stack->data.cond.endif_label == 0)
2205 cond_stack->data.cond.endif_label = gen_label_rtx ();
2207 if (output_bytecode)
2209 bc_expand_start_else ();
2213 emit_jump (cond_stack->data.cond.endif_label);
2214 emit_label (cond_stack->data.cond.next_label);
2215 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2218 /* Generate RTL for the end of an if-then.
2219 Pop the record for it off of cond_stack. */
2224 struct nesting *thiscond = cond_stack;
2226 if (output_bytecode)
2227 bc_expand_end_cond ();
2230 do_pending_stack_adjust ();
2231 if (thiscond->data.cond.next_label)
2232 emit_label (thiscond->data.cond.next_label);
2233 if (thiscond->data.cond.endif_label)
2234 emit_label (thiscond->data.cond.endif_label);
2237 POPSTACK (cond_stack);
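/* For illustration only: an if-then-elseif-else of the form

	if (A) S1; else if (B) S2; else S3;

   would be expanded by the parser roughly as

	expand_start_cond (A, 0);
	... expand S1 ...
	expand_start_elseif (B);
	... expand S2 ...
	expand_start_else ();
	... expand S3 ...
	expand_end_cond ();  */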
2242 /* Generate code for the start of an if-then. COND is the expression
2243 whose truth is to be tested; if EXITFLAG is nonzero this conditional
2244 is to be visible to exit_something. It is assumed that the caller
2245 has pushed the previous context on the cond stack. */
2247 bc_expand_start_cond (cond, exitflag)
2251 struct nesting *thiscond = cond_stack;
2253 thiscond->data.case_stmt.nominal_type = cond;
2254 bc_expand_expr (cond);
2255 bc_emit_bytecode (jumpifnot);
2256 bc_emit_bytecode_labelref (thiscond->exit_label->bc_label);
2258 #ifdef DEBUG_PRINT_CODE
2259 fputc ('\n', stderr);
/* Generate the label for the end of an if
   (with or without an else clause).  */
2266 bc_expand_end_cond ()
2268 struct nesting *thiscond = cond_stack;
2270 bc_emit_bytecode_labeldef (thiscond->exit_label->bc_label);
/* Generate code for the start of the else- clause of
   an if-then-else.  */
2276 bc_expand_start_else ()
2278 struct nesting *thiscond = cond_stack;
2280 thiscond->data.cond.endif_label = thiscond->exit_label;
2281 thiscond->exit_label = gen_label_rtx ();
2282 bc_emit_bytecode (jump);
2283 bc_emit_bytecode_labelref (thiscond->exit_label->bc_label);
2285 #ifdef DEBUG_PRINT_CODE
2286 fputc ('\n', stderr);
2289 bc_emit_bytecode_labeldef (thiscond->data.cond.endif_label->bc_label);
2292 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2293 loop should be exited by `exit_something'. This is a loop for which
`expand_continue_loop' will jump to the top of the loop.
Make an entry on loop_stack to record the labels associated with
this loop.  */
2300 expand_start_loop (exit_flag)
2303 register struct nesting *thisloop = ALLOC_NESTING ();
2305 /* Make an entry on loop_stack for the loop we are entering. */
2307 thisloop->next = loop_stack;
2308 thisloop->all = nesting_stack;
2309 thisloop->depth = ++nesting_depth;
2310 thisloop->data.loop.start_label = gen_label_rtx ();
2311 thisloop->data.loop.end_label = gen_label_rtx ();
2312 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2313 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2314 loop_stack = thisloop;
2315 nesting_stack = thisloop;
2317 if (output_bytecode)
2319 bc_emit_bytecode_labeldef (thisloop->data.loop.start_label->bc_label);
2323 do_pending_stack_adjust ();
2325 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
2326 emit_label (thisloop->data.loop.start_label);
2331 /* Like expand_start_loop but for a loop where the continuation point
2332 (for expand_continue_loop) will be specified explicitly. */
2335 expand_start_loop_continue_elsewhere (exit_flag)
2338 struct nesting *thisloop = expand_start_loop (exit_flag);
2339 loop_stack->data.loop.continue_label = gen_label_rtx ();
2343 /* Specify the continuation point for a loop started with
2344 expand_start_loop_continue_elsewhere.
Use this at the point in the code to which a continue statement
should jump.  */
2349 expand_loop_continue_here ()
2351 if (output_bytecode)
2353 bc_emit_bytecode_labeldef (loop_stack->data.loop.continue_label->bc_label);
2356 do_pending_stack_adjust ();
2357 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
2358 emit_label (loop_stack->data.loop.continue_label);
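/* For illustration only: a `for (INIT; COND; INC) BODY' statement,
   whose INC part must run at `continue', might be expanded roughly as

	... expand INIT ...
	expand_start_loop_continue_elsewhere (1);
	expand_exit_loop_if_false (0, COND);
	... expand BODY ...
	expand_loop_continue_here ();
	... expand INC ...
	expand_end_loop ();

   so that expand_continue_loop jumps to the INC code instead of the
   top of the loop.  */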
2363 bc_expand_end_loop ()
2365 struct nesting *thisloop = loop_stack;
2367 bc_emit_bytecode (jump);
2368 bc_emit_bytecode_labelref (thisloop->data.loop.start_label->bc_label);
2370 #ifdef DEBUG_PRINT_CODE
2371 fputc ('\n', stderr);
2374 bc_emit_bytecode_labeldef (thisloop->exit_label->bc_label);
2375 POPSTACK (loop_stack);
2380 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2381 Pop the block off of loop_stack. */
2387 register rtx start_label;
2388 rtx last_test_insn = 0;
2391 if (output_bytecode)
2393 bc_expand_end_loop ();
2397 insn = get_last_insn ();
2398 start_label = loop_stack->data.loop.start_label;
2400 /* Mark the continue-point at the top of the loop if none elsewhere. */
2401 if (start_label == loop_stack->data.loop.continue_label)
2402 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2404 do_pending_stack_adjust ();
2406 /* If optimizing, perhaps reorder the loop. If the loop
2407 starts with a conditional exit, roll that to the end
2408 where it will optimize together with the jump back.
2410 We look for the last conditional branch to the exit that we encounter
2411 before hitting 30 insns or a CALL_INSN. If we see an unconditional
2412 branch to the exit first, use it.
2414 We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
2415 because moving them is not valid. */
2419 ! (GET_CODE (insn) == JUMP_INSN
2420 && GET_CODE (PATTERN (insn)) == SET
2421 && SET_DEST (PATTERN (insn)) == pc_rtx
2422 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2424 /* Scan insns from the top of the loop looking for a qualified
2425 conditional exit. */
2426 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2427 insn = NEXT_INSN (insn))
2429 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
2432 if (GET_CODE (insn) == NOTE
2433 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2434 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2437 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2440 if (last_test_insn && num_insns > 30)
2443 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
2444 && SET_DEST (PATTERN (insn)) == pc_rtx
2445 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
2446 && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
2447 && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
2448 == loop_stack->data.loop.end_label))
2449 || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
2450 && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
2451 == loop_stack->data.loop.end_label))))
2452 last_test_insn = insn;
2454 if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
2455 && GET_CODE (PATTERN (insn)) == SET
2456 && SET_DEST (PATTERN (insn)) == pc_rtx
2457 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
2458 && (XEXP (SET_SRC (PATTERN (insn)), 0)
2459 == loop_stack->data.loop.end_label))
2460 /* Include BARRIER. */
2461 last_test_insn = NEXT_INSN (insn);
2464 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2466 /* We found one. Move everything from there up
2467 to the end of the loop, and add a jump into the loop
2468 to jump to there. */
2469 register rtx newstart_label = gen_label_rtx ();
2470 register rtx start_move = start_label;
2472 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2473 then we want to move this note also. */
2474 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2475 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2476 == NOTE_INSN_LOOP_CONT))
2477 start_move = PREV_INSN (start_move);
2479 emit_label_after (newstart_label, PREV_INSN (start_move));
2480 reorder_insns (start_move, last_test_insn, get_last_insn ());
2481 emit_jump_insn_after (gen_jump (start_label),
2482 PREV_INSN (newstart_label));
2483 emit_barrier_after (PREV_INSN (newstart_label));
2484 start_label = newstart_label;
2488 emit_jump (start_label);
2489 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2490 emit_label (loop_stack->data.loop.end_label);
2492 POPSTACK (loop_stack);
2497 /* Generate a jump to the current loop's continue-point.
2498 This is usually the top of the loop, but may be specified
2499 explicitly elsewhere. If not currently inside a loop,
2500 return 0 and do nothing; caller will print an error message. */
2503 expand_continue_loop (whichloop)
2504 struct nesting *whichloop;
2508 whichloop = loop_stack;
2511 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2516 /* Generate a jump to exit the current loop. If not currently inside a loop,
2517 return 0 and do nothing; caller will print an error message. */
2520 expand_exit_loop (whichloop)
2521 struct nesting *whichloop;
2525 whichloop = loop_stack;
2528 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2532 /* Generate a conditional jump to exit the current loop if COND
2533 evaluates to zero. If not currently inside a loop,
2534 return 0 and do nothing; caller will print an error message. */
2537 expand_exit_loop_if_false (whichloop, cond)
2538 struct nesting *whichloop;
2543 whichloop = loop_stack;
2546 if (output_bytecode)
2548 bc_expand_expr (cond);
2549 bc_expand_goto_internal (jumpifnot,
2550 whichloop->exit_label->bc_label, NULL_RTX);
2553 do_jump (cond, whichloop->data.loop.end_label, NULL_RTX);
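/* For illustration only: a simple `while (COND) BODY' might be
   expanded roughly as

	expand_start_loop (1);
	expand_exit_loop_if_false (0, COND);
	... expand BODY ...
	expand_end_loop ();

   with `break' in BODY becoming expand_exit_loop (0) and `continue'
   becoming expand_continue_loop (0).  */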
2558 /* Return non-zero if we should preserve sub-expressions as separate
2559 pseudos. We never do so if we aren't optimizing. We always do so
2560 if -fexpensive-optimizations.
2562 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2563 the loop may still be a small one. */
2566 preserve_subexpressions_p ()
2570 if (flag_expensive_optimizations)
2573 if (optimize == 0 || loop_stack == 0)
2576 insn = get_last_insn_anywhere ();
2579 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2580 < n_non_fixed_regs * 3));
2584 /* Generate a jump to exit the current loop, conditional, binding contour
2585 or case statement. Not all such constructs are visible to this function,
2586 only those started with EXIT_FLAG nonzero. Individual languages use
the EXIT_FLAG parameter to control which kinds of constructs you can
exit this way.
2590 If not currently inside anything that can be exited,
2591 return 0 and do nothing; caller will print an error message. */
2594 expand_exit_something ()
2598 for (n = nesting_stack; n; n = n->all)
2599 if (n->exit_label != 0)
2601 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2608 /* Generate RTL to return from the current function, with no value.
2609 (That is, we do not do anything about returning any value.) */
2612 expand_null_return ()
2614 struct nesting *block = block_stack;
2617 if (output_bytecode)
2619 bc_emit_instruction (ret);
2623 /* Does any pending block have cleanups? */
2625 while (block && block->data.block.cleanups == 0)
2626 block = block->next;
2628 /* If yes, use a goto to return, since that runs cleanups. */
2630 expand_null_return_1 (last_insn, block != 0);
2633 /* Generate RTL to return from the current function, with value VAL. */
2636 expand_value_return (val)
2639 struct nesting *block = block_stack;
2640 rtx last_insn = get_last_insn ();
2641 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2643 /* Copy the value to the return location
2644 unless it's already there. */
2646 if (return_reg != val)
2648 #ifdef PROMOTE_FUNCTION_RETURN
2649 enum machine_mode mode = DECL_MODE (DECL_RESULT (current_function_decl));
2650 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2651 int unsignedp = TREE_UNSIGNED (type);
2653 if (TREE_CODE (type) == INTEGER_TYPE || TREE_CODE (type) == ENUMERAL_TYPE
2654 || TREE_CODE (type) == BOOLEAN_TYPE || TREE_CODE (type) == CHAR_TYPE
2655 || TREE_CODE (type) == REAL_TYPE || TREE_CODE (type) == POINTER_TYPE
2656 || TREE_CODE (type) == OFFSET_TYPE)
2658 PROMOTE_MODE (mode, unsignedp, type);
2661 if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
2662 convert_move (return_reg, val, unsignedp);
2665 emit_move_insn (return_reg, val);
2667 if (GET_CODE (return_reg) == REG
2668 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2669 emit_insn (gen_rtx (USE, VOIDmode, return_reg));
2671 /* Does any pending block have cleanups? */
2673 while (block && block->data.block.cleanups == 0)
2674 block = block->next;
2676 /* If yes, use a goto to return, since that runs cleanups.
2677 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2679 expand_null_return_1 (last_insn, block != 0);
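/* For illustration: on a target that defines PROMOTE_FUNCTION_RETURN
   and whose PROMOTE_MODE widens QImode to SImode (an assumption about
   the target, not a requirement), a `char' value computed in a QImode
   pseudo reaches the return register through the convert_move call
   above rather than through a plain move.  */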
2682 /* Output a return with no value. If LAST_INSN is nonzero,
2683 pretend that the return takes place after LAST_INSN.
2684 If USE_GOTO is nonzero then don't use a return instruction;
2685 go to the return label instead. This causes any cleanups
2686 of pending blocks to be executed normally. */
2689 expand_null_return_1 (last_insn, use_goto)
2693 rtx end_label = cleanup_label ? cleanup_label : return_label;
2695 clear_pending_stack_adjust ();
2696 do_pending_stack_adjust ();
2699 /* PCC-struct return always uses an epilogue. */
2700 if (current_function_returns_pcc_struct || use_goto)
2703 end_label = return_label = gen_label_rtx ();
2704 expand_goto_internal (NULL_TREE, end_label, last_insn);
2708 /* Otherwise output a simple return-insn if one is available,
2709 unless it won't do the job. */
2711 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2713 emit_jump_insn (gen_return ());
2719 /* Otherwise jump to the epilogue. */
2720 expand_goto_internal (NULL_TREE, end_label, last_insn);
2723 /* Generate RTL to evaluate the expression RETVAL and return it
2724 from the current function. */
2727 expand_return (retval)
2730 /* If there are any cleanups to be performed, then they will
2731 be inserted following LAST_INSN. It is desirable
2732 that the last_insn, for such purposes, should be the
2733 last insn before computing the return value. Otherwise, cleanups
2734 which call functions can clobber the return value. */
2735 /* ??? rms: I think that is erroneous, because in C++ it would
2736 run destructors on variables that might be used in the subsequent
2737 computation of the return value. */
2739 register rtx val = 0;
2743 struct nesting *block;
/* Bytecode returns are quite simple: just leave the result on the
   arithmetic stack.  */
2747 if (output_bytecode)
2749 bc_expand_expr (retval);
2750 bc_emit_instruction (ret);
2754 /* If function wants no value, give it none. */
2755 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2757 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2759 expand_null_return ();
2763 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2764 cleanups = any_pending_cleanups (1);
2766 if (TREE_CODE (retval) == RESULT_DECL)
2767 retval_rhs = retval;
2768 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2769 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2770 retval_rhs = TREE_OPERAND (retval, 1);
2771 else if (TREE_TYPE (retval) == void_type_node)
2772 /* Recognize tail-recursive call to void function. */
2773 retval_rhs = retval;
2775 retval_rhs = NULL_TREE;
2777 /* Only use `last_insn' if there are cleanups which must be run. */
2778 if (cleanups || cleanup_label != 0)
2779 last_insn = get_last_insn ();
2781 /* Distribute return down conditional expr if either of the sides
2782 may involve tail recursion (see test below). This enhances the number
2783 of tail recursions we see. Don't do this always since it can produce
2784 sub-optimal code in some cases and we distribute assignments into
2785 conditional expressions when it would help. */
2787 if (optimize && retval_rhs != 0
2788 && frame_offset == 0
2789 && TREE_CODE (retval_rhs) == COND_EXPR
2790 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2791 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2793 rtx label = gen_label_rtx ();
2796 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2797 expr = build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2798 DECL_RESULT (current_function_decl),
2799 TREE_OPERAND (retval_rhs, 1));
2800 TREE_SIDE_EFFECTS (expr) = 1;
2801 expand_return (expr);
2804 expr = build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2805 DECL_RESULT (current_function_decl),
2806 TREE_OPERAND (retval_rhs, 2));
2807 TREE_SIDE_EFFECTS (expr) = 1;
2808 expand_return (expr);
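/* For illustration: under the conditions tested above,
   `return c ? f () : g ();' is rewritten roughly as

	if (! c) goto L;
	return f ();
     L: return g ();

   so that each arm gets its own chance at the tail-recursion
   optimization below.  */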
2812 /* For tail-recursive call to current function,
2813 just jump back to the beginning.
2814 It's unsafe if any auto variable in this function
2815 has its address taken; for simplicity,
2816 require stack frame to be empty. */
2817 if (optimize && retval_rhs != 0
2818 && frame_offset == 0
2819 && TREE_CODE (retval_rhs) == CALL_EXPR
2820 && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
2821 && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
2822 /* Finish checking validity, and if valid emit code
2823 to set the argument variables for the new call. */
2824 && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
2825 DECL_ARGUMENTS (current_function_decl)))
2827 if (tail_recursion_label == 0)
2829 tail_recursion_label = gen_label_rtx ();
2830 emit_label_after (tail_recursion_label,
2831 tail_recursion_reentry);
2834 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
2839 /* This optimization is safe if there are local cleanups
2840 because expand_null_return takes care of them.
2841 ??? I think it should also be safe when there is a cleanup label,
2842 because expand_null_return takes care of them, too.
2843 Any reason why not? */
2844 if (HAVE_return && cleanup_label == 0
2845 && ! current_function_returns_pcc_struct
2846 && BRANCH_COST <= 1)
2848 /* If this is return x == y; then generate
2849 if (x == y) return 1; else return 0;
2850 if we can do it with explicit return insns and
2851 branches are cheap. */
2853 switch (TREE_CODE (retval_rhs))
2861 case TRUTH_ANDIF_EXPR:
2862 case TRUTH_ORIF_EXPR:
2863 case TRUTH_AND_EXPR:
2865 case TRUTH_NOT_EXPR:
2866 case TRUTH_XOR_EXPR:
2867 op0 = gen_label_rtx ();
2868 jumpifnot (retval_rhs, op0);
2869 expand_value_return (const1_rtx);
2871 expand_value_return (const0_rtx);
2875 #endif /* HAVE_return */
2879 && TREE_TYPE (retval_rhs) != void_type_node
2880 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2882 /* Calculate the return value into a pseudo reg. */
2883 val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2885 /* All temporaries have now been used. */
2887 /* Return the calculated value, doing cleanups first. */
2888 expand_value_return (val);
2892 /* No cleanups or no hard reg used;
2893 calculate value into hard return reg. */
2894 expand_expr (retval, const0_rtx, VOIDmode, 0);
2897 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2901 /* Return 1 if the end of the generated RTX is not a barrier.
2902 This means code already compiled can drop through. */
2905 drop_through_at_end_p ()
2907 rtx insn = get_last_insn ();
2908 while (insn && GET_CODE (insn) == NOTE)
2909 insn = PREV_INSN (insn);
2910 return insn && GET_CODE (insn) != BARRIER;
2913 /* Emit code to alter this function's formal parms for a tail-recursive call.
2914 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2915 FORMALS is the chain of decls of formals.
2916 Return 1 if this can be done;
2917 otherwise return 0 and do not emit any code. */
2920 tail_recursion_args (actuals, formals)
2921 tree actuals, formals;
2923 register tree a = actuals, f = formals;
2925 register rtx *argvec;
2927 /* Check that number and types of actuals are compatible
2928 with the formals. This is not always true in valid C code.
2929 Also check that no formal needs to be addressable
2930 and that all formals are scalars. */
2932 /* Also count the args. */
2934 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2936 if (TREE_TYPE (TREE_VALUE (a)) != TREE_TYPE (f))
2938 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2941 if (a != 0 || f != 0)
2944 /* Compute all the actuals. */
2946 argvec = (rtx *) alloca (i * sizeof (rtx));
2948 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2949 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
2951 /* Find which actual values refer to current values of previous formals.
2952 Copy each of them now, before any formal is changed. */
2954 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2958 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2959 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2960 { copy = 1; break; }
2962 argvec[i] = copy_to_reg (argvec[i]);
2965 /* Store the values of the actuals into the formals. */
2967 for (f = formals, a = actuals, i = 0; f;
2968 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2970 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
2971 emit_move_insn (DECL_RTL (f), argvec[i]);
2973 convert_move (DECL_RTL (f), argvec[i],
2974 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
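/* For illustration: when expanding `return f (y, x);' inside
   `f (x, y)', the second actual refers to the current value of the
   first formal, which the first store would clobber; the copy loop
   above therefore copies that value to a fresh pseudo before any
   formal is changed.  */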
2981 /* Generate the RTL code for entering a binding contour.
2982 The variables are declared one by one, by calls to `expand_decl'.
2984 EXIT_FLAG is nonzero if this construct should be visible to
2985 `exit_something'. */
2988 expand_start_bindings (exit_flag)
2991 struct nesting *thisblock = ALLOC_NESTING ();
2994 if (!output_bytecode)
2995 note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
2997 /* Make an entry on block_stack for the block we are entering. */
2999 thisblock->next = block_stack;
3000 thisblock->all = nesting_stack;
3001 thisblock->depth = ++nesting_depth;
3002 thisblock->data.block.stack_level = 0;
3003 thisblock->data.block.cleanups = 0;
3004 thisblock->data.block.function_call_count = 0;
3008 if (block_stack->data.block.cleanups == NULL_TREE
3009 && (block_stack->data.block.outer_cleanups == NULL_TREE
3010 || block_stack->data.block.outer_cleanups == empty_cleanup_list))
3011 thisblock->data.block.outer_cleanups = empty_cleanup_list;
3013 thisblock->data.block.outer_cleanups
3014 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3015 block_stack->data.block.outer_cleanups);
3018 thisblock->data.block.outer_cleanups = 0;
3022 && !(block_stack->data.block.cleanups == NULL_TREE
3023 && block_stack->data.block.outer_cleanups == NULL_TREE))
3024 thisblock->data.block.outer_cleanups
3025 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3026 block_stack->data.block.outer_cleanups);
3028 thisblock->data.block.outer_cleanups = 0;
3030 thisblock->data.block.label_chain = 0;
3031 thisblock->data.block.innermost_stack_block = stack_block_stack;
3032 thisblock->data.block.first_insn = note;
3033 thisblock->data.block.block_start_count = ++block_start_count;
3034 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3035 block_stack = thisblock;
3036 nesting_stack = thisblock;
3038 if (!output_bytecode)
3040 /* Make a new level for allocating stack slots. */
3045 /* Given a pointer to a BLOCK node, save a pointer to the most recently
generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
BLOCK node.  */
3050 remember_end_note (block)
3051 register tree block;
3053 BLOCK_END_NOTE (block) = last_block_end_note;
3054 last_block_end_note = NULL_RTX;
3057 /* Generate RTL code to terminate a binding contour.
3058 VARS is the chain of VAR_DECL nodes
3059 for the variables bound in this contour.
3060 MARK_ENDS is nonzero if we should put a note at the beginning
3061 and end of this binding contour.
3063 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3064 (That is true automatically if the contour has a saved stack level.) */
3067 expand_end_bindings (vars, mark_ends, dont_jump_in)
3072 register struct nesting *thisblock = block_stack;
3075 if (output_bytecode)
3077 bc_expand_end_bindings (vars, mark_ends, dont_jump_in);
3082 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3083 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL
3084 && ! DECL_IN_SYSTEM_HEADER (decl))
3085 warning_with_decl (decl, "unused variable `%s'");
3087 if (thisblock->exit_label)
3089 do_pending_stack_adjust ();
3090 emit_label (thisblock->exit_label);
3093 /* If necessary, make a handler for nonlocal gotos taking
3094 place in the function calls in this block. */
3095 if (function_call_count != thisblock->data.block.function_call_count
3097 /* Make handler for outermost block
3098 if there were any nonlocal gotos to this function. */
3099 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3100 /* Make handler for inner block if it has something
3101 special to do when you jump out of it. */
3102 : (thisblock->data.block.cleanups != 0
3103 || thisblock->data.block.stack_level != 0)))
3106 rtx afterward = gen_label_rtx ();
3107 rtx handler_label = gen_label_rtx ();
3108 rtx save_receiver = gen_reg_rtx (Pmode);
3110 /* Don't let jump_optimize delete the handler. */
3111 LABEL_PRESERVE_P (handler_label) = 1;
3113 /* Record the handler address in the stack slot for that purpose,
3114 during this block, saving and restoring the outer value. */
3115 if (thisblock->next != 0)
3117 emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
3118 emit_insn_before (gen_move_insn (save_receiver,
3119 nonlocal_goto_handler_slot),
3120 thisblock->data.block.first_insn);
3122 emit_insn_before (gen_move_insn (nonlocal_goto_handler_slot,
3123 gen_rtx (LABEL_REF, Pmode,
3125 thisblock->data.block.first_insn);
3127 /* Jump around the handler; it runs only when specially invoked. */
3128 emit_jump (afterward);
3129 emit_label (handler_label);
3131 #ifdef HAVE_nonlocal_goto
3132 if (! HAVE_nonlocal_goto)
3134 /* First adjust our frame pointer to its actual value. It was
3135 previously set to the start of the virtual area corresponding to
3136 the stacked variables when we branched here and now needs to be
3137 adjusted to the actual hardware fp value.
Assignments to virtual registers are converted by
3140 instantiate_virtual_regs into the corresponding assignment
3141 to the underlying register (fp in this case) that makes
3142 the original assignment true.
3143 So the following insn will actually be
3144 decrementing fp by STARTING_FRAME_OFFSET. */
3145 emit_move_insn (virtual_stack_vars_rtx, frame_pointer_rtx);
3147 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
3148 if (fixed_regs[ARG_POINTER_REGNUM])
3150 #ifdef ELIMINABLE_REGS
3151 /* If the argument pointer can be eliminated in favor of the
3152 frame pointer, we don't need to restore it. We assume here
3153 that if such an elimination is present, it can always be used.
3154 This is the case on all known machines; if we don't make this
3155 assumption, we do unnecessary saving on many machines. */
3156 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3159 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
3160 if (elim_regs[i].from == ARG_POINTER_REGNUM
3161 && elim_regs[i].to == FRAME_POINTER_REGNUM)
3164 if (i == sizeof elim_regs / sizeof elim_regs [0])
3167 /* Now restore our arg pointer from the address at which it
3168 was saved in our stack frame.
If there hasn't been space allocated for it yet, make
some now.  */
3171 if (arg_pointer_save_area == 0)
3172 arg_pointer_save_area
3173 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3174 emit_move_insn (virtual_incoming_args_rtx,
3175 /* We need a pseudo here, or else
3176 instantiate_virtual_regs_1 complains. */
3177 copy_to_reg (arg_pointer_save_area));
3182 /* The handler expects the desired label address in the static chain
3183 register. It tests the address and does an appropriate jump
3184 to whatever label is desired. */
3185 for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
3186 /* Skip any labels we shouldn't be able to jump to from here. */
3187 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3189 rtx not_this = gen_label_rtx ();
3190 rtx this = gen_label_rtx ();
3191 do_jump_if_equal (static_chain_rtx,
3192 gen_rtx (LABEL_REF, Pmode, DECL_RTL (TREE_VALUE (link))),
3194 emit_jump (not_this);
3196 expand_goto (TREE_VALUE (link));
3197 emit_label (not_this);
3199 /* If label is not recognized, abort. */
3200 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0,
3202 emit_label (afterward);
3205 /* Don't allow jumping into a block that has cleanups or a stack level. */
3207 || thisblock->data.block.stack_level != 0
3208 || thisblock->data.block.cleanups != 0)
3210 struct label_chain *chain;
3212 /* Any labels in this block are no longer valid to go to.
3213 Mark them to cause an error message. */
3214 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3216 DECL_TOO_LATE (chain->label) = 1;
3217 /* If any goto without a fixup came to this label,
3218 that must be an error, because gotos without fixups
3219 come from outside all saved stack-levels and all cleanups. */
3220 if (TREE_ADDRESSABLE (chain->label))
3221 error_with_decl (chain->label,
3222 "label `%s' used before containing binding contour");
3226 /* Restore stack level in effect before the block
3227 (only if variable-size objects allocated). */
3228 /* Perform any cleanups associated with the block. */
3230 if (thisblock->data.block.stack_level != 0
3231 || thisblock->data.block.cleanups != 0)
3233 /* Don't let cleanups affect ({...}) constructs. */
3234 int old_expr_stmts_for_value = expr_stmts_for_value;
3235 rtx old_last_expr_value = last_expr_value;
3236 tree old_last_expr_type = last_expr_type;
3237 expr_stmts_for_value = 0;
3239 /* Do the cleanups. */
3240 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE);
3241 do_pending_stack_adjust ();
3243 expr_stmts_for_value = old_expr_stmts_for_value;
3244 last_expr_value = old_last_expr_value;
3245 last_expr_type = old_last_expr_type;
3247 /* Restore the stack level. */
3249 if (thisblock->data.block.stack_level != 0)
3251 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3252 thisblock->data.block.stack_level, NULL_RTX);
3253 if (nonlocal_goto_handler_slot != 0)
3254 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3258 /* Any gotos out of this block must also do these things.
Also report any gotos with fixups that came to labels in this
block.  */
3261 fixup_gotos (thisblock,
3262 thisblock->data.block.stack_level,
3263 thisblock->data.block.cleanups,
3264 thisblock->data.block.first_insn,
3268 /* Mark the beginning and end of the scope if requested.
3269 We do this now, after running cleanups on the variables
3270 just going out of scope, so they are in scope for their cleanups. */
3273 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3275 /* Get rid of the beginning-mark if we don't make an end-mark. */
3276 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3278 /* If doing stupid register allocation, make sure lives of all
3279 register variables declared here extend thru end of scope. */
3282 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3284 rtx rtl = DECL_RTL (decl);
3285 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
3289 /* Restore block_stack level for containing block. */
3291 stack_block_stack = thisblock->data.block.innermost_stack_block;
3292 POPSTACK (block_stack);
3294 /* Pop the stack slot nesting and free any slots at this level. */
3299 /* End a binding contour.
3300 VARS is the chain of VAR_DECL nodes for the variables bound
in this contour.  MARK_ENDS is nonzero if we should put a note
at the beginning and end of this binding contour.
DONT_JUMP_IN is nonzero if it is not valid to jump into this
contour.  */
3307 bc_expand_end_bindings (vars, mark_ends, dont_jump_in)
3312 struct nesting *thisbind = nesting_stack;
3316 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3317 if (! TREE_USED (TREE_VALUE (decl)) && TREE_CODE (TREE_VALUE (decl)) == VAR_DECL)
warning_with_decl (TREE_VALUE (decl), "unused variable `%s'");
3320 bc_emit_bytecode_labeldef (thisbind->exit_label->bc_label);
3322 /* Pop block/bindings off stack */
3323 POPSTACK (nesting_stack);
3324 POPSTACK (block_stack);
3327 /* Generate RTL for the automatic variable declaration DECL.
3328 (Other kinds of declarations are simply ignored if seen here.)
3329 CLEANUP is an expression to be executed at exit from this binding contour;
3330 for example, in C++, it might call the destructor for this variable.
3332 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
3333 either before or after calling `expand_decl' but before compiling
3334 any subsequent expressions. This is because CLEANUP may be expanded
3335 more than once, on different branches of execution.
3336 For the same reason, CLEANUP may not contain a CALL_EXPR
3337 except as its topmost node--else `preexpand_calls' would get confused.
3339 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3340 that is not associated with any particular variable.
3342 There is no special support here for C++ constructors.
3343 They should be handled by the proper code in DECL_INITIAL. */
3349 struct nesting *thisblock = block_stack;
3352 if (output_bytecode)
3354 bc_expand_decl (decl, 0);
3358 type = TREE_TYPE (decl);
3360 /* Only automatic variables need any expansion done.
3361 Static and external variables, and external functions,
3362 will be handled by `assemble_variable' (called from finish_decl).
3363 TYPE_DECL and CONST_DECL require nothing.
3364 PARM_DECLs are handled in `assign_parms'. */
3366 if (TREE_CODE (decl) != VAR_DECL)
3368 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3371 /* Create the RTL representation for the variable. */
3373 if (type == error_mark_node)
3374 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
3375 else if (DECL_SIZE (decl) == 0)
3376 /* Variable with incomplete type. */
3378 if (DECL_INITIAL (decl) == 0)
3379 /* Error message was already done; now avoid a crash. */
3380 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
3382 /* An initializer is going to decide the size of this array.
3383 Until we know the size, represent its address with a reg. */
3384 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
3386 else if (DECL_MODE (decl) != BLKmode
/* If -ffloat-store, don't put explicit float vars
   into registers.  */
3389 && !(flag_float_store
3390 && TREE_CODE (type) == REAL_TYPE)
3391 && ! TREE_THIS_VOLATILE (decl)
3392 && ! TREE_ADDRESSABLE (decl)
3393 && (DECL_REGISTER (decl) || ! obey_regdecls))
3395 /* Automatic variable that can go in a register. */
3396 enum machine_mode reg_mode = DECL_MODE (decl);
3397 int unsignedp = TREE_UNSIGNED (type);
3399 if (TREE_CODE (type) == INTEGER_TYPE || TREE_CODE (type) == ENUMERAL_TYPE
3400 || TREE_CODE (type) == BOOLEAN_TYPE || TREE_CODE (type) == CHAR_TYPE
3401 || TREE_CODE (type) == REAL_TYPE || TREE_CODE (type) == POINTER_TYPE
3402 || TREE_CODE (type) == OFFSET_TYPE)
3404 PROMOTE_MODE (reg_mode, unsignedp, type);
3407 if (TREE_CODE (type) == COMPLEX_TYPE)
3409 rtx realpart, imagpart;
3410 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (type));
3412 /* For a complex type variable, make a CONCAT of two pseudos
3413 so that the real and imaginary parts
3414 can be allocated separately. */
3415 realpart = gen_reg_rtx (partmode);
3416 REG_USERVAR_P (realpart) = 1;
3417 imagpart = gen_reg_rtx (partmode);
3418 REG_USERVAR_P (imagpart) = 1;
3419 DECL_RTL (decl) = gen_rtx (CONCAT, reg_mode, realpart, imagpart);
3423 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3424 if (TREE_CODE (type) == POINTER_TYPE)
3425 mark_reg_pointer (DECL_RTL (decl));
3426 REG_USERVAR_P (DECL_RTL (decl)) = 1;
3429 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST)
3431 /* Variable of fixed size that goes on the stack. */
3435 /* If we previously made RTL for this decl, it must be an array
3436 whose size was determined by the initializer.
3437 The old address was a register; set that register now
3438 to the proper address. */
3439 if (DECL_RTL (decl) != 0)
3441 if (GET_CODE (DECL_RTL (decl)) != MEM
3442 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3444 oldaddr = XEXP (DECL_RTL (decl), 0);
3448 = assign_stack_temp (DECL_MODE (decl),
3449 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
3450 + BITS_PER_UNIT - 1)
3454 /* Set alignment we actually gave this decl. */
3455 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3456 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3460 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3461 if (addr != oldaddr)
3462 emit_move_insn (oldaddr, addr);
3465 /* If this is a memory ref that contains aggregate components,
3466 mark it as such for cse and loop optimize. */
3467 MEM_IN_STRUCT_P (DECL_RTL (decl))
3468 = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
3469 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
3470 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
3471 || TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
3473 /* If this is in memory because of -ffloat-store,
3474 set the volatile bit, to prevent optimizations from
3475 undoing the effects. */
3476 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
3477 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3481 /* Dynamic-size object: must push space on the stack. */
/* Record the stack pointer on entry to block, if we have
3486 not already done so. */
3487 if (thisblock->data.block.stack_level == 0)
3489 do_pending_stack_adjust ();
3490 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3491 &thisblock->data.block.stack_level,
3492 thisblock->data.block.first_insn);
3493 stack_block_stack = thisblock;
3496 /* Compute the variable's size, in bytes. */
3497 size = expand_expr (size_binop (CEIL_DIV_EXPR,
3499 size_int (BITS_PER_UNIT)),
3500 NULL_RTX, VOIDmode, 0);
3503 /* This is equivalent to calling alloca. */
3504 current_function_calls_alloca = 1;
3506 /* Allocate space on the stack for the variable. */
3507 address = allocate_dynamic_stack_space (size, NULL_RTX,
3510 if (nonlocal_goto_handler_slot != 0)
3511 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
/* Reference the variable indirectly through that rtx.  */
3514 DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);
3516 /* If this is a memory ref that contains aggregate components,
3517 mark it as such for cse and loop optimize. */
3518 MEM_IN_STRUCT_P (DECL_RTL (decl))
3519 = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
3520 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
3521 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
3522 || TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
3524 /* Indicate the alignment we actually gave this variable. */
3525 #ifdef STACK_BOUNDARY
3526 DECL_ALIGN (decl) = STACK_BOUNDARY;
3528 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3532 if (TREE_THIS_VOLATILE (decl))
3533 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3534 #if 0 /* A variable is not necessarily unchanging
3535 just because it is const. RTX_UNCHANGING_P
3536 means no change in the function,
3537 not merely no change in the variable's scope.
3538 It is correct to set RTX_UNCHANGING_P if the variable's scope
3539 is the whole function. There's no convenient way to test that. */
3540 if (TREE_READONLY (decl))
3541 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
3544 /* If doing stupid register allocation, make sure life of any
3545 register variable starts here, at the start of its scope. */
3548 use_variable (DECL_RTL (decl));
3552 /* Generate code for the automatic variable declaration DECL. For
3553 most variables this just means we give it a stack offset. The
3554 compiler sometimes emits cleanups without variables and we will
3555 have to deal with those too. */
3558 bc_expand_decl (decl, cleanup)
3566 /* A cleanup with no variable. */
3573 /* Only auto variables need any work. */
3574 if (TREE_CODE (decl) != VAR_DECL || TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3577 type = TREE_TYPE (decl);
3579 if (type == error_mark_node)
3580 DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
3582 else if (DECL_SIZE (decl) == 0)
3584 /* Variable with incomplete type. The stack offset herein will be
3585 fixed later in expand_decl_init (). */
3586 DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
3588 else if (TREE_CONSTANT (DECL_SIZE (decl)))
3590 DECL_RTL (decl) = bc_allocate_local (TREE_INT_CST_LOW (DECL_SIZE (decl)) / BITS_PER_UNIT,
3594 DECL_RTL (decl) = bc_allocate_variable_array (DECL_SIZE (decl));
3597 /* Emit code to perform the initialization of a declaration DECL. */
3600 expand_decl_init (decl)
3603 int was_used = TREE_USED (decl);
3605 if (TREE_STATIC (decl))
3608 /* Compute and store the initial value now. */
3610 if (DECL_INITIAL (decl) == error_mark_node)
3612 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3613 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3614 || code == POINTER_TYPE)
3615 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3619 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3621 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3622 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3626 /* Don't let the initialization count as "using" the variable. */
3627 TREE_USED (decl) = was_used;
3629 /* Free any temporaries we made while initializing the decl. */
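/* For illustration only: for a declaration `int i = 5;' the front end
   calls `expand_decl' and then `expand_decl_init', and the latter
   amounts to emitting a line note followed by
   expand_assignment (DECL, INIT, 0, 0), as above.  */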
3633 /* Expand initialization for variable-sized types. Allocate array
using newlocalSI and set local variable, which is a pointer to the
allocated space.  */
3637 bc_expand_variable_local_init (decl)
3640 /* Evaluate size expression and coerce to SI */
3641 bc_expand_expr (DECL_SIZE (decl));
3643 /* Type sizes are always (?) of TREE_CODE INTEGER_CST, so
3644 no coercion is necessary (?) */
3646 /* emit_typecode_conversion (preferred_typecode (TYPE_MODE (DECL_SIZE (decl)),
3647 TREE_UNSIGNED (DECL_SIZE (decl))), SIcode); */
3649 /* Emit code to allocate array */
3650 bc_emit_instruction (newlocalSI);
3652 /* Store array pointer in local variable. This is the only instance
3653 where we actually want the address of the pointer to the
3654 variable-size block, rather than the pointer itself. We avoid
3655 using expand_address() since that would cause the pointer to be
3656 pushed rather than its address. Hence the hard-coded reference;
3657 notice also that the variable is always local (no global
3658 variable-size type variables). */
3660 bc_load_localaddr (DECL_RTL (decl));
3661 bc_emit_instruction (storeP);
3665 /* Emit code to initialize a declaration. */
3667 bc_expand_decl_init (decl)
3670 int org_stack_depth;
/* Static initializers are handled elsewhere.  */
3674 if (TREE_STATIC (decl))
/* Remember the original stack depth.  */
3678 org_stack_depth = stack_depth;
3680 /* If the type is variable-size, we first create its space (we ASSUME
3681 it CAN'T be static). We do this regardless of whether there's an
3682 initializer assignment or not. */
3684 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
3685 bc_expand_variable_local_init (decl);
3687 /* Expand initializer assignment */
3688 if (DECL_INITIAL (decl) == error_mark_node)
3690 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3692 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3693 || code == POINTER_TYPE)
3695 expand_assignment (TREE_TYPE (decl), decl,
3696 convert (TREE_TYPE (decl), integer_zero_node));
3698 else if (DECL_INITIAL (decl))
3699 expand_assignment (TREE_TYPE (decl), decl, DECL_INITIAL (decl));
3701 /* Restore stack depth */
3702 if (org_stack_depth > stack_depth)
3705 bc_adjust_stack (stack_depth - org_stack_depth);
3709 /* CLEANUP is an expression to be executed at exit from this binding contour;
3710 for example, in C++, it might call the destructor for this variable.
3712 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
3713 either before or after calling `expand_decl' but before compiling
3714 any subsequent expressions. This is because CLEANUP may be expanded
3715 more than once, on different branches of execution.
3716 For the same reason, CLEANUP may not contain a CALL_EXPR
3717 except as its topmost node--else `preexpand_calls' would get confused.
3719 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3720 that is not associated with any particular variable. */
3723 expand_decl_cleanup (decl, cleanup)
3726 struct nesting *thisblock = block_stack;
3728 /* Error if we are not in any block. */
3732 /* Record the cleanup if there is one. */
3736 thisblock->data.block.cleanups
3737 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3738 /* If this block has a cleanup, it belongs in stack_block_stack. */
3739 stack_block_stack = thisblock;
3744 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
3745 DECL_ELTS is the list of elements that belong to DECL's type.
3746 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
3749 expand_anon_union_decl (decl, cleanup, decl_elts)
3750 tree decl, cleanup, decl_elts;
3752 struct nesting *thisblock = block_stack;
3755 expand_decl (decl, cleanup);
3756 x = DECL_RTL (decl);
3760 tree decl_elt = TREE_VALUE (decl_elts);
3761 tree cleanup_elt = TREE_PURPOSE (decl_elts);
3762 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
3764 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
3765 instead create a new MEM rtx with the proper mode. */
3766 if (GET_CODE (x) == MEM)
3768 if (mode == GET_MODE (x))
3769 DECL_RTL (decl_elt) = x;
3772 DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0)));
3773 MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
3774 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
3777 else if (GET_CODE (x) == REG)
3779 if (mode == GET_MODE (x))
3780 DECL_RTL (decl_elt) = x;
3782 DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0);
3787 /* Record the cleanup if there is one. */
3790 thisblock->data.block.cleanups
3791 = temp_tree_cons (decl_elt, cleanup_elt,
3792 thisblock->data.block.cleanups);
3794 decl_elts = TREE_CHAIN (decl_elts);
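/* For illustration: for an anonymous `union { int i; float f; }' held
   in memory, a member whose mode matches the union's own mode shares
   the union's rtx directly, while the others get a fresh MEM in their
   own mode at the same address, as built above.  */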
3798 /* Expand a list of cleanups LIST.
3799 Elements may be expressions or may be nested lists.
3801 If DONT_DO is nonnull, then any list-element
3802 whose TREE_PURPOSE matches DONT_DO is omitted.
3803 This is sometimes used to avoid a cleanup associated with
3804 a value that is being returned out of the scope. */
3807 expand_cleanups (list, dont_do)
3812 for (tail = list; tail; tail = TREE_CHAIN (tail))
3813 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
3815 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3816 expand_cleanups (TREE_VALUE (tail), dont_do);
3819 /* Cleanups may be run multiple times. For example,
3820 when exiting a binding contour, we expand the
3821 cleanups associated with that contour. When a goto
3822 within that binding contour has a target outside that
3823 contour, it will expand all cleanups from its scope to
3824 the target. Though the cleanups are expanded multiple
3825 times, the control paths are non-overlapping so the
3826 cleanups will not be executed twice. */
3827 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
3833 /* Move all cleanups from the current block_stack
3834 to the containing block_stack, where they are assumed to
3835 have been created. If anything can cause a temporary to
3836 be created, but not expanded for more than one level of
3837 block_stacks, then this code will have to change. */
3842 struct nesting *block = block_stack;
3843 struct nesting *outer = block->next;
3845 outer->data.block.cleanups
3846 = chainon (block->data.block.cleanups,
3847 outer->data.block.cleanups);
3848 block->data.block.cleanups = 0;
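/* Return the list of cleanups recorded for the innermost binding
   contour, or 0 if we are not inside any binding contour.  */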
3852 last_cleanup_this_contour ()
3854 if (block_stack == 0)
3857 return block_stack->data.block.cleanups;
3860 /* Return 1 if there are any pending cleanups at this point.
3861 If THIS_CONTOUR is nonzero, check the current contour as well.
3862 Otherwise, look only at the contours that enclose this one. */
3865 any_pending_cleanups (this_contour)
3868 struct nesting *block;
3870 if (block_stack == 0)
3873 if (this_contour && block_stack->data.block.cleanups != NULL)
3875 if (block_stack->data.block.cleanups == 0
3876 && (block_stack->data.block.outer_cleanups == 0
3878 || block_stack->data.block.outer_cleanups == empty_cleanup_list
3883 for (block = block_stack->next; block; block = block->next)
3884 if (block->data.block.cleanups != 0)
3890 /* Enter a case (Pascal) or switch (C) statement.
3891 Push a block onto case_stack and nesting_stack
3892 to accumulate the case-labels that are seen
3893 and to record the labels generated for the statement.
3895 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
3896 Otherwise, this construct is transparent for `exit_something'.
3898 EXPR is the index-expression to be dispatched on.
3899 TYPE is its nominal type. We could simply convert EXPR to this type,
3900 but instead we take short cuts. */
3903 expand_start_case (exit_flag, expr, type, printname)
3909 register struct nesting *thiscase = ALLOC_NESTING ();
3911 /* Make an entry on case_stack for the case we are entering. */
3913 thiscase->next = case_stack;
3914 thiscase->all = nesting_stack;
3915 thiscase->depth = ++nesting_depth;
3916 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
3917 thiscase->data.case_stmt.case_list = 0;
3918 thiscase->data.case_stmt.index_expr = expr;
3919 thiscase->data.case_stmt.nominal_type = type;
3920 thiscase->data.case_stmt.default_label = 0;
3921 thiscase->data.case_stmt.num_ranges = 0;
3922 thiscase->data.case_stmt.printname = printname;
3923 thiscase->data.case_stmt.seenlabel = 0;
3924 case_stack = thiscase;
3925 nesting_stack = thiscase;
3927 if (output_bytecode)
3929 bc_expand_start_case (thiscase, expr, type, printname);
3933 do_pending_stack_adjust ();
3935 /* Make sure case_stmt.start points to something that won't
3936 need any transformation before expand_end_case. */
3937 if (GET_CODE (get_last_insn ()) != NOTE)
3938 emit_note (NULL_PTR, NOTE_INSN_DELETED);
3940 thiscase->data.case_stmt.start = get_last_insn ();
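/* For illustration only: a C `switch (EXPR) { ... }' is typically
   expanded as

	expand_start_case (1, EXPR, TYPE, PRINTNAME);
	... pushcase () or pushcase_range () for each label,
	    expanding the intervening statements as they are parsed ...
	expand_end_case (EXPR);

   where PRINTNAME is the construct's name for use in diagnostics,
   such as the "unreachable code" warning below.  */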
3944 /* Enter a case statement. It is assumed that the caller has pushed
3945 the current context onto the case stack. */
3947 bc_expand_start_case (thiscase, expr, type, printname)
3948 struct nesting *thiscase;
3953 bc_expand_expr (expr);
3954 bc_expand_conversion (TREE_TYPE (expr), type);
3956 /* For cases, the skip is a place we jump to that's emitted after
3957 the size of the jump table is known. */
3959 thiscase->data.case_stmt.skip_label = gen_label_rtx ();
3960 bc_emit_bytecode (jump);
3961 bc_emit_bytecode_labelref (thiscase->data.case_stmt.skip_label->bc_label);
3963 #ifdef DEBUG_PRINT_CODE
3964 fputc ('\n', stderr);
3969 /* Start a "dummy case statement" within which case labels are invalid
3970 and are not connected to any larger real case statement.
3971 This can be used if you don't want to let a case statement jump
3972 into the middle of certain kinds of constructs. */
3975 expand_start_case_dummy ()
3977 register struct nesting *thiscase = ALLOC_NESTING ();
3979 /* Make an entry on case_stack for the dummy. */
3981 thiscase->next = case_stack;
3982 thiscase->all = nesting_stack;
3983 thiscase->depth = ++nesting_depth;
3984 thiscase->exit_label = 0;
3985 thiscase->data.case_stmt.case_list = 0;
3986 thiscase->data.case_stmt.start = 0;
3987 thiscase->data.case_stmt.nominal_type = 0;
3988 thiscase->data.case_stmt.default_label = 0;
3989 thiscase->data.case_stmt.num_ranges = 0;
3990 case_stack = thiscase;
3991 nesting_stack = thiscase;
3994 /* End a dummy case statement. */
3997 expand_end_case_dummy ()
3999 POPSTACK (case_stack);
4002 /* Return the data type of the index-expression
4003 of the innermost case statement, or null if none. */
4006 case_index_expr_type ()
4009 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4013 /* Accumulate one case or default label inside a case or switch statement.
4014 VALUE is the value of the case (a null pointer, for a default label).
4015 The function CONVERTER, when applied to arguments T and V,
4016 converts the value V to the type T.
4018 If not currently inside a case or switch statement, return 1 and do
4019 nothing. The caller will print a language-specific error message.
4020 If VALUE is a duplicate or overlaps, return 2 and do nothing
4021 except store the (first) duplicate node in *DUPLICATE.
4022 If VALUE is out of range, return 3 and do nothing.
If we are jumping into the scope of a cleanup or var-sized array, return 5.
4024 Return 0 on success.
4026 Extended to handle range statements. */

int
pushcase (value, converter, label, duplicate)
     register tree value;
     tree (*converter) PROTO((tree, tree));
     register tree label;
     tree *duplicate;
{
  register struct case_node **l;
  register struct case_node *n;
  tree index_type;
  tree nominal_type;

  if (output_bytecode)
    return bc_pushcase (value, label);

  /* Fail if not inside a real case statement.  */
  if (! (case_stack && case_stack->data.case_stmt.start))
    return 1;

  if (stack_block_stack
      && stack_block_stack->depth > case_stack->depth)
    return 5;

  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
  nominal_type = case_stack->data.case_stmt.nominal_type;

  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
  if (index_type == error_mark_node)
    return 0;

  /* Convert VALUE to the type in which the comparisons are nominally done.  */
  if (value != 0)
    value = (*converter) (nominal_type, value);

  /* If this is the first label, warn if any insns have been emitted.  */
  if (case_stack->data.case_stmt.seenlabel == 0)
    {
      rtx insn;
      for (insn = case_stack->data.case_stmt.start;
	   insn;
	   insn = NEXT_INSN (insn))
	{
	  if (GET_CODE (insn) == CODE_LABEL)
	    break;
	  if (GET_CODE (insn) != NOTE
	      && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
	    {
	      warning ("unreachable code at beginning of %s",
		       case_stack->data.case_stmt.printname);
	      break;
	    }
	}
    }
  case_stack->data.case_stmt.seenlabel = 1;

  /* Fail if this value is out of range for the actual type of the index
     (which may be narrower than NOMINAL_TYPE).  */
  if (value != 0 && ! int_fits_type_p (value, index_type))
    return 3;

  /* Fail if this is a duplicate or overlaps another entry.  */
  if (value == 0)
    {
      if (case_stack->data.case_stmt.default_label != 0)
	{
	  *duplicate = case_stack->data.case_stmt.default_label;
	  return 2;
	}
      case_stack->data.case_stmt.default_label = label;
    }
  else
    {
      /* Find the elt in the chain before which to insert the new value,
	 to keep the chain sorted in increasing order.
	 But report an error if this element is a duplicate.  */
      for (l = &case_stack->data.case_stmt.case_list;
	   /* Keep going past elements distinctly less than VALUE.  */
	   *l != 0 && tree_int_cst_lt ((*l)->high, value);
	   l = &(*l)->right);

      if (*l)
	{
	  /* Element we will insert before must be distinctly greater;
	     overlap means error.  */
	  if (! tree_int_cst_lt (value, (*l)->low))
	    {
	      *duplicate = (*l)->code_label;
	      return 2;
	    }
	}

      /* Add this label to the chain, and succeed.
	 Copy VALUE so it is on temporary rather than momentary
	 obstack and will thus survive till the end of the case statement.  */
      n = (struct case_node *) oballoc (sizeof (struct case_node));
      n->left = 0;
      n->right = *l;
      n->high = n->low = copy_node (value);
      n->code_label = label;
      *l = n;
    }

  expand_label (label);

  return 0;
}
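
/* Illustration only, not part of GNU CC: a minimal sketch of how a front
   end might drive the routine above while expanding
   `switch (x) { case 1: ... default: ... }'.  It assumes the
   expand_start_case/expand_end_case interface defined earlier in this
   file; the labels, `convert', and the error wording are hypothetical
   front-end details.  */
#if 0
static void
example_expand_switch (cond, case_label, default_label)
     tree cond, case_label, default_label;
{
  tree duplicate;

  expand_start_case (1, cond, TREE_TYPE (cond), "switch statement");
  /* The parser would call this on seeing `case 1:'...  */
  if (pushcase (build_int_2 (1, 0), convert, case_label, &duplicate) == 2)
    error ("duplicate case value");
  /* ...and this on seeing `default:' (null VALUE; converter unused).  */
  pushcase (NULL_TREE, 0, default_label, &duplicate);
  /* ...then, after the body has been expanded:  */
  expand_end_case (cond);
}
#endif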

/* Like pushcase but this case applies to all values
   between VALUE1 and VALUE2 (inclusive).
   The return value is the same as that of pushcase
   but there is one additional error code:
   4 means the specified range was empty.  */

int
pushcase_range (value1, value2, converter, label, duplicate)
     register tree value1, value2;
     tree (*converter) PROTO((tree, tree));
     register tree label;
     tree *duplicate;
{
  register struct case_node **l;
  register struct case_node *n;
  tree index_type;
  tree nominal_type;

  /* Fail if not inside a real case statement.  */
  if (! (case_stack && case_stack->data.case_stmt.start))
    return 1;

  if (stack_block_stack
      && stack_block_stack->depth > case_stack->depth)
    return 5;

  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
  nominal_type = case_stack->data.case_stmt.nominal_type;

  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
  if (index_type == error_mark_node)
    return 0;

  /* If this is the first label, warn if any insns have been emitted.  */
  if (case_stack->data.case_stmt.seenlabel == 0)
    {
      rtx insn;
      for (insn = case_stack->data.case_stmt.start;
	   insn;
	   insn = NEXT_INSN (insn))
	{
	  if (GET_CODE (insn) == CODE_LABEL)
	    break;
	  if (GET_CODE (insn) != NOTE
	      && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
	    {
	      warning ("unreachable code at beginning of %s",
		       case_stack->data.case_stmt.printname);
	      break;
	    }
	}
    }
  case_stack->data.case_stmt.seenlabel = 1;

  /* Convert VALUEs to type in which the comparisons are nominally done.  */
  if (value1 == 0)  /* Negative infinity.  */
    value1 = TYPE_MIN_VALUE (index_type);
  value1 = (*converter) (nominal_type, value1);

  if (value2 == 0)  /* Positive infinity.  */
    value2 = TYPE_MAX_VALUE (index_type);
  value2 = (*converter) (nominal_type, value2);

  /* Fail if these values are out of range.  */
  if (! int_fits_type_p (value1, index_type))
    return 3;

  if (! int_fits_type_p (value2, index_type))
    return 3;

  /* Fail if the range is empty.  */
  if (tree_int_cst_lt (value2, value1))
    return 4;

  /* If the bounds are equal, turn this into the one-value case.  */
  if (tree_int_cst_equal (value1, value2))
    return pushcase (value1, converter, label, duplicate);

  /* Find the elt in the chain before which to insert the new value,
     to keep the chain sorted in increasing order.
     But report an error if this element is a duplicate.  */
  for (l = &case_stack->data.case_stmt.case_list;
       /* Keep going past elements distinctly less than this range.  */
       *l != 0 && tree_int_cst_lt ((*l)->high, value1);
       l = &(*l)->right);

  if (*l)
    {
      /* Element we will insert before must be distinctly greater;
	 overlap means error.  */
      if (! tree_int_cst_lt (value2, (*l)->low))
	{
	  *duplicate = (*l)->code_label;
	  return 2;
	}
    }

  /* Add this label to the chain, and succeed.
     Copy VALUE1, VALUE2 so they are on temporary rather than momentary
     obstack and will thus survive till the end of the case statement.  */

  n = (struct case_node *) oballoc (sizeof (struct case_node));
  n->left = 0;
  n->right = *l;
  n->low = copy_node (value1);
  n->high = copy_node (value2);
  n->code_label = label;
  *l = n;

  expand_label (label);

  case_stack->data.case_stmt.num_ranges++;

  return 0;
}
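
/* Illustration only: how a front end might enter the GNU range extension
   `case 1 ... 5:' through pushcase_range.  LABEL and `convert' come from
   the front end; the `== 4' check exercises the extra error code for an
   empty range.  */
#if 0
static void
example_push_range (label)
     tree label;
{
  tree duplicate;

  if (pushcase_range (build_int_2 (1, 0), build_int_2 (5, 0),
		      convert, label, &duplicate) == 4)
    error ("empty case range");
}
#endif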

/* Accumulate one case or default label; VALUE is the value of the
   case, or nil for a default label.  If not currently inside a case,
   return 1 and do nothing.  If VALUE is a duplicate or overlaps, return
   2 and do nothing.  If VALUE is out of range, return 3 and do nothing.
   Return 0 on success.  This function is a leftover from the earlier
   bytecode compiler, which was based on gcc 1.37.  It should be
   merged into pushcase.  */

static int
bc_pushcase (value, label)
     tree value;
     tree label;
{
  struct nesting *thiscase = case_stack;
  struct case_node *case_label, *new_label;

  if (! thiscase)
    return 1;

  /* Fail if duplicate, overlap, or out of type range.  */
  if (value)
    {
      value = convert (thiscase->data.case_stmt.nominal_type, value);
      if (! int_fits_type_p (value, thiscase->data.case_stmt.nominal_type))
	return 3;

      for (case_label = thiscase->data.case_stmt.case_list;
	   case_label->left; case_label = case_label->left)
	if (! tree_int_cst_lt (case_label->left->high, value))
	  break;

      if ((case_label != thiscase->data.case_stmt.case_list
	   && ! tree_int_cst_lt (case_label->high, value))
	  || (case_label->left
	      && ! tree_int_cst_lt (value, case_label->left->low)))
	return 2;

      new_label = (struct case_node *) oballoc (sizeof (struct case_node));
      new_label->low = new_label->high = copy_node (value);
      new_label->code_label = label;
      new_label->left = case_label->left;

      case_label->left = new_label;
      thiscase->data.case_stmt.num_ranges++;
    }
  else
    {
      if (thiscase->data.case_stmt.default_label)
	return 2;
      thiscase->data.case_stmt.default_label = label;
    }

  expand_label (label);
  return 0;
}

/* Called when the index of a switch statement is an enumerated type
   and there is no default label.

   Checks that all enumeration literals are covered by the case
   expressions of a switch.  Also, warn if there are any extra
   switch cases that are *not* elements of the enumerated type.

   If all enumeration literals were covered by the case expressions,
   turn one of the expressions into the default expression since it should
   not be possible to fall through such a switch.  */

void
check_for_full_enumeration_handling (type)
     tree type;
{
  register struct case_node *n;
  register struct case_node **l;
  register tree chain;
  int all_values = 1;

  if (output_bytecode)
    {
      bc_check_for_full_enumeration_handling (type);
      return;
    }

  /* The time complexity of this loop is currently O(N * M), with
     N being the number of members in the enumerated type, and
     M being the number of case expressions in the switch.  */

  for (chain = TYPE_VALUES (type);
       chain;
       chain = TREE_CHAIN (chain))
    {
      /* Find a match between enumeral and case expression, if possible.
	 Quit looking when we've gone too far (since case expressions
	 are kept sorted in ascending order).  Warn about enumerators not
	 handled in the switch statement case expression list.  */

      for (n = case_stack->data.case_stmt.case_list;
	   n && tree_int_cst_lt (n->high, TREE_VALUE (chain));
	   n = n->right);

      if (!n || tree_int_cst_lt (TREE_VALUE (chain), n->low))
	{
	  if (warn_switch)
	    warning ("enumeration value `%s' not handled in switch",
		     IDENTIFIER_POINTER (TREE_PURPOSE (chain)));
	  all_values = 0;
	}
    }

  /* Now we go the other way around; we warn if there are case
     expressions that don't correspond to enumerators.  This can
     occur since C and C++ don't enforce type-checking of
     assignments to enumeration variables.  */

  if (warn_switch)
    for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
      {
	for (chain = TYPE_VALUES (type);
	     chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
	     chain = TREE_CHAIN (chain));

	if (!chain)
	  {
	    if (TYPE_NAME (type) == 0)
	      warning ("case value `%d' not in enumerated type",
		       TREE_INT_CST_LOW (n->low));
	    else
	      warning ("case value `%d' not in enumerated type `%s'",
		       TREE_INT_CST_LOW (n->low),
		       IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
					    == IDENTIFIER_NODE)
					   ? TYPE_NAME (type)
					   : DECL_NAME (TYPE_NAME (type))));
	  }

	if (!tree_int_cst_equal (n->low, n->high))
	  {
	    for (chain = TYPE_VALUES (type);
		 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
		 chain = TREE_CHAIN (chain));

	    if (!chain)
	      {
		if (TYPE_NAME (type) == 0)
		  warning ("case value `%d' not in enumerated type",
			   TREE_INT_CST_LOW (n->high));
		else
		  warning ("case value `%d' not in enumerated type `%s'",
			   TREE_INT_CST_LOW (n->high),
			   IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
						== IDENTIFIER_NODE)
					       ? TYPE_NAME (type)
					       : DECL_NAME (TYPE_NAME (type))));
	      }
	  }
      }

#if 0
  /* ??? This optimization is disabled because it causes valid programs to
     fail.  ANSI C does not guarantee that an expression with enum type
     will have a value that is the same as one of the enumeration literals.  */

  /* If all values were found as case labels, make one of them the default
     label.  Thus, this switch will never fall through.  We arbitrarily pick
     the last one to make the default since this is likely the most
     efficient choice.  */

  if (all_values)
    {
      for (l = &case_stack->data.case_stmt.case_list;
	   (*l)->right != 0;
	   l = &(*l)->right);

      case_stack->data.case_stmt.default_label = (*l)->code_label;
    }
#endif /* 0 */
}

/* Check that all enumeration literals are covered by the case
   expressions of a switch.  Also warn if there are any cases
   that are not elements of the enumerated type.  */

static void
bc_check_for_full_enumeration_handling (type)
     tree type;
{
  struct nesting *thiscase = case_stack;
  struct case_node *c;
  tree e;

  /* Check for enums not handled.  */
  for (e = TYPE_VALUES (type); e; e = TREE_CHAIN (e))
    {
      for (c = thiscase->data.case_stmt.case_list->left;
	   c && tree_int_cst_lt (c->high, TREE_VALUE (e));
	   c = c->left);

      if (! (c && tree_int_cst_equal (c->low, TREE_VALUE (e))))
	warning ("enumerated value `%s' not handled in switch",
		 IDENTIFIER_POINTER (TREE_PURPOSE (e)));
    }

  /* Check for cases not in the enumeration.  */
  for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
    {
      for (e = TYPE_VALUES (type);
	   e && !tree_int_cst_equal (c->low, TREE_VALUE (e));
	   e = TREE_CHAIN (e));

      if (! e)
	warning ("case value `%d' not in enumerated type `%s'",
		 TREE_INT_CST_LOW (c->low),
		 IDENTIFIER_POINTER (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
				     ? TYPE_NAME (type)
				     : DECL_NAME (TYPE_NAME (type))));
    }
}

/* Terminate a case (Pascal) or switch (C) statement
   in which ORIG_INDEX is the expression to be tested.
   Generate the code to test it and jump to the right place.  */

void
expand_end_case (orig_index)
     tree orig_index;
{
  tree minval, maxval, range, orig_minval;
  rtx default_label = 0;
  register struct case_node *n;
  int count;
  rtx index;
  rtx table_label;
  int ncases;
  rtx *labelvec;
  register int i;
  rtx before_case;
  register struct nesting *thiscase = case_stack;
  tree index_expr;
  int unsignedp;

  if (output_bytecode)
    {
      bc_expand_end_case (orig_index);
      return;
    }

  table_label = gen_label_rtx ();
  index_expr = thiscase->data.case_stmt.index_expr;
  unsignedp = TREE_UNSIGNED (TREE_TYPE (index_expr));

  do_pending_stack_adjust ();

  /* An ERROR_MARK occurs for various reasons including invalid data type.  */
  if (TREE_TYPE (index_expr) != error_mark_node)
    {
      /* If switch expression was an enumerated type, check that all
	 enumeration literals are covered by the cases.
	 No sense trying this if there's a default case, however.  */

      if (!thiscase->data.case_stmt.default_label
	  && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
	  && TREE_CODE (index_expr) != INTEGER_CST)
	check_for_full_enumeration_handling (TREE_TYPE (orig_index));

      /* If this is the first label, warn if any insns have been emitted.  */
      if (thiscase->data.case_stmt.seenlabel == 0)
	{
	  rtx insn;
	  for (insn = get_last_insn ();
	       insn != case_stack->data.case_stmt.start;
	       insn = PREV_INSN (insn))
	    if (GET_CODE (insn) != NOTE
		&& (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
	      {
		warning ("unreachable code at beginning of %s",
			 case_stack->data.case_stmt.printname);
		break;
	      }
	}

      /* If we don't have a default-label, create one here,
	 after the body of the switch.  */
      if (thiscase->data.case_stmt.default_label == 0)
	{
	  thiscase->data.case_stmt.default_label
	    = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
	  expand_label (thiscase->data.case_stmt.default_label);
	}
      default_label = label_rtx (thiscase->data.case_stmt.default_label);

      before_case = get_last_insn ();

      /* Simplify the case-list before we count it.  */
      group_case_nodes (thiscase->data.case_stmt.case_list);

      /* Get upper and lower bounds of case values.
	 Also convert all the case values to the index expr's data type.  */

      count = 0;
      for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
	{
	  /* Check low and high label values are integers.  */
	  if (TREE_CODE (n->low) != INTEGER_CST)
	    abort ();
	  if (TREE_CODE (n->high) != INTEGER_CST)
	    abort ();

	  n->low = convert (TREE_TYPE (index_expr), n->low);
	  n->high = convert (TREE_TYPE (index_expr), n->high);

	  /* Count the elements and track the largest and smallest
	     of them (treating them as signed even if they are not).  */
	  if (count++ == 0)
	    {
	      minval = n->low;
	      maxval = n->high;
	    }
	  else
	    {
	      if (INT_CST_LT (n->low, minval))
		minval = n->low;
	      if (INT_CST_LT (maxval, n->high))
		maxval = n->high;
	    }
	  /* A range counts double, since it requires two compares.  */
	  if (! tree_int_cst_equal (n->low, n->high))
	    count++;
	}

      orig_minval = minval;

      /* Compute span of values.  */
      if (count != 0)
	range = fold (build (MINUS_EXPR, TREE_TYPE (index_expr),
			     maxval, minval));

      if (count == 0 || TREE_CODE (TREE_TYPE (index_expr)) == ERROR_MARK)
	{
	  expand_expr (index_expr, const0_rtx, VOIDmode, 0);
	  emit_queue ();
	  emit_jump (default_label);
	}

      /* If range of values is much bigger than number of values,
	 make a sequence of conditional branches instead of a dispatch.
	 If the switch-index is a constant, do it this way
	 because we can optimize it.  */

#ifndef CASE_VALUES_THRESHOLD
#ifdef HAVE_casesi
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#else
      /* If machine does not have a case insn that compares the
	 bounds, this means extra overhead for dispatch tables
	 which raises the threshold for using them.  */
#define CASE_VALUES_THRESHOLD 5
#endif /* HAVE_casesi */
#endif /* CASE_VALUES_THRESHOLD */

      else if (TREE_INT_CST_HIGH (range) != 0
	       || count < CASE_VALUES_THRESHOLD
	       || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
		   > 10 * count)
	       || TREE_CODE (index_expr) == INTEGER_CST
	       /* These will reduce to a constant.  */
	       || (TREE_CODE (index_expr) == CALL_EXPR
		   && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
		   && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
		   && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
	       || (TREE_CODE (index_expr) == COMPOUND_EXPR
		   && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
	{
	  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);

	  /* If the index is a short or char that we do not have
	     an insn to handle comparisons directly, convert it to
	     a full integer now, rather than letting each comparison
	     generate the conversion.  */

	  if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
	      && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
		  == CODE_FOR_nothing))
	    {
	      enum machine_mode wider_mode;
	      for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
		   wider_mode = GET_MODE_WIDER_MODE (wider_mode))
		if (cmp_optab->handlers[(int) wider_mode].insn_code
		    != CODE_FOR_nothing)
		  {
		    index = convert_to_mode (wider_mode, index, unsignedp);
		    break;
		  }
	    }

	  emit_queue ();
	  do_pending_stack_adjust ();

	  index = protect_from_queue (index, 0);
	  if (GET_CODE (index) == MEM)
	    index = copy_to_reg (index);
	  if (GET_CODE (index) == CONST_INT
	      || TREE_CODE (index_expr) == INTEGER_CST)
	    {
	      /* Make a tree node with the proper constant value
		 if we don't already have one.  */
	      if (TREE_CODE (index_expr) != INTEGER_CST)
		{
		  index_expr
		    = build_int_2 (INTVAL (index),
				   !unsignedp && INTVAL (index) >= 0 ? 0 : -1);
		  index_expr = convert (TREE_TYPE (index_expr), index_expr);
		}

	      /* For constant index expressions we need only
		 issue an unconditional branch to the appropriate
		 target code.  The job of removing any unreachable
		 code is left to the optimization phase if the
		 "-O" option is specified.  */
	      for (n = thiscase->data.case_stmt.case_list;
		   n;
		   n = n->right)
		if (! tree_int_cst_lt (index_expr, n->low)
		    && ! tree_int_cst_lt (n->high, index_expr))
		  break;

	      if (n)
		emit_jump (label_rtx (n->code_label));
	      else
		emit_jump (default_label);
	    }
	  else
	    {
	      /* If the index expression is not constant we generate
		 a binary decision tree to select the appropriate
		 target code.  This is done as follows:

		 The list of cases is rearranged into a binary tree,
		 nearly optimal assuming equal probability for each case.

		 The tree is transformed into RTL, eliminating
		 redundant test conditions at the same time.

		 If program flow could reach the end of the
		 decision tree an unconditional jump to the
		 default code is emitted.  */

	      use_cost_table
		= (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
		   && estimate_case_costs (thiscase->data.case_stmt.case_list));
	      balance_case_nodes (&thiscase->data.case_stmt.case_list,
				  NULL_PTR);
	      emit_case_nodes (index, thiscase->data.case_stmt.case_list,
			       default_label, TREE_TYPE (index_expr));
	      emit_jump_if_reachable (default_label);
	    }
	}
      else
	{
	  int win = 0;
#ifdef HAVE_casesi
	  if (HAVE_casesi)
	    {
	      enum machine_mode index_mode = SImode;
	      int index_bits = GET_MODE_BITSIZE (index_mode);

	      /* Convert the index to SImode.  */
	      if (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (index_expr)))
		  > GET_MODE_BITSIZE (index_mode))
		{
		  enum machine_mode omode = TYPE_MODE (TREE_TYPE (index_expr));
		  rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

		  /* We must handle the endpoints in the original mode.  */
		  index_expr = build (MINUS_EXPR, TREE_TYPE (index_expr),
				      index_expr, minval);
		  minval = integer_zero_node;
		  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
		  emit_cmp_insn (rangertx, index, LTU, NULL_RTX, omode, 1, 0);
		  emit_jump_insn (gen_bltu (default_label));
		  /* Now we can safely truncate.  */
		  index = convert_to_mode (index_mode, index, 0);
		}
	      else
		{
		  if (TYPE_MODE (TREE_TYPE (index_expr)) != index_mode)
		    index_expr = convert (type_for_size (index_bits, 0),
					  index_expr);
		  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
		}
	      emit_queue ();
	      index = protect_from_queue (index, 0);
	      do_pending_stack_adjust ();

	      emit_jump_insn (gen_casesi (index, expand_expr (minval, NULL_RTX,
							      VOIDmode, 0),
					  expand_expr (range, NULL_RTX,
						       VOIDmode, 0),
					  table_label, default_label));
	      win = 1;
	    }
#endif
#ifdef HAVE_tablejump
	  if (! win && HAVE_tablejump)
	    {
	      index_expr = convert (thiscase->data.case_stmt.nominal_type,
				    fold (build (MINUS_EXPR,
						 TREE_TYPE (index_expr),
						 index_expr, minval)));
	      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
	      emit_queue ();
	      index = protect_from_queue (index, 0);
	      do_pending_stack_adjust ();

	      do_tablejump (index, TYPE_MODE (TREE_TYPE (index_expr)),
			    expand_expr (range, NULL_RTX, VOIDmode, 0),
			    table_label, default_label);
	      win = 1;
	    }
#endif
	  if (! win)
	    abort ();

	  /* Get table of labels to jump to, in order of case index.  */

	  ncases = TREE_INT_CST_LOW (range) + 1;
	  labelvec = (rtx *) alloca (ncases * sizeof (rtx));
	  bzero (labelvec, ncases * sizeof (rtx));

	  for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
	    {
	      register HOST_WIDE_INT i
		= TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);

	      while (1)
		{
		  labelvec[i]
		    = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
		  if (i + TREE_INT_CST_LOW (orig_minval)
		      == TREE_INT_CST_LOW (n->high))
		    break;
		  i++;
		}
	    }

	  /* Fill in the gaps with the default.  */
	  for (i = 0; i < ncases; i++)
	    if (labelvec[i] == 0)
	      labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);

	  /* Output the table.  */
	  emit_label (table_label);

	  /* This would be a lot nicer if CASE_VECTOR_PC_RELATIVE
	     were an expression, instead of an #ifdef/#ifndef.  */
	  if (0)
	    ;
#ifdef CASE_VECTOR_PC_RELATIVE
	  else if (1)
	    emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
				     gen_rtx (LABEL_REF, Pmode, table_label),
				     gen_rtvec_v (ncases, labelvec)));
#endif
	  else
	    emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
				     gen_rtvec_v (ncases, labelvec)));

	  /* If the case insn drops through the table,
	     after the table we must jump to the default-label.
	     Otherwise record no drop-through after the table.  */
#ifdef CASE_DROPS_THROUGH
	  emit_jump (default_label);
#else
	  emit_barrier ();
#endif
	}

      before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
      reorder_insns (before_case, get_last_insn (),
		     thiscase->data.case_stmt.start);
    }

  if (thiscase->exit_label)
    emit_label (thiscase->exit_label);

  POPSTACK (case_stack);
}
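
/* Illustration only: the dispatch-table decision above.  With the default
   CASE_VALUES_THRESHOLD and the 10-to-1 density test, cases {0,1,2,3,4}
   (count 5, range 4) get a table, while {0,100} (count 2, range 100) fall
   back to the comparison tree.  A sketch of that test for scalar inputs,
   ignoring the constant-index and wide-range special cases:  */
#if 0
static int
example_use_table_p (range, count)
     unsigned HOST_WIDE_INT range;
     int count;
{
  return count >= CASE_VALUES_THRESHOLD && range <= 10 * count;
}
#endif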

/* Terminate a case statement.  EXPR is the original index
   expression.  */

static void
bc_expand_end_case (expr)
     tree expr;
{
  struct nesting *thiscase = case_stack;
  enum bytecode_opcode opcode;
  struct bc_label *jump_label;
  struct case_node *c;

  bc_emit_bytecode (jump);
  bc_emit_bytecode_labelref (thiscase->exit_label->bc_label);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif

  /* Now that the size of the jump table is known, emit the actual
     indexed jump instruction.  */
  bc_emit_bytecode_labeldef (thiscase->data.case_stmt.skip_label->bc_label);

  opcode = TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode
    ? TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseSU : caseSI
    : TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseDU : caseDI;

  bc_emit_bytecode (opcode);

  /* Now emit the case instructions literal arguments, in order.
     In addition to the value on the stack, it uses:
     1.  The address of the jump table.
     2.  The size of the jump table.
     3.  The default label.  */

  jump_label = bc_get_bytecode_label ();
  bc_emit_bytecode_labelref (jump_label);
  bc_emit_bytecode_const ((char *) &thiscase->data.case_stmt.num_ranges,
			  sizeof thiscase->data.case_stmt.num_ranges);

  if (thiscase->data.case_stmt.default_label)
    bc_emit_bytecode_labelref (DECL_RTL (thiscase->data.case_stmt.default_label)->bc_label);
  else
    bc_emit_bytecode_labelref (thiscase->exit_label->bc_label);

  /* Output the jump table.  */

  bc_align_bytecode (3 /* PTR_ALIGN */);
  bc_emit_bytecode_labeldef (jump_label);

  if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode)
    for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
      {
	opcode = TREE_INT_CST_LOW (c->low);
	bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);

	opcode = TREE_INT_CST_LOW (c->high);
	bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);

	bc_emit_bytecode_labelref (DECL_RTL (c->code_label)->bc_label);
      }
  else if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == DImode)
    for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
      {
	bc_emit_bytecode_DI_const (c->low);
	bc_emit_bytecode_DI_const (c->high);

	bc_emit_bytecode_labelref (DECL_RTL (c->code_label)->bc_label);
      }
  else
    /* Bad mode */
    abort ();

  bc_emit_bytecode_labeldef (thiscase->exit_label->bc_label);

  /* Possibly issue enumeration warnings.  */

  if (!thiscase->data.case_stmt.default_label
      && TREE_CODE (TREE_TYPE (expr)) == ENUMERAL_TYPE
      && TREE_CODE (expr) != INTEGER_CST
      && warn_switch)
    check_for_full_enumeration_handling (TREE_TYPE (expr));

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif

  POPSTACK (case_stack);
}

/* Return unique bytecode ID.  */

int
bc_new_uid ()
{
  static int bc_uid = 0;

  return (++bc_uid);
}

/* Generate code to jump to LABEL if OP1 and OP2 are equal.  */

static void
do_jump_if_equal (op1, op2, label, unsignedp)
     rtx op1, op2, label;
     int unsignedp;
{
  if (GET_CODE (op1) == CONST_INT
      && GET_CODE (op2) == CONST_INT)
    {
      if (INTVAL (op1) == INTVAL (op2))
	emit_jump (label);
    }
  else
    {
      enum machine_mode mode = GET_MODE (op1);
      if (mode == VOIDmode)
	mode = GET_MODE (op2);
      emit_cmp_insn (op1, op2, EQ, NULL_RTX, mode, unsignedp, 0);
      emit_jump_insn (gen_beq (label));
    }
}

/* Not all case values are encountered equally.  This function
   uses a heuristic to weight case labels, in cases where that
   looks like a reasonable thing to do.

   Right now, all we try to guess is text, and we establish the
   following weights:

	chars above space:	16
	digits:			16
	default:		12
	space, punct:		8
	tab:			4
	newline:		2
	other "\" chars:	1
	remaining chars:	0

   If we find any cases in the switch that are not either -1 or in the range
   of valid ASCII characters, or are control characters other than those
   commonly used with "\", don't treat this switch scanning text.

   Return 1 if these nodes are suitable for cost estimation, otherwise
   return 0.  */

static int
estimate_case_costs (node)
     case_node_ptr node;
{
  tree min_ascii = build_int_2 (-1, -1);
  tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
  case_node_ptr n;
  int i;

  /* If we haven't already made the cost table, make it now.  Note that the
     lower bound of the table is -1, not zero.  */

  if (cost_table == NULL)
    {
      cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
      bzero (cost_table - 1, 129 * sizeof (short));

      for (i = 0; i < 128; i++)
	{
	  if (isalnum (i))
	    cost_table[i] = 16;
	  else if (ispunct (i))
	    cost_table[i] = 8;
	  else if (iscntrl (i))
	    cost_table[i] = -1;
	}

      cost_table[' '] = 8;
      cost_table['\t'] = 4;
      cost_table['\0'] = 4;
      cost_table['\n'] = 2;
      cost_table['\f'] = 1;
      cost_table['\v'] = 1;
      cost_table['\b'] = 1;
    }

  /* See if all the case expressions look like text.  It is text if the
     constant is >= -1 and the highest constant is <= 127.  Do all comparisons
     as signed arithmetic since we don't want to ever access cost_table with a
     value less than -1.  Also check that none of the constants in a range
     are strange control characters.  */

  for (n = node; n; n = n->right)
    {
      if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
	return 0;

      for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
	if (cost_table[i] < 0)
	  return 0;
    }

  /* All interesting values are within the range of interesting
     ASCII characters.  */

  return 1;
}

/* Scan an ordered list of case nodes
   combining those with consecutive values or ranges.

   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

static void
group_case_nodes (head)
     case_node_ptr head;
{
  case_node_ptr node = head;

  while (node)
    {
      rtx lb = next_real_insn (label_rtx (node->code_label));
      case_node_ptr np = node;

      /* Try to group the successors of NODE with NODE.  */
      while (((np = np->right) != 0)
	     /* Do they jump to the same place?  */
	     && next_real_insn (label_rtx (np->code_label)) == lb
	     /* Are their ranges consecutive?  */
	     && tree_int_cst_equal (np->low,
				    fold (build (PLUS_EXPR,
						 TREE_TYPE (node->high),
						 node->high,
						 integer_one_node)))
	     /* An overflow is not consecutive.  */
	     && tree_int_cst_lt (node->high,
				 fold (build (PLUS_EXPR,
					      TREE_TYPE (node->high),
					      node->high,
					      integer_one_node))))
	{
	  node->high = np->high;
	}
      /* NP is the first node after NODE which can't be grouped with it.
	 Delete the nodes in between, and move on to that node.  */
      node->right = np;
      node = np;
    }
}

/* Take an ordered list of case nodes
   and transform them into a near optimal binary tree,
   on the assumption that any target code selection value is as
   likely as any other.

   The transformation is performed by splitting the ordered
   list into two equal sections plus a pivot.  The parts are
   then attached to the pivot as left and right branches.  Each
   branch is then transformed recursively.  */

static void
balance_case_nodes (head, parent)
     case_node_ptr *head;
     case_node_ptr parent;
{
  register case_node_ptr np;

  np = *head;
  if (np)
    {
      int cost = 0;
      int i = 0;
      int ranges = 0;
      register case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on branch.  Also count the ranges.  */

      while (np)
	{
	  if (!tree_int_cst_equal (np->low, np->high))
	    {
	      ranges++;
	      if (use_cost_table)
		cost += cost_table[TREE_INT_CST_LOW (np->high)];
	    }

	  if (use_cost_table)
	    cost += cost_table[TREE_INT_CST_LOW (np->low)];

	  i++;
	  np = np->right;
	}

      if (i > 2)
	{
	  /* Split this list if it is long enough for that to help.  */
	  npp = head;
	  left = *npp;
	  if (use_cost_table)
	    {
	      /* Find the place in the list that bisects the list's total cost,
		 Here I gets half the total cost.  */
	      int n_moved = 0;
	      i = (cost + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
		  i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		  n_moved += 1;
		}
	      if (n_moved == 0)
		{
		  /* Leave this branch lopsided, but optimize left-hand
		     side and fill in `parent' fields for right-hand side.  */
		  np = *head;
		  np->parent = parent;
		  balance_case_nodes (&np->left, np);
		  for (; np->right; np = np->right)
		    np->right->parent = np;
		  return;
		}
	    }
	  /* If there are just three nodes, split at the middle one.  */
	  else if (i == 3)
	    npp = &(*npp)->right;
	  else
	    {
	      /* Find the place in the list that bisects the list's total cost,
		 where ranges count as 2.
		 Here I gets half the total cost.  */
	      i = (i + ranges + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i--;
		  i--;
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		}
	    }
	  *head = np = *npp;
	  *npp = 0;
	  np->parent = parent;
	  np->left = left;

	  /* Optimize each of the two split parts.  */
	  balance_case_nodes (&np->left, np);
	  balance_case_nodes (&np->right, np);
	}
      else
	{
	  /* Else leave this branch as one level,
	     but fill in `parent' fields.  */
	  np = *head;
	  np->parent = parent;
	  for (; np->right; np = np->right)
	    np->right->parent = np;
	}
    }
}
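
/* Illustration only, not part of GNU CC: the same cost-bisection on a
   plain array of per-node weights.  With the text weights from
   estimate_case_costs, a switch over { 'a', '0', '\n' } has total cost
   16 + 16 + 2 = 34; walking until the running weight reaches half of
   that (17) makes '0' the pivot, leaving the rare '\n' deeper in the
   tree than the frequent alphanumerics.  */
#if 0
static int
example_pick_pivot (weight, n)
     short *weight;
     int n;
{
  int total = 0, half, k;

  for (k = 0; k < n; k++)
    total += weight[k];

  /* Walk forward until the accumulated weight reaches half the total,
     mirroring the `i -= cost_table[...]' loop above.  */
  for (half = (total + 1) / 2, k = 0; k < n - 1; k++)
    {
      half -= weight[k];
      if (half <= 0)
	break;
    }
  return k;			/* Index of the pivot node.  */
}
#endif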

/* Search the parent sections of the case node tree
   to see if a test for the lower bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node minus one that the current node is bounded at its lower
   span.  Thus the test would be redundant.  */

static int
node_has_low_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree low_minus_one;
  case_node_ptr pnode;

  /* If the lower bound of this node is the lowest value in the index type,
     we need not test it.  */

  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
    return 1;

  /* If this node has a left branch, the value at the left must be less
     than that at this node, so it cannot be bounded at the bottom and
     we need not bother testing any further.  */

  if (node->left)
    return 0;

  low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
			       node->low, integer_one_node));

  /* If the subtraction above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value - 1.  */

  if (! tree_int_cst_lt (low_minus_one, node->low))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (low_minus_one, pnode->high))
      return 1;

  return 0;
}

/* Search the parent sections of the case node tree
   to see if a test for the upper bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node plus one that the current node is bounded at its upper
   span.  Thus the test would be redundant.  */

static int
node_has_high_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree high_plus_one;
  case_node_ptr pnode;

  /* If the upper bound of this node is the highest value in the type
     of the index expression, we need not test against it.  */

  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
    return 1;

  /* If this node has a right branch, the value at the right must be greater
     than that at this node, so it cannot be bounded at the top and
     we need not bother testing any further.  */

  if (node->right)
    return 0;

  high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
			       node->high, integer_one_node));

  /* If the addition above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value + 1.  */

  if (! tree_int_cst_lt (node->high, high_plus_one))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (high_plus_one, pnode->low))
      return 1;

  return 0;
}

/* Search the parent sections of the
   case node tree to see if both tests for the upper and lower
   bounds of NODE would be redundant.  */

static int
node_is_bounded (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  return (node_has_low_bound (node, index_type)
	  && node_has_high_bound (node, index_type));
}

/* Emit an unconditional jump to LABEL unless it would be dead code.  */

static void
emit_jump_if_reachable (label)
     rtx label;
{
  if (GET_CODE (get_last_insn ()) != BARRIER)
    emit_jump (label);
}

/* Emit step-by-step code to select a case for the value of INDEX.
   The thus generated decision tree follows the form of the
   case-node binary tree NODE, whose nodes represent test conditions.
   INDEX_TYPE is the type of the index of the switch.

   Care is taken to prune redundant tests from the decision tree
   by detecting any boundary conditions already checked by
   emitted rtx.  (See node_has_high_bound, node_has_low_bound
   and node_is_bounded, above.)

   Where the test conditions can be shown to be redundant we emit
   an unconditional jump to the target code.  As a further
   optimization, the subordinates of a tree node are examined to
   check for bounded nodes.  In this case conditional and/or
   unconditional jumps as a result of the boundary check for the
   current node are arranged to target the subordinates' associated
   code for out of bound conditions on the current node.

   We can assume that when control reaches the code generated here,
   the index value has already been compared with the parents
   of this node, and determined to be on the same side of each parent
   as this node is.  Thus, if this node tests for the value 51,
   and a parent tested for 52, we don't need to consider
   the possibility of a value greater than 51.  If another parent
   tests for the value 50, then this node need not test anything.  */

static void
emit_case_nodes (index, node, default_label, index_type)
     rtx index;
     case_node_ptr node;
     rtx default_label;
     tree index_type;
{
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
  int unsignedp = TREE_UNSIGNED (index_type);
  typedef rtx rtx_function ();
  rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
  rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
  rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
  rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
  enum machine_mode mode = GET_MODE (index);

  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));

  else if (tree_int_cst_equal (node->low, node->high))
    {
      /* Node is single valued.  First see if the index expression matches
	 this node and then check our children, if any.  */

      do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
			label_rtx (node->code_label), unsignedp);

      if (node->right != 0 && node->left != 0)
	{
	  /* This node has children on both sides.
	     Dispatch to one side or the other
	     by comparing the index value with this node's value.
	     If one subtree is bounded, check that one first,
	     so we can avoid real branches in the tree.  */

	  if (node_is_bounded (node->right, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);

	      emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
	      emit_case_nodes (index, node->left, default_label, index_type);
	    }

	  else if (node_is_bounded (node->left, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     LT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }

	  else
	    {
	      /* Neither node is bounded.  First distinguish the two sides;
		 then emit the code for one side at a time.  */

	      tree test_label
		= build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

	      /* See if the value is on the right.  */
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));

	      /* Value must be on the left.
		 Handle the left-hand subtree.  */
	      emit_case_nodes (index, node->left, default_label, index_type);
	      /* If left-hand subtree does nothing,
		 go to default.  */
	      emit_jump_if_reachable (default_label);

	      /* Code branches here for the right-hand subtree.  */
	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
	  /* Here we have a right child but no left so we issue conditional
	     branch to default and process the right child.

	     Omit the conditional branch to default if it would avoid only
	     one right child; it costs too much space to save so little
	     time.  */

	  if (node->right->right || node->right->left
	      || !tree_int_cst_equal (node->right->low, node->right->high))
	    {
	      if (!node_has_low_bound (node, index_type))
		{
		  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
				 LT, NULL_RTX, mode, unsignedp, 0);
		  emit_jump_insn ((*gen_blt_pat) (default_label));
		}

	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	  else
	    /* We cannot process node->right normally
	       since we haven't ruled out the numbers less than
	       this node's value.  So handle node->right explicitly.  */
	    do_jump_if_equal (index,
			      expand_expr (node->right->low, NULL_RTX,
					   VOIDmode, 0),
			      label_rtx (node->right->code_label), unsignedp);
	}

      else if (node->right == 0 && node->left != 0)
	{
	  /* Just one subtree, on the left.  */

#if 0 /* The following code and comment were formerly part
	 of the condition here, but they didn't work
	 and I don't understand what the idea was.  -- rms.  */
	  /* If our "most probable entry" is less probable
	     than the default label, emit a jump to
	     the default label using condition codes
	     already lying around.  With no right branch,
	     a branch-greater-than will get us to the default
	     label correctly.  */
	  if (use_cost_table
	      && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
	    ;
#endif /* 0 */
	  if (node->left->left || node->left->right
	      || !tree_int_cst_equal (node->left->low, node->left->high))
	    {
	      if (!node_has_high_bound (node, index_type))
		{
		  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
				 GT, NULL_RTX, mode, unsignedp, 0);
		  emit_jump_insn ((*gen_bgt_pat) (default_label));
		}

	      emit_case_nodes (index, node->left, default_label, index_type);
	    }
	  else
	    /* We cannot process node->left normally
	       since we haven't ruled out the numbers less than
	       this node's value.  So handle node->left explicitly.  */
	    do_jump_if_equal (index,
			      expand_expr (node->left->low, NULL_RTX,
					   VOIDmode, 0),
			      label_rtx (node->left->code_label), unsignedp);
	}
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
	 value, except that we do not start by testing whether this node
	 is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
	{
	  /* Node has subtrees on both sides.
	     If the right-hand subtree is bounded,
	     test for it first, since we can go straight there.
	     Otherwise, we need to make a branch in the control structure,
	     then handle the two subtrees.  */
	  tree test_label = 0;

	  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
					     VOIDmode, 0),
			 GT, NULL_RTX, mode, unsignedp, 0);

	  if (node_is_bounded (node->right, index_type))
	    /* Right hand node is fully bounded so we can eliminate any
	       testing and branch directly to the target code.  */
	    emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
	  else
	    {
	      /* Right hand node requires testing.
		 Branch to a label where we will handle it later.  */

	      test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
	      emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
			 GE, NULL_RTX, mode, unsignedp, 0);
	  emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));

	  /* Handle the left-hand subtree.  */
	  emit_case_nodes (index, node->left, default_label, index_type);

	  /* If right node had to be handled later, do that now.  */
	  if (test_label)
	    {
	      /* If the left-hand subtree fell through,
		 don't let it fall into the right-hand subtree.  */
	      emit_jump_if_reachable (default_label);

	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
	  /* Deal with values to the left of this node,
	     if they are possible.  */
	  if (!node_has_low_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
						 VOIDmode, 0),
			     LT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_blt_pat) (default_label));
	    }

	  /* Value belongs to this node or to the right-hand subtree.  */

	  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
					     VOIDmode, 0),
			 LE, NULL_RTX, mode, unsignedp, 0);
	  emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));

	  emit_case_nodes (index, node->right, default_label, index_type);
	}

      else if (node->right == 0 && node->left != 0)
	{
	  /* Deal with values to the right of this node,
	     if they are possible.  */
	  if (!node_has_high_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_bgt_pat) (default_label));
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
			 GE, NULL_RTX, mode, unsignedp, 0);
	  emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));

	  emit_case_nodes (index, node->left, default_label, index_type);
	}

      else
	{
	  /* Node has no children so we check low and high bounds to remove
	     redundant tests.  Only one of the bounds can exist,
	     since otherwise this node is bounded--a case tested already.  */

	  if (!node_has_high_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_bgt_pat) (default_label));
	    }

	  if (!node_has_low_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
						 VOIDmode, 0),
			     LT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_blt_pat) (default_label));
	    }

	  emit_jump (label_rtx (node->code_label));
	}
    }
}
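
/* Illustration only, not part of GNU CC: for `case 1: case 4: case 9:'
   with no bounds known, the decision tree emitted above behaves like this
   hand-written C (after balancing, the node for 4 is the root, with 1 as
   its left child and 9 as its right child).  */
#if 0
static void
example_dispatch (index)
     int index;
{
  if (index == 4) goto L4;
  if (index > 4)
    {
      if (index == 9) goto L9;
      goto Ldefault;
    }
  if (index == 1) goto L1;
  goto Ldefault;
 L1: ;
 L4: ;
 L9: ;
 Ldefault: ;
}
#endif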

/* These routines are used by the loop unrolling code.  They copy BLOCK trees
   so that the debugging info will be correct for the unrolled loop.  */

/* Indexed by block number, contains a pointer to the N'th block node.  */

static tree *block_vector;

void
find_loop_tree_blocks ()
{
  tree block = DECL_INITIAL (current_function_decl);

  /* The first block is for the function body, and does not have
     corresponding block notes.  Don't include it in the block vector.  */
  block = BLOCK_SUBBLOCKS (block);

  block_vector = identify_blocks (block, get_insns ());
}

void
unroll_block_trees ()
{
  tree block = DECL_INITIAL (current_function_decl);

  reorder_blocks (block_vector, block, get_insns ());
}