1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 88, 89, 92-5, 1996 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 /* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 It also creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
27 The functions whose names start with `expand_' are called by the
28 parser to generate RTL instructions for various kinds of constructs.
30 Some control and binding constructs require calling several such
31 functions at different times. For example, a simple if-then
32 is expanded by calling `expand_start_cond' (with the condition-expression
33 as argument) before parsing the then-clause and calling `expand_end_cond'
34 after parsing the then-clause. */
45 #include "insn-flags.h"
46 #include "insn-config.h"
47 #include "insn-codes.h"
49 #include "hard-reg-set.h"
56 #include "bc-typecd.h"
57 #include "bc-opcode.h"
61 #define obstack_chunk_alloc xmalloc
62 #define obstack_chunk_free free
/* Obstack from which `struct nesting' objects are allocated
   (see ALLOC_NESTING) and to which POPSTACK frees them.  */
63 struct obstack stmt_obstack;
65 /* Filename and line number of last line-number note,
66 whether we actually emitted it or not. */
70 /* Nonzero if within a ({...}) grouping, in which case we must
71 always compute a value for each expr-stmt in case it is the last one. */
73 int expr_stmts_for_value;
75 /* Each time we expand an expression-statement,
76 record the expr's type and its RTL value here. */
78 static tree last_expr_type;
79 static rtx last_expr_value;
81 /* Each time we expand the end of a binding contour (in `expand_end_bindings')
82 and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
83 This is used by the `remember_end_note' function to record the endpoint
84 of each generated block in its associated BLOCK node. */
86 static rtx last_block_end_note;
88 /* Number of binding contours started so far in this function. */
90 int block_start_count;
92 /* Nonzero if function being compiled needs to
93 return the address of where it has put a structure value. */
95 extern int current_function_returns_pcc_struct;
97 /* Label that will go on parm cleanup code, if any.
98 Jumping to this label runs cleanup code for parameters, if
99 such code must be run. Following this code is the logical return label. */
101 extern rtx cleanup_label;
103 /* Label that will go on function epilogue.
104 Jumping to this label serves as a "return" instruction
105 on machines which require execution of the epilogue on all returns. */
107 extern rtx return_label;
109 /* Offset to end of allocated area of stack frame.
110 If stack grows down, this is the address of the last stack slot allocated.
111 If stack grows up, this is the address for the next slot. */
112 extern int frame_offset;
114 /* Label to jump back to for tail recursion, or 0 if we have
115 not yet needed one for this function. */
116 extern rtx tail_recursion_label;
118 /* Place after which to insert the tail_recursion_label if we need one. */
119 extern rtx tail_recursion_reentry;
121 /* Location at which to save the argument pointer if it will need to be
122 referenced. There are two cases where this is done: if nonlocal gotos
123 exist, or if vars whose is an offset from the argument pointer will be
124 needed by inner routines. */
126 extern rtx arg_pointer_save_area;
128 /* Chain of all RTL_EXPRs that have insns in them. */
129 extern tree rtl_expr_chain;
131 #if 0 /* Turned off because 0 seems to work just as well. */
132 /* Cleanup lists are required for binding levels regardless of whether
133 that binding level has cleanups or not. This node serves as the
134 cleanup list whenever an empty list is required. */
135 static tree empty_cleanup_list;
138 extern void (*interim_eh_hook) PROTO((tree));
140 /* Functions and data structures for expanding case statements. */
142 /* Case label structure, used to hold info on labels within case
143 statements. We handle "range" labels; for a single-value label
144 as in C, the high and low limits are the same.
146 An AVL tree of case nodes is initially created, and later transformed
147 to a list linked via the RIGHT fields in the nodes. Nodes with
148 higher case values are later in the list.
150 Switch statements can be output in one of two forms. A branch table
151 is used if there are more than a few labels and the labels are dense
152 within the range between the smallest and largest case value. If a
153 branch table is used, no further manipulations are done with the case
156 The alternative to the use of a branch table is to generate a series
157 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
158 and PARENT fields to hold a binary tree. Initially the tree is
159 totally unbalanced, with everything on the right. We balance the tree
160 with nodes on the left having lower case values than the parent
161 and nodes on the right having higher values. We then output the tree
166 struct case_node *left; /* Left son in binary tree */
167 struct case_node *right; /* Right son in binary tree; also node chain */
168 struct case_node *parent; /* Parent of node in binary tree */
169 tree low; /* Lowest index value for this label */
170 tree high; /* Highest index value for this label */
171 tree code_label; /* Label to jump to when node matches */
175 typedef struct case_node case_node;
176 typedef struct case_node *case_node_ptr;
178 /* These are used by estimate_case_costs and balance_case_nodes. */
180 /* This must be a signed type, and non-ANSI compilers lack signed char. */
181 static short *cost_table;
182 static int use_cost_table;
184 /* Stack of control and binding constructs we are currently inside.
186 These constructs begin when you call `expand_start_WHATEVER'
187 and end when you call `expand_end_WHATEVER'. This stack records
188 info about how the construct began that tells the end-function
189 what to do. It also may provide information about the construct
190 to alter the behavior of other constructs within the body.
191 For example, they may affect the behavior of C `break' and `continue'.
193 Each construct gets one `struct nesting' object.
194 All of these objects are chained through the `all' field.
195 `nesting_stack' points to the first object (innermost construct).
196 The position of an entry on `nesting_stack' is in its `depth' field.
198 Each type of construct has its own individual stack.
199 For example, loops have `loop_stack'. Each object points to the
200 next object of the same type through the `next' field.
202 Some constructs are visible to `break' exit-statements and others
203 are not. Which constructs are visible depends on the language.
204 Therefore, the data structure allows each construct to be visible
205 or not, according to the args given when the construct is started.
206 The construct is visible if the `exit_label' field is non-null.
207 In that case, the value should be a CODE_LABEL rtx. */
212 struct nesting *next;
217 /* For conds (if-then and if-then-else statements). */
220 /* Label for the end of the if construct.
221 There is none if EXITFLAG was not set
222 and no `else' has been seen yet. */
224 /* Label for the end of this alternative.
225 This may be the end of the if or the next else/elseif. */
231 /* Label at the top of the loop; place to loop back to. */
233 /* Label at the end of the whole construct. */
235 /* Label before a jump that branches to the end of the whole
236 construct. This is where destructors go if any. */
238 /* Label for `continue' statement to jump to;
239 this is in front of the stepper of the loop. */
242 /* For variable binding contours. */
245 /* Sequence number of this binding contour within the function,
246 in order of entry. */
247 int block_start_count;
248 /* Nonzero => value to restore stack to on exit. Complemented by
249 bc_stack_level (see below) when generating bytecodes. */
251 /* The NOTE that starts this contour.
252 Used by expand_goto to check whether the destination
253 is within each contour or not. */
255 /* Innermost containing binding contour that has a stack level. */
256 struct nesting *innermost_stack_block;
257 /* List of cleanups to be run on exit from this contour.
258 This is a list of expressions to be evaluated.
259 The TREE_PURPOSE of each link is the ..._DECL node
260 which the cleanup pertains to. */
262 /* List of cleanup-lists of blocks containing this block,
263 as they were at the locus where this block appears.
264 There is an element for each containing block,
265 ordered innermost containing block first.
266 The tail of this list can be 0 (was empty_cleanup_list),
267 if all remaining elements would be empty lists.
268 The element's TREE_VALUE is the cleanup-list of that block,
269 which may be null. */
271 /* Chain of labels defined inside this binding contour.
272 For contours that have stack levels or cleanups. */
273 struct label_chain *label_chain;
274 /* Number of function calls seen, as of start of this block. */
275 int function_call_count;
276 /* Bytecode specific: stack level to restore stack to on exit. */
279 /* For switch (C) or case (Pascal) statements,
280 and also for dummies (see `expand_start_case_dummy'). */
283 /* The insn after which the case dispatch should finally
284 be emitted. Zero for a dummy. */
286 /* For bytecodes, the case table is in-lined right in the code.
287 A label is needed for skipping over this block. It is only
288 used when generating bytecodes. */
290 /* A list of case labels; it is first built as an AVL tree.
291 During expand_end_case, this is converted to a list, and may be
292 rearranged into a nearly balanced binary tree. */
293 struct case_node *case_list;
294 /* Label to jump to if no case matches. */
296 /* The expression to be dispatched on. */
298 /* Type that INDEX_EXPR should be converted to. */
300 /* Number of range exprs in case statement. */
302 /* Name of this kind of statement, for warnings. */
304 /* Nonzero if a case label has been seen in this case stmt. */
310 /* Chain of all pending binding contours. */
311 struct nesting *block_stack;
313 /* If any new stacks are added here, add them to POPSTACKS too. */
315 /* Chain of all pending binding contours that restore stack levels
317 struct nesting *stack_block_stack;
319 /* Chain of all pending conditional statements. */
320 struct nesting *cond_stack;
322 /* Chain of all pending loops. */
323 struct nesting *loop_stack;
325 /* Chain of all pending case or switch statements. */
326 struct nesting *case_stack;
328 /* Separate chain including all of the above,
329 chained through the `all' field. */
330 struct nesting *nesting_stack;
332 /* Number of entries on nesting_stack now. */
/* Allocate and return a new `struct nesting' on stmt_obstack.
   It is released again by POPSTACK via obstack_free.
   The expansion is fully parenthesized so that the cast cannot
   regroup with operators at the use site (e.g. `ALLOC_NESTING ()->next'
   would otherwise parse as casting the member access).  */
#define ALLOC_NESTING() \
  ((struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting)))
/* Pop nesting-stack entries one at a time until the entry currently at
   the head of STACK has been removed.  Whenever the node being discarded
   is also the head of one of the per-construct chains (loop, cond,
   block, stack-block, case), that chain is popped in step.  Each node's
   storage is returned to stmt_obstack as it goes.  */
#define POPSTACK(STACK)                                  \
do { struct nesting *stop = STACK;                       \
     struct nesting *node;                               \
     do { node = nesting_stack;                          \
          if (loop_stack == node)                        \
            loop_stack = loop_stack->next;               \
          if (cond_stack == node)                        \
            cond_stack = cond_stack->next;               \
          if (block_stack == node)                       \
            block_stack = block_stack->next;             \
          if (stack_block_stack == node)                 \
            stack_block_stack = stack_block_stack->next; \
          if (case_stack == node)                        \
            case_stack = case_stack->next;               \
          nesting_depth = nesting_stack->depth - 1;      \
          nesting_stack = node->all;                     \
          obstack_free (&stmt_obstack, node); }          \
     while (node != stop); } while (0)
364 /* In some cases it is impossible to generate code for a forward goto
365 until the label definition is seen. This happens when it may be necessary
366 for the goto to reset the stack pointer: we don't yet know how to do that.
367 So expand_goto puts an entry on this fixup list.
368 Each time a binding contour that resets the stack is exited,
370 If the target label has now been defined, we can insert the proper code. */
374 /* Points to following fixup. */
375 struct goto_fixup *next;
376 /* Points to the insn before the jump insn.
377 If more code must be inserted, it goes after this insn. */
379 /* The LABEL_DECL that this jump is jumping to, or 0
380 for break, continue or return. */
382 /* The BLOCK for the place where this goto was found. */
384 /* The CODE_LABEL rtx that this is jumping to. */
386 /* Number of binding contours started in current function
387 before the label reference. */
388 int block_start_count;
389 /* The outermost stack level that should be restored for this jump.
390 Each time a binding contour that resets the stack is exited,
391 if the target label is *not* yet defined, this slot is updated. */
393 /* List of lists of cleanup expressions to be run by this goto.
394 There is one element for each block that this goto is within.
395 The tail of this list can be 0 (was empty_cleanup_list),
396 if all remaining elements would be empty.
397 The TREE_VALUE contains the cleanup list of that block as of the
398 time this goto was seen.
399 The TREE_ADDRESSABLE flag is 1 for a block that has been exited. */
400 tree cleanup_list_list;
402 /* Bytecode specific members follow */
404 /* The label that this jump is jumping to, or 0 for break, continue
406 struct bc_label *bc_target;
408 /* The label we use for the fixup patch */
409 struct bc_label *label;
411 /* True (non-0) if fixup has been handled */
414 /* Like stack_level above, except refers to the interpreter stack */
418 static struct goto_fixup *goto_fixup_chain;
420 /* Within any binding contour that must restore a stack level,
421 all labels are recorded with a chain of these structures. */
425 /* Points to following fixup. */
426 struct label_chain *next;
429 static void expand_goto_internal PROTO((tree, rtx, rtx));
430 static void bc_expand_goto_internal PROTO((enum bytecode_opcode,
431 struct bc_label *, tree));
432 static int expand_fixup PROTO((tree, rtx, rtx));
433 static void bc_expand_fixup PROTO((enum bytecode_opcode,
434 struct bc_label *, int));
435 static void fixup_gotos PROTO((struct nesting *, rtx, tree,
437 static void bc_fixup_gotos PROTO((struct nesting *, int, tree,
439 static void bc_expand_start_cond PROTO((tree, int));
440 static void bc_expand_end_cond PROTO((void));
441 static void bc_expand_start_else PROTO((void));
442 static void bc_expand_end_loop PROTO((void));
443 static void bc_expand_end_bindings PROTO((tree, int, int));
444 static void bc_expand_decl PROTO((tree, tree));
445 static void bc_expand_variable_local_init PROTO((tree));
446 static void bc_expand_decl_init PROTO((tree));
447 static void expand_null_return_1 PROTO((rtx, int));
448 static void expand_value_return PROTO((rtx));
449 static int tail_recursion_args PROTO((tree, tree));
450 static void expand_cleanups PROTO((tree, tree, int, int));
451 static void bc_expand_start_case PROTO((struct nesting *, tree,
453 static int bc_pushcase PROTO((tree, tree));
454 static void bc_check_for_full_enumeration_handling PROTO((tree));
455 static void bc_expand_end_case PROTO((tree));
456 static void do_jump_if_equal PROTO((rtx, rtx, rtx, int));
457 static int estimate_case_costs PROTO((case_node_ptr));
458 static void group_case_nodes PROTO((case_node_ptr));
459 static void balance_case_nodes PROTO((case_node_ptr *,
461 static int node_has_low_bound PROTO((case_node_ptr, tree));
462 static int node_has_high_bound PROTO((case_node_ptr, tree));
463 static int node_is_bounded PROTO((case_node_ptr, tree));
464 static void emit_jump_if_reachable PROTO((rtx));
465 static void emit_case_nodes PROTO((rtx, case_node_ptr, rtx, tree));
466 static int add_case_node PROTO((tree, tree, tree, tree *));
467 static struct case_node *case_tree2list PROTO((case_node *, case_node *));
469 extern rtx bc_allocate_local ();
470 extern rtx bc_allocate_variable_array ();
/* NOTE(review): sampled fragment -- the headers of the two init
   functions below are not visible here.  */
475 gcc_obstack_init (&stmt_obstack);
477 empty_cleanup_list = build_tree_list (NULL_TREE, NULL_TREE);
/* Reset this file's per-function statement-expansion state at the
   start of compiling a new function.  */
482 init_stmt_for_function ()
484 /* We are not currently within any block, conditional, loop or case. */
486 stack_block_stack = 0;
493 block_start_count = 0;
495 /* No gotos have been expanded yet. */
496 goto_fixup_chain = 0;
498 /* We are not processing a ({...}) grouping. */
499 expr_stmts_for_value = 0;
/* Save this file's statement-expansion state into P, field by field.
   restore_stmt_status below performs the mirror-image copies.
   (NOTE(review): the function header is outside this sampled fragment.)  */
507 p->block_stack = block_stack;
508 p->stack_block_stack = stack_block_stack;
509 p->cond_stack = cond_stack;
510 p->loop_stack = loop_stack;
511 p->case_stack = case_stack;
512 p->nesting_stack = nesting_stack;
513 p->nesting_depth = nesting_depth;
514 p->block_start_count = block_start_count;
515 p->last_expr_type = last_expr_type;
516 p->last_expr_value = last_expr_value;
517 p->expr_stmts_for_value = expr_stmts_for_value;
518 p->emit_filename = emit_filename;
519 p->emit_lineno = emit_lineno;
520 p->goto_fixup_chain = goto_fixup_chain;
/* Restore the statement-expansion state previously saved into P:
   copy every field back into the corresponding file-scope variable.  */
524 restore_stmt_status (p)
527 block_stack = p->block_stack;
528 stack_block_stack = p->stack_block_stack;
529 cond_stack = p->cond_stack;
530 loop_stack = p->loop_stack;
531 case_stack = p->case_stack;
532 nesting_stack = p->nesting_stack;
533 nesting_depth = p->nesting_depth;
534 block_start_count = p->block_start_count;
535 last_expr_type = p->last_expr_type;
536 last_expr_value = p->last_expr_value;
537 expr_stmts_for_value = p->expr_stmts_for_value;
538 emit_filename = p->emit_filename;
539 emit_lineno = p->emit_lineno;
540 goto_fixup_chain = p->goto_fixup_chain;
543 /* Emit a no-op instruction. */
550 if (!output_bytecode)
552 last_insn = get_last_insn ();
/* The nop is emitted only when the last insn is a CODE_LABEL, or a
   NOTE with no real insn before it -- the opening of this condition
   is missing from this sampled fragment.  */
554 && (GET_CODE (last_insn) == CODE_LABEL
555 || (GET_CODE (last_insn) == NOTE
556 && prev_real_insn (last_insn) == 0)))
557 emit_insn (gen_nop ());
561 /* Return the rtx-label that corresponds to a LABEL_DECL,
562 creating it if necessary. */
568 if (TREE_CODE (label) != LABEL_DECL)
/* Lazily create the label rtx: reuse DECL_RTL when already set,
   otherwise generate a fresh one and cache it in the decl.  */
571 if (DECL_RTL (label))
572 return DECL_RTL (label);
574 return DECL_RTL (label) = gen_label_rtx ();
577 /* Add an unconditional jump to LABEL as the next sequential instruction. */
/* Flush any pending stack adjustment first so the jump does not skip it.  */
583 do_pending_stack_adjust ();
584 emit_jump_insn (gen_jump (label));
588 /* Emit code to jump to the address
589 specified by the pointer expression EXP. */
592 expand_computed_goto (exp)
/* Bytecode path: evaluate EXP on the interpreter stack, then emit
   the jumpP instruction.  */
597 bc_expand_expr (exp);
598 bc_emit_instruction (jumpP);
/* RTL path: evaluate the pointer, convert it to Pmode where pointers
   are extended, and emit an indirect jump.  */
602 rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
604 #ifdef POINTERS_EXTEND_UNSIGNED
605 x = convert_memory_address (Pmode, x);
609 do_pending_stack_adjust ();
610 emit_indirect_jump (x);
614 /* Handle goto statements and the labels that they can go to. */
616 /* Specify the location in the RTL code of a label LABEL,
617 which is a LABEL_DECL tree node.
619 This is used for the kind of label that the user can jump to with a
620 goto statement, and for alternatives of a switch or case statement.
621 RTL labels generated for loops and conditionals don't go through here;
622 they are generated directly at the RTL level, by other functions below.
624 Note that this has nothing to do with defining label *names*.
625 Languages vary in how they do that and what that even means. */
631 struct label_chain *p;
/* Bytecode: create the bytecode label lazily, then define it here;
   a second definition of the same label is a user error.  */
635 if (! DECL_RTL (label))
636 DECL_RTL (label) = bc_gen_rtx ((char *) 0, 0, bc_get_bytecode_label ());
637 if (! bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (DECL_RTL (label))))
638 error ("multiply defined label");
642 do_pending_stack_adjust ();
643 emit_label (label_rtx (label));
644 if (DECL_NAME (label))
645 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
/* If inside a contour that restores a stack level, record the label on
   that contour's label_chain.  */
647 if (stack_block_stack != 0)
649 p = (struct label_chain *) oballoc (sizeof (struct label_chain));
650 p->next = stack_block_stack->data.block.label_chain;
651 stack_block_stack->data.block.label_chain = p;
656 /* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
657 from nested functions. */
660 declare_nonlocal_label (label)
663 nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
664 LABEL_PRESERVE_P (label_rtx (label)) = 1;
/* First nonlocal label in this function: allocate the stack slot for
   the nonlocal-goto handler address, and save the stack level just
   before tail_recursion_reentry (SAVE_NONLOCAL save area -- presumably
   so nested functions can restore it; confirm against callers).  */
665 if (nonlocal_goto_handler_slot == 0)
667 nonlocal_goto_handler_slot
668 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
669 emit_stack_save (SAVE_NONLOCAL,
670 &nonlocal_goto_stack_level,
671 PREV_INSN (tail_recursion_reentry));
675 /* Generate RTL code for a `goto' statement with target label LABEL.
676 LABEL should be a LABEL_DECL tree node that was or will later be
677 defined with `expand_label'. */
/* NOTE(review): sampled fragment -- the function header and several
   connecting lines are not visible here.  */
687 expand_goto_internal (label, label_rtx (label), NULL_RTX);
691 /* Check for a nonlocal goto to a containing function. */
692 context = decl_function_context (label);
693 if (context != 0 && context != current_function_decl)
695 struct function *p = find_function_data (context);
696 rtx label_ref = gen_rtx (LABEL_REF, Pmode, label_rtx (label));
/* Mark both sides: the containing function has a nonlocal label,
   and this function performs a nonlocal goto.  */
699 p->has_nonlocal_label = 1;
700 current_function_has_nonlocal_goto = 1;
701 LABEL_REF_NONLOCAL_P (label_ref) = 1;
703 /* Copy the rtl for the slots so that they won't be shared in
704 case the virtual stack vars register gets instantiated differently
705 in the parent than in the child. */
707 #if HAVE_nonlocal_goto
708 if (HAVE_nonlocal_goto)
709 emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
710 copy_rtx (p->nonlocal_goto_handler_slot),
711 copy_rtx (p->nonlocal_goto_stack_level),
718 /* Restore frame pointer for containing function.
719 This sets the actual hard register used for the frame pointer
720 to the location of the function's incoming static chain info.
721 The non-local goto handler will then adjust it to contain the
722 proper value and reload the argument pointer, if needed. */
723 emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));
725 /* We have now loaded the frame pointer hardware register with
726 the address of that corresponds to the start of the virtual
727 stack vars. So replace virtual_stack_vars_rtx in all
728 addresses we use with stack_pointer_rtx. */
730 /* Get addr of containing function's current nonlocal goto handler,
731 which will do any cleanups and then jump to the label. */
732 addr = copy_rtx (p->nonlocal_goto_handler_slot);
733 temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
734 hard_frame_pointer_rtx));
736 /* Restore the stack pointer. Note this uses fp just restored. */
737 addr = p->nonlocal_goto_stack_level;
739 addr = replace_rtx (copy_rtx (addr),
740 virtual_stack_vars_rtx,
741 hard_frame_pointer_rtx);
743 emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);
745 /* Put in the static chain register the nonlocal label address. */
746 emit_move_insn (static_chain_rtx, label_ref);
747 /* USE of hard_frame_pointer_rtx added for consistency; not clear if
749 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
750 emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
751 emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
752 emit_indirect_jump (temp);
/* Ordinary local goto: defer to expand_goto_internal.  */
756 expand_goto_internal (label, label_rtx (label), NULL_RTX);
759 /* Generate RTL code for a `goto' statement with target label BODY.
760 LABEL should be a LABEL_REF.
761 LAST_INSN, if non-0, is the rtx we should consider as the last
762 insn emitted (for the purposes of cleaning up a return). */
/* NOTE(review): sampled fragment -- the function header and several
   connecting lines are not visible here.  */
765 expand_goto_internal (body, label, last_insn)
770 struct nesting *block;
773 /* NOTICE! If a bytecode instruction other than `jump' is needed,
774 then the caller has to call bc_expand_goto_internal()
775 directly. This is rather an exceptional case, and there aren't
776 that many places where this is necessary. */
779 expand_goto_internal (body, label, last_insn);
783 if (GET_CODE (label) != CODE_LABEL)
786 /* If label has already been defined, we can tell now
787 whether and how we must alter the stack level. */
789 if (PREV_INSN (label) != 0)
791 /* Find the innermost pending block that contains the label.
792 (Check containment by comparing insn-uids.)
793 Then restore the outermost stack level within that block,
794 and do cleanups of all blocks contained in it. */
795 for (block = block_stack; block; block = block->next)
797 if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
799 if (block->data.block.stack_level != 0)
800 stack_level = block->data.block.stack_level;
801 /* Execute the cleanups for blocks we are exiting. */
802 if (block->data.block.cleanups != 0)
804 expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
805 do_pending_stack_adjust ();
811 /* Ensure stack adjust isn't done by emit_jump, as this would clobber
812 the stack pointer. This one should be deleted as dead by flow. */
813 clear_pending_stack_adjust ();
814 do_pending_stack_adjust ();
815 emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
/* Jumping backward into a binding contour that is too late to enter.  */
818 if (body != 0 && DECL_TOO_LATE (body))
819 error ("jump to `%s' invalidly jumps into binding contour",
820 IDENTIFIER_POINTER (DECL_NAME (body)));
822 /* Label not yet defined: may need to put this goto
823 on the fixup list. */
824 else if (! expand_fixup (body, label, last_insn))
826 /* No fixup needed. Record that the label is the target
827 of at least one goto that has no fixup. */
829 TREE_ADDRESSABLE (body) = 1;
835 /* Generate a jump with OPCODE to the given bytecode LABEL which is
836 found within BODY. */
/* NOTE(review): sampled fragment -- several connecting lines of this
   function are not visible here.  */
839 bc_expand_goto_internal (opcode, label, body)
840 enum bytecode_opcode opcode;
841 struct bc_label *label;
/* stack_level stays -1 when no enclosing block records one.  */
844 struct nesting *block;
845 int stack_level = -1;
847 /* If the label is defined, adjust the stack as necessary.
848 If it's not defined, we have to push the reference on the
854 /* Find the innermost pending block that contains the label.
855 (Check containment by comparing bytecode uids.) Then restore the
856 outermost stack level within that block. */
858 for (block = block_stack; block; block = block->next)
860 if (BYTECODE_BC_LABEL (block->data.block.first_insn)->uid < label->uid)
862 if (block->data.block.bc_stack_level)
863 stack_level = block->data.block.bc_stack_level;
865 /* Execute the cleanups for blocks we are exiting. */
866 if (block->data.block.cleanups != 0)
868 expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
869 do_pending_stack_adjust ();
873 /* Restore the stack level. If we need to adjust the stack, we
874 must do so after the jump, since the jump may depend on
875 what's on the stack. Thus, any stack-modifying conditional
876 jumps (these are the only ones that rely on what's on the
877 stack) go into the fixup list. */
880 && stack_depth != stack_level
883 bc_expand_fixup (opcode, label, stack_level);
886 if (stack_level >= 0)
887 bc_adjust_stack (stack_depth - stack_level);
889 if (body && DECL_BIT_FIELD (body))
890 error ("jump to `%s' invalidly jumps into binding contour",
891 IDENTIFIER_POINTER (DECL_NAME (body)));
893 /* Emit immediate jump */
894 bc_emit_bytecode (opcode);
895 bc_emit_bytecode_labelref (label);
897 #ifdef DEBUG_PRINT_CODE
898 fputc ('\n', stderr);
903 /* Put goto in the fixup list */
904 bc_expand_fixup (opcode, label, stack_level);
907 /* Generate if necessary a fixup for a goto
908 whose target label in tree structure (if any) is TREE_LABEL
909 and whose target in rtl is RTL_LABEL.
911 If LAST_INSN is nonzero, we pretend that the jump appears
912 after insn LAST_INSN instead of at the current point in the insn stream.
914 The fixup will be used later to insert insns just before the goto.
915 Those insns will restore the stack level as appropriate for the
916 target label, and will (in the case of C++) also invoke any object
917 destructors which have to be invoked when we exit the scopes which
918 are exited by the goto.
920 Value is nonzero if a fixup is made. */
/* NOTE(review): sampled fragment -- the function header and several
   connecting lines are not visible here.  */
923 expand_fixup (tree_label, rtl_label, last_insn)
928 struct nesting *block, *end_block;
930 /* See if we can recognize which block the label will be output in.
931 This is possible in some very common cases.
932 If we succeed, set END_BLOCK to that block.
933 Otherwise, set it to 0. */
936 && (rtl_label == cond_stack->data.cond.endif_label
937 || rtl_label == cond_stack->data.cond.next_label))
938 end_block = cond_stack;
939 /* If we are in a loop, recognize certain labels which
940 are likely targets. This reduces the number of fixups
941 we need to create. */
943 && (rtl_label == loop_stack->data.loop.start_label
944 || rtl_label == loop_stack->data.loop.end_label
945 || rtl_label == loop_stack->data.loop.continue_label))
946 end_block = loop_stack;
950 /* Now set END_BLOCK to the binding level to which we will return. */
954 struct nesting *next_block = end_block->all;
957 /* First see if the END_BLOCK is inside the innermost binding level.
958 If so, then no cleanups or stack levels are relevant. */
959 while (next_block && next_block != block)
960 next_block = next_block->all;
965 /* Otherwise, set END_BLOCK to the innermost binding level
966 which is outside the relevant control-structure nesting. */
967 next_block = block_stack->next;
968 for (block = block_stack; block != end_block; block = block->all)
969 if (block == next_block)
970 next_block = next_block->next;
971 end_block = next_block;
974 /* Does any containing block have a stack level or cleanups?
975 If not, no fixup is needed, and that is the normal case
976 (the only case, for standard C). */
977 for (block = block_stack; block != end_block; block = block->next)
978 if (block->data.block.stack_level != 0
979 || block->data.block.cleanups != 0)
982 if (block != end_block)
984 /* Ok, a fixup is needed. Add a fixup to the list of such. */
985 struct goto_fixup *fixup
986 = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
987 /* In case an old stack level is restored, make sure that comes
988 after any pending stack adjust. */
989 /* ?? If the fixup isn't to come at the present position,
990 doing the stack adjust here isn't useful. Doing it with our
991 settings at that location isn't useful either. Let's hope
994 do_pending_stack_adjust ();
995 fixup->target = tree_label;
996 fixup->target_rtl = rtl_label;
998 /* Create a BLOCK node and a corresponding matched set of
999 NOTE_INSN_BEGIN_BLOCK and NOTE_INSN_END_BLOCK notes at
1000 this point. The notes will encapsulate any and all fixup
1001 code which we might later insert at this point in the insn
1002 stream. Also, the BLOCK node will be the parent (i.e. the
1003 `SUPERBLOCK') of any other BLOCK nodes which we might create
1004 later on when we are expanding the fixup code. */
1007 register rtx original_before_jump
1008 = last_insn ? last_insn : get_last_insn ();
1012 fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
1013 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
1014 fixup->context = poplevel (1, 0, 0); /* Create the BLOCK node now! */
1016 emit_insns_after (fixup->before_jump, original_before_jump);
1019 fixup->block_start_count = block_start_count;
1020 fixup->stack_level = 0;
/* Record the cleanup lists of this block and its containing blocks,
   if any are non-empty; otherwise leave cleanup_list_list empty.  */
1021 fixup->cleanup_list_list
1022 = (((block->data.block.outer_cleanups
1024 && block->data.block.outer_cleanups != empty_cleanup_list
1027 || block->data.block.cleanups)
1028 ? tree_cons (NULL_TREE, block->data.block.cleanups,
1029 block->data.block.outer_cleanups)
/* Link the new fixup at the head of the chain.  */
1031 fixup->next = goto_fixup_chain;
1032 goto_fixup_chain = fixup;
1039 /* Generate bytecode jump with OPCODE to a fixup routine that links to LABEL.
1040 Make the fixup restore the stack level to STACK_LEVEL. */
1043 bc_expand_fixup (opcode, label, stack_level)
1044 enum bytecode_opcode opcode;
1045 struct bc_label *label;
/* Allocate the fixup record and fill in the bytecode-specific fields;
   it is marked unhandled until the fixup pass processes it.  */
1048 struct goto_fixup *fixup
1049 = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
1051 fixup->label = bc_get_bytecode_label ();
1052 fixup->bc_target = label;
1053 fixup->bc_stack_level = stack_level;
1054 fixup->bc_handled = FALSE;
/* Link it at the head of the chain of pending fixups.  */
1056 fixup->next = goto_fixup_chain;
1057 goto_fixup_chain = fixup;
1059 /* Insert a jump to the fixup code */
1060 bc_emit_bytecode (opcode);
1061 bc_emit_bytecode_labelref (fixup->label);
1063 #ifdef DEBUG_PRINT_CODE
1064 fputc ('\n', stderr);
1068 /* Expand any needed fixups in the outputmost binding level of the
1069 function. FIRST_INSN is the first insn in the function. */
1072 expand_fixups (first_insn)
/* Delegate to fixup_gotos with no block, no stack level, and no cleanup
   list: this finalizes every fixup still pending at function end.  */
1075 fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
1078 /* When exiting a binding contour, process all pending gotos requiring fixups.
1079 THISBLOCK is the structure that describes the block being exited.
1080 STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
1081 CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
1082 FIRST_INSN is the insn that began this contour.
1084 Gotos that jump out of this contour must restore the
1085 stack level and do the cleanups before actually jumping.
1087 DONT_JUMP_IN nonzero means report error there is a jump into this
1088 contour from before the beginning of the contour.
1089 This is also done if STACK_LEVEL is nonzero. */
1092 fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
1093 struct nesting *thisblock;
1099 register struct goto_fixup *f, *prev;
/* Bytecode compilation uses a parallel routine; see note below about the
   second argument.  */
1101 if (output_bytecode)
1103 /* ??? The second arg is the bc stack level, which is not the same
1104 as STACK_LEVEL. I have no idea what should go here, so I'll
1106 bc_fixup_gotos (thisblock, 0, cleanup_list, first_insn, dont_jump_in);
1110 /* F is the fixup we are considering; PREV is the previous one. */
1111 /* We run this loop in two passes so that cleanups of exited blocks
1112 are run first, and blocks that are exited are marked so
/* PASS 1: finalize fixups whose target label is now defined.  */
1115 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1117 /* Test for a fixup that is inactive because it is already handled. */
1118 if (f->before_jump == 0)
1120 /* Delete inactive fixup from the chain, if that is easy to do. */
1122 prev->next = f->next;
1124 /* Has this fixup's target label been defined?
1125 If so, we can finalize it. */
/* PREV_INSN nonzero means the CODE_LABEL has been emitted into the
   insn stream, i.e. the label is now defined.  */
1126 else if (PREV_INSN (f->target_rtl) != 0)
1128 register rtx cleanup_insns;
1130 /* Get the first non-label after the label
1131 this goto jumps to. If that's before this scope begins,
1132 we don't have a jump into the scope. */
1133 rtx after_label = f->target_rtl;
1134 while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
1135 after_label = NEXT_INSN (after_label);
1137 /* If this fixup jumped into this contour from before the beginning
1138 of this contour, report an error. */
1139 /* ??? Bug: this does not detect jumping in through intermediate
1140 blocks that have stack levels or cleanups.
1141 It detects only a problem with the innermost block
1142 around the label. */
/* NOTE(review): the extraction is missing the condition line(s)
   preceding this `&&` chain — the full guard cannot be seen here.  */
1144 && (dont_jump_in || stack_level || cleanup_list)
1145 /* If AFTER_LABEL is 0, it means the jump goes to the end
1146 of the rtl, which means it jumps into this scope. */
1147 && (after_label == 0
1148 || INSN_UID (first_insn) < INSN_UID (after_label))
1149 && INSN_UID (first_insn) > INSN_UID (f->before_jump)
1150 && ! DECL_ERROR_ISSUED (f->target))
1152 error_with_decl (f->target,
1153 "label `%s' used before containing binding contour");
1154 /* Prevent multiple errors for one label. */
1155 DECL_ERROR_ISSUED (f->target) = 1;
1158 /* We will expand the cleanups into a sequence of their own and
1159 then later on we will attach this new sequence to the insn
1160 stream just ahead of the actual jump insn. */
1164 /* Temporarily restore the lexical context where we will
1165 logically be inserting the fixup code. We do this for the
1166 sake of getting the debugging information right. */
1169 set_block (f->context);
1171 /* Expand the cleanups for blocks this jump exits. */
1172 if (f->cleanup_list_list)
1175 for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
1176 /* Marked elements correspond to blocks that have been closed.
1177 Do their cleanups. */
1178 if (TREE_ADDRESSABLE (lists)
1179 && TREE_VALUE (lists) != 0)
1181 expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
1182 /* Pop any pushes done in the cleanups,
1183 in case function is about to return. */
1184 do_pending_stack_adjust ();
1188 /* Restore stack level for the biggest contour that this
1189 jump jumps out of. */
1191 emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);
1193 /* Finish up the sequence containing the insns which implement the
1194 necessary cleanups, and then attach that whole sequence to the
1195 insn stream just ahead of the actual jump insn. Attaching it
1196 at that point insures that any cleanups which are in fact
1197 implicit C++ object destructions (which must be executed upon
1198 leaving the block) appear (to the debugger) to be taking place
1199 in an area of the generated code where the object(s) being
1200 destructed are still "in scope". */
1202 cleanup_insns = get_insns ();
1206 emit_insns_after (cleanup_insns, f->before_jump);
1213 /* For any still-undefined labels, do the cleanups for this block now.
1214 We must do this now since items in the cleanup list may go out
1215 of scope when the block ends. */
/* PASS 2: fixups still pending (label undefined).  Run this block's
   cleanups now and record the stack level for later restoration.  */
1216 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1217 if (f->before_jump != 0
1218 && PREV_INSN (f->target_rtl) == 0
1219 /* Label has still not appeared. If we are exiting a block with
1220 a stack level to restore, that started before the fixup,
1221 mark this stack level as needing restoration
1222 when the fixup is later finalized. */
1224 /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
1225 means the label is undefined. That's erroneous, but possible. */
1226 && (thisblock->data.block.block_start_count
1227 <= f->block_start_count))
1229 tree lists = f->cleanup_list_list;
1232 for (; lists; lists = TREE_CHAIN (lists))
1233 /* If the following elt. corresponds to our containing block
1234 then the elt. must be for this block. */
1235 if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
1239 set_block (f->context);
1240 expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
1241 do_pending_stack_adjust ();
1242 cleanup_insns = get_insns ();
/* NOTE(review): the assignment target for this `=` is on a line
   missing from the extraction (presumably f->before_jump).  */
1246 = emit_insns_after (cleanup_insns, f->before_jump);
/* Drop the list element we just handled.  */
1248 f->cleanup_list_list = TREE_CHAIN (lists);
1252 f->stack_level = stack_level;
1257 /* When exiting a binding contour, process all pending gotos requiring fixups.
1258 Note: STACK_DEPTH is not altered.
1260 The arguments are currently not used in the bytecode compiler, but we may
1261 need them one day for languages other than C.
1263 THISBLOCK is the structure that describes the block being exited.
1264 STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
1265 CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
1266 FIRST_INSN is the insn that began this contour.
1268 Gotos that jump out of this contour must restore the
1269 stack level and do the cleanups before actually jumping.
1271 DONT_JUMP_IN nonzero means report error there is a jump into this
1272 contour from before the beginning of the contour.
1273 This is also done if STACK_LEVEL is nonzero. */
1276 bc_fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
1277 struct nesting *thisblock;
1283 register struct goto_fixup *f, *prev;
1284 int saved_stack_depth;
1286 /* F is the fixup we are considering; PREV is the previous one. */
1288 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1290 /* Test for a fixup that is inactive because it is already handled. */
1291 if (f->before_jump == 0)
1293 /* Delete inactive fixup from the chain, if that is easy to do. */
1295 prev->next = f->next;
1298 /* Emit code to restore the stack and continue */
/* Define the fixup routine's own label here, then emit the stack
   adjustment followed by a jump to the real target.  */
1299 bc_emit_bytecode_labeldef (f->label);
1301 /* Save stack_depth across call, since bc_adjust_stack () will alter
1302 the perceived stack depth via the instructions generated. */
/* bc_stack_level < 0 marks "no level recorded"; skip adjustment then.  */
1304 if (f->bc_stack_level >= 0)
1306 saved_stack_depth = stack_depth;
1307 bc_adjust_stack (stack_depth - f->bc_stack_level);
1308 stack_depth = saved_stack_depth;
1311 bc_emit_bytecode (jump);
1312 bc_emit_bytecode_labelref (f->bc_target);
1314 #ifdef DEBUG_PRINT_CODE
1315 fputc ('\n', stderr);
/* All fixups handled; reset the chain for the next function.  */
1319 goto_fixup_chain = NULL;
1322 /* Generate RTL for an asm statement (explicit assembler code).
1323 BODY is a STRING_CST node containing the assembler code text,
1324 or an ADDR_EXPR containing a STRING_CST. */
/* NOTE(review): the function header line (`expand_asm (body)`) is missing
   from this extraction; the statements below are its body.  */
1330 if (output_bytecode)
1332 error ("`asm' is invalid when generating bytecode");
/* Strip the ADDR_EXPR wrapper to reach the STRING_CST itself.  */
1336 if (TREE_CODE (body) == ADDR_EXPR)
1337 body = TREE_OPERAND (body, 0);
/* A plain asm with no operands becomes a bare ASM_INPUT insn.  */
1339 emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
1340 TREE_STRING_POINTER (body)));
1344 /* Generate RTL for an asm statement with arguments.
1345 STRING is the instruction template.
1346 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
1347 Each output or input has an expression in the TREE_VALUE and
1348 a constraint-string in the TREE_PURPOSE.
1349 CLOBBERS is a list of STRING_CST nodes each naming a hard register
1350 that is clobbered by this insn.
1352 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
1353 Some elements of OUTPUTS may be replaced with trees representing temporary
1354 values. The caller should copy those temporary values to the originally
1357 VOL nonzero means the insn is volatile; don't optimize it. */
1360 expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
1361 tree string, outputs, inputs, clobbers;
1366 rtvec argvec, constraints;
1368 int ninputs = list_length (inputs);
1369 int noutputs = list_length (outputs);
1373 /* Vector of RTX's of evaluated output operands. */
1374 rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1375 /* The insn we have emitted. */
1378 if (output_bytecode)
1380 error ("`asm' is invalid when generating bytecode");
1384 /* Count the number of meaningful clobbered registers, ignoring what
1385 we would ignore later. */
1387 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1389 char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1390 i = decode_reg_name (regname);
/* decode_reg_name: >= 0 is a hard register, -4 means `memory';
   both count as real clobbers.  */
1391 if (i >= 0 || i == -4)
1394 error ("unknown register name `%s' in `asm'", regname);
/* Scan the output operands, validating constraints and deciding
   whether each lvalue can be stored into directly.  */
1399 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1401 tree val = TREE_VALUE (tail);
1402 tree type = TREE_TYPE (val);
1405 int found_equal = 0;
1408 /* If there's an erroneous arg, emit no insn. */
1409 if (TREE_TYPE (val) == error_mark_node)
1412 /* Make sure constraint has `=' and does not have `+'. Also, see
1413 if it allows any register. Be liberal on the latter test, since
1414 the worst that happens if we get it wrong is we issue an error
1417 for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1; j++)
1418 switch (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j])
1421 error ("output operand constraint contains `+'");
/* Constraint letters that do NOT allow a register fall here;
   register-allowing letters are handled in the default case.  */
1428 case '?': case '!': case '*': case '%': case '&':
1429 case 'V': case 'm': case 'o': case '<': case '>':
1430 case 'E': case 'F': case 'G': case 'H': case 'X':
1431 case 's': case 'i': case 'n':
1432 case 'I': case 'J': case 'K': case 'L': case 'M':
1433 case 'N': case 'O': case 'P': case ',':
1434 #ifdef EXTRA_CONSTRAINT
1435 case 'Q': case 'R': case 'S': case 'T': case 'U':
1439 case 'p': case 'g': case 'r':
1440 /* Whether or not a numeric constraint allows a register is
1441 decided by the matching constraint, and so there is no need
1442 to do anything special with them. We must handle them in
1443 the default case, so that we don't unnecessarily force
1444 operands to memory. */
1445 case '0': case '1': case '2': case '3': case '4':
1453 error ("output operand constraint lacks `='");
1457 /* If an output operand is not a decl or indirect ref and our constraint
1458 allows a register, make a temporary to act as an intermediate.
1459 Make the asm insn write into that, then our caller will copy it to
1460 the real output operand. Likewise for promoted variables. */
1462 if (TREE_CODE (val) == INDIRECT_REF
1463 || (TREE_CODE_CLASS (TREE_CODE (val)) == 'd'
1464 && ! (GET_CODE (DECL_RTL (val)) == REG
1465 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
1469 mark_addressable (TREE_VALUE (tail));
/* NOTE(review): the assignment target for this `=` is on a line
   missing from the extraction (presumably output_rtx[i]).  */
1472 = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
1474 if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
1475 error ("output number %d not directly addressable", i);
/* Otherwise, write into a temporary and let the caller copy back.  */
1479 output_rtx[i] = assign_temp (type, 0, 0, 0);
1480 TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
1484 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1486 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1490 /* Make vectors for the expression-rtx and constraint strings. */
1492 argvec = rtvec_alloc (ninputs);
1493 constraints = rtvec_alloc (ninputs);
1495 body = gen_rtx (ASM_OPERANDS, VOIDmode,
1496 TREE_STRING_POINTER (string), "", 0, argvec, constraints,
1498 MEM_VOLATILE_P (body) = vol;
1500 /* Eval the inputs and put them into ARGVEC.
1501 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1504 for (tail = inputs; tail; tail = TREE_CHAIN (tail))
1509 /* If there's an erroneous arg, emit no insn,
1510 because the ASM_INPUT would get VOIDmode
1511 and that could cause a crash in reload. */
1512 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
/* A NULL TREE_PURPOSE marks a hard register wrongly listed as input.  */
1514 if (TREE_PURPOSE (tail) == NULL_TREE)
1516 error ("hard register `%s' listed as input operand to `asm'",
1517 TREE_STRING_POINTER (TREE_VALUE (tail)) );
1521 /* Make sure constraint has neither `=' nor `+'. */
1523 for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1; j++)
1524 switch (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j])
1527 error ("input operand constraint contains `%c'",
1528 TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
1531 case '?': case '!': case '*': case '%': case '&':
1532 case 'V': case 'm': case 'o': case '<': case '>':
1533 case 'E': case 'F': case 'G': case 'H': case 'X':
1534 case 's': case 'i': case 'n':
1535 case 'I': case 'J': case 'K': case 'L': case 'M':
1536 case 'N': case 'O': case 'P': case ',':
1537 #ifdef EXTRA_CONSTRAINT
1538 case 'Q': case 'R': case 'S': case 'T': case 'U':
1542 case 'p': case 'g': case 'r':
1543 /* Whether or not a numeric constraint allows a register is
1544 decided by the matching constraint, and so there is no need
1545 to do anything special with them. We must handle them in
1546 the default case, so that we don't unnecessarily force
1547 operands to memory. */
1548 case '0': case '1': case '2': case '3': case '4':
1555 mark_addressable (TREE_VALUE (tail));
1557 XVECEXP (body, 3, i) /* argvec */
1558 = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
/* A constant that isn't a valid general operand in the operand's
   mode must be forced into a register or the constant pool.  */
1559 if (CONSTANT_P (XVECEXP (body, 3, i))
1560 && ! general_operand (XVECEXP (body, 3, i),
1561 TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)))))
1564 XVECEXP (body, 3, i)
1565 = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1566 XVECEXP (body, 3, i));
1568 XVECEXP (body, 3, i)
1569 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1570 XVECEXP (body, 3, i));
/* NOTE(review): the first clause of this condition is on a line
   missing from the extraction (presumably `if (! allows_reg ...`).  */
1574 && (GET_CODE (XVECEXP (body, 3, i)) == REG
1575 || GET_CODE (XVECEXP (body, 3, i)) == SUBREG
1576 || GET_CODE (XVECEXP (body, 3, i)) == CONCAT))
1578 tree type = TREE_TYPE (TREE_VALUE (tail));
1579 rtx memloc = assign_temp (type, 1, 1, 1);
1581 emit_move_insn (memloc, XVECEXP (body, 3, i));
1582 XVECEXP (body, 3, i) = memloc;
1585 XVECEXP (body, 4, i) /* constraints */
1586 = gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1587 TREE_STRING_POINTER (TREE_PURPOSE (tail)));
1591 /* Protect all the operands from the queue,
1592 now that they have all been evaluated. */
1594 for (i = 0; i < ninputs; i++)
1595 XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);
1597 for (i = 0; i < noutputs; i++)
1598 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1600 /* Now, for each output, construct an rtx
1601 (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
1602 ARGVEC CONSTRAINTS))
1603 If there is more than one, put them inside a PARALLEL. */
1605 if (noutputs == 1 && nclobbers == 0)
1607 XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
1608 insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));
1610 else if (noutputs == 0 && nclobbers == 0)
1612 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1613 insn = emit_insn (body);
/* General case: wrap the SETs and CLOBBERs in a PARALLEL.  */
1619 if (num == 0) num = 1;
1620 body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));
1622 /* For each output operand, store a SET. */
1624 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1626 XVECEXP (body, 0, i)
1627 = gen_rtx (SET, VOIDmode,
1629 gen_rtx (ASM_OPERANDS, VOIDmode,
1630 TREE_STRING_POINTER (string),
1631 TREE_STRING_POINTER (TREE_PURPOSE (tail)),
1632 i, argvec, constraints,
1634 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1637 /* If there are no outputs (but there are some clobbers)
1638 store the bare ASM_OPERANDS into the PARALLEL. */
1641 XVECEXP (body, 0, i++) = obody;
1643 /* Store (clobber REG) for each clobbered register specified. */
1645 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1647 char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1648 int j = decode_reg_name (regname);
1652 if (j == -3) /* `cc', which is not a register */
1655 if (j == -4) /* `memory', don't cache memory across asm */
1657 XVECEXP (body, 0, i++)
1658 = gen_rtx (CLOBBER, VOIDmode,
1659 gen_rtx (MEM, BLKmode,
1660 gen_rtx (SCRATCH, VOIDmode, 0)));
1664 /* Ignore unknown register, error already signalled. */
1668 /* Use QImode since that's guaranteed to clobber just one reg. */
1669 XVECEXP (body, 0, i++)
1670 = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));
1673 insn = emit_insn (body);
1679 /* Generate RTL to evaluate the expression EXP
1680 and remember it in case this is the VALUE in a ({... VALUE; }) constr. */
1683 expand_expr_stmt (exp)
/* Bytecode path: expand, then rebalance the interpreter stack depth.  */
1686 if (output_bytecode)
1688 int org_stack_depth = stack_depth;
1690 bc_expand_expr (exp);
1692 /* Restore stack depth */
1693 if (stack_depth < org_stack_depth)
1696 bc_emit_instruction (drop);
1698 last_expr_type = TREE_TYPE (exp);
1702 /* If -W, warn about statements with no side effects,
1703 except for an explicit cast to void (e.g. for assert()), and
1704 except inside a ({...}) where they may be useful. */
1705 if (expr_stmts_for_value == 0 && exp != error_mark_node)
1707 if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
1708 && !(TREE_CODE (exp) == CONVERT_EXPR
1709 && TREE_TYPE (exp) == void_type_node))
1710 warning_with_file_and_line (emit_filename, emit_lineno,
1711 "statement with no effect");
1712 else if (warn_unused)
1713 warn_if_unused_value (exp);
1716 /* If EXP is of function type and we are expanding statements for
1717 value, convert it to pointer-to-function. */
1718 if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
1719 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
1721 last_expr_type = TREE_TYPE (exp);
/* Inside a ({...}) the value may be needed; otherwise discard into
   const0_rtx as the target.  */
1722 if (! flag_syntax_only)
1723 last_expr_value = expand_expr (exp,
1724 (expr_stmts_for_value
1725 ? NULL_RTX : const0_rtx),
1728 /* If all we do is reference a volatile value in memory,
1729 copy it to a register to be sure it is actually touched. */
1730 if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
1731 && TREE_THIS_VOLATILE (exp))
1733 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
1735 else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
1736 copy_to_reg (last_expr_value);
/* BLKmode volatile reference: force a touch via a self-comparison
   and a never-taken-meaningful conditional jump.  */
1739 rtx lab = gen_label_rtx ();
1741 /* Compare the value with itself to reference it. */
1742 emit_cmp_insn (last_expr_value, last_expr_value, EQ,
1743 expand_expr (TYPE_SIZE (last_expr_type),
1744 NULL_RTX, VOIDmode, 0),
1746 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
1747 emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
1752 /* If this expression is part of a ({...}) and is in memory, we may have
1753 to preserve temporaries. */
1754 preserve_temp_slots (last_expr_value);
1756 /* Free any temporaries used to evaluate this expression. Any temporary
1757 used as a result of this expression will already have been preserved
1764 /* Warn if EXP contains any computations whose results are not used.
1765 Return 1 if a warning is printed; 0 otherwise. */
1768 warn_if_unused_value (exp)
/* TREE_USED set means this node was already warned about (or used).  */
1771 if (TREE_USED (exp))
1774 switch (TREE_CODE (exp))
/* These codes inherently have side effects: never warn.  */
1776 case PREINCREMENT_EXPR:
1777 case POSTINCREMENT_EXPR:
1778 case PREDECREMENT_EXPR:
1779 case POSTDECREMENT_EXPR:
1784 case METHOD_CALL_EXPR:
1786 case WITH_CLEANUP_EXPR:
1788 /* We don't warn about COND_EXPR because it may be a useful
1789 construct if either arm contains a side effect. */
1794 /* For a binding, warn if no side effect within it. */
1795 return warn_if_unused_value (TREE_OPERAND (exp, 1))
1798 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1800 case TRUTH_ORIF_EXPR:
1801 case TRUTH_ANDIF_EXPR:
1802 /* In && or ||, warn if 2nd operand has no side effect. */
1803 return warn_if_unused_value (TREE_OPERAND (exp, 1));
/* COMPOUND_EXPR (comma): check both halves.  */
1806 if (TREE_NO_UNUSED_WARNING (exp))
1808 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
1810 /* Let people do `(foo (), 0)' without a warning. */
1811 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1813 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1817 case NON_LVALUE_EXPR:
1818 /* Don't warn about values cast to void. */
1819 if (TREE_TYPE (exp) == void_type_node)
1821 /* Don't warn about conversions not explicit in the user's program. */
1822 if (TREE_NO_UNUSED_WARNING (exp))
1824 /* Assignment to a cast usually results in a cast of a modify.
1825 Don't complain about that. There can be an arbitrary number of
1826 casts before the modify, so we must loop until we find the first
1827 non-cast expression and then test to see if that is a modify. */
1829 tree tem = TREE_OPERAND (exp, 0);
1831 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
1832 tem = TREE_OPERAND (tem, 0);
1834 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
1835 || TREE_CODE (tem) == CALL_EXPR)
1841 /* Don't warn about automatic dereferencing of references, since
1842 the user cannot control it. */
1843 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1844 return warn_if_unused_value (TREE_OPERAND (exp, 0));
1845 /* ... fall through ... */
/* Default: anything volatile counts as a side effect.  */
1848 /* Referencing a volatile value is a side effect, so don't warn. */
1849 if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
1850 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1851 && TREE_THIS_VOLATILE (exp))
1854 warning_with_file_and_line (emit_filename, emit_lineno,
1855 "value computed is not used");
1860 /* Clear out the memory of the last expression evaluated. */
1868 /* Begin a statement which will return a value.
1869 Return the RTL_EXPR for this statement expr.
1870 The caller must save that value and pass it to expand_end_stmt_expr. */
1873 expand_start_stmt_expr ()
1878 /* When generating bytecode just note down the stack depth */
1879 if (output_bytecode)
1880 return (build_int_2 (stack_depth, 0));
1882 /* Make the RTL_EXPR node temporary, not momentary,
1883 so that rtl_expr_chain doesn't become garbage. */
1884 momentary = suspend_momentary ();
1885 t = make_node (RTL_EXPR);
1886 resume_momentary (momentary);
/* Flush pending stack adjusts, then divert insn emission into a
   sequence owned by this RTL_EXPR.  */
1887 do_pending_stack_adjust ();
1888 start_sequence_for_rtl_expr (t);
/* Record that statement-expressions are being expanded for value
   (checked by expand_expr_stmt); balanced in expand_end_stmt_expr.  */
1890 expr_stmts_for_value++;
1894 /* Restore the previous state at the end of a statement that returns a value.
1895 Returns a tree node representing the statement's value and the
1896 insns to compute the value.
1898 The nodes of that expression have been freed by now, so we cannot use them.
1899 But we don't want to do that anyway; the expression has already been
1900 evaluated and now we just want to use the value. So generate a RTL_EXPR
1901 with the proper type and RTL value.
1903 If the last substatement was not an expression,
1904 return something with type `void'. */
1907 expand_end_stmt_expr (t)
1910 if (output_bytecode)
1916 /* At this point, all expressions have been evaluated in order.
1917 However, all expression values have been popped when evaluated,
1918 which means we have to recover the last expression value. This is
1919 the last value removed by means of a `drop' instruction. Instead
1920 of adding code to inhibit dropping the last expression value, it
1921 is here recovered by undoing the `drop'. Since `drop' is
1922 equivalent to `adjustackSI [1]', it can be undone with `adjstackSI
1925 bc_adjust_stack (-1);
1927 if (!last_expr_type)
1928 last_expr_type = void_type_node;
/* Bytecode path builds a fresh RTL_EXPR with no RTL attached.  */
1930 t = make_node (RTL_EXPR);
1931 TREE_TYPE (t) = last_expr_type;
1932 RTL_EXPR_RTL (t) = NULL;
1933 RTL_EXPR_SEQUENCE (t) = NULL;
1935 /* Don't consider deleting this expr or containing exprs at tree level. */
1936 TREE_THIS_VOLATILE (t) = 1;
/* RTL path: normalize the remembered last-expression value.  */
1944 if (last_expr_type == 0)
1946 last_expr_type = void_type_node;
1947 last_expr_value = const0_rtx;
1949 else if (last_expr_value == 0)
1950 /* There are some cases where this can happen, such as when the
1951 statement is void type. */
1952 last_expr_value = const0_rtx;
1953 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
1954 /* Remove any possible QUEUED. */
1955 last_expr_value = protect_from_queue (last_expr_value, 0);
1959 TREE_TYPE (t) = last_expr_type;
1960 RTL_EXPR_RTL (t) = last_expr_value;
1961 RTL_EXPR_SEQUENCE (t) = get_insns ();
/* Keep the RTL_EXPR alive via rtl_expr_chain until it is expanded.  */
1963 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
1967 /* Don't consider deleting this expr or containing exprs at tree level. */
1968 TREE_SIDE_EFFECTS (t) = 1;
1969 /* Propagate volatility of the actual RTL expr. */
1970 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
/* Balance the increment done in expand_start_stmt_expr.  */
1973 expr_stmts_for_value--;
1978 /* Generate RTL for the start of an if-then. COND is the expression
1979 whose truth should be tested.
1981 If EXITFLAG is nonzero, this conditional is visible to
1982 `exit_something'. */
1985 expand_start_cond (cond, exitflag)
1989 struct nesting *thiscond = ALLOC_NESTING ();
1991 /* Make an entry on cond_stack for the cond we are entering. */
1993 thiscond->next = cond_stack;
1994 thiscond->all = nesting_stack;
1995 thiscond->depth = ++nesting_depth;
/* next_label is where control goes when COND is false.  */
1996 thiscond->data.cond.next_label = gen_label_rtx ();
1997 /* Before we encounter an `else', we don't need a separate exit label
1998 unless there are supposed to be exit statements
1999 to exit this conditional. */
2000 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2001 thiscond->data.cond.endif_label = thiscond->exit_label;
2002 cond_stack = thiscond;
2003 nesting_stack = thiscond;
2005 if (output_bytecode)
2006 bc_expand_start_cond (cond, exitflag);
/* RTL path: jump to next_label when COND is false.  */
2008 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2011 /* Generate RTL between then-clause and the elseif-clause
2012 of an if-then-elseif-.... */
2015 expand_start_elseif (cond)
/* Lazily create the shared end-of-if label.  */
2018 if (cond_stack->data.cond.endif_label == 0)
2019 cond_stack->data.cond.endif_label = gen_label_rtx ();
/* Then-clause falls through to endif; false path lands here.  */
2020 emit_jump (cond_stack->data.cond.endif_label);
2021 emit_label (cond_stack->data.cond.next_label);
/* New false-branch label for the elseif's own condition.  */
2022 cond_stack->data.cond.next_label = gen_label_rtx ();
2023 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2026 /* Generate RTL between the then-clause and the else-clause
2027 of an if-then-else. */
2030 expand_start_else ()
/* Lazily create the shared end-of-if label.  */
2032 if (cond_stack->data.cond.endif_label == 0)
2033 cond_stack->data.cond.endif_label = gen_label_rtx ();
2035 if (output_bytecode)
2037 bc_expand_start_else ();
/* Then-clause jumps over the else; false path enters it here.  */
2041 emit_jump (cond_stack->data.cond.endif_label);
2042 emit_label (cond_stack->data.cond.next_label);
2043 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2046 /* After calling expand_start_else, turn this "else" into an "else if"
2047 by providing another condition. */
2050 expand_elseif (cond)
/* Re-arm next_label (cleared by expand_start_else) and test COND.  */
2053 cond_stack->data.cond.next_label = gen_label_rtx ();
2054 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2057 /* Generate RTL for the end of an if-then.
2058 Pop the record for it off of cond_stack. */
2063 struct nesting *thiscond = cond_stack;
2065 if (output_bytecode)
2066 bc_expand_end_cond ();
/* Emit whichever labels were created: the pending false-branch label
   and/or the shared endif label.  */
2069 do_pending_stack_adjust ();
2070 if (thiscond->data.cond.next_label)
2071 emit_label (thiscond->data.cond.next_label);
2072 if (thiscond->data.cond.endif_label)
2073 emit_label (thiscond->data.cond.endif_label);
2076 POPSTACK (cond_stack);
2081 /* Generate code for the start of an if-then. COND is the expression
2082 whose truth is to be tested; if EXITFLAG is nonzero this conditional
2083 is to be visible to exit_something. It is assumed that the caller
2084 has pushed the previous context on the cond stack. */
2087 bc_expand_start_cond (cond, exitflag)
2091 struct nesting *thiscond = cond_stack;
/* NOTE(review): stores COND into the case_stmt member of the union —
   looks intentional (union reuse) but verify against full source.  */
2093 thiscond->data.case_stmt.nominal_type = cond;
2095 thiscond->exit_label = gen_label_rtx ();
/* Evaluate COND and jump past the then-clause when it is false.  */
2096 bc_expand_expr (cond);
2097 bc_emit_bytecode (xjumpifnot);
2098 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));
2100 #ifdef DEBUG_PRINT_CODE
2101 fputc ('\n', stderr);
2105 /* Generate the label for the end of an if with
2109 bc_expand_end_cond ()
2111 struct nesting *thiscond = cond_stack;
/* Define the exit label that bc_expand_start_cond/start_else target.  */
2113 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->exit_label));
2116 /* Generate code for the start of the else- clause of
2120 bc_expand_start_else ()
2122 struct nesting *thiscond = cond_stack;
/* The old exit label becomes the endif; a fresh exit label marks the
   end of the else-clause.  Then-clause jumps over the else.  */
2124 thiscond->data.cond.endif_label = thiscond->exit_label;
2125 thiscond->exit_label = gen_label_rtx ();
2126 bc_emit_bytecode (jump);
2127 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));
2129 #ifdef DEBUG_PRINT_CODE
2130 fputc ('\n', stderr);
/* False path of the condition enters the else-clause here.  */
2133 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->data.cond.endif_label));
2136 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2137 loop should be exited by `exit_something'. This is a loop for which
2138 `expand_continue' will jump to the top of the loop.
2140 Make an entry on loop_stack to record the labels associated with
2144 expand_start_loop (exit_flag)
2147 register struct nesting *thisloop = ALLOC_NESTING ();
2149 /* Make an entry on loop_stack for the loop we are entering. */
2151 thisloop->next = loop_stack;
2152 thisloop->all = nesting_stack;
2153 thisloop->depth = ++nesting_depth;
2154 thisloop->data.loop.start_label = gen_label_rtx ();
2155 thisloop->data.loop.end_label = gen_label_rtx ();
2156 thisloop->data.loop.alt_end_label = 0;
/* By default `continue' targets the loop top; see
   expand_start_loop_continue_elsewhere for the other case.  */
2157 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2158 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2159 loop_stack = thisloop;
2160 nesting_stack = thisloop;
2162 if (output_bytecode)
2164 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));
/* RTL path: mark the loop start with a NOTE for the loop optimizer.  */
2168 do_pending_stack_adjust ();
2170 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
2171 emit_label (thisloop->data.loop.start_label);
2176 /* Like expand_start_loop but for a loop where the continuation point
2177 (for expand_continue_loop) will be specified explicitly. */
2180 expand_start_loop_continue_elsewhere (exit_flag)
2183 struct nesting *thisloop = expand_start_loop (exit_flag);
/* Override the default (loop top) with a label the caller will place
   via expand_loop_continue_here.  */
2184 loop_stack->data.loop.continue_label = gen_label_rtx ();
2188 /* Specify the continuation point for a loop started with
2189 expand_start_loop_continue_elsewhere.
2190 Use this at the point in the code to which a continue statement
2194 expand_loop_continue_here ()
2196 if (output_bytecode)
2198 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (loop_stack->data.loop.continue_label));
/* RTL path: the NOTE marks the continue point for the loop optimizer.  */
2201 do_pending_stack_adjust ();
2202 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
2203 emit_label (loop_stack->data.loop.continue_label);
/* Bytecode version of expand_end_loop: jump back to the loop top,
   define the exit label, and pop the loop_stack entry.  */
2209 bc_expand_end_loop ()
2211 struct nesting *thisloop = loop_stack;
2213 bc_emit_bytecode (jump);
2214 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));
2216 #ifdef DEBUG_PRINT_CODE
2217 fputc ('\n', stderr);
/* Exit point of the loop; `break'/`exit_something' land here.  */
2220 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->exit_label));
2221 POPSTACK (loop_stack);
2226 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2227 Pop the block off of loop_stack. */
/* NOTE(review): the function header line (expand_end_loop) is missing from
   this listing; the body below begins with its local declarations. */
2233 register rtx start_label;
2234 rtx last_test_insn = 0;
2237 if (output_bytecode)
2239 bc_expand_end_loop ();
2243 insn = get_last_insn ();
2244 start_label = loop_stack->data.loop.start_label;
2246 /* Mark the continue-point at the top of the loop if none elsewhere. */
2247 if (start_label == loop_stack->data.loop.continue_label)
2248 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2250 do_pending_stack_adjust ();
2252 /* If optimizing, perhaps reorder the loop. If the loop
2253 starts with a conditional exit, roll that to the end
2254 where it will optimize together with the jump back.
2256 We look for the last conditional branch to the exit that we encounter
2257 before hitting 30 insns or a CALL_INSN. If we see an unconditional
2258 branch to the exit first, use it.
2260 We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
2261 because moving them is not valid. */
/* NOTE(review): this is a condition fragment — the enclosing `if' line is
   missing from the listing.  It tests that the last insn is NOT already a
   conditional jump (a SET of pc from an IF_THEN_ELSE). */
2265 ! (GET_CODE (insn) == JUMP_INSN
2266 && GET_CODE (PATTERN (insn)) == SET
2267 && SET_DEST (PATTERN (insn)) == pc_rtx
2268 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2270 /* Scan insns from the top of the loop looking for a qualified
2271 conditional exit. */
2272 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2273 insn = NEXT_INSN (insn))
/* Stop scanning at calls and labels... */
2275 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
/* ...and at block-boundary notes, which may not be moved. */
2278 if (GET_CODE (insn) == NOTE
2279 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2280 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2283 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
/* Give up once a candidate exists and the scan has passed 30 insns. */
2286 if (last_test_insn && num_insns > 30)
/* A conditional jump either of whose arms targets the loop's end label
   (or its alternate end label) qualifies as the exit test. */
2289 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
2290 && SET_DEST (PATTERN (insn)) == pc_rtx
2291 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
2292 && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
2293 && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
2294 == loop_stack->data.loop.end_label)
2295 || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
2296 == loop_stack->data.loop.alt_end_label)))
2297 || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
2298 && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
2299 == loop_stack->data.loop.end_label)
2300 || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
2301 == loop_stack->data.loop.alt_end_label)))))
2302 last_test_insn = insn;
/* An unconditional jump to the exit, seen before any conditional
   candidate, is used instead (taking its trailing BARRIER too). */
2304 if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
2305 && GET_CODE (PATTERN (insn)) == SET
2306 && SET_DEST (PATTERN (insn)) == pc_rtx
2307 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
2308 && ((XEXP (SET_SRC (PATTERN (insn)), 0)
2309 == loop_stack->data.loop.end_label)
2310 || (XEXP (SET_SRC (PATTERN (insn)), 0)
2311 == loop_stack->data.loop.alt_end_label)))
2312 /* Include BARRIER. */
2313 last_test_insn = NEXT_INSN (insn);
2316 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2318 /* We found one. Move everything from there up
2319 to the end of the loop, and add a jump into the loop
2320 to jump to there. */
2321 register rtx newstart_label = gen_label_rtx ();
2322 register rtx start_move = start_label;
2324 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2325 then we want to move this note also. */
2326 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2327 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2328 == NOTE_INSN_LOOP_CONT))
2329 start_move = PREV_INSN (start_move);
2331 emit_label_after (newstart_label, PREV_INSN (start_move))
2332 reorder_insns (start_move, last_test_insn, get_last_insn ());
2333 emit_jump_insn_after (gen_jump (start_label),
2334 PREV_INSN (newstart_label));
2335 emit_barrier_after (PREV_INSN (newstart_label));
2336 start_label = newstart_label;
/* Close the loop: jump back to the (possibly moved) start, mark the loop
   end with a note, emit the exit label, and pop this loop's entry. */
2340 emit_jump (start_label);
2341 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2342 emit_label (loop_stack->data.loop.end_label);
2344 POPSTACK (loop_stack);
2349 /* Generate a jump to the current loop's continue-point.
2350 This is usually the top of the loop, but may be specified
2351 explicitly elsewhere. If not currently inside a loop,
2352 return 0 and do nothing; caller will print an error message. */
2355 expand_continue_loop (whichloop)
2356 struct nesting *whichloop;
/* NOTE(review): the guard lines around this default (presumably
   `if (whichloop == 0) ... if (whichloop == 0) return 0;') are missing
   from this listing. */
2360 whichloop = loop_stack;
2363 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2368 /* Generate a jump to exit the current loop. If not currently inside a loop,
2369 return 0 and do nothing; caller will print an error message. */
2372 expand_exit_loop (whichloop)
2373 struct nesting *whichloop;
/* Default to the innermost loop; the surrounding guard lines are missing
   from this listing. */
2377 whichloop = loop_stack;
2380 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2384 /* Generate a conditional jump to exit the current loop if COND
2385 evaluates to zero. If not currently inside a loop,
2386 return 0 and do nothing; caller will print an error message. */
2389 expand_exit_loop_if_false (whichloop, cond)
2390 struct nesting *whichloop;
/* Default to the innermost loop; the surrounding guard lines are missing
   from this listing. */
2395 whichloop = loop_stack;
2398 if (output_bytecode)
/* Bytecode path: evaluate COND, then jump-if-not to the exit label. */
2400 bc_expand_expr (cond);
2401 bc_expand_goto_internal (xjumpifnot,
2402 BYTECODE_BC_LABEL (whichloop->exit_label),
2407 /* In order to handle fixups, we actually create a conditional jump
2408 around a unconditional branch to exit the loop. If fixups are
2409 necessary, they go before the unconditional branch. */
2411 rtx label = gen_label_rtx ();
2414 do_jump (cond, NULL_RTX, label);
2415 last_insn = get_last_insn ();
2416 if (GET_CODE (last_insn) == CODE_LABEL)
/* Remember this label so expand_end_loop can recognize jumps to it as
   loop exits (the "alternate" end label). */
2417 whichloop->data.loop.alt_end_label = last_insn;
2418 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2426 /* Return non-zero if we should preserve sub-expressions as separate
2427 pseudos. We never do so if we aren't optimizing. We always do so
2428 if -fexpensive-optimizations.
2430 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2431 the loop may still be a small one. */
2434 preserve_subexpressions_p ()
2438 if (flag_expensive_optimizations)
2441 if (optimize == 0 || loop_stack == 0)
/* Heuristic: we are "early" in the loop if fewer than about
   3 * n_non_fixed_regs insn UIDs have been generated since the loop's
   start label was created. */
2444 insn = get_last_insn_anywhere ();
2447 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2448 < n_non_fixed_regs * 3));
2452 /* Generate a jump to exit the current loop, conditional, binding contour
2453 or case statement. Not all such constructs are visible to this function,
2454 only those started with EXIT_FLAG nonzero. Individual languages use
2455 the EXIT_FLAG parameter to control which kinds of constructs you can
2458 If not currently inside anything that can be exited,
2459 return 0 and do nothing; caller will print an error message. */
2462 expand_exit_something ()
/* Walk the whole nesting stack, innermost construct first, and exit at
   the first construct that was given an exit label. */
2466 for (n = nesting_stack; n; n = n->all)
2467 if (n->exit_label != 0)
2469 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2476 /* Generate RTL to return from the current function, with no value.
2477 (That is, we do not do anything about returning any value.) */
2480 expand_null_return ()
2482 struct nesting *block = block_stack;
2485 if (output_bytecode)
/* Bytecode path: a plain return instruction suffices. */
2487 bc_emit_instruction (ret);
2491 /* Does any pending block have cleanups? */
2493 while (block && block->data.block.cleanups == 0)
2494 block = block->next;
2496 /* If yes, use a goto to return, since that runs cleanups. */
/* NOTE(review): last_insn is set on a line missing from this listing;
   presumably `rtx last_insn = get_last_insn ();' as in
   expand_value_return — confirm against the full source. */
2498 expand_null_return_1 (last_insn, block != 0);
2501 /* Generate RTL to return from the current function, with value VAL. */
2504 expand_value_return (val)
2507 struct nesting *block = block_stack;
2508 rtx last_insn = get_last_insn ();
2509 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2511 /* Copy the value to the return location
2512 unless it's already there. */
2514 if (return_reg != val)
2516 #ifdef PROMOTE_FUNCTION_RETURN
/* If the target promotes return values, convert VAL to the promoted
   mode (with the result type's signedness) before the copy. */
2517 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2518 int unsignedp = TREE_UNSIGNED (type);
2519 enum machine_mode mode
2520 = promote_mode (type, DECL_MODE (DECL_RESULT (current_function_decl)),
2523 if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
2524 convert_move (return_reg, val, unsignedp);
2527 emit_move_insn (return_reg, val);
/* Emit a USE of a hard return register so later passes keep the copy. */
2529 if (GET_CODE (return_reg) == REG
2530 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2531 emit_insn (gen_rtx (USE, VOIDmode, return_reg));
2533 /* Does any pending block have cleanups? */
2535 while (block && block->data.block.cleanups == 0)
2536 block = block->next;
2538 /* If yes, use a goto to return, since that runs cleanups.
2539 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2541 expand_null_return_1 (last_insn, block != 0);
2544 /* Output a return with no value. If LAST_INSN is nonzero,
2545 pretend that the return takes place after LAST_INSN.
2546 If USE_GOTO is nonzero then don't use a return instruction;
2547 go to the return label instead. This causes any cleanups
2548 of pending blocks to be executed normally. */
2551 expand_null_return_1 (last_insn, use_goto)
2555 rtx end_label = cleanup_label ? cleanup_label : return_label;
2557 clear_pending_stack_adjust ();
2558 do_pending_stack_adjust ();
2561 /* PCC-struct return always uses an epilogue. */
2562 if (current_function_returns_pcc_struct || use_goto)
/* Create the return label if needed (presumably guarded by a missing
   `if (return_label == 0)' line), then jump to it — via cleanup_label
   first when one exists, so cleanups run. */
2565 end_label = return_label = gen_label_rtx ();
2566 expand_goto_internal (NULL_TREE, end_label, last_insn);
2570 /* Otherwise output a simple return-insn if one is available,
2571 unless it won't do the job. */
2573 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2575 emit_jump_insn (gen_return ());
2581 /* Otherwise jump to the epilogue. */
2582 expand_goto_internal (NULL_TREE, end_label, last_insn);
2585 /* Generate RTL to evaluate the expression RETVAL and return it
2586 from the current function. */
2589 expand_return (retval)
2592 /* If there are any cleanups to be performed, then they will
2593 be inserted following LAST_INSN. It is desirable
2594 that the last_insn, for such purposes, should be the
2595 last insn before computing the return value. Otherwise, cleanups
2596 which call functions can clobber the return value. */
2597 /* ??? rms: I think that is erroneous, because in C++ it would
2598 run destructors on variables that might be used in the subsequent
2599 computation of the return value. */
2601 register rtx val = 0;
2605 struct nesting *block;
2607 /* Bytecode returns are quite simple, just leave the result on the
2608 arithmetic stack. */
2609 if (output_bytecode)
2611 bc_expand_expr (retval);
2612 bc_emit_instruction (ret);
2616 /* If function wants no value, give it none. */
2617 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
/* Still evaluate RETVAL for its side effects, then do a plain return. */
2619 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2621 expand_null_return ();
2625 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2626 /* This is not sufficient. We also need to watch for cleanups of the
2627 expression we are about to expand. Unfortunately, we cannot know
2628 if it has cleanups until we expand it, and we want to change how we
2629 expand it depending upon if we need cleanups. We can't win. */
2631 cleanups = any_pending_cleanups (1);
/* Pick out the expression that actually produces the return value:
   the RHS of `RESULT = rhs', the RESULT_DECL itself, or a void
   tail call. */
2636 if (TREE_CODE (retval) == RESULT_DECL)
2637 retval_rhs = retval;
2638 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2639 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2640 retval_rhs = TREE_OPERAND (retval, 1);
2641 else if (TREE_TYPE (retval) == void_type_node)
2642 /* Recognize tail-recursive call to void function. */
2643 retval_rhs = retval;
2645 retval_rhs = NULL_TREE;
2647 /* Only use `last_insn' if there are cleanups which must be run. */
2648 if (cleanups || cleanup_label != 0)
2649 last_insn = get_last_insn ();
2651 /* Distribute return down conditional expr if either of the sides
2652 may involve tail recursion (see test below). This enhances the number
2653 of tail recursions we see. Don't do this always since it can produce
2654 sub-optimal code in some cases and we distribute assignments into
2655 conditional expressions when it would help. */
2657 if (optimize && retval_rhs != 0
2658 && frame_offset == 0
2659 && TREE_CODE (retval_rhs) == COND_EXPR
2660 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2661 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2663 rtx label = gen_label_rtx ();
/* Rewrite `return c ? a : b;' as
   `if (!c) goto L; return a; L: return b;', recursing through
   expand_return for each arm. */
2666 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2667 expr = build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2668 DECL_RESULT (current_function_decl),
2669 TREE_OPERAND (retval_rhs, 1));
2670 TREE_SIDE_EFFECTS (expr) = 1;
2671 expand_return (expr);
2674 expr = build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2675 DECL_RESULT (current_function_decl),
2676 TREE_OPERAND (retval_rhs, 2));
2677 TREE_SIDE_EFFECTS (expr) = 1;
2678 expand_return (expr);
2682 /* For tail-recursive call to current function,
2683 just jump back to the beginning.
2684 It's unsafe if any auto variable in this function
2685 has its address taken; for simplicity,
2686 require stack frame to be empty. */
2687 if (optimize && retval_rhs != 0
2688 && frame_offset == 0
2689 && TREE_CODE (retval_rhs) == CALL_EXPR
2690 && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
2691 && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
2692 /* Finish checking validity, and if valid emit code
2693 to set the argument variables for the new call. */
2694 && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
2695 DECL_ARGUMENTS (current_function_decl)))
2697 if (tail_recursion_label == 0)
/* First tail recursion seen: place the label at the function's
   reentry point. */
2699 tail_recursion_label = gen_label_rtx ();
2700 emit_label_after (tail_recursion_label,
2701 tail_recursion_reentry);
2704 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
2709 /* This optimization is safe if there are local cleanups
2710 because expand_null_return takes care of them.
2711 ??? I think it should also be safe when there is a cleanup label,
2712 because expand_null_return takes care of them, too.
2713 Any reason why not? */
2714 if (HAVE_return && cleanup_label == 0
2715 && ! current_function_returns_pcc_struct
2716 && BRANCH_COST <= 1)
2718 /* If this is return x == y; then generate
2719 if (x == y) return 1; else return 0;
2720 if we can do it with explicit return insns and
2721 branches are cheap. */
2723 switch (TREE_CODE (retval_rhs))
/* NOTE(review): the listing omits several case labels and statements here
   (original lines 2724-2730, 2734, 2740, 2742-2744). */
2731 case TRUTH_ANDIF_EXPR:
2732 case TRUTH_ORIF_EXPR:
2733 case TRUTH_AND_EXPR:
2735 case TRUTH_NOT_EXPR:
2736 case TRUTH_XOR_EXPR:
2737 op0 = gen_label_rtx ();
2738 jumpifnot (retval_rhs, op0);
2739 expand_value_return (const1_rtx);
2741 expand_value_return (const0_rtx);
2745 #endif /* HAVE_return */
2747 /* If the result is an aggregate that is being returned in one (or more)
2748 registers, load the registers here. The compiler currently can't handle
2749 copying a BLKmode value into registers. We could put this code in a
2750 more general area (for use by everyone instead of just function
2751 call/return), but until this feature is generally usable it is kept here
2752 (and in expand_call). The value must go into a pseudo in case there
2753 are cleanups that will clobber the real return register. */
/* NOTE(review): the `if' line beginning this aggregate-in-registers test
   is missing from the listing; only its continuation conditions appear. */
2756 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2757 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2759 int i, bitpos, xbitpos;
2760 int big_endian_correction = 0;
2761 int bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2762 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2763 int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)),BITS_PER_WORD);
2764 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
2765 rtx result_reg, src, dst;
2766 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2767 enum machine_mode tmpmode, result_reg_mode;
2769 /* Structures whose size is not a multiple of a word are aligned
2770 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2771 machine, this means we must skip the empty high order bytes when
2772 calculating the bit offset. */
2773 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2774 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2777 /* Copy the structure BITSIZE bits at a time. */
2778 for (bitpos = 0, xbitpos = big_endian_correction;
2779 bitpos < bytes * BITS_PER_UNIT;
2780 bitpos += bitsize, xbitpos += bitsize)
2782 /* We need a new destination pseudo each time xbitpos is
2783 on a word boundary and when xbitpos == big_endian_correction
2784 (the first time through). */
2785 if (xbitpos % BITS_PER_WORD == 0
2786 || xbitpos == big_endian_correction)
2788 /* Generate an appropriate register. */
2789 dst = gen_reg_rtx (word_mode);
2790 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2792 /* Clobber the destination before we move anything into it. */
2793 emit_insn (gen_rtx (CLOBBER, VOIDmode, dst));
2796 /* We need a new source operand each time bitpos is on a word
2798 if (bitpos % BITS_PER_WORD == 0)
2799 src = operand_subword_force (result_val,
2800 bitpos / BITS_PER_WORD,
2803 /* Use bitpos for the source extraction (left justified) and
2804 xbitpos for the destination store (right justified). */
2805 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2806 extract_bit_field (src, bitsize,
2807 bitpos % BITS_PER_WORD, 1,
2808 NULL_RTX, word_mode,
2810 bitsize / BITS_PER_UNIT,
2812 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2815 /* Find the smallest integer mode large enough to hold the
2816 entire structure and use that mode instead of BLKmode
2817 on the USE insn for the return register. */
2818 bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2819 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2820 tmpmode != MAX_MACHINE_MODE;
2821 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
2823 /* Have we found a large enough mode? */
2824 if (GET_MODE_SIZE (tmpmode) >= bytes)
2828 /* No suitable mode found. */
2829 if (tmpmode == MAX_MACHINE_MODE)
2832 PUT_MODE (DECL_RTL (DECL_RESULT (current_function_decl)), tmpmode);
/* Copy through a pseudo at least word_mode wide so operand_subword can
   address each word of the result below. */
2834 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
2835 result_reg_mode = word_mode;
2837 result_reg_mode = tmpmode;
2838 result_reg = gen_reg_rtx (result_reg_mode);
2840 /* Now that the value is in pseudos, copy it to the result reg(s). */
2843 for (i = 0; i < n_regs; i++)
2844 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
2847 if (tmpmode != result_reg_mode)
2848 result_reg = gen_lowpart (tmpmode, result_reg);
2850 expand_value_return (result_reg);
/* NOTE(review): the `else if' line beginning this branch is missing from
   the listing; per the comments it handles a scalar value returned in a
   register when cleanups could clobber it — compute into a pseudo first. */
2854 && TREE_TYPE (retval_rhs) != void_type_node
2855 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2857 /* Calculate the return value into a pseudo reg. */
2858 val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2860 /* All temporaries have now been used. */
2862 /* Return the calculated value, doing cleanups first. */
2863 expand_value_return (val);
2867 /* No cleanups or no hard reg used;
2868 calculate value into hard return reg. */
2869 expand_expr (retval, const0_rtx, VOIDmode, 0);
2872 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2876 /* Return 1 if the end of the generated RTX is not a barrier.
2877 This means code already compiled can drop through. */
2880 drop_through_at_end_p ()
2882 rtx insn = get_last_insn ();
/* Skip trailing notes; only a BARRIER prevents falling through. */
2883 while (insn && GET_CODE (insn) == NOTE)
2884 insn = PREV_INSN (insn);
2885 return insn && GET_CODE (insn) != BARRIER;
2888 /* Emit code to alter this function's formal parms for a tail-recursive call.
2889 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2890 FORMALS is the chain of decls of formals.
2891 Return 1 if this can be done;
2892 otherwise return 0 and do not emit any code. */
2895 tail_recursion_args (actuals, formals)
2896 tree actuals, formals;
2898 register tree a = actuals, f = formals;
2900 register rtx *argvec;
2902 /* Check that number and types of actuals are compatible
2903 with the formals. This is not always true in valid C code.
2904 Also check that no formal needs to be addressable
2905 and that all formals are scalars. */
2907 /* Also count the args. */
2909 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
/* Reject any actual/formal pair with differing main type variants,
   and any formal not living in a scalar (non-BLKmode) register. */
2911 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
2912 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
2914 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
/* The two chains must run out together: argument counts must match. */
2917 if (a != 0 || f != 0)
2920 /* Compute all the actuals. */
2922 argvec = (rtx *) alloca (i * sizeof (rtx));
2924 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2925 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
2927 /* Find which actual values refer to current values of previous formals.
2928 Copy each of them now, before any formal is changed. */
2930 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2934 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2935 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2936 { copy = 1; break; }
2938 argvec[i] = copy_to_reg (argvec[i]);
2941 /* Store the values of the actuals into the formals. */
2943 for (f = formals, a = actuals, i = 0; f;
2944 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2946 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
2947 emit_move_insn (DECL_RTL (f), argvec[i]);
/* Mode mismatch: convert, honoring the actual's signedness. */
2949 convert_move (DECL_RTL (f), argvec[i],
2950 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
2957 /* Generate the RTL code for entering a binding contour.
2958 The variables are declared one by one, by calls to `expand_decl'.
2960 EXIT_FLAG is nonzero if this construct should be visible to
2961 `exit_something'. */
2964 expand_start_bindings (exit_flag)
2967 struct nesting *thisblock = ALLOC_NESTING ();
/* For RTL, mark the block start with a note (needed later as the place
   to insert nonlocal-goto handler setup); bytecode has no such note. */
2968 rtx note = output_bytecode ? 0 : emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
2970 /* Make an entry on block_stack for the block we are entering. */
2972 thisblock->next = block_stack;
2973 thisblock->all = nesting_stack;
2974 thisblock->depth = ++nesting_depth;
2975 thisblock->data.block.stack_level = 0;
2976 thisblock->data.block.cleanups = 0;
2977 thisblock->data.block.function_call_count = 0;
/* Record the enclosing blocks' cleanups. empty_cleanup_list marks
   "no enclosing cleanups"; otherwise chain this block's view of them. */
2981 if (block_stack->data.block.cleanups == NULL_TREE
2982 && (block_stack->data.block.outer_cleanups == NULL_TREE
2983 || block_stack->data.block.outer_cleanups == empty_cleanup_list))
2984 thisblock->data.block.outer_cleanups = empty_cleanup_list;
2986 thisblock->data.block.outer_cleanups
2987 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2988 block_stack->data.block.outer_cleanups);
2991 thisblock->data.block.outer_cleanups = 0;
/* NOTE(review): the listing appears to contain two alternative versions of
   the outer_cleanups computation above and below — likely an #if/#else
   region whose preprocessor lines are missing; confirm in the full file. */
2995 && !(block_stack->data.block.cleanups == NULL_TREE
2996 && block_stack->data.block.outer_cleanups == NULL_TREE))
2997 thisblock->data.block.outer_cleanups
2998 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2999 block_stack->data.block.outer_cleanups);
3001 thisblock->data.block.outer_cleanups = 0;
3003 thisblock->data.block.label_chain = 0;
3004 thisblock->data.block.innermost_stack_block = stack_block_stack;
3005 thisblock->data.block.first_insn = note;
3006 thisblock->data.block.block_start_count = ++block_start_count;
3007 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3008 block_stack = thisblock;
3009 nesting_stack = thisblock;
3011 if (!output_bytecode)
3013 /* Make a new level for allocating stack slots. */
3018 /* Given a pointer to a BLOCK node, save a pointer to the most recently
3019 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
3023 remember_end_note (block)
3024 register tree block;
3026 BLOCK_END_NOTE (block) = last_block_end_note;
/* Consume the saved note so it cannot be attached to a second BLOCK. */
3027 last_block_end_note = NULL_RTX;
3030 /* Generate RTL code to terminate a binding contour.
3031 VARS is the chain of VAR_DECL nodes
3032 for the variables bound in this contour.
3033 MARK_ENDS is nonzero if we should put a note at the beginning
3034 and end of this binding contour.
3036 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3037 (That is true automatically if the contour has a saved stack level.) */
3040 expand_end_bindings (vars, mark_ends, dont_jump_in)
3045 register struct nesting *thisblock = block_stack;
3048 if (output_bytecode)
/* Bytecode path: delegate entirely to the bytecode version. */
3050 bc_expand_end_bindings (vars, mark_ends, dont_jump_in);
/* Warn about variables bound in this contour that were never used. */
3055 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3056 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL
3057 && ! DECL_IN_SYSTEM_HEADER (decl))
3058 warning_with_decl (decl, "unused variable `%s'");
3060 if (thisblock->exit_label)
3062 do_pending_stack_adjust ();
3063 emit_label (thisblock->exit_label);
3066 /* If necessary, make a handler for nonlocal gotos taking
3067 place in the function calls in this block. */
3068 if (function_call_count != thisblock->data.block.function_call_count
3070 /* Make handler for outermost block
3071 if there were any nonlocal gotos to this function. */
3072 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3073 /* Make handler for inner block if it has something
3074 special to do when you jump out of it. */
3075 : (thisblock->data.block.cleanups != 0
3076 || thisblock->data.block.stack_level != 0)))
3079 rtx afterward = gen_label_rtx ();
3080 rtx handler_label = gen_label_rtx ();
3081 rtx save_receiver = gen_reg_rtx (Pmode);
3084 /* Don't let jump_optimize delete the handler. */
3085 LABEL_PRESERVE_P (handler_label) = 1;
3087 /* Record the handler address in the stack slot for that purpose,
3088 during this block, saving and restoring the outer value. */
3089 if (thisblock->next != 0)
/* Inner block: restore the outer handler value here at block end, and
   insert insns before the block start that save it into save_receiver. */
3091 emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
3094 emit_move_insn (save_receiver, nonlocal_goto_handler_slot);
3095 insns = get_insns ();
3097 emit_insns_before (insns, thisblock->data.block.first_insn);
/* Install this block's handler address at the start of the block. */
3101 emit_move_insn (nonlocal_goto_handler_slot,
3102 gen_rtx (LABEL_REF, Pmode, handler_label));
3103 insns = get_insns ();
3105 emit_insns_before (insns, thisblock->data.block.first_insn);
3107 /* Jump around the handler; it runs only when specially invoked. */
3108 emit_jump (afterward);
3109 emit_label (handler_label);
3111 #ifdef HAVE_nonlocal_goto
3112 if (! HAVE_nonlocal_goto)
3114 /* First adjust our frame pointer to its actual value. It was
3115 previously set to the start of the virtual area corresponding to
3116 the stacked variables when we branched here and now needs to be
3117 adjusted to the actual hardware fp value.
3119 Assignments are to virtual registers are converted by
3120 instantiate_virtual_regs into the corresponding assignment
3121 to the underlying register (fp in this case) that makes
3122 the original assignment true.
3123 So the following insn will actually be
3124 decrementing fp by STARTING_FRAME_OFFSET. */
3125 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3127 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3128 if (fixed_regs[ARG_POINTER_REGNUM])
3130 #ifdef ELIMINABLE_REGS
3131 /* If the argument pointer can be eliminated in favor of the
3132 frame pointer, we don't need to restore it. We assume here
3133 that if such an elimination is present, it can always be used.
3134 This is the case on all known machines; if we don't make this
3135 assumption, we do unnecessary saving on many machines. */
3136 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3139 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
3140 if (elim_regs[i].from == ARG_POINTER_REGNUM
3141 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3144 if (i == sizeof elim_regs / sizeof elim_regs [0])
3147 /* Now restore our arg pointer from the address at which it
3148 was saved in our stack frame.
3149 If there hasn't be space allocated for it yet, make
3151 if (arg_pointer_save_area == 0)
3152 arg_pointer_save_area
3153 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3154 emit_move_insn (virtual_incoming_args_rtx,
3155 /* We need a pseudo here, or else
3156 instantiate_virtual_regs_1 complains. */
3157 copy_to_reg (arg_pointer_save_area));
3162 /* The handler expects the desired label address in the static chain
3163 register. It tests the address and does an appropriate jump
3164 to whatever label is desired. */
3165 for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
3166 /* Skip any labels we shouldn't be able to jump to from here. */
3167 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3169 rtx not_this = gen_label_rtx ();
3170 rtx this = gen_label_rtx ();
3171 do_jump_if_equal (static_chain_rtx,
3172 gen_rtx (LABEL_REF, Pmode, DECL_RTL (TREE_VALUE (link))),
3174 emit_jump (not_this);
3176 expand_goto (TREE_VALUE (link));
3177 emit_label (not_this);
3179 /* If label is not recognized, abort. */
3180 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0,
3183 emit_label (afterward);
3186 /* Don't allow jumping into a block that has cleanups or a stack level. */
/* NOTE(review): the first operand of this condition (presumably
   dont_jump_in, on the preceding line) is missing from this listing. */
3188 || thisblock->data.block.stack_level != 0
3189 || thisblock->data.block.cleanups != 0)
3191 struct label_chain *chain;
3193 /* Any labels in this block are no longer valid to go to.
3194 Mark them to cause an error message. */
3195 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3197 DECL_TOO_LATE (chain->label) = 1;
3198 /* If any goto without a fixup came to this label,
3199 that must be an error, because gotos without fixups
3200 come from outside all saved stack-levels and all cleanups. */
3201 if (TREE_ADDRESSABLE (chain->label))
3202 error_with_decl (chain->label,
3203 "label `%s' used before containing binding contour");
3207 /* Restore stack level in effect before the block
3208 (only if variable-size objects allocated). */
3209 /* Perform any cleanups associated with the block. */
3211 if (thisblock->data.block.stack_level != 0
3212 || thisblock->data.block.cleanups != 0)
3214 /* Only clean up here if this point can actually be reached. */
3215 int reachable = GET_CODE (get_last_insn ()) != BARRIER;
3217 /* Don't let cleanups affect ({...}) constructs. */
3218 int old_expr_stmts_for_value = expr_stmts_for_value;
3219 rtx old_last_expr_value = last_expr_value;
3220 tree old_last_expr_type = last_expr_type;
3221 expr_stmts_for_value = 0;
3223 /* Do the cleanups. */
3224 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3226 do_pending_stack_adjust ();
3228 expr_stmts_for_value = old_expr_stmts_for_value;
3229 last_expr_value = old_last_expr_value;
3230 last_expr_type = old_last_expr_type;
3232 /* Restore the stack level. */
3234 if (reachable && thisblock->data.block.stack_level != 0)
3236 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3237 thisblock->data.block.stack_level, NULL_RTX);
3238 if (nonlocal_goto_handler_slot != 0)
3239 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3243 /* Any gotos out of this block must also do these things.
3244 Also report any gotos with fixups that came to labels in this
3246 fixup_gotos (thisblock,
3247 thisblock->data.block.stack_level,
3248 thisblock->data.block.cleanups,
3249 thisblock->data.block.first_insn,
3253 /* Mark the beginning and end of the scope if requested.
3254 We do this now, after running cleanups on the variables
3255 just going out of scope, so they are in scope for their cleanups. */
/* If marking ends, emit the end note (stashed for remember_end_note);
   otherwise delete the begin note so the begin/end pair stays balanced. */
3258 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3260 /* Get rid of the beginning-mark if we don't make an end-mark. */
3261 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3263 /* If doing stupid register allocation, make sure lives of all
3264 register variables declared here extend thru end of scope. */
3267 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3269 rtx rtl = DECL_RTL (decl);
3270 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
3274 /* Restore block_stack level for containing block. */
3276 stack_block_stack = thisblock->data.block.innermost_stack_block;
3277 POPSTACK (block_stack);
3279 /* Pop the stack slot nesting and free any slots at this level. */
3284 /* End a binding contour.
3285 VARS is the chain of VAR_DECL nodes for the variables bound
3286 in this contour. MARK_ENDS is nonzero if we should put a note
3287 at the beginning and end of this binding contour.
3288 DONT_JUMP_IN is nonzero if it is not valid to jump into this
3292 bc_expand_end_bindings (vars, mark_ends, dont_jump_in)
3297 struct nesting *thisbind = nesting_stack;
/* Warn about unused variables.  Note this walks TREE_VALUE of each chain
   node, unlike expand_end_bindings which tests the decls directly —
   presumably VARS is a TREE_LIST here; confirm against callers. */
3301 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3302 if (! TREE_USED (TREE_VALUE (decl)) && TREE_CODE (TREE_VALUE (decl)) == VAR_DECL)
3303 warning_with_decl (decl, "unused variable `%s'");
/* Define the exit label, if this contour is visible to exit_something. */
3305 if (thisbind->exit_label)
3306 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisbind->exit_label));
3308 /* Pop block/bindings off stack */
3309 POPSTACK (block_stack);
3312 /* Generate RTL for the automatic variable declaration DECL.
3313 (Other kinds of declarations are simply ignored if seen here.) */
/* Dispatches on DECL's kind, mode and size to give it RTL: a pseudo
   register, a fixed stack slot, or dynamically allocated stack space.  */
3319 struct nesting *thisblock = block_stack;
/* Bytecode compilation is handled by its own routine.  */
3322 if (output_bytecode)
3324 bc_expand_decl (decl, 0);
3328 type = TREE_TYPE (decl);
3330 /* Only automatic variables need any expansion done.
3331 Static and external variables, and external functions,
3332 will be handled by `assemble_variable' (called from finish_decl).
3333 TYPE_DECL and CONST_DECL require nothing.
3334 PARM_DECLs are handled in `assign_parms'. */
3336 if (TREE_CODE (decl) != VAR_DECL)
3338 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3341 /* Create the RTL representation for the variable. */
3343 if (type == error_mark_node)
3344 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
3345 else if (DECL_SIZE (decl) == 0)
3346 /* Variable with incomplete type. */
3348 if (DECL_INITIAL (decl) == 0)
3349 /* Error message was already done; now avoid a crash. */
3350 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
3352 /* An initializer is going to decide the size of this array.
3353 Until we know the size, represent its address with a reg. */
3354 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
3355 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (type);
3357 else if (DECL_MODE (decl) != BLKmode
3358 /* If -ffloat-store, don't put explicit float vars
3360 && !(flag_float_store
3361 && TREE_CODE (type) == REAL_TYPE)
3362 && ! TREE_THIS_VOLATILE (decl)
3363 && ! TREE_ADDRESSABLE (decl)
3364 && (DECL_REGISTER (decl) || ! obey_regdecls))
3366 /* Automatic variable that can go in a register. */
3367 int unsignedp = TREE_UNSIGNED (type);
3368 enum machine_mode reg_mode
3369 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3371 if (TREE_CODE (type) == COMPLEX_TYPE)
3373 rtx realpart, imagpart;
3374 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (type));
3376 /* For a complex type variable, make a CONCAT of two pseudos
3377 so that the real and imaginary parts
3378 can be allocated separately. */
3379 realpart = gen_reg_rtx (partmode);
3380 REG_USERVAR_P (realpart) = 1;
3381 imagpart = gen_reg_rtx (partmode);
3382 REG_USERVAR_P (imagpart) = 1;
3383 DECL_RTL (decl) = gen_rtx (CONCAT, reg_mode, realpart, imagpart);
3387 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3388 if (TREE_CODE (type) == POINTER_TYPE)
3389 mark_reg_pointer (DECL_RTL (decl),
3390 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl)))
3392 REG_USERVAR_P (DECL_RTL (decl)) = 1;
3395 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST)
3397 /* Variable of fixed size that goes on the stack. */
3401 /* If we previously made RTL for this decl, it must be an array
3402 whose size was determined by the initializer.
3403 The old address was a register; set that register now
3404 to the proper address. */
3405 if (DECL_RTL (decl) != 0)
3407 if (GET_CODE (DECL_RTL (decl)) != MEM
3408 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3410 oldaddr = XEXP (DECL_RTL (decl), 0);
/* Round the size in bits up to whole bytes for the stack slot.  */
3414 = assign_stack_temp (DECL_MODE (decl),
3415 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
3416 + BITS_PER_UNIT - 1)
3419 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3421 /* Set alignment we actually gave this decl. */
3422 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3423 : GET_MODE_BITSIZE (DECL_MODE (decl)));
/* Make the pre-existing address register point at the new slot.  */
3427 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3428 if (addr != oldaddr)
3429 emit_move_insn (oldaddr, addr);
3432 /* If this is a memory ref that contains aggregate components,
3433 mark it as such for cse and loop optimize. */
3434 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3436 /* If this is in memory because of -ffloat-store,
3437 set the volatile bit, to prevent optimizations from
3438 undoing the effects. */
3439 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
3440 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3444 /* Dynamic-size object: must push space on the stack. */
3448 /* Record the stack pointer on entry to block, if have
3449 not already done so. */
3450 if (thisblock->data.block.stack_level == 0)
3452 do_pending_stack_adjust ();
3453 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3454 &thisblock->data.block.stack_level,
3455 thisblock->data.block.first_insn);
3456 stack_block_stack = thisblock;
3459 /* Compute the variable's size, in bytes. */
3460 size = expand_expr (size_binop (CEIL_DIV_EXPR,
3462 size_int (BITS_PER_UNIT)),
3463 NULL_RTX, VOIDmode, 0);
3466 /* Allocate space on the stack for the variable. Note that
3467 DECL_ALIGN says how the variable is to be aligned and we
3468 cannot use it to conclude anything about the alignment of
3470 address = allocate_dynamic_stack_space (size, NULL_RTX,
3471 TYPE_ALIGN (TREE_TYPE (decl)));
3473 /* Reference the variable indirect through that rtx. */
3474 DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);
3476 /* If this is a memory ref that contains aggregate components,
3477 mark it as such for cse and loop optimize. */
3478 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3480 /* Indicate the alignment we actually gave this variable. */
3481 #ifdef STACK_BOUNDARY
3482 DECL_ALIGN (decl) = STACK_BOUNDARY;
3484 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3488 if (TREE_THIS_VOLATILE (decl))
3489 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3490 #if 0 /* A variable is not necessarily unchanging
3491 just because it is const. RTX_UNCHANGING_P
3492 means no change in the function,
3493 not merely no change in the variable's scope.
3494 It is correct to set RTX_UNCHANGING_P if the variable's scope
3495 is the whole function. There's no convenient way to test that. */
3496 if (TREE_READONLY (decl))
3497 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
3500 /* If doing stupid register allocation, make sure life of any
3501 register variable starts here, at the start of its scope. */
3504 use_variable (DECL_RTL (decl));
3508 /* Generate code for the automatic variable declaration DECL. For
3509 most variables this just means we give it a stack offset. The
3510 compiler sometimes emits cleanups without variables and we will
3511 have to deal with those too. */
3514 bc_expand_decl (decl, cleanup)
3522 /* A cleanup with no variable. */
3529 /* Only auto variables need any work. */
3530 if (TREE_CODE (decl) != VAR_DECL || TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3533 type = TREE_TYPE (decl);
/* Give the variable bytecode "RTL" according to what we know of its size.  */
3535 if (type == error_mark_node)
3536 DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
3538 else if (DECL_SIZE (decl) == 0)
3540 /* Variable with incomplete type. The stack offset herein will be
3541 fixed later in expand_decl_init (). */
3542 DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
3544 else if (TREE_CONSTANT (DECL_SIZE (decl)))
/* Fixed-size variable: allocate a local slot of that many bytes.  */
3546 DECL_RTL (decl) = bc_allocate_local (TREE_INT_CST_LOW (DECL_SIZE (decl)) / BITS_PER_UNIT,
/* Otherwise the size is variable; a pointer slot is allocated instead.  */
3550 DECL_RTL (decl) = bc_allocate_variable_array (DECL_SIZE (decl));
3553 /* Emit code to perform the initialization of a declaration DECL. */
3556 expand_decl_init (decl)
3559 int was_used = TREE_USED (decl);
/* Bytecode compilation is handled by its own routine.  */
3561 if (output_bytecode)
3563 bc_expand_decl_init (decl);
3567 /* If this is a CONST_DECL, we don't have to generate any code, but
3568 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3569 to be set while in the obstack containing the constant. If we don't
3570 do this, we can lose if we have functions nested three deep and the middle
3571 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3572 the innermost function is the first to expand that STRING_CST. */
3573 if (TREE_CODE (decl) == CONST_DECL)
3575 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3576 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3577 EXPAND_INITIALIZER);
/* Static variables get their initial values stored at compile time,
   not via code emitted here.  */
3581 if (TREE_STATIC (decl))
3584 /* Compute and store the initial value now. */
/* For an erroneous initializer, store zero of a scalar type to keep
   later passes from tripping on an uninitialized variable.  */
3586 if (DECL_INITIAL (decl) == error_mark_node)
3588 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3589 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3590 || code == POINTER_TYPE)
3591 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3595 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3597 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3598 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3602 /* Don't let the initialization count as "using" the variable. */
3603 TREE_USED (decl) = was_used;
3605 /* Free any temporaries we made while initializing the decl. */
3606 preserve_temp_slots (NULL_RTX);
3610 /* Expand initialization for variable-sized types. Allocate array
3611 using newlocalSI and set local variable, which is a pointer to the
3615 bc_expand_variable_local_init (decl)
3618 /* Evaluate size expression and coerce to SI */
3619 bc_expand_expr (DECL_SIZE (decl));
3621 /* Type sizes are always (?) of TREE_CODE INTEGER_CST, so
3622 no coercion is necessary (?) */
3624 /* emit_typecode_conversion (preferred_typecode (TYPE_MODE (DECL_SIZE (decl)),
3625 TREE_UNSIGNED (DECL_SIZE (decl))), SIcode); */
3627 /* Emit code to allocate array */
3628 bc_emit_instruction (newlocalSI);
3630 /* Store array pointer in local variable. This is the only instance
3631 where we actually want the address of the pointer to the
3632 variable-size block, rather than the pointer itself. We avoid
3633 using expand_address() since that would cause the pointer to be
3634 pushed rather than its address. Hence the hard-coded reference;
3635 notice also that the variable is always local (no global
3636 variable-size type variables). */
3638 bc_load_localaddr (DECL_RTL (decl));
3639 bc_emit_instruction (storeP);
3643 /* Emit code to initialize a declaration. */
3646 bc_expand_decl_init (decl)
3649 int org_stack_depth;
3651 /* Static initializers are handled elsewhere */
3653 if (TREE_STATIC (decl))
3656 /* Memorize the original stack depth so it can be restored below */
3657 org_stack_depth = stack_depth;
3659 /* If the type is variable-size, we first create its space (we ASSUME
3660 it CAN'T be static). We do this regardless of whether there's an
3661 initializer assignment or not. */
3663 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
3664 bc_expand_variable_local_init (decl);
3666 /* Expand initializer assignment */
3667 if (DECL_INITIAL (decl) == error_mark_node)
3669 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3671 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3672 || code == POINTER_TYPE)
/* NOTE(review): argument order differs from the RTL-path call
   expand_assignment (decl, DECL_INITIAL (decl), 0, 0) above --
   verify this matches the bytecode expand_assignment signature.  */
3674 expand_assignment (TREE_TYPE (decl), decl, 0, 0);
3676 else if (DECL_INITIAL (decl))
3677 expand_assignment (TREE_TYPE (decl), decl, 0, 0);
3679 /* Restore stack depth */
3680 if (org_stack_depth > stack_depth)
3683 bc_adjust_stack (stack_depth - org_stack_depth);
3687 /* CLEANUP is an expression to be executed at exit from this binding contour;
3688 for example, in C++, it might call the destructor for this variable.
3690 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
3691 CLEANUP multiple times, and have the correct semantics. This
3692 happens in exception handling, and for non-local gotos.
3694 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3695 that is not associated with any particular variable. */
3698 expand_decl_cleanup (decl, cleanup)
3701 struct nesting *thisblock = block_stack;
3703 /* Error if we are not in any block. */
3707 /* Record the cleanup if there is one. */
3711 cleanup = unsave_expr (cleanup);
/* Chain the (decl, cleanup) pair onto the innermost block's cleanup list.  */
3713 thisblock->data.block.cleanups
3714 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3715 /* If this block has a cleanup, it belongs in stack_block_stack. */
3716 stack_block_stack = thisblock;
/* Tell the exception-handling machinery about this new cleanup region.  */
3717 (*interim_eh_hook) (NULL_TREE);
3722 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
3723 DECL_ELTS is the list of elements that belong to DECL's type.
3724 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
3727 expand_anon_union_decl (decl, cleanup, decl_elts)
3728 tree decl, cleanup, decl_elts;
3730 struct nesting *thisblock = block_stack;
/* Expand the union itself, then give each member the union's RTL
   reinterpreted in the member's own mode.  */
3734 expand_decl_cleanup (decl, cleanup);
3735 x = DECL_RTL (decl);
3739 tree decl_elt = TREE_VALUE (decl_elts);
3740 tree cleanup_elt = TREE_PURPOSE (decl_elts);
3741 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
3743 /* Propagate the union's alignment to the elements. */
3744 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
3746 /* If the element has BLKmode and the union doesn't, the union is
3747 aligned such that the element doesn't need to have BLKmode, so
3748 change the element's mode to the appropriate one for its size. */
3749 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
3750 DECL_MODE (decl_elt) = mode
3751 = mode_for_size (TREE_INT_CST_LOW (DECL_SIZE (decl_elt)),
3754 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
3755 instead create a new MEM rtx with the proper mode. */
3756 if (GET_CODE (x) == MEM)
3758 if (mode == GET_MODE (x))
3759 DECL_RTL (decl_elt) = x;
3762 DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0)));
3763 MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
3764 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
3767 else if (GET_CODE (x) == REG)
3769 if (mode == GET_MODE (x))
3770 DECL_RTL (decl_elt) = x;
/* A register in a different mode is referenced via a SUBREG.  */
3772 DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0);
3777 /* Record the cleanup if there is one. */
3780 thisblock->data.block.cleanups
3781 = temp_tree_cons (decl_elt, cleanup_elt,
3782 thisblock->data.block.cleanups);
3784 decl_elts = TREE_CHAIN (decl_elts);
3788 /* Expand a list of cleanups LIST.
3789 Elements may be expressions or may be nested lists.
3791 If DONT_DO is nonnull, then any list-element
3792 whose TREE_PURPOSE matches DONT_DO is omitted.
3793 This is sometimes used to avoid a cleanup associated with
3794 a value that is being returned out of the scope.
3796 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
3797 goto and handle protection regions specially in that case.
3799 If REACHABLE, we emit code, otherwise just inform the exception handling
3800 code about this finalization. */
3803 expand_cleanups (list, dont_do, in_fixup, reachable)
3810 for (tail = list; tail; tail = TREE_CHAIN (tail))
3811 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
/* A TREE_LIST element is a nested cleanup list; recurse into it.  */
3813 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3814 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
/* Register the cleanup with the exception-handling machinery.  */
3818 (*interim_eh_hook) (TREE_VALUE (tail));
3822 /* Cleanups may be run multiple times. For example,
3823 when exiting a binding contour, we expand the
3824 cleanups associated with that contour. When a goto
3825 within that binding contour has a target outside that
3826 contour, it will expand all cleanups from its scope to
3827 the target. Though the cleanups are expanded multiple
3828 times, the control paths are non-overlapping so the
3829 cleanups will not be executed twice. */
3830 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
3837 /* Move all cleanups from the current block_stack
3838 to the containing block_stack, where they are assumed to
3839 have been created. If anything can cause a temporary to
3840 be created, but not expanded for more than one level of
3841 block_stacks, then this code will have to change. */
3846 struct nesting *block = block_stack;
3847 struct nesting *outer = block->next;
/* Splice this block's cleanup list onto the front of the enclosing
   block's list, then empty this block's list.  */
3849 outer->data.block.cleanups
3850 = chainon (block->data.block.cleanups,
3851 outer->data.block.cleanups);
3852 block->data.block.cleanups = 0;
/* Return the cleanup list of the innermost contour, or 0 if there is
   no current contour.  */
3856 last_cleanup_this_contour ()
3858 if (block_stack == 0)
3861 return block_stack->data.block.cleanups;
3864 /* Return 1 if there are any pending cleanups at this point.
3865 If THIS_CONTOUR is nonzero, check the current contour as well.
3866 Otherwise, look only at the contours that enclose this one. */
3869 any_pending_cleanups (this_contour)
3872 struct nesting *block;
3874 if (block_stack == 0)
3877 if (this_contour && block_stack->data.block.cleanups != NULL)
/* Short-circuit: no cleanups here and none recorded for the enclosing
   contours either.  */
3879 if (block_stack->data.block.cleanups == 0
3880 && (block_stack->data.block.outer_cleanups == 0
3882 || block_stack->data.block.outer_cleanups == empty_cleanup_list
/* Otherwise walk the enclosing contours looking for a cleanup list.  */
3887 for (block = block_stack->next; block; block = block->next)
3888 if (block->data.block.cleanups != 0)
3894 /* Enter a case (Pascal) or switch (C) statement.
3895 Push a block onto case_stack and nesting_stack
3896 to accumulate the case-labels that are seen
3897 and to record the labels generated for the statement.
3899 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
3900 Otherwise, this construct is transparent for `exit_something'.
3902 EXPR is the index-expression to be dispatched on.
3903 TYPE is its nominal type. We could simply convert EXPR to this type,
3904 but instead we take short cuts. */
3907 expand_start_case (exit_flag, expr, type, printname)
3913 register struct nesting *thiscase = ALLOC_NESTING ();
3915 /* Make an entry on case_stack for the case we are entering. */
3917 thiscase->next = case_stack;
3918 thiscase->all = nesting_stack;
3919 thiscase->depth = ++nesting_depth;
3920 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
3921 thiscase->data.case_stmt.case_list = 0;
3922 thiscase->data.case_stmt.index_expr = expr;
3923 thiscase->data.case_stmt.nominal_type = type;
3924 thiscase->data.case_stmt.default_label = 0;
3925 thiscase->data.case_stmt.num_ranges = 0;
3926 thiscase->data.case_stmt.printname = printname;
3927 thiscase->data.case_stmt.seenlabel = 0;
3928 case_stack = thiscase;
3929 nesting_stack = thiscase;
/* Bytecode compilation is handled by its own routine.  */
3931 if (output_bytecode)
3933 bc_expand_start_case (thiscase, expr, type, printname);
3937 do_pending_stack_adjust ();
3939 /* Make sure case_stmt.start points to something that won't
3940 need any transformation before expand_end_case. */
3941 if (GET_CODE (get_last_insn ()) != NOTE)
3942 emit_note (NULL_PTR, NOTE_INSN_DELETED);
3944 thiscase->data.case_stmt.start = get_last_insn ();
3948 /* Enter a case statement. It is assumed that the caller has pushed
3949 the current context onto the case stack. */
3952 bc_expand_start_case (thiscase, expr, type, printname)
3953 struct nesting *thiscase;
/* Evaluate the index expression and convert it to the nominal type.  */
3958 bc_expand_expr (expr);
3959 bc_expand_conversion (TREE_TYPE (expr), type);
3961 /* For cases, the skip is a place we jump to that's emitted after
3962 the size of the jump table is known. */
3964 thiscase->data.case_stmt.skip_label = gen_label_rtx ();
3965 bc_emit_bytecode (jump);
3966 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));
3968 #ifdef DEBUG_PRINT_CODE
3969 fputc ('\n', stderr);
3974 /* Start a "dummy case statement" within which case labels are invalid
3975 and are not connected to any larger real case statement.
3976 This can be used if you don't want to let a case statement jump
3977 into the middle of certain kinds of constructs. */
3980 expand_start_case_dummy ()
3982 register struct nesting *thiscase = ALLOC_NESTING ();
3984 /* Make an entry on case_stack for the dummy. */
/* Note start == 0 is what marks this entry as a dummy: pushcase
   refuses to add labels when case_stmt.start is zero.  */
3986 thiscase->next = case_stack;
3987 thiscase->all = nesting_stack;
3988 thiscase->depth = ++nesting_depth;
3989 thiscase->exit_label = 0;
3990 thiscase->data.case_stmt.case_list = 0;
3991 thiscase->data.case_stmt.start = 0;
3992 thiscase->data.case_stmt.nominal_type = 0;
3993 thiscase->data.case_stmt.default_label = 0;
3994 thiscase->data.case_stmt.num_ranges = 0;
3995 case_stack = thiscase;
3996 nesting_stack = thiscase;
3999 /* End a dummy case statement. */
4002 expand_end_case_dummy ()
4004 POPSTACK (case_stack);
4007 /* Return the data type of the index-expression
4008 of the innermost case statement, or null if none. */
4011 case_index_expr_type ()
4014 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4018 /* Accumulate one case or default label inside a case or switch statement.
4019 VALUE is the value of the case (a null pointer, for a default label).
4020 The function CONVERTER, when applied to arguments T and V,
4021 converts the value V to the type T.
4023 If not currently inside a case or switch statement, return 1 and do
4024 nothing. The caller will print a language-specific error message.
4025 If VALUE is a duplicate or overlaps, return 2 and do nothing
4026 except store the (first) duplicate node in *DUPLICATE.
4027 If VALUE is out of range, return 3 and do nothing.
4028 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4029 Return 0 on success.
4031 Extended to handle range statements. */
4034 pushcase (value, converter, label, duplicate)
4035 register tree value;
4036 tree (*converter) PROTO((tree, tree));
4037 register tree label;
4040 register struct case_node **l;
4041 register struct case_node *n;
4045 if (output_bytecode)
4046 return bc_pushcase (value, label);
4048 /* Fail if not inside a real case statement. */
4049 if (! (case_stack && case_stack->data.case_stmt.start))
/* Jumping past a cleanup or variable-sized object is invalid.  */
4052 if (stack_block_stack
4053 && stack_block_stack->depth > case_stack->depth)
4056 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4057 nominal_type = case_stack->data.case_stmt.nominal_type;
4059 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4060 if (index_type == error_mark_node)
4063 /* Convert VALUE to the type in which the comparisons are nominally done. */
4065 value = (*converter) (nominal_type, value);
4067 /* If this is the first label, warn if any insns have been emitted. */
4068 if (case_stack->data.case_stmt.seenlabel == 0)
4071 for (insn = case_stack->data.case_stmt.start;
4073 insn = NEXT_INSN (insn))
4075 if (GET_CODE (insn) == CODE_LABEL)
4077 if (GET_CODE (insn) != NOTE
4078 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4080 warning ("unreachable code at beginning of %s",
4081 case_stack->data.case_stmt.printname);
4086 case_stack->data.case_stmt.seenlabel = 1;
4088 /* Fail if this value is out of range for the actual type of the index
4089 (which may be narrower than NOMINAL_TYPE). */
4090 if (value != 0 && ! int_fits_type_p (value, index_type))
4093 /* Fail if this is a duplicate or overlaps another entry. */
/* A null VALUE means this is the default label.  */
4096 if (case_stack->data.case_stmt.default_label != 0)
4098 *duplicate = case_stack->data.case_stmt.default_label;
4101 case_stack->data.case_stmt.default_label = label;
/* A single-value case is a degenerate range [value, value].  */
4104 return add_case_node (value, value, label, duplicate);
4106 expand_label (label);
4110 /* Like pushcase but this case applies to all values
4111 between VALUE1 and VALUE2 (inclusive).
4112 The return value is the same as that of pushcase
4113 but there is one additional error code:
4114 4 means the specified range was empty. */
4117 pushcase_range (value1, value2, converter, label, duplicate)
4118 register tree value1, value2;
4119 tree (*converter) PROTO((tree, tree));
4120 register tree label;
4123 register struct case_node **l;
4124 register struct case_node *n;
4128 /* Fail if not inside a real case statement. */
4129 if (! (case_stack && case_stack->data.case_stmt.start))
/* Jumping past a cleanup or variable-sized object is invalid.  */
4132 if (stack_block_stack
4133 && stack_block_stack->depth > case_stack->depth)
4136 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4137 nominal_type = case_stack->data.case_stmt.nominal_type;
4139 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4140 if (index_type == error_mark_node)
4143 /* If this is the first label, warn if any insns have been emitted. */
4144 if (case_stack->data.case_stmt.seenlabel == 0)
4147 for (insn = case_stack->data.case_stmt.start;
4149 insn = NEXT_INSN (insn))
4151 if (GET_CODE (insn) == CODE_LABEL)
4153 if (GET_CODE (insn) != NOTE
4154 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4156 warning ("unreachable code at beginning of %s",
4157 case_stack->data.case_stmt.printname);
4162 case_stack->data.case_stmt.seenlabel = 1;
4164 /* Convert VALUEs to type in which the comparisons are nominally done. */
4165 if (value1 == 0) /* Negative infinity. */
4166 value1 = TYPE_MIN_VALUE(index_type);
4167 value1 = (*converter) (nominal_type, value1);
4169 if (value2 == 0) /* Positive infinity. */
4170 value2 = TYPE_MAX_VALUE(index_type);
4171 value2 = (*converter) (nominal_type, value2);
4173 /* Fail if these values are out of range. */
4174 if (! int_fits_type_p (value1, index_type))
4177 if (! int_fits_type_p (value2, index_type))
4180 /* Fail if the range is empty. */
4181 if (tree_int_cst_lt (value2, value1))
4184 return add_case_node (value1, value2, label, duplicate);
4187 /* Do the actual insertion of a case label for pushcase and pushcase_range
4188 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4189 slowdown for large switch statements. */
4192 add_case_node (low, high, label, duplicate)
4197 struct case_node *p, **q, *r;
4199 q = &case_stack->data.case_stmt.case_list;
/* Descend the tree, ordered by range, to find the insertion point.  */
4206 /* Keep going past elements distinctly greater than HIGH. */
4207 if (tree_int_cst_lt (high, p->low))
4210 /* or distinctly less than LOW. */
4211 else if (tree_int_cst_lt (p->high, low))
4216 /* We have an overlap; this is an error. */
4217 *duplicate = p->code_label;
4222 /* Add this label to the chain, and succeed.
4223 Copy LOW, HIGH so they are on temporary rather than momentary
4224 obstack and will thus survive till the end of the case statement. */
4226 r = (struct case_node *) oballoc (sizeof (struct case_node));
4227 r->low = copy_node (low);
4229 /* If the bounds are equal, turn this into the one-value case. */
4231 if (tree_int_cst_equal (low, high))
4235 r->high = copy_node (high);
4236 case_stack->data.case_stmt.num_ranges++;
4239 r->code_label = label;
4240 expand_label (label);
/* Rebalance the AVL tree after insertion.  The embedded assignments in
   the conditions below (e.g. `p->left = s = r->right') are intentional:
   they both relink a subtree and test whether it is non-null.  */
4250 struct case_node *s;
4256 if (! (b = p->balance))
4257 /* Growth propagation from left side. */
4264 if (p->left = s = r->right)
/* Single right rotation about P.  */
4281 case_stack->data.case_stmt.case_list = r;
4284 /* r->balance == +1 */
/* Double (left-right) rotation about P.  */
4289 struct case_node *t = r->right;
4291 if (p->left = s = t->right)
4295 if (r->right = s = t->left)
4317 case_stack->data.case_stmt.case_list = t;
4324 /* p->balance == +1; growth of left side balances the node. */
4334 if (! (b = p->balance))
4335 /* Growth propagation from right side. */
4343 if (p->right = s = r->left)
/* Single left rotation about P.  */
4360 case_stack->data.case_stmt.case_list = r;
4364 /* r->balance == -1 */
/* Double (right-left) rotation about P.  */
4368 struct case_node *t = r->left;
4370 if (p->right = s = t->left)
4375 if (r->left = s = t->right)
4398 case_stack->data.case_stmt.case_list = t;
4404 /* p->balance == -1; growth of right side balances the node. */
4417 /* Accumulate one case or default label; VALUE is the value of the
4418 case, or nil for a default label. If not currently inside a case,
4419 return 1 and do nothing. If VALUE is a duplicate or overlaps, return
4420 2 and do nothing. If VALUE is out of range, return 3 and do nothing.
4421 Return 0 on success. This function is a leftover from the earlier
4422 bytecode compiler, which was based on gcc 1.37. It should be
4423 merged into pushcase. */
4426 bc_pushcase (value, label)
4430 struct nesting *thiscase = case_stack;
4431 struct case_node *case_label, *new_label;
4436 /* Fail if duplicate, overlap, or out of type range. */
4439 value = convert (thiscase->data.case_stmt.nominal_type, value);
4440 if (! int_fits_type_p (value, thiscase->data.case_stmt.nominal_type))
/* Walk the sorted list (linked through `left') to the insertion point.  */
4443 for (case_label = thiscase->data.case_stmt.case_list;
4444 case_label->left; case_label = case_label->left)
4445 if (! tree_int_cst_lt (case_label->left->high, value))
/* NOTE(review): mixed && / || without parentheses below -- the `&&'
   binds tighter, which may or may not be the intended grouping.  */
4448 if (case_label != thiscase->data.case_stmt.case_list
4449 && ! tree_int_cst_lt (case_label->high, value)
4450 || (case_label->left && ! tree_int_cst_lt (value, case_label->left->low)))
4453 new_label = (struct case_node *) oballoc (sizeof (struct case_node));
4454 new_label->low = new_label->high = copy_node (value);
4455 new_label->code_label = label;
4456 new_label->left = case_label->left;
4458 case_label->left = new_label;
4459 thiscase->data.case_stmt.num_ranges++;
/* A null VALUE records the default label; reject a second default.  */
4463 if (thiscase->data.case_stmt.default_label)
4465 thiscase->data.case_stmt.default_label = label;
4468 expand_label (label);
4472 /* Returns the number of possible values of TYPE.
4473 Returns -1 if the number is unknown or variable.
4474 Returns -2 if the number does not fit in a HOST_WIDE_INT.
4475 Sets *SPARENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4476 do not increase monotonically (there may be duplicates);
4477 to 1 if the values increase monotonically, but not always by 1;
4478 otherwise sets it to 0. */
4481 all_cases_count (type, spareness)
4485 HOST_WIDE_INT count, count_high = 0;
4488 switch (TREE_CODE (type))
/* A plain `char' index has one value per bit pattern.  */
4495 count = 1 << BITS_PER_UNIT;
/* Integer types: need constant bounds to count values.  */
4499 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4500 || TREE_CODE (TYPE_MAX_VALUE (type)) != INTEGER_CST)
4505 = TREE_INT_CST_LOW (TYPE_MAX_VALUE (type))
4506 - TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + 1
4507 but with overflow checking. */
4508 tree mint = TYPE_MIN_VALUE (type);
4509 tree maxt = TYPE_MAX_VALUE (type);
4510 HOST_WIDE_INT lo, hi;
4511 neg_double(TREE_INT_CST_LOW (mint), TREE_INT_CST_HIGH (mint),
4513 add_double(TREE_INT_CST_LOW (maxt), TREE_INT_CST_HIGH (maxt),
4515 add_double (lo, hi, 1, 0, &lo, &hi);
4516 if (hi != 0 || lo < 0)
/* Enumerations: count the literals and classify their spacing.  */
4523 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4525 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4526 || TREE_CODE (TREE_VALUE (t)) != INTEGER_CST
4527 || TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + count
4528 != TREE_INT_CST_LOW (TREE_VALUE (t)))
/* Spacing is irregular; check whether the values at least increase.  */
4532 if (*spareness == 1)
4534 tree prev = TREE_VALUE (TYPE_VALUES (type));
4535 for (t = TYPE_VALUES (type); t = TREE_CHAIN (t), t != NULL_TREE; )
4537 if (! tree_int_cst_lt (prev, TREE_VALUE (t)))
4542 prev = TREE_VALUE (t);
/* Test / set bit INDEX in the byte-array bitstring ARRAY.  Used below to
   track which case values of an enumerated type have been seen.  */
4551 #define BITARRAY_TEST(ARRAY, INDEX) \
4552 ((ARRAY)[(unsigned)(INDEX) / HOST_BITS_PER_CHAR]\
4553 & (1 << ((unsigned)(INDEX) % HOST_BITS_PER_CHAR)))
4554 #define BITARRAY_SET(ARRAY, INDEX) \
4555 ((ARRAY)[(unsigned)(INDEX) / HOST_BITS_PER_CHAR]\
4556 |= 1 << ((unsigned)(INDEX) % HOST_BITS_PER_CHAR))
4558 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
4559 with the case values we have seen, assuming the case expression
4561 SPARSENESS is as determined by all_cases_count.
4563 The time needed is proportional to COUNT, unless
4564 SPARSENESS is 2, in which case quadratic time is needed. */
4567 mark_seen_cases (type, cases_seen, count, sparseness)
4569 unsigned char *cases_seen;
4575 tree next_node_to_try = NULL_TREE;
4576 long next_node_offset = 0;
4578 register struct case_node *n, *root = case_stack->data.case_stmt.case_list;
/* Scratch INTEGER_CST node used to probe the case tree/list.  */
4579 tree val = make_node (INTEGER_CST);
4580 TREE_TYPE (val) = type;
4583 else if (sparseness == 2)
4588 /* This less efficient loop is only needed to handle
4589 duplicate case values (multiple enum constants
4590 with the same value). */
4591 TREE_TYPE (val) = TREE_TYPE (root->low);
/* For each enum literal (at bit offset XLO), search the AVL tree for
   a case range containing its value.  */
4592 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
4593 t = TREE_CHAIN (t), xlo++)
4595 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
4596 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
4600 /* Keep going past elements distinctly greater than VAL. */
4601 if (tree_int_cst_lt (val, n->low))
4604 /* or distinctly less than VAL. */
4605 else if (tree_int_cst_lt (n->high, val))
4610 /* We have found a matching range. */
4611 BITARRAY_SET (cases_seen, xlo);
/* Linear path: flatten the AVL tree into a sorted list and walk every
   value of every range once.  */
4621 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
4622 for (n = root; n; n = n->right)
4624 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
4625 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
4626 while ( ! tree_int_cst_lt (n->high, val))
4628 /* Calculate (into xlo) the "offset" of the integer (val).
4629 The element with lowest value has offset 0, the next smallest
4630 element has offset 1, etc. */
4632 HOST_WIDE_INT xlo, xhi;
4634 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
4636 /* The TYPE_VALUES will be in increasing order, so
4637 starting searching where we last ended. */
4638 t = next_node_to_try;
4639 xlo = next_node_offset;
/* Wrap around to the front of the literal list if needed.  */
4645 t = TYPE_VALUES (type);
4648 if (tree_int_cst_equal (val, TREE_VALUE (t)))
4650 next_node_to_try = TREE_CHAIN (t);
4651 next_node_offset = xlo + 1;
4656 if (t == next_node_to_try)
/* Dense case: offset is simply VAL minus the type's minimum value,
   computed with double-word arithmetic.  */
4665 t = TYPE_MIN_VALUE (type);
4667 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
4671 add_double (xlo, xhi,
4672 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4676 if (xhi == 0 && xlo >= 0 && xlo < count)
4677 BITARRAY_SET (cases_seen, xlo);
/* Advance VAL to the next integer in the range [n->low, n->high].  */
4678 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4680 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
4686 /* Called when the index of a switch statement is an enumerated type
4687 and there is no default label.
4689 Checks that all enumeration literals are covered by the case
4690 expressions of a switch. Also, warn if there are any extra
4691 switch cases that are *not* elements of the enumerated type.
4693 If all enumeration literals were covered by the case expressions,
4694 turn one of the expressions into the default expression since it should
4695 not be possible to fall through such a switch. */
/* See the block comment above: warn about enumeration literals not
   covered by any case, and about case values that are not literals of
   the enumerated TYPE.  NOTE(review): interior lines are elided in this
   extract; comments describe only the visible code.  */
4698 check_for_full_enumeration_handling (type)
4701 register struct case_node *n;
4702 register struct case_node **l;
4703 register tree chain;
4706 /* True iff the selector type is a numbered set mode. */
4709 /* The number of possible selector values. */
4712 /* For each possible selector value, a one iff it has been matched
4713 by a case value alternative. */
4714 unsigned char *cases_seen;
4716 /* The allocated size of cases_seen, in chars. */
/* Bytecode compilation has its own variant of this check.  */
4720 if (output_bytecode)
4722 bc_check_for_full_enumeration_handling (type);
4729 size = all_cases_count (type, &sparseness);
4730 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
/* Skip the whole check for huge enumerations, or when the bitmap
   cannot be allocated -- missing a warning is harmless.  */
4732 if (size > 0 && size < 600000
4733 /* We deliberately use malloc here - not xmalloc. */
4734 && (cases_seen = (unsigned char *) malloc (bytes_needed)) != NULL)
4737 tree v = TYPE_VALUES (type);
4738 bzero (cases_seen, bytes_needed);
4740 /* The time complexity of this code is normally O(N), where
4741 N is the number of members in the enumerated type.
4742 However, if type is a ENUMERAL_TYPE whose values do not
4743 increase monotonically, O(N*log(N)) time may be needed. */
4745 mark_seen_cases (type, cases_seen, size, sparseness);
/* Warn once for each enumerator whose bit was never set.  */
4747 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
4749 if (BITARRAY_TEST(cases_seen, i) == 0)
4750 warning ("enumeration value `%s' not handled in switch",
4751 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
4757 /* Now we go the other way around; we warn if there are case
4758 expressions that don't correspond to enumerators. This can
4759 occur since C and C++ don't enforce type-checking of
4760 assignments to enumeration variables. */
/* The case list may still be in binary-tree form; linearize it.  */
4762 if (case_stack->data.case_stmt.case_list
4763 && case_stack->data.case_stmt.case_list->left)
4764 case_stack->data.case_stmt.case_list
4765 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
4767 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
/* Search the enumerators for one equal to this case's low bound.  */
4769 for (chain = TYPE_VALUES (type);
4770 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
4771 chain = TREE_CHAIN (chain))
4776 if (TYPE_NAME (type) == 0)
4777 warning ("case value `%d' not in enumerated type",
4778 TREE_INT_CST_LOW (n->low));
4780 warning ("case value `%d' not in enumerated type `%s'",
4781 TREE_INT_CST_LOW (n->low),
4782 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4785 : DECL_NAME (TYPE_NAME (type))));
/* For a range case, check the high bound the same way.  */
4787 if (!tree_int_cst_equal (n->low, n->high))
4789 for (chain = TYPE_VALUES (type);
4790 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
4791 chain = TREE_CHAIN (chain))
4796 if (TYPE_NAME (type) == 0)
4797 warning ("case value `%d' not in enumerated type",
4798 TREE_INT_CST_LOW (n->high));
4800 warning ("case value `%d' not in enumerated type `%s'",
4801 TREE_INT_CST_LOW (n->high),
4802 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4805 : DECL_NAME (TYPE_NAME (type))));
4811 /* ??? This optimization is disabled because it causes valid programs to
4812 fail. ANSI C does not guarantee that an expression with enum type
4813 will have a value that is the same as one of the enumeration literals. */
4815 /* If all values were found as case labels, make one of them the default
4816 label. Thus, this switch will never fall through. We arbitrarily pick
4817 the last one to make the default since this is likely the most
4818 efficient choice. */
4822 for (l = &case_stack->data.case_stmt.case_list;
4827 case_stack->data.case_stmt.default_label = (*l)->code_label;
4834 /* Check that all enumeration literals are covered by the case
4835 expressions of a switch. Also warn if there are any cases
4836 that are not elements of the enumerated type. */
/* Bytecode analogue of check_for_full_enumeration_handling: warn about
   unhandled enumerators and about case values outside TYPE.
   NOTE(review): interior lines are elided in this extract.  */
4839 bc_check_for_full_enumeration_handling (type)
4842 struct nesting *thiscase = case_stack;
4843 struct case_node *c;
4846 /* Check for enums not handled. */
4847 for (e = TYPE_VALUES (type); e; e = TREE_CHAIN (e))
/* Skip past case nodes whose whole range lies below enumerator E.  */
4849 for (c = thiscase->data.case_stmt.case_list->left;
4850 c && tree_int_cst_lt (c->high, TREE_VALUE (e));
4853 if (! (c && tree_int_cst_equal (c->low, TREE_VALUE (e))))
4854 warning ("enumerated value `%s' not handled in switch",
4855 IDENTIFIER_POINTER (TREE_PURPOSE (e)));
4858 /* Check for cases not in the enumeration. */
4859 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
/* Look for an enumerator equal to this case's low bound.  */
4861 for (e = TYPE_VALUES (type);
4862 e && !tree_int_cst_equal (c->low, TREE_VALUE (e));
4866 warning ("case value `%d' not in enumerated type `%s'",
4867 TREE_INT_CST_LOW (c->low),
4868 IDENTIFIER_POINTER (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
4870 : DECL_NAME (TYPE_NAME (type))));
4874 /* Terminate a case (Pascal) or switch (C) statement
4875 in which ORIG_INDEX is the expression to be tested.
4876 Generate the code to test it and jump to the right place. */
/* Finish expanding a switch statement: emit the dispatch code (constant
   jump, binary decision tree, casesi insn, tablejump, or explicit jump
   table) and pop the case nesting.  ORIG_INDEX is the original index
   expression as written by the user.  NOTE(review): many interior lines
   of this function are elided in this extract; comments describe only
   the visible code.  */
4879 expand_end_case (orig_index)
4882 tree minval, maxval, range, orig_minval;
4883 rtx default_label = 0;
4884 register struct case_node *n;
4892 register struct nesting *thiscase = case_stack;
4893 tree index_expr, index_type;
/* Bytecode output goes through its own routine entirely.  */
4896 if (output_bytecode)
4898 bc_expand_end_case (orig_index);
4902 table_label = gen_label_rtx ();
4903 index_expr = thiscase->data.case_stmt.index_expr;
4904 index_type = TREE_TYPE (index_expr);
4905 unsignedp = TREE_UNSIGNED (index_type);
4907 do_pending_stack_adjust ();
4909 /* An ERROR_MARK occurs for various reasons including invalid data type. */
4910 if (index_type != error_mark_node)
4912 /* If switch expression was an enumerated type, check that all
4913 enumeration literals are covered by the cases.
4914 No sense trying this if there's a default case, however. */
4916 if (!thiscase->data.case_stmt.default_label
4917 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
4918 && TREE_CODE (index_expr) != INTEGER_CST)
4919 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
4921 /* If this is the first label, warn if any insns have been emitted. */
4922 if (thiscase->data.case_stmt.seenlabel == 0)
/* Scan backwards to the start of the switch, ignoring notes and USEs.  */
4925 for (insn = get_last_insn ();
4926 insn != case_stack->data.case_stmt.start;
4927 insn = PREV_INSN (insn))
4928 if (GET_CODE (insn) != NOTE
4929 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn))!= USE))
4931 warning ("unreachable code at beginning of %s",
4932 case_stack->data.case_stmt.printname);
4937 /* If we don't have a default-label, create one here,
4938 after the body of the switch. */
4939 if (thiscase->data.case_stmt.default_label == 0)
4941 thiscase->data.case_stmt.default_label
4942 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4943 expand_label (thiscase->data.case_stmt.default_label);
4945 default_label = label_rtx (thiscase->data.case_stmt.default_label);
4947 before_case = get_last_insn ();
/* Linearize the case list if it is still in binary-tree form.  */
4949 if (thiscase->data.case_stmt.case_list
4950 && thiscase->data.case_stmt.case_list->left)
4951 thiscase->data.case_stmt.case_list
4952 = case_tree2list(thiscase->data.case_stmt.case_list, 0);
4954 /* Simplify the case-list before we count it. */
4955 group_case_nodes (thiscase->data.case_stmt.case_list);
4957 /* Get upper and lower bounds of case values.
4958 Also convert all the case values to the index expr's data type. */
4961 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4963 /* Check low and high label values are integers. */
4964 if (TREE_CODE (n->low) != INTEGER_CST)
4966 if (TREE_CODE (n->high) != INTEGER_CST)
4969 n->low = convert (index_type, n->low);
4970 n->high = convert (index_type, n->high);
4972 /* Count the elements and track the largest and smallest
4973 of them (treating them as signed even if they are not). */
4981 if (INT_CST_LT (n->low, minval))
4983 if (INT_CST_LT (maxval, n->high))
4986 /* A range counts double, since it requires two compares. */
4987 if (! tree_int_cst_equal (n->low, n->high))
4991 orig_minval = minval;
4993 /* Compute span of values. */
4995 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
/* Expand the index for its side effects only, then jump to default;
   presumably this is the no-cases path -- elided lines hide the
   condition, so confirm against the full source.  */
4999 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5001 emit_jump (default_label);
5004 /* If range of values is much bigger than number of values,
5005 make a sequence of conditional branches instead of a dispatch.
5006 If the switch-index is a constant, do it this way
5007 because we can optimize it. */
5009 #ifndef CASE_VALUES_THRESHOLD
5011 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
5013 /* If machine does not have a case insn that compares the
5014 bounds, this means extra overhead for dispatch tables
5015 which raises the threshold for using them. */
5016 #define CASE_VALUES_THRESHOLD 5
5017 #endif /* HAVE_casesi */
5018 #endif /* CASE_VALUES_THRESHOLD */
5020 else if (TREE_INT_CST_HIGH (range) != 0
5021 || count < CASE_VALUES_THRESHOLD
5022 || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
5024 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5027 || TREE_CODE (index_expr) == INTEGER_CST
5028 /* These will reduce to a constant. */
5029 || (TREE_CODE (index_expr) == CALL_EXPR
5030 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
5031 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
5032 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
5033 || (TREE_CODE (index_expr) == COMPOUND_EXPR
5034 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
5036 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5038 /* If the index is a short or char that we do not have
5039 an insn to handle comparisons directly, convert it to
5040 a full integer now, rather than letting each comparison
5041 generate the conversion. */
5043 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5044 && (cmp_optab->handlers[(int) GET_MODE(index)].insn_code
5045 == CODE_FOR_nothing))
/* Find the narrowest wider mode with a compare insn.  */
5047 enum machine_mode wider_mode;
5048 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5049 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5050 if (cmp_optab->handlers[(int) wider_mode].insn_code
5051 != CODE_FOR_nothing)
5053 index = convert_to_mode (wider_mode, index, unsignedp);
5059 do_pending_stack_adjust ();
5061 index = protect_from_queue (index, 0);
5062 if (GET_CODE (index) == MEM)
5063 index = copy_to_reg (index);
5064 if (GET_CODE (index) == CONST_INT
5065 || TREE_CODE (index_expr) == INTEGER_CST)
5067 /* Make a tree node with the proper constant value
5068 if we don't already have one. */
5069 if (TREE_CODE (index_expr) != INTEGER_CST)
5072 = build_int_2 (INTVAL (index),
5073 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5074 index_expr = convert (index_type, index_expr);
5077 /* For constant index expressions we need only
5078 issue an unconditional branch to the appropriate
5079 target code. The job of removing any unreachable
5080 code is left to the optimisation phase if the
5081 "-O" option is specified. */
5082 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5083 if (! tree_int_cst_lt (index_expr, n->low)
5084 && ! tree_int_cst_lt (n->high, index_expr))
5088 emit_jump (label_rtx (n->code_label));
5090 emit_jump (default_label);
5094 /* If the index expression is not constant we generate
5095 a binary decision tree to select the appropriate
5096 target code. This is done as follows:
5098 The list of cases is rearranged into a binary tree,
5099 nearly optimal assuming equal probability for each case.
5101 The tree is transformed into RTL, eliminating
5102 redundant test conditions at the same time.
5104 If program flow could reach the end of the
5105 decision tree an unconditional jump to the
5106 default code is emitted. */
/* Cost estimation is only valid for non-enum index types.  */
5109 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
5110 && estimate_case_costs (thiscase->data.case_stmt.case_list));
5111 balance_case_nodes (&thiscase->data.case_stmt.case_list,
5113 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5114 default_label, index_type);
5115 emit_jump_if_reachable (default_label);
/* Dispatch-table path: the index is compared against the bounds and
   used to index a jump table (casesi or tablejump).  */
5124 enum machine_mode index_mode = SImode;
5125 int index_bits = GET_MODE_BITSIZE (index_mode);
5127 enum machine_mode op_mode;
5129 /* Convert the index to SImode. */
5130 if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
5131 > GET_MODE_BITSIZE (index_mode))
5133 enum machine_mode omode = TYPE_MODE (index_type);
5134 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
5136 /* We must handle the endpoints in the original mode. */
5137 index_expr = build (MINUS_EXPR, index_type,
5138 index_expr, minval);
5139 minval = integer_zero_node;
5140 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
/* Out-of-range values go straight to the default label.  */
5141 emit_cmp_insn (rangertx, index, LTU, NULL_RTX, omode, 1, 0);
5142 emit_jump_insn (gen_bltu (default_label));
5143 /* Now we can safely truncate. */
5144 index = convert_to_mode (index_mode, index, 0);
5148 if (TYPE_MODE (index_type) != index_mode)
5150 index_expr = convert (type_for_size (index_bits, 0),
5152 index_type = TREE_TYPE (index_expr);
5155 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5158 index = protect_from_queue (index, 0);
5159 do_pending_stack_adjust ();
/* Force each casesi operand into a form its predicate accepts.  */
5161 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][0];
5162 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][0])
5164 index = copy_to_mode_reg (op_mode, index);
5166 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
5168 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][1];
5169 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][1])
5171 op1 = copy_to_mode_reg (op_mode, op1);
5173 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
5175 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][2];
5176 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][2])
5178 op2 = copy_to_mode_reg (op_mode, op2);
5180 emit_jump_insn (gen_casesi (index, op1, op2,
5181 table_label, default_label));
5185 #ifdef HAVE_tablejump
5186 if (! win && HAVE_tablejump)
/* Fallback: rebase the index to zero and use a tablejump.  */
5188 index_expr = convert (thiscase->data.case_stmt.nominal_type,
5189 fold (build (MINUS_EXPR, index_type,
5190 index_expr, minval)));
5191 index_type = TREE_TYPE (index_expr);
5192 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5194 index = protect_from_queue (index, 0);
5195 do_pending_stack_adjust ();
5197 do_tablejump (index, TYPE_MODE (index_type),
5198 expand_expr (range, NULL_RTX, VOIDmode, 0),
5199 table_label, default_label);
5206 /* Get table of labels to jump to, in order of case index. */
5208 ncases = TREE_INT_CST_LOW (range) + 1;
5209 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
5210 bzero ((char *) labelvec, ncases * sizeof (rtx));
5212 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
/* I is this case's offset from the smallest case value.  */
5214 register HOST_WIDE_INT i
5215 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);
5220 = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
5221 if (i + TREE_INT_CST_LOW (orig_minval)
5222 == TREE_INT_CST_LOW (n->high))
5228 /* Fill in the gaps with the default. */
5229 for (i = 0; i < ncases; i++)
5230 if (labelvec[i] == 0)
5231 labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);
5233 /* Output the table */
5234 emit_label (table_label);
5236 /* This would be a lot nicer if CASE_VECTOR_PC_RELATIVE
5237 were an expression, instead of an #ifdef/#ifndef. */
5239 #ifdef CASE_VECTOR_PC_RELATIVE
5243 emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
5244 gen_rtx (LABEL_REF, Pmode, table_label),
5245 gen_rtvec_v (ncases, labelvec)));
5247 emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
5248 gen_rtvec_v (ncases, labelvec)));
5250 /* If the case insn drops through the table,
5251 after the table we must jump to the default-label.
5252 Otherwise record no drop-through after the table. */
5253 #ifdef CASE_DROPS_THROUGH
5254 emit_jump (default_label);
/* Move the dispatch code emitted above to the front of the switch,
   i.e. before the case bodies.  */
5260 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
5261 reorder_insns (before_case, get_last_insn (),
5262 thiscase->data.case_stmt.start);
5265 if (thiscase->exit_label)
5266 emit_label (thiscase->exit_label);
5268 POPSTACK (case_stack);
5273 /* Convert the tree NODE into a list linked by the right field, with the left
5274 field zeroed. RIGHT is used for recursion; it is a list to be placed
5275 rightmost in the resulting list. */
5277 static struct case_node *
5278 case_tree2list (node, right)
5279 struct case_node *node, *right;
5281 struct case_node *left;
/* Linearize the right subtree first; it becomes the tail of the list.  */
5284 right = case_tree2list (node->right, right);
5286 node->right = right;
/* Assignment-in-condition is deliberate: LEFT is the left subtree,
   which is then linearized with NODE (and its tail) appended.  */
5287 if (left = node->left)
5290 return case_tree2list (left, node);
5296 /* Terminate a case statement. EXPR is the original index
/* Bytecode analogue of expand_end_case: emit the indexed-jump opcode,
   its literal arguments, and the jump table itself, then pop the case
   nesting.  EXPR is the original index expression.  NOTE(review):
   interior lines are elided in this extract.  */
5300 bc_expand_end_case (expr)
5303 struct nesting *thiscase = case_stack;
5304 enum bytecode_opcode opcode;
5305 struct bc_label *jump_label;
5306 struct case_node *c;
/* Jump over the dispatch code that follows the switch body.  */
5308 bc_emit_bytecode (jump);
5309 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));
5311 #ifdef DEBUG_PRINT_CODE
5312 fputc ('\n', stderr);
5315 /* Now that the size of the jump table is known, emit the actual
5316 indexed jump instruction. */
5317 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));
/* Select the case opcode by mode (SImode/DImode) and signedness.  */
5319 opcode = TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode
5320 ? TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseSU : caseSI
5321 : TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseDU : caseDI;
5323 bc_emit_bytecode (opcode);
5325 /* Now emit the case instructions literal arguments, in order.
5326 In addition to the value on the stack, it uses:
5327 1. The address of the jump table.
5328 2. The size of the jump table.
5329 3. The default label. */
5331 jump_label = bc_get_bytecode_label ();
5332 bc_emit_bytecode_labelref (jump_label);
5333 bc_emit_bytecode_const ((char *) &thiscase->data.case_stmt.num_ranges,
5334 sizeof thiscase->data.case_stmt.num_ranges);
/* With no explicit default, fall through to the switch exit.  */
5336 if (thiscase->data.case_stmt.default_label)
5337 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (thiscase->data.case_stmt.default_label)))
5339 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));
5341 /* Output the jump table. */
5343 bc_align_bytecode (3 /* PTR_ALIGN */);
5344 bc_emit_bytecode_labeldef (jump_label);
/* SImode entries: low bound, high bound, target label per range.  */
5346 if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode)
5347 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
5349 opcode = TREE_INT_CST_LOW (c->low);
5350 bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);
5352 opcode = TREE_INT_CST_LOW (c->high);
5353 bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);
5355 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
/* DImode entries use double-word constants instead.  */
5358 if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == DImode)
5359 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
5361 bc_emit_bytecode_DI_const (c->low);
5362 bc_emit_bytecode_DI_const (c->high);
5364 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
5371 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->exit_label));
5373 /* Possibly issue enumeration warnings. */
5375 if (!thiscase->data.case_stmt.default_label
5376 && TREE_CODE (TREE_TYPE (expr)) == ENUMERAL_TYPE
5377 && TREE_CODE (expr) != INTEGER_CST
5379 check_for_full_enumeration_handling (TREE_TYPE (expr));
5382 #ifdef DEBUG_PRINT_CODE
5383 fputc ('\n', stderr);
5386 POPSTACK (case_stack);
5390 /* Return unique bytecode ID. */
5395 static int bc_uid = 0;
5400 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
/* Emit a jump to LABEL taken when OP1 == OP2.  UNSIGNEDP selects an
   unsigned comparison.  NOTE(review): some lines are elided here.  */
5403 do_jump_if_equal (op1, op2, label, unsignedp)
5404 rtx op1, op2, label;
/* Both operands constant: resolve the comparison at compile time and
   emit either an unconditional jump or nothing.  */
5407 if (GET_CODE (op1) == CONST_INT
5408 && GET_CODE (op2) == CONST_INT)
5410 if (INTVAL (op1) == INTVAL (op2))
/* Otherwise emit a real compare-and-branch.  A CONST_INT has VOIDmode,
   so fall back to OP2's mode when OP1's mode is VOIDmode.  */
5415 enum machine_mode mode = GET_MODE (op1);
5416 if (mode == VOIDmode)
5417 mode = GET_MODE (op2);
5418 emit_cmp_insn (op1, op2, EQ, NULL_RTX, mode, unsignedp, 0);
5419 emit_jump_insn (gen_beq (label));
5423 /* Not all case values are encountered equally. This function
5424 uses a heuristic to weight case labels, in cases where that
5425 looks like a reasonable thing to do.
5427 Right now, all we try to guess is text, and we establish the
5430 chars above space: 16
5439 If we find any cases in the switch that are not either -1 or in the range
5440 of valid ASCII characters, or are control characters other than those
5441 commonly used with "\", don't treat this switch scanning text.
5443 Return 1 if these nodes are suitable for cost estimation, otherwise
/* Heuristically decide whether the case values in NODE's list look like
   text (all in -1..127, no odd control characters), in which case the
   static cost_table can be used to weight the decision tree.  Returns
   nonzero iff cost estimation applies.  NOTE(review): interior lines
   are elided in this extract.  */
5447 estimate_case_costs (node)
5450 tree min_ascii = build_int_2 (-1, -1);
5451 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5455 /* If we haven't already made the cost table, make it now. Note that the
5456 lower bound of the table is -1, not zero. */
5458 if (cost_table == NULL)
/* The +1 offset makes cost_table[-1] a valid element.  */
5460 cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
5461 bzero ((char *) (cost_table - 1), 129 * sizeof (short));
/* Weight each character class; the assignments for the common classes
   are elided in this extract.  */
5463 for (i = 0; i < 128; i++)
5467 else if (ispunct (i))
5469 else if (iscntrl (i))
/* Specific weights for whitespace and common escape characters.  */
5473 cost_table[' '] = 8;
5474 cost_table['\t'] = 4;
5475 cost_table['\0'] = 4;
5476 cost_table['\n'] = 2;
5477 cost_table['\f'] = 1;
5478 cost_table['\v'] = 1;
5479 cost_table['\b'] = 1;
5482 /* See if all the case expressions look like text. It is text if the
5483 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5484 as signed arithmetic since we don't want to ever access cost_table with a
5485 value less than -1. Also check that none of the constants in a range
5486 are strange control characters. */
5488 for (n = node; n; n = n->right)
5490 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
/* A negative table entry marks a character that disqualifies text.  */
5493 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
5494 if (cost_table[i] < 0)
5498 /* All interesting values are within the range of interesting
5499 ASCII characters. */
5503 /* Scan an ordered list of case nodes
5504 combining those with consecutive values or ranges.
5506 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
/* Merge adjacent case nodes that jump to the same place and whose value
   ranges are consecutive, turning e.g. 1:, 2:, 3: into 1..3:.
   NOTE(review): interior lines are elided in this extract.  */
5509 group_case_nodes (head)
5512 case_node_ptr node = head;
/* LB is the first real insn of NODE's target; nodes whose labels lead
   to the same insn are candidates for merging.  */
5516 rtx lb = next_real_insn (label_rtx (node->code_label));
5517 case_node_ptr np = node;
5519 /* Try to group the successors of NODE with NODE. */
5520 while (((np = np->right) != 0)
5521 /* Do they jump to the same place? */
5522 && next_real_insn (label_rtx (np->code_label)) == lb
5523 /* Are their ranges consecutive? */
5524 && tree_int_cst_equal (np->low,
5525 fold (build (PLUS_EXPR,
5526 TREE_TYPE (node->high),
5529 /* An overflow is not consecutive. */
5530 && tree_int_cst_lt (node->high,
5531 fold (build (PLUS_EXPR,
5532 TREE_TYPE (node->high),
5534 integer_one_node))))
/* Extend NODE's range to absorb NP.  */
5536 node->high = np->high;
5538 /* NP is the first node after NODE which can't be grouped with it.
5539 Delete the nodes in between, and move on to that node. */
5545 /* Take an ordered list of case nodes
5546 and transform them into a near optimal binary tree,
5547 on the assumption that any target code selection value is as
5548 likely as any other.
5550 The transformation is performed by splitting the ordered
5551 list into two equal sections plus a pivot. The parts are
5552 then attached to the pivot as left and right branches. Each
5553 branch is then transformed recursively. */
/* Rearrange the linked list *HEAD into a near-balanced binary tree,
   splitting at the midpoint of the (possibly cost-weighted) total.
   PARENT becomes the parent pointer of the new subtree root.
   NOTE(review): interior lines are elided in this extract.  */
5556 balance_case_nodes (head, parent)
5557 case_node_ptr *head;
5558 case_node_ptr parent;
5560 register case_node_ptr np;
5568 register case_node_ptr *npp;
5571 /* Count the number of entries on branch. Also count the ranges. */
/* Ranges cost both endpoints when the cost table is in use.  */
5575 if (!tree_int_cst_equal (np->low, np->high))
5579 cost += cost_table[TREE_INT_CST_LOW (np->high)];
5583 cost += cost_table[TREE_INT_CST_LOW (np->low)];
5591 /* Split this list if it is long enough for that to help. */
5596 /* Find the place in the list that bisects the list's total cost,
5597 Here I gets half the total cost. */
5602 /* Skip nodes while their cost does not reach that amount. */
5603 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5604 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
5605 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
5608 npp = &(*npp)->right;
5613 /* Leave this branch lopsided, but optimize left-hand
5614 side and fill in `parent' fields for right-hand side. */
5616 np->parent = parent;
5617 balance_case_nodes (&np->left, np);
5618 for (; np->right; np = np->right)
5619 np->right->parent = np;
5623 /* If there are just three nodes, split at the middle one. */
5625 npp = &(*npp)->right;
5628 /* Find the place in the list that bisects the list's total cost,
5629 where ranges count as 2.
5630 Here I gets half the total cost. */
5631 i = (i + ranges + 1) / 2;
5634 /* Skip nodes while their cost does not reach that amount. */
5635 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5640 npp = &(*npp)->right;
/* *NPP is the pivot: it becomes the subtree root.  */
5645 np->parent = parent;
5648 /* Optimize each of the two split parts. */
5649 balance_case_nodes (&np->left, np);
5650 balance_case_nodes (&np->right, np);
5654 /* Else leave this branch as one level,
5655 but fill in `parent' fields. */
5657 np->parent = parent;
5658 for (; np->right; np = np->right)
5659 np->right->parent = np;
5664 /* Search the parent sections of the case node tree
5665 to see if a test for the lower bound of NODE would be redundant.
5666 INDEX_TYPE is the type of the index expression.
5668 The instructions to generate the case decision tree are
5669 output in the same order as nodes are processed so it is
5670 known that if a parent node checks the range of the current
5671 node minus one that the current node is bounded at its lower
5672 span. Thus the test would be redundant. */
/* Return nonzero if a test for NODE's lower bound is provably redundant
   given the tests already emitted for its parents (see block comment
   above).  NOTE(review): some lines are elided in this extract.  */
5675 node_has_low_bound (node, index_type)
5680 case_node_ptr pnode;
5682 /* If the lower bound of this node is the lowest value in the index type,
5683 we need not test it. */
5685 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5688 /* If this node has a left branch, the value at the left must be less
5689 than that at this node, so it cannot be bounded at the bottom and
5690 we need not bother testing any further. */
5695 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5696 node->low, integer_one_node));
5698 /* If the subtraction above overflowed, we can't verify anything.
5699 Otherwise, look for a parent that tests our value - 1. */
5701 if (! tree_int_cst_lt (low_minus_one, node->low))
/* A parent whose high bound equals low-1 already excluded all
   smaller values on this path.  */
5704 for (pnode = node->parent; pnode; pnode = pnode->parent)
5705 if (tree_int_cst_equal (low_minus_one, pnode->high))
5711 /* Search the parent sections of the case node tree
5712 to see if a test for the upper bound of NODE would be redundant.
5713 INDEX_TYPE is the type of the index expression.
5715 The instructions to generate the case decision tree are
5716 output in the same order as nodes are processed so it is
5717 known that if a parent node checks the range of the current
5718 node plus one that the current node is bounded at its upper
5719 span. Thus the test would be redundant. */
/* Mirror of node_has_low_bound: return nonzero if a test for NODE's
   upper bound is provably redundant given parent tests.
   NOTE(review): some lines are elided in this extract.  */
5722 node_has_high_bound (node, index_type)
5727 case_node_ptr pnode;
5729 /* If the upper bound of this node is the highest value in the type
5730 of the index expression, we need not test against it. */
5732 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5735 /* If this node has a right branch, the value at the right must be greater
5736 than that at this node, so it cannot be bounded at the top and
5737 we need not bother testing any further. */
5742 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5743 node->high, integer_one_node));
5745 /* If the addition above overflowed, we can't verify anything.
5746 Otherwise, look for a parent that tests our value + 1. */
5748 if (! tree_int_cst_lt (node->high, high_plus_one))
/* A parent whose low bound equals high+1 already excluded all
   larger values on this path.  */
5751 for (pnode = node->parent; pnode; pnode = pnode->parent)
5752 if (tree_int_cst_equal (high_plus_one, pnode->low))
5758 /* Search the parent sections of the
5759 case node tree to see if both tests for the upper and lower
5760 bounds of NODE would be redundant. */
/* Return nonzero iff both of NODE's bounds are implied by tests the
   parents have already emitted, so no comparison is needed at all.  */
5763 node_is_bounded (node, index_type)
5767 return (node_has_low_bound (node, index_type)
5768 && node_has_high_bound (node, index_type));
5771 /* Emit an unconditional jump to LABEL unless it would be dead code. */
/* Emit an unconditional jump to LABEL, unless the last emitted insn is
   a BARRIER (i.e. control cannot reach this point anyway).  */
5774 emit_jump_if_reachable (label)
5777 if (GET_CODE (get_last_insn ()) != BARRIER)
5781 /* Emit step-by-step code to select a case for the value of INDEX.
5782 The thus generated decision tree follows the form of the
5783 case-node binary tree NODE, whose nodes represent test conditions.
5784 INDEX_TYPE is the type of the index of the switch.
5786 Care is taken to prune redundant tests from the decision tree
5787 by detecting any boundary conditions already checked by
5788 emitted rtx. (See node_has_high_bound, node_has_low_bound
5789 and node_is_bounded, above.)
5791 Where the test conditions can be shown to be redundant we emit
5792 an unconditional jump to the target code. As a further
5793 optimization, the subordinates of a tree node are examined to
5794 check for bounded nodes. In this case conditional and/or
5795 unconditional jumps as a result of the boundary check for the
5796 current node are arranged to target the subordinates associated
5797 code for out of bound conditions on the current node.
5799 We can assume that when control reaches the code generated here,
5800 the index value has already been compared with the parents
5801 of this node, and determined to be on the same side of each parent
5802 as this node is. Thus, if this node tests for the value 51,
5803 and a parent tested for 52, we don't need to consider
5804 the possibility of a value greater than 51. If another parent
5805 tests for the value 50, then this node need not test anything. */
5808 emit_case_nodes (index, node, default_label, index_type)
/* NOTE(review): this chunk appears to have lost lines in extraction --
   the function's return type, parameter declarations, and a number of
   braces and continuation lines are missing below.  Comments here state
   only what the surviving lines themselves show.  */
5814 /* If INDEX has an unsigned type, we must make unsigned branches. */
5815 int unsignedp = TREE_UNSIGNED (index_type);
/* Pick the signed or unsigned variant of each conditional-branch
   generator once up front, so every branch emitted below is
   signedness-correct without repeating the test.  */
5816 typedef rtx rtx_function ();
5817 rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
5818 rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
5819 rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
5820 rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
5821 enum machine_mode mode = GET_MODE (index);
5823 /* See if our parents have already tested everything for us.
5824 If they have, emit an unconditional jump for this node. */
5825 if (node_is_bounded (node, index_type))
5826 emit_jump (label_rtx (node->code_label));
5828 else if (tree_int_cst_equal (node->low, node->high))
5830 /* Node is single valued. First see if the index expression matches
5831 this node and then check our children, if any. */
5833 do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
5834 label_rtx (node->code_label), unsignedp);
5836 if (node->right != 0 && node->left != 0)
5838 /* This node has children on both sides.
5839 Dispatch to one side or the other
5840 by comparing the index value with this node's value.
5841 If one subtree is bounded, check that one first,
5842 so we can avoid real branches in the tree. */
5844 if (node_is_bounded (node->right, index_type))
5846 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5848 GT, NULL_RTX, mode, unsignedp, 0);
5850 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
5851 emit_case_nodes (index, node->left, default_label, index_type);
5854 else if (node_is_bounded (node->left, index_type))
5856 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5858 LT, NULL_RTX, mode, unsignedp, 0);
5859 emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
5860 emit_case_nodes (index, node->right, default_label, index_type);
5865 /* Neither node is bounded. First distinguish the two sides;
5866 then emit the code for one side at a time. */
/* NOTE(review): the declaration of test_label (used two lines below and
   assigned here) appears to be among the dropped lines.  */
5869 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5871 /* See if the value is on the right. */
5872 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5874 GT, NULL_RTX, mode, unsignedp, 0);
5875 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
5877 /* Value must be on the left.
5878 Handle the left-hand subtree. */
5879 emit_case_nodes (index, node->left, default_label, index_type);
5880 /* If left-hand subtree does nothing, fall to the default. */
5882 emit_jump_if_reachable (default_label);
5884 /* Code branches here for the right-hand subtree. */
5885 expand_label (test_label);
5886 emit_case_nodes (index, node->right, default_label, index_type);
5890 else if (node->right != 0 && node->left == 0)
5892 /* Here we have a right child but no left so we issue conditional
5893 branch to default and process the right child.
5895 Omit the conditional branch to default if by it we avoid only one
5896 right child; it costs too much space to save so little time. */
5898 if (node->right->right || node->right->left
5899 || !tree_int_cst_equal (node->right->low, node->right->high))
5901 if (!node_has_low_bound (node, index_type))
5903 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5905 LT, NULL_RTX, mode, unsignedp, 0);
5906 emit_jump_insn ((*gen_blt_pat) (default_label));
5909 emit_case_nodes (index, node->right, default_label, index_type);
5912 /* We cannot process node->right normally
5913 since we haven't ruled out the numbers less than
5914 this node's value. So handle node->right explicitly. */
5915 do_jump_if_equal (index,
5916 expand_expr (node->right->low, NULL_RTX,
5918 label_rtx (node->right->code_label), unsignedp);
5921 else if (node->right == 0 && node->left != 0)
5923 /* Just one subtree, on the left. */
5925 #if 0 /* The following code and comment were formerly part
5926 of the condition here, but they didn't work
5927 and I don't understand what the idea was. -- rms. */
5928 /* If our "most probable entry" is less probable
5929 than the default label, emit a jump to
5930 the default label using condition codes
5931 already lying around. With no right branch,
5932 a branch-greater-than will get us to the default label. */
5935 && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
/* NOTE(review): the matching #endif for the #if 0 above is not visible;
   presumably it is among the dropped lines.  */
5938 if (node->left->left || node->left->right
5939 || !tree_int_cst_equal (node->left->low, node->left->high))
5941 if (!node_has_high_bound (node, index_type))
5943 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5945 GT, NULL_RTX, mode, unsignedp, 0);
5946 emit_jump_insn ((*gen_bgt_pat) (default_label));
5949 emit_case_nodes (index, node->left, default_label, index_type);
5952 /* We cannot process node->left normally
5953 since we haven't ruled out the numbers greater than
5954 this node's value. So handle node->left explicitly. */
5955 do_jump_if_equal (index,
5956 expand_expr (node->left->low, NULL_RTX,
5958 label_rtx (node->left->code_label), unsignedp);
5963 /* Node is a range. These cases are very similar to those for a single
5964 value, except that we do not start by testing whether this node
5965 is the one to branch to. */
5967 if (node->right != 0 && node->left != 0)
5969 /* Node has subtrees on both sides.
5970 If the right-hand subtree is bounded,
5971 test for it first, since we can go straight there.
5972 Otherwise, we need to make a branch in the control structure,
5973 then handle the two subtrees. */
5974 tree test_label = 0;
5976 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5978 GT, NULL_RTX, mode, unsignedp, 0);
5980 if (node_is_bounded (node->right, index_type))
5981 /* Right hand node is fully bounded so we can eliminate any
5982 testing and branch directly to the target code. */
5983 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
5986 /* Right hand node requires testing.
5987 Branch to a label where we will handle it later. */
5989 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5990 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
5993 /* Value belongs to this node or to the left-hand subtree. */
5995 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
5996 GE, NULL_RTX, mode, unsignedp, 0);
5997 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
5999 /* Handle the left-hand subtree. */
6000 emit_case_nodes (index, node->left, default_label, index_type);
6002 /* If right node had to be handled later, do that now.
   NOTE(review): a guard testing that test_label was actually created
   presumably belongs here -- it looks dropped by extraction.  */
6006 /* If the left-hand subtree fell through,
6007 don't let it fall into the right-hand subtree. */
6008 emit_jump_if_reachable (default_label);
6010 expand_label (test_label);
6011 emit_case_nodes (index, node->right, default_label, index_type);
6015 else if (node->right != 0 && node->left == 0)
6017 /* Deal with values to the left of this node,
6018 if they are possible. */
6019 if (!node_has_low_bound (node, index_type))
6021 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
6023 LT, NULL_RTX, mode, unsignedp, 0);
6024 emit_jump_insn ((*gen_blt_pat) (default_label));
6027 /* Value belongs to this node or to the right-hand subtree. */
6029 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
6031 LE, NULL_RTX, mode, unsignedp, 0);
6032 emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));
6034 emit_case_nodes (index, node->right, default_label, index_type);
6037 else if (node->right == 0 && node->left != 0)
6039 /* Deal with values to the right of this node,
6040 if they are possible. */
6041 if (!node_has_high_bound (node, index_type))
6043 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
6045 GT, NULL_RTX, mode, unsignedp, 0);
6046 emit_jump_insn ((*gen_bgt_pat) (default_label));
6049 /* Value belongs to this node or to the left-hand subtree. */
6051 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
6052 GE, NULL_RTX, mode, unsignedp, 0);
6053 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
6055 emit_case_nodes (index, node->left, default_label, index_type);
6060 /* Node has no children so we check low and high bounds to remove
6061 redundant tests. Only one of the bounds can exist,
6062 since otherwise this node is bounded--a case tested already. */
6064 if (!node_has_high_bound (node, index_type))
6066 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
6068 GT, NULL_RTX, mode, unsignedp, 0);
6069 emit_jump_insn ((*gen_bgt_pat) (default_label));
6072 if (!node_has_low_bound (node, index_type))
6074 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
6076 LT, NULL_RTX, mode, unsignedp, 0);
6077 emit_jump_insn ((*gen_blt_pat) (default_label));
6080 emit_jump (label_rtx (node->code_label));
6085 /* These routines are used by the loop unrolling code. They copy BLOCK trees
6086 so that the debugging info will be correct for the unrolled loop. */
6088 /* Indexed by block number, contains a pointer to the N'th block node. */
6090 static tree *block_vector;
6093 find_loop_tree_blocks ()
6095 tree block = DECL_INITIAL (current_function_decl);
6097 block_vector = identify_blocks (block, get_insns ());
6101 unroll_block_trees ()
6103 tree block = DECL_INITIAL (current_function_decl);
6105 reorder_blocks (block_vector, block, get_insns ());