1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 88, 89, 92-5, 1996 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 /* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 It also creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
27 The functions whose names start with `expand_' are called by the
28 parser to generate RTL instructions for various kinds of constructs.
30 Some control and binding constructs require calling several such
31 functions at different times. For example, a simple if-then
32 is expanded by calling `expand_start_cond' (with the condition-expression
33 as argument) before parsing the then-clause and calling `expand_end_cond'
34 after parsing the then-clause. */
45 #include "insn-flags.h"
46 #include "insn-config.h"
47 #include "insn-codes.h"
49 #include "hard-reg-set.h"
56 #include "bc-typecd.h"
57 #include "bc-opcode.h"
61 #define obstack_chunk_alloc xmalloc
62 #define obstack_chunk_free free
63 struct obstack stmt_obstack;
65 /* Filename and line number of last line-number note,
66 whether we actually emitted it or not. */
70 /* Nonzero if within a ({...}) grouping, in which case we must
71 always compute a value for each expr-stmt in case it is the last one. */
73 int expr_stmts_for_value;
75 /* Each time we expand an expression-statement,
76 record the expr's type and its RTL value here. */
78 static tree last_expr_type;
79 static rtx last_expr_value;
81 /* Each time we expand the end of a binding contour (in `expand_end_bindings')
82 and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
83 This is used by the `remember_end_note' function to record the endpoint
84 of each generated block in its associated BLOCK node. */
86 static rtx last_block_end_note;
88 /* Number of binding contours started so far in this function. */
90 int block_start_count;
92 /* Nonzero if function being compiled needs to
93 return the address of where it has put a structure value. */
95 extern int current_function_returns_pcc_struct;
97 /* Label that will go on parm cleanup code, if any.
98 Jumping to this label runs cleanup code for parameters, if
99 such code must be run. Following this code is the logical return label. */
101 extern rtx cleanup_label;
103 /* Label that will go on function epilogue.
104 Jumping to this label serves as a "return" instruction
105 on machines which require execution of the epilogue on all returns. */
107 extern rtx return_label;
109 /* Offset to end of allocated area of stack frame.
110 If stack grows down, this is the address of the last stack slot allocated.
111 If stack grows up, this is the address for the next slot. */
112 extern int frame_offset;
114 /* Label to jump back to for tail recursion, or 0 if we have
115 not yet needed one for this function. */
116 extern rtx tail_recursion_label;
118 /* Place after which to insert the tail_recursion_label if we need one. */
119 extern rtx tail_recursion_reentry;
121 /* Location at which to save the argument pointer if it will need to be
122 referenced. There are two cases where this is done: if nonlocal gotos
123 exist, or if vars whose address is an offset from the argument pointer will be
124 needed by inner routines. */
126 extern rtx arg_pointer_save_area;
128 /* Chain of all RTL_EXPRs that have insns in them. */
129 extern tree rtl_expr_chain;
131 #if 0 /* Turned off because 0 seems to work just as well. */
132 /* Cleanup lists are required for binding levels regardless of whether
133 that binding level has cleanups or not. This node serves as the
134 cleanup list whenever an empty list is required. */
135 static tree empty_cleanup_list;
138 extern void (*interim_eh_hook) PROTO((tree));
140 /* Functions and data structures for expanding case statements. */
142 /* Case label structure, used to hold info on labels within case
143 statements. We handle "range" labels; for a single-value label
144 as in C, the high and low limits are the same.
146 An AVL tree of case nodes is initially created, and later transformed
147 to a list linked via the RIGHT fields in the nodes. Nodes with
148 higher case values are later in the list.
150 Switch statements can be output in one of two forms. A branch table
151 is used if there are more than a few labels and the labels are dense
152 within the range between the smallest and largest case value. If a
153 branch table is used, no further manipulations are done with the case
156 The alternative to the use of a branch table is to generate a series
157 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
158 and PARENT fields to hold a binary tree. Initially the tree is
159 totally unbalanced, with everything on the right. We balance the tree
160 with nodes on the left having lower case values than the parent
161 and nodes on the right having higher values. We then output the tree
166 struct case_node *left; /* Left son in binary tree */
167 struct case_node *right; /* Right son in binary tree; also node chain */
168 struct case_node *parent; /* Parent of node in binary tree */
169 tree low; /* Lowest index value for this label */
170 tree high; /* Highest index value for this label */
171 tree code_label; /* Label to jump to when node matches */
175 typedef struct case_node case_node;
176 typedef struct case_node *case_node_ptr;
178 /* These are used by estimate_case_costs and balance_case_nodes. */
180 /* This must be a signed type, and non-ANSI compilers lack signed char. */
181 static short *cost_table;
182 static int use_cost_table;
184 /* Stack of control and binding constructs we are currently inside.
186 These constructs begin when you call `expand_start_WHATEVER'
187 and end when you call `expand_end_WHATEVER'. This stack records
188 info about how the construct began that tells the end-function
189 what to do. It also may provide information about the construct
190 to alter the behavior of other constructs within the body.
191 For example, they may affect the behavior of C `break' and `continue'.
193 Each construct gets one `struct nesting' object.
194 All of these objects are chained through the `all' field.
195 `nesting_stack' points to the first object (innermost construct).
196 The position of an entry on `nesting_stack' is in its `depth' field.
198 Each type of construct has its own individual stack.
199 For example, loops have `loop_stack'. Each object points to the
200 next object of the same type through the `next' field.
202 Some constructs are visible to `break' exit-statements and others
203 are not. Which constructs are visible depends on the language.
204 Therefore, the data structure allows each construct to be visible
205 or not, according to the args given when the construct is started.
206 The construct is visible if the `exit_label' field is non-null.
207 In that case, the value should be a CODE_LABEL rtx. */
212 struct nesting *next;
217 /* For conds (if-then and if-then-else statements). */
220 /* Label for the end of the if construct.
221 There is none if EXITFLAG was not set
222 and no `else' has been seen yet. */
224 /* Label for the end of this alternative.
225 This may be the end of the if or the next else/elseif. */
231 /* Label at the top of the loop; place to loop back to. */
233 /* Label at the end of the whole construct. */
235 /* Label before a jump that branches to the end of the whole
236 construct. This is where destructors go if any. */
238 /* Label for `continue' statement to jump to;
239 this is in front of the stepper of the loop. */
242 /* For variable binding contours. */
245 /* Sequence number of this binding contour within the function,
246 in order of entry. */
247 int block_start_count;
248 /* Nonzero => value to restore stack to on exit. Complemented by
249 bc_stack_level (see below) when generating bytecodes. */
251 /* The NOTE that starts this contour.
252 Used by expand_goto to check whether the destination
253 is within each contour or not. */
255 /* Innermost containing binding contour that has a stack level. */
256 struct nesting *innermost_stack_block;
257 /* List of cleanups to be run on exit from this contour.
258 This is a list of expressions to be evaluated.
259 The TREE_PURPOSE of each link is the ..._DECL node
260 which the cleanup pertains to. */
262 /* List of cleanup-lists of blocks containing this block,
263 as they were at the locus where this block appears.
264 There is an element for each containing block,
265 ordered innermost containing block first.
266 The tail of this list can be 0 (was empty_cleanup_list),
267 if all remaining elements would be empty lists.
268 The element's TREE_VALUE is the cleanup-list of that block,
269 which may be null. */
271 /* Chain of labels defined inside this binding contour.
272 For contours that have stack levels or cleanups. */
273 struct label_chain *label_chain;
274 /* Number of function calls seen, as of start of this block. */
275 int function_call_count;
276 /* Bytecode specific: stack level to restore stack to on exit. */
279 /* For switch (C) or case (Pascal) statements,
280 and also for dummies (see `expand_start_case_dummy'). */
283 /* The insn after which the case dispatch should finally
284 be emitted. Zero for a dummy. */
286 /* For bytecodes, the case table is in-lined right in the code.
287 A label is needed for skipping over this block. It is only
288 used when generating bytecodes. */
290 /* A list of case labels; it is first built as an AVL tree.
291 During expand_end_case, this is converted to a list, and may be
292 rearranged into a nearly balanced binary tree. */
293 struct case_node *case_list;
294 /* Label to jump to if no case matches. */
296 /* The expression to be dispatched on. */
298 /* Type that INDEX_EXPR should be converted to. */
300 /* Number of range exprs in case statement. */
302 /* Name of this kind of statement, for warnings. */
304 /* Nonzero if a case label has been seen in this case stmt. */
310 /* Chain of all pending binding contours. */
311 struct nesting *block_stack;
313 /* If any new stacks are added here, add them to POPSTACKS too. */
315 /* Chain of all pending binding contours that restore stack levels
317 struct nesting *stack_block_stack;
319 /* Chain of all pending conditional statements. */
320 struct nesting *cond_stack;
322 /* Chain of all pending loops. */
323 struct nesting *loop_stack;
325 /* Chain of all pending case or switch statements. */
326 struct nesting *case_stack;
328 /* Separate chain including all of the above,
329 chained through the `all' field. */
330 struct nesting *nesting_stack;
332 /* Number of entries on nesting_stack now. */
335 /* Allocate and return a new `struct nesting'. */
/* Allocate a `struct nesting' from stmt_obstack.  Entries allocated
   here are released by POPSTACK below via obstack_free, which frees
   the given entry and everything allocated after it.  */
337 #define ALLOC_NESTING() \
338 (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))
340 /* Pop the nesting stack element by element until we pop off
341 the element which is at the top of STACK.
342 Update all the other stacks, popping off elements from them
343 as we pop them from nesting_stack. */
/* NOTE(review): each iteration pops the innermost entry of
   nesting_stack.  Whichever of the five per-construct stacks
   (loop_stack, cond_stack, block_stack, stack_block_stack, case_stack)
   currently has that entry on top is advanced past it, keeping all the
   stacks consistent with nesting_stack.  The popped entry is then
   returned to stmt_obstack.  Iteration stops after the entry that was
   on top of STACK has itself been popped, so STACK's top and every
   construct nested inside it are removed.  `this' is an ordinary
   identifier here -- this file is C, not C++.  */
345 #define POPSTACK(STACK) \
346 do { struct nesting *target = STACK; \
347 struct nesting *this; \
348 do { this = nesting_stack; \
349 if (loop_stack == this) \
350 loop_stack = loop_stack->next; \
351 if (cond_stack == this) \
352 cond_stack = cond_stack->next; \
353 if (block_stack == this) \
354 block_stack = block_stack->next; \
355 if (stack_block_stack == this) \
356 stack_block_stack = stack_block_stack->next; \
357 if (case_stack == this) \
358 case_stack = case_stack->next; \
359 nesting_depth = nesting_stack->depth - 1; \
360 nesting_stack = this->all; \
361 obstack_free (&stmt_obstack, this); } \
362 while (this != target); } while (0)
364 /* In some cases it is impossible to generate code for a forward goto
365 until the label definition is seen. This happens when it may be necessary
366 for the goto to reset the stack pointer: we don't yet know how to do that.
367 So expand_goto puts an entry on this fixup list.
368 Each time a binding contour that resets the stack is exited,
370 If the target label has now been defined, we can insert the proper code. */
374 /* Points to following fixup. */
375 struct goto_fixup *next;
376 /* Points to the insn before the jump insn.
377 If more code must be inserted, it goes after this insn. */
379 /* The LABEL_DECL that this jump is jumping to, or 0
380 for break, continue or return. */
382 /* The BLOCK for the place where this goto was found. */
384 /* The CODE_LABEL rtx that this is jumping to. */
386 /* Number of binding contours started in current function
387 before the label reference. */
388 int block_start_count;
389 /* The outermost stack level that should be restored for this jump.
390 Each time a binding contour that resets the stack is exited,
391 if the target label is *not* yet defined, this slot is updated. */
393 /* List of lists of cleanup expressions to be run by this goto.
394 There is one element for each block that this goto is within.
395 The tail of this list can be 0 (was empty_cleanup_list),
396 if all remaining elements would be empty.
397 The TREE_VALUE contains the cleanup list of that block as of the
398 time this goto was seen.
399 The TREE_ADDRESSABLE flag is 1 for a block that has been exited. */
400 tree cleanup_list_list;
402 /* Bytecode specific members follow */
404 /* The label that this jump is jumping to, or 0 for break, continue
406 struct bc_label *bc_target;
408 /* The label we use for the fixup patch */
409 struct bc_label *label;
411 /* True (non-0) if fixup has been handled */
414 /* Like stack_level above, except refers to the interpreter stack */
418 static struct goto_fixup *goto_fixup_chain;
420 /* Within any binding contour that must restore a stack level,
421 all labels are recorded with a chain of these structures. */
425 /* Points to following fixup. */
426 struct label_chain *next;
429 static void expand_goto_internal PROTO((tree, rtx, rtx));
430 static void bc_expand_goto_internal PROTO((enum bytecode_opcode,
431 struct bc_label *, tree));
432 static int expand_fixup PROTO((tree, rtx, rtx));
433 static void bc_expand_fixup PROTO((enum bytecode_opcode,
434 struct bc_label *, int));
435 static void fixup_gotos PROTO((struct nesting *, rtx, tree,
437 static void bc_fixup_gotos PROTO((struct nesting *, int, tree,
439 static void bc_expand_start_cond PROTO((tree, int));
440 static void bc_expand_end_cond PROTO((void));
441 static void bc_expand_start_else PROTO((void));
442 static void bc_expand_end_loop PROTO((void));
443 static void bc_expand_end_bindings PROTO((tree, int, int));
444 static void bc_expand_decl PROTO((tree, tree));
445 static void bc_expand_variable_local_init PROTO((tree));
446 static void bc_expand_decl_init PROTO((tree));
447 static void expand_null_return_1 PROTO((rtx, int));
448 static void expand_value_return PROTO((rtx));
449 static int tail_recursion_args PROTO((tree, tree));
450 static void expand_cleanups PROTO((tree, tree, int, int));
451 static void bc_expand_start_case PROTO((struct nesting *, tree,
453 static int bc_pushcase PROTO((tree, tree));
454 static void bc_check_for_full_enumeration_handling PROTO((tree));
455 static void bc_expand_end_case PROTO((tree));
456 static void do_jump_if_equal PROTO((rtx, rtx, rtx, int));
457 static int estimate_case_costs PROTO((case_node_ptr));
458 static void group_case_nodes PROTO((case_node_ptr));
459 static void balance_case_nodes PROTO((case_node_ptr *,
461 static int node_has_low_bound PROTO((case_node_ptr, tree));
462 static int node_has_high_bound PROTO((case_node_ptr, tree));
463 static int node_is_bounded PROTO((case_node_ptr, tree));
464 static void emit_jump_if_reachable PROTO((rtx));
465 static void emit_case_nodes PROTO((rtx, case_node_ptr, rtx, tree));
466 static int add_case_node PROTO((tree, tree, tree, tree *));
467 static struct case_node *case_tree2list PROTO((case_node *, case_node *));
469 extern rtx bc_allocate_local ();
470 extern rtx bc_allocate_variable_array ();
/* NOTE(review): this excerpt omits the enclosing function headers and
   several statements; the comments below describe only the visible
   lines.  */
/* One-time initialization: set up the obstack that all statement
   nesting data is allocated on.  */
475 gcc_obstack_init (&stmt_obstack);
/* empty_cleanup_list is declared inside the `#if 0' block above --
   presumably this initialization was part of the turned-off
   cleanup-list scheme; verify before re-enabling.  */
477 empty_cleanup_list = build_tree_list (NULL_TREE, NULL_TREE);
/* Reset per-function statement-expansion state at the start of each
   function body.  */
482 init_stmt_for_function ()
484 /* We are not currently within any block, conditional, loop or case. */
486 stack_block_stack = 0;
493 block_start_count = 0;
495 /* No gotos have been expanded yet. */
496 goto_fixup_chain = 0;
498 /* We are not processing a ({...}) grouping. */
499 expr_stmts_for_value = 0;
/* Save all of this file's per-function state into P, so compilation of
   a nested or deferred function can proceed; restore_stmt_status below
   is the field-for-field inverse.
   NOTE(review): the function headers and parameter declarations are
   missing from this excerpt.  */
507 p->block_stack = block_stack;
508 p->stack_block_stack = stack_block_stack;
509 p->cond_stack = cond_stack;
510 p->loop_stack = loop_stack;
511 p->case_stack = case_stack;
512 p->nesting_stack = nesting_stack;
513 p->nesting_depth = nesting_depth;
514 p->block_start_count = block_start_count;
515 p->last_expr_type = last_expr_type;
516 p->last_expr_value = last_expr_value;
517 p->expr_stmts_for_value = expr_stmts_for_value;
518 p->emit_filename = emit_filename;
519 p->emit_lineno = emit_lineno;
520 p->goto_fixup_chain = goto_fixup_chain;
/* Restore the state saved by save_stmt_status above.  */
524 restore_stmt_status (p)
527 block_stack = p->block_stack;
528 stack_block_stack = p->stack_block_stack;
529 cond_stack = p->cond_stack;
530 loop_stack = p->loop_stack;
531 case_stack = p->case_stack;
532 nesting_stack = p->nesting_stack;
533 nesting_depth = p->nesting_depth;
534 block_start_count = p->block_start_count;
535 last_expr_type = p->last_expr_type;
536 last_expr_value = p->last_expr_value;
537 expr_stmts_for_value = p->expr_stmts_for_value;
538 emit_filename = p->emit_filename;
539 emit_lineno = p->emit_lineno;
540 goto_fixup_chain = p->goto_fixup_chain;
543 /* Emit a no-op instruction. */
/* Emit a no-op insn (RTL mode only).  The visible condition emits the
   nop when the last insn is a CODE_LABEL, or a NOTE with no real insn
   before it -- presumably so some following code has a real insn to
   attach to.
   NOTE(review): the function header and the start of the condition are
   missing from this excerpt.  */
550 if (!output_bytecode)
552 last_insn = get_last_insn ();
554 && (GET_CODE (last_insn) == CODE_LABEL
555 || (GET_CODE (last_insn) == NOTE
556 && prev_real_insn (last_insn) == 0)))
557 emit_insn (gen_nop ());
561 /* Return the rtx-label that corresponds to a LABEL_DECL,
562 creating it if necessary. */
/* Return the CODE_LABEL rtx for LABEL (a LABEL_DECL), creating it on
   first use and caching it in DECL_RTL.
   NOTE(review): the statement guarded by the TREE_CODE check
   (presumably an abort) is missing from this excerpt.  */
568 if (TREE_CODE (label) != LABEL_DECL)
571 if (DECL_RTL (label))
572 return DECL_RTL (label);
574 return DECL_RTL (label) = gen_label_rtx ();
577 /* Add an unconditional jump to LABEL as the next sequential instruction. */
/* Flush any pending stack adjustment before the jump so the
   adjustment is not skipped over, then emit the unconditional jump.
   NOTE(review): the enclosing function header is missing from this
   excerpt.  */
583 do_pending_stack_adjust ();
584 emit_jump_insn (gen_jump (label));
588 /* Emit code to jump to the address
589 specified by the pointer expression EXP. */
/* Emit code to jump to the address computed by pointer expression EXP.
   NOTE(review): parameter declaration, braces, and the
   output_bytecode test are missing from this excerpt.  */
592 expand_computed_goto (exp)
/* Bytecode path: evaluate EXP onto the interpreter stack, then emit a
   jumpP (jump-through-pointer) instruction.  */
597 bc_expand_expr (exp);
598 bc_emit_instruction (jumpP);
/* RTL path: evaluate EXP into an rtx.  */
602 rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
604 #ifdef POINTERS_EXTEND_UNSIGNED
/* Pointers may be narrower than Pmode on some targets; widen first.  */
605 x = convert_memory_address (Pmode, x);
609 do_pending_stack_adjust ();
610 emit_indirect_jump (x);
614 /* Handle goto statements and the labels that they can go to. */
616 /* Specify the location in the RTL code of a label LABEL,
617 which is a LABEL_DECL tree node.
619 This is used for the kind of label that the user can jump to with a
620 goto statement, and for alternatives of a switch or case statement.
621 RTL labels generated for loops and conditionals don't go through here;
622 they are generated directly at the RTL level, by other functions below.
624 Note that this has nothing to do with defining label *names*.
625 Languages vary in how they do that and what that even means. */
/* Define the location of user-visible label LABEL (a LABEL_DECL) at
   the current position in the insn stream.
   NOTE(review): the function header and several lines (including the
   output_bytecode test and the statement storing the label into `p')
   are missing from this excerpt.  */
631 struct label_chain *p;
/* Bytecode path: create the bytecode label lazily, then emit its
   definition; a second definition of the same label is an error.  */
635 if (! DECL_RTL (label))
636 DECL_RTL (label) = bc_gen_rtx ((char *) 0, 0, bc_get_bytecode_label ());
637 if (! bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (DECL_RTL (label))))
638 error ("multiply defined label");
/* RTL path: emit the CODE_LABEL here, naming it after the decl.  */
642 do_pending_stack_adjust ();
643 emit_label (label_rtx (label));
644 if (DECL_NAME (label))
645 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
/* If some enclosing contour restores a stack level, record this label
   on the innermost such contour's label chain (used for goto fixups).  */
647 if (stack_block_stack != 0)
649 p = (struct label_chain *) oballoc (sizeof (struct label_chain));
650 p->next = stack_block_stack->data.block.label_chain;
651 stack_block_stack->data.block.label_chain = p;
656 /* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
657 from nested functions. */
/* Declare that LABEL may be the target of nonlocal gotos from nested
   functions: record it on nonlocal_labels, keep its CODE_LABEL from
   being deleted, and lazily set up the handler slot plus a stack-level
   save emitted before the tail-recursion reentry point.
   NOTE(review): parameter declaration and braces are missing from this
   excerpt.  */
660 declare_nonlocal_label (label)
663 nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
664 LABEL_PRESERVE_P (label_rtx (label)) = 1;
665 if (nonlocal_goto_handler_slot == 0)
667 nonlocal_goto_handler_slot
668 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
669 emit_stack_save (SAVE_NONLOCAL,
670 &nonlocal_goto_stack_level,
671 PREV_INSN (tail_recursion_reentry));
675 /* Generate RTL code for a `goto' statement with target label LABEL.
676 LABEL should be a LABEL_DECL tree node that was or will later be
677 defined with `expand_label'. */
/* Generate code for `goto LABEL'.  A goto within the current function
   goes through expand_goto_internal; a goto into a containing function
   (nonlocal goto) must restore that function's frame and stack
   pointers and jump through its nonlocal-goto handler.
   NOTE(review): the function header, local declarations, and several
   statements are missing from this excerpt.  */
687 expand_goto_internal (label, label_rtx (label), NULL_RTX);
691 /* Check for a nonlocal goto to a containing function. */
692 context = decl_function_context (label);
693 if (context != 0 && context != current_function_decl)
695 struct function *p = find_function_data (context);
696 rtx label_ref = gen_rtx (LABEL_REF, Pmode, label_rtx (label));
699 p->has_nonlocal_label = 1;
700 current_function_has_nonlocal_goto = 1;
701 LABEL_REF_NONLOCAL_P (label_ref) = 1;
703 /* Copy the rtl for the slots so that they won't be shared in
704 case the virtual stack vars register gets instantiated differently
705 in the parent than in the child. */
707 #if HAVE_nonlocal_goto
708 if (HAVE_nonlocal_goto)
709 emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
710 copy_rtx (p->nonlocal_goto_handler_slot),
711 copy_rtx (p->nonlocal_goto_stack_level),
718 /* Restore frame pointer for containing function.
719 This sets the actual hard register used for the frame pointer
720 to the location of the function's incoming static chain info.
721 The non-local goto handler will then adjust it to contain the
722 proper value and reload the argument pointer, if needed. */
723 emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));
725 /* We have now loaded the frame pointer hardware register with
726 the address that corresponds to the start of the virtual
727 stack vars. So replace virtual_stack_vars_rtx in all
728 addresses we use with stack_pointer_rtx.
NOTE(review): the code below actually substitutes
hard_frame_pointer_rtx, not stack_pointer_rtx -- this comment
appears stale; verify.  */
730 /* Get addr of containing function's current nonlocal goto handler,
731 which will do any cleanups and then jump to the label. */
732 addr = copy_rtx (p->nonlocal_goto_handler_slot);
733 temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
734 hard_frame_pointer_rtx));
736 /* Restore the stack pointer. Note this uses fp just restored. */
737 addr = p->nonlocal_goto_stack_level;
739 addr = replace_rtx (copy_rtx (addr),
740 virtual_stack_vars_rtx,
741 hard_frame_pointer_rtx);
743 emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);
745 /* Put in the static chain register the nonlocal label address. */
746 emit_move_insn (static_chain_rtx, label_ref);
747 /* USE of hard_frame_pointer_rtx added for consistency; not clear if
749 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
750 emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
751 emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
752 emit_indirect_jump (temp);
/* Ordinary (same-function) goto.  */
756 expand_goto_internal (label, label_rtx (label), NULL_RTX);
759 /* Generate RTL code for a `goto' statement with target label BODY.
760 LABEL should be a LABEL_REF.
761 LAST_INSN, if non-0, is the rtx we should consider as the last
762 insn emitted (for the purposes of cleaning up a return). */
/* Generate RTL for a goto to CODE_LABEL `label' (tree node BODY, or 0
   for break/continue/return).  If the label is already defined, emit
   cleanups and a stack restore as needed; otherwise queue a fixup.
   NOTE(review): parameter declarations, braces, and several statements
   (including the output_bytecode test and the jump emission) are
   missing from this excerpt.  */
765 expand_goto_internal (body, label, last_insn)
770 struct nesting *block;
773 /* NOTICE! If a bytecode instruction other than `jump' is needed,
774 then the caller has to call bc_expand_goto_internal()
775 directly. This is rather an exceptional case, and there aren't
776 that many places where this is necessary. */
779 expand_goto_internal (body, label, last_insn);
783 if (GET_CODE (label) != CODE_LABEL)
786 /* If label has already been defined, we can tell now
787 whether and how we must alter the stack level. */
789 if (PREV_INSN (label) != 0)
791 /* Find the innermost pending block that contains the label.
792 (Check containment by comparing insn-uids.)
793 Then restore the outermost stack level within that block,
794 and do cleanups of all blocks contained in it. */
795 for (block = block_stack; block; block = block->next)
797 if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
799 if (block->data.block.stack_level != 0)
800 stack_level = block->data.block.stack_level;
801 /* Execute the cleanups for blocks we are exiting. */
802 if (block->data.block.cleanups != 0)
804 expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
805 do_pending_stack_adjust ();
811 /* Ensure stack adjust isn't done by emit_jump, as this would clobber
812 the stack pointer. This one should be deleted as dead by flow. */
813 clear_pending_stack_adjust ();
814 do_pending_stack_adjust ();
815 emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
/* A defined label marked DECL_TOO_LATE means the goto would enter a
   binding contour after its setup code has run -- report it.  */
818 if (body != 0 && DECL_TOO_LATE (body))
819 error ("jump to `%s' invalidly jumps into binding contour",
820 IDENTIFIER_POINTER (DECL_NAME (body)));
822 /* Label not yet defined: may need to put this goto
823 on the fixup list. */
824 else if (! expand_fixup (body, label, last_insn))
826 /* No fixup needed. Record that the label is the target
827 of at least one goto that has no fixup. */
829 TREE_ADDRESSABLE (body) = 1;
835 /* Generate a jump with OPCODE to the given bytecode LABEL which is
836 found within BODY. */
/* Bytecode counterpart of expand_goto_internal: emit a jump with
   OPCODE to bytecode label LABEL (tree node BODY, or 0).
   NOTE(review): braces, the label-defined test, and parts of the
   stack-adjust condition are missing from this excerpt.  */
839 bc_expand_goto_internal (opcode, label, body)
840 enum bytecode_opcode opcode;
841 struct bc_label *label;
844 struct nesting *block;
/* -1 means no enclosing contour requires a stack-level restore.  */
845 int stack_level = -1;
847 /* If the label is defined, adjust the stack as necessary.
848 If it's not defined, we have to push the reference on the
854 /* Find the innermost pending block that contains the label.
855 (Check containment by comparing bytecode uids.) Then restore the
856 outermost stack level within that block. */
858 for (block = block_stack; block; block = block->next)
860 if (BYTECODE_BC_LABEL (block->data.block.first_insn)->uid < label->uid)
862 if (block->data.block.bc_stack_level)
863 stack_level = block->data.block.bc_stack_level;
865 /* Execute the cleanups for blocks we are exiting. */
866 if (block->data.block.cleanups != 0)
868 expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
869 do_pending_stack_adjust ();
873 /* Restore the stack level. If we need to adjust the stack, we
874 must do so after the jump, since the jump may depend on
875 what's on the stack. Thus, any stack-modifying conditional
876 jumps (these are the only ones that rely on what's on the
877 stack) go into the fixup list. */
880 && stack_depth != stack_level
883 bc_expand_fixup (opcode, label, stack_level);
886 if (stack_level >= 0)
887 bc_adjust_stack (stack_depth - stack_level);
/* NOTE(review): the RTL path checks DECL_TOO_LATE here; this path
   tests DECL_BIT_FIELD -- presumably the same bit is reused as the
   "too late" flag for labels; verify against the macro definitions.  */
889 if (body && DECL_BIT_FIELD (body))
890 error ("jump to `%s' invalidly jumps into binding contour",
891 IDENTIFIER_POINTER (DECL_NAME (body)));
893 /* Emit immediate jump */
894 bc_emit_bytecode (opcode);
895 bc_emit_bytecode_labelref (label);
897 #ifdef DEBUG_PRINT_CODE
898 fputc ('\n', stderr);
903 /* Put goto in the fixup list */
904 bc_expand_fixup (opcode, label, stack_level);
907 /* Generate if necessary a fixup for a goto
908 whose target label in tree structure (if any) is TREE_LABEL
909 and whose target in rtl is RTL_LABEL.
911 If LAST_INSN is nonzero, we pretend that the jump appears
912 after insn LAST_INSN instead of at the current point in the insn stream.
914 The fixup will be used later to insert insns just before the goto.
915 Those insns will restore the stack level as appropriate for the
916 target label, and will (in the case of C++) also invoke any object
917 destructors which have to be invoked when we exit the scopes which
918 are exited by the goto.
920 Value is nonzero if a fixup is made. */
/* Queue a fixup for a goto to TREE_LABEL / RTL_LABEL if any contour
   between here and the label's (predicted) destination block has a
   stack level or cleanups.  Returns nonzero when a fixup was made.
   NOTE(review): parameter declarations, braces, and several lines
   (including the cond_stack/loop_stack guards and parts of the
   cleanup_list_list expression) are missing from this excerpt.  */
923 expand_fixup (tree_label, rtl_label, last_insn)
928 struct nesting *block, *end_block;
930 /* See if we can recognize which block the label will be output in.
931 This is possible in some very common cases.
932 If we succeed, set END_BLOCK to that block.
933 Otherwise, set it to 0. */
936 && (rtl_label == cond_stack->data.cond.endif_label
937 || rtl_label == cond_stack->data.cond.next_label))
938 end_block = cond_stack;
939 /* If we are in a loop, recognize certain labels which
940 are likely targets. This reduces the number of fixups
941 we need to create. */
943 && (rtl_label == loop_stack->data.loop.start_label
944 || rtl_label == loop_stack->data.loop.end_label
945 || rtl_label == loop_stack->data.loop.continue_label))
946 end_block = loop_stack;
950 /* Now set END_BLOCK to the binding level to which we will return. */
954 struct nesting *next_block = end_block->all;
957 /* First see if the END_BLOCK is inside the innermost binding level.
958 If so, then no cleanups or stack levels are relevant. */
959 while (next_block && next_block != block)
960 next_block = next_block->all;
965 /* Otherwise, set END_BLOCK to the innermost binding level
966 which is outside the relevant control-structure nesting. */
967 next_block = block_stack->next;
968 for (block = block_stack; block != end_block; block = block->all)
969 if (block == next_block)
970 next_block = next_block->next;
971 end_block = next_block;
974 /* Does any containing block have a stack level or cleanups?
975 If not, no fixup is needed, and that is the normal case
976 (the only case, for standard C). */
977 for (block = block_stack; block != end_block; block = block->next)
978 if (block->data.block.stack_level != 0
979 || block->data.block.cleanups != 0)
982 if (block != end_block)
984 /* Ok, a fixup is needed. Add a fixup to the list of such. */
985 struct goto_fixup *fixup
986 = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
987 /* In case an old stack level is restored, make sure that comes
988 after any pending stack adjust. */
989 /* ?? If the fixup isn't to come at the present position,
990 doing the stack adjust here isn't useful. Doing it with our
991 settings at that location isn't useful either. Let's hope
994 do_pending_stack_adjust ();
995 fixup->target = tree_label;
996 fixup->target_rtl = rtl_label;
998 /* Create a BLOCK node and a corresponding matched set of
999 NOTE_INSN_BEGIN_BLOCK and NOTE_INSN_END_BLOCK notes at
1000 this point. The notes will encapsulate any and all fixup
1001 code which we might later insert at this point in the insn
1002 stream. Also, the BLOCK node will be the parent (i.e. the
1003 `SUPERBLOCK') of any other BLOCK nodes which we might create
1004 later on when we are expanding the fixup code. */
1007 register rtx original_before_jump
1008 = last_insn ? last_insn : get_last_insn ();
1012 fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
1013 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
1014 fixup->context = poplevel (1, 0, 0); /* Create the BLOCK node now! */
/* Move the freshly emitted begin/end notes to just after the jump's
   recorded position.  */
1016 emit_insns_after (fixup->before_jump, original_before_jump);
1019 fixup->block_start_count = block_start_count;
1020 fixup->stack_level = 0;
1021 fixup->cleanup_list_list
1022 = (((block->data.block.outer_cleanups
1024 && block->data.block.outer_cleanups != empty_cleanup_list
1027 || block->data.block.cleanups)
1028 ? tree_cons (NULL_TREE, block->data.block.cleanups,
1029 block->data.block.outer_cleanups)
/* Link the new fixup at the head of the chain.  */
1031 fixup->next = goto_fixup_chain;
1032 goto_fixup_chain = fixup;
1039 /* Generate bytecode jump with OPCODE to a fixup routine that links to LABEL.
1040 Make the fixup restore the stack level to STACK_LEVEL. */
/* Record a bytecode goto-fixup for LABEL: allocate a goto_fixup record,
   fill in its bytecode-specific fields, push it on goto_fixup_chain, and
   emit OPCODE as a jump to the fixup's own (freshly created) label.
   NOTE(review): this excerpt elides several lines of the original
   (e.g. the declaration of STACK_LEVEL and the braces/#endif).  */
1043 bc_expand_fixup (opcode, label, stack_level)
1044 enum bytecode_opcode opcode;
1045 struct bc_label *label;
/* Fixup records live on the statement obstack (oballoc).  */
1048 struct goto_fixup *fixup
1049 = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
/* A fresh internal label; the fixup code itself will be emitted there
   later, by bc_fixup_gotos.  */
1051 fixup->label = bc_get_bytecode_label ();
1052 fixup->bc_target = label;
1053 fixup->bc_stack_level = stack_level;
1054 fixup->bc_handled = FALSE;
/* Link onto the global chain so bc_fixup_gotos can find it.  */
1056 fixup->next = goto_fixup_chain;
1057 goto_fixup_chain = fixup;
1059 /* Insert a jump to the fixup code */
1060 bc_emit_bytecode (opcode);
1061 bc_emit_bytecode_labelref (fixup->label);
1063 #ifdef DEBUG_PRINT_CODE
1064 fputc ('\n', stderr);
1068 /* Expand any needed fixups in the outputmost binding level of the
1069 function. FIRST_INSN is the first insn in the function. */
/* Process all pending goto-fixups at the outermost binding level of the
   function by delegating to fixup_gotos with no enclosing block, no stack
   level to restore, and no cleanup list.  FIRST_INSN is the function's
   first insn.  */
1072 expand_fixups (first_insn)
1075 fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
1078 /* When exiting a binding contour, process all pending gotos requiring fixups.
1079 THISBLOCK is the structure that describes the block being exited.
1080 STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
1081 CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
1082 FIRST_INSN is the insn that began this contour.
1084 Gotos that jump out of this contour must restore the
1085 stack level and do the cleanups before actually jumping.
1087 DONT_JUMP_IN nonzero means report error there is a jump into this
1088 contour from before the beginning of the contour.
1089 This is also done if STACK_LEVEL is nonzero. */
/* Walk goto_fixup_chain when a binding contour (THISBLOCK) is exited.
   Two passes over the chain: the first finalizes fixups whose target
   label has now been defined (emitting cleanups and a stack restore
   ahead of the jump); the second, for still-undefined labels, runs this
   block's cleanups now and records the stack level to restore later.
   NOTE(review): many lines of the original function are elided from
   this excerpt (braces, some declarations and conditions); the comments
   below describe only what the visible code shows.  */
1092 fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
1093 struct nesting *thisblock;
1099 register struct goto_fixup *f, *prev;
/* Bytecode compilation has its own fixup walker.  */
1101 if (output_bytecode)
1103 /* ??? The second arg is the bc stack level, which is not the same
1104 as STACK_LEVEL. I have no idea what should go here, so I'll
1106 bc_fixup_gotos (thisblock, 0, cleanup_list, first_insn, dont_jump_in);
1110 /* F is the fixup we are considering; PREV is the previous one. */
1111 /* We run this loop in two passes so that cleanups of exited blocks
1112 are run first, and blocks that are exited are marked so
/* Pass 1: finalize fixups whose target label is now defined.  */
1115 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1117 /* Test for a fixup that is inactive because it is already handled. */
1118 if (f->before_jump == 0)
1120 /* Delete inactive fixup from the chain, if that is easy to do. */
1122 prev->next = f->next;
1124 /* Has this fixup's target label been defined?
1125 If so, we can finalize it. */
1126 else if (PREV_INSN (f->target_rtl) != 0)
1128 register rtx cleanup_insns;
1130 /* Get the first non-label after the label
1131 this goto jumps to. If that's before this scope begins,
1132 we don't have a jump into the scope. */
1133 rtx after_label = f->target_rtl;
1134 while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
1135 after_label = NEXT_INSN (after_label);
1137 /* If this fixup jumped into this contour from before the beginning
1138 of this contour, report an error. */
1139 /* ??? Bug: this does not detect jumping in through intermediate
1140 blocks that have stack levels or cleanups.
1141 It detects only a problem with the innermost block
1142 around the label. */
1144 && (dont_jump_in || stack_level || cleanup_list)
1145 /* If AFTER_LABEL is 0, it means the jump goes to the end
1146 of the rtl, which means it jumps into this scope. */
1147 && (after_label == 0
1148 || INSN_UID (first_insn) < INSN_UID (after_label))
1149 && INSN_UID (first_insn) > INSN_UID (f->before_jump)
1150 && ! DECL_ERROR_ISSUED (f->target))
1152 error_with_decl (f->target,
1153 "label `%s' used before containing binding contour");
1154 /* Prevent multiple errors for one label. */
1155 DECL_ERROR_ISSUED (f->target) = 1;
1158 /* We will expand the cleanups into a sequence of their own and
1159 then later on we will attach this new sequence to the insn
1160 stream just ahead of the actual jump insn. */
1164 /* Temporarily restore the lexical context where we will
1165 logically be inserting the fixup code. We do this for the
1166 sake of getting the debugging information right. */
1169 set_block (f->context);
1171 /* Expand the cleanups for blocks this jump exits. */
1172 if (f->cleanup_list_list)
1175 for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
1176 /* Marked elements correspond to blocks that have been closed.
1177 Do their cleanups. */
1178 if (TREE_ADDRESSABLE (lists)
1179 && TREE_VALUE (lists) != 0)
1181 expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
1182 /* Pop any pushes done in the cleanups,
1183 in case function is about to return. */
1184 do_pending_stack_adjust ();
1188 /* Restore stack level for the biggest contour that this
1189 jump jumps out of. */
1191 emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);
1193 /* Finish up the sequence containing the insns which implement the
1194 necessary cleanups, and then attach that whole sequence to the
1195 insn stream just ahead of the actual jump insn. Attaching it
1196 at that point insures that any cleanups which are in fact
1197 implicit C++ object destructions (which must be executed upon
1198 leaving the block) appear (to the debugger) to be taking place
1199 in an area of the generated code where the object(s) being
1200 destructed are still "in scope". */
1202 cleanup_insns = get_insns ();
1206 emit_insns_after (cleanup_insns, f->before_jump);
1213 /* For any still-undefined labels, do the cleanups for this block now.
1214 We must do this now since items in the cleanup list may go out
1215 of scope when the block ends. */
/* Pass 2: fixups whose label has not appeared yet.  */
1216 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1217 if (f->before_jump != 0
1218 && PREV_INSN (f->target_rtl) == 0
1219 /* Label has still not appeared. If we are exiting a block with
1220 a stack level to restore, that started before the fixup,
1221 mark this stack level as needing restoration
1222 when the fixup is later finalized. */
1224 /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
1225 means the label is undefined. That's erroneous, but possible. */
1226 && (thisblock->data.block.block_start_count
1227 <= f->block_start_count))
1229 tree lists = f->cleanup_list_list;
1232 for (; lists; lists = TREE_CHAIN (lists))
1233 /* If the following elt. corresponds to our containing block
1234 then the elt. must be for this block. */
1235 if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
1239 set_block (f->context);
1240 expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
1241 do_pending_stack_adjust ();
1242 cleanup_insns = get_insns ();
1246 = emit_insns_after (cleanup_insns, f->before_jump);
/* Drop the list element just handled so it is not re-run later.  */
1248 f->cleanup_list_list = TREE_CHAIN (lists);
/* Remember the stack level to restore when this fixup is finalized.  */
1252 f->stack_level = stack_level;
1257 /* When exiting a binding contour, process all pending gotos requiring fixups.
1258 Note: STACK_DEPTH is not altered.
1260 The arguments are currently not used in the bytecode compiler, but we may
1261 need them one day for languages other than C.
1263 THISBLOCK is the structure that describes the block being exited.
1264 STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
1265 CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
1266 FIRST_INSN is the insn that began this contour.
1268 Gotos that jump out of this contour must restore the
1269 stack level and do the cleanups before actually jumping.
1271 DONT_JUMP_IN nonzero means report error there is a jump into this
1272 contour from before the beginning of the contour.
1273 This is also done if STACK_LEVEL is nonzero. */
/* Bytecode counterpart of fixup_gotos: for each pending fixup, define
   its internal label, emit stack-adjustment code if a bytecode stack
   level was recorded, and jump to the real target.  The chain is then
   cleared.  NOTE(review): the excerpt elides some lines (braces, the
   remaining parameter declarations).  */
1276 bc_fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
1277 struct nesting *thisblock;
1283 register struct goto_fixup *f, *prev;
1284 int saved_stack_depth;
1286 /* F is the fixup we are considering; PREV is the previous one. */
1288 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1290 /* Test for a fixup that is inactive because it is already handled. */
1291 if (f->before_jump == 0)
1293 /* Delete inactive fixup from the chain, if that is easy to do. */
1295 prev->next = f->next;
1298 /* Emit code to restore the stack and continue */
1299 bc_emit_bytecode_labeldef (f->label);
1301 /* Save stack_depth across call, since bc_adjust_stack () will alter
1302 the perceived stack depth via the instructions generated. */
/* A negative bc_stack_level means no adjustment was recorded.  */
1304 if (f->bc_stack_level >= 0)
1306 saved_stack_depth = stack_depth;
1307 bc_adjust_stack (stack_depth - f->bc_stack_level);
1308 stack_depth = saved_stack_depth;
/* Unconditional jump to the fixup's real destination.  */
1311 bc_emit_bytecode (jump);
1312 bc_emit_bytecode_labelref (f->bc_target);
1314 #ifdef DEBUG_PRINT_CODE
1315 fputc ('\n', stderr);
/* All fixups handled; reset the chain.  */
1319 goto_fixup_chain = NULL;
1322 /* Generate RTL for an asm statement (explicit assembler code).
1323 BODY is a STRING_CST node containing the assembler code text,
1324 or an ADDR_EXPR containing a STRING_CST. */
/* Body fragment of expand_asm (the definition header is elided from this
   excerpt): reject `asm' under bytecode, unwrap an ADDR_EXPR to reach
   the STRING_CST, and emit the text as an ASM_INPUT insn.  */
1330 if (output_bytecode)
1332 error ("`asm' is invalid when generating bytecode");
/* BODY may be an ADDR_EXPR wrapping the STRING_CST; strip it.  */
1336 if (TREE_CODE (body) == ADDR_EXPR)
1337 body = TREE_OPERAND (body, 0);
1339 emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
1340 TREE_STRING_POINTER (body)));
1344 /* Generate RTL for an asm statement with arguments.
1345 STRING is the instruction template.
1346 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
1347 Each output or input has an expression in the TREE_VALUE and
1348 a constraint-string in the TREE_PURPOSE.
1349 CLOBBERS is a list of STRING_CST nodes each naming a hard register
1350 that is clobbered by this insn.
1352 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
1353 Some elements of OUTPUTS may be replaced with trees representing temporary
1354 values. The caller should copy those temporary values to the originally
1357 VOL nonzero means the insn is volatile; don't optimize it. */
/* Expand an extended `asm' with operands into RTL: validate constraint
   strings, evaluate outputs and inputs, build an ASM_OPERANDS body
   (wrapped in SETs and a PARALLEL with CLOBBERs as needed) and emit it.
   NOTE(review): this excerpt elides many lines of the original function
   (declarations, braces, several statements); comments below describe
   only the visible code.  */
1360 expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
1361 tree string, outputs, inputs, clobbers;
1366 rtvec argvec, constraints;
1368 int ninputs = list_length (inputs);
1369 int noutputs = list_length (outputs);
1373 /* Vector of RTX's of evaluated output operands. */
1374 rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1375 /* The insn we have emitted. */
1378 if (output_bytecode)
1380 error ("`asm' is invalid when generating bytecode");
1384 /* Count the number of meaningful clobbered registers, ignoring what
1385 we would ignore later. */
1387 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1389 char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1390 i = decode_reg_name (regname);
/* -4 is decode_reg_name's code for "memory" (see the clobber loop
   below); other negative codes are errors or ignorable names.  */
1391 if (i >= 0 || i == -4)
1394 error ("unknown register name `%s' in `asm'", regname);
/* Validate each output operand and its constraint string.  */
1399 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1401 tree val = TREE_VALUE (tail);
1402 tree type = TREE_TYPE (val);
1405 int found_equal = 0;
1408 /* If there's an erroneous arg, emit no insn. */
1409 if (TREE_TYPE (val) == error_mark_node)
1412 /* Make sure constraint has `=' and does not have `+'. Also, see
1413 if it allows any register. Be liberal on the latter test, since
1414 the worst that happens if we get it wrong is we issue an error
1417 for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1; j++)
1418 switch (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j])
1421 error ("output operand constraint contains `+'");
1428 case '?': case '!': case '*': case '%': case '&':
1429 case 'V': case 'm': case 'o': case '<': case '>':
1430 case 'E': case 'F': case 'G': case 'H': case 'X':
1431 case 's': case 'i': case 'n':
1432 case 'I': case 'J': case 'K': case 'L': case 'M':
1433 case 'N': case 'O': case 'P': case ',':
1434 #ifdef EXTRA_CONSTRAINT
1435 case 'Q': case 'R': case 'S': case 'T': case 'U':
1439 case 'p': case 'g': case 'r':
1440 /* Whether or not a numeric constraint allows a register is
1441 decided by the matching constraint, and so there is no need
1442 to do anything special with them. We must handle them in
1443 the default case, so that we don't unnecessarily force
1444 operands to memory. */
1445 case '0': case '1': case '2': case '3': case '4':
1453 error ("output operand constraint lacks `='");
1457 /* If an output operand is not a decl or indirect ref and our constraint
1458 allows a register, make a temporary to act as an intermediate.
1459 Make the asm insn write into that, then our caller will copy it to
1460 the real output operand. Likewise for promoted variables. */
1462 if (TREE_CODE (val) == INDIRECT_REF
1463 || (TREE_CODE_CLASS (TREE_CODE (val)) == 'd'
1464 && ! (GET_CODE (DECL_RTL (val)) == REG
1465 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
1469 mark_addressable (TREE_VALUE (tail));
1472 = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
1474 if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
1475 error ("output number %d not directly addressable", i);
/* Otherwise: write into a temporary; caller copies it back.  */
1479 output_rtx[i] = assign_temp (type, 0, 0, 0);
1480 TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
1484 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1486 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1490 /* Make vectors for the expression-rtx and constraint strings. */
1492 argvec = rtvec_alloc (ninputs);
1493 constraints = rtvec_alloc (ninputs);
1495 body = gen_rtx (ASM_OPERANDS, VOIDmode,
1496 TREE_STRING_POINTER (string), "", 0, argvec, constraints,
1498 MEM_VOLATILE_P (body) = vol;
1500 /* Eval the inputs and put them into ARGVEC.
1501 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1504 for (tail = inputs; tail; tail = TREE_CHAIN (tail))
1509 /* If there's an erroneous arg, emit no insn,
1510 because the ASM_INPUT would get VOIDmode
1511 and that could cause a crash in reload. */
1512 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1514 if (TREE_PURPOSE (tail) == NULL_TREE)
1516 error ("hard register `%s' listed as input operand to `asm'",
1517 TREE_STRING_POINTER (TREE_VALUE (tail)) );
1521 /* Make sure constraint has neither `=' nor `+'. */
1523 for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1; j++)
1524 switch (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j])
1527 error ("input operand constraint contains `%c'",
1528 TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
1531 case '?': case '!': case '*': case '%': case '&':
1532 case 'V': case 'm': case 'o': case '<': case '>':
1533 case 'E': case 'F': case 'G': case 'H': case 'X':
1534 case 's': case 'i': case 'n':
1535 case 'I': case 'J': case 'K': case 'L': case 'M':
1536 case 'N': case 'O': case 'P': case ',':
1537 #ifdef EXTRA_CONSTRAINT
1538 case 'Q': case 'R': case 'S': case 'T': case 'U':
1542 case 'p': case 'g': case 'r':
1543 /* Whether or not a numeric constraint allows a register is
1544 decided by the matching constraint, and so there is no need
1545 to do anything special with them. We must handle them in
1546 the default case, so that we don't unnecessarily force
1547 operands to memory. */
1548 case '0': case '1': case '2': case '3': case '4':
1555 mark_addressable (TREE_VALUE (tail));
/* Operand 3 of ASM_OPERANDS is the argument vector.  */
1557 XVECEXP (body, 3, i) /* argvec */
1558 = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
/* A constant that is not a general_operand must be forced into a
   register or the constant pool before it can be an asm input.  */
1559 if (CONSTANT_P (XVECEXP (body, 3, i))
1560 && ! general_operand (XVECEXP (body, 3, i),
1561 TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)))))
1564 XVECEXP (body, 3, i)
1565 = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1566 XVECEXP (body, 3, i));
1568 XVECEXP (body, 3, i)
1569 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1570 XVECEXP (body, 3, i));
/* NOTE(review): the condition preceding line 1574 is elided here;
   presumably this branch handles register-class operands that must be
   spilled to memory — confirm against the full source.  */
1574 && (GET_CODE (XVECEXP (body, 3, i)) == REG
1575 || GET_CODE (XVECEXP (body, 3, i)) == SUBREG
1576 || GET_CODE (XVECEXP (body, 3, i)) == CONCAT))
1578 tree type = TREE_TYPE (TREE_VALUE (tail));
1579 rtx memloc = assign_temp (type, 1, 1, 1);
1581 emit_move_insn (memloc, XVECEXP (body, 3, i));
1582 XVECEXP (body, 3, i) = memloc;
/* Operand 4 of ASM_OPERANDS is the constraint vector.  */
1585 XVECEXP (body, 4, i) /* constraints */
1586 = gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1587 TREE_STRING_POINTER (TREE_PURPOSE (tail)));
1591 /* Protect all the operands from the queue,
1592 now that they have all been evaluated. */
1594 for (i = 0; i < ninputs; i++)
1595 XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);
1597 for (i = 0; i < noutputs; i++)
1598 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1600 /* Now, for each output, construct an rtx
1601 (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
1602 ARGVEC CONSTRAINTS))
1603 If there is more than one, put them inside a PARALLEL. */
1605 if (noutputs == 1 && nclobbers == 0)
1607 XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
1608 insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));
1610 else if (noutputs == 0 && nclobbers == 0)
1612 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1613 insn = emit_insn (body);
/* General case: a PARALLEL holding the SETs plus one CLOBBER per
   clobbered register.  */
1619 if (num == 0) num = 1;
1620 body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));
1622 /* For each output operand, store a SET. */
1624 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1626 XVECEXP (body, 0, i)
1627 = gen_rtx (SET, VOIDmode,
1629 gen_rtx (ASM_OPERANDS, VOIDmode,
1630 TREE_STRING_POINTER (string),
1631 TREE_STRING_POINTER (TREE_PURPOSE (tail)),
1632 i, argvec, constraints,
1634 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1637 /* If there are no outputs (but there are some clobbers)
1638 store the bare ASM_OPERANDS into the PARALLEL. */
1641 XVECEXP (body, 0, i++) = obody;
1643 /* Store (clobber REG) for each clobbered register specified. */
1645 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1647 char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1648 int j = decode_reg_name (regname);
1652 if (j == -3) /* `cc', which is not a register */
1655 if (j == -4) /* `memory', don't cache memory across asm */
1657 XVECEXP (body, 0, i++)
1658 = gen_rtx (CLOBBER, VOIDmode,
1659 gen_rtx (MEM, BLKmode,
1660 gen_rtx (SCRATCH, VOIDmode, 0)));
1664 /* Ignore unknown register, error already signalled. */
1668 /* Use QImode since that's guaranteed to clobber just one reg. */
1669 XVECEXP (body, 0, i++)
1670 = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));
1673 insn = emit_insn (body);
1679 /* Generate RTL to evaluate the expression EXP
1680 and remember it in case this is the VALUE in a ({... VALUE; }) constr. */
/* Expand an expression statement EXP: warn if it has no effect, expand
   it to RTL, force a reference if the value is a volatile MEM, and
   remember its value/type in last_expr_value / last_expr_type in case
   it is the value of a ({...}) construct.  NOTE(review): some lines of
   the original are elided from this excerpt.  */
1683 expand_expr_stmt (exp)
/* Bytecode path: expand, then drop any value left on the stack.  */
1686 if (output_bytecode)
1688 int org_stack_depth = stack_depth;
1690 bc_expand_expr (exp);
1692 /* Restore stack depth */
1693 if (stack_depth < org_stack_depth)
1696 bc_emit_instruction (drop);
1698 last_expr_type = TREE_TYPE (exp);
1702 /* If -W, warn about statements with no side effects,
1703 except for an explicit cast to void (e.g. for assert()), and
1704 except inside a ({...}) where they may be useful. */
1705 if (expr_stmts_for_value == 0 && exp != error_mark_node)
1707 if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
1708 && !(TREE_CODE (exp) == CONVERT_EXPR
1709 && TREE_TYPE (exp) == void_type_node))
1710 warning_with_file_and_line (emit_filename, emit_lineno,
1711 "statement with no effect");
1712 else if (warn_unused)
1713 warn_if_unused_value (exp);
1716 /* If EXP is of function type and we are expanding statements for
1717 value, convert it to pointer-to-function. */
1718 if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
1719 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
1721 last_expr_type = TREE_TYPE (exp);
/* Under -fsyntax-only we skip RTL generation entirely.  */
1722 if (! flag_syntax_only)
1723 last_expr_value = expand_expr (exp,
1724 (expr_stmts_for_value
1725 ? NULL_RTX : const0_rtx),
1728 /* If all we do is reference a volatile value in memory,
1729 copy it to a register to be sure it is actually touched. */
1730 if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
1731 && TREE_THIS_VOLATILE (exp))
1733 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
1735 else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
1736 copy_to_reg (last_expr_value);
/* BLKmode volatile: no register copy is possible, so reference the
   value via a self-comparison and a never-taken-differently jump.  */
1739 rtx lab = gen_label_rtx ();
1741 /* Compare the value with itself to reference it. */
1742 emit_cmp_insn (last_expr_value, last_expr_value, EQ,
1743 expand_expr (TYPE_SIZE (last_expr_type),
1744 NULL_RTX, VOIDmode, 0),
1746 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
1747 emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
1752 /* If this expression is part of a ({...}) and is in memory, we may have
1753 to preserve temporaries. */
1754 preserve_temp_slots (last_expr_value);
1756 /* Free any temporaries used to evaluate this expression. Any temporary
1757 used as a result of this expression will already have been preserved
1764 /* Warn if EXP contains any computations whose results are not used.
1765 Return 1 if a warning is printed; 0 otherwise. */
/* Warn if EXP contains any computation whose result is not used.
   Returns 1 if a warning was printed, 0 otherwise.  Dispatches on
   TREE_CODE: side-effecting codes are silently accepted; compound
   expressions recurse into the operand whose value matters; the default
   case issues "value computed is not used".  NOTE(review): some case
   labels and return statements are elided from this excerpt.  */
1768 warn_if_unused_value (exp)
/* TREE_USED marks nodes already checked — avoid duplicate warnings.  */
1771 if (TREE_USED (exp))
1774 switch (TREE_CODE (exp))
1776 case PREINCREMENT_EXPR:
1777 case POSTINCREMENT_EXPR:
1778 case PREDECREMENT_EXPR:
1779 case POSTDECREMENT_EXPR:
1784 case METHOD_CALL_EXPR:
1786 case WITH_CLEANUP_EXPR:
1788 /* We don't warn about COND_EXPR because it may be a useful
1789 construct if either arm contains a side effect. */
1794 /* For a binding, warn if no side effect within it. */
1795 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1798 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1800 case TRUTH_ORIF_EXPR:
1801 case TRUTH_ANDIF_EXPR:
1802 /* In && or ||, warn if 2nd operand has no side effect. */
1803 return warn_if_unused_value (TREE_OPERAND (exp, 1));
/* COMPOUND_EXPR (comma operator): check both halves.  */
1806 if (TREE_NO_UNUSED_WARNING (exp))
1808 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
1810 /* Let people do `(foo (), 0)' without a warning. */
1811 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1813 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1817 case NON_LVALUE_EXPR:
1818 /* Don't warn about values cast to void. */
1819 if (TREE_TYPE (exp) == void_type_node)
1821 /* Don't warn about conversions not explicit in the user's program. */
1822 if (TREE_NO_UNUSED_WARNING (exp))
1824 /* Assignment to a cast usually results in a cast of a modify.
1825 Don't complain about that. There can be an arbitrary number of
1826 casts before the modify, so we must loop until we find the first
1827 non-cast expression and then test to see if that is a modify. */
1829 tree tem = TREE_OPERAND (exp, 0);
1831 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
1832 tem = TREE_OPERAND (tem, 0);
1834 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
1835 || TREE_CODE (tem) == CALL_EXPR)
1841 /* Don't warn about automatic dereferencing of references, since
1842 the user cannot control it. */
1843 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1844 return warn_if_unused_value (TREE_OPERAND (exp, 0));
1845 /* ... fall through ... */
1848 /* Referencing a volatile value is a side effect, so don't warn. */
1849 if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
1850 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1851 && TREE_THIS_VOLATILE (exp))
1854 warning_with_file_and_line (emit_filename, emit_lineno,
1855 "value computed is not used");
1860 /* Clear out the memory of the last expression evaluated. */
1868 /* Begin a statement which will return a value.
1869 Return the RTL_EXPR for this statement expr.
1870 The caller must save that value and pass it to expand_end_stmt_expr. */
/* Begin a ({...}) statement-expression.  Returns the RTL_EXPR node the
   caller must pass back to expand_end_stmt_expr (or, under bytecode, an
   INTEGER_CST recording the current stack depth).  */
1873 expand_start_stmt_expr ()
1878 /* When generating bytecode just note down the stack depth */
1879 if (output_bytecode)
1880 return (build_int_2 (stack_depth, 0));
1882 /* Make the RTL_EXPR node temporary, not momentary,
1883 so that rtl_expr_chain doesn't become garbage. */
1884 momentary = suspend_momentary ();
1885 t = make_node (RTL_EXPR);
1886 resume_momentary (momentary);
/* Start collecting insns into a sequence owned by the RTL_EXPR.  */
1887 do_pending_stack_adjust ();
1888 start_sequence_for_rtl_expr (t);
/* Mark that enclosed expression statements produce a value.  */
1890 expr_stmts_for_value++;
1894 /* Restore the previous state at the end of a statement that returns a value.
1895 Returns a tree node representing the statement's value and the
1896 insns to compute the value.
1898 The nodes of that expression have been freed by now, so we cannot use them.
1899 But we don't want to do that anyway; the expression has already been
1900 evaluated and now we just want to use the value. So generate a RTL_EXPR
1901 with the proper type and RTL value.
1903 If the last substatement was not an expression,
1904 return something with type `void'. */
/* End a ({...}) statement-expression started by expand_start_stmt_expr.
   T is the node that function returned.  Fills T in with the type and
   RTL value of the last expression statement and returns it; if there
   was no last expression, the result has type void.  NOTE(review):
   some lines of the original are elided from this excerpt.  */
1907 expand_end_stmt_expr (t)
1910 if (output_bytecode)
1916 /* At this point, all expressions have been evaluated in order.
1917 However, all expression values have been popped when evaluated,
1918 which means we have to recover the last expression value. This is
1919 the last value removed by means of a `drop' instruction. Instead
1920 of adding code to inhibit dropping the last expression value, it
1921 is here recovered by undoing the `drop'. Since `drop' is
1922 equivalent to `adjustackSI [1]', it can be undone with `adjstackSI
1925 bc_adjust_stack (-1);
1927 if (!last_expr_type)
1928 last_expr_type = void_type_node;
1930 t = make_node (RTL_EXPR);
1931 TREE_TYPE (t) = last_expr_type;
1932 RTL_EXPR_RTL (t) = NULL;
1933 RTL_EXPR_SEQUENCE (t) = NULL;
1935 /* Don't consider deleting this expr or containing exprs at tree level. */
1936 TREE_THIS_VOLATILE (t) = 1;
/* RTL path: normalize the remembered value before storing it in T.  */
1944 if (last_expr_type == 0)
1946 last_expr_type = void_type_node;
1947 last_expr_value = const0_rtx;
1949 else if (last_expr_value == 0)
1950 /* There are some cases where this can happen, such as when the
1951 statement is void type. */
1952 last_expr_value = const0_rtx;
1953 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
1954 /* Remove any possible QUEUED. */
1955 last_expr_value = protect_from_queue (last_expr_value, 0);
1959 TREE_TYPE (t) = last_expr_type;
1960 RTL_EXPR_RTL (t) = last_expr_value;
/* Capture the insns emitted since expand_start_stmt_expr.  */
1961 RTL_EXPR_SEQUENCE (t) = get_insns ();
/* Keep T alive on rtl_expr_chain until its sequence is used.  */
1963 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
1967 /* Don't consider deleting this expr or containing exprs at tree level. */
1968 TREE_SIDE_EFFECTS (t) = 1;
1969 /* Propagate volatility of the actual RTL expr. */
1970 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
1973 expr_stmts_for_value--;
1978 /* Generate RTL for the start of an if-then. COND is the expression
1979 whose truth should be tested.
1981 If EXITFLAG is nonzero, this conditional is visible to
1982 `exit_something'. */
/* Begin an if-then: push an entry on cond_stack, create the label to
   jump to if COND is false, and emit the conditional jump.  EXITFLAG
   nonzero makes the conditional visible to `exit_something'.  */
1985 expand_start_cond (cond, exitflag)
1989 struct nesting *thiscond = ALLOC_NESTING ();
1991 /* Make an entry on cond_stack for the cond we are entering. */
1993 thiscond->next = cond_stack;
1994 thiscond->all = nesting_stack;
1995 thiscond->depth = ++nesting_depth;
/* next_label is where control goes when COND is false.  */
1996 thiscond->data.cond.next_label = gen_label_rtx ();
1997 /* Before we encounter an `else', we don't need a separate exit label
1998 unless there are supposed to be exit statements
1999 to exit this conditional. */
2000 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2001 thiscond->data.cond.endif_label = thiscond->exit_label;
2002 cond_stack = thiscond;
2003 nesting_stack = thiscond;
2005 if (output_bytecode)
2006 bc_expand_start_cond (cond, exitflag);
/* RTL path: jump to next_label when COND is false.  */
2008 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2011 /* Generate RTL between then-clause and the elseif-clause
2012 of an if-then-elseif-.... */
/* Transition from a then-clause to an `else if': jump past the rest to
   the endif label, define the old false-label here, make a new
   false-label, and test COND.  */
2015 expand_start_elseif (cond)
2018 if (cond_stack->data.cond.endif_label == 0)
2019 cond_stack->data.cond.endif_label = gen_label_rtx ();
2020 emit_jump (cond_stack->data.cond.endif_label);
2021 emit_label (cond_stack->data.cond.next_label);
2022 cond_stack->data.cond.next_label = gen_label_rtx ();
2023 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2026 /* Generate RTL between the then-clause and the else-clause
2027 of an if-then-else. */
/* Transition from the then-clause to the else-clause: jump over the
   else to the endif label and define the false-label here.  Clearing
   next_label forbids further _else/_elseif calls.  */
2030 expand_start_else ()
2032 if (cond_stack->data.cond.endif_label == 0)
2033 cond_stack->data.cond.endif_label = gen_label_rtx ();
2035 if (output_bytecode)
2037 bc_expand_start_else ();
2041 emit_jump (cond_stack->data.cond.endif_label);
2042 emit_label (cond_stack->data.cond.next_label);
2043 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2046 /* After calling expand_start_else, turn this "else" into an "else if"
2047 by providing another condition. */
/* After expand_start_else, turn the "else" into an "else if": make a
   new false-label and emit the test of COND.  */
2050 expand_elseif (cond)
2053 cond_stack->data.cond.next_label = gen_label_rtx ();
2054 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2057 /* Generate RTL for the end of an if-then.
2058 Pop the record for it off of cond_stack. */
/* Body of expand_end_cond (the definition line is elided from this
   excerpt): define any pending false/endif labels and pop the
   conditional's entry off cond_stack.  */
2063 struct nesting *thiscond = cond_stack;
2065 if (output_bytecode)
2066 bc_expand_end_cond ();
2069 do_pending_stack_adjust ();
2070 if (thiscond->data.cond.next_label)
2071 emit_label (thiscond->data.cond.next_label);
2072 if (thiscond->data.cond.endif_label)
2073 emit_label (thiscond->data.cond.endif_label);
2076 POPSTACK (cond_stack);
2081 /* Generate code for the start of an if-then. COND is the expression
2082 whose truth is to be tested; if EXITFLAG is nonzero this conditional
2083 is to be visible to exit_something. It is assumed that the caller
2084 has pushed the previous context on the cond stack. */
/* Bytecode start-of-if: expand COND and emit a jump-if-not-true to the
   conditional's exit label.  Assumes the caller already pushed the
   previous context on cond_stack.  */
2087 bc_expand_start_cond (cond, exitflag)
2091 struct nesting *thiscond = cond_stack;
2093 thiscond->data.case_stmt.nominal_type = cond;
2095 thiscond->exit_label = gen_label_rtx ();
2096 bc_expand_expr (cond);
2097 bc_emit_bytecode (xjumpifnot);
2098 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));
2100 #ifdef DEBUG_PRINT_CODE
2101 fputc ('\n', stderr);
2105 /* Generate the label for the end of an if with
/* Bytecode end-of-if: define the exit label of the current
   conditional.  */
2109 bc_expand_end_cond ()
2111 struct nesting *thiscond = cond_stack;
2113 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->exit_label));
2116 /* Generate code for the start of the else- clause of
/* Bytecode start-of-else: the old exit label becomes the endif label,
   a fresh exit label is created, a jump over the else-clause is
   emitted, and the old false-target (endif_label) is defined here.  */
2120 bc_expand_start_else ()
2122 struct nesting *thiscond = cond_stack;
2124 thiscond->data.cond.endif_label = thiscond->exit_label;
2125 thiscond->exit_label = gen_label_rtx ();
2126 bc_emit_bytecode (jump);
2127 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));
2129 #ifdef DEBUG_PRINT_CODE
2130 fputc ('\n', stderr);
2133 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->data.cond.endif_label));
2136 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2137 loop should be exited by `exit_something'. This is a loop for which
2138 `expand_continue' will jump to the top of the loop.
2140 Make an entry on loop_stack to record the labels associated with
/* Begin a loop whose continue point is its top: push an entry on
   loop_stack with start/end labels, then emit NOTE_INSN_LOOP_BEG and
   the start label (or just the start label under bytecode).
   EXIT_FLAG nonzero makes the loop exitable via `exit_something'.  */
2144 expand_start_loop (exit_flag)
2147 register struct nesting *thisloop = ALLOC_NESTING ();
2149 /* Make an entry on loop_stack for the loop we are entering. */
2151 thisloop->next = loop_stack;
2152 thisloop->all = nesting_stack;
2153 thisloop->depth = ++nesting_depth;
2154 thisloop->data.loop.start_label = gen_label_rtx ();
2155 thisloop->data.loop.end_label = gen_label_rtx ();
2156 thisloop->data.loop.alt_end_label = 0;
/* `continue' targets the top of the loop by default.  */
2157 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2158 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2159 loop_stack = thisloop;
2160 nesting_stack = thisloop;
2162 if (output_bytecode)
2164 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));
2168 do_pending_stack_adjust ();
2170 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
2171 emit_label (thisloop->data.loop.start_label);
2176 /* Like expand_start_loop but for a loop where the continuation point
2177 (for expand_continue_loop) will be specified explicitly. */
/* Like expand_start_loop, but give the loop a separate continue label
   to be placed later by expand_loop_continue_here.  */
2180 expand_start_loop_continue_elsewhere (exit_flag)
2183 struct nesting *thisloop = expand_start_loop (exit_flag);
2184 loop_stack->data.loop.continue_label = gen_label_rtx ();
2188 /* Specify the continuation point for a loop started with
2189 expand_start_loop_continue_elsewhere.
2190 Use this at the point in the code to which a continue statement
/* Define the continue point of the innermost loop at the current
   position (for loops started with
   expand_start_loop_continue_elsewhere).  */
2194 expand_loop_continue_here ()
2196 if (output_bytecode)
2198 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (loop_stack->data.loop.continue_label));
2201 do_pending_stack_adjust ();
/* Mark the continue point for the optimizer/debugger.  */
2202 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
2203 emit_label (loop_stack->data.loop.continue_label);
/* Bytecode end-of-loop: emit the back-edge jump to the loop's start
   label, define its exit label, and pop loop_stack.  */
2209 bc_expand_end_loop ()
2211 struct nesting *thisloop = loop_stack;
2213 bc_emit_bytecode (jump);
2214 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));
2216 #ifdef DEBUG_PRINT_CODE
2217 fputc ('\n', stderr);
2220 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->exit_label));
2221 POPSTACK (loop_stack);
2226 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2227 Pop the block off of loop_stack. */
/* NOTE(review): the `expand_end_loop ()' definition line itself, the
   declarations of `insn' and `num_insns', several braces and `return'
   statements are elided from this excerpt. */
2233 register rtx start_label;
2234 rtx last_test_insn = 0;
2237 if (output_bytecode)
/* Bytecode loops are closed by the dedicated helper above. */
2239 bc_expand_end_loop ();
2243 insn = get_last_insn ();
2244 start_label = loop_stack->data.loop.start_label;
2246 /* Mark the continue-point at the top of the loop if none elsewhere. */
2247 if (start_label == loop_stack->data.loop.continue_label)
2248 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2250 do_pending_stack_adjust ();
2252 /* If optimizing, perhaps reorder the loop. If the loop
2253 starts with a conditional exit, roll that to the end
2254 where it will optimize together with the jump back.
2256 We look for the last conditional branch to the exit that we encounter
2257 before hitting 30 insns or a CALL_INSN. If we see an unconditional
2258 branch to the exit first, use it.
2260 We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
2261 because moving them is not valid. */
/* NOTE(review): the `if (optimize &&' head of this condition is elided;
   the visible part tests that the last insn is not already a
   conditional jump (SET pc from IF_THEN_ELSE). */
2265 ! (GET_CODE (insn) == JUMP_INSN
2266 && GET_CODE (PATTERN (insn)) == SET
2267 && SET_DEST (PATTERN (insn)) == pc_rtx
2268 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2270 /* Scan insns from the top of the loop looking for a qualified
2271 conditional exit. */
2272 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2273 insn = NEXT_INSN (insn))
/* Stop the scan at calls and labels; moving across them is unsafe. */
2275 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
2278 if (GET_CODE (insn) == NOTE
2279 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2280 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2283 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
/* Give up once we have a candidate and have scanned past 30 insns. */
2286 if (last_test_insn && num_insns > 30)
/* A conditional jump whose taken (or fall-through) arm targets the
   loop's end label or its alternate end label qualifies. */
2289 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
2290 && SET_DEST (PATTERN (insn)) == pc_rtx
2291 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
2292 && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
2293 && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
2294 == loop_stack->data.loop.end_label)
2295 || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
2296 == loop_stack->data.loop.alt_end_label)))
2297 || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
2298 && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
2299 == loop_stack->data.loop.end_label)
2300 || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
2301 == loop_stack->data.loop.alt_end_label)))))
2302 last_test_insn = insn;
/* An unconditional jump to the exit, seen first, is also usable. */
2304 if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
2305 && GET_CODE (PATTERN (insn)) == SET
2306 && SET_DEST (PATTERN (insn)) == pc_rtx
2307 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
2308 && ((XEXP (SET_SRC (PATTERN (insn)), 0)
2309 == loop_stack->data.loop.end_label)
2310 || (XEXP (SET_SRC (PATTERN (insn)), 0)
2311 == loop_stack->data.loop.alt_end_label)))
2312 /* Include BARRIER. */
2313 last_test_insn = NEXT_INSN (insn);
2316 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2318 /* We found one. Move everything from there up
2319 to the end of the loop, and add a jump into the loop
2320 to jump to there. */
2321 register rtx newstart_label = gen_label_rtx ();
2322 register rtx start_move = start_label;
2324 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2325 then we want to move this note also. */
2326 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2327 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2328 == NOTE_INSN_LOOP_CONT))
2329 start_move = PREV_INSN (start_move);
2331 emit_label_after (newstart_label, PREV_INSN (start_move));
2332 reorder_insns (start_move, last_test_insn, get_last_insn ());
2333 emit_jump_insn_after (gen_jump (start_label),
2334 PREV_INSN (newstart_label));
2335 emit_barrier_after (PREV_INSN (newstart_label));
2336 start_label = newstart_label;
/* Close the loop: jump back to (possibly moved) start, mark loop end,
   emit the exit label, and pop the nesting entry. */
2340 emit_jump (start_label);
2341 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2342 emit_label (loop_stack->data.loop.end_label);
2344 POPSTACK (loop_stack);
2349 /* Generate a jump to the current loop's continue-point.
2350 This is usually the top of the loop, but may be specified
2351 explicitly elsewhere. If not currently inside a loop,
2352 return 0 and do nothing; caller will print an error message. */
2355 expand_continue_loop (whichloop)
2356 struct nesting *whichloop;
/* NOTE(review): the `if (whichloop == 0)' guard, the `return 0;' error
   path and the final `return 1;' are elided from this excerpt. */
2360 whichloop = loop_stack;
/* Jump (through any needed fixups) to the loop's continue label. */
2363 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2368 /* Generate a jump to exit the current loop. If not currently inside a loop,
2369 return 0 and do nothing; caller will print an error message. */
2372 expand_exit_loop (whichloop)
2373 struct nesting *whichloop;
/* NOTE(review): the null-check on whichloop and the return statements
   are elided from this excerpt. */
2377 whichloop = loop_stack;
/* Jump (through any needed fixups) to the loop's end label. */
2380 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2384 /* Generate a conditional jump to exit the current loop if COND
2385 evaluates to zero. If not currently inside a loop,
2386 return 0 and do nothing; caller will print an error message. */
2389 expand_exit_loop_if_false (whichloop, cond)
2390 struct nesting *whichloop;
/* NOTE(review): the `tree cond;' parameter declaration, the null-check
   on whichloop, braces, `emit_label (label);' and the returns are
   elided from this excerpt. */
2395 whichloop = loop_stack;
2398 if (output_bytecode)
/* Bytecode path: evaluate COND, then jump-if-not to the exit label. */
2400 bc_expand_expr (cond);
2401 bc_expand_goto_internal (xjumpifnot,
2402 BYTECODE_BC_LABEL (whichloop->exit_label),
2407 /* In order to handle fixups, we actually create a conditional jump
2408 around a unconditional branch to exit the loop. If fixups are
2409 necessary, they go before the unconditional branch. */
2411 rtx label = gen_label_rtx ();
2414 do_jump (cond, NULL_RTX, label);
2415 last_insn = get_last_insn ();
/* If do_jump ended on a label, record it as an alternate exit target
   so expand_end_loop's reordering still recognizes exits through it. */
2416 if (GET_CODE (last_insn) == CODE_LABEL)
2417 whichloop->data.loop.alt_end_label = last_insn;
2418 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2426 /* Return non-zero if we should preserve sub-expressions as separate
2427 pseudos. We never do so if we aren't optimizing. We always do so
2428 if -fexpensive-optimizations.
2430 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2431 the loop may still be a small one. */
2434 preserve_subexpressions_p ()
/* NOTE(review): the `rtx insn;' declaration, `return 1;' / `return 0;'
   statements and the head of the final return expression are elided
   from this excerpt. */
2438 if (flag_expensive_optimizations)
2441 if (optimize == 0 || loop_stack == 0)
2444 insn = get_last_insn_anywhere ();
/* Heuristic: "early in the loop" means fewer than 3 insns per
   non-fixed register since the loop's start label was emitted. */
2447 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2448 < n_non_fixed_regs * 3));
2452 /* Generate a jump to exit the current loop, conditional, binding contour
2453 or case statement. Not all such constructs are visible to this function,
2454 only those started with EXIT_FLAG nonzero. Individual languages use
2455 the EXIT_FLAG parameter to control which kinds of constructs you can
2458 If not currently inside anything that can be exited,
2459 return 0 and do nothing; caller will print an error message. */
2462 expand_exit_something ()
/* NOTE(review): the `struct nesting *n;' declaration, braces and the
   `return 1;' / `return 0;' statements are elided from this excerpt. */
/* Walk the whole nesting stack (the `all' chain) for the innermost
   construct that advertised an exit label. */
2466 for (n = nesting_stack; n; n = n->all)
2467 if (n->exit_label != 0)
2469 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2476 /* Generate RTL to return from the current function, with no value.
2477 (That is, we do not do anything about returning any value.) */
2480 expand_null_return ()
/* NOTE(review): the `rtx last_insn = get_last_insn ();' line, braces
   and the bytecode-path `return;' are elided from this excerpt. */
2482 struct nesting *block = block_stack;
2485 if (output_bytecode)
2487 bc_emit_instruction (ret);
2491 /* Does any pending block have cleanups? */
2493 while (block && block->data.block.cleanups == 0)
2494 block = block->next;
2496 /* If yes, use a goto to return, since that runs cleanups. */
2498 expand_null_return_1 (last_insn, block != 0);
2501 /* Generate RTL to return from the current function, with value VAL. */
2504 expand_value_return (val)
/* NOTE(review): the `rtx val;' parameter declaration, braces, the
   `else' between the convert/move branches and the #endif for
   PROMOTE_FUNCTION_RETURN are elided from this excerpt. */
2507 struct nesting *block = block_stack;
2508 rtx last_insn = get_last_insn ();
2509 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2511 /* Copy the value to the return location
2512 unless it's already there. */
2514 if (return_reg != val)
2516 #ifdef PROMOTE_FUNCTION_RETURN
/* With promotion, the return register may be wider than VAL's mode;
   use convert_move so the value is sign/zero-extended correctly. */
2517 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2518 int unsignedp = TREE_UNSIGNED (type);
2519 enum machine_mode mode
2520 = promote_mode (type, DECL_MODE (DECL_RESULT (current_function_decl)),
2523 if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
2524 convert_move (return_reg, val, unsignedp);
2527 emit_move_insn (return_reg, val);
/* Emit a USE so flow analysis keeps the hard return register live. */
2529 if (GET_CODE (return_reg) == REG
2530 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2531 emit_insn (gen_rtx (USE, VOIDmode, return_reg));
2533 /* Does any pending block have cleanups? */
2535 while (block && block->data.block.cleanups == 0)
2536 block = block->next;
2538 /* If yes, use a goto to return, since that runs cleanups.
2539 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2541 expand_null_return_1 (last_insn, block != 0);
2544 /* Output a return with no value. If LAST_INSN is nonzero,
2545 pretend that the return takes place after LAST_INSN.
2546 If USE_GOTO is nonzero then don't use a return instruction;
2547 go to the return label instead. This causes any cleanups
2548 of pending blocks to be executed normally. */
2551 expand_null_return_1 (last_insn, use_goto)
/* NOTE(review): parameter declarations (rtx last_insn; int use_goto;),
   braces, the `return;' statements and the #ifdef HAVE_return guard
   around the gen_return branch are elided from this excerpt. */
2555 rtx end_label = cleanup_label ? cleanup_label : return_label;
2557 clear_pending_stack_adjust ();
2558 do_pending_stack_adjust ();
2561 /* PCC-struct return always uses an epilogue. */
2562 if (current_function_returns_pcc_struct || use_goto)
/* Lazily create the return label if it does not exist yet. */
2565 end_label = return_label = gen_label_rtx ();
2566 expand_goto_internal (NULL_TREE, end_label, last_insn);
2570 /* Otherwise output a simple return-insn if one is available,
2571 unless it won't do the job. */
2573 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2575 emit_jump_insn (gen_return ());
2581 /* Otherwise jump to the epilogue. */
2582 expand_goto_internal (NULL_TREE, end_label, last_insn);
2585 /* Generate RTL to evaluate the expression RETVAL and return it
2586 from the current function. */
2589 expand_return (retval)
/* NOTE(review): this is the elided listing of a long function -- the
   `tree retval;' parameter declaration, the declarations of last_insn,
   retval_rhs, cleanups, expr, op0, many braces, `else' keywords,
   `return;' statements, an #ifdef HAVE_return, and several condition
   heads (e.g. the `if (retval_rhs != 0 && ...' before line 2756) are
   missing from this excerpt.  Confirm structure against the full
   source before acting on any note below. */
2592 /* If there are any cleanups to be performed, then they will
2593 be inserted following LAST_INSN. It is desirable
2594 that the last_insn, for such purposes, should be the
2595 last insn before computing the return value. Otherwise, cleanups
2596 which call functions can clobber the return value. */
2597 /* ??? rms: I think that is erroneous, because in C++ it would
2598 run destructors on variables that might be used in the subsequent
2599 computation of the return value. */
2601 register rtx val = 0;
2605 struct nesting *block;
2607 /* Bytecode returns are quite simple, just leave the result on the
2608 arithmetic stack. */
2609 if (output_bytecode)
2611 bc_expand_expr (retval);
2612 bc_emit_instruction (ret);
2616 /* If function wants no value, give it none. */
2617 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
/* Evaluate RETVAL for its side effects only, then do a plain return. */
2619 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2621 expand_null_return ();
2625 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2626 /* This is not sufficient. We also need to watch for cleanups of the
2627 expression we are about to expand. Unfortunately, we cannot know
2628 if it has cleanups until we expand it, and we want to change how we
2629 expand it depending upon if we need cleanups. We can't win. */
2631 cleanups = any_pending_cleanups (1);
/* Extract the right-hand side actually being returned: either the
   RESULT_DECL itself, the RHS of `result = expr', or a void tail call. */
2636 if (TREE_CODE (retval) == RESULT_DECL)
2637 retval_rhs = retval;
2638 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2639 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2640 retval_rhs = TREE_OPERAND (retval, 1);
2641 else if (TREE_TYPE (retval) == void_type_node)
2642 /* Recognize tail-recursive call to void function. */
2643 retval_rhs = retval;
2645 retval_rhs = NULL_TREE;
2647 /* Only use `last_insn' if there are cleanups which must be run. */
2648 if (cleanups || cleanup_label != 0)
2649 last_insn = get_last_insn ();
2651 /* Distribute return down conditional expr if either of the sides
2652 may involve tail recursion (see test below). This enhances the number
2653 of tail recursions we see. Don't do this always since it can produce
2654 sub-optimal code in some cases and we distribute assignments into
2655 conditional expressions when it would help. */
2657 if (optimize && retval_rhs != 0
2658 && frame_offset == 0
2659 && TREE_CODE (retval_rhs) == COND_EXPR
2660 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2661 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2663 rtx label = gen_label_rtx ();
/* Turn `return c ? a : b' into two separate returns, recursively. */
2666 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2667 expr = build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2668 DECL_RESULT (current_function_decl),
2669 TREE_OPERAND (retval_rhs, 1));
2670 TREE_SIDE_EFFECTS (expr) = 1;
2671 expand_return (expr);
2674 expr = build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2675 DECL_RESULT (current_function_decl),
2676 TREE_OPERAND (retval_rhs, 2));
2677 TREE_SIDE_EFFECTS (expr) = 1;
2678 expand_return (expr);
2682 /* For tail-recursive call to current function,
2683 just jump back to the beginning.
2684 It's unsafe if any auto variable in this function
2685 has its address taken; for simplicity,
2686 require stack frame to be empty. */
2687 if (optimize && retval_rhs != 0
2688 && frame_offset == 0
2689 && TREE_CODE (retval_rhs) == CALL_EXPR
2690 && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
2691 && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
2692 /* Finish checking validity, and if valid emit code
2693 to set the argument variables for the new call. */
2694 && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
2695 DECL_ARGUMENTS (current_function_decl)))
2697 if (tail_recursion_label == 0)
2699 tail_recursion_label = gen_label_rtx ();
2700 emit_label_after (tail_recursion_label,
2701 tail_recursion_reentry);
2704 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
2709 /* This optimization is safe if there are local cleanups
2710 because expand_null_return takes care of them.
2711 ??? I think it should also be safe when there is a cleanup label,
2712 because expand_null_return takes care of them, too.
2713 Any reason why not? */
2714 if (HAVE_return && cleanup_label == 0
2715 && ! current_function_returns_pcc_struct
2716 && BRANCH_COST <= 1)
2718 /* If this is return x == y; then generate
2719 if (x == y) return 1; else return 0;
2720 if we can do it with explicit return insns and
2721 branches are cheap. */
2723 switch (TREE_CODE (retval_rhs))
2731 case TRUTH_ANDIF_EXPR:
2732 case TRUTH_ORIF_EXPR:
2733 case TRUTH_AND_EXPR:
2735 case TRUTH_NOT_EXPR:
2736 case TRUTH_XOR_EXPR:
2737 op0 = gen_label_rtx ();
2738 jumpifnot (retval_rhs, op0);
2739 expand_value_return (const1_rtx);
2741 expand_value_return (const0_rtx);
2745 #endif /* HAVE_return */
2747 /* If the result is an aggregate that is being returned in one (or more)
2748 registers, load the registers here. The compiler currently can't handle
2749 copying a BLKmode value into registers. We could put this code in a
2750 more general area (for use by everyone instead of just function
2751 call/return), but until this feature is generally usable it is kept here
2752 (and in expand_call). The value must go into a pseudo in case there
2753 are cleanups that will clobber the real return register. */
2756 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2757 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2759 int i, bitpos, xbitpos;
2760 int big_endian_correction = 0;
2761 int bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2762 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2763 int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)),BITS_PER_WORD);
2764 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
2765 rtx result_reg, src, dst;
2766 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2767 enum machine_mode tmpmode, result_reg_mode;
2769 /* Structures whose size is not a multiple of a word are aligned
2770 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2771 machine, this means we must skip the empty high order bytes when
2772 calculating the bit offset. */
2773 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2774 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2777 /* Copy the structure BITSIZE bits at a time. */
2778 for (bitpos = 0, xbitpos = big_endian_correction;
2779 bitpos < bytes * BITS_PER_UNIT;
2780 bitpos += bitsize, xbitpos += bitsize)
2782 /* We need a new destination pseudo each time xbitpos is
2783 on a word boundary and when xbitpos == big_endian_correction
2784 (the first time through). */
2785 if (xbitpos % BITS_PER_WORD == 0
2786 || xbitpos == big_endian_correction)
2788 /* Generate an appropriate register. */
2789 dst = gen_reg_rtx (word_mode);
2790 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2792 /* Clobber the destination before we move anything into it. */
2793 emit_insn (gen_rtx (CLOBBER, VOIDmode, dst));
2796 /* We need a new source operand each time bitpos is on a word
2798 if (bitpos % BITS_PER_WORD == 0)
2799 src = operand_subword_force (result_val,
2800 bitpos / BITS_PER_WORD,
2803 /* Use bitpos for the source extraction (left justified) and
2804 xbitpos for the destination store (right justified). */
2805 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2806 extract_bit_field (src, bitsize,
2807 bitpos % BITS_PER_WORD, 1,
2808 NULL_RTX, word_mode,
2810 bitsize / BITS_PER_UNIT,
2812 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2815 /* Find the smallest integer mode large enough to hold the
2816 entire structure and use that mode instead of BLKmode
2817 on the USE insn for the return register. */
2818 bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2819 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2820 tmpmode != MAX_MACHINE_MODE;
2821 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
2823 /* Have we found a large enough mode? */
2824 if (GET_MODE_SIZE (tmpmode) >= bytes)
2828 /* No suitable mode found. */
2829 if (tmpmode == MAX_MACHINE_MODE)
2832 PUT_MODE (DECL_RTL (DECL_RESULT (current_function_decl)), tmpmode);
/* Hard registers cannot hold parts of words, so widen narrow results
   to a full word for the subword copies below. */
2834 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
2835 result_reg_mode = word_mode;
2837 result_reg_mode = tmpmode;
2838 result_reg = gen_reg_rtx (result_reg_mode);
2840 /* Now that the value is in pseudos, copy it to the result reg(s). */
2843 for (i = 0; i < n_regs; i++)
2844 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
2847 if (tmpmode != result_reg_mode)
2848 result_reg = gen_lowpart (tmpmode, result_reg);
2850 expand_value_return (result_reg);
/* NOTE(review): an elided `else if (cleanups' (or similar) condition
   head precedes line 2854 in the full source. */
2854 && TREE_TYPE (retval_rhs) != void_type_node
2855 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2857 /* Calculate the return value into a pseudo reg. */
2858 val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2860 /* All temporaries have now been used. */
2862 /* Return the calculated value, doing cleanups first. */
2863 expand_value_return (val);
2867 /* No cleanups or no hard reg used;
2868 calculate value into hard return reg. */
2869 expand_expr (retval, const0_rtx, VOIDmode, 0);
2872 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2876 /* Return 1 if the end of the generated RTX is not a barrier.
2877 This means code already compiled can drop through. */
2880 drop_through_at_end_p ()
/* NOTE(review): the `int' return type and braces are elided.
   Skips trailing NOTE insns, then reports whether the last real
   insn is something other than a BARRIER. */
2882 rtx insn = get_last_insn ();
2883 while (insn && GET_CODE (insn) == NOTE)
2884 insn = PREV_INSN (insn);
2885 return insn && GET_CODE (insn) != BARRIER;
2888 /* Emit code to alter this function's formal parms for a tail-recursive call.
2889 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2890 FORMALS is the chain of decls of formals.
2891 Return 1 if this can be done;
2892 otherwise return 0 and do not emit any code. */
2895 tail_recursion_args (actuals, formals)
2896 tree actuals, formals;
/* NOTE(review): the `int' return type, the declarations of i/j/copy,
   braces, several `return 0;' bail-outs inside the validation loop and
   the final `return 1;' are elided from this excerpt. */
2898 register tree a = actuals, f = formals;
2900 register rtx *argvec;
2902 /* Check that number and types of actuals are compatible
2903 with the formals. This is not always true in valid C code.
2904 Also check that no formal needs to be addressable
2905 and that all formals are scalars. */
2907 /* Also count the args. */
2909 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2911 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
2912 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
2914 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
/* Mismatched list lengths means the call is not a valid candidate. */
2917 if (a != 0 || f != 0)
2920 /* Compute all the actuals. */
2922 argvec = (rtx *) alloca (i * sizeof (rtx));
2924 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2925 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
2927 /* Find which actual values refer to current values of previous formals.
2928 Copy each of them now, before any formal is changed. */
2930 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2934 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2935 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2936 { copy = 1; break; }
2938 argvec[i] = copy_to_reg (argvec[i]);
2941 /* Store the values of the actuals into the formals. */
2943 for (f = formals, a = actuals, i = 0; f;
2944 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2946 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
2947 emit_move_insn (DECL_RTL (f), argvec[i]);
2949 convert_move (DECL_RTL (f), argvec[i],
2950 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
2957 /* Generate the RTL code for entering a binding contour.
2958 The variables are declared one by one, by calls to `expand_decl'.
2960 EXIT_FLAG is nonzero if this construct should be visible to
2961 `exit_something'. */
2964 expand_start_bindings (exit_flag)
/* NOTE(review): the EXIT_FLAG parameter declaration, braces and the
   conditional heads for the two alternative outer_cleanups-chaining
   sections below are elided.  The full source selects between the
   empty_cleanup_list variant (lines 2981-2991) and the plain variant
   (lines 2995-3001) -- confirm which is live before editing. */
2967 struct nesting *thisblock = ALLOC_NESTING ();
/* Emit a BLOCK_BEG note now (RTL mode only) so we can later attach
   gotos/fixups before the first insn of the block. */
2968 rtx note = output_bytecode ? 0 : emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
2970 /* Make an entry on block_stack for the block we are entering. */
2972 thisblock->next = block_stack;
2973 thisblock->all = nesting_stack;
2974 thisblock->depth = ++nesting_depth;
2975 thisblock->data.block.stack_level = 0;
2976 thisblock->data.block.cleanups = 0;
2977 thisblock->data.block.function_call_count = 0;
2981 if (block_stack->data.block.cleanups == NULL_TREE
2982 && (block_stack->data.block.outer_cleanups == NULL_TREE
2983 || block_stack->data.block.outer_cleanups == empty_cleanup_list))
2984 thisblock->data.block.outer_cleanups = empty_cleanup_list;
2986 thisblock->data.block.outer_cleanups
2987 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2988 block_stack->data.block.outer_cleanups)
2991 thisblock->data.block.outer_cleanups = 0;
2995 && !(block_stack->data.block.cleanups == NULL_TREE
2996 && block_stack->data.block.outer_cleanups == NULL_TREE))
2997 thisblock->data.block.outer_cleanups
2998 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2999 block_stack->data.block.outer_cleanups);
3001 thisblock->data.block.outer_cleanups = 0;
3003 thisblock->data.block.label_chain = 0;
3004 thisblock->data.block.innermost_stack_block = stack_block_stack;
3005 thisblock->data.block.first_insn = note;
3006 thisblock->data.block.block_start_count = ++block_start_count;
3007 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3008 block_stack = thisblock;
3009 nesting_stack = thisblock;
3011 if (!output_bytecode)
3013 /* Make a new level for allocating stack slots. */
3018 /* Given a pointer to a BLOCK node, save a pointer to the most recently
3019 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
3023 remember_end_note (block)
3024 register tree block;
/* NOTE(review): braces are elided.  Consumes last_block_end_note
   (set by expand_end_bindings) and clears it so the same note cannot
   be attached to two BLOCKs. */
3026 BLOCK_END_NOTE (block) = last_block_end_note;
3027 last_block_end_note = NULL_RTX;
3030 /* Generate RTL code to terminate a binding contour.
3031 VARS is the chain of VAR_DECL nodes
3032 for the variables bound in this contour.
3033 MARK_ENDS is nonzero if we should put a note at the beginning
3034 and end of this binding contour.
3036 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3037 (That is true automatically if the contour has a saved stack level.) */
3040 expand_end_bindings (vars, mark_ends, dont_jump_in)
/* NOTE(review): elided listing -- the parameter declarations, the
   declarations of decl/link/insns, many braces, `else' keywords,
   `return;' after the bytecode branch, and several #endif lines are
   missing from this excerpt.  Confirm against the full source. */
3045 register struct nesting *thisblock = block_stack;
3048 if (output_bytecode)
/* Bytecode contours are closed entirely by the helper below. */
3050 bc_expand_end_bindings (vars, mark_ends, dont_jump_in);
/* Warn about variables declared in this contour that were never used. */
3055 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3056 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL
3057 && ! DECL_IN_SYSTEM_HEADER (decl))
3058 warning_with_decl (decl, "unused variable `%s'");
3060 if (thisblock->exit_label)
3062 do_pending_stack_adjust ();
3063 emit_label (thisblock->exit_label);
3066 /* If necessary, make a handler for nonlocal gotos taking
3067 place in the function calls in this block. */
3068 if (function_call_count != thisblock->data.block.function_call_count
3070 /* Make handler for outermost block
3071 if there were any nonlocal gotos to this function. */
3072 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3073 /* Make handler for inner block if it has something
3074 special to do when you jump out of it. */
3075 : (thisblock->data.block.cleanups != 0
3076 || thisblock->data.block.stack_level != 0)))
3079 rtx afterward = gen_label_rtx ();
3080 rtx handler_label = gen_label_rtx ();
3081 rtx save_receiver = gen_reg_rtx (Pmode);
3084 /* Don't let jump_optimize delete the handler. */
3085 LABEL_PRESERVE_P (handler_label) = 1;
3087 /* Record the handler address in the stack slot for that purpose,
3088 during this block, saving and restoring the outer value. */
3089 if (thisblock->next != 0)
/* NOTE(review): a start_sequence () call is elided before these
   emit_move_insn calls in the full source. */
3091 emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
3094 emit_move_insn (save_receiver, nonlocal_goto_handler_slot);
3095 insns = get_insns ();
3097 emit_insns_before (insns, thisblock->data.block.first_insn);
3101 emit_move_insn (nonlocal_goto_handler_slot,
3102 gen_rtx (LABEL_REF, Pmode, handler_label));
3103 insns = get_insns ();
3105 emit_insns_before (insns, thisblock->data.block.first_insn);
3107 /* Jump around the handler; it runs only when specially invoked. */
3108 emit_jump (afterward);
3109 emit_label (handler_label);
3111 #ifdef HAVE_nonlocal_goto
3112 if (! HAVE_nonlocal_goto)
3114 /* First adjust our frame pointer to its actual value. It was
3115 previously set to the start of the virtual area corresponding to
3116 the stacked variables when we branched here and now needs to be
3117 adjusted to the actual hardware fp value.
3119 Assignments are to virtual registers are converted by
3120 instantiate_virtual_regs into the corresponding assignment
3121 to the underlying register (fp in this case) that makes
3122 the original assignment true.
3123 So the following insn will actually be
3124 decrementing fp by STARTING_FRAME_OFFSET. */
3125 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3127 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3128 if (fixed_regs[ARG_POINTER_REGNUM])
3130 #ifdef ELIMINABLE_REGS
3131 /* If the argument pointer can be eliminated in favor of the
3132 frame pointer, we don't need to restore it. We assume here
3133 that if such an elimination is present, it can always be used.
3134 This is the case on all known machines; if we don't make this
3135 assumption, we do unnecessary saving on many machines. */
3136 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3139 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
3140 if (elim_regs[i].from == ARG_POINTER_REGNUM
3141 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3144 if (i == sizeof elim_regs / sizeof elim_regs [0])
3147 /* Now restore our arg pointer from the address at which it
3148 was saved in our stack frame.
3149 If there hasn't be space allocated for it yet, make
3151 if (arg_pointer_save_area == 0)
3152 arg_pointer_save_area
3153 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3154 emit_move_insn (virtual_incoming_args_rtx,
3155 /* We need a pseudo here, or else
3156 instantiate_virtual_regs_1 complains. */
3157 copy_to_reg (arg_pointer_save_area));
3162 /* The handler expects the desired label address in the static chain
3163 register. It tests the address and does an appropriate jump
3164 to whatever label is desired. */
3165 for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
3166 /* Skip any labels we shouldn't be able to jump to from here. */
3167 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3169 rtx not_this = gen_label_rtx ();
3170 rtx this = gen_label_rtx ();
/* Compare the incoming static-chain value against each candidate
   nonlocal label; dispatch to the matching one. */
3171 do_jump_if_equal (static_chain_rtx,
3172 gen_rtx (LABEL_REF, Pmode, DECL_RTL (TREE_VALUE (link))),
3174 emit_jump (not_this);
3176 expand_goto (TREE_VALUE (link));
3177 emit_label (not_this);
3179 /* If label is not recognized, abort. */
3180 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0,
3183 emit_label (afterward);
3186 /* Don't allow jumping into a block that has cleanups or a stack level. */
/* NOTE(review): the `if (dont_jump_in' head of this condition is
   elided before line 3188 in the full source. */
3188 || thisblock->data.block.stack_level != 0
3189 || thisblock->data.block.cleanups != 0)
3191 struct label_chain *chain;
3193 /* Any labels in this block are no longer valid to go to.
3194 Mark them to cause an error message. */
3195 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3197 DECL_TOO_LATE (chain->label) = 1;
3198 /* If any goto without a fixup came to this label,
3199 that must be an error, because gotos without fixups
3200 come from outside all saved stack-levels and all cleanups. */
3201 if (TREE_ADDRESSABLE (chain->label))
3202 error_with_decl (chain->label,
3203 "label `%s' used before containing binding contour");
3207 /* Restore stack level in effect before the block
3208 (only if variable-size objects allocated). */
3209 /* Perform any cleanups associated with the block. */
3211 if (thisblock->data.block.stack_level != 0
3212 || thisblock->data.block.cleanups != 0)
3214 /* Only clean up here if this point can actually be reached. */
3215 int reachable = GET_CODE (get_last_insn ()) != BARRIER;
3217 /* Don't let cleanups affect ({...}) constructs. */
3218 int old_expr_stmts_for_value = expr_stmts_for_value;
3219 rtx old_last_expr_value = last_expr_value;
3220 tree old_last_expr_type = last_expr_type;
3221 expr_stmts_for_value = 0;
3223 /* Do the cleanups. */
3224 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3226 do_pending_stack_adjust ();
3228 expr_stmts_for_value = old_expr_stmts_for_value;
3229 last_expr_value = old_last_expr_value;
3230 last_expr_type = old_last_expr_type;
3232 /* Restore the stack level. */
3234 if (reachable && thisblock->data.block.stack_level != 0)
3236 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3237 thisblock->data.block.stack_level, NULL_RTX);
3238 if (nonlocal_goto_handler_slot != 0)
3239 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3243 /* Any gotos out of this block must also do these things.
3244 Also report any gotos with fixups that came to labels in this
3246 fixup_gotos (thisblock,
3247 thisblock->data.block.stack_level,
3248 thisblock->data.block.cleanups,
3249 thisblock->data.block.first_insn,
3253 /* Mark the beginning and end of the scope if requested.
3254 We do this now, after running cleanups on the variables
3255 just going out of scope, so they are in scope for their cleanups. */
/* NOTE(review): the `if (mark_ends)' and its `else' are elided around
   lines 3258/3261 in the full source. */
3258 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3260 /* Get rid of the beginning-mark if we don't make an end-mark. */
3261 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3263 /* If doing stupid register allocation, make sure lives of all
3264 register variables declared here extend thru end of scope. */
3267 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3269 rtx rtl = DECL_RTL (decl);
3270 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
3274 /* Restore block_stack level for containing block. */
3276 stack_block_stack = thisblock->data.block.innermost_stack_block;
3277 POPSTACK (block_stack);
3279 /* Pop the stack slot nesting and free any slots at this level. */
3284 /* End a binding contour.
3285 VARS is the chain of VAR_DECL nodes for the variables bound
3286 in this contour. MARK_ENDS is nonzer if we should put a note
3287 at the beginning and end of this binding contour.
3288 DONT_JUMP_IN is nonzero if it is not valid to jump into this
3292 bc_expand_end_bindings (vars, mark_ends, dont_jump_in)
/* NOTE(review): parameter declarations and braces are elided from this
   excerpt. */
3297 struct nesting *thisbind = nesting_stack;
/* NOTE(review): this loop treats `vars' as a chain of TREE_LISTs
   (TREE_VALUE (decl)) but then passes `decl' -- the list cell, not the
   VAR_DECL -- to warning_with_decl; looks inconsistent with the RTL
   version above, which passes the decl itself.  Verify against the
   full source before changing. */
3301 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3302 if (! TREE_USED (TREE_VALUE (decl)) && TREE_CODE (TREE_VALUE (decl)) == VAR_DECL)
3303 warning_with_decl (decl, "unused variable `%s'");
3305 if (thisbind->exit_label)
3306 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisbind->exit_label));
3308 /* Pop block/bindings off stack */
3309 POPSTACK (block_stack);
/* Generate RTL for the automatic variable declaration DECL.
   (Other kinds of declarations are simply ignored if seen here.)
   Chooses among: a placeholder for erroneous/incomplete types, a
   pseudo register, a fixed stack slot, or dynamically allocated
   stack space, depending on DECL's type, size, and qualifiers.  */
  struct nesting *thisblock = block_stack;

  /* The bytecode compiler has its own declaration expander.  */
  if (output_bytecode)
      bc_expand_decl (decl, 0);

  type = TREE_TYPE (decl);

  /* Only automatic variables need any expansion done.
     Static and external variables, and external functions,
     will be handled by `assemble_variable' (called from finish_decl).
     TYPE_DECL and CONST_DECL require nothing.
     PARM_DECLs are handled in `assign_parms'.  */

  if (TREE_CODE (decl) != VAR_DECL)
  if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))

  /* Create the RTL representation for the variable.  */

  if (type == error_mark_node)
    /* Erroneous type: use a harmless placeholder to avoid crashes.  */
    DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
  else if (DECL_SIZE (decl) == 0)
    /* Variable with incomplete type.  */
      if (DECL_INITIAL (decl) == 0)
	/* Error message was already done; now avoid a crash.  */
	DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
	/* An initializer is going to decide the size of this array.
	   Until we know the size, represent its address with a reg.  */
	DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
      MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (type);
  else if (DECL_MODE (decl) != BLKmode
	   /* If -ffloat-store, don't put explicit float vars
	      into registers.  */
	   && !(flag_float_store
		&& TREE_CODE (type) == REAL_TYPE)
	   && ! TREE_THIS_VOLATILE (decl)
	   && ! TREE_ADDRESSABLE (decl)
	   && (DECL_REGISTER (decl) || ! obey_regdecls))
      /* Automatic variable that can go in a register.  */
      int unsignedp = TREE_UNSIGNED (type);
      enum machine_mode reg_mode
	= promote_mode (type, DECL_MODE (decl), &unsignedp, 0);

      if (TREE_CODE (type) == COMPLEX_TYPE)
	  rtx realpart, imagpart;
	  enum machine_mode partmode = TYPE_MODE (TREE_TYPE (type));

	  /* For a complex type variable, make a CONCAT of two pseudos
	     so that the real and imaginary parts
	     can be allocated separately.  */
	  realpart = gen_reg_rtx (partmode);
	  REG_USERVAR_P (realpart) = 1;
	  imagpart = gen_reg_rtx (partmode);
	  REG_USERVAR_P (imagpart) = 1;
	  DECL_RTL (decl) = gen_rtx (CONCAT, reg_mode, realpart, imagpart);

	  /* Ordinary scalar: a single pseudo register.  */
	  DECL_RTL (decl) = gen_reg_rtx (reg_mode);
	  if (TREE_CODE (type) == POINTER_TYPE)
	    /* Record what the pointer points at, so later memory
	       references through it know the alignment.  */
	    mark_reg_pointer (DECL_RTL (decl),
			      (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl)))
	  REG_USERVAR_P (DECL_RTL (decl)) = 1;
  else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST)
      /* Variable of fixed size that goes on the stack.  */

      /* If we previously made RTL for this decl, it must be an array
	 whose size was determined by the initializer.
	 The old address was a register; set that register now
	 to the proper address.  */
      if (DECL_RTL (decl) != 0)
	  if (GET_CODE (DECL_RTL (decl)) != MEM
	      || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
	  oldaddr = XEXP (DECL_RTL (decl), 0);

      /* Allocate a stack slot, rounding the bit size up to whole bytes.  */
	= assign_stack_temp (DECL_MODE (decl),
			     ((TREE_INT_CST_LOW (DECL_SIZE (decl))
			       + BITS_PER_UNIT - 1)
      MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));

      /* Set alignment we actually gave this decl.  */
      DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
			   : GET_MODE_BITSIZE (DECL_MODE (decl)));

	  /* Point the old address register at the new slot.  */
	  addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
	  if (addr != oldaddr)
	    emit_move_insn (oldaddr, addr);

      /* If this is a memory ref that contains aggregate components,
	 mark it as such for cse and loop optimize.  */
      MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));

      /* If this is in memory because of -ffloat-store,
	 set the volatile bit, to prevent optimizations from
	 undoing the effects.  */
      if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
	MEM_VOLATILE_P (DECL_RTL (decl)) = 1;

      /* Dynamic-size object: must push space on the stack.  */

      /* Record the stack pointer on entry to block, if have
	 not already done so.  */
      if (thisblock->data.block.stack_level == 0)
	  do_pending_stack_adjust ();
	  emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
			   &thisblock->data.block.stack_level,
			   thisblock->data.block.first_insn);
	  stack_block_stack = thisblock;

      /* Compute the variable's size, in bytes.  */
      size = expand_expr (size_binop (CEIL_DIV_EXPR,
				      size_int (BITS_PER_UNIT)),
			  NULL_RTX, VOIDmode, 0);

      /* Allocate space on the stack for the variable.  */
      address = allocate_dynamic_stack_space (size, NULL_RTX,

      /* Reference the variable indirect through that rtx.  */
      DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);

      /* If this is a memory ref that contains aggregate components,
	 mark it as such for cse and loop optimize.  */
      MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));

      /* Indicate the alignment we actually gave this variable.  */
#ifdef STACK_BOUNDARY
      DECL_ALIGN (decl) = STACK_BOUNDARY;
      DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;

  if (TREE_THIS_VOLATILE (decl))
    MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
#if 0 /* A variable is not necessarily unchanging
	 just because it is const.  RTX_UNCHANGING_P
	 means no change in the function,
	 not merely no change in the variable's scope.
	 It is correct to set RTX_UNCHANGING_P if the variable's scope
	 is the whole function.  There's no convenient way to test that.  */
  if (TREE_READONLY (decl))
    RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;

  /* If doing stupid register allocation, make sure life of any
     register variable starts here, at the start of its scope.  */
    use_variable (DECL_RTL (decl));
/* Generate code for the automatic variable declaration DECL.  For
   most variables this just means we give it a stack offset.  The
   compiler sometimes emits cleanups without variables and we will
   have to deal with those too.  */

bc_expand_decl (decl, cleanup)
  /* A cleanup with no variable.  */
  /* Only auto variables need any work.  */
  if (TREE_CODE (decl) != VAR_DECL || TREE_STATIC (decl) || DECL_EXTERNAL (decl))
  type = TREE_TYPE (decl);
  if (type == error_mark_node)
    /* Erroneous type: use an empty placeholder rtx.  */
    DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
  else if (DECL_SIZE (decl) == 0)
      /* Variable with incomplete type.  The stack offset herein will be
	 fixed later in expand_decl_init ().  */
      DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
  else if (TREE_CONSTANT (DECL_SIZE (decl)))
      /* Fixed-size variable: give it a local stack offset now.  */
      DECL_RTL (decl) = bc_allocate_local (TREE_INT_CST_LOW (DECL_SIZE (decl)) / BITS_PER_UNIT,
    /* Variable-sized: allocate a pointer slot; the space itself is
       created at initialization time.  */
    DECL_RTL (decl) = bc_allocate_variable_array (DECL_SIZE (decl));
/* Emit code to perform the initialization of a declaration DECL.  */

expand_decl_init (decl)
  int was_used = TREE_USED (decl);

  /* The bytecode compiler has its own initialization expander.  */
  if (output_bytecode)
      bc_expand_decl_init (decl);

  /* If this is a CONST_DECL, we don't have to generate any code, but
     if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
     to be set while in the obstack containing the constant.  If we don't
     do this, we can lose if we have functions nested three deep and the middle
     function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
     the innermost function is the first to expand that STRING_CST.  */
  if (TREE_CODE (decl) == CONST_DECL)
      if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
	expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
		     EXPAND_INITIALIZER);

  /* Static variables are initialized elsewhere, not by emitted code.  */
  if (TREE_STATIC (decl))

  /* Compute and store the initial value now.  */

  if (DECL_INITIAL (decl) == error_mark_node)
      /* On an erroneous initializer, store zero for scalar-like types
	 to limit cascading problems downstream.  */
      enum tree_code code = TREE_CODE (TREE_TYPE (decl));
      if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
	  || code == POINTER_TYPE)
	expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
  else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
      emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
      expand_assignment (decl, DECL_INITIAL (decl), 0, 0);

  /* Don't let the initialization count as "using" the variable.  */
  TREE_USED (decl) = was_used;

  /* Free any temporaries we made while initializing the decl.  */
  preserve_temp_slots (NULL_RTX);
/* Expand initialization for variable-sized types.  Allocate array
   using newlocalSI and set local variable, which is a pointer to the
   allocated storage.  */

bc_expand_variable_local_init (decl)
  /* Evaluate size expression and coerce to SI */
  bc_expand_expr (DECL_SIZE (decl));

  /* Type sizes are always (?) of TREE_CODE INTEGER_CST, so
     no coercion is necessary (?) */

  /* emit_typecode_conversion (preferred_typecode (TYPE_MODE (DECL_SIZE (decl)),
     TREE_UNSIGNED (DECL_SIZE (decl))), SIcode); */

  /* Emit code to allocate array */
  bc_emit_instruction (newlocalSI);

  /* Store array pointer in local variable.  This is the only instance
     where we actually want the address of the pointer to the
     variable-size block, rather than the pointer itself.  We avoid
     using expand_address() since that would cause the pointer to be
     pushed rather than its address.  Hence the hard-coded reference;
     notice also that the variable is always local (no global
     variable-size type variables).  */

  bc_load_localaddr (DECL_RTL (decl));
  bc_emit_instruction (storeP);
/* Emit code to initialize a declaration (bytecode compiler).  */

bc_expand_decl_init (decl)
  int org_stack_depth;

  /* Static initializers are handled elsewhere */

  if (TREE_STATIC (decl))

  /* Remember the original stack depth */
  org_stack_depth = stack_depth;

  /* If the type is variable-size, we first create its space (we ASSUME
     it CAN'T be static).  We do this regardless of whether there's an
     initializer assignment or not.  */

  if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    bc_expand_variable_local_init (decl);

  /* Expand initializer assignment */
  if (DECL_INITIAL (decl) == error_mark_node)
      enum tree_code code = TREE_CODE (TREE_TYPE (decl));

      if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
	  || code == POINTER_TYPE)
	/* NOTE(review): the argument order here -- (TREE_TYPE (decl),
	   decl, ...) -- differs from the RTL expand_assignment's
	   (target, source, ...) convention; confirm this matches the
	   bytecode expand_assignment's contract.  */
	expand_assignment (TREE_TYPE (decl), decl, 0, 0);
  else if (DECL_INITIAL (decl))
    expand_assignment (TREE_TYPE (decl), decl, 0, 0);

  /* Restore stack depth.
     NOTE(review): the argument (stack_depth - org_stack_depth) is
     negative under the guard (org_stack_depth > stack_depth); verify
     bc_adjust_stack expects a signed delta in this direction.  */
  if (org_stack_depth > stack_depth)
      bc_adjust_stack (stack_depth - org_stack_depth);
/* CLEANUP is an expression to be executed at exit from this binding contour;
   for example, in C++, it might call the destructor for this variable.

   If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
   either before or after calling `expand_decl_cleanup' but before compiling
   any subsequent expressions.  This is because CLEANUP may be expanded
   more than once, on different branches of execution.
   For the same reason, CLEANUP may not contain a CALL_EXPR
   except as its topmost node--else `preexpand_calls' would get confused.

   If CLEANUP is nonzero and DECL is zero, we record a cleanup
   that is not associated with any particular variable.  */

expand_decl_cleanup (decl, cleanup)
  struct nesting *thisblock = block_stack;

  /* Error if we are not in any block.  */

  /* Record the cleanup if there is one.  */
      thisblock->data.block.cleanups
	= temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
      /* If this block has a cleanup, it belongs in stack_block_stack.  */
      stack_block_stack = thisblock;
      /* Tell the exception-handling machinery about the new cleanup.  */
      (*interim_eh_hook) (NULL_TREE);
/* DECL is an anonymous union.  CLEANUP is a cleanup for DECL.
   DECL_ELTS is the list of elements that belong to DECL's type.
   In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup.
   Expands the union itself, then gives each member the union's rtl
   reinterpreted in the member's own mode.  */

expand_anon_union_decl (decl, cleanup, decl_elts)
     tree decl, cleanup, decl_elts;
  struct nesting *thisblock = block_stack;

  expand_decl_cleanup (decl, cleanup);
  x = DECL_RTL (decl);

      tree decl_elt = TREE_VALUE (decl_elts);
      tree cleanup_elt = TREE_PURPOSE (decl_elts);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));

      /* Propagate the union's alignment to the elements.  */
      DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);

      /* If the element has BLKmode and the union doesn't, the union is
         aligned such that the element doesn't need to have BLKmode, so
         change the element's mode to the appropriate one for its size.  */
      if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
	DECL_MODE (decl_elt) = mode
	  = mode_for_size (TREE_INT_CST_LOW (DECL_SIZE (decl_elt)),

      /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
         instead create a new MEM rtx with the proper mode.  */
      if (GET_CODE (x) == MEM)
	  if (mode == GET_MODE (x))
	    DECL_RTL (decl_elt) = x;
	      /* Same address, element's mode; copy the MEM flags too.  */
	      DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0)));
	      MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
	      RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
      else if (GET_CODE (x) == REG)
	  if (mode == GET_MODE (x))
	    DECL_RTL (decl_elt) = x;
	    /* A register can be viewed in another mode via SUBREG.  */
	    DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0);

      /* Record the cleanup if there is one.  */
	  thisblock->data.block.cleanups
	    = temp_tree_cons (decl_elt, cleanup_elt,
			      thisblock->data.block.cleanups);

      decl_elts = TREE_CHAIN (decl_elts);
/* Expand a list of cleanups LIST.
   Elements may be expressions or may be nested lists.

   If DONT_DO is nonnull, then any list-element
   whose TREE_PURPOSE matches DONT_DO is omitted.
   This is sometimes used to avoid a cleanup associated with
   a value that is being returned out of the scope.

   If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
   goto and handle protection regions specially in that case.

   If REACHABLE, we emit code, otherwise just inform the exception handling
   code about this finalization.  */

expand_cleanups (list, dont_do, in_fixup, reachable)
  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
	if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
	  /* A nested list of cleanups: recurse on it.  */
	  expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
	      /* Inform the exception handler of this cleanup.  */
	      (*interim_eh_hook) (TREE_VALUE (tail));

	      /* Cleanups may be run multiple times.  For example,
		 when exiting a binding contour, we expand the
		 cleanups associated with that contour.  When a goto
		 within that binding contour has a target outside that
		 contour, it will expand all cleanups from its scope to
		 the target.  Though the cleanups are expanded multiple
		 times, the control paths are non-overlapping so the
		 cleanups will not be executed twice.  */
	      expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
/* Move all cleanups from the current block_stack
   to the containing block_stack, where they are assumed to
   have been created.  If anything can cause a temporary to
   be created, but not expanded for more than one level of
   block_stacks, then this code will have to change.  */

  struct nesting *block = block_stack;
  struct nesting *outer = block->next;

  /* Splice this block's cleanup list onto the front of the
     enclosing block's list, then clear this block's list.  */
  outer->data.block.cleanups
    = chainon (block->data.block.cleanups,
	       outer->data.block.cleanups);
  block->data.block.cleanups = 0;
last_cleanup_this_contour ()
  /* With no current contour there can be no cleanups.  */
  if (block_stack == 0)
  /* Return the head of the innermost contour's cleanup list.  */
  return block_stack->data.block.cleanups;
/* Return 1 if there are any pending cleanups at this point.
   If THIS_CONTOUR is nonzero, check the current contour as well.
   Otherwise, look only at the contours that enclose this one.  */

any_pending_cleanups (this_contour)
  struct nesting *block;

  if (block_stack == 0)

  if (this_contour && block_stack->data.block.cleanups != NULL)

  if (block_stack->data.block.cleanups == 0
      && (block_stack->data.block.outer_cleanups == 0
	  || block_stack->data.block.outer_cleanups == empty_cleanup_list

  /* Scan the enclosing contours for any recorded cleanup.  */
  for (block = block_stack->next; block; block = block->next)
    if (block->data.block.cleanups != 0)
/* Enter a case (Pascal) or switch (C) statement.
   Push a block onto case_stack and nesting_stack
   to accumulate the case-labels that are seen
   and to record the labels generated for the statement.

   EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
   Otherwise, this construct is transparent for `exit_something'.

   EXPR is the index-expression to be dispatched on.
   TYPE is its nominal type.  We could simply convert EXPR to this type,
   but instead we take short cuts.  */

expand_start_case (exit_flag, expr, type, printname)
  register struct nesting *thiscase = ALLOC_NESTING ();

  /* Make an entry on case_stack for the case we are entering.  */

  thiscase->next = case_stack;
  thiscase->all = nesting_stack;
  thiscase->depth = ++nesting_depth;
  thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
  thiscase->data.case_stmt.case_list = 0;
  thiscase->data.case_stmt.index_expr = expr;
  thiscase->data.case_stmt.nominal_type = type;
  thiscase->data.case_stmt.default_label = 0;
  thiscase->data.case_stmt.num_ranges = 0;
  thiscase->data.case_stmt.printname = printname;
  thiscase->data.case_stmt.seenlabel = 0;
  case_stack = thiscase;
  nesting_stack = thiscase;

  /* The bytecode compiler finishes the setup its own way.  */
  if (output_bytecode)
      bc_expand_start_case (thiscase, expr, type, printname);

  do_pending_stack_adjust ();

  /* Make sure case_stmt.start points to something that won't
     need any transformation before expand_end_case.  */
  if (GET_CODE (get_last_insn ()) != NOTE)
    emit_note (NULL_PTR, NOTE_INSN_DELETED);

  thiscase->data.case_stmt.start = get_last_insn ();
/* Enter a case statement.  It is assumed that the caller has pushed
   the current context onto the case stack.  */

bc_expand_start_case (thiscase, expr, type, printname)
     struct nesting *thiscase;
  /* Push the index value, converted to the nominal comparison type.  */
  bc_expand_expr (expr);
  bc_expand_conversion (TREE_TYPE (expr), type);

  /* For cases, the skip is a place we jump to that's emitted after
     the size of the jump table is known.  */

  thiscase->data.case_stmt.skip_label = gen_label_rtx ();
  bc_emit_bytecode (jump);
  bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
/* Start a "dummy case statement" within which case labels are invalid
   and are not connected to any larger real case statement.
   This can be used if you don't want to let a case statement jump
   into the middle of certain kinds of constructs.  */

expand_start_case_dummy ()
  register struct nesting *thiscase = ALLOC_NESTING ();

  /* Make an entry on case_stack for the dummy.  Note that
     case_stmt.start stays 0, which is how pushcase recognizes
     that this is not a real case statement.  */

  thiscase->next = case_stack;
  thiscase->all = nesting_stack;
  thiscase->depth = ++nesting_depth;
  thiscase->exit_label = 0;
  thiscase->data.case_stmt.case_list = 0;
  thiscase->data.case_stmt.start = 0;
  thiscase->data.case_stmt.nominal_type = 0;
  thiscase->data.case_stmt.default_label = 0;
  thiscase->data.case_stmt.num_ranges = 0;
  case_stack = thiscase;
  nesting_stack = thiscase;
/* End a dummy case statement.  */

expand_end_case_dummy ()
  POPSTACK (case_stack);
/* Return the data type of the index-expression
   of the innermost case statement, or null if none.  */

case_index_expr_type ()
    return TREE_TYPE (case_stack->data.case_stmt.index_expr);
/* Accumulate one case or default label inside a case or switch statement.
   VALUE is the value of the case (a null pointer, for a default label).
   The function CONVERTER, when applied to arguments T and V,
   converts the value V to the type T.

   If not currently inside a case or switch statement, return 1 and do
   nothing.  The caller will print a language-specific error message.
   If VALUE is a duplicate or overlaps, return 2 and do nothing
   except store the (first) duplicate node in *DUPLICATE.
   If VALUE is out of range, return 3 and do nothing.
   If we are jumping into the scope of a cleanup or var-sized array, return 5.
   Return 0 on success.

   Extended to handle range statements.  */

pushcase (value, converter, label, duplicate)
     register tree value;
     tree (*converter) PROTO((tree, tree));
     register tree label;
  register struct case_node **l;
  register struct case_node *n;

  /* The bytecode compiler keeps its own, simpler case list.  */
  if (output_bytecode)
    return bc_pushcase (value, label);

  /* Fail if not inside a real case statement.  */
  if (! (case_stack && case_stack->data.case_stmt.start))

  /* Jumping into the scope of a cleanup or variable-sized array
     is detected by comparing nesting depths.  */
  if (stack_block_stack
      && stack_block_stack->depth > case_stack->depth)

  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
  nominal_type = case_stack->data.case_stmt.nominal_type;

  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
  if (index_type == error_mark_node)

  /* Convert VALUE to the type in which the comparisons are nominally done.  */
    value = (*converter) (nominal_type, value);

  /* If this is the first label, warn if any insns have been emitted.  */
  if (case_stack->data.case_stmt.seenlabel == 0)
      for (insn = case_stack->data.case_stmt.start;
	   insn = NEXT_INSN (insn))
	  if (GET_CODE (insn) == CODE_LABEL)
	  /* Notes and USE insns don't count as real code.  */
	  if (GET_CODE (insn) != NOTE
	      && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
	      warning ("unreachable code at beginning of %s",
		       case_stack->data.case_stmt.printname);
  case_stack->data.case_stmt.seenlabel = 1;

  /* Fail if this value is out of range for the actual type of the index
     (which may be narrower than NOMINAL_TYPE).  */
  if (value != 0 && ! int_fits_type_p (value, index_type))

  /* Fail if this is a duplicate or overlaps another entry.  */
      if (case_stack->data.case_stmt.default_label != 0)
	  *duplicate = case_stack->data.case_stmt.default_label;
      case_stack->data.case_stmt.default_label = label;
    /* A single value is stored as a degenerate range.  */
    return add_case_node (value, value, label, duplicate);
  expand_label (label);
/* Like pushcase but this case applies to all values
   between VALUE1 and VALUE2 (inclusive).
   The return value is the same as that of pushcase
   but there is one additional error code:
   4 means the specified range was empty.  */

pushcase_range (value1, value2, converter, label, duplicate)
     register tree value1, value2;
     tree (*converter) PROTO((tree, tree));
     register tree label;
  register struct case_node **l;
  register struct case_node *n;

  /* Fail if not inside a real case statement.  */
  if (! (case_stack && case_stack->data.case_stmt.start))

  /* Jumping into the scope of a cleanup or variable-sized array
     is detected by comparing nesting depths.  */
  if (stack_block_stack
      && stack_block_stack->depth > case_stack->depth)

  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
  nominal_type = case_stack->data.case_stmt.nominal_type;

  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
  if (index_type == error_mark_node)

  /* If this is the first label, warn if any insns have been emitted.  */
  if (case_stack->data.case_stmt.seenlabel == 0)
      for (insn = case_stack->data.case_stmt.start;
	   insn = NEXT_INSN (insn))
	  if (GET_CODE (insn) == CODE_LABEL)
	  /* Notes and USE insns don't count as real code.  */
	  if (GET_CODE (insn) != NOTE
	      && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
	      warning ("unreachable code at beginning of %s",
		       case_stack->data.case_stmt.printname);
  case_stack->data.case_stmt.seenlabel = 1;

  /* Convert VALUEs to type in which the comparisons are nominally done.
     A null bound stands for the corresponding extreme of the index type.  */
  if (value1 == 0)  /* Negative infinity.  */
    value1 = TYPE_MIN_VALUE(index_type);
  value1 = (*converter) (nominal_type, value1);

  if (value2 == 0)  /* Positive infinity.  */
    value2 = TYPE_MAX_VALUE(index_type);
  value2 = (*converter) (nominal_type, value2);

  /* Fail if these values are out of range.  */
  if (! int_fits_type_p (value1, index_type))

  if (! int_fits_type_p (value2, index_type))

  /* Fail if the range is empty.  */
  if (tree_int_cst_lt (value2, value1))

  return add_case_node (value1, value2, label, duplicate);
/* Do the actual insertion of a case label for pushcase and pushcase_range
   into case_stack->data.case_stmt.case_list.  Use an AVL tree to avoid
   slowdown for large switch statements.  */

add_case_node (low, high, label, duplicate)
  struct case_node *p, **q, *r;

  /* Walk down the tree to find the insertion point, checking
     for overlap with existing ranges on the way.  */
  q = &case_stack->data.case_stmt.case_list;

      /* Keep going past elements distinctly greater than HIGH.  */
      if (tree_int_cst_lt (high, p->low))
      /* or distinctly less than LOW.  */
      else if (tree_int_cst_lt (p->high, low))
	  /* We have an overlap; this is an error.  */
	  *duplicate = p->code_label;

  /* Add this label to the chain, and succeed.
     Copy LOW, HIGH so they are on temporary rather than momentary
     obstack and will thus survive till the end of the case statement.  */

  r = (struct case_node *) oballoc (sizeof (struct case_node));
  r->low = copy_node (low);

  /* If the bounds are equal, turn this into the one-value case.  */

  if (tree_int_cst_equal (low, high))
      r->high = copy_node (high);
      case_stack->data.case_stmt.num_ranges++;

  r->code_label = label;
  expand_label (label);

  /* Rebalance the AVL tree on the way back up toward the root.
     The embedded assignments in the conditions below (e.g.
     `if (p->left = s = r->right)') are intentional: each one both
     relinks a subtree and tests whether it is non-null.  */

      struct case_node *s;

      if (! (b = p->balance))
	/* Growth propagation from left side.  */

	  /* Single right rotation.  */
	  if (p->left = s = r->right)

	  case_stack->data.case_stmt.case_list = r;

	  /* r->balance == +1 */

	      /* Double (left-right) rotation.  */
	      struct case_node *t = r->right;

	      if (p->left = s = t->right)
	      if (r->right = s = t->left)

	      case_stack->data.case_stmt.case_list = t;

	  /* p->balance == +1; growth of left side balances the node.  */

      if (! (b = p->balance))
	/* Growth propagation from right side.  */

	  /* Single left rotation.  */
	  if (p->right = s = r->left)

	  case_stack->data.case_stmt.case_list = r;

	  /* r->balance == -1 */

	      /* Double (right-left) rotation.  */
	      struct case_node *t = r->left;

	      if (p->right = s = t->left)
	      if (r->left = s = t->right)

	      case_stack->data.case_stmt.case_list = t;

	  /* p->balance == -1; growth of right side balances the node.  */
/* Accumulate one case or default label; VALUE is the value of the
   case, or nil for a default label.  If not currently inside a case,
   return 1 and do nothing.  If VALUE is a duplicate or overlaps, return
   2 and do nothing.  If VALUE is out of range, return 3 and do nothing.
   Return 0 on success.  This function is a leftover from the earlier
   bytecode compiler, which was based on gcc 1.37.  It should be
   merged into pushcase.  */

bc_pushcase (value, label)
  struct nesting *thiscase = case_stack;
  struct case_node *case_label, *new_label;

  /* Fail if duplicate, overlap, or out of type range.  */

  value = convert (thiscase->data.case_stmt.nominal_type, value);
  if (! int_fits_type_p (value, thiscase->data.case_stmt.nominal_type))

  /* Scan the sorted list for the insertion point.  */
  for (case_label = thiscase->data.case_stmt.case_list;
       case_label->left; case_label = case_label->left)
    if (! tree_int_cst_lt (case_label->left->high, value))

  /* NOTE(review): `&&' binds tighter than `||' here, so this reads as
     (a && b) || c -- confirm that grouping is the intended overlap
     test.  */
  if (case_label != thiscase->data.case_stmt.case_list
      && ! tree_int_cst_lt (case_label->high, value)
      || (case_label->left && ! tree_int_cst_lt (value, case_label->left->low)))

  /* Link a new node for this value into the list.  */
  new_label = (struct case_node *) oballoc (sizeof (struct case_node));
  new_label->low = new_label->high = copy_node (value);
  new_label->code_label = label;
  new_label->left = case_label->left;

  case_label->left = new_label;
  thiscase->data.case_stmt.num_ranges++;

  /* For a default label, fail if one has already been recorded.  */
  if (thiscase->data.case_stmt.default_label)
  thiscase->data.case_stmt.default_label = label;

  expand_label (label);
/* Returns the number of possible values of TYPE.
   Returns -1 if the number is unknown or variable.
   Returns -2 if the number does not fit in a HOST_WIDE_INT.
   Sets *SPARENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
   do not increase monotonically (there may be duplicates);
   to 1 if the values increase monotonically, but not always by 1;
   otherwise sets it to 0.  */

all_cases_count (type, spareness)
  HOST_WIDE_INT count, count_high = 0;

  switch (TREE_CODE (type))

      /* One value per bit pattern of a char.  */
      count = 1 << BITS_PER_UNIT;

      /* Non-constant bounds mean the count is unknown.  */
      if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
	  || TREE_CODE (TYPE_MAX_VALUE (type)) != INTEGER_CST)

      /* count
	 = TREE_INT_CST_LOW (TYPE_MAX_VALUE (type))
	 - TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + 1
	 but with overflow checking.  */
      tree mint = TYPE_MIN_VALUE (type);
      tree maxt = TYPE_MAX_VALUE (type);
      HOST_WIDE_INT lo, hi;
      neg_double(TREE_INT_CST_LOW (mint), TREE_INT_CST_HIGH (mint),
      add_double(TREE_INT_CST_LOW (maxt), TREE_INT_CST_HIGH (maxt),
      add_double (lo, hi, 1, 0, &lo, &hi);
      if (hi != 0 || lo < 0)

      /* Enumeration: count the literals, noting any gap from a
	 dense min-based numbering.  */
      for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
	  if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
	      || TREE_CODE (TREE_VALUE (t)) != INTEGER_CST
	      || TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + count
	      != TREE_INT_CST_LOW (TREE_VALUE (t)))

      if (*spareness == 1)
	  /* Verify the literal values really are strictly increasing;
	     otherwise the sparseness must be upgraded.  */
	  tree prev = TREE_VALUE (TYPE_VALUES (type));
	  for (t = TYPE_VALUES (type); t = TREE_CHAIN (t), t != NULL_TREE; )
	      if (! tree_int_cst_lt (prev, TREE_VALUE (t)))
	      prev = TREE_VALUE (t);
/* Bit-vector helpers used to record which case values have been seen.
   ARRAY is an array of chars treated as a bitstring; INDEX is a
   nonnegative bit number.  */
#define BITARRAY_TEST(ARRAY, INDEX) \
  ((ARRAY)[(unsigned)(INDEX) / HOST_BITS_PER_CHAR]\
   & (1 << ((unsigned)(INDEX) % HOST_BITS_PER_CHAR)))
#define BITARRAY_SET(ARRAY, INDEX) \
  ((ARRAY)[(unsigned)(INDEX) / HOST_BITS_PER_CHAR]\
   |= 1 << ((unsigned)(INDEX) % HOST_BITS_PER_CHAR))
/* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
   with the case values we have seen, assuming the case expression
   has the given TYPE.
   SPARSENESS is as determined by all_cases_count.

   The time needed is proportional to COUNT, unless
   SPARSENESS is 2, in which case quadratic time is needed.  */

mark_seen_cases (type, cases_seen, count, sparseness)
     unsigned char *cases_seen;
  tree next_node_to_try = NULL_TREE;
  long next_node_offset = 0;

  register struct case_node *n, *root = case_stack->data.case_stmt.case_list;
  /* Scratch INTEGER_CST used to step through candidate case values.  */
  tree val = make_node (INTEGER_CST);
  TREE_TYPE (val) = type;
  else if (sparseness == 2)
      /* This less efficient loop is only needed to handle
	 duplicate case values (multiple enum constants
	 with the same value).  */
      TREE_TYPE (val) = TREE_TYPE (root->low);
      for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
	   t = TREE_CHAIN (t), xlo++)
	  TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
	  TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
	      /* Keep going past elements distinctly greater than VAL.  */
	      if (tree_int_cst_lt (val, n->low))
	      /* or distinctly less than VAL.  */
	      else if (tree_int_cst_lt (n->high, val))
		  /* We have found a matching range.  */
		  BITARRAY_SET (cases_seen, xlo);

      /* Flatten the case tree into a list and walk every value of
	 every range.  */
      case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
      for (n = root; n; n = n->right)
	  TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
	  TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
	  while ( ! tree_int_cst_lt (n->high, val))
	      /* Calculate (into xlo) the "offset" of the integer (val).
		 The element with lowest value has offset 0, the next smallest
		 element has offset 1, etc.  */

	      HOST_WIDE_INT xlo, xhi;

	      if (sparseness && TYPE_VALUES (type) != NULL_TREE)
		  /* The TYPE_VALUES will be in increasing order, so
		     starting searching where we last ended.  */
		  t = next_node_to_try;
		  xlo = next_node_offset;

		      t = TYPE_VALUES (type);
		      if (tree_int_cst_equal (val, TREE_VALUE (t)))
			  next_node_to_try = TREE_CHAIN (t);
			  next_node_offset = xlo + 1;
		      /* Wrapped all the way around: VAL not found.  */
		      if (t == next_node_to_try)

		  /* Dense case: offset is VAL minus the type minimum,
		     computed with double-word overflow checking.  */
		  t = TYPE_MIN_VALUE (type);

		  neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
		  add_double (xlo, xhi,
			      TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),

	      if (xhi == 0 && xlo >= 0 && xlo < count)
		BITARRAY_SET (cases_seen, xlo);
	      /* Advance VAL by one and continue through the range.  */
	      add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
			  &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
4684 /* Called when the index of a switch statement is an enumerated type
4685 and there is no default label.
4687 Checks that all enumeration literals are covered by the case
4688 expressions of a switch. Also, warn if there are any extra
4689 switch cases that are *not* elements of the enumerated type.
4691 If all enumeration literals were covered by the case expressions,
4692 turn one of the expressions into the default expression since it should
4693 not be possible to fall through such a switch. */
4696 check_for_full_enumeration_handling (type)
4699 register struct case_node *n;
4700 register struct case_node **l;
4701 register tree chain;
4704 /* True iff the selector type is a numbered set mode. */
4707 /* The number of possible selector values. */
4710 /* For each possible selector value, one iff it has been matched
4711 by a case value alternative. */
4712 unsigned char *cases_seen;
4714 /* The allocated size of cases_seen, in chars. */
/* Bytecode compilation has its own checker.  */
4718 if (output_bytecode)
4720 bc_check_for_full_enumeration_handling (type);
4727 size = all_cases_count (type, &sparseness);
4728 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
/* The 600000 cap keeps pathological enum ranges from allocating a huge
   bitstring; if allocation fails we simply skip the coverage warnings.  */
4730 if (size > 0 && size < 600000
4731 /* We deliberately use malloc here - not xmalloc. */
4732 && (cases_seen = (unsigned char *) malloc (bytes_needed)) != NULL)
4735 tree v = TYPE_VALUES (type);
4736 bzero (cases_seen, bytes_needed);
4738 /* The time complexity of this code is normally O(N), where
4739 N is the number of members in the enumerated type.
4740 However, if type is an ENUMERAL_TYPE whose values do not
4741 increase monotonically, O(N*log(N)) time may be needed. */
4743 mark_seen_cases (type, cases_seen, size, sparseness);
/* Warn once per enumerator that no case label matched.  */
4745 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
4747 if (BITARRAY_TEST(cases_seen, i) == 0)
4748 warning ("enumeration value `%s' not handled in switch",
4749 IDENTIFIER_POINTER (TREE_PURPOSE (v)))
4755 /* Now we go the other way around; we warn if there are case
4756 expressions that don't correspond to enumerators. This can
4757 occur since C and C++ don't enforce type-checking of
4758 assignments to enumeration variables. */
/* Linearize the case tree so it can be scanned with ->right links.  */
4760 if (case_stack->data.case_stmt.case_list
4761 && case_stack->data.case_stmt.case_list->left)
4762 case_stack->data.case_stmt.case_list
4763 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
4765 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
/* Look for an enumerator equal to this case's low bound.  */
4767 for (chain = TYPE_VALUES (type);
4768 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
4769 chain = TREE_CHAIN (chain))
4774 if (TYPE_NAME (type) == 0)
4775 warning ("case value `%d' not in enumerated type",
4776 TREE_INT_CST_LOW (n->low));
4778 warning ("case value `%d' not in enumerated type `%s'",
4779 TREE_INT_CST_LOW (n->low),
4780 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4783 : DECL_NAME (TYPE_NAME (type))));
/* For a case range, also verify the high bound is an enumerator.  */
4785 if (!tree_int_cst_equal (n->low, n->high))
4787 for (chain = TYPE_VALUES (type);
4788 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
4789 chain = TREE_CHAIN (chain))
4794 if (TYPE_NAME (type) == 0)
4795 warning ("case value `%d' not in enumerated type",
4796 TREE_INT_CST_LOW (n->high));
4798 warning ("case value `%d' not in enumerated type `%s'",
4799 TREE_INT_CST_LOW (n->high),
4800 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4803 : DECL_NAME (TYPE_NAME (type))));
4809 /* ??? This optimization is disabled because it causes valid programs to
4810 fail. ANSI C does not guarantee that an expression with enum type
4811 will have a value that is the same as one of the enumeration literals. */
4813 /* If all values were found as case labels, make one of them the default
4814 label. Thus, this switch will never fall through. We arbitrarily pick
4815 the last one to make the default since this is likely the most
4816 efficient choice. */
4820 for (l = &case_stack->data.case_stmt.case_list;
4825 case_stack->data.case_stmt.default_label = (*l)->code_label;
4832 /* Check that all enumeration literals are covered by the case
4833 expressions of a switch. Also warn if there are any cases
4834 that are not elements of the enumerated type.
   Bytecode counterpart of check_for_full_enumeration_handling; relies on
   the bytecode case list being sorted (see the tree_int_cst_lt scan).  */
4837 bc_check_for_full_enumeration_handling (type)
4840 struct nesting *thiscase = case_stack;
4841 struct case_node *c;
4844 /* Check for enums not handled. */
4845 for (e = TYPE_VALUES (type); e; e = TREE_CHAIN (e))
/* Advance through the sorted case list until C could contain E.  */
4847 for (c = thiscase->data.case_stmt.case_list->left;
4848 c && tree_int_cst_lt (c->high, TREE_VALUE (e));
4851 if (! (c && tree_int_cst_equal (c->low, TREE_VALUE (e))))
4852 warning ("enumerated value `%s' not handled in switch",
4853 IDENTIFIER_POINTER (TREE_PURPOSE (e)));
4856 /* Check for cases not in the enumeration. */
4857 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
4859 for (e = TYPE_VALUES (type);
4860 e && !tree_int_cst_equal (c->low, TREE_VALUE (e));
4864 warning ("case value `%d' not in enumerated type `%s'",
4865 TREE_INT_CST_LOW (c->low),
4866 IDENTIFIER_POINTER (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
4868 : DECL_NAME (TYPE_NAME (type))));
4872 /* Terminate a case (Pascal) or switch (C) statement
4873 in which ORIG_INDEX is the expression to be tested.
4874 Generate the code to test it and jump to the right place.
   Chooses among three strategies: a direct jump (constant index), a
   binary decision tree of compares, or a casesi/tablejump dispatch
   table, then pops the case nesting level.  */
4877 expand_end_case (orig_index)
4880 tree minval, maxval, range, orig_minval;
4881 rtx default_label = 0;
4882 register struct case_node *n;
4890 register struct nesting *thiscase = case_stack;
4891 tree index_expr, index_type;
/* Bytecode compilation is handled separately.  */
4894 if (output_bytecode)
4896 bc_expand_end_case (orig_index);
4900 table_label = gen_label_rtx ();
4901 index_expr = thiscase->data.case_stmt.index_expr;
4902 index_type = TREE_TYPE (index_expr);
4903 unsignedp = TREE_UNSIGNED (index_type);
4905 do_pending_stack_adjust ();
4907 /* An ERROR_MARK occurs for various reasons including invalid data type. */
4908 if (index_type != error_mark_node)
4910 /* If switch expression was an enumerated type, check that all
4911 enumeration literals are covered by the cases.
4912 No sense trying this if there's a default case, however. */
4914 if (!thiscase->data.case_stmt.default_label
4915 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
4916 && TREE_CODE (index_expr) != INTEGER_CST)
4917 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
4919 /* If this is the first label, warn if any insns have been emitted. */
4920 if (thiscase->data.case_stmt.seenlabel == 0)
4923 for (insn = get_last_insn ();
4924 insn != case_stack->data.case_stmt.start;
4925 insn = PREV_INSN (insn))
/* NOTEs and USE insns generate no real code, so ignore them.  */
4926 if (GET_CODE (insn) != NOTE
4927 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn))!= USE))
4929 warning ("unreachable code at beginning of %s",
4930 case_stack->data.case_stmt.printname);
4935 /* If we don't have a default-label, create one here,
4936 after the body of the switch. */
4937 if (thiscase->data.case_stmt.default_label == 0)
4939 thiscase->data.case_stmt.default_label
4940 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4941 expand_label (thiscase->data.case_stmt.default_label);
4943 default_label = label_rtx (thiscase->data.case_stmt.default_label);
4945 before_case = get_last_insn ();
/* Linearize the case tree so it can be walked with ->right links.  */
4947 if (thiscase->data.case_stmt.case_list
4948 && thiscase->data.case_stmt.case_list->left)
4949 thiscase->data.case_stmt.case_list
4950 = case_tree2list(thiscase->data.case_stmt.case_list, 0);
4952 /* Simplify the case-list before we count it. */
4953 group_case_nodes (thiscase->data.case_stmt.case_list);
4955 /* Get upper and lower bounds of case values.
4956 Also convert all the case values to the index expr's data type. */
4959 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4961 /* Check low and high label values are integers. */
4962 if (TREE_CODE (n->low) != INTEGER_CST)
4964 if (TREE_CODE (n->high) != INTEGER_CST)
4967 n->low = convert (index_type, n->low);
4968 n->high = convert (index_type, n->high);
4970 /* Count the elements and track the largest and smallest
4971 of them (treating them as signed even if they are not). */
4979 if (INT_CST_LT (n->low, minval))
4981 if (INT_CST_LT (maxval, n->high))
4984 /* A range counts double, since it requires two compares. */
4985 if (! tree_int_cst_equal (n->low, n->high))
/* Remember the original minimum; MINVAL may be rebiased to zero below,
   but the dispatch-table offsets must use the original value.  */
4989 orig_minval = minval;
4991 /* Compute span of values. */
4993 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
/* No cases at all: evaluate the index for side effects only, then
   jump straight to the default label.  */
4997 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
4999 emit_jump (default_label);
5002 /* If range of values is much bigger than number of values,
5003 make a sequence of conditional branches instead of a dispatch.
5004 If the switch-index is a constant, do it this way
5005 because we can optimize it. */
5007 #ifndef CASE_VALUES_THRESHOLD
5009 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
5011 /* If machine does not have a case insn that compares the
5012 bounds, this means extra overhead for dispatch tables
5013 which raises the threshold for using them. */
5014 #define CASE_VALUES_THRESHOLD 5
5015 #endif /* HAVE_casesi */
5016 #endif /* CASE_VALUES_THRESHOLD */
/* Decision-tree path: the range overflows a word, there are too few
   cases, the table would be too sparse, the target cannot emit a
   dispatch table, or the index will fold to a constant.  */
5018 else if (TREE_INT_CST_HIGH (range) != 0
5019 || count < CASE_VALUES_THRESHOLD
5020 || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
5022 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5025 || TREE_CODE (index_expr) == INTEGER_CST
5026 /* These will reduce to a constant. */
5027 || (TREE_CODE (index_expr) == CALL_EXPR
5028 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
5029 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
5030 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
5031 || (TREE_CODE (index_expr) == COMPOUND_EXPR
5032 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
5034 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5036 /* If the index is a short or char that we do not have
5037 an insn to handle comparisons directly, convert it to
5038 a full integer now, rather than letting each comparison
5039 generate the conversion. */
5041 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5042 && (cmp_optab->handlers[(int) GET_MODE(index)].insn_code
5043 == CODE_FOR_nothing))
5045 enum machine_mode wider_mode;
5046 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5047 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5048 if (cmp_optab->handlers[(int) wider_mode].insn_code
5049 != CODE_FOR_nothing)
5051 index = convert_to_mode (wider_mode, index, unsignedp);
5057 do_pending_stack_adjust ();
5059 index = protect_from_queue (index, 0);
5060 if (GET_CODE (index) == MEM)
5061 index = copy_to_reg (index);
5062 if (GET_CODE (index) == CONST_INT
5063 || TREE_CODE (index_expr) == INTEGER_CST)
5065 /* Make a tree node with the proper constant value
5066 if we don't already have one. */
5067 if (TREE_CODE (index_expr) != INTEGER_CST)
5070 = build_int_2 (INTVAL (index),
5071 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5072 index_expr = convert (index_type, index_expr);
5075 /* For constant index expressions we need only
5076 issue an unconditional branch to the appropriate
5077 target code. The job of removing any unreachable
5078 code is left to the optimisation phase if the
5079 "-O" option is specified. */
5080 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5081 if (! tree_int_cst_lt (index_expr, n->low)
5082 && ! tree_int_cst_lt (n->high, index_expr))
5086 emit_jump (label_rtx (n->code_label));
5088 emit_jump (default_label);
5092 /* If the index expression is not constant we generate
5093 a binary decision tree to select the appropriate
5094 target code. This is done as follows:
5096 The list of cases is rearranged into a binary tree,
5097 nearly optimal assuming equal probability for each case.
5099 The tree is transformed into RTL, eliminating
5100 redundant test conditions at the same time.
5102 If program flow could reach the end of the
5103 decision tree an unconditional jump to the
5104 default code is emitted. */
/* Heuristic case costs only help for plain integer switches; an
   enumeral index is unlikely to be scanning text.  */
5107 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
5108 && estimate_case_costs (thiscase->data.case_stmt.case_list));
5109 balance_case_nodes (&thiscase->data.case_stmt.case_list,
5111 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5112 default_label, index_type);
5113 emit_jump_if_reachable (default_label);
/* Dispatch-table path, first trying the casesi pattern (which does
   its own bounds check), then tablejump.  */
5122 enum machine_mode index_mode = SImode;
5123 int index_bits = GET_MODE_BITSIZE (index_mode);
5125 enum machine_mode op_mode;
5127 /* Convert the index to SImode. */
5128 if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
5129 > GET_MODE_BITSIZE (index_mode))
5131 enum machine_mode omode = TYPE_MODE (index_type);
5132 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
5134 /* We must handle the endpoints in the original mode. */
5135 index_expr = build (MINUS_EXPR, index_type,
5136 index_expr, minval);
5137 minval = integer_zero_node;
5138 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
/* Unsigned compare: values outside [0, range] go to the default.  */
5139 emit_cmp_insn (rangertx, index, LTU, NULL_RTX, omode, 1, 0);
5140 emit_jump_insn (gen_bltu (default_label));
5141 /* Now we can safely truncate. */
5142 index = convert_to_mode (index_mode, index, 0);
5146 if (TYPE_MODE (index_type) != index_mode)
5148 index_expr = convert (type_for_size (index_bits, 0),
5150 index_type = TREE_TYPE (index_expr);
5153 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5156 index = protect_from_queue (index, 0);
5157 do_pending_stack_adjust ();
/* Force each casesi operand into a form its predicate accepts.  */
5159 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][0];
5160 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][0])
5162 index = copy_to_mode_reg (op_mode, index);
5164 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
5166 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][1];
5167 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][1])
5169 op1 = copy_to_mode_reg (op_mode, op1);
5171 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
5173 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][2];
5174 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][2])
5176 op2 = copy_to_mode_reg (op_mode, op2);
5178 emit_jump_insn (gen_casesi (index, op1, op2,
5179 table_label, default_label));
5183 #ifdef HAVE_tablejump
5184 if (! win && HAVE_tablejump)
/* Rebias the index so the table starts at zero, then let
   do_tablejump emit the bounds check and indirect jump.  */
5186 index_expr = convert (thiscase->data.case_stmt.nominal_type,
5187 fold (build (MINUS_EXPR, index_type,
5188 index_expr, minval)));
5189 index_type = TREE_TYPE (index_expr);
5190 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5192 index = protect_from_queue (index, 0);
5193 do_pending_stack_adjust ();
5195 do_tablejump (index, TYPE_MODE (index_type),
5196 expand_expr (range, NULL_RTX, VOIDmode, 0),
5197 table_label, default_label);
5204 /* Get table of labels to jump to, in order of case index. */
5206 ncases = TREE_INT_CST_LOW (range) + 1;
5207 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
5208 bzero ((char *) labelvec, ncases * sizeof (rtx));
5210 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
/* I is this case's offset from the smallest case value.  */
5212 register HOST_WIDE_INT i
5213 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);
5218 = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
5219 if (i + TREE_INT_CST_LOW (orig_minval)
5220 == TREE_INT_CST_LOW (n->high))
5226 /* Fill in the gaps with the default. */
5227 for (i = 0; i < ncases; i++)
5228 if (labelvec[i] == 0)
5229 labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);
5231 /* Output the table */
5232 emit_label (table_label);
5234 /* This would be a lot nicer if CASE_VECTOR_PC_RELATIVE
5235 were an expression, instead of an #ifdef/#ifndef. */
5237 #ifdef CASE_VECTOR_PC_RELATIVE
5241 emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
5242 gen_rtx (LABEL_REF, Pmode, table_label),
5243 gen_rtvec_v (ncases, labelvec)));
5245 emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
5246 gen_rtvec_v (ncases, labelvec)));
5248 /* If the case insn drops through the table,
5249 after the table we must jump to the default-label.
5250 Otherwise record no drop-through after the table. */
5251 #ifdef CASE_DROPS_THROUGH
5252 emit_jump (default_label);
/* Move the generated compare/dispatch code to before the switch body,
   squeezing line-number notes out of the moved region first.  */
5258 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
5259 reorder_insns (before_case, get_last_insn (),
5260 thiscase->data.case_stmt.start);
5263 if (thiscase->exit_label)
5264 emit_label (thiscase->exit_label);
5266 POPSTACK (case_stack);
5271 /* Convert the tree NODE into a list linked by the right field, with the left
5272 field zeroed. RIGHT is used for recursion; it is a list to be placed
5273 rightmost in the resulting list.
   Returns the head of the resulting right-linked list.  */
5275 static struct case_node *
5276 case_tree2list (node, right)
5277 struct case_node *node, *right;
5279 struct case_node *left;
/* First flatten the right subtree onto RIGHT, then hang the result off
   this node.  */
5282 right = case_tree2list (node->right, right);
5284 node->right = right;
/* Intentional assignment-in-condition: grab the left subtree and test
   it in one step.  */
5285 if (left = node->left)
5288 return case_tree2list (left, node);
5294 /* Terminate a case statement. EXPR is the original index
   expression (bytecode counterpart of expand_end_case): emits the
   indexed-jump opcode, its literal arguments, and the jump table, then
   pops the case nesting level.  */
5298 bc_expand_end_case (expr)
5301 struct nesting *thiscase = case_stack;
5302 enum bytecode_opcode opcode;
5303 struct bc_label *jump_label;
5304 struct case_node *c;
/* The switch body falls through to here; skip over the dispatch code.  */
5306 bc_emit_bytecode (jump);
5307 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));
5309 #ifdef DEBUG_PRINT_CODE
5310 fputc ('\n', stderr);
5313 /* Now that the size of the jump table is known, emit the actual
5314 indexed jump instruction. */
5315 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));
/* Pick the case opcode by index width (SI vs. DI) and signedness.  */
5317 opcode = TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode
5318 ? TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseSU : caseSI
5319 : TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseDU : caseDI;
5321 bc_emit_bytecode (opcode);
5323 /* Now emit the case instructions literal arguments, in order.
5324 In addition to the value on the stack, it uses:
5325 1. The address of the jump table.
5326 2. The size of the jump table.
5327 3. The default label. */
5329 jump_label = bc_get_bytecode_label ();
5330 bc_emit_bytecode_labelref (jump_label);
5331 bc_emit_bytecode_const ((char *) &thiscase->data.case_stmt.num_ranges,
5332 sizeof thiscase->data.case_stmt.num_ranges);
/* With no explicit default, fall out of the switch entirely.  */
5334 if (thiscase->data.case_stmt.default_label)
5335 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (thiscase->data.case_stmt.default_label)));
5337 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));
5339 /* Output the jump table. */
5341 bc_align_bytecode (3 /* PTR_ALIGN */);
5342 bc_emit_bytecode_labeldef (jump_label);
/* Each table entry is a low/high bound pair plus a target label.  */
5344 if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode)
5345 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
5347 opcode = TREE_INT_CST_LOW (c->low);
5348 bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);
5350 opcode = TREE_INT_CST_LOW (c->high);
5351 bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);
5353 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
5356 if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == DImode)
5357 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
5359 bc_emit_bytecode_DI_const (c->low);
5360 bc_emit_bytecode_DI_const (c->high);
5362 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
5369 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->exit_label));
5371 /* Possibly issue enumeration warnings. */
5373 if (!thiscase->data.case_stmt.default_label
5374 && TREE_CODE (TREE_TYPE (expr)) == ENUMERAL_TYPE
5375 && TREE_CODE (expr) != INTEGER_CST
5377 check_for_full_enumeration_handling (TREE_TYPE (expr));
5380 #ifdef DEBUG_PRINT_CODE
5381 fputc ('\n', stderr);
5384 POPSTACK (case_stack);
5388 /* Return unique bytecode ID. */
/* NOTE(review): presumably the function-local counter behind the
   bytecode-ID generator described above -- confirm against full source.  */
5393 static int bc_uid = 0;
5398 /* Generate code to jump to LABEL if OP1 and OP2 are equal.
   UNSIGNEDP says whether the comparison should be unsigned.  */
5401 do_jump_if_equal (op1, op2, label, unsignedp)
5402 rtx op1, op2, label;
/* Both constant: decide at compile time -- emit either an unconditional
   jump or nothing at all.  */
5405 if (GET_CODE (op1) == CONST_INT
5406 && GET_CODE (op2) == CONST_INT)
5408 if (INTVAL (op1) == INTVAL (op2))
/* Otherwise emit a real compare-and-branch.  A CONST_INT operand has
   VOIDmode, so take the mode from whichever operand has one.  */
5413 enum machine_mode mode = GET_MODE (op1);
5414 if (mode == VOIDmode)
5415 mode = GET_MODE (op2);
5416 emit_cmp_insn (op1, op2, EQ, NULL_RTX, mode, unsignedp, 0);
5417 emit_jump_insn (gen_beq (label));
5421 /* Not all case values are encountered equally. This function
5422 uses a heuristic to weight case labels, in cases where that
5423 looks like a reasonable thing to do.
5425 Right now, all we try to guess is text, and we establish the
5428 chars above space: 16
5437 If we find any cases in the switch that are not either -1 or in the range
5438 of valid ASCII characters, or are control characters other than those
5439 commonly used with "\", don't treat this switch as scanning text.
5441 Return 1 if these nodes are suitable for cost estimation, otherwise
5445 estimate_case_costs (node)
/* -1 is allowed because it is the conventional EOF value.  */
5448 tree min_ascii = build_int_2 (-1, -1);
5449 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5453 /* If we haven't already made the cost table, make it now. Note that the
5454 lower bound of the table is -1, not zero. */
5456 if (cost_table == NULL)
/* Allocate 129 entries and advance the pointer by one so that index -1
   (EOF) is a valid subscript.  */
5458 cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
5459 bzero ((char *) (cost_table - 1), 129 * sizeof (short));
5461 for (i = 0; i < 128; i++)
5465 else if (ispunct (i))
5467 else if (iscntrl (i))
/* Give the common whitespace/control characters explicit weights.  */
5471 cost_table[' '] = 8;
5472 cost_table['\t'] = 4;
5473 cost_table['\0'] = 4;
5474 cost_table['\n'] = 2;
5475 cost_table['\f'] = 1;
5476 cost_table['\v'] = 1;
5477 cost_table['\b'] = 1;
5480 /* See if all the case expressions look like text. It is text if the
5481 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5482 as signed arithmetic since we don't want to ever access cost_table with a
5483 value less than -1. Also check that none of the constants in a range
5484 are strange control characters. */
5486 for (n = node; n; n = n->right)
5488 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
/* A negative table entry marks an unusual control character.  */
5491 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
5492 if (cost_table[i] < 0)
5496 /* All interesting values are within the range of interesting
5497 ASCII characters. */
5501 /* Scan an ordered list of case nodes
5502 combining those with consecutive values or ranges.
5504 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
5507 group_case_nodes (head)
5510 case_node_ptr node = head;
/* LB identifies where this case's label actually lands; two cases can
   merge only if their labels reach the same real insn.  */
5514 rtx lb = next_real_insn (label_rtx (node->code_label));
5515 case_node_ptr np = node;
5517 /* Try to group the successors of NODE with NODE. */
5518 while (((np = np->right) != 0)
5519 /* Do they jump to the same place? */
5520 && next_real_insn (label_rtx (np->code_label)) == lb
5521 /* Are their ranges consecutive? */
5522 && tree_int_cst_equal (np->low,
5523 fold (build (PLUS_EXPR,
5524 TREE_TYPE (node->high),
5527 /* An overflow is not consecutive. */
5528 && tree_int_cst_lt (node->high,
5529 fold (build (PLUS_EXPR,
5530 TREE_TYPE (node->high),
5532 integer_one_node))))
/* Absorb NP's range into NODE.  */
5534 node->high = np->high;
5536 /* NP is the first node after NODE which can't be grouped with it.
5537 Delete the nodes in between, and move on to that node. */
5543 /* Take an ordered list of case nodes
5544 and transform them into a near optimal binary tree,
5545 on the assumption that any target code selection value is as
5546 likely as any other.
5548 The transformation is performed by splitting the ordered
5549 list into two equal sections plus a pivot. The parts are
5550 then attached to the pivot as left and right branches. Each
5551 branch is then transformed recursively. */
5554 balance_case_nodes (head, parent)
5555 case_node_ptr *head;
5556 case_node_ptr parent;
5558 register case_node_ptr np;
5566 register case_node_ptr *npp;
5569 /* Count the number of entries on branch. Also count the ranges. */
5573 if (!tree_int_cst_equal (np->low, np->high))
/* A range contributes the cost of both of its endpoints.  */
5577 cost += cost_table[TREE_INT_CST_LOW (np->high)];
5581 cost += cost_table[TREE_INT_CST_LOW (np->low)];
5589 /* Split this list if it is long enough for that to help. */
5594 /* Find the place in the list that bisects the list's total cost.
5595 Here I gets half the total cost. */
5600 /* Skip nodes while their cost does not reach that amount. */
5601 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5602 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
5603 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
5606 npp = &(*npp)->right;
5611 /* Leave this branch lopsided, but optimize left-hand
5612 side and fill in `parent' fields for right-hand side. */
5614 np->parent = parent;
5615 balance_case_nodes (&np->left, np);
5616 for (; np->right; np = np->right)
5617 np->right->parent = np;
5621 /* If there are just three nodes, split at the middle one. */
5623 npp = &(*npp)->right;
5626 /* Find the place in the list that bisects the list's total cost,
5627 where ranges count as 2.
5628 Here I gets half the total cost. */
5629 i = (i + ranges + 1) / 2;
5632 /* Skip nodes while their cost does not reach that amount. */
5633 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5638 npp = &(*npp)->right;
/* NP becomes the pivot; what precedes it is the left branch and what
   follows it is the right branch.  */
5643 np->parent = parent;
5646 /* Optimize each of the two split parts. */
5647 balance_case_nodes (&np->left, np);
5648 balance_case_nodes (&np->right, np);
5652 /* Else leave this branch as one level,
5653 but fill in `parent' fields. */
5655 np->parent = parent;
5656 for (; np->right; np = np->right)
5657 np->right->parent = np;
5662 /* Search the parent sections of the case node tree
5663 to see if a test for the lower bound of NODE would be redundant.
5664 INDEX_TYPE is the type of the index expression.
5666 The instructions to generate the case decision tree are
5667 output in the same order as nodes are processed so it is
5668 known that if a parent node checks the range of the current
5669 node minus one that the current node is bounded at its lower
5670 span. Thus the test would be redundant. */
5673 node_has_low_bound (node, index_type)
5678 case_node_ptr pnode;
5680 /* If the lower bound of this node is the lowest value in the index type,
5681 we need not test it. */
5683 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5686 /* If this node has a left branch, the value at the left must be less
5687 than that at this node, so it cannot be bounded at the bottom and
5688 we need not bother testing any further. */
5693 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5694 node->low, integer_one_node));
5696 /* If the subtraction above overflowed, we can't verify anything.
5697 Otherwise, look for a parent that tests our value - 1. */
5699 if (! tree_int_cst_lt (low_minus_one, node->low))
/* A parent whose high bound is exactly low-1 has already excluded
   everything below our low bound.  */
5702 for (pnode = node->parent; pnode; pnode = pnode->parent)
5703 if (tree_int_cst_equal (low_minus_one, pnode->high))
5709 /* Search the parent sections of the case node tree
5710 to see if a test for the upper bound of NODE would be redundant.
5711 INDEX_TYPE is the type of the index expression.
5713 The instructions to generate the case decision tree are
5714 output in the same order as nodes are processed so it is
5715 known that if a parent node checks the range of the current
5716 node plus one that the current node is bounded at its upper
5717 span. Thus the test would be redundant. */
5720 node_has_high_bound (node, index_type)
5725 case_node_ptr pnode;
5727 /* If the upper bound of this node is the highest value in the type
5728 of the index expression, we need not test against it. */
5730 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5733 /* If this node has a right branch, the value at the right must be greater
5734 than that at this node, so it cannot be bounded at the top and
5735 we need not bother testing any further. */
5740 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5741 node->high, integer_one_node));
5743 /* If the addition above overflowed, we can't verify anything.
5744 Otherwise, look for a parent that tests our value + 1. */
5746 if (! tree_int_cst_lt (node->high, high_plus_one))
/* A parent whose low bound is exactly high+1 has already excluded
   everything above our high bound.  */
5749 for (pnode = node->parent; pnode; pnode = pnode->parent)
5750 if (tree_int_cst_equal (high_plus_one, pnode->low))
5756 /* Search the parent sections of the
5757 case node tree to see if both tests for the upper and lower
5758 bounds of NODE would be redundant. */
5761 node_is_bounded (node, index_type)
/* Bounded on both sides iff both individual checks succeed.  */
5765 return (node_has_low_bound (node, index_type)
5766 && node_has_high_bound (node, index_type));
5769 /* Emit an unconditional jump to LABEL unless it would be dead code. */
5772 emit_jump_if_reachable (label)
/* A BARRIER as the last insn means control cannot fall through here,
   so the jump would be unreachable.  */
5775 if (GET_CODE (get_last_insn ()) != BARRIER)
5779 /* Emit step-by-step code to select a case for the value of INDEX.
5780 The thus generated decision tree follows the form of the
5781 case-node binary tree NODE, whose nodes represent test conditions.
5782 INDEX_TYPE is the type of the index of the switch.
5784 Care is taken to prune redundant tests from the decision tree
5785 by detecting any boundary conditions already checked by
5786 emitted rtx. (See node_has_high_bound, node_has_low_bound
5787 and node_is_bounded, above.)
5789 Where the test conditions can be shown to be redundant we emit
5790 an unconditional jump to the target code. As a further
5791 optimization, the subordinates of a tree node are examined to
5792 check for bounded nodes. In this case conditional and/or
5793 unconditional jumps as a result of the boundary check for the
5794 current node are arranged to target the subordinates associated
5795 code for out-of-bound conditions on the current node.
5797 We can assume that when control reaches the code generated here,
5798 the index value has already been compared with the parents
5799 of this node, and determined to be on the same side of each parent
5800 as this node is. Thus, if this node tests for the value 51,
5801 and a parent tested for 52, we don't need to consider
5802 the possibility of a value greater than 51. If another parent
5803 tests for the value 50, then this node need not test anything. */
/* Emit conditional-jump RTL for one node of the balanced case-node tree.
   INDEX is the rtx holding the switch index value; NODE is the current
   case node; DEFAULT_LABEL is the rtx label reached when no case matches;
   INDEX_TYPE is the tree type of the index, consulted only for its
   signedness (TREE_UNSIGNED below).  Recurses into NODE's subtrees.
   NOTE(review): this listing is an elided extraction -- continuation
   lines (trailing argument lists such as `VOIDmode, 0),', braces, and
   declarations such as `tree test_label') are missing between the
   embedded original line numbers.  Confirm against the full file before
   editing.  */
5806 emit_case_nodes (index, node, default_label, index_type)
5812 /* If INDEX has an unsigned type, we must make unsigned branches. */
5813 int unsignedp = TREE_UNSIGNED (index_type);
/* K&R typedef: pointer to an unprototyped function returning rtx.
   The four pointers below select the signed or unsigned conditional
   branch generator once, instead of testing `unsignedp' at each use.  */
5814 typedef rtx rtx_function ();
5815 rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
5816 rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
5817 rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
5818 rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
5819 enum machine_mode mode = GET_MODE (index);
5821 /* See if our parents have already tested everything for us.
5822 If they have, emit an unconditional jump for this node. */
5823 if (node_is_bounded (node, index_type))
5824 emit_jump (label_rtx (node->code_label));
5826 else if (tree_int_cst_equal (node->low, node->high))
5828 /* Node is single valued. First see if the index expression matches
5829 this node and then check our children, if any. */
5831 do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
5832 label_rtx (node->code_label), unsignedp);
5834 if (node->right != 0 && node->left != 0)
5836 /* This node has children on both sides.
5837 Dispatch to one side or the other
5838 by comparing the index value with this node's value.
5839 If one subtree is bounded, check that one first,
5840 so we can avoid real branches in the tree. */
5842 if (node_is_bounded (node->right, index_type))
5844 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5846 GT, NULL_RTX, mode, unsignedp, 0);
5848 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
5849 emit_case_nodes (index, node->left, default_label, index_type);
5852 else if (node_is_bounded (node->left, index_type))
5854 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5856 LT, NULL_RTX, mode, unsignedp, 0);
5857 emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
5858 emit_case_nodes (index, node->right, default_label, index_type);
5863 /* Neither node is bounded. First distinguish the two sides;
5864 then emit the code for one side at a time. */
/* NOTE(review): the `tree test_label' declaration line is elided
   just above this initializer in the extraction.  */
5867 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5869 /* See if the value is on the right. */
5870 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5872 GT, NULL_RTX, mode, unsignedp, 0);
5873 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
5875 /* Value must be on the left.
5876 Handle the left-hand subtree. */
5877 emit_case_nodes (index, node->left, default_label, index_type);
5878 /* If left-hand subtree does nothing,
5880 emit_jump_if_reachable (default_label);
5882 /* Code branches here for the right-hand subtree. */
5883 expand_label (test_label);
5884 emit_case_nodes (index, node->right, default_label, index_type);
5888 else if (node->right != 0 && node->left == 0)
5890 /* Here we have a right child but no left so we issue conditional
5891 branch to default and process the right child.
5893 Omit the conditional branch to default if by doing so we avoid only one
5894 right child; it costs too much space to save so little time. */
5896 if (node->right->right || node->right->left
5897 || !tree_int_cst_equal (node->right->low, node->right->high))
5899 if (!node_has_low_bound (node, index_type))
5901 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5903 LT, NULL_RTX, mode, unsignedp, 0);
5904 emit_jump_insn ((*gen_blt_pat) (default_label));
5907 emit_case_nodes (index, node->right, default_label, index_type);
5910 /* We cannot process node->right normally
5911 since we haven't ruled out the numbers less than
5912 this node's value. So handle node->right explicitly. */
5913 do_jump_if_equal (index,
5914 expand_expr (node->right->low, NULL_RTX,
5916 label_rtx (node->right->code_label), unsignedp);
5919 else if (node->right == 0 && node->left != 0)
5921 /* Just one subtree, on the left. */
5923 #if 0 /* The following code and comment were formerly part
5924 of the condition here, but they didn't work
5925 and I don't understand what the idea was. -- rms. */
5926 /* If our "most probable entry" is less probable
5927 than the default label, emit a jump to
5928 the default label using condition codes
5929 already lying around. With no right branch,
5930 a branch-greater-than will get us to the default
5933 && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
5936 if (node->left->left || node->left->right
5937 || !tree_int_cst_equal (node->left->low, node->left->high))
5939 if (!node_has_high_bound (node, index_type))
5941 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5943 GT, NULL_RTX, mode, unsignedp, 0);
5944 emit_jump_insn ((*gen_bgt_pat) (default_label));
5947 emit_case_nodes (index, node->left, default_label, index_type);
5950 /* We cannot process node->left normally
5951 since we haven't ruled out the numbers less than
5952 this node's value. So handle node->left explicitly. */
5953 do_jump_if_equal (index,
5954 expand_expr (node->left->low, NULL_RTX,
5956 label_rtx (node->left->code_label), unsignedp);
5961 /* Node is a range. These cases are very similar to those for a single
5962 value, except that we do not start by testing whether this node
5963 is the one to branch to. */
5965 if (node->right != 0 && node->left != 0)
5967 /* Node has subtrees on both sides.
5968 If the right-hand subtree is bounded,
5969 test for it first, since we can go straight there.
5970 Otherwise, we need to make a branch in the control structure,
5971 then handle the two subtrees. */
5972 tree test_label = 0;
5974 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5976 GT, NULL_RTX, mode, unsignedp, 0);
5978 if (node_is_bounded (node->right, index_type))
5979 /* Right hand node is fully bounded so we can eliminate any
5980 testing and branch directly to the target code. */
5981 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
5984 /* Right hand node requires testing.
5985 Branch to a label where we will handle it later. */
5987 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5988 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
5991 /* Value belongs to this node or to the left-hand subtree. */
5993 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
5994 GE, NULL_RTX, mode, unsignedp, 0);
5995 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
5997 /* Handle the left-hand subtree. */
5998 emit_case_nodes (index, node->left, default_label, index_type);
6000 /* If right node had to be handled later, do that now. */
6004 /* If the left-hand subtree fell through,
6005 don't let it fall into the right-hand subtree. */
6006 emit_jump_if_reachable (default_label);
6008 expand_label (test_label);
6009 emit_case_nodes (index, node->right, default_label, index_type);
6013 else if (node->right != 0 && node->left == 0)
6015 /* Deal with values to the left of this node,
6016 if they are possible. */
6017 if (!node_has_low_bound (node, index_type))
6019 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
6021 LT, NULL_RTX, mode, unsignedp, 0);
6022 emit_jump_insn ((*gen_blt_pat) (default_label));
6025 /* Value belongs to this node or to the right-hand subtree. */
6027 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
6029 LE, NULL_RTX, mode, unsignedp, 0);
6030 emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));
6032 emit_case_nodes (index, node->right, default_label, index_type);
6035 else if (node->right == 0 && node->left != 0)
6037 /* Deal with values to the right of this node,
6038 if they are possible. */
6039 if (!node_has_high_bound (node, index_type))
6041 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
6043 GT, NULL_RTX, mode, unsignedp, 0);
6044 emit_jump_insn ((*gen_bgt_pat) (default_label));
6047 /* Value belongs to this node or to the left-hand subtree. */
6049 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
6050 GE, NULL_RTX, mode, unsignedp, 0);
6051 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
6053 emit_case_nodes (index, node->left, default_label, index_type);
6058 /* Node has no children so we check low and high bounds to remove
6059 redundant tests. Only one of the bounds can exist,
6060 since otherwise this node is bounded--a case tested already. */
6062 if (!node_has_high_bound (node, index_type))
6064 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
6066 GT, NULL_RTX, mode, unsignedp, 0);
6067 emit_jump_insn ((*gen_bgt_pat) (default_label));
6070 if (!node_has_low_bound (node, index_type))
6072 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
6074 LT, NULL_RTX, mode, unsignedp, 0);
6075 emit_jump_insn ((*gen_blt_pat) (default_label));
6078 emit_jump (label_rtx (node->code_label));
6083 /* These routines are used by the loop unrolling code. They copy BLOCK trees
6084 so that the debugging info will be correct for the unrolled loop. */
6086 /* Indexed by block number, contains a pointer to the N'th block node. */
/* Set by find_loop_tree_blocks, consumed by unroll_block_trees below.  */
6088 static tree *block_vector;
/* Record in block_vector the BLOCK tree node for each block number in
   the current function, using DECL_INITIAL of the current function as
   the outermost block and scanning the insn chain from get_insns.
   Called before loop unrolling; unroll_block_trees uses the result.
   NOTE(review): elided listing -- the return type line and braces are
   missing from this extraction.  */
6091 find_loop_tree_blocks ()
6093 tree block = DECL_INITIAL (current_function_decl);
6095 block_vector = identify_blocks (block, get_insns ());
/* After loop unrolling, reorder the BLOCK trees of the current function
   to match the (possibly duplicated) insn chain, so debugging info for
   the unrolled loop is correct.  Consumes the block_vector map built
   earlier by find_loop_tree_blocks.
   NOTE(review): elided listing -- the return type line and braces are
   missing from this extraction.  */
6099 unroll_block_trees ()
6101 tree block = DECL_INITIAL (current_function_decl);
6103 reorder_blocks (block_vector, block, get_insns ());