/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */
/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
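/* For illustration only (an editorial sketch, not part of the compiler):
   a front end might drive these entry points for `if (cond) thenpart;'
   as follows.  The parse_* actions are hypothetical; the expand_* calls
   are the ones documented above.  */
#if 0
     parse_if_header ();                /* hypothetical parser action */
     expand_start_cond (cond, 0);       /* emit test; jump past clause if false */
     parse_then_clause ();              /* hypothetical; emits the clause's RTL */
     expand_end_cond ();                /* emit the label the false-jump targets */
#endif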
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "hard-reg-set.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;
/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;
/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Each time we expand the end of a binding contour (in `expand_end_bindings')
   and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
   This is used by the `remember_end_note' function to record the endpoint
   of each generated block in its associated BLOCK node.  */

static rtx last_block_end_note;
/* Number of binding contours started so far in this function.  */

int block_start_count;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;
/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if not optimizing.  */
extern rtx save_expr_regs;
/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
extern int frame_offset;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
extern rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
extern rtx tail_recursion_reentry;
/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars at an offset from the argument pointer will be
   needed by inner routines.  */

extern rtx arg_pointer_save_area;
/* Chain of all RTL_EXPRs that have insns in them.  */
extern tree rtl_expr_chain;

#if 0  /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   A chain of case nodes is initially maintained via the RIGHT fields
   in the nodes.  Nodes with higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
struct case_node
{
  struct case_node *left;	/* Left son in binary tree */
  struct case_node *right;	/* Right son in binary tree; also node chain */
  struct case_node *parent;	/* Parent of node in binary tree */
  tree low;			/* Lowest index value for this label */
  tree high;			/* Highest index value for this label */
  tree code_label;		/* Label to jump to when node matches */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
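/* Illustration (not compiler code): for the GNU C range-label extension

	switch (i) { case 1: ...; case 4 ... 7: ...; }

   the case list would hold one node with low == high == 1 and one node
   with low == 4 and high == 7, chained in ascending order through the
   `right' fields until balance_case_nodes rebuilds it as a tree.  */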
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;

static int estimate_case_costs ();
static void balance_case_nodes ();
static void emit_case_nodes ();
static void group_case_nodes ();
static void emit_jump_if_reachable ();

static int warn_if_unused_value ();
static void expand_goto_internal ();
static int expand_fixup ();
void free_temp_slots ();
static void expand_cleanups ();
static void expand_null_return_1 ();
static int tail_recursion_args ();
static void do_jump_if_equal ();
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
struct nesting
{
  /* Chain of all pending constructs, innermost first.  */
  struct nesting *all;
  /* Chain of constructs of the same kind, innermost first.  */
  struct nesting *next;
  /* Position of this entry on `nesting_stack'.  */
  int depth;
  /* Non-null if this construct is visible to `exit_something'.  */
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} cond;
      /* For loops.  */
      struct
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} loop;
      /* For variable binding contours.  */
      struct
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0 (was empty_cleanup_list),
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int function_call_count;
	} block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels, kept in ascending order by value
	     as the list is built.
	     During expand_end_case, this list may be rearranged into a
	     nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Number of range exprs in case statement.  */
	  int num_ranges;
	  /* Name of this kind of statement, for warnings.  */
	  char *printname;
	  /* Nonzero if a case label has been seen in this case stmt.  */
	  char seenlabel;
	} case_stmt;
      /* For exception contours.  */
      struct
	{
	  /* List of exceptions raised.  This is a TREE_LIST
	     of whatever you want.  */
	  tree raised;
	  /* List of exceptions caught.  This is also a TREE_LIST
	     of whatever you want.  As a special case, it has the
	     value `void_type_node' if it handles default exceptions.  */
	  tree handled;
	  /* First insn of TRY block, in case resumptive model is needed.  */
	  rtx first_insn;
	  /* Label for the catch clauses.  */
	  rtx except_label;
	  /* Label for unhandled exceptions.  */
	  rtx unhandled_label;
	  /* Label at the end of whole construct.  */
	  rtx after_label;
	  /* Label which "escapes" the exception construct.
	     Like EXIT_LABEL for BREAK construct, but for exceptions.  */
	  rtx escape_label;
	} except_stmt;
    } data;
};
/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* If any new stacks are added here, add them to POPSTACKS too.  */

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Chain of all pending exception contours.  */
struct nesting *except_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;

/* Allocate and return a new `struct nesting'.  */
#define ALLOC_NESTING() \
(struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)					\
do { struct nesting *target = STACK;			\
     struct nesting *this;				\
     do { this = nesting_stack;				\
	  if (loop_stack == this)			\
	    loop_stack = loop_stack->next;		\
	  if (cond_stack == this)			\
	    cond_stack = cond_stack->next;		\
	  if (block_stack == this)			\
	    block_stack = block_stack->next;		\
	  if (stack_block_stack == this)		\
	    stack_block_stack = stack_block_stack->next; \
	  if (case_stack == this)			\
	    case_stack = case_stack->next;		\
	  if (except_stack == this)			\
	    except_stack = except_stack->next;		\
	  nesting_depth = nesting_stack->depth - 1;	\
	  nesting_stack = this->all;			\
	  obstack_free (&stmt_obstack, this); }		\
     while (this != target); } while (0)
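/* Example of use (illustrative only): a construct's end-function pops
   its own entry and anything nested more deeply, e.g.
   `POPSTACK (cond_stack);' in expand_end_cond below frees this cond's
   entry after any dangling inner entries have been unlinked from all
   the per-construct chains.  */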
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0 (was empty_cleanup_list),
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

static struct goto_fixup *goto_fixup_chain;
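/* Illustrative C fragment (assuming a target where the array below gets
   a dynamic stack allocation) showing when a fixup is created:

	{
	  int v[n];		-- this contour records a stack level
	  ...
	  goto out;		-- restore code can't be emitted yet
	}
      out: ;

   expand_goto sees `out' before it is defined, so expand_fixup queues
   the jump here; fixup_gotos later inserts the stack restore just
   ahead of the jump insn.  */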
/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};
void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
#if 0
  empty_cleanup_list = build_tree_list (NULL_TREE, NULL_TREE);
#endif
}

void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
}
/* Save the statement state of the current function into P.  */

void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
}
void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
}
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || prev_real_insn (last_insn) == 0))
    emit_insn (gen_nop ());
}
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}
/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}
/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  emit_indirect_jump (x);
}
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */
void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}
/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slot == 0)
    {
      nonlocal_goto_handler_slot
	= assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
}
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  /* Check for a nonlocal goto to a containing function.  */
  tree context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx (LABEL_REF, Pmode, label_rtx (label));
      rtx temp;

      p->has_nonlocal_label = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
				      copy_rtx (p->nonlocal_goto_handler_slot),
				      copy_rtx (p->nonlocal_goto_stack_level),
				      label_ref));
      else
#endif
	{
	  rtx addr;

	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (frame_pointer_rtx, lookup_static_chain (label));

	  /* We have now loaded the frame pointer hardware register with
	     the address that corresponds to the start of the virtual
	     stack vars.  So replace virtual_stack_vars_rtx in all
	     addresses we use with frame_pointer_rtx.  */

	  /* Get addr of containing function's current nonlocal goto handler,
	     which will do any cleanups and then jump to the label.  */
	  addr = copy_rtx (p->nonlocal_goto_handler_slot);
	  temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
					   frame_pointer_rtx));

	  /* Restore the stack pointer.  Note this uses the fp just restored.  */
	  addr = p->nonlocal_goto_stack_level;
	  if (addr)
	    addr = replace_rtx (copy_rtx (addr),
				virtual_stack_vars_rtx, frame_pointer_rtx);

	  emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

	  /* Put in the static chain register the nonlocal label address.  */
	  emit_move_insn (static_chain_rtx, label_ref);
	  /* USE of frame_pointer_rtx added for consistency; not clear if
	     really needed.  */
	  emit_insn (gen_rtx (USE, VOIDmode, frame_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
	  emit_indirect_jump (temp);
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
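/* Illustrative only: in GNU C the nonlocal case above arises for
   nested functions, e.g.

	void outer ()
	{
	  __label__ failed;
	  void inner () { goto failed; }
	  ... inner (); ...
	failed: ...;
	}

   where the `goto failed' inside `inner' must restore outer's frame
   and stack pointers before jumping.  */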
/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this would clobber
	     the stack pointer.  This one should be deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();
	  emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
      && (rtl_label == loop_stack->data.loop.start_label
	  || rtl_label == loop_stack->data.loop.end_label
	  || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BEGIN_BLOCK and NOTE_INSN_END_BLOCK notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.  */

      {
	register rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();

	start_sequence ();
	pushlevel (0);
	fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
	last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
	fixup->context = poplevel (1, 0, 0);  /* Create the BLOCK node now!  */
	end_sequence ();
	emit_insns_after (fixup->before_jump, original_before_jump);
      }

      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= (((block->data.block.outer_cleanups
#if 0
	     && block->data.block.outer_cleanups != empty_cleanup_list
#endif
	     )
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}
/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */
void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  register rtx cleanup_insns;

	  /* Get the first non-label after the label
	     this goto jumps to.  If that's before this scope begins,
	     we don't have a jump into the scope.  */
	  rtx after_label = f->target_rtl;
	  while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
	    after_label = NEXT_INSN (after_label);

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      /* If AFTER_LABEL is 0, it means the jump goes to the end
		 of the rtl, which means it jumps into this scope.  */
	      && (after_label == 0
		  || INSN_UID (first_insn) < INSN_UID (after_label))
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_REGISTER (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_REGISTER (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  pushlevel (0);
	  set_block (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), 0);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level)
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point ensures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  poplevel (1, 0, 0);

	  end_sequence ();
	  emit_insns_after (cleanup_insns, f->before_jump);

	  f->before_jump = 0;
	}
    }

  /* Mark the cleanups of exited blocks so that they are executed
     by the code above.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.
	   Also mark the cleanup_list_list element for F
	   that corresponds to this block, so that ultimately
	   this block's cleanups will be executed by the code above.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared,
	   it means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    TREE_ADDRESSABLE (lists) = 1;

	if (stack_level)
	  f->stack_level = stack_level;
      }
}
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
		      TREE_STRING_POINTER (body)));
}
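/* Illustrative only: for a C statement such as

	asm ("nop");

   BODY is the STRING_CST holding "nop", and the ASM_INPUT built here
   carries that text verbatim into the insn stream.  */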
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  /* The insn we have emitted.  */
  rtx insn;

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	nclobbers++;
    }

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      int j;
      int found_equal = 0;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
	return;

      /* Make sure constraint has `=' and does not have `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	{
	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	    {
	      error ("output operand constraint contains `+'");
	      return;
	    }
	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '=')
	    found_equal = 1;
	}
      if (! found_equal)
	{
	  error ("output operand constraint lacks `='");
	  return;
	}

      /* If an output operand is not a variable or indirect ref,
	 create a SAVE_EXPR which is a pseudo-reg
	 to act as an intermediate temporary.
	 Make the asm insn write into that, then copy it to
	 the real output operand.  */

      while (TREE_CODE (val) == COMPONENT_REF
	     || TREE_CODE (val) == ARRAY_REF)
	val = TREE_OPERAND (val, 0);

      if (TREE_CODE (val) != VAR_DECL
	  && TREE_CODE (val) != PARM_DECL
	  && TREE_CODE (val) != INDIRECT_REF)
	TREE_VALUE (tail) = save_expr (TREE_VALUE (tail));

      output_rtx[i] = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
    }

  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx (ASM_OPERANDS, VOIDmode,
		  TREE_STRING_POINTER (string), "", 0, argvec, constraints,
		  filename, line);
  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;

      /* If there's an erroneous arg, emit no insn,
	 because the ASM_INPUT would get VOIDmode
	 and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;
      if (TREE_PURPOSE (tail) == NULL_TREE)
	{
	  error ("hard register `%s' listed as input operand to `asm'",
		 TREE_STRING_POINTER (TREE_VALUE (tail)) );
	  return;
	}

      /* Make sure constraint has neither `=' nor `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '='
	    || TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	  {
	    error ("input operand constraint contains `%c'",
		   TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
	    return;
	  }

      XVECEXP (body, 3, i)      /* argvec */
	= expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
      XVECEXP (body, 4, i)      /* constraints */
	= gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
		   TREE_STRING_POINTER (TREE_PURPOSE (tail)));
      i++;
    }

  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
			       ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;
      if (num == 0) num = 1;
      body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx (SET, VOIDmode,
		       output_rtx[i],
		       gen_rtx (ASM_OPERANDS, VOIDmode,
				TREE_STRING_POINTER (string),
				TREE_STRING_POINTER (TREE_PURPOSE (tail)),
				i, argvec, constraints,
				filename, line));
	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
	{
	  char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int j = decode_reg_name (regname);

	  if (j < 0)
	    {
	      if (j == -3)	/* `cc', which is not a register */
		continue;

	      if (j == -4)	/* `memory', don't cache memory across asm */
		{
		  XVECEXP (body, 0, i++) = gen_rtx (CLOBBER, VOIDmode, const0_rtx);
		  continue;
		}

	      error ("unknown register name `%s' in `asm'", regname);
	      return;
	    }

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  XVECEXP (body, 0, i++)
	    = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));
	}

      insn = emit_insn (body);
    }
}
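/* Illustrative only: a typical statement this routine handles (the
   mnemonic is hypothetical) is

	asm volatile ("add %1,%0" : "=r" (out) : "r" (in) : "cc");

   Here OUTPUTS has one element (constraint "=r", lvalue `out'),
   INPUTS has one element (constraint "r", value `in'), CLOBBERS
   names `cc', and VOL is nonzero because of `volatile'.  */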
/* Generate RTL to evaluate the expression EXP
   and remember it in case this is the VALUE in a ({... VALUE; }) construct.  */

void
expand_expr_stmt (exp)
     tree exp;
{
  /* If -W, warn about statements with no side effects,
     except for an explicit cast to void (e.g. for assert()), and
     except inside a ({...}) where they may be useful.  */
  if (expr_stmts_for_value == 0 && exp != error_mark_node)
    {
      if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
	  && !(TREE_CODE (exp) == CONVERT_EXPR
	       && TREE_TYPE (exp) == void_type_node))
	warning_with_file_and_line (emit_filename, emit_lineno,
				    "statement with no effect");
      else if (warn_unused)
	warn_if_unused_value (exp);
    }
  last_expr_type = TREE_TYPE (exp);
  if (! flag_syntax_only)
    last_expr_value = expand_expr (exp,
				   (expr_stmts_for_value
				    ? NULL_RTX : const0_rtx),
				   VOIDmode, 0);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
      && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
	copy_to_reg (last_expr_value);
      else
	{
	  rtx lab = gen_label_rtx ();

	  /* Compare the value with itself to reference it.  */
	  emit_cmp_insn (last_expr_value, last_expr_value, EQ,
			 expand_expr (TYPE_SIZE (last_expr_type),
				      NULL_RTX, VOIDmode, 0),
			 BLKmode, 0,
			 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
	  emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
	  emit_label (lab);
	}
    }

  /* If this expression is part of a ({...}) and is in memory, we may have
     to preserve temporaries.  */
  preserve_temp_slots (last_expr_value);

  /* Free any temporaries used to evaluate this expression.  Any temporary
     used as a result of this expression will already have been preserved
     above.  */
  free_temp_slots ();
}
/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  */

static int
warn_if_unused_value (exp)
     tree exp;
{
  if (TREE_USED (exp))
    return 0;

  switch (TREE_CODE (exp))
    {
    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case MODIFY_EXPR:
    case INIT_EXPR:
    case TARGET_EXPR:
    case CALL_EXPR:
    case METHOD_CALL_EXPR:
    case RTL_EXPR:
    case WITH_CLEANUP_EXPR:
    case EXIT_EXPR:
      /* We don't warn about COND_EXPR because it may be a useful
	 construct if either arm contains a side effect.  */
    case COND_EXPR:
      return 0;

    case BIND_EXPR:
      /* For a binding, warn if no side effect within it.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case TRUTH_ORIF_EXPR:
    case TRUTH_ANDIF_EXPR:
      /* In && or ||, warn if 2nd operand has no side effect.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case COMPOUND_EXPR:
      if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
	return 1;
      /* Let people do `(foo (), 0)' without a warning.  */
      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	return 0;
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case NOP_EXPR:
    case CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Don't warn about values cast to void.  */
      if (TREE_TYPE (exp) == void_type_node)
	return 0;
      /* Don't warn about conversions not explicit in the user's program.  */
      if (TREE_NO_UNUSED_WARNING (exp))
	return 0;
      /* Assignment to a cast usually results in a cast of a modify.
	 Don't complain about that.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == MODIFY_EXPR)
	return 0;
      /* Sometimes it results in a cast of a cast of a modify.
	 Don't complain about that.  */
      if ((TREE_CODE (TREE_OPERAND (exp, 0)) == CONVERT_EXPR
	   || TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR)
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == MODIFY_EXPR)
	return 0;

    default:
      /* Referencing a volatile value is a side effect, so don't warn.  */
      if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
	   || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
	  && TREE_THIS_VOLATILE (exp))
	return 0;
      warning_with_file_and_line (emit_filename, emit_lineno,
				  "value computed is not used");
      return 1;
    }
}
/* Clear out the memory of the last expression evaluated.  */

void
clear_last_expr ()
{
  last_expr_type = 0;
}

/* Begin a statement which will return a value.
   Return the RTL_EXPR for this statement expr.
   The caller must save that value and pass it to expand_end_stmt_expr.  */

tree
expand_start_stmt_expr ()
{
  /* Make the RTL_EXPR node temporary, not momentary,
     so that rtl_expr_chain doesn't become garbage.  */
  int momentary = suspend_momentary ();
  tree t = make_node (RTL_EXPR);
  resume_momentary (momentary);
  start_sequence ();
  NO_DEFER_POP;
  expr_stmts_for_value++;
  return t;
}
/* Restore the previous state at the end of a statement that returns a value.
   Returns a tree node representing the statement's value and the
   insns to compute the value.

   The nodes of that expression have been freed by now, so we cannot use them.
   But we don't want to do that anyway; the expression has already been
   evaluated and now we just want to use the value.  So generate a RTL_EXPR
   with the proper type and RTL value.

   If the last substatement was not an expression,
   return something with type `void'.  */

tree
expand_end_stmt_expr (t)
     tree t;
{
  OK_DEFER_POP;

  if (last_expr_type == 0)
    {
      last_expr_type = void_type_node;
      last_expr_value = const0_rtx;
    }
  else if (last_expr_value == 0)
    /* There are some cases where this can happen, such as when the
       statement is void type.  */
    last_expr_value = const0_rtx;
  else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
    /* Remove any possible QUEUED.  */
    last_expr_value = protect_from_queue (last_expr_value, 0);

  emit_queue ();

  TREE_TYPE (t) = last_expr_type;
  RTL_EXPR_RTL (t) = last_expr_value;
  RTL_EXPR_SEQUENCE (t) = get_insns ();

  rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);

  end_sequence ();

  /* Don't consider deleting this expr or containing exprs at tree level.  */
  TREE_SIDE_EFFECTS (t) = 1;
  /* Propagate volatility of the actual RTL expr.  */
  TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);

  last_expr_type = 0;
  expr_stmts_for_value--;

  return t;
}
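/* Illustrative only: for the GNU C statement expression

	int y = ({ int t = f (); t * 2; });

   the front end calls expand_start_stmt_expr at `({', expands each
   inner statement with expr_stmts_for_value nonzero, and the value of
   the last expression `t * 2' becomes the RTL_EXPR returned above.  */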
/* The exception handling nesting distinguishes several contexts:
   an exception handler block; code that is "in an exception handler";
   within that, code "in a TRY block"; and, after the TRY block, code
   "in an except block", which is likewise in an exception handler.
   The in_* predicates below test for each context.  */
/* Return nonzero iff in a try block at level LEVEL.  */

int
in_try_block (level)
     int level;
{
  struct nesting *n = except_stack;
  while (1)
    {
      while (n && n->data.except_stmt.after_label != 0)
	n = n->next;
      if (n == 0)
	return 0;
      if (level == 0)
	return 1;
      level--;
      n = n->next;
    }
}

/* Return nonzero iff in an except block at level LEVEL.  */

int
in_except_block (level)
     int level;
{
  struct nesting *n = except_stack;
  while (1)
    {
      while (n && n->data.except_stmt.after_label == 0)
	n = n->next;
      if (n == 0)
	return 0;
      if (level == 0)
	return 1;
      level--;
      n = n->next;
    }
}

/* Return nonzero iff in an exception handler at level LEVEL.  */

int
in_exception_handler (level)
     int level;
{
  struct nesting *n = except_stack;
  while (n && level--)
    n = n->next;
  return n != 0;
}
/* Record the fact that the current exception nesting raises
   exception EX.  If not in an exception handler, return 0.  */

int
expand_raises (ex)
     tree ex;
{
  tree *raises_ptr;

  if (except_stack == 0)
    return 0;
  raises_ptr = &except_stack->data.except_stmt.raised;
  if (! value_member (ex, *raises_ptr))
    *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
  return 1;
}
/* Generate RTL for the start of a try block.

   TRY_CLAUSE is the condition to test to enter the try block.  */

void
expand_start_try (try_clause, exitflag, escapeflag)
     tree try_clause;
     int exitflag;
     int escapeflag;
{
  struct nesting *thishandler = ALLOC_NESTING ();

  /* Make an entry on except_stack for the try block we are entering.  */

  thishandler->next = except_stack;
  thishandler->all = nesting_stack;
  thishandler->depth = ++nesting_depth;
  thishandler->data.except_stmt.raised = 0;
  thishandler->data.except_stmt.handled = 0;
  thishandler->data.except_stmt.first_insn = get_insns ();
  thishandler->data.except_stmt.except_label = gen_label_rtx ();
  thishandler->data.except_stmt.unhandled_label = 0;
  thishandler->data.except_stmt.after_label = 0;
  thishandler->data.except_stmt.escape_label
    = escapeflag ? thishandler->data.except_stmt.except_label : 0;
  thishandler->exit_label = exitflag ? gen_label_rtx () : 0;
  except_stack = thishandler;
  nesting_stack = thishandler;

  do_jump (try_clause, thishandler->data.except_stmt.except_label, NULL_RTX);
}
/* End of a TRY block.  Nothing to do for now.  */

void
expand_end_try ()
{
  except_stack->data.except_stmt.after_label = gen_label_rtx ();
  expand_goto_internal (NULL_TREE, except_stack->data.except_stmt.after_label,
			NULL_RTX);
}
/* Start an `except' nesting contour.
   EXITFLAG says whether this contour should be able to `exit' something.
   ESCAPEFLAG says whether this contour should be escapable.  */

void
expand_start_except (exitflag, escapeflag)
     int exitflag;
     int escapeflag;
{
  struct nesting *n;

  if (exitflag)
    {
      /* An `exit' from catch clauses goes out to next exit level,
	 if there is one.  Otherwise, it just goes to the end
	 of the construct.  */
      for (n = except_stack->next; n; n = n->next)
	if (n->exit_label != 0)
	  {
	    except_stack->exit_label = n->exit_label;
	    break;
	  }
      if (n == 0)
	except_stack->exit_label = except_stack->data.except_stmt.after_label;
    }

  if (escapeflag)
    {
      /* An `escape' from catch clauses goes out to next escape level,
	 if there is one.  Otherwise, it just goes to the end
	 of the construct.  */
      for (n = except_stack->next; n; n = n->next)
	if (n->data.except_stmt.escape_label != 0)
	  {
	    except_stack->data.except_stmt.escape_label
	      = n->data.except_stmt.escape_label;
	    break;
	  }
      if (n == 0)
	except_stack->data.except_stmt.escape_label
	  = except_stack->data.except_stmt.after_label;
    }

  do_pending_stack_adjust ();
  emit_label (except_stack->data.except_stmt.except_label);
}
/* Generate code to `escape' from an exception contour.  This
   is like `exiting', but does not conflict with constructs which
   use `exit_label'.

   Return nonzero if this contour is escapable, otherwise
   return zero, and language-specific code will emit the
   appropriate error message.  */

int
expand_escape_except ()
{
  struct nesting *n;

  for (n = except_stack; n; n = n->next)
    if (n->data.except_stmt.escape_label != 0)
      {
	expand_goto_internal (NULL_TREE,
			      n->data.except_stmt.escape_label, NULL_RTX);
	return 1;
      }

  return 0;
}
/* Finish processing an `except' contour.
   Culls out all exceptions which might be raised but not
   handled, and returns the list to the caller.
   Language-specific code is responsible for dealing with these
   exceptions.  */

tree
expand_end_except ()
{
  struct nesting *n;
  tree raised = NULL_TREE;

  do_pending_stack_adjust ();
  emit_label (except_stack->data.except_stmt.after_label);

  n = except_stack->next;
  if (n)
    {
      /* Propagate exceptions raised but not handled to next
	 outer level.  */
      tree handled = except_stack->data.except_stmt.handled;
      if (handled != void_type_node)
	{
	  tree prev = NULL_TREE;
	  tree this_raise;
	  raised = except_stack->data.except_stmt.raised;
	  while (handled)
	    {
	      for (this_raise = raised, prev = 0; this_raise;
		   this_raise = TREE_CHAIN (this_raise))
		{
		  if (value_member (TREE_VALUE (this_raise), handled))
		    {
		      if (prev)
			TREE_CHAIN (prev) = TREE_CHAIN (this_raise);
		      else
			{
			  raised = TREE_CHAIN (raised);
			  if (raised == NULL_TREE)
			    break;
			}
		    }
		  else
		    prev = this_raise;
		}
	      handled = TREE_CHAIN (handled);
	    }
	  if (prev == NULL_TREE)
	    raised = n->data.except_stmt.raised;
	  else
	    TREE_CHAIN (prev) = n->data.except_stmt.raised;
	  n->data.except_stmt.raised = raised;
	}
    }

  POPSTACK (except_stack);
  return raised;
}
/* Record that exception EX is caught by this exception handler.
   Return nonzero if in exception handling construct, otherwise return 0.  */

int
expand_catch (ex)
     tree ex;
{
  tree *raises_ptr;

  if (except_stack == 0)
    return 0;
  raises_ptr = &except_stack->data.except_stmt.handled;
  if (*raises_ptr != void_type_node
      && ex != NULL_TREE
      && ! value_member (ex, *raises_ptr))
    *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
  return 1;
}
/* Record that this exception handler catches all exceptions.
   Return nonzero if in exception handling construct, otherwise return 0.  */

int
expand_catch_default ()
{
  if (except_stack == 0)
    return 0;
  except_stack->data.except_stmt.handled = void_type_node;
  return 1;
}

/* Jump to the end of the current exception contour.
   Return nonzero if in exception handling construct, otherwise return 0.  */

int
expand_end_catch ()
{
  if (except_stack == 0 || except_stack->data.except_stmt.after_label == 0)
    return 0;
  expand_goto_internal (NULL_TREE, except_stack->data.except_stmt.after_label,
			NULL_RTX);
  return 1;
}
/* Generate RTL for the start of an if-then.  COND is the expression
   whose truth should be tested.

   If EXITFLAG is nonzero, this conditional is visible to
   `exit_something'.  */

void
expand_start_cond (cond, exitflag)
     tree cond;
     int exitflag;
{
  struct nesting *thiscond = ALLOC_NESTING ();

  /* Make an entry on cond_stack for the cond we are entering.  */

  thiscond->next = cond_stack;
  thiscond->all = nesting_stack;
  thiscond->depth = ++nesting_depth;
  thiscond->data.cond.next_label = gen_label_rtx ();
  /* Before we encounter an `else', we don't need a separate exit label
     unless there are supposed to be exit statements
     to exit this conditional.  */
  thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
  thiscond->data.cond.endif_label = thiscond->exit_label;
  cond_stack = thiscond;
  nesting_stack = thiscond;

  do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
}
/* Generate RTL between then-clause and the elseif-clause
   of an if-then-elseif-....  */

void
expand_start_elseif (cond)
     tree cond;
{
  if (cond_stack->data.cond.endif_label == 0)
    cond_stack->data.cond.endif_label = gen_label_rtx ();
  emit_jump (cond_stack->data.cond.endif_label);
  emit_label (cond_stack->data.cond.next_label);
  cond_stack->data.cond.next_label = gen_label_rtx ();
  do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
}
/* Generate RTL between the then-clause and the else-clause
   of an if-then-else.  */

void
expand_start_else ()
{
  if (cond_stack->data.cond.endif_label == 0)
    cond_stack->data.cond.endif_label = gen_label_rtx ();
  emit_jump (cond_stack->data.cond.endif_label);
  emit_label (cond_stack->data.cond.next_label);
  cond_stack->data.cond.next_label = 0;  /* No more _else or _elseif calls.  */
}
/* Generate RTL for the end of an if-then.
   Pop the record for it off of cond_stack.  */

void
expand_end_cond ()
{
  struct nesting *thiscond = cond_stack;

  do_pending_stack_adjust ();
  if (thiscond->data.cond.next_label)
    emit_label (thiscond->data.cond.next_label);
  if (thiscond->data.cond.endif_label)
    emit_label (thiscond->data.cond.endif_label);

  POPSTACK (cond_stack);
}
/* Generate RTL for the start of a loop.  EXIT_FLAG is nonzero if this
   loop should be exited by `exit_something'.  This is a loop for which
   `expand_continue' will jump to the top of the loop.

   Make an entry on loop_stack to record the labels associated with
   this loop.  */

struct nesting *
expand_start_loop (exit_flag)
     int exit_flag;
{
  register struct nesting *thisloop = ALLOC_NESTING ();

  /* Make an entry on loop_stack for the loop we are entering.  */

  thisloop->next = loop_stack;
  thisloop->all = nesting_stack;
  thisloop->depth = ++nesting_depth;
  thisloop->data.loop.start_label = gen_label_rtx ();
  thisloop->data.loop.end_label = gen_label_rtx ();
  thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
  thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
  loop_stack = thisloop;
  nesting_stack = thisloop;

  do_pending_stack_adjust ();
  emit_queue ();
  emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
  emit_label (thisloop->data.loop.start_label);

  return thisloop;
}
/* Like expand_start_loop but for a loop where the continuation point
   (for expand_continue_loop) will be specified explicitly.  */

struct nesting *
expand_start_loop_continue_elsewhere (exit_flag)
     int exit_flag;
{
  struct nesting *thisloop = expand_start_loop (exit_flag);
  loop_stack->data.loop.continue_label = gen_label_rtx ();
  return thisloop;
}
/* Specify the continuation point for a loop started with
   expand_start_loop_continue_elsewhere.
   Use this at the point in the code to which a continue statement
   should jump.  */

void
expand_loop_continue_here ()
{
  do_pending_stack_adjust ();
  emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
  emit_label (loop_stack->data.loop.continue_label);
}
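/* Illustrative only: a front end might expand
   `for (init; cond; step) body' as

	... expand init ...
	expand_start_loop_continue_elsewhere (1);
	expand_exit_loop_if_false (0, cond);
	... expand body ...
	expand_loop_continue_here ();
	... expand step ...
	expand_end_loop ();

   so that `continue' jumps to the step code instead of the top.  */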
/* Finish a loop.  Generate a jump back to the top and the loop-exit label.
   Pop the block off of loop_stack.  */

void
expand_end_loop ()
{
  register rtx insn = get_last_insn ();
  register rtx start_label = loop_stack->data.loop.start_label;
  rtx last_test_insn = 0;
  int num_insns = 0;

  /* Mark the continue-point at the top of the loop if none elsewhere.  */
  if (start_label == loop_stack->data.loop.continue_label)
    emit_note_before (NOTE_INSN_LOOP_CONT, start_label);

  do_pending_stack_adjust ();

  /* If optimizing, perhaps reorder the loop.  If the loop
     starts with a conditional exit, roll that to the end
     where it will optimize together with the jump back.

     We look for the last conditional branch to the exit that we encounter
     before hitting 30 insns or a CALL_INSN.  If we see an unconditional
     branch to the exit first, use it.

     We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
     because moving them is not valid.  */

  if (optimize
      &&
      ! (GET_CODE (insn) == JUMP_INSN
	 && GET_CODE (PATTERN (insn)) == SET
	 && SET_DEST (PATTERN (insn)) == pc_rtx
	 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
    {
      /* Scan insns from the top of the loop looking for a qualified
	 conditional exit.  */
      for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
	   insn = NEXT_INSN (insn))
	{
	  if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
	    break;

	  if (GET_CODE (insn) == NOTE
	      && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
	    break;

	  if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
	    num_insns++;

	  if (last_test_insn && num_insns > 30)
	    break;

	  if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
	      && SET_DEST (PATTERN (insn)) == pc_rtx
	      && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
	      && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
		   && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
		       == loop_stack->data.loop.end_label))
		  || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
		      && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
			  == loop_stack->data.loop.end_label))))
	    last_test_insn = insn;

	  if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
	      && GET_CODE (PATTERN (insn)) == SET
	      && SET_DEST (PATTERN (insn)) == pc_rtx
	      && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
	      && (XEXP (SET_SRC (PATTERN (insn)), 0)
		  == loop_stack->data.loop.end_label))
	    /* Include BARRIER.  */
	    last_test_insn = NEXT_INSN (insn);
	}

      if (last_test_insn != 0 && last_test_insn != get_last_insn ())
	{
	  /* We found one.  Move everything from there up
	     to the end of the loop, and add a jump into the loop
	     to jump to there.  */
	  register rtx newstart_label = gen_label_rtx ();
	  register rtx start_move = start_label;

	  /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
	     then we want to move this note also.  */
	  if (GET_CODE (PREV_INSN (start_move)) == NOTE
	      && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
		  == NOTE_INSN_LOOP_CONT))
	    start_move = PREV_INSN (start_move);

	  emit_label_after (newstart_label, PREV_INSN (start_move));
	  reorder_insns (start_move, last_test_insn, get_last_insn ());
	  emit_jump_insn_after (gen_jump (start_label),
				PREV_INSN (newstart_label));
	  emit_barrier_after (PREV_INSN (newstart_label));
	  start_label = newstart_label;
	}
    }

  emit_jump (start_label);
  emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
  emit_label (loop_stack->data.loop.end_label);

  POPSTACK (loop_stack);
}
2053 /* Generate a jump to the current loop's continue-point.
2054 This is usually the top of the loop, but may be specified
2055 explicitly elsewhere. If not currently inside a loop,
2056 return 0 and do nothing; caller will print an error message. */
2058 int
2059 expand_continue_loop (whichloop)
2060 struct nesting *whichloop;
2064 whichloop = loop_stack;
2067 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2072 /* Generate a jump to exit the current loop. If not currently inside a loop,
2073 return 0 and do nothing; caller will print an error message. */
2075 int
2076 expand_exit_loop (whichloop)
2077 struct nesting *whichloop;
2081 whichloop = loop_stack;
2084 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2088 /* Generate a conditional jump to exit the current loop if COND
2089 evaluates to zero. If not currently inside a loop,
2090 return 0 and do nothing; caller will print an error message. */
2092 int
2093 expand_exit_loop_if_false (whichloop, cond)
2094 struct nesting *whichloop;
2099 whichloop = loop_stack;
2102 do_jump (cond, whichloop->data.loop.end_label, NULL_RTX);
2106 /* Return non-zero if we should preserve sub-expressions as separate
2107 pseudos. We never do so if we aren't optimizing. We always do so
2108 if -fexpensive-optimizations.
2110 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2111 the loop may still be a small one. */
2113 int
2114 preserve_subexpressions_p ()
2118 if (flag_expensive_optimizations)
2121 if (optimize == 0 || loop_stack == 0)
2124 insn = get_last_insn_anywhere ();
2127 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2128 < n_non_fixed_regs * 3));
2132 /* Generate a jump to exit the current loop, conditional, binding contour
2133 or case statement. Not all such constructs are visible to this function,
2134 only those started with EXIT_FLAG nonzero. Individual languages use
2135 the EXIT_FLAG parameter to control which kinds of constructs you can exit this way.
2138 If not currently inside anything that can be exited,
2139 return 0 and do nothing; caller will print an error message. */
2141 int
2142 expand_exit_something ()
2146 for (n = nesting_stack; n; n = n->all)
2147 if (n->exit_label != 0)
2149 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2156 /* Generate RTL to return from the current function, with no value.
2157 (That is, we do not do anything about returning any value.) */
2160 expand_null_return ()
2162 struct nesting *block = block_stack;
2165 /* Does any pending block have cleanups? */
2167 while (block && block->data.block.cleanups == 0)
2168 block = block->next;
2170 /* If yes, use a goto to return, since that runs cleanups. */
2172 expand_null_return_1 (last_insn, block != 0);
2175 /* Generate RTL to return from the current function, with value VAL. */
2178 expand_value_return (val)
2181 struct nesting *block = block_stack;
2182 rtx last_insn = get_last_insn ();
2183 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2185 /* Copy the value to the return location
2186 unless it's already there. */
2188 if (return_reg != val)
2190 #ifdef PROMOTE_FUNCTION_RETURN
2191 enum machine_mode mode = DECL_MODE (DECL_RESULT (current_function_decl));
2192 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2193 int unsignedp = TREE_UNSIGNED (type);
2195 if (TREE_CODE (type) == INTEGER_TYPE || TREE_CODE (type) == ENUMERAL_TYPE
2196 || TREE_CODE (type) == BOOLEAN_TYPE || TREE_CODE (type) == CHAR_TYPE
2197 || TREE_CODE (type) == REAL_TYPE || TREE_CODE (type) == POINTER_TYPE
2198 || TREE_CODE (type) == OFFSET_TYPE)
2200 PROMOTE_MODE (mode, unsignedp, type);
2203 if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
2204 convert_move (return_reg, val, unsignedp);
2207 emit_move_insn (return_reg, val);
2209 if (GET_CODE (return_reg) == REG
2210 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2211 emit_insn (gen_rtx (USE, VOIDmode, return_reg));
2213 /* Does any pending block have cleanups? */
2215 while (block && block->data.block.cleanups == 0)
2216 block = block->next;
2218 /* If yes, use a goto to return, since that runs cleanups.
2219 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2221 expand_null_return_1 (last_insn, block != 0);
2224 /* Output a return with no value. If LAST_INSN is nonzero,
2225 pretend that the return takes place after LAST_INSN.
2226 If USE_GOTO is nonzero then don't use a return instruction;
2227 go to the return label instead. This causes any cleanups
2228 of pending blocks to be executed normally. */
2231 expand_null_return_1 (last_insn, use_goto)
2235 rtx end_label = cleanup_label ? cleanup_label : return_label;
2237 clear_pending_stack_adjust ();
2238 do_pending_stack_adjust ();
2241 /* PCC-struct return always uses an epilogue. */
2242 if (current_function_returns_pcc_struct || use_goto)
2245 end_label = return_label = gen_label_rtx ();
2246 expand_goto_internal (NULL_TREE, end_label, last_insn);
2250 /* Otherwise output a simple return-insn if one is available,
2251 unless it won't do the job. */
2253 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2255 emit_jump_insn (gen_return ());
2261 /* Otherwise jump to the epilogue. */
2262 expand_goto_internal (NULL_TREE, end_label, last_insn);
2265 /* Generate RTL to evaluate the expression RETVAL and return it
2266 from the current function. */
2269 expand_return (retval)
2272 /* If there are any cleanups to be performed, then they will
2273 be inserted following LAST_INSN. It is desirable
2274 that the last_insn, for such purposes, should be the
2275 last insn before computing the return value. Otherwise, cleanups
2276 which call functions can clobber the return value. */
2277 /* ??? rms: I think that is erroneous, because in C++ it would
2278 run destructors on variables that might be used in the subsequent
2279 computation of the return value. */
2281 register rtx val = 0;
2285 struct nesting *block;
2287 /* If function wants no value, give it none. */
2288 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2290 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2292 expand_null_return ();
2296 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2297 cleanups = any_pending_cleanups (1);
2299 if (TREE_CODE (retval) == RESULT_DECL)
2300 retval_rhs = retval;
2301 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2302 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2303 retval_rhs = TREE_OPERAND (retval, 1);
2304 else if (TREE_TYPE (retval) == void_type_node)
2305 /* Recognize tail-recursive call to void function. */
2306 retval_rhs = retval;
2308 retval_rhs = NULL_TREE;
2310 /* Only use `last_insn' if there are cleanups which must be run. */
2311 if (cleanups || cleanup_label != 0)
2312 last_insn = get_last_insn ();
2314 /* Distribute return down conditional expr if either of the sides
2315 may involve tail recursion (see test below). This enhances the number
2316 of tail recursions we see. Don't do this always since it can produce
2317 sub-optimal code in some cases and we distribute assignments into
2318 conditional expressions when it would help. */
2320 if (optimize && retval_rhs != 0
2321 && frame_offset == 0
2322 && TREE_CODE (retval_rhs) == COND_EXPR
2323 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2324 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2326 rtx label = gen_label_rtx ();
2327 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2328 expand_return (build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2329 DECL_RESULT (current_function_decl),
2330 TREE_OPERAND (retval_rhs, 1)));
2332 expand_return (build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2333 DECL_RESULT (current_function_decl),
2334 TREE_OPERAND (retval_rhs, 2)));
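/* In effect, the distribution above compiles

	return COND ? foo (x) : bar (y);

   as if it had been written

	if (COND)
	  return foo (x);
	else
	  return bar (y);

   so that each arm can be examined separately for tail recursion.  */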
2338 /* For tail-recursive call to current function,
2339 just jump back to the beginning.
2340 It's unsafe if any auto variable in this function
2341 has its address taken; for simplicity,
2342 require stack frame to be empty. */
2343 if (optimize && retval_rhs != 0
2344 && frame_offset == 0
2345 && TREE_CODE (retval_rhs) == CALL_EXPR
2346 && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
2347 && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
2348 /* Finish checking validity, and if valid emit code
2349 to set the argument variables for the new call. */
2350 && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
2351 DECL_ARGUMENTS (current_function_decl)))
2353 if (tail_recursion_label == 0)
2355 tail_recursion_label = gen_label_rtx ();
2356 emit_label_after (tail_recursion_label,
2357 tail_recursion_reentry);
2360 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
2365 /* This optimization is safe if there are local cleanups
2366 because expand_null_return takes care of them.
2367 ??? I think it should also be safe when there is a cleanup label,
2368 because expand_null_return takes care of them, too.
2369 Any reason why not? */
2370 if (HAVE_return && cleanup_label == 0
2371 && ! current_function_returns_pcc_struct)
2373 /* If this is return x == y; then generate
2374 if (x == y) return 1; else return 0;
2375 if we can do it with explicit return insns. */
2377 switch (TREE_CODE (retval_rhs))
2385 case TRUTH_ANDIF_EXPR:
2386 case TRUTH_ORIF_EXPR:
2387 case TRUTH_AND_EXPR:
2389 case TRUTH_NOT_EXPR:
2390 case TRUTH_XOR_EXPR:
2391 op0 = gen_label_rtx ();
2392 jumpifnot (retval_rhs, op0);
2393 expand_value_return (const1_rtx);
2395 expand_value_return (const0_rtx);
2399 #endif /* HAVE_return */
2403 && TREE_TYPE (retval_rhs) != void_type_node
2404 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2406 /* Calculate the return value into a pseudo reg. */
2407 val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2409 /* All temporaries have now been used. */
2411 /* Return the calculated value, doing cleanups first. */
2412 expand_value_return (val);
2416 /* No cleanups or no hard reg used;
2417 calculate value into hard return reg. */
2418 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2421 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2425 /* Return 1 if the end of the generated RTX is not a barrier.
2426 This means code already compiled can drop through. */
2429 drop_through_at_end_p ()
2431 rtx insn = get_last_insn ();
2432 while (insn && GET_CODE (insn) == NOTE)
2433 insn = PREV_INSN (insn);
2434 return insn && GET_CODE (insn) != BARRIER;
2437 /* Emit code to alter this function's formal parms for a tail-recursive call.
2438 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2439 FORMALS is the chain of decls of formals.
2440 Return 1 if this can be done;
2441 otherwise return 0 and do not emit any code. */
2444 tail_recursion_args (actuals, formals)
2445 tree actuals, formals;
2447 register tree a = actuals, f = formals;
2449 register rtx *argvec;
2451 /* Check that number and types of actuals are compatible
2452 with the formals. This is not always true in valid C code.
2453 Also check that no formal needs to be addressable
2454 and that all formals are scalars. */
2456 /* Also count the args. */
2458 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2460 if (TREE_TYPE (TREE_VALUE (a)) != TREE_TYPE (f))
2462 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2465 if (a != 0 || f != 0)
2468 /* Compute all the actuals. */
2470 argvec = (rtx *) alloca (i * sizeof (rtx));
2472 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2473 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
2475 /* Find which actual values refer to current values of previous formals.
2476 Copy each of them now, before any formal is changed. */
2478 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2482 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2483 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2484 { copy = 1; break; }
2486 argvec[i] = copy_to_reg (argvec[i]);
2489 /* Store the values of the actuals into the formals. */
2491 for (f = formals, a = actuals, i = 0; f;
2492 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2494 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
2495 emit_move_insn (DECL_RTL (f), argvec[i]);
2497 convert_move (DECL_RTL (f), argvec[i],
2498 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
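/* For example (a sketch): in

	int gcd (x, y)
	     int x, y;
	{
	  if (y == 0)
	    return x;
	  return gcd (y, x % y);
	}

   the second return passes the tests above, so it is compiled as
   stores of the new values into `x' and `y' followed by a jump to
   tail_recursion_label.  When an actual is itself an earlier formal,
   as in a call `f (y, x)', the copy loop above first saves the old
   value in a fresh pseudo so that the stores cannot clobber it.  */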
2505 /* Generate the RTL code for entering a binding contour.
2506 The variables are declared one by one, by calls to `expand_decl'.
2508 EXIT_FLAG is nonzero if this construct should be visible to
2509 `exit_something'. */
2512 expand_start_bindings (exit_flag)
2515 struct nesting *thisblock = ALLOC_NESTING ();
2517 rtx note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
2519 /* Make an entry on block_stack for the block we are entering. */
2521 thisblock->next = block_stack;
2522 thisblock->all = nesting_stack;
2523 thisblock->depth = ++nesting_depth;
2524 thisblock->data.block.stack_level = 0;
2525 thisblock->data.block.cleanups = 0;
2526 thisblock->data.block.function_call_count = 0;
2530 if (block_stack->data.block.cleanups == NULL_TREE
2531 && (block_stack->data.block.outer_cleanups == NULL_TREE
2532 || block_stack->data.block.outer_cleanups == empty_cleanup_list))
2533 thisblock->data.block.outer_cleanups = empty_cleanup_list;
2535 thisblock->data.block.outer_cleanups
2536 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2537 block_stack->data.block.outer_cleanups);
2540 thisblock->data.block.outer_cleanups = 0;
2542 #if 0 /* Disabled older variant of the code just above. */
2543 if (block_stack
2544 && !(block_stack->data.block.cleanups == NULL_TREE
2545 && block_stack->data.block.outer_cleanups == NULL_TREE))
2546 thisblock->data.block.outer_cleanups
2547 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2548 block_stack->data.block.outer_cleanups);
2549 else
2550 thisblock->data.block.outer_cleanups = 0;
2551 #endif
2552 thisblock->data.block.label_chain = 0;
2553 thisblock->data.block.innermost_stack_block = stack_block_stack;
2554 thisblock->data.block.first_insn = note;
2555 thisblock->data.block.block_start_count = ++block_start_count;
2556 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
2557 block_stack = thisblock;
2558 nesting_stack = thisblock;
2560 /* Make a new level for allocating stack slots. */
2564 /* Given a pointer to a BLOCK node, save a pointer to the most recently
2565 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
2569 remember_end_note (block)
2570 register tree block;
2572 BLOCK_END_NOTE (block) = last_block_end_note;
2573 last_block_end_note = NULL_RTX;
2576 /* Generate RTL code to terminate a binding contour.
2577 VARS is the chain of VAR_DECL nodes
2578 for the variables bound in this contour.
2579 MARK_ENDS is nonzero if we should put a note at the beginning
2580 and end of this binding contour.
2582 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
2583 (That is true automatically if the contour has a saved stack level.) */
2586 expand_end_bindings (vars, mark_ends, dont_jump_in)
2591 register struct nesting *thisblock = block_stack;
2595 for (decl = vars; decl; decl = TREE_CHAIN (decl))
2596 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL
2597 && ! DECL_IN_SYSTEM_HEADER (decl))
2598 warning_with_decl (decl, "unused variable `%s'");
2600 if (thisblock->exit_label)
2602 do_pending_stack_adjust ();
2603 emit_label (thisblock->exit_label);
2606 /* If necessary, make a handler for nonlocal gotos taking
2607 place in the function calls in this block. */
2608 if (function_call_count != thisblock->data.block.function_call_count
2610 /* Make handler for outermost block
2611 if there were any nonlocal gotos to this function. */
2612 && (thisblock->next == 0 ? current_function_has_nonlocal_label
2613 /* Make handler for inner block if it has something
2614 special to do when you jump out of it. */
2615 : (thisblock->data.block.cleanups != 0
2616 || thisblock->data.block.stack_level != 0)))
2619 rtx afterward = gen_label_rtx ();
2620 rtx handler_label = gen_label_rtx ();
2621 rtx save_receiver = gen_reg_rtx (Pmode);
2623 /* Don't let jump_optimize delete the handler. */
2624 LABEL_PRESERVE_P (handler_label) = 1;
2626 /* Record the handler address in the stack slot for that purpose,
2627 during this block, saving and restoring the outer value. */
2628 if (thisblock->next != 0)
2630 emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
2631 emit_insn_before (gen_move_insn (save_receiver,
2632 nonlocal_goto_handler_slot),
2633 thisblock->data.block.first_insn);
2635 emit_insn_before (gen_move_insn (nonlocal_goto_handler_slot,
2636 gen_rtx (LABEL_REF, Pmode,
2638 thisblock->data.block.first_insn);
2640 /* Jump around the handler; it runs only when specially invoked. */
2641 emit_jump (afterward);
2642 emit_label (handler_label);
2644 #ifdef HAVE_nonlocal_goto
2645 if (! HAVE_nonlocal_goto)
2647 /* First adjust our frame pointer to its actual value. It was
2648 previously set to the start of the virtual area corresponding to
2649 the stacked variables when we branched here and now needs to be
2650 adjusted to the actual hardware fp value.
2652 Assignments to virtual registers are converted by
2653 instantiate_virtual_regs into the corresponding assignment
2654 to the underlying register (fp in this case) that makes
2655 the original assignment true.
2656 So the following insn will actually be
2657 decrementing fp by STARTING_FRAME_OFFSET. */
2658 emit_move_insn (virtual_stack_vars_rtx, frame_pointer_rtx);
2660 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
2661 if (fixed_regs[ARG_POINTER_REGNUM])
2663 #ifdef ELIMINABLE_REGS
2664 /* If the argument pointer can be eliminated in favor of the
2665 frame pointer, we don't need to restore it. We assume here
2666 that if such an elimination is present, it can always be used.
2667 This is the case on all known machines; if we don't make this
2668 assumption, we do unnecessary saving on many machines. */
2669 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
2672 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
2673 if (elim_regs[i].from == ARG_POINTER_REGNUM
2674 && elim_regs[i].to == FRAME_POINTER_REGNUM)
2677 if (i == sizeof elim_regs / sizeof elim_regs [0])
2680 /* Now restore our arg pointer from the address at which it
2681 was saved in our stack frame.
2682 If there hasn't been space allocated for it yet, make some now. */
2684 if (arg_pointer_save_area == 0)
2685 arg_pointer_save_area
2686 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
2687 emit_move_insn (virtual_incoming_args_rtx,
2688 /* We need a pseudo here, or else
2689 instantiate_virtual_regs_1 complains. */
2690 copy_to_reg (arg_pointer_save_area));
2695 /* The handler expects the desired label address in the static chain
2696 register. It tests the address and does an appropriate jump
2697 to whatever label is desired. */
2698 for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
2699 /* Skip any labels we shouldn't be able to jump to from here. */
2700 if (! DECL_TOO_LATE (TREE_VALUE (link)))
2702 rtx not_this = gen_label_rtx ();
2703 rtx this = gen_label_rtx ();
2704 do_jump_if_equal (static_chain_rtx,
2705 gen_rtx (LABEL_REF, Pmode, DECL_RTL (TREE_VALUE (link))),
2707 emit_jump (not_this);
2709 expand_goto (TREE_VALUE (link));
2710 emit_label (not_this);
2712 /* If label is not recognized, abort. */
2713 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0,
2715 emit_label (afterward);
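/* For illustration: the handler emitted above serves the GNU
   extension of nonlocal gotos from nested functions, as in

	void f ()
	{
	  __label__ done;
	  void g () { goto done; }
	  ...
	done: ;
	}

   `g' puts the address of `done' in the static chain register and
   jumps to the handler; the chain of compares above then dispatches
   to the fixup code for the right label.  */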
2718 /* Don't allow jumping into a block that has cleanups or a stack level. */
2720 || thisblock->data.block.stack_level != 0
2721 || thisblock->data.block.cleanups != 0)
2723 struct label_chain *chain;
2725 /* Any labels in this block are no longer valid to go to.
2726 Mark them to cause an error message. */
2727 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
2729 DECL_TOO_LATE (chain->label) = 1;
2730 /* If any goto without a fixup came to this label,
2731 that must be an error, because gotos without fixups
2732 come from outside all saved stack-levels and all cleanups. */
2733 if (TREE_ADDRESSABLE (chain->label))
2734 error_with_decl (chain->label,
2735 "label `%s' used before containing binding contour");
2739 /* Restore stack level in effect before the block
2740 (only if variable-size objects allocated). */
2741 /* Perform any cleanups associated with the block. */
2743 if (thisblock->data.block.stack_level != 0
2744 || thisblock->data.block.cleanups != 0)
2746 /* Don't let cleanups affect ({...}) constructs. */
2747 int old_expr_stmts_for_value = expr_stmts_for_value;
2748 rtx old_last_expr_value = last_expr_value;
2749 tree old_last_expr_type = last_expr_type;
2750 expr_stmts_for_value = 0;
2752 /* Do the cleanups. */
2753 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE);
2754 do_pending_stack_adjust ();
2756 expr_stmts_for_value = old_expr_stmts_for_value;
2757 last_expr_value = old_last_expr_value;
2758 last_expr_type = old_last_expr_type;
2760 /* Restore the stack level. */
2762 if (thisblock->data.block.stack_level != 0)
2764 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
2765 thisblock->data.block.stack_level, NULL_RTX);
2766 if (nonlocal_goto_handler_slot != 0)
2767 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
2771 /* Any gotos out of this block must also do these things.
2772 Also report any gotos with fixups that came to labels in this
2774 fixup_gotos (thisblock,
2775 thisblock->data.block.stack_level,
2776 thisblock->data.block.cleanups,
2777 thisblock->data.block.first_insn,
2781 /* Mark the beginning and end of the scope if requested.
2782 We do this now, after running cleanups on the variables
2783 just going out of scope, so they are in scope for their cleanups. */
2786 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
2788 /* Get rid of the beginning-mark if we don't make an end-mark. */
2789 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
2791 /* If doing stupid register allocation, make sure lives of all
2792 register variables declared here extend thru end of scope. */
2795 for (decl = vars; decl; decl = TREE_CHAIN (decl))
2797 rtx rtl = DECL_RTL (decl);
2798 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
2802 /* Restore block_stack level for containing block. */
2804 stack_block_stack = thisblock->data.block.innermost_stack_block;
2805 POPSTACK (block_stack);
2807 /* Pop the stack slot nesting and free any slots at this level. */
2811 /* Generate RTL for the automatic variable declaration DECL.
2812 (Other kinds of declarations are simply ignored if seen here.)
2813 CLEANUP is an expression to be executed at exit from this binding contour;
2814 for example, in C++, it might call the destructor for this variable.
2816 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
2817 either before or after calling `expand_decl' but before compiling
2818 any subsequent expressions. This is because CLEANUP may be expanded
2819 more than once, on different branches of execution.
2820 For the same reason, CLEANUP may not contain a CALL_EXPR
2821 except as its topmost node--else `preexpand_calls' would get confused.
2823 If CLEANUP is nonzero and DECL is zero, we record a cleanup
2824 that is not associated with any particular variable.
2826 There is no special support here for C++ constructors.
2827 They should be handled by the proper code in DECL_INITIAL. */
2829 void
2830 expand_decl (decl, cleanup)
2831 tree decl, cleanup;
2832 {
2833 struct nesting *thisblock = block_stack;
2834 tree type = TREE_TYPE (decl);
2836 /* Only automatic variables need any expansion done.
2837 Static and external variables, and external functions,
2838 will be handled by `assemble_variable' (called from finish_decl).
2839 TYPE_DECL and CONST_DECL require nothing.
2840 PARM_DECLs are handled in `assign_parms'. */
2842 if (TREE_CODE (decl) != VAR_DECL)
2844 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
2847 /* Create the RTL representation for the variable. */
2849 if (type == error_mark_node)
2850 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
2851 else if (DECL_SIZE (decl) == 0)
2852 /* Variable with incomplete type. */
2854 if (DECL_INITIAL (decl) == 0)
2855 /* Error message was already done; now avoid a crash. */
2856 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
2858 /* An initializer is going to decide the size of this array.
2859 Until we know the size, represent its address with a reg. */
2860 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
2862 else if (DECL_MODE (decl) != BLKmode
2863 /* If -ffloat-store, don't put explicit float vars
2865 && !(flag_float_store
2866 && TREE_CODE (type) == REAL_TYPE)
2867 && ! TREE_THIS_VOLATILE (decl)
2868 && ! TREE_ADDRESSABLE (decl)
2869 && (DECL_REGISTER (decl) || ! obey_regdecls))
2871 /* Automatic variable that can go in a register. */
2872 enum machine_mode reg_mode = DECL_MODE (decl);
2873 int unsignedp = TREE_UNSIGNED (type);
2875 if (TREE_CODE (type) == INTEGER_TYPE || TREE_CODE (type) == ENUMERAL_TYPE
2876 || TREE_CODE (type) == BOOLEAN_TYPE || TREE_CODE (type) == CHAR_TYPE
2877 || TREE_CODE (type) == REAL_TYPE || TREE_CODE (type) == POINTER_TYPE
2878 || TREE_CODE (type) == OFFSET_TYPE)
2880 PROMOTE_MODE (reg_mode, unsignedp, type);
2883 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
2884 if (TREE_CODE (type) == POINTER_TYPE)
2885 mark_reg_pointer (DECL_RTL (decl));
2886 REG_USERVAR_P (DECL_RTL (decl)) = 1;
2888 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST)
2890 /* Variable of fixed size that goes on the stack. */
2894 /* If we previously made RTL for this decl, it must be an array
2895 whose size was determined by the initializer.
2896 The old address was a register; set that register now
2897 to the proper address. */
2898 if (DECL_RTL (decl) != 0)
2900 if (GET_CODE (DECL_RTL (decl)) != MEM
2901 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
2903 oldaddr = XEXP (DECL_RTL (decl), 0);
2907 = assign_stack_temp (DECL_MODE (decl),
2908 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
2909 + BITS_PER_UNIT - 1)
2913 /* Set alignment we actually gave this decl. */
2914 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
2915 : GET_MODE_BITSIZE (DECL_MODE (decl)));
2919 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
2920 if (addr != oldaddr)
2921 emit_move_insn (oldaddr, addr);
2924 /* If this is a memory ref that contains aggregate components,
2925 mark it as such for cse and loop optimize. */
2926 MEM_IN_STRUCT_P (DECL_RTL (decl))
2927 = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
2928 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
2929 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);
2931 /* If this is in memory because of -ffloat-store,
2932 set the volatile bit, to prevent optimizations from
2933 undoing the effects. */
2934 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
2935 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
2939 /* Dynamic-size object: must push space on the stack. */
2943 /* Record the stack pointer on entry to block, if have
2944 not already done so. */
2945 if (thisblock->data.block.stack_level == 0)
2947 do_pending_stack_adjust ();
2948 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
2949 &thisblock->data.block.stack_level,
2950 thisblock->data.block.first_insn);
2951 stack_block_stack = thisblock;
2954 /* Compute the variable's size, in bytes. */
2955 size = expand_expr (size_binop (CEIL_DIV_EXPR,
2957 size_int (BITS_PER_UNIT)),
2958 NULL_RTX, VOIDmode, 0);
2961 /* This is equivalent to calling alloca. */
2962 current_function_calls_alloca = 1;
2964 /* Allocate space on the stack for the variable. */
2965 address = allocate_dynamic_stack_space (size, NULL_RTX,
2968 if (nonlocal_goto_handler_slot != 0)
2969 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
2971 /* Reference the variable indirect through that rtx. */
2972 DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);
2974 /* If this is a memory ref that contains aggregate components,
2975 mark it as such for cse and loop optimize. */
2976 MEM_IN_STRUCT_P (DECL_RTL (decl))
2977 = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
2978 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
2979 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);
2981 /* Indicate the alignment we actually gave this variable. */
2982 #ifdef STACK_BOUNDARY
2983 DECL_ALIGN (decl) = STACK_BOUNDARY;
2984 #else
2985 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
2986 #endif
2989 if (TREE_THIS_VOLATILE (decl))
2990 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
2991 if (TREE_READONLY (decl))
2992 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
2994 /* If doing stupid register allocation, make sure life of any
2995 register variable starts here, at the start of its scope. */
2998 use_variable (DECL_RTL (decl));
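/* Roughly, the cases handled above come out as follows (a sketch):

	{ int i; ... }		i in a pseudo register, if it qualifies;
	{ char buf[10]; ... }	buf in a fixed stack slot (a MEM);
	{ char vla[n]; ... }	vla in stack space pushed at run time
				(the GNU variable-size array extension).

   In the last case the stack pointer at block entry was saved above,
   so the space is freed again when the binding contour is exited.  */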
3001 /* Emit code to perform the initialization of a declaration DECL. */
3004 expand_decl_init (decl)
3007 int was_used = TREE_USED (decl);
3009 if (TREE_STATIC (decl))
3012 /* Compute and store the initial value now. */
3014 if (DECL_INITIAL (decl) == error_mark_node)
3016 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3017 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3018 || code == POINTER_TYPE)
3019 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3023 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3025 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3026 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3030 /* Don't let the initialization count as "using" the variable. */
3031 TREE_USED (decl) = was_used;
3033 /* Free any temporaries we made while initializing the decl. */
3037 /* CLEANUP is an expression to be executed at exit from this binding contour;
3038 for example, in C++, it might call the destructor for this variable.
3040 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
3041 either before or after calling `expand_decl' but before compiling
3042 any subsequent expressions. This is because CLEANUP may be expanded
3043 more than once, on different branches of execution.
3044 For the same reason, CLEANUP may not contain a CALL_EXPR
3045 except as its topmost node--else `preexpand_calls' would get confused.
3047 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3048 that is not associated with any particular variable. */
3051 expand_decl_cleanup (decl, cleanup)
3054 struct nesting *thisblock = block_stack;
3056 /* Error if we are not in any block. */
3060 /* Record the cleanup if there is one. */
3064 thisblock->data.block.cleanups
3065 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3066 /* If this block has a cleanup, it belongs in stack_block_stack. */
3067 stack_block_stack = thisblock;
3072 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
3073 DECL_ELTS is the list of elements that belong to DECL's type.
3074 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
3077 expand_anon_union_decl (decl, cleanup, decl_elts)
3078 tree decl, cleanup, decl_elts;
3080 struct nesting *thisblock = block_stack;
3083 expand_decl (decl, cleanup);
3084 x = DECL_RTL (decl);
3088 tree decl_elt = TREE_VALUE (decl_elts);
3089 tree cleanup_elt = TREE_PURPOSE (decl_elts);
3090 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
3092 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
3093 instead create a new MEM rtx with the proper mode. */
3094 if (GET_CODE (x) == MEM)
3096 if (mode == GET_MODE (x))
3097 DECL_RTL (decl_elt) = x;
3100 DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0)));
3101 MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
3102 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
3105 else if (GET_CODE (x) == REG)
3107 if (mode == GET_MODE (x))
3108 DECL_RTL (decl_elt) = x;
3110 DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0);
3115 /* Record the cleanup if there is one. */
3118 thisblock->data.block.cleanups
3119 = temp_tree_cons (decl_elt, cleanup_elt,
3120 thisblock->data.block.cleanups);
3122 decl_elts = TREE_CHAIN (decl_elts);
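/* For example, given the anonymous union (as in C++ or GNU C)

	union { int i; float f; };

   `i' and `f' receive rtl that overlays the rtl of the union itself:
   the same address as a MEM in each member's mode, or a SUBREG if the
   union lives in a register.  */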
3126 /* Expand a list of cleanups LIST.
3127 Elements may be expressions or may be nested lists.
3129 If DONT_DO is nonnull, then any list-element
3130 whose TREE_PURPOSE matches DONT_DO is omitted.
3131 This is sometimes used to avoid a cleanup associated with
3132 a value that is being returned out of the scope. */
3135 expand_cleanups (list, dont_do)
3140 for (tail = list; tail; tail = TREE_CHAIN (tail))
3141 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
3143 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3144 expand_cleanups (TREE_VALUE (tail), dont_do);
3147 /* Cleanups may be run multiple times. For example,
3148 when exiting a binding contour, we expand the
3149 cleanups associated with that contour. When a goto
3150 within that binding contour has a target outside that
3151 contour, it will expand all cleanups from its scope to
3152 the target. Though the cleanups are expanded multiple
3153 times, the control paths are non-overlapping so the
3154 cleanups will not be executed twice. */
3155 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
3161 /* Move all cleanups from the current block_stack
3162 to the containing block_stack, where they are assumed to
3163 have been created. If anything can cause a temporary to
3164 be created, but not expanded for more than one level of
3165 block_stacks, then this code will have to change. */
3167 void
3168 move_cleanups_up ()
3169 {
3170 struct nesting *block = block_stack;
3171 struct nesting *outer = block->next;
3173 outer->data.block.cleanups
3174 = chainon (block->data.block.cleanups,
3175 outer->data.block.cleanups);
3176 block->data.block.cleanups = 0;
3179 tree
3180 last_cleanup_this_contour ()
3181 {
3182 if (block_stack == 0)
3185 return block_stack->data.block.cleanups;
3188 /* Return 1 if there are any pending cleanups at this point.
3189 If THIS_CONTOUR is nonzero, check the current contour as well.
3190 Otherwise, look only at the contours that enclose this one. */
3193 any_pending_cleanups (this_contour)
3196 struct nesting *block;
3198 if (block_stack == 0)
3201 if (this_contour && block_stack->data.block.cleanups != NULL)
3203 if (block_stack->data.block.cleanups == 0
3204 && (block_stack->data.block.outer_cleanups == 0
3206 || block_stack->data.block.outer_cleanups == empty_cleanup_list
3211 for (block = block_stack->next; block; block = block->next)
3212 if (block->data.block.cleanups != 0)
3218 /* Enter a case (Pascal) or switch (C) statement.
3219 Push a block onto case_stack and nesting_stack
3220 to accumulate the case-labels that are seen
3221 and to record the labels generated for the statement.
3223 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
3224 Otherwise, this construct is transparent for `exit_something'.
3226 EXPR is the index-expression to be dispatched on.
3227 TYPE is its nominal type. We could simply convert EXPR to this type,
3228 but instead we take short cuts. */
3231 expand_start_case (exit_flag, expr, type, printname)
3237 register struct nesting *thiscase = ALLOC_NESTING ();
3239 /* Make an entry on case_stack for the case we are entering. */
3241 thiscase->next = case_stack;
3242 thiscase->all = nesting_stack;
3243 thiscase->depth = ++nesting_depth;
3244 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
3245 thiscase->data.case_stmt.case_list = 0;
3246 thiscase->data.case_stmt.index_expr = expr;
3247 thiscase->data.case_stmt.nominal_type = type;
3248 thiscase->data.case_stmt.default_label = 0;
3249 thiscase->data.case_stmt.num_ranges = 0;
3250 thiscase->data.case_stmt.printname = printname;
3251 thiscase->data.case_stmt.seenlabel = 0;
3252 case_stack = thiscase;
3253 nesting_stack = thiscase;
3255 do_pending_stack_adjust ();
3257 /* Make sure case_stmt.start points to something that won't
3258 need any transformation before expand_end_case. */
3259 if (GET_CODE (get_last_insn ()) != NOTE)
3260 emit_note (NULL_PTR, NOTE_INSN_DELETED);
3262 thiscase->data.case_stmt.start = get_last_insn ();
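/* For illustration, a sketch (the interleaved parse actions are
   assumed): a C front end might expand

	switch (x) { case 1: ...; default: ...; }

   roughly as

	expand_start_case (1, x, TREE_TYPE (x), "switch statement");
	... pushcase (build_int_2 (1, 0), lab1, &dup) ...
	... pushcase (NULL_TREE, deflab, &dup) for the default ...
	expand_end_case (x);

   where `lab1' and `deflab' are LABEL_DECLs made by the front end
   and `dup' receives any duplicate label.  */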
3265 /* Start a "dummy case statement" within which case labels are invalid
3266 and are not connected to any larger real case statement.
3267 This can be used if you don't want to let a case statement jump
3268 into the middle of certain kinds of constructs. */
3271 expand_start_case_dummy ()
3273 register struct nesting *thiscase = ALLOC_NESTING ();
3275 /* Make an entry on case_stack for the dummy. */
3277 thiscase->next = case_stack;
3278 thiscase->all = nesting_stack;
3279 thiscase->depth = ++nesting_depth;
3280 thiscase->exit_label = 0;
3281 thiscase->data.case_stmt.case_list = 0;
3282 thiscase->data.case_stmt.start = 0;
3283 thiscase->data.case_stmt.nominal_type = 0;
3284 thiscase->data.case_stmt.default_label = 0;
3285 thiscase->data.case_stmt.num_ranges = 0;
3286 case_stack = thiscase;
3287 nesting_stack = thiscase;
3290 /* End a dummy case statement. */
3293 expand_end_case_dummy ()
3295 POPSTACK (case_stack);
3298 /* Return the data type of the index-expression
3299 of the innermost case statement, or null if none. */
3302 case_index_expr_type ()
3305 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
3309 /* Accumulate one case or default label inside a case or switch statement.
3310 VALUE is the value of the case (a null pointer, for a default label).
3312 If not currently inside a case or switch statement, return 1 and do
3313 nothing. The caller will print a language-specific error message.
3314 If VALUE is a duplicate or overlaps, return 2 and do nothing
3315 except store the (first) duplicate node in *DUPLICATE.
3316 If VALUE is out of range, return 3 and do nothing.
3317 If we are jumping into the scope of a cleanup or var-sized array, return 5.
3318 Return 0 on success.
3320 Extended to handle range statements. */
3322 int
3323 pushcase (value, label, duplicate)
3324 register tree value;
3325 register tree label;
3326 tree *duplicate;
3328 register struct case_node **l;
3329 register struct case_node *n;
3333 /* Fail if not inside a real case statement. */
3334 if (! (case_stack && case_stack->data.case_stmt.start))
3337 if (stack_block_stack
3338 && stack_block_stack->depth > case_stack->depth)
3341 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3342 nominal_type = case_stack->data.case_stmt.nominal_type;
3344 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3345 if (index_type == error_mark_node)
3348 /* Convert VALUE to the type in which the comparisons are nominally done. */
3350 value = convert (nominal_type, value);
3352 /* If this is the first label, warn if any insns have been emitted. */
3353 if (case_stack->data.case_stmt.seenlabel == 0)
3356 for (insn = case_stack->data.case_stmt.start;
3358 insn = NEXT_INSN (insn))
3360 if (GET_CODE (insn) == CODE_LABEL)
3362 if (GET_CODE (insn) != NOTE
3363 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3365 warning ("unreachable code at beginning of %s",
3366 case_stack->data.case_stmt.printname);
3371 case_stack->data.case_stmt.seenlabel = 1;
3373 /* Fail if this value is out of range for the actual type of the index
3374 (which may be narrower than NOMINAL_TYPE). */
3375 if (value != 0 && ! int_fits_type_p (value, index_type))
3378 /* Fail if this is a duplicate or overlaps another entry. */
3381 if (case_stack->data.case_stmt.default_label != 0)
3383 *duplicate = case_stack->data.case_stmt.default_label;
3386 case_stack->data.case_stmt.default_label = label;
3390 /* Find the elt in the chain before which to insert the new value,
3391 to keep the chain sorted in increasing order.
3392 But report an error if this element is a duplicate. */
3393 for (l = &case_stack->data.case_stmt.case_list;
3394 /* Keep going past elements distinctly less than VALUE. */
3395 *l != 0 && tree_int_cst_lt ((*l)->high, value);
3400 /* Element we will insert before must be distinctly greater;
3401 overlap means error. */
3402 if (! tree_int_cst_lt (value, (*l)->low))
3404 *duplicate = (*l)->code_label;
3409 /* Add this label to the chain, and succeed.
3410 Copy VALUE so it is on temporary rather than momentary
3411 obstack and will thus survive till the end of the case statement. */
3412 n = (struct case_node *) oballoc (sizeof (struct case_node));
3415 n->high = n->low = copy_node (value);
3416 n->code_label = label;
3420 expand_label (label);
3424 /* Like pushcase but this case applies to all values
3425 between VALUE1 and VALUE2 (inclusive).
3426 The return value is the same as that of pushcase
3427 but there is one additional error code:
3428 4 means the specified range was empty. */
3430 int
3431 pushcase_range (value1, value2, label, duplicate)
3432 register tree value1, value2;
3433 register tree label;
3434 tree *duplicate;
3436 register struct case_node **l;
3437 register struct case_node *n;
3441 /* Fail if not inside a real case statement. */
3442 if (! (case_stack && case_stack->data.case_stmt.start))
3445 if (stack_block_stack
3446 && stack_block_stack->depth > case_stack->depth)
3449 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3450 nominal_type = case_stack->data.case_stmt.nominal_type;
3452 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3453 if (index_type == error_mark_node)
3456 /* If this is the first label, warn if any insns have been emitted. */
3457 if (case_stack->data.case_stmt.seenlabel == 0)
3460 for (insn = case_stack->data.case_stmt.start;
3462 insn = NEXT_INSN (insn))
3464 if (GET_CODE (insn) == CODE_LABEL)
3466 if (GET_CODE (insn) != NOTE
3467 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3469 warning ("unreachable code at beginning of %s",
3470 case_stack->data.case_stmt.printname);
3475 case_stack->data.case_stmt.seenlabel = 1;
3477 /* Convert VALUEs to type in which the comparisons are nominally done. */
3478 if (value1 == 0) /* Negative infinity. */
3479 value1 = TYPE_MIN_VALUE(index_type);
3480 value1 = convert (nominal_type, value1);
3482 if (value2 == 0) /* Positive infinity. */
3483 value2 = TYPE_MAX_VALUE(index_type);
3484 value2 = convert (nominal_type, value2);
3486 /* Fail if these values are out of range. */
3487 if (! int_fits_type_p (value1, index_type))
3490 if (! int_fits_type_p (value2, index_type))
3493 /* Fail if the range is empty. */
3494 if (tree_int_cst_lt (value2, value1))
3497 /* If the bounds are equal, turn this into the one-value case. */
3498 if (tree_int_cst_equal (value1, value2))
3499 return pushcase (value1, label, duplicate);
3501 /* Find the elt in the chain before which to insert the new value,
3502 to keep the chain sorted in increasing order.
3503 But report an error if this element is a duplicate. */
3504 for (l = &case_stack->data.case_stmt.case_list;
3505 /* Keep going past elements distinctly less than this range. */
3506 *l != 0 && tree_int_cst_lt ((*l)->high, value1);
3511 /* Element we will insert before must be distinctly greater;
3512 overlap means error. */
3513 if (! tree_int_cst_lt (value2, (*l)->low))
3515 *duplicate = (*l)->code_label;
3520 /* Add this label to the chain, and succeed.
3521 Copy VALUE1, VALUE2 so they are on temporary rather than momentary
3522 obstack and will thus survive till the end of the case statement. */
3524 n = (struct case_node *) oballoc (sizeof (struct case_node));
3527 n->low = copy_node (value1);
3528 n->high = copy_node (value2);
3529 n->code_label = label;
3532 expand_label (label);
3534 case_stack->data.case_stmt.num_ranges++;
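/* For example, the GNU C case-range extension

	case 1 ... 5:

   arrives here with VALUE1 = 1 and VALUE2 = 5.  A null VALUE1 or
   VALUE2 stands for the minimum or maximum value of the index type,
   as handled above.  */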
3539 /* Called when the index of a switch statement is an enumerated type
3540 and there is no default label.
3542 Checks that all enumeration literals are covered by the case
3543 expressions of a switch. Also, warn if there are any extra
3544 switch cases that are *not* elements of the enumerated type.
3546 If all enumeration literals were covered by the case expressions,
3547 turn one of the expressions into the default expression since it should
3548 not be possible to fall through such a switch. */
3551 check_for_full_enumeration_handling (type)
3554 register struct case_node *n;
3555 register struct case_node **l;
3556 register tree chain;
3559 /* The time complexity of this loop is currently O(N * M), with
3560 N being the number of enumerals in the enumerated type, and
3561 M being the number of case expressions in the switch. */
3563 for (chain = TYPE_VALUES (type);
3565 chain = TREE_CHAIN (chain))
3567 /* Find a match between enumeral and case expression, if possible.
3568 Quit looking when we've gone too far (since case expressions
3569 are kept sorted in ascending order). Warn about enumerals not
3570 handled in the switch statement case expression list. */
3572 for (n = case_stack->data.case_stmt.case_list;
3573 n && tree_int_cst_lt (n->high, TREE_VALUE (chain));
3577 if (!n || tree_int_cst_lt (TREE_VALUE (chain), n->low))
3580 warning ("enumeration value `%s' not handled in switch",
3581 IDENTIFIER_POINTER (TREE_PURPOSE (chain)));
3586 /* Now we go the other way around; we warn if there are case
3587 expressions that don't correspond to enumerals. This can
3588 occur since C and C++ don't enforce type-checking of
3589 assignments to enumeration variables. */
3592 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
3594 for (chain = TYPE_VALUES (type);
3595 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
3596 chain = TREE_CHAIN (chain))
3600 warning ("case value `%d' not in enumerated type `%s'",
3601 TREE_INT_CST_LOW (n->low),
3602 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
3605 : DECL_NAME (TYPE_NAME (type))));
3606 if (!tree_int_cst_equal (n->low, n->high))
3608 for (chain = TYPE_VALUES (type);
3609 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
3610 chain = TREE_CHAIN (chain))
3614 warning ("case value `%d' not in enumerated type `%s'",
3615 TREE_INT_CST_LOW (n->high),
3616 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
3619 : DECL_NAME (TYPE_NAME (type))));
3623 /* If all values were found as case labels, make one of them the default
3624 label. Thus, this switch will never fall through. We arbitrarily pick
3625 the last one to make the default since this is likely the most
3626 efficient choice. */
3630 for (l = &case_stack->data.case_stmt.case_list;
3635 case_stack->data.case_stmt.default_label = (*l)->code_label;
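/* For example, given

	enum color { RED, GREEN, BLUE } c;
	switch (c) { case RED: ...; case GREEN: ...; }

   this function warns that `BLUE' is not handled in the switch.  Had
   all three values been handled, the last case would become the
   default label, so the switch could never fall through.  */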
3640 /* Terminate a case (Pascal) or switch (C) statement
3641 in which ORIG_INDEX is the expression to be tested.
3642 Generate the code to test it and jump to the right place. */
3645 expand_end_case (orig_index)
3648 tree minval, maxval, range;
3649 rtx default_label = 0;
3650 register struct case_node *n;
3653 rtx table_label = gen_label_rtx ();
3658 register struct nesting *thiscase = case_stack;
3659 tree index_expr = thiscase->data.case_stmt.index_expr;
3660 int unsignedp = TREE_UNSIGNED (TREE_TYPE (index_expr));
3662 do_pending_stack_adjust ();
3664 /* An ERROR_MARK occurs for various reasons including invalid data type. */
3665 if (TREE_TYPE (index_expr) != error_mark_node)
3667 /* If switch expression was an enumerated type, check that all
3668 enumeration literals are covered by the cases.
3669 No sense trying this if there's a default case, however. */
3671 if (!thiscase->data.case_stmt.default_label
3672 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
3673 && TREE_CODE (index_expr) != INTEGER_CST)
3674 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
3676 /* If this is the first label, warn if any insns have been emitted. */
3677 if (thiscase->data.case_stmt.seenlabel == 0)
3680 for (insn = get_last_insn ();
3681 insn != case_stack->data.case_stmt.start;
3682 insn = PREV_INSN (insn))
3683 if (GET_CODE (insn) != NOTE
3684 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn))!= USE))
3686 warning ("unreachable code at beginning of %s",
3687 case_stack->data.case_stmt.printname);
3692 /* If we don't have a default-label, create one here,
3693 after the body of the switch. */
3694 if (thiscase->data.case_stmt.default_label == 0)
3696 thiscase->data.case_stmt.default_label
3697 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
3698 expand_label (thiscase->data.case_stmt.default_label);
3700 default_label = label_rtx (thiscase->data.case_stmt.default_label);
3702 before_case = get_last_insn ();
3704 /* Simplify the case-list before we count it. */
3705 group_case_nodes (thiscase->data.case_stmt.case_list);
3707 /* Get upper and lower bounds of case values.
3708 Also convert all the case values to the index expr's data type. */
3711 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
3713 /* Check low and high label values are integers. */
3714 if (TREE_CODE (n->low) != INTEGER_CST)
3716 if (TREE_CODE (n->high) != INTEGER_CST)
3719 n->low = convert (TREE_TYPE (index_expr), n->low);
3720 n->high = convert (TREE_TYPE (index_expr), n->high);
3722 /* Count the elements and track the largest and smallest
3723 of them (treating them as signed even if they are not). */
3731 if (INT_CST_LT (n->low, minval))
3733 if (INT_CST_LT (maxval, n->high))
3736 /* A range counts double, since it requires two compares. */
3737 if (! tree_int_cst_equal (n->low, n->high))
3741 /* Compute span of values. */
3743 range = fold (build (MINUS_EXPR, TREE_TYPE (index_expr),
3746 if (count == 0 || TREE_CODE (TREE_TYPE (index_expr)) == ERROR_MARK)
3748 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
3750 emit_jump (default_label);
3752 /* If range of values is much bigger than number of values,
3753 make a sequence of conditional branches instead of a dispatch.
3754 If the switch-index is a constant, do it this way
3755 because we can optimize it. */
3757 #ifndef CASE_VALUES_THRESHOLD
3758 #ifdef HAVE_casesi
3759 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
3760 #else
3761 /* If machine does not have a case insn that compares the
3762 bounds, this means extra overhead for dispatch tables
3763 which raises the threshold for using them. */
3764 #define CASE_VALUES_THRESHOLD 5
3765 #endif /* HAVE_casesi */
3766 #endif /* CASE_VALUES_THRESHOLD */
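/* For example, `switch (x) { case 1: case 2: case 1000000: ... }'
   spans almost a million values with a count of only 3, so it is
   compiled as a few compare-and-branch insns rather than a huge
   dispatch table.  */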
3768 else if (TREE_INT_CST_HIGH (range) != 0
3769 || count < CASE_VALUES_THRESHOLD
3770 || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
3772 || TREE_CODE (index_expr) == INTEGER_CST
3773 /* These will reduce to a constant. */
3774 || (TREE_CODE (index_expr) == CALL_EXPR
3775 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
3776 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
3777 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
3778 || (TREE_CODE (index_expr) == COMPOUND_EXPR
3779 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
3781 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
3783 /* If the index is a short or char that we do not have
3784 an insn to handle comparisons directly, convert it to
3785 a full integer now, rather than letting each comparison
3786 generate the conversion. */
3788 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
3789 && (cmp_optab->handlers[(int) GET_MODE(index)].insn_code
3790 == CODE_FOR_nothing))
3792 enum machine_mode wider_mode;
3793 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
3794 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
3795 if (cmp_optab->handlers[(int) wider_mode].insn_code
3796 != CODE_FOR_nothing)
3798 index = convert_to_mode (wider_mode, index, unsignedp);
3804 do_pending_stack_adjust ();
3806 index = protect_from_queue (index, 0);
3807 if (GET_CODE (index) == MEM)
3808 index = copy_to_reg (index);
3809 if (GET_CODE (index) == CONST_INT
3810 || TREE_CODE (index_expr) == INTEGER_CST)
3812 /* Make a tree node with the proper constant value
3813 if we don't already have one. */
3814 if (TREE_CODE (index_expr) != INTEGER_CST)
3815 {
3816 index_expr
3817 = build_int_2 (INTVAL (index),
3818 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
3819 index_expr = convert (TREE_TYPE (index_expr), index_expr);
3822 /* For constant index expressions we need only
3823 issue an unconditional branch to the appropriate
3824 target code. The job of removing any unreachable
3825 code is left to the optimization phase if the
3826 "-O" option is specified. */
3827 for (n = thiscase->data.case_stmt.case_list;
3831 if (! tree_int_cst_lt (index_expr, n->low)
3832 && ! tree_int_cst_lt (n->high, index_expr))
3836 emit_jump (label_rtx (n->code_label));
3838 emit_jump (default_label);
3842 /* If the index expression is not constant we generate
3843 a binary decision tree to select the appropriate
3844 target code. This is done as follows:
3846 The list of cases is rearranged into a binary tree,
3847 nearly optimal assuming equal probability for each case.
3849 The tree is transformed into RTL, eliminating
3850 redundant test conditions at the same time.
3852 If program flow could reach the end of the
3853 decision tree an unconditional jump to the
3854 default code is emitted. */
3857 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
3858 && estimate_case_costs (thiscase->data.case_stmt.case_list));
3859 balance_case_nodes (&thiscase->data.case_stmt.case_list,
3861 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
3862 default_label, TREE_TYPE (index_expr));
3863 emit_jump_if_reachable (default_label);
3872 enum machine_mode index_mode = SImode;
3873 int index_bits = GET_MODE_BITSIZE (index_mode);
3875 /* Convert the index to SImode. */
3876 if (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (index_expr)))
3877 > GET_MODE_BITSIZE (index_mode))
3879 enum machine_mode omode = TYPE_MODE (TREE_TYPE (index_expr));
3880 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
3882 /* We must handle the endpoints in the original mode. */
3883 index_expr = build (MINUS_EXPR, TREE_TYPE (index_expr),
3884 index_expr, minval);
3885 minval = integer_zero_node;
3886 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
3887 emit_cmp_insn (rangertx, index, LTU, NULL_RTX, omode, 0, 0);
3888 emit_jump_insn (gen_bltu (default_label));
3889 /* Now we can safely truncate. */
3890 index = convert_to_mode (index_mode, index, 0);
3894 if (TYPE_MODE (TREE_TYPE (index_expr)) != index_mode)
3895 index_expr = convert (type_for_size (index_bits, 0),
3897 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
3900 index = protect_from_queue (index, 0);
3901 do_pending_stack_adjust ();
3903 emit_jump_insn (gen_casesi (index, expand_expr (minval, NULL_RTX,
3905 expand_expr (range, NULL_RTX,
3907 table_label, default_label));
3911 #ifdef HAVE_tablejump
3912 if (! win && HAVE_tablejump)
3914 index_expr = convert (thiscase->data.case_stmt.nominal_type,
3915 fold (build (MINUS_EXPR,
3916 TREE_TYPE (index_expr),
3917 index_expr, minval)));
3918 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
3920 index = protect_from_queue (index, 0);
3921 do_pending_stack_adjust ();
3923 do_tablejump (index, TYPE_MODE (TREE_TYPE (index_expr)),
3924 expand_expr (range, NULL_RTX, VOIDmode, 0),
3925 table_label, default_label);
      /* Get table of labels to jump to, in order of case index.  */

      ncases = TREE_INT_CST_LOW (range) + 1;
      labelvec = (rtx *) alloca (ncases * sizeof (rtx));
      bzero (labelvec, ncases * sizeof (rtx));

      for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
	{
	  register HOST_WIDE_INT i
	    = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (minval);

	  while (1)
	    {
	      labelvec[i]
		= gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
	      if (i + TREE_INT_CST_LOW (minval)
		  == TREE_INT_CST_LOW (n->high))
		break;
	      i++;
	    }
	}

      /* Fill in the gaps with the default.  */
      for (i = 0; i < ncases; i++)
	if (labelvec[i] == 0)
	  labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);
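      /* The sketch below (illustrative only, not part of the compiler
	 proper; compile it separately) shows the resulting layout for
	 `case 1: case 2: case 5:' with minval 1 and range 4: slots
	 without an explicit case point at the default label.  */
#if 0
#include <stdio.h>

int
main ()
{
  char *labelvec[5];		/* ncases = range + 1 = 5 */
  int i;

  for (i = 0; i < 5; i++)
    labelvec[i] = 0;
  labelvec[1 - 1] = "L1";	/* case 1 */
  labelvec[2 - 1] = "L2";	/* case 2 */
  labelvec[5 - 1] = "L5";	/* case 5 */

  /* Fill in the gaps with the default.  */
  for (i = 0; i < 5; i++)
    if (labelvec[i] == 0)
      labelvec[i] = "default";

  for (i = 0; i < 5; i++)
    printf ("index %d -> %s\n", i + 1, labelvec[i]);
  return 0;
}
#endif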
      /* Output the table.  */
      emit_label (table_label);

      /* This would be a lot nicer if CASE_VECTOR_PC_RELATIVE
	 were an expression, instead of an #ifdef/#ifndef.  */

#ifdef CASE_VECTOR_PC_RELATIVE
      emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
			       gen_rtx (LABEL_REF, Pmode, table_label),
			       gen_rtvec_v (ncases, labelvec)));
#else
      emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
			       gen_rtvec_v (ncases, labelvec)));
#endif

      /* If the case insn drops through the table,
	 after the table we must jump to the default-label.
	 Otherwise record no drop-through after the table.  */
#ifdef CASE_DROPS_THROUGH
      emit_jump (default_label);
#else
      emit_barrier ();
#endif
    }
  before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
  reorder_insns (before_case, get_last_insn (),
		 thiscase->data.case_stmt.start);

  if (thiscase->exit_label)
    emit_label (thiscase->exit_label);

  POPSTACK (case_stack);

  free_temp_slots ();
}
/* Generate code to jump to LABEL if OP1 and OP2 are equal.  */

static void
do_jump_if_equal (op1, op2, label, unsignedp)
     rtx op1, op2, label;
     int unsignedp;
{
  if (GET_CODE (op1) == CONST_INT
      && GET_CODE (op2) == CONST_INT)
    {
      if (INTVAL (op1) == INTVAL (op2))
	emit_jump (label);
    }
  else
    {
      enum machine_mode mode = GET_MODE (op1);
      if (mode == VOIDmode)
	mode = GET_MODE (op2);
      emit_cmp_insn (op1, op2, EQ, NULL_RTX, mode, unsignedp, 0);
      emit_jump_insn (gen_beq (label));
    }
}
/* Not all case values are encountered equally.  This function
   uses a heuristic to weight case labels, in cases where that
   looks like a reasonable thing to do.

   Right now, all we try to guess is text, and we establish the
   following weights:

	chars above space:	16
	digits:			16
	default:		12
	space, punct:		8
	tab:			4
	newline:		2
	other "\" chars:	1
	remaining chars:	0

   If we find any cases in the switch that are not either -1 or in the range
   of valid ASCII characters, or are control characters other than those
   commonly used with "\", don't treat this switch as scanning text.

   Return 1 if these nodes are suitable for cost estimation, otherwise
   return 0.  */

static int
estimate_case_costs (node)
     case_node_ptr node;
{
  tree min_ascii = build_int_2 (-1, -1);
  tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
  case_node_ptr n;
  int i;

  /* If we haven't already made the cost table, make it now.  Note that the
     lower bound of the table is -1, not zero.  */

  if (cost_table == NULL)
    {
      cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
      bzero (cost_table - 1, 129 * sizeof (short));

      for (i = 0; i < 128; i++)
	{
	  if (isalnum (i))
	    cost_table[i] = 16;
	  else if (ispunct (i))
	    cost_table[i] = 8;
	  else if (iscntrl (i))
	    cost_table[i] = -1;
	}

      cost_table[' '] = 8;
      cost_table['\t'] = 4;
      cost_table['\0'] = 4;
      cost_table['\n'] = 2;
      cost_table['\f'] = 1;
      cost_table['\v'] = 1;
      cost_table['\b'] = 1;
    }

  /* See if all the case expressions look like text.  It is text if the
     constant is >= -1 and the highest constant is <= 127.  Do all comparisons
     as signed arithmetic since we don't want to ever access cost_table with a
     value less than -1.  Also check that none of the constants in a range
     are strange control characters.  */

  for (n = node; n; n = n->right)
    {
      if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
	return 0;

      for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
	if (cost_table[i] < 0)
	  return 0;
    }

  /* All interesting values are within the range of interesting
     ASCII characters.  */
  return 1;
}
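/* The sketch below (illustrative only, not part of the compiler proper;
   compile it separately) reproduces the cost table built above and
   prints a few sample weights, including the -1 slot that makes EOF a
   legal index.  */
#if 0
#include <stdio.h>
#include <ctype.h>

int
main ()
{
  static short table[129];	/* static, so already zeroed */
  short *cost = table + 1;	/* valid indices run from -1 to 127 */
  int i;

  for (i = 0; i < 128; i++)
    {
      if (isalnum (i))
	cost[i] = 16;
      else if (ispunct (i))
	cost[i] = 8;
      else if (iscntrl (i))
	cost[i] = -1;		/* a "strange" control character */
    }
  cost[' '] = 8;
  cost['\t'] = 4;
  cost['\0'] = 4;
  cost['\n'] = 2;
  cost['\f'] = cost['\v'] = cost['\b'] = 1;

  printf ("'a' -> %d, ' ' -> %d, '\\n' -> %d, EOF (-1) -> %d\n",
	  cost['a'], cost[' '], cost['\n'], cost[-1]);
  return 0;
}
#endif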
/* Scan an ordered list of case nodes
   combining those with consecutive values or ranges.

   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

static void
group_case_nodes (head)
     case_node_ptr head;
{
  case_node_ptr node = head;

  while (node)
    {
      rtx lb = next_real_insn (label_rtx (node->code_label));
      case_node_ptr np = node;

      /* Try to group the successors of NODE with NODE.  */
      while (((np = np->right) != 0)
	     /* Do they jump to the same place?  */
	     && next_real_insn (label_rtx (np->code_label)) == lb
	     /* Are their ranges consecutive?  */
	     && tree_int_cst_equal (np->low,
				    fold (build (PLUS_EXPR,
						 TREE_TYPE (node->high),
						 node->high,
						 integer_one_node)))
	     /* An overflow is not consecutive.  */
	     && tree_int_cst_lt (node->high,
				 fold (build (PLUS_EXPR,
					      TREE_TYPE (node->high),
					      node->high,
					      integer_one_node))))
	{
	  node->high = np->high;
	}
      /* NP is the first node after NODE which can't be grouped with it.
	 Delete the nodes in between, and move on to that node.  */
      node->right = np;
      node = np;
    }
}
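/* The sketch below (illustrative only, not part of the compiler proper;
   compile it separately) applies the same grouping idea to a flat array
   of (low, high, label) triples: entries that share a label and have
   consecutive ranges are merged in place, just as 1: 2: 3: above
   becomes 1..3:  */
#if 0
#include <stdio.h>

struct range { int low, high, label; };

int
main ()
{
  struct range r[] = { {1, 1, 0}, {2, 2, 0}, {3, 3, 0}, {5, 5, 1} };
  int n = 4, i, j = 0;

  for (i = 1; i < n; i++)
    {
      if (r[i].label == r[j].label && r[i].low == r[j].high + 1)
	r[j].high = r[i].high;	/* consecutive: absorb into the group */
      else
	r[++j] = r[i];		/* not groupable: start a new entry */
    }
  n = j + 1;

  for (i = 0; i < n; i++)
    printf ("%d..%d -> L%d\n", r[i].low, r[i].high, r[i].label);
  return 0;
}
#endif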
/* Take an ordered list of case nodes
   and transform them into a near optimal binary tree,
   on the assumption that any target code selection value is as
   likely as any other.

   The transformation is performed by splitting the ordered
   list into two equal sections plus a pivot.  The parts are
   then attached to the pivot as left and right branches.  Each
   branch is then transformed recursively.  */

static void
balance_case_nodes (head, parent)
     case_node_ptr *head;
     case_node_ptr parent;
{
  register case_node_ptr np;

  np = *head;
  if (np)
    {
      int cost = 0;
      int i = 0;
      int ranges = 0;
      register case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on branch.  Also count the ranges.  */
      while (np)
	{
	  if (!tree_int_cst_equal (np->low, np->high))
	    {
	      ranges++;
	      if (use_cost_table)
		cost += cost_table[TREE_INT_CST_LOW (np->high)];
	    }

	  if (use_cost_table)
	    cost += cost_table[TREE_INT_CST_LOW (np->low)];

	  i++;
	  np = np->right;
	}

      if (i > 2)
	{
	  /* Split this list if it is long enough for that to help.  */
	  npp = head;
	  left = *npp;
	  if (use_cost_table)
	    {
	      /* Find the place in the list that bisects the list's total
		 cost; here I gets half the total cost.  */
	      int n_moved = 0;
	      i = (cost + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
		  i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		  n_moved += 1;
		}
	      if (n_moved == 0)
		{
		  /* Leave this branch lopsided, but optimize left-hand
		     side and fill in `parent' fields for right-hand side.  */
		  np = *head;
		  np->parent = parent;
		  balance_case_nodes (&np->left, np);
		  for (; np->right; np = np->right)
		    np->right->parent = np;
		  return;
		}
	    }
	  /* If there are just three nodes, split at the middle one.  */
	  else if (i == 3)
	    npp = &(*npp)->right;
	  else
	    {
	      /* Find the place in the list that bisects the list's total
		 cost, where ranges count as 2; here I gets half the total
		 cost.  */
	      i = (i + ranges + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i--;
		  i--;
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		}
	    }
	  *head = np = *npp;
	  *npp = 0;
	  np->parent = parent;
	  np->left = left;

	  /* Optimize each of the two split parts.  */
	  balance_case_nodes (&np->left, np);
	  balance_case_nodes (&np->right, np);
	}
      else
	{
	  /* Else leave this branch as one level,
	     but fill in `parent' fields.  */
	  np = *head;
	  np->parent = parent;
	  for (; np->right; np = np->right)
	    np->right->parent = np;
	}
    }
}
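/* For example, with uniform weights the ordered list

	1  2  3  5  7..10  14

   (the range counting as 2) is split at 5; the two halves become the
   left and right branches and are balanced in turn:

		 5
		/ \
	       2   7..10
	      / \       \
	     1   3       14
*/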
/* Search the parent sections of the case node tree
   to see if a test for the lower bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node minus one that the current node is bounded at its lower
   span.  Thus the test would be redundant.  */

static int
node_has_low_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree low_minus_one;
  case_node_ptr pnode;

  /* If the lower bound of this node is the lowest value in the index type,
     we need not test it.  */

  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
    return 1;

  /* If this node has a left branch, the value at the left must be less
     than that at this node, so it cannot be bounded at the bottom and
     we need not bother testing any further.  */

  if (node->left)
    return 0;

  low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
			       node->low, integer_one_node));

  /* If the subtraction above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value - 1.  */

  if (! tree_int_cst_lt (low_minus_one, node->low))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (low_minus_one, pnode->high))
      return 1;

  return 0;
}
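/* For example, if this node handles the single value 52 and some parent
   node's case is 51, then whenever control reaches this node the index
   is already known to be unequal to 51 and on this node's side of it,
   so `index >= 52' must hold and the lower-bound test is omitted.  */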
/* Search the parent sections of the case node tree
   to see if a test for the upper bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node plus one that the current node is bounded at its upper
   span.  Thus the test would be redundant.  */

static int
node_has_high_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree high_plus_one;
  case_node_ptr pnode;

  /* If the upper bound of this node is the highest value in the type
     of the index expression, we need not test against it.  */

  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
    return 1;

  /* If this node has a right branch, the value at the right must be greater
     than that at this node, so it cannot be bounded at the top and
     we need not bother testing any further.  */

  if (node->right)
    return 0;

  high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
			       node->high, integer_one_node));

  /* If the addition above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value + 1.  */

  if (! tree_int_cst_lt (node->high, high_plus_one))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (high_plus_one, pnode->low))
      return 1;

  return 0;
}
/* Search the parent sections of the
   case node tree to see if both tests for the upper and lower
   bounds of NODE would be redundant.  */

static int
node_is_bounded (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  return (node_has_low_bound (node, index_type)
	  && node_has_high_bound (node, index_type));
}
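/* For example, given `case -128 ... 0:' and `case 1 ... 127:' on a
   signed char index, the second node has a low bound (the first node's
   high value is 0) and a high bound (127 is TYPE_MAX_VALUE of the index
   type), so emit_case_nodes can jump straight to its label with no
   comparison at all.  */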
/* Emit an unconditional jump to LABEL unless it would be dead code.  */

static void
emit_jump_if_reachable (label)
     rtx label;
{
  if (GET_CODE (get_last_insn ()) != BARRIER)
    emit_jump (label);
}
/* Emit step-by-step code to select a case for the value of INDEX.
   The thus generated decision tree follows the form of the
   case-node binary tree NODE, whose nodes represent test conditions.
   INDEX_TYPE is the type of the index of the switch.

   Care is taken to prune redundant tests from the decision tree
   by detecting any boundary conditions already checked by
   emitted rtx.  (See node_has_high_bound, node_has_low_bound
   and node_is_bounded, above.)

   Where the test conditions can be shown to be redundant we emit
   an unconditional jump to the target code.  As a further
   optimization, the subordinates of a tree node are examined to
   check for bounded nodes.  In this case conditional and/or
   unconditional jumps as a result of the boundary check for the
   current node are arranged to target the subordinates' associated
   code for out-of-bound conditions on the current node.

   We can assume that when control reaches the code generated here,
   the index value has already been compared with the parents
   of this node, and determined to be on the same side of each parent
   as this node is.  Thus, if this node tests for the value 51,
   and a parent tested for 52, we don't need to consider
   the possibility of a value greater than 51.  If another parent
   tests for the value 50, then this node need not test anything.  */
static void
emit_case_nodes (index, node, default_label, index_type)
     rtx index;
     case_node_ptr node;
     rtx default_label;
     tree index_type;
{
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
  int unsignedp = TREE_UNSIGNED (index_type);
  typedef rtx rtx_function ();
  rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
  rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
  rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
  rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
  enum machine_mode mode = GET_MODE (index);

  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));

  else if (tree_int_cst_equal (node->low, node->high))
    {
      /* Node is single valued.  First see if the index expression matches
	 this node and then check our children, if any.  */

      do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
			label_rtx (node->code_label), unsignedp);

      if (node->right != 0 && node->left != 0)
	{
	  /* This node has children on both sides.
	     Dispatch to one side or the other
	     by comparing the index value with this node's value.
	     If one subtree is bounded, check that one first,
	     so we can avoid real branches in the tree.  */

	  if (node_is_bounded (node->right, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);

	      emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
	      emit_case_nodes (index, node->left, default_label, index_type);
	    }

	  else if (node_is_bounded (node->left, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     LT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }

	  else
	    {
	      /* Neither node is bounded.  First distinguish the two sides;
		 then emit the code for one side at a time.  */

	      tree test_label
		= build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

	      /* See if the value is on the right.  */
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));

	      /* Value must be on the left.
		 Handle the left-hand subtree.  */
	      emit_case_nodes (index, node->left, default_label, index_type);
	      /* If left-hand subtree does nothing,
		 go to default.  */
	      emit_jump_if_reachable (default_label);

	      /* Code branches here for the right-hand subtree.  */
	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
	  /* Here we have a right child but no left so we issue a conditional
	     branch to default and process the right child.

	     Omit the conditional branch to default if it would guard only
	     one right child; it costs too much space to save so little
	     time.  */

	  if (node->right->right || node->right->left
	      || !tree_int_cst_equal (node->right->low, node->right->high))
	    {
	      if (!node_has_low_bound (node, index_type))
		{
		  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
				 LT, NULL_RTX, mode, unsignedp, 0);
		  emit_jump_insn ((*gen_blt_pat) (default_label));
		}

	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	  else
	    /* We cannot process node->right normally
	       since we haven't ruled out the numbers less than
	       this node's value.  So handle node->right explicitly.  */
	    do_jump_if_equal (index,
			      expand_expr (node->right->low, NULL_RTX,
					   VOIDmode, 0),
			      label_rtx (node->right->code_label), unsignedp);
	}

      else if (node->right == 0 && node->left != 0)
	{
	  /* Just one subtree, on the left.  */

#if 0 /* The following code and comment were formerly part
	 of the condition here, but they didn't work
	 and I don't understand what the idea was.  -- rms.  */
	  /* If our "most probable entry" is less probable
	     than the default label, emit a jump to
	     the default label using condition codes
	     already lying around.  With no right branch,
	     a branch-greater-than will get us to the default
	     label correctly.  */
	  if (use_cost_table
	      && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
	    ;
#endif /* 0 */
	  if (node->left->left || node->left->right
	      || !tree_int_cst_equal (node->left->low, node->left->high))
	    {
	      if (!node_has_high_bound (node, index_type))
		{
		  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
				 GT, NULL_RTX, mode, unsignedp, 0);
		  emit_jump_insn ((*gen_bgt_pat) (default_label));
		}

	      emit_case_nodes (index, node->left, default_label, index_type);
	    }
	  else
	    /* We cannot process node->left normally
	       since we haven't ruled out the numbers greater than
	       this node's value.  So handle node->left explicitly.  */
	    do_jump_if_equal (index,
			      expand_expr (node->left->low, NULL_RTX,
					   VOIDmode, 0),
			      label_rtx (node->left->code_label), unsignedp);
	}
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
	 value, except that we do not start by testing whether this node
	 is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
	{
	  /* Node has subtrees on both sides.
	     If the right-hand subtree is bounded,
	     test for it first, since we can go straight there.
	     Otherwise, we need to make a branch in the control structure,
	     then handle the two subtrees.  */
	  tree test_label = 0;

	  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
					     VOIDmode, 0),
			 GT, NULL_RTX, mode, unsignedp, 0);

	  if (node_is_bounded (node->right, index_type))
	    /* Right hand node is fully bounded so we can eliminate any
	       testing and branch directly to the target code.  */
	    emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
	  else
	    /* Right hand node requires testing.
	       Branch to a label where we will handle it later.  */
	    {
	      test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
	      emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
			 GE, NULL_RTX, mode, unsignedp, 0);
	  emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));

	  /* Handle the left-hand subtree.  */
	  emit_case_nodes (index, node->left, default_label, index_type);

	  /* If right node had to be handled later, do that now.  */
	  if (test_label)
	    {
	      /* If the left-hand subtree fell through,
		 don't let it fall into the right-hand subtree.  */
	      emit_jump_if_reachable (default_label);

	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
	  /* Deal with values to the left of this node,
	     if they are possible.  */
	  if (!node_has_low_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
						 VOIDmode, 0),
			     LT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_blt_pat) (default_label));
	    }

	  /* Value belongs to this node or to the right-hand subtree.  */

	  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
					     VOIDmode, 0),
			 LE, NULL_RTX, mode, unsignedp, 0);
	  emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));

	  emit_case_nodes (index, node->right, default_label, index_type);
	}

      else if (node->right == 0 && node->left != 0)
	{
	  /* Deal with values to the right of this node,
	     if they are possible.  */
	  if (!node_has_high_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_bgt_pat) (default_label));
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
			 GE, NULL_RTX, mode, unsignedp, 0);
	  emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));

	  emit_case_nodes (index, node->left, default_label, index_type);
	}

      else
	{
	  /* Node has no children so we check low and high bounds to remove
	     redundant tests.  Only one of the bounds can exist,
	     since otherwise this node is bounded--a case tested already.  */

	  if (!node_has_high_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_bgt_pat) (default_label));
	    }

	  if (!node_has_low_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
						 VOIDmode, 0),
			     LT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_blt_pat) (default_label));
	    }

	  emit_jump (label_rtx (node->code_label));
	}
    }
}
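/* For example, for `case 1: case 3: case 5:' the balanced tree has 3 at
   the root, and the walk above emits the equivalent of

	if (index == 3) goto L3;
	if (index > 3) goto T;
	if (index == 1) goto L1;
	goto default;
     T: if (index == 5) goto L5;
	goto default;

   where the final `goto default' comes from the emit_jump_if_reachable
   call in expand_end_case.  */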
/* These routines are used by the loop unrolling code.  They copy BLOCK trees
   so that the debugging info will be correct for the unrolled loop.  */

/* Indexed by block number, contains a pointer to the N'th block node.  */

static tree *block_vector;

void
find_loop_tree_blocks ()
{
  tree block = DECL_INITIAL (current_function_decl);

  /* The first block is for the function body, and does not have
     corresponding block notes.  Don't include it in the block vector.  */
  block = BLOCK_SUBBLOCKS (block);

  block_vector = identify_blocks (block, get_insns ());
}

void
unroll_block_trees ()
{
  tree block = DECL_INITIAL (current_function_decl);

  reorder_blocks (block_vector, block, get_insns ());
}
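/* The intended calling convention (the actual caller lives outside this
   file, typically in the compiler driver around the loop pass; shown
   here only as a sketch):

	find_loop_tree_blocks ();	-- record BLOCKs by block number
	... unroll loops, copying insns and their block notes ...
	unroll_block_trees ();		-- rebuild the BLOCK tree to match

   find_loop_tree_blocks must run before any insns are copied, while
   block_vector still reflects the original NOTE_INSN_BLOCK notes.  */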