/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 92-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
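/* Illustrative call sequence, not part of the compiler itself: a front
   end expanding `if (c) s1; else s2;' would call, in order,

       expand_start_cond (c, 0);    -- emit code to test C
         ...expand statement S1...
       expand_start_else ();        -- jump past the else-clause
         ...expand statement S2...
       expand_end_cond ();          -- emit the end labels

   using the entry points defined later in this file.  */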
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "hard-reg-set.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;
/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;
/* Each time we expand the end of a binding contour (in `expand_end_bindings')
   and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
   This is used by the `remember_end_note' function to record the endpoint
   of each generated block in its associated BLOCK node.  */

static rtx last_block_end_note;

/* Number of binding contours started so far in this function.  */

int block_start_count;
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
struct case_node
{
  struct case_node *left;   /* Left son in binary tree */
  struct case_node *right;  /* Right son in binary tree; also node chain */
  struct case_node *parent; /* Parent of node in binary tree */
  tree low;                 /* Lowest index value for this label */
  tree high;                /* Highest index value for this label */
  tree code_label;          /* Label to jump to when node matches */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
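/* For example (a sketch, using the GNU C case-range extension):
   `case 4 ... 7:' becomes one case_node with LOW == 4 and HIGH == 7,
   while a plain `case 4:' becomes a node with LOW == HIGH == 4.  */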
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
        {
          /* Label for the end of the if construct.
             There is none if EXITFLAG was not set
             and no `else' has been seen yet.  */
          rtx endif_label;
          /* Label for the end of this alternative.
             This may be the end of the if or the next else/elseif.  */
          rtx next_label;
        } cond;
      /* For loops.  */
      struct
        {
          /* Label at the top of the loop; place to loop back to.  */
          rtx start_label;
          /* Label at the end of the whole construct.  */
          rtx end_label;
          /* Label before a jump that branches to the end of the whole
             construct.  This is where destructors go if any.  */
          rtx alt_end_label;
          /* Label for `continue' statement to jump to;
             this is in front of the stepper of the loop.  */
          rtx continue_label;
        } loop;
      /* For variable binding contours.  */
      struct
        {
          /* Sequence number of this binding contour within the function,
             in order of entry.  */
          int block_start_count;
          /* Nonzero => value to restore stack to on exit.  */
          rtx stack_level;
          /* The NOTE that starts this contour.
             Used by expand_goto to check whether the destination
             is within each contour or not.  */
          rtx first_insn;
          /* Innermost containing binding contour that has a stack level.  */
          struct nesting *innermost_stack_block;
          /* List of cleanups to be run on exit from this contour.
             This is a list of expressions to be evaluated.
             The TREE_PURPOSE of each link is the ..._DECL node
             which the cleanup pertains to.  */
          tree cleanups;
          /* List of cleanup-lists of blocks containing this block,
             as they were at the locus where this block appears.
             There is an element for each containing block,
             ordered innermost containing block first.
             The tail of this list can be 0,
             if all remaining elements would be empty lists.
             The element's TREE_VALUE is the cleanup-list of that block,
             which may be null.  */
          tree outer_cleanups;
          /* Chain of labels defined inside this binding contour.
             For contours that have stack levels or cleanups.  */
          struct label_chain *label_chain;
          /* Number of function calls seen, as of start of this block.  */
          int function_call_count;
          /* Nonzero if this is associated with an EH region.  */
          int exception_region;
          /* The saved target_temp_slot_level from our outer block.
             We may reset target_temp_slot_level to be the level of
             this block; if that is done, target_temp_slot_level
             reverts to the saved target_temp_slot_level at the very
             end of the block.  */
          int target_temp_slot_level;
          /* True if we are currently emitting insns in an area of
             output code that is controlled by a conditional
             expression.  This is used by the cleanup handling code to
             generate conditional cleanup actions.  */
          int conditional_code;
          /* A place to move the start of the exception region for any
             of the conditional cleanups, must be at the end or after
             the start of the last unconditional cleanup, and before any
             conditional branch points.  */
          rtx last_unconditional_cleanup;
          /* When in a conditional context, this is the specific
             cleanup list associated with last_unconditional_cleanup,
             where we place the conditionalized cleanups.  */
          tree *cleanup_ptr;
        } block;
      /* For switch (C) or case (Pascal) statements,
         and also for dummies (see `expand_start_case_dummy').  */
      struct
        {
          /* The insn after which the case dispatch should finally
             be emitted.  Zero for a dummy.  */
          rtx start;
          /* A list of case labels; it is first built as an AVL tree.
             During expand_end_case, this is converted to a list, and may be
             rearranged into a nearly balanced binary tree.  */
          struct case_node *case_list;
          /* Label to jump to if no case matches.  */
          tree default_label;
          /* The expression to be dispatched on.  */
          tree index_expr;
          /* Type that INDEX_EXPR should be converted to.  */
          tree nominal_type;
          /* Number of range exprs in case statement.  */
          int num_ranges;
          /* Name of this kind of statement, for warnings.  */
          const char *printname;
          /* Used to save no_line_numbers till we see the first case label.
             We set this to -1 when we see the first case label in this
             case statement.  */
          int line_number_status;
        } case_stmt;
    } data;
};
/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* If any new stacks are added here, add them to POPSTACKS too.  */

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;
/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */
#define POPSTACK(STACK)                                  \
do { struct nesting *target = STACK;                     \
     struct nesting *this;                               \
     do { this = nesting_stack;                          \
          if (loop_stack == this)                        \
            loop_stack = loop_stack->next;               \
          if (cond_stack == this)                        \
            cond_stack = cond_stack->next;               \
          if (block_stack == this)                       \
            block_stack = block_stack->next;             \
          if (stack_block_stack == this)                 \
            stack_block_stack = stack_block_stack->next; \
          if (case_stack == this)                        \
            case_stack = case_stack->next;               \
          nesting_depth = nesting_stack->depth - 1;      \
          nesting_stack = this->all;                     \
          obstack_free (&stmt_obstack, this); }          \
     while (this != target); } while (0)
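/* For example, `POPSTACK (cond_stack);' (see expand_end_cond, below)
   frees every nesting object above and including the innermost
   conditional, unlinking each one from whichever per-construct stacks
   it appears on.  */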
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

static struct goto_fixup *goto_fixup_chain;
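/* Sketch of when a fixup arises, assuming a C front end and a
   variable-length array:

       {
         int v[n];
         ...
         goto done;   -- `done' not yet defined, and this contour has a
                         stack level to restore: expand_fixup queues an
                         entry on goto_fixup_chain
       }
       done: ;        -- once the label is defined, fixup_gotos inserts
                         the stack restore and cleanups before the jump
   */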
/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};

/* Non-zero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;
static int n_occurrences                 PROTO((int, const char *));
static void expand_goto_internal         PROTO((tree, rtx, rtx));
static int expand_fixup                  PROTO((tree, rtx, rtx));
static rtx expand_nl_handler_label       PROTO((rtx, rtx));
static void expand_nl_goto_receiver      PROTO((void));
static void expand_nl_goto_receivers     PROTO((struct nesting *));
static void fixup_gotos                  PROTO((struct nesting *, rtx, tree,
                                                rtx, int));
static void expand_null_return_1         PROTO((rtx, int));
static void expand_value_return          PROTO((rtx));
static int tail_recursion_args           PROTO((tree, tree));
static void expand_cleanups              PROTO((tree, tree, int, int));
static void check_seenlabel              PROTO((void));
static void do_jump_if_equal             PROTO((rtx, rtx, rtx, int));
static int estimate_case_costs           PROTO((case_node_ptr));
static void group_case_nodes             PROTO((case_node_ptr));
static void balance_case_nodes           PROTO((case_node_ptr *,
                                                case_node_ptr));
static int node_has_low_bound            PROTO((case_node_ptr, tree));
static int node_has_high_bound           PROTO((case_node_ptr, tree));
static int node_is_bounded               PROTO((case_node_ptr, tree));
static void emit_jump_if_reachable       PROTO((rtx));
static void emit_case_nodes              PROTO((rtx, case_node_ptr, rtx, tree));
static int add_case_node                 PROTO((tree, tree, tree, tree *));
static struct case_node *case_tree2list  PROTO((case_node *, case_node *));
void
using_eh_for_cleanups ()
{
  using_eh_for_cleanups_p = 1;
}

void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
}

void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;

  init_eh_for_function ();
}
/* Save the statement-expansion state of the current function into P.  */

void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
}

/* Restore the statement-expansion state from P.  */

void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
}
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (! optimize
      && (GET_CODE (last_insn) == CODE_LABEL
          || (GET_CODE (last_insn) == NOTE
              && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}
/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}
/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  x = convert_memory_address (Pmode, x);
#endif

  /* Be sure the function is executable.  */
  if (current_function_check_memory_usage)
    emit_library_call (chkr_check_exec_libfunc, 1,
                       VOIDmode, 1, x, ptr_mode);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);

  current_function_has_computed_jump = 1;
}
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}
/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slots == 0)
    {
      emit_stack_save (SAVE_NONLOCAL,
                       &nonlocal_goto_stack_level,
                       PREV_INSN (tail_recursion_reentry));
    }
  nonlocal_goto_handler_slots
    = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
}
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
      rtx temp, handler_slot;
      tree link;

      /* Find the corresponding handler slot for this label.  */
      handler_slot = p->nonlocal_goto_handler_slots;
      for (link = p->nonlocal_labels; TREE_VALUE (link) != label;
           link = TREE_CHAIN (link))
        handler_slot = XEXP (handler_slot, 1);
      handler_slot = XEXP (handler_slot, 0);

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
         case the virtual stack vars register gets instantiated differently
         in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
        emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
                                      copy_rtx (handler_slot),
                                      copy_rtx (p->nonlocal_goto_stack_level),
                                      label_ref));
      else
#endif
        {
          rtx addr;

          /* Restore frame pointer for containing function.
             This sets the actual hard register used for the frame pointer
             to the location of the function's incoming static chain info.
             The non-local goto handler will then adjust it to contain the
             proper value and reload the argument pointer, if needed.  */
          emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));

          /* We have now loaded the frame pointer hardware register with
             the address that corresponds to the start of the virtual
             stack vars.  So replace virtual_stack_vars_rtx in all
             addresses we use with hard_frame_pointer_rtx.  */

          /* Get addr of containing function's current nonlocal goto handler,
             which will do any cleanups and then jump to the label.  */
          addr = copy_rtx (handler_slot);
          temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
                                           hard_frame_pointer_rtx));

          /* Restore the stack pointer.  Note this uses fp just restored.  */
          addr = p->nonlocal_goto_stack_level;
          if (addr)
            addr = replace_rtx (copy_rtx (addr),
                                virtual_stack_vars_rtx,
                                hard_frame_pointer_rtx);

          emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

          /* USE of hard_frame_pointer_rtx added for consistency; not clear if
             really needed.  */
          emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
          emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
          emit_indirect_jump (temp);
        }
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
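/* For reference (a sketch, not compiler code): the nonlocal path above
   is taken in GNU C when a nested function jumps to a label belonging
   to its containing function:

       void outer ()
       {
         __label__ done;
         void inner () { goto done; }   -- nonlocal goto
         inner ();
        done: ;
       }
   */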
/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
         (Check containment by comparing insn-uids.)
         Then restore the outermost stack level within that block,
         and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
        {
          if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
            break;
          if (block->data.block.stack_level != 0)
            stack_level = block->data.block.stack_level;
          /* Execute the cleanups for blocks we are exiting.  */
          if (block->data.block.cleanups != 0)
            {
              expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
              do_pending_stack_adjust ();
            }
        }

      if (stack_level)
        {
          /* Ensure stack adjust isn't done by emit_jump, as this
             would clobber the stack pointer.  This one should be
             deleted as dead by flow.  */
          clear_pending_stack_adjust ();
          do_pending_stack_adjust ();
          emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
        }

      if (body != 0 && DECL_TOO_LATE (body))
        error ("jump to `%s' invalidly jumps into binding contour",
               IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
         of at least one goto that has no fixup.  */
      if (body != 0)
        TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */
static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
          || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
           && (rtl_label == loop_stack->data.loop.start_label
               || rtl_label == loop_stack->data.loop.end_label
               || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
         If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
        next_block = next_block->all;

      if (next_block)
        return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
         which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
        if (block == next_block)
          next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
        || block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
        = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
         after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
         doing the stack adjust here isn't useful.  Doing it with our
         settings at that location isn't useful either.  Let's hope
         someone does it!  */
      if (last_insn == 0)
        do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
         NOTE_INSN_BEGIN_BLOCK and NOTE_INSN_END_BLOCK notes at
         this point.  The notes will encapsulate any and all fixup
         code which we might later insert at this point in the insn
         stream.  Also, the BLOCK node will be the parent (i.e. the
         `SUPERBLOCK') of any other BLOCK nodes which we might create
         later on when we are expanding the fixup code.

         Note that optimization passes (including expand_end_loop)
         might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
         as a placemarker as well.  */

      {
        register rtx original_before_jump
          = last_insn ? last_insn : get_last_insn ();
        rtx start;

        start_sequence ();
        pushlevel (0);
        start = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
        fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_DELETED);
        last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
        fixup->context = poplevel (1, 0, 0);  /* Create the BLOCK node now!  */
        end_sequence ();
        emit_insns_after (start, original_before_jump);
      }

      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
        = ((block->data.block.outer_cleanups
            || block->data.block.cleanups)
           ? tree_cons (NULL_TREE, block->data.block.cleanups,
                        block->data.block.outer_cleanups)
           : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
}
/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */
static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
        {
          /* Delete inactive fixup from the chain, if that is easy to do.  */
          if (prev != 0)
            prev->next = f->next;
        }
      /* Has this fixup's target label been defined?
         If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
        {
          register rtx cleanup_insns;

          /* Get the first non-label after the label
             this goto jumps to.  If that's before this scope begins,
             we don't have a jump into the scope.  */
          rtx after_label = f->target_rtl;
          while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
            after_label = NEXT_INSN (after_label);

          /* If this fixup jumped into this contour from before the beginning
             of this contour, report an error.  */
          /* ??? Bug: this does not detect jumping in through intermediate
             blocks that have stack levels or cleanups.
             It detects only a problem with the innermost block
             around the label.  */
          if (f->target != 0
              && (dont_jump_in || stack_level || cleanup_list)
              /* If AFTER_LABEL is 0, it means the jump goes to the end
                 of the rtl, which means it jumps into this scope.  */
              && (after_label == 0
                  || INSN_UID (first_insn) < INSN_UID (after_label))
              && INSN_UID (first_insn) > INSN_UID (f->before_jump)
              && ! DECL_ERROR_ISSUED (f->target))
            {
              error_with_decl (f->target,
                               "label `%s' used before containing binding contour");
              /* Prevent multiple errors for one label.  */
              DECL_ERROR_ISSUED (f->target) = 1;
            }

          /* We will expand the cleanups into a sequence of their own and
             then later on we will attach this new sequence to the insn
             stream just ahead of the actual jump insn.  */

          start_sequence ();

          /* Temporarily restore the lexical context where we will
             logically be inserting the fixup code.  We do this for the
             sake of getting the debugging information right.  */

          pushlevel (0);
          set_block (f->context);

          /* Expand the cleanups for blocks this jump exits.  */
          if (f->cleanup_list_list)
            {
              tree lists;
              for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
                /* Marked elements correspond to blocks that have been closed.
                   Do their cleanups.  */
                if (TREE_ADDRESSABLE (lists)
                    && TREE_VALUE (lists) != 0)
                  {
                    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
                    /* Pop any pushes done in the cleanups,
                       in case function is about to return.  */
                    do_pending_stack_adjust ();
                  }
            }

          /* Restore stack level for the biggest contour that this
             jump jumps out of.  */
          if (f->stack_level)
            emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

          /* Finish up the sequence containing the insns which implement the
             necessary cleanups, and then attach that whole sequence to the
             insn stream just ahead of the actual jump insn.  Attaching it
             at that point insures that any cleanups which are in fact
             implicit C++ object destructions (which must be executed upon
             leaving the block) appear (to the debugger) to be taking place
             in an area of the generated code where the object(s) being
             destructed are still "in scope".  */

          cleanup_insns = get_insns ();
          poplevel (1, 0, 0);

          end_sequence ();
          emit_insns_after (cleanup_insns, f->before_jump);

          f->before_jump = 0;
        }
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
        && PREV_INSN (f->target_rtl) == 0
        /* Label has still not appeared.  If we are exiting a block with
           a stack level to restore, that started before the fixup,
           mark this stack level as needing restoration
           when the fixup is later finalized.  */
        && thisblock != 0
        /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
           means the label is undefined.  That's erroneous, but possible.  */
        && (thisblock->data.block.block_start_count
            <= f->block_start_count))
      {
        tree lists = f->cleanup_list_list;
        rtx cleanup_insns;

        for (; lists; lists = TREE_CHAIN (lists))
          /* If the following elt. corresponds to our containing block
             then the elt. must be for this block.  */
          if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
            {
              start_sequence ();
              pushlevel (0);
              set_block (f->context);
              expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
              do_pending_stack_adjust ();
              cleanup_insns = get_insns ();
              poplevel (1, 0, 0);
              end_sequence ();
              if (cleanup_insns != 0)
                f->before_jump
                  = emit_insns_after (cleanup_insns, f->before_jump);

              f->cleanup_list_list = TREE_CHAIN (lists);
            }

        if (stack_level)
          f->stack_level = stack_level;
      }
}
/* Return the number of times character C occurs in string S.  */

static int
n_occurrences (c, s)
     int c;
     const char *s;
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (current_function_check_memory_usage)
    {
      error ("`asm' cannot be used with `-fcheck-memory-usage'");
      return;
    }

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
                                TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}
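/* For example, the C statement `asm ("nop");' reaches expand_asm with
   BODY a STRING_CST whose text is "nop".  */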
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
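/* For example (the classic extended-asm example from the GCC manual;
   any real template is machine-specific), the GNU C statement

       asm ("fsinx %1,%0" : "=f" (result) : "f" (angle));

   arrives here with STRING holding the template, OUTPUTS a one-element
   list with constraint "=f", and INPUTS a one-element list with
   constraint "f".  */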
void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int ninout = 0;
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
  rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  enum machine_mode *inout_mode
    = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
  /* The insn we have emitted.  */
  rtx insn;

  /* An ASM with no outputs needs to be treated as volatile, for now.  */
  if (noutputs == 0)
    vol = 1;

  if (current_function_check_memory_usage)
    {
      error ("`asm' cannot be used with `-fcheck-memory-usage'");
      return;
    }

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
        ++nclobbers;
      else if (i == -2)
        error ("unknown register name `%s' in `asm'", regname);
    }
  /* Check that the number of alternatives is constant across all
     operands.  */
  if (outputs || inputs)
    {
      tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
      int nalternatives = n_occurrences (',', TREE_STRING_POINTER (tmp));
      tree next = inputs;

      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
        {
          error ("too many alternatives in `asm'");
          return;
        }

      tmp = outputs;
      while (tmp)
        {
          char *constraint = TREE_STRING_POINTER (TREE_PURPOSE (tmp));

          if (n_occurrences (',', constraint) != nalternatives)
            {
              error ("operand constraints for `asm' differ in number of alternatives");
              return;
            }

          if (TREE_CHAIN (tmp))
            tmp = TREE_CHAIN (tmp);
          else
            tmp = next, next = 0;
        }
    }
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      char *constraint;
      char *p;
      int c_len;
      int j;
      int is_inout = 0;
      int allows_reg = 0;
      int allows_mem = 0;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
        return;

      /* Make sure constraint has `=' and does not have `+'.  Also, see
         if it allows any register.  Be liberal on the latter test, since
         the worst that happens if we get it wrong is we issue an error
         message.  */

      c_len = TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1;
      constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));

      /* Allow the `=' or `+' to not be at the beginning of the string,
         since it wasn't explicitly documented that way, and there is a
         large body of code that puts it last.  Swap the character to
         the front, so as not to uglify any place else.  */
      if ((p = strchr (constraint, '=')) != NULL)
        j = p - constraint;
      else if ((p = strchr (constraint, '+')) != NULL)
        j = p - constraint;
      else
        {
          error ("output operand constraint lacks `='");
          return;
        }

      if (p != constraint)
        {
          j = *p;
          bcopy (constraint, constraint+1, p-constraint);
          *constraint = j;

          warning ("output constraint `%c' for operand %d is not at the beginning", j, i);
        }

      is_inout = constraint[0] == '+';
      /* Replace '+' with '='.  */
      constraint[0] = '=';
      /* Make sure we can specify the matching operand.  */
      if (is_inout && i > 9)
        {
          error ("output operand constraint %d contains `+'", i);
          return;
        }

      for (j = 1; j < c_len; j++)
        switch (constraint[j])
          {
          case '+':  case '=':
            error ("operand constraint contains '+' or '=' at illegal position.");
            return;
          case '%':
            if (i + 1 == ninputs + noutputs)
              {
                error ("`%%' constraint used with last operand");
                return;
              }
            break;
          case '?': case '!': case '*': case '&':
          case 'E': case 'F': case 'G': case 'H':
          case 's': case 'i': case 'n':
          case 'I': case 'J': case 'K': case 'L': case 'M':
          case 'N': case 'O': case 'P': case ',':
#ifdef EXTRA_CONSTRAINT
          case 'Q': case 'R': case 'S': case 'T': case 'U':
#endif
            break;
          case '0': case '1': case '2': case '3': case '4':
          case '5': case '6': case '7': case '8': case '9':
            error ("matching constraint not valid in output operand");
            return;
          case 'V': case 'm': case 'o':
            allows_mem = 1;
            break;
          case '<': case '>':
            /* ??? Before flow, auto inc/dec insns are not supposed to exist,
               excepting those that expand_call created.  So match memory
               and hope.  */
            allows_mem = 1;
            break;
          case 'g': case 'X':
            allows_reg = 1;
            allows_mem = 1;
            break;
          case 'p': case 'r':
          default:
            allows_reg = 1;
            break;
          }

      /* If an output operand is not a decl or indirect ref and our constraint
         allows a register, make a temporary to act as an intermediate.
         Make the asm insn write into that, then our caller will copy it to
         the real output operand.  Likewise for promoted variables.  */

      real_output_rtx[i] = NULL_RTX;
      if ((TREE_CODE (val) == INDIRECT_REF
           && allows_mem)
          || (TREE_CODE_CLASS (TREE_CODE (val)) == 'd'
              && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
              && ! (GET_CODE (DECL_RTL (val)) == REG
                    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
          || ! allows_reg
          || is_inout)
        {
          if (! allows_reg)
            mark_addressable (TREE_VALUE (tail));

          output_rtx[i]
            = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode,
                           EXPAND_MEMORY_USE_WO);

          if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
            error ("output number %d not directly addressable", i);
          if (! allows_mem && GET_CODE (output_rtx[i]) == MEM)
            {
              real_output_rtx[i] = protect_from_queue (output_rtx[i], 1);
              output_rtx[i] = gen_reg_rtx (GET_MODE (output_rtx[i]));
              if (is_inout)
                emit_move_insn (output_rtx[i], real_output_rtx[i]);
            }
        }
      else
        {
          output_rtx[i] = assign_temp (type, 0, 0, 0);
          TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
        }

      if (is_inout)
        {
          inout_mode[ninout] = TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)));
          inout_opnum[ninout++] = i;
        }
    }
  ninputs += ninout;
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx_ASM_OPERANDS (VOIDmode,
                               TREE_STRING_POINTER (string), "", 0, argvec,
                               constraints, filename, line);

  MEM_VOLATILE_P (body) = vol;
  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;
      int allows_reg = 0, allows_mem = 0;
      char *constraint, *orig_constraint;
      int c_len;
      rtx op;

      /* If there's an erroneous arg, emit no insn,
         because the ASM_INPUT would get VOIDmode
         and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
        return;

      /* ??? Can this happen, and does the error message make any sense? */
      if (TREE_PURPOSE (tail) == NULL_TREE)
        {
          error ("hard register `%s' listed as input operand to `asm'",
                 TREE_STRING_POINTER (TREE_VALUE (tail)) );
          return;
        }

      c_len = TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1;
      constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));
      orig_constraint = constraint;

      /* Make sure constraint has neither `=', `+', nor '&'.  */

      for (j = 0; j < c_len; j++)
        switch (constraint[j])
          {
          case '+': case '=': case '&':
            if (constraint == orig_constraint)
              {
                error ("input operand constraint contains `%c'",
                       constraint[j]);
                return;
              }
            break;
          case '%':
            if (constraint == orig_constraint
                && i + 1 == ninputs - ninout)
              {
                error ("`%%' constraint used with last operand");
                return;
              }
            break;
          case 'V': case 'm': case 'o':
            allows_mem = 1;
            break;
          case '<': case '>':
          case '?': case '!': case '*':
          case 'E': case 'F': case 'G': case 'H': case 'X':
          case 's': case 'i': case 'n':
          case 'I': case 'J': case 'K': case 'L': case 'M':
          case 'N': case 'O': case 'P': case ',':
#ifdef EXTRA_CONSTRAINT
          case 'Q': case 'R': case 'S': case 'T': case 'U':
#endif
            break;

            /* Whether or not a numeric constraint allows a register is
               decided by the matching constraint, and so there is no need
               to do anything special with them.  We must handle them in
               the default case, so that we don't unnecessarily force
               operands to memory.  */
          case '0': case '1': case '2': case '3': case '4':
          case '5': case '6': case '7': case '8': case '9':
            if (constraint[j] >= '0' + noutputs)
              {
                error
                  ("matching constraint references invalid operand number");
                return;
              }

            /* Try and find the real constraint for this dup.  */
            if ((j == 0 && c_len == 1)
                || (j == 1 && c_len == 2 && constraint[0] == '%'))
              {
                tree o = outputs;

                for (j = constraint[j] - '0'; j > 0; --j)
                  o = TREE_CHAIN (o);

                c_len = TREE_STRING_LENGTH (TREE_PURPOSE (o)) - 1;
                constraint = TREE_STRING_POINTER (TREE_PURPOSE (o));
                j = 0;
                break;
              }

            /* ... fall through ...  */

          case 'p': case 'r':
          default:
            allows_reg = 1;
            break;
          case 'g':
            allows_reg = 1;
            allows_mem = 1;
            break;
          }

      if (! allows_reg && allows_mem)
        mark_addressable (TREE_VALUE (tail));

      op = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);

      if (asm_operand_ok (op, constraint) <= 0)
        {
          if (allows_reg)
            op = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))), op);
          else if (!allows_mem)
            warning ("asm operand %d probably doesn't match constraints", i);
          else if (CONSTANT_P (op))
            op = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
                                  op);
          else if (GET_CODE (op) == REG
                   || GET_CODE (op) == SUBREG
                   || GET_CODE (op) == CONCAT)
            {
              tree type = TREE_TYPE (TREE_VALUE (tail));
              rtx memloc = assign_temp (type, 1, 1, 1);

              emit_move_insn (memloc, op);
              op = memloc;
            }
          else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
            /* We won't recognize volatile memory as available as a
               memory_operand at this point.  Ignore it.  */
            ;
          else if (queued_subexp_p (op))
            ;
          else
            /* ??? Leave this only until we have experience with what
               happens in combine and elsewhere when constraints are
               not satisfied.  */
            warning ("asm operand %d probably doesn't match constraints", i);
        }
      XVECEXP (body, 3, i) = op;

      XVECEXP (body, 4, i)      /* constraints */
        = gen_rtx_ASM_INPUT (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
                             orig_constraint);
      i++;
    }
  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs - ninout; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);
  /* For in-out operands, copy output rtx to input rtx.  */
  for (i = 0; i < ninout; i++)
    {
      static char match[9+1][2]
        = {"0", "1", "2", "3", "4", "5", "6", "7", "8", "9"};
      int j = inout_opnum[i];

      XVECEXP (body, 3, ninputs - ninout + i)   /* argvec */
        = output_rtx[j];
      XVECEXP (body, 4, ninputs - ninout + i)   /* constraints */
        = gen_rtx_ASM_INPUT (inout_mode[j], match[j]);
    }
  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
                               ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;

      if (num == 0)
        num = 1;

      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */
      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
        {
          XVECEXP (body, 0, i)
            = gen_rtx_SET (VOIDmode,
                           output_rtx[i],
                           gen_rtx_ASM_OPERANDS (VOIDmode,
                                                 TREE_STRING_POINTER (string),
                                                 TREE_STRING_POINTER (TREE_PURPOSE (tail)),
                                                 i, argvec, constraints,
                                                 filename, line));
          MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
        }

      /* If there are no outputs (but there are some clobbers)
         store the bare ASM_OPERANDS into the PARALLEL.  */
      if (i == 0)
        XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */
      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
        {
          char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
          int j = decode_reg_name (regname);

          if (j < 0)
            {
              if (j == -3)      /* `cc', which is not a register */
                continue;

              if (j == -4)      /* `memory', don't cache memory across asm */
                {
                  XVECEXP (body, 0, i++)
                    = gen_rtx_CLOBBER (VOIDmode,
                                       gen_rtx_MEM (BLKmode,
                                                    gen_rtx_SCRATCH (VOIDmode)));
                  continue;
                }

              /* Ignore unknown register, error already signaled.  */
              continue;
            }

          /* Use QImode since that's guaranteed to clobber just one reg.  */
          XVECEXP (body, 0, i++)
            = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (QImode, j));
        }

      insn = emit_insn (body);
    }

  /* For any outputs that needed reloading into registers, spill them
     back to where they belong.  */
  for (i = 0; i < noutputs; ++i)
    if (real_output_rtx[i])
      emit_move_insn (real_output_rtx[i], output_rtx[i]);

  free_temp_slots ();
}
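/* Sketch of the RTL produced above for an asm with two outputs and no
   clobbers (register numbers are made up; the filename/line operands of
   ASM_OPERANDS are omitted here):

       (parallel
         [(set (reg 100)
               (asm_operands TEMPLATE "=r" 0 argvec constraints))
          (set (reg 101)
               (asm_operands TEMPLATE "=r" 1 argvec constraints))])
   */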
/* Generate RTL to evaluate the expression EXP
   and remember it in case this is the VALUE in a ({... VALUE; }) construct.  */

void
expand_expr_stmt (exp)
     tree exp;
{
  /* If -W, warn about statements with no side effects,
     except for an explicit cast to void (e.g. for assert()), and
     except inside a ({...}) where they may be useful.  */
  if (expr_stmts_for_value == 0 && exp != error_mark_node)
    {
      if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
          && !(TREE_CODE (exp) == CONVERT_EXPR
               && TREE_TYPE (exp) == void_type_node))
        warning_with_file_and_line (emit_filename, emit_lineno,
                                    "statement with no effect");
      else if (warn_unused)
        warn_if_unused_value (exp);
    }

  /* If EXP is of function type and we are expanding statements for
     value, convert it to pointer-to-function.  */
  if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
    exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);

  last_expr_type = TREE_TYPE (exp);
  last_expr_value = expand_expr (exp,
                                 (expr_stmts_for_value
                                  ? NULL_RTX : const0_rtx),
                                 VOIDmode, 0);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
      && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
        ;
      else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
        copy_to_reg (last_expr_value);
      else
        {
          rtx lab = gen_label_rtx ();

          /* Compare the value with itself to reference it.  */
          emit_cmp_and_jump_insns (last_expr_value, last_expr_value, EQ,
                                   expand_expr (TYPE_SIZE (last_expr_type),
                                                NULL_RTX, VOIDmode, 0),
                                   BLKmode, 0,
                                   TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT,
                                   lab);
          emit_label (lab);
        }
    }

  /* If this expression is part of a ({...}) and is in memory, we may have
     to preserve temporaries.  */
  preserve_temp_slots (last_expr_value);

  /* Free any temporaries used to evaluate this expression.  Any temporary
     used as a result of this expression will already have been preserved
     above.  */
  free_temp_slots ();

  emit_queue ();
}
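/* For reference: in the GNU C statement-expression

       x = ({ int t = f (); t + 1; });

   each expr-stmt inside the braces goes through expand_expr_stmt;
   expr_stmts_for_value is nonzero there, so the value of the last one
   is kept in last_expr_value for expand_end_stmt_expr below.  */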
/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  */

int
warn_if_unused_value (exp)
     tree exp;
{
  if (TREE_USED (exp))
    return 0;

  switch (TREE_CODE (exp))
    {
    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case MODIFY_EXPR:
    case INIT_EXPR:
    case TARGET_EXPR:
    case CALL_EXPR:
    case METHOD_CALL_EXPR:
    case RTL_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case EXIT_EXPR:
      /* We don't warn about COND_EXPR because it may be a useful
         construct if either arm contains a side effect.  */
    case COND_EXPR:
      return 0;

    case BIND_EXPR:
      /* For a binding, warn if no side effect within it.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case SAVE_EXPR:
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case TRUTH_ORIF_EXPR:
    case TRUTH_ANDIF_EXPR:
      /* In && or ||, warn if 2nd operand has no side effect.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case COMPOUND_EXPR:
      if (TREE_NO_UNUSED_WARNING (exp))
        return 0;
      if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
        return 1;
      /* Let people do `(foo (), 0)' without a warning.  */
      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
        return 0;
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case NOP_EXPR:
    case CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Don't warn about values cast to void.  */
      if (TREE_TYPE (exp) == void_type_node)
        return 0;
      /* Don't warn about conversions not explicit in the user's program.  */
      if (TREE_NO_UNUSED_WARNING (exp))
        return 0;
      /* Assignment to a cast usually results in a cast of a modify.
         Don't complain about that.  There can be an arbitrary number of
         casts before the modify, so we must loop until we find the first
         non-cast expression and then test to see if that is a modify.  */
      {
        tree tem = TREE_OPERAND (exp, 0);

        while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
          tem = TREE_OPERAND (tem, 0);

        if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
            || TREE_CODE (tem) == CALL_EXPR)
          return 0;
      }
      goto warn;

    case INDIRECT_REF:
      /* Don't warn about automatic dereferencing of references, since
         the user cannot control it.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
        return warn_if_unused_value (TREE_OPERAND (exp, 0));
      /* ... fall through ...  */

    default:
      /* Referencing a volatile value is a side effect, so don't warn.  */
      if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
           || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
          && TREE_THIS_VOLATILE (exp))
        return 0;
    warn:
      warning_with_file_and_line (emit_filename, emit_lineno,
                                  "value computed is not used");
      return 1;
    }
}
/* Clear out the memory of the last expression evaluated.  */

void
clear_last_expr ()
{
  last_expr_type = 0;
}
/* Begin a statement which will return a value.
   Return the RTL_EXPR for this statement expr.
   The caller must save that value and pass it to expand_end_stmt_expr.  */

tree
expand_start_stmt_expr ()
{
  int momentary;
  tree t;

  /* Make the RTL_EXPR node temporary, not momentary,
     so that rtl_expr_chain doesn't become garbage.  */
  momentary = suspend_momentary ();
  t = make_node (RTL_EXPR);
  resume_momentary (momentary);
  do_pending_stack_adjust ();
  start_sequence_for_rtl_expr (t);
  NO_DEFER_POP;
  expr_stmts_for_value++;
  return t;
}
/* Restore the previous state at the end of a statement that returns a value.
   Returns a tree node representing the statement's value and the
   insns to compute the value.

   The nodes of that expression have been freed by now, so we cannot use them.
   But we don't want to do that anyway; the expression has already been
   evaluated and now we just want to use the value.  So generate a RTL_EXPR
   with the proper type and RTL value.

   If the last substatement was not an expression,
   return something with type `void'.  */

tree
expand_end_stmt_expr (t)
     tree t;
{
  OK_DEFER_POP;

  if (last_expr_type == 0)
    {
      last_expr_type = void_type_node;
      last_expr_value = const0_rtx;
    }
  else if (last_expr_value == 0)
    /* There are some cases where this can happen, such as when the
       statement is void type.  */
    last_expr_value = const0_rtx;
  else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
    /* Remove any possible QUEUED.  */
    last_expr_value = protect_from_queue (last_expr_value, 0);

  emit_queue ();

  TREE_TYPE (t) = last_expr_type;
  RTL_EXPR_RTL (t) = last_expr_value;
  RTL_EXPR_SEQUENCE (t) = get_insns ();

  rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);

  end_sequence ();

  /* Don't consider deleting this expr or containing exprs at tree level.  */
  TREE_SIDE_EFFECTS (t) = 1;
  /* Propagate volatility of the actual RTL expr.  */
  TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);

  last_expr_type = 0;
  expr_stmts_for_value--;

  return t;
}
/* Generate RTL for the start of an if-then.  COND is the expression
   whose truth should be tested.

   If EXITFLAG is nonzero, this conditional is visible to
   `exit_something'.  */

void
expand_start_cond (cond, exitflag)
     tree cond;
     int exitflag;
{
  struct nesting *thiscond = ALLOC_NESTING ();

  /* Make an entry on cond_stack for the cond we are entering.  */

  thiscond->next = cond_stack;
  thiscond->all = nesting_stack;
  thiscond->depth = ++nesting_depth;
  thiscond->data.cond.next_label = gen_label_rtx ();
  /* Before we encounter an `else', we don't need a separate exit label
     unless there are supposed to be exit statements
     to exit this conditional.  */
  thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
  thiscond->data.cond.endif_label = thiscond->exit_label;
  cond_stack = thiscond;
  nesting_stack = thiscond;

  do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
}
/* Generate RTL between then-clause and the elseif-clause
   of an if-then-elseif-....  */

void
expand_start_elseif (cond)
     tree cond;
{
  if (cond_stack->data.cond.endif_label == 0)
    cond_stack->data.cond.endif_label = gen_label_rtx ();
  emit_jump (cond_stack->data.cond.endif_label);
  emit_label (cond_stack->data.cond.next_label);
  cond_stack->data.cond.next_label = gen_label_rtx ();
  do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
}
/* Generate RTL between the then-clause and the else-clause
   of an if-then-else.  */

void
expand_start_else ()
{
  if (cond_stack->data.cond.endif_label == 0)
    cond_stack->data.cond.endif_label = gen_label_rtx ();

  emit_jump (cond_stack->data.cond.endif_label);
  emit_label (cond_stack->data.cond.next_label);
  cond_stack->data.cond.next_label = 0;  /* No more _else or _elseif calls.  */
}
/* After calling expand_start_else, turn this "else" into an "else if"
   by providing another condition.  */

void
expand_elseif (cond)
     tree cond;
{
  cond_stack->data.cond.next_label = gen_label_rtx ();
  do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
}
/* Generate RTL for the end of an if-then.
   Pop the record for it off of cond_stack.  */

void
expand_end_cond ()
{
  struct nesting *thiscond = cond_stack;

  do_pending_stack_adjust ();
  if (thiscond->data.cond.next_label)
    emit_label (thiscond->data.cond.next_label);
  if (thiscond->data.cond.endif_label)
    emit_label (thiscond->data.cond.endif_label);

  POPSTACK (cond_stack);
  last_expr_type = 0;
}
/* Generate RTL for the start of a loop.  EXIT_FLAG is nonzero if this
   loop should be exited by `exit_something'.  This is a loop for which
   `expand_continue' will jump to the top of the loop.

   Make an entry on loop_stack to record the labels associated with
   this loop.  */

struct nesting *
expand_start_loop (exit_flag)
     int exit_flag;
{
  register struct nesting *thisloop = ALLOC_NESTING ();

  /* Make an entry on loop_stack for the loop we are entering.  */

  thisloop->next = loop_stack;
  thisloop->all = nesting_stack;
  thisloop->depth = ++nesting_depth;
  thisloop->data.loop.start_label = gen_label_rtx ();
  thisloop->data.loop.end_label = gen_label_rtx ();
  thisloop->data.loop.alt_end_label = 0;
  thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
  thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
  loop_stack = thisloop;
  nesting_stack = thisloop;

  do_pending_stack_adjust ();
  emit_queue ();
  emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
  emit_label (thisloop->data.loop.start_label);

  return thisloop;
}
2011 /* Like expand_start_loop but for a loop where the continuation point
2012 (for expand_continue_loop) will be specified explicitly. */
2015 expand_start_loop_continue_elsewhere (exit_flag)
2018 struct nesting *thisloop = expand_start_loop (exit_flag);
2019 loop_stack->data.loop.continue_label = gen_label_rtx ();
2023 /* Specify the continuation point for a loop started with
2024 expand_start_loop_continue_elsewhere.
Use this at the point in the code to which a continue statement
should jump.  */
2029 expand_loop_continue_here ()
2031 do_pending_stack_adjust ();
2032 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
2033 emit_label (loop_stack->data.loop.continue_label);
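
/* Illustrative call sequence (a sketch, not part of the original code):
   for `for (init; test; incr) body;' a front end might emit

       ... expand init ...
       expand_start_loop_continue_elsewhere (1);
       expand_exit_loop_if_false (0, test);
       ... expand body ...
       expand_loop_continue_here ();
       ... expand incr ...
       expand_end_loop ();

   Passing 0 for WHICHLOOP selects the innermost loop on loop_stack.  */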
2036 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2037 Pop the block off of loop_stack. */
2042 rtx start_label = loop_stack->data.loop.start_label;
2043 rtx insn = get_last_insn ();
2044 int needs_end_jump = 1;
2046 /* Mark the continue-point at the top of the loop if none elsewhere. */
2047 if (start_label == loop_stack->data.loop.continue_label)
2048 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2050 do_pending_stack_adjust ();
2052 /* If optimizing, perhaps reorder the loop.
2053 First, try to use a condjump near the end.
expand_exit_loop_if_false ends loops with unconditional jumps,
like this:

if (test) goto label;
optional: cleanup
goto loop_stack->data.loop.end_label
barrier
label:

If we find such a pattern, we can end the loop earlier.  */
2066 && GET_CODE (insn) == CODE_LABEL
2067 && LABEL_NAME (insn) == NULL
2068 && GET_CODE (PREV_INSN (insn)) == BARRIER)
2071 rtx jump = PREV_INSN (PREV_INSN (label));
2073 if (GET_CODE (jump) == JUMP_INSN
2074 && GET_CODE (PATTERN (jump)) == SET
2075 && SET_DEST (PATTERN (jump)) == pc_rtx
2076 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
2077 && (XEXP (SET_SRC (PATTERN (jump)), 0)
2078 == loop_stack->data.loop.end_label))
2082 /* The test might be complex and reference LABEL multiple times,
like the loop in loop_iterations to set vtop.  To handle this,
we move LABEL.  */
2085 insn = PREV_INSN (label);
2086 reorder_insns (label, label, start_label);
2088 for (prev = PREV_INSN (jump); ; prev = PREV_INSN (prev))
2090 /* We ignore line number notes, but if we see any other note,
2091 in particular NOTE_INSN_BLOCK_*, NOTE_INSN_EH_REGION_*,
2092 NOTE_INSN_LOOP_*, we disable this optimization. */
2093 if (GET_CODE (prev) == NOTE)
2095 if (NOTE_LINE_NUMBER (prev) < 0)
2099 if (GET_CODE (prev) == CODE_LABEL)
2101 if (GET_CODE (prev) == JUMP_INSN)
2103 if (GET_CODE (PATTERN (prev)) == SET
2104 && SET_DEST (PATTERN (prev)) == pc_rtx
2105 && GET_CODE (SET_SRC (PATTERN (prev))) == IF_THEN_ELSE
2106 && (GET_CODE (XEXP (SET_SRC (PATTERN (prev)), 1))
2108 && XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0) == label)
2110 XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0)
2112 emit_note_after (NOTE_INSN_LOOP_END, prev);
2121 /* If the loop starts with a loop exit, roll that to the end where
2122 it will optimize together with the jump back.
2124 We look for the conditional branch to the exit, except that once
2125 we find such a branch, we don't look past 30 instructions.
In more detail, if the loop presently looks like this (in pseudo-C):

start_label:
if (test) goto end_label;
body;
goto start_label;
end_label:

transform it to look like:

goto start_label;
newstart_label:
body;
start_label:
if (test) goto end_label;
goto newstart_label;
end_label:
2145 Here, the `test' may actually consist of some reasonably complex
2146 code, terminating in a test. */
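
/* (Added illustration: for a source loop `while (test) body;' the
   untransformed expansion executes two branches per iteration -- the
   not-taken conditional exit at the top plus the unconditional jump
   back.  After the transformation the loop ends with
   `if (test) goto end_label; goto newstart_label;', which later jump
   optimization can collapse into a single conditional branch back to
   the body, so each iteration executes one branch instead of two.)  */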
2151 ! (GET_CODE (insn) == JUMP_INSN
2152 && GET_CODE (PATTERN (insn)) == SET
2153 && SET_DEST (PATTERN (insn)) == pc_rtx
2154 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2158 rtx last_test_insn = NULL_RTX;
2160 /* Scan insns from the top of the loop looking for a qualified
2161 conditional exit. */
2162 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2163 insn = NEXT_INSN (insn))
2165 if (GET_CODE (insn) == NOTE)
2168 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2169 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2170 /* The code that actually moves the exit test will
2171 carefully leave BLOCK notes in their original
2172 location. That means, however, that we can't debug
2173 the exit test itself. So, we refuse to move code
2174 containing BLOCK notes at low optimization levels. */
2177 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2179 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
2183 /* We've come to the end of an EH region, but
2184 never saw the beginning of that region. That
2185 means that an EH region begins before the top
2186 of the loop, and ends in the middle of it. The
2187 existence of such a situation violates a basic
2188 assumption in this code, since that would imply
2189 that even when EH_REGIONS is zero, we might
2190 move code out of an exception region. */
2194 /* We must not walk into a nested loop. */
2195 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
2198 /* We already know this INSN is a NOTE, so there's no
2199 point in looking at it to see if it's a JUMP. */
2203 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2206 if (last_test_insn && num_insns > 30)
2210 /* We don't want to move a partial EH region. Consider:
2224 This isn't legal C++, but here's what it's supposed to
2225 mean: if cond() is true, stop looping. Otherwise,
2226 call bar, and keep looping. In addition, if cond
2227 throws an exception, catch it and keep looping. Such
constructs are certainly legal in LISP.
2230 We should not move the `if (cond()) 0' test since then
2231 the EH-region for the try-block would be broken up.
(In this case we would see the EH_BEG note for the `try'
and `if cond()' but not the call to bar() or the
EH_END note.)
2236 So we don't look for tests within an EH region. */
2239 if (GET_CODE (insn) == JUMP_INSN
2240 && GET_CODE (PATTERN (insn)) == SET
2241 && SET_DEST (PATTERN (insn)) == pc_rtx)
2243 /* This is indeed a jump. */
2244 rtx dest1 = NULL_RTX;
2245 rtx dest2 = NULL_RTX;
2246 rtx potential_last_test;
2247 if (GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE)
2249 /* A conditional jump. */
2250 dest1 = XEXP (SET_SRC (PATTERN (insn)), 1);
2251 dest2 = XEXP (SET_SRC (PATTERN (insn)), 2);
2252 potential_last_test = insn;
2256 /* An unconditional jump. */
2257 dest1 = SET_SRC (PATTERN (insn));
2258 /* Include the BARRIER after the JUMP. */
2259 potential_last_test = NEXT_INSN (insn);
2263 if (dest1 && GET_CODE (dest1) == LABEL_REF
2264 && ((XEXP (dest1, 0)
== loop_stack->data.loop.alt_end_label)
|| (XEXP (dest1, 0)
2267 == loop_stack->data.loop.end_label)))
2269 last_test_insn = potential_last_test;
2273 /* If this was a conditional jump, there may be
2274 another label at which we should look. */
2281 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2283 /* We found one. Move everything from there up
to the end of the loop, and add a jump into the loop
that jumps to it.  */
2286 register rtx newstart_label = gen_label_rtx ();
2287 register rtx start_move = start_label;
2290 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2291 then we want to move this note also. */
2292 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2293 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2294 == NOTE_INSN_LOOP_CONT))
2295 start_move = PREV_INSN (start_move);
2297 emit_label_after (newstart_label, PREV_INSN (start_move));
2299 /* Actually move the insns. Start at the beginning, and
keep copying insns until we've copied the last_test_insn.  */
2302 for (insn = start_move; insn; insn = next_insn)
2304 /* Figure out which insn comes after this one. We have
2305 to do this before we move INSN. */
2306 if (insn == last_test_insn)
2307 /* We've moved all the insns. */
2308 next_insn = NULL_RTX;
2310 next_insn = NEXT_INSN (insn);
2312 if (GET_CODE (insn) == NOTE
2313 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2314 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2315 /* We don't want to move NOTE_INSN_BLOCK_BEGs or
2316 NOTE_INSN_BLOCK_ENDs because the correct generation
2317 of debugging information depends on these appearing
2318 in the same order in the RTL and in the tree
2319 structure, where they are represented as BLOCKs.
2320 So, we don't move block notes. Of course, moving
2321 the code inside the block is likely to make it
2322 impossible to debug the instructions in the exit
2323 test, but such is the price of optimization. */
2326 /* Move the INSN. */
2327 reorder_insns (insn, insn, get_last_insn ());
2330 emit_jump_insn_after (gen_jump (start_label),
2331 PREV_INSN (newstart_label));
2332 emit_barrier_after (PREV_INSN (newstart_label));
2333 start_label = newstart_label;
2339 emit_jump (start_label);
2340 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2342 emit_label (loop_stack->data.loop.end_label);
2344 POPSTACK (loop_stack);
2349 /* Generate a jump to the current loop's continue-point.
2350 This is usually the top of the loop, but may be specified
2351 explicitly elsewhere. If not currently inside a loop,
2352 return 0 and do nothing; caller will print an error message. */
2355 expand_continue_loop (whichloop)
2356 struct nesting *whichloop;
2360 whichloop = loop_stack;
2363 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2368 /* Generate a jump to exit the current loop. If not currently inside a loop,
2369 return 0 and do nothing; caller will print an error message. */
2372 expand_exit_loop (whichloop)
2373 struct nesting *whichloop;
2377 whichloop = loop_stack;
2380 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2384 /* Generate a conditional jump to exit the current loop if COND
2385 evaluates to zero. If not currently inside a loop,
2386 return 0 and do nothing; caller will print an error message. */
2389 expand_exit_loop_if_false (whichloop, cond)
2390 struct nesting *whichloop;
2393 rtx label = gen_label_rtx ();
2398 whichloop = loop_stack;
2401 /* In order to handle fixups, we actually create a conditional jump
around an unconditional branch to exit the loop.  If fixups are
2403 necessary, they go before the unconditional branch. */
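
/* Illustrative shape of what is emitted below (a sketch):

       if (COND) goto label;
       ... fixup insns, if any ...
       goto loop_stack->data.loop.end_label
       barrier
     label:

   which is exactly the pattern expand_end_loop looks for when it tries
   to end the loop with a condjump.  */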
2406 do_jump (cond, NULL_RTX, label);
2407 last_insn = get_last_insn ();
2408 if (GET_CODE (last_insn) == CODE_LABEL)
2409 whichloop->data.loop.alt_end_label = last_insn;
2410 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2417 /* Return nonzero if the loop nest is empty. Else return zero. */
2420 stmt_loop_nest_empty ()
2422 return (loop_stack == NULL);
2425 /* Return non-zero if we should preserve sub-expressions as separate
2426 pseudos. We never do so if we aren't optimizing. We always do so
2427 if -fexpensive-optimizations.
2429 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2430 the loop may still be a small one. */
2433 preserve_subexpressions_p ()
2437 if (flag_expensive_optimizations)
2440 if (optimize == 0 || loop_stack == 0)
2443 insn = get_last_insn_anywhere ();
2446 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2447 < n_non_fixed_regs * 3));
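
/* (Worked instance, illustrative only: on a target with roughly 30
   non-fixed registers, the test above preserves subexpressions only
   while we are within about 90 insn UIDs of the loop's start label.)  */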
2451 /* Generate a jump to exit the current loop, conditional, binding contour
2452 or case statement. Not all such constructs are visible to this function,
2453 only those started with EXIT_FLAG nonzero. Individual languages use
the EXIT_FLAG parameter to control which kinds of constructs you can
exit this way.
2457 If not currently inside anything that can be exited,
2458 return 0 and do nothing; caller will print an error message. */
2461 expand_exit_something ()
2465 for (n = nesting_stack; n; n = n->all)
2466 if (n->exit_label != 0)
2468 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2475 /* Generate RTL to return from the current function, with no value.
2476 (That is, we do not do anything about returning any value.) */
2479 expand_null_return ()
2481 struct nesting *block = block_stack;
2484 /* Does any pending block have cleanups? */
2486 while (block && block->data.block.cleanups == 0)
2487 block = block->next;
2489 /* If yes, use a goto to return, since that runs cleanups. */
2491 expand_null_return_1 (last_insn, block != 0);
2494 /* Generate RTL to return from the current function, with value VAL. */
2497 expand_value_return (val)
2500 struct nesting *block = block_stack;
2501 rtx last_insn = get_last_insn ();
2502 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2504 /* Copy the value to the return location
2505 unless it's already there. */
2507 if (return_reg != val)
2509 #ifdef PROMOTE_FUNCTION_RETURN
2510 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2511 int unsignedp = TREE_UNSIGNED (type);
2512 enum machine_mode mode
2513 = promote_mode (type, DECL_MODE (DECL_RESULT (current_function_decl)),
2516 if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
convert_move (return_reg, val, unsignedp);
else
#endif
2520 emit_move_insn (return_reg, val);
2522 if (GET_CODE (return_reg) == REG
2523 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2524 emit_insn (gen_rtx_USE (VOIDmode, return_reg));
2525 /* Handle calls that return values in multiple non-contiguous locations.
2526 The Irix 6 ABI has examples of this. */
2527 else if (GET_CODE (return_reg) == PARALLEL)
2531 for (i = 0; i < XVECLEN (return_reg, 0); i++)
2533 rtx x = XEXP (XVECEXP (return_reg, 0, i), 0);
2535 if (GET_CODE (x) == REG
2536 && REGNO (x) < FIRST_PSEUDO_REGISTER)
2537 emit_insn (gen_rtx_USE (VOIDmode, x));
2541 /* Does any pending block have cleanups? */
2543 while (block && block->data.block.cleanups == 0)
2544 block = block->next;
2546 /* If yes, use a goto to return, since that runs cleanups.
2547 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2549 expand_null_return_1 (last_insn, block != 0);
2552 /* Output a return with no value. If LAST_INSN is nonzero,
2553 pretend that the return takes place after LAST_INSN.
2554 If USE_GOTO is nonzero then don't use a return instruction;
2555 go to the return label instead. This causes any cleanups
2556 of pending blocks to be executed normally. */
2559 expand_null_return_1 (last_insn, use_goto)
2563 rtx end_label = cleanup_label ? cleanup_label : return_label;
2565 clear_pending_stack_adjust ();
2566 do_pending_stack_adjust ();
2569 /* PCC-struct return always uses an epilogue. */
2570 if (current_function_returns_pcc_struct || use_goto)
2573 end_label = return_label = gen_label_rtx ();
2574 expand_goto_internal (NULL_TREE, end_label, last_insn);
2578 /* Otherwise output a simple return-insn if one is available,
2579 unless it won't do the job. */
2581 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2583 emit_jump_insn (gen_return ());
2589 /* Otherwise jump to the epilogue. */
2590 expand_goto_internal (NULL_TREE, end_label, last_insn);
2593 /* Generate RTL to evaluate the expression RETVAL and return it
2594 from the current function. */
2597 expand_return (retval)
2600 /* If there are any cleanups to be performed, then they will
2601 be inserted following LAST_INSN. It is desirable
2602 that the last_insn, for such purposes, should be the
2603 last insn before computing the return value. Otherwise, cleanups
2604 which call functions can clobber the return value. */
2605 /* ??? rms: I think that is erroneous, because in C++ it would
2606 run destructors on variables that might be used in the subsequent
2607 computation of the return value. */
2609 register rtx val = 0;
2614 /* If function wants no value, give it none. */
2615 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2617 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2619 expand_null_return ();
2623 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2624 /* This is not sufficient. We also need to watch for cleanups of the
2625 expression we are about to expand. Unfortunately, we cannot know
2626 if it has cleanups until we expand it, and we want to change how we
2627 expand it depending upon if we need cleanups. We can't win. */
2629 cleanups = any_pending_cleanups (1);
2634 if (TREE_CODE (retval) == RESULT_DECL)
2635 retval_rhs = retval;
2636 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2637 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2638 retval_rhs = TREE_OPERAND (retval, 1);
2639 else if (TREE_TYPE (retval) == void_type_node)
2640 /* Recognize tail-recursive call to void function. */
2641 retval_rhs = retval;
2643 retval_rhs = NULL_TREE;
2645 /* Only use `last_insn' if there are cleanups which must be run. */
2646 if (cleanups || cleanup_label != 0)
2647 last_insn = get_last_insn ();
2649 /* Distribute return down conditional expr if either of the sides
may involve tail recursion (see test below).  This increases the number
of tail recursions we see.  Don't do this always, since it can produce
sub-optimal code in some cases, and we distribute assignments into
conditional expressions when it would help.  */
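
/* For example (an added illustration, not from the original comments):

       return p ? f (x) : g (x);

   is expanded below as though it had been written

       if (p) return f (x); else return g (x);

   so that each arm becomes its own return, whose call can then be
   recognized as a tail call.  */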
2655 if (optimize && retval_rhs != 0
2656 && frame_offset == 0
2657 && TREE_CODE (retval_rhs) == COND_EXPR
2658 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2659 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2661 rtx label = gen_label_rtx ();
2664 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2665 start_cleanup_deferral ();
2666 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2667 DECL_RESULT (current_function_decl),
2668 TREE_OPERAND (retval_rhs, 1));
2669 TREE_SIDE_EFFECTS (expr) = 1;
2670 expand_return (expr);
2673 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2674 DECL_RESULT (current_function_decl),
2675 TREE_OPERAND (retval_rhs, 2));
2676 TREE_SIDE_EFFECTS (expr) = 1;
2677 expand_return (expr);
2678 end_cleanup_deferral ();
2682 /* Attempt to optimize the call if it is tail recursive. */
2683 if (optimize_tail_recursion (retval_rhs, last_insn))
2687 /* This optimization is safe if there are local cleanups
2688 because expand_null_return takes care of them.
2689 ??? I think it should also be safe when there is a cleanup label,
2690 because expand_null_return takes care of them, too.
2691 Any reason why not? */
2692 if (HAVE_return && cleanup_label == 0
2693 && ! current_function_returns_pcc_struct
2694 && BRANCH_COST <= 1)
2696 /* If this is return x == y; then generate
2697 if (x == y) return 1; else return 0;
2698 if we can do it with explicit return insns and branches are cheap,
2699 but not if we have the corresponding scc insn. */
2702 switch (TREE_CODE (retval_rhs))
2728 case TRUTH_ANDIF_EXPR:
2729 case TRUTH_ORIF_EXPR:
2730 case TRUTH_AND_EXPR:
2732 case TRUTH_NOT_EXPR:
2733 case TRUTH_XOR_EXPR:
2736 op0 = gen_label_rtx ();
2737 jumpifnot (retval_rhs, op0);
2738 expand_value_return (const1_rtx);
2740 expand_value_return (const0_rtx);
2749 #endif /* HAVE_return */
2751 /* If the result is an aggregate that is being returned in one (or more)
2752 registers, load the registers here. The compiler currently can't handle
2753 copying a BLKmode value into registers. We could put this code in a
2754 more general area (for use by everyone instead of just function
2755 call/return), but until this feature is generally usable it is kept here
2756 (and in expand_call). The value must go into a pseudo in case there
2757 are cleanups that will clobber the real return register. */
2760 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2761 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2763 int i, bitpos, xbitpos;
2764 int big_endian_correction = 0;
2765 int bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2766 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2767 int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)),
(unsigned int) BITS_PER_WORD);
2769 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
2770 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
2771 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2772 enum machine_mode tmpmode, result_reg_mode;
2774 /* Structures whose size is not a multiple of a word are aligned
2775 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2776 machine, this means we must skip the empty high order bytes when
2777 calculating the bit offset. */
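
/* (Worked example, illustrative only: for a 5-byte structure on a
   32-bit BYTES_BIG_ENDIAN target, bytes % UNITS_PER_WORD == 1, so
   big_endian_correction == 32 - 1 * 8 == 24; the copy below therefore
   starts 24 bits into the first destination word, leaving the value
   right justified overall.)  */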
2778 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2779 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2782 /* Copy the structure BITSIZE bits at a time. */
2783 for (bitpos = 0, xbitpos = big_endian_correction;
2784 bitpos < bytes * BITS_PER_UNIT;
2785 bitpos += bitsize, xbitpos += bitsize)
2787 /* We need a new destination pseudo each time xbitpos is
2788 on a word boundary and when xbitpos == big_endian_correction
2789 (the first time through). */
2790 if (xbitpos % BITS_PER_WORD == 0
2791 || xbitpos == big_endian_correction)
2793 /* Generate an appropriate register. */
2794 dst = gen_reg_rtx (word_mode);
2795 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2797 /* Clobber the destination before we move anything into it. */
2798 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
/* We need a new source operand each time bitpos is on a word
boundary.  */
2803 if (bitpos % BITS_PER_WORD == 0)
2804 src = operand_subword_force (result_val,
2805 bitpos / BITS_PER_WORD,
2808 /* Use bitpos for the source extraction (left justified) and
2809 xbitpos for the destination store (right justified). */
2810 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2811 extract_bit_field (src, bitsize,
2812 bitpos % BITS_PER_WORD, 1,
2813 NULL_RTX, word_mode,
2815 bitsize / BITS_PER_UNIT,
2817 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2820 /* Find the smallest integer mode large enough to hold the
2821 entire structure and use that mode instead of BLKmode
2822 on the USE insn for the return register. */
2823 bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2824 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2825 tmpmode != MAX_MACHINE_MODE;
2826 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
2828 /* Have we found a large enough mode? */
2829 if (GET_MODE_SIZE (tmpmode) >= bytes)
2833 /* No suitable mode found. */
2834 if (tmpmode == MAX_MACHINE_MODE)
2837 PUT_MODE (DECL_RTL (DECL_RESULT (current_function_decl)), tmpmode);
2839 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
2840 result_reg_mode = word_mode;
2842 result_reg_mode = tmpmode;
2843 result_reg = gen_reg_rtx (result_reg_mode);
2846 for (i = 0; i < n_regs; i++)
2847 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
2850 if (tmpmode != result_reg_mode)
2851 result_reg = gen_lowpart (tmpmode, result_reg);
2853 expand_value_return (result_reg);
2857 && TREE_TYPE (retval_rhs) != void_type_node
2858 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2860 /* Calculate the return value into a pseudo reg. */
2861 val = gen_reg_rtx (DECL_MODE (DECL_RESULT (current_function_decl)));
2862 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
2863 val = force_not_mem (val);
2865 /* Return the calculated value, doing cleanups first. */
2866 expand_value_return (val);
2870 /* No cleanups or no hard reg used;
2871 calculate value into hard return reg. */
2872 expand_expr (retval, const0_rtx, VOIDmode, 0);
2874 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2878 /* Return 1 if the end of the generated RTX is not a barrier.
2879 This means code already compiled can drop through. */
2882 drop_through_at_end_p ()
2884 rtx insn = get_last_insn ();
2885 while (insn && GET_CODE (insn) == NOTE)
2886 insn = PREV_INSN (insn);
2887 return insn && GET_CODE (insn) != BARRIER;
2890 /* Test CALL_EXPR to determine if it is a potential tail recursion call
2891 and emit code to optimize the tail recursion. LAST_INSN indicates where
2892 to place the jump to the tail recursion label. Return TRUE if the
2893 call was optimized into a goto.
This is only used by expand_return, but expand_call is expected to
use it soon.  */
2899 optimize_tail_recursion (call_expr, last_insn)
2903 /* For tail-recursive call to current function,
2904 just jump back to the beginning.
2905 It's unsafe if any auto variable in this function
2906 has its address taken; for simplicity,
2907 require stack frame to be empty. */
2908 if (optimize && call_expr != 0
2909 && frame_offset == 0
2910 && TREE_CODE (call_expr) == CALL_EXPR
2911 && TREE_CODE (TREE_OPERAND (call_expr, 0)) == ADDR_EXPR
2912 && TREE_OPERAND (TREE_OPERAND (call_expr, 0), 0) == current_function_decl
2913 /* Finish checking validity, and if valid emit code
2914 to set the argument variables for the new call. */
2915 && tail_recursion_args (TREE_OPERAND (call_expr, 1),
2916 DECL_ARGUMENTS (current_function_decl)))
2918 if (tail_recursion_label == 0)
2920 tail_recursion_label = gen_label_rtx ();
2921 emit_label_after (tail_recursion_label,
2922 tail_recursion_reentry);
2925 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
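
/* Illustrative effect (a sketch, not from the original comments): for

       int f (int n) { ... return f (n - 1); }

   when the conditions above hold, the tail call is replaced by stores
   of the new argument values into the formals followed by a jump to
   tail_recursion_label at the top of the function, instead of a real
   call.  */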
2933 /* Emit code to alter this function's formal parms for a tail-recursive call.
2934 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2935 FORMALS is the chain of decls of formals.
2936 Return 1 if this can be done;
2937 otherwise return 0 and do not emit any code. */
2940 tail_recursion_args (actuals, formals)
2941 tree actuals, formals;
2943 register tree a = actuals, f = formals;
2945 register rtx *argvec;
2947 /* Check that number and types of actuals are compatible
2948 with the formals. This is not always true in valid C code.
2949 Also check that no formal needs to be addressable
2950 and that all formals are scalars. */
2952 /* Also count the args. */
2954 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2956 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
2957 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
2959 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2962 if (a != 0 || f != 0)
2965 /* Compute all the actuals. */
2967 argvec = (rtx *) alloca (i * sizeof (rtx));
2969 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2970 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
2972 /* Find which actual values refer to current values of previous formals.
2973 Copy each of them now, before any formal is changed. */
2975 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2979 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2980 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2981 { copy = 1; break; }
2983 argvec[i] = copy_to_reg (argvec[i]);
2986 /* Store the values of the actuals into the formals. */
2988 for (f = formals, a = actuals, i = 0; f;
2989 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2991 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
2992 emit_move_insn (DECL_RTL (f), argvec[i]);
2994 convert_move (DECL_RTL (f), argvec[i],
2995 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
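
/* (Added illustration of why the copies above are needed: for a
   tail-recursive `return f (y, x);' with formals x and y, storing the
   first actual into x's register would clobber the x mentioned by the
   second actual; copy_to_reg gives any such actual its own pseudo
   before the formals are changed.)  */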
3002 /* Generate the RTL code for entering a binding contour.
3003 The variables are declared one by one, by calls to `expand_decl'.
3005 EXIT_FLAG is nonzero if this construct should be visible to
3006 `exit_something'. */
3009 expand_start_bindings (exit_flag)
3012 struct nesting *thisblock = ALLOC_NESTING ();
3013 rtx note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
3015 /* Make an entry on block_stack for the block we are entering. */
3017 thisblock->next = block_stack;
3018 thisblock->all = nesting_stack;
3019 thisblock->depth = ++nesting_depth;
3020 thisblock->data.block.stack_level = 0;
3021 thisblock->data.block.cleanups = 0;
3022 thisblock->data.block.function_call_count = 0;
3023 thisblock->data.block.exception_region = 0;
3024 thisblock->data.block.target_temp_slot_level = target_temp_slot_level;
3026 thisblock->data.block.conditional_code = 0;
3027 thisblock->data.block.last_unconditional_cleanup = note;
3028 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3031 && !(block_stack->data.block.cleanups == NULL_TREE
3032 && block_stack->data.block.outer_cleanups == NULL_TREE))
3033 thisblock->data.block.outer_cleanups
3034 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3035 block_stack->data.block.outer_cleanups);
3037 thisblock->data.block.outer_cleanups = 0;
3038 thisblock->data.block.label_chain = 0;
3039 thisblock->data.block.innermost_stack_block = stack_block_stack;
3040 thisblock->data.block.first_insn = note;
3041 thisblock->data.block.block_start_count = ++block_start_count;
3042 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3043 block_stack = thisblock;
3044 nesting_stack = thisblock;
3046 /* Make a new level for allocating stack slots. */
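
/* Illustrative pairing (a sketch, not part of the original code): for a
   source-level block `{ int v; ... }' a front end roughly does

       expand_start_bindings (0);
       ... expand_decl and expand_decl_init for v, then the body ...
       expand_end_bindings (decls, 1, 0);

   and the matching expand_end_bindings pops the entry made here.  */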
3050 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3051 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3052 expand_expr are made. After we end the region, we know that all
3053 space for all temporaries that were created by TARGET_EXPRs will be
3054 destroyed and their space freed for reuse. */
3057 expand_start_target_temps ()
3059 /* This is so that even if the result is preserved, the space
3060 allocated will be freed, as we know that it is no longer in use. */
3063 /* Start a new binding layer that will keep track of all cleanup
3064 actions to be performed. */
3065 expand_start_bindings (0);
3067 target_temp_slot_level = temp_slot_level;
3071 expand_end_target_temps ()
3073 expand_end_bindings (NULL_TREE, 0, 0);
3075 /* This is so that even if the result is preserved, the space
3076 allocated will be freed, as we know that it is no longer in use. */
3080 /* Mark top block of block_stack as an implicit binding for an
3081 exception region. This is used to prevent infinite recursion when
3082 ending a binding with expand_end_bindings. It is only ever called
by expand_eh_region_start, as that is the only way to create a
block stack for an exception region.  */
3087 mark_block_as_eh_region ()
3089 block_stack->data.block.exception_region = 1;
3090 if (block_stack->next
3091 && block_stack->next->data.block.conditional_code)
3093 block_stack->data.block.conditional_code
3094 = block_stack->next->data.block.conditional_code;
3095 block_stack->data.block.last_unconditional_cleanup
3096 = block_stack->next->data.block.last_unconditional_cleanup;
3097 block_stack->data.block.cleanup_ptr
3098 = block_stack->next->data.block.cleanup_ptr;
3102 /* True if we are currently emitting insns in an area of output code
3103 that is controlled by a conditional expression. This is used by
3104 the cleanup handling code to generate conditional cleanup actions. */
3107 conditional_context ()
3109 return block_stack && block_stack->data.block.conditional_code;
3112 /* Mark top block of block_stack as not for an implicit binding for an
3113 exception region. This is only ever done by expand_eh_region_end
3114 to let expand_end_bindings know that it is being called explicitly
to end just the binding layer associated with the exception
region; otherwise expand_end_bindings would try to end all
implicit binding layers for exception regions, and then
3118 one normal binding layer. */
3121 mark_block_as_not_eh_region ()
3123 block_stack->data.block.exception_region = 0;
3126 /* True if the top block of block_stack was marked as for an exception
3127 region by mark_block_as_eh_region. */
3132 return block_stack && block_stack->data.block.exception_region;
3135 /* Given a pointer to a BLOCK node, save a pointer to the most recently
generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
block.  */
3140 remember_end_note (block)
3141 register tree block;
3143 BLOCK_END_NOTE (block) = last_block_end_note;
3144 last_block_end_note = NULL_RTX;
3147 /* Emit a handler label for a nonlocal goto handler.
3148 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3151 expand_nl_handler_label (slot, before_insn)
3152 rtx slot, before_insn;
3155 rtx handler_label = gen_label_rtx ();
3157 /* Don't let jump_optimize delete the handler. */
3158 LABEL_PRESERVE_P (handler_label) = 1;
3161 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3162 insns = get_insns ();
3164 emit_insns_before (insns, before_insn);
3166 emit_label (handler_label);
3168 return handler_label;
/* Emit code to restore vital registers at the beginning of a nonlocal goto
handler.  */
3174 expand_nl_goto_receiver ()
3176 #ifdef HAVE_nonlocal_goto
3177 if (! HAVE_nonlocal_goto)
3179 /* First adjust our frame pointer to its actual value. It was
3180 previously set to the start of the virtual area corresponding to
3181 the stacked variables when we branched here and now needs to be
3182 adjusted to the actual hardware fp value.
Assignments to virtual registers are converted by
3185 instantiate_virtual_regs into the corresponding assignment
3186 to the underlying register (fp in this case) that makes
3187 the original assignment true.
3188 So the following insn will actually be
3189 decrementing fp by STARTING_FRAME_OFFSET. */
3190 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3192 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3193 if (fixed_regs[ARG_POINTER_REGNUM])
3195 #ifdef ELIMINABLE_REGS
3196 /* If the argument pointer can be eliminated in favor of the
3197 frame pointer, we don't need to restore it. We assume here
3198 that if such an elimination is present, it can always be used.
3199 This is the case on all known machines; if we don't make this
3200 assumption, we do unnecessary saving on many machines. */
3201 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3204 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
3205 if (elim_regs[i].from == ARG_POINTER_REGNUM
3206 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3209 if (i == sizeof elim_regs / sizeof elim_regs [0])
3212 /* Now restore our arg pointer from the address at which it
3213 was saved in our stack frame.
If there hasn't been space allocated for it yet, make
some now.  */
3216 if (arg_pointer_save_area == 0)
3217 arg_pointer_save_area
3218 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3219 emit_move_insn (virtual_incoming_args_rtx,
3220 /* We need a pseudo here, or else
3221 instantiate_virtual_regs_1 complains. */
3222 copy_to_reg (arg_pointer_save_area));
3227 #ifdef HAVE_nonlocal_goto_receiver
3228 if (HAVE_nonlocal_goto_receiver)
3229 emit_insn (gen_nonlocal_goto_receiver ());
/* Make handlers for nonlocal gotos taking place in the function calls in
this block.  */
3237 expand_nl_goto_receivers (thisblock)
3238 struct nesting *thisblock;
3241 rtx afterward = gen_label_rtx ();
3246 /* Record the handler address in the stack slot for that purpose,
3247 during this block, saving and restoring the outer value. */
3248 if (thisblock->next != 0)
3249 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3251 rtx save_receiver = gen_reg_rtx (Pmode);
3252 emit_move_insn (XEXP (slot, 0), save_receiver);
3255 emit_move_insn (save_receiver, XEXP (slot, 0));
3256 insns = get_insns ();
3258 emit_insns_before (insns, thisblock->data.block.first_insn);
3261 /* Jump around the handlers; they run only when specially invoked. */
3262 emit_jump (afterward);
3264 /* Make a separate handler for each label. */
3265 link = nonlocal_labels;
3266 slot = nonlocal_goto_handler_slots;
3267 label_list = NULL_RTX;
3268 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
/* Skip any labels we shouldn't be able to jump to from here;
we generate one special handler for all of them below, which just calls
abort.  */
3272 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3275 lab = expand_nl_handler_label (XEXP (slot, 0),
3276 thisblock->data.block.first_insn);
3277 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3279 expand_nl_goto_receiver ();
3281 /* Jump to the "real" nonlocal label. */
3282 expand_goto (TREE_VALUE (link));
3285 /* A second pass over all nonlocal labels; this time we handle those
3286 we should not be able to jump to at this point. */
3287 link = nonlocal_labels;
3288 slot = nonlocal_goto_handler_slots;
3290 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3291 if (DECL_TOO_LATE (TREE_VALUE (link)))
3294 lab = expand_nl_handler_label (XEXP (slot, 0),
3295 thisblock->data.block.first_insn);
3296 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3302 expand_nl_goto_receiver ();
3303 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "abort"), 0,
3308 nonlocal_goto_handler_labels = label_list;
3309 emit_label (afterward);
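
/* Illustrative source (GNU C, a sketch only; `h' is hypothetical) for
   which the receivers above matter:

       void f (void)
       {
         void g (void) { goto l; }   -- nested function, nonlocal goto
         h (g);                      -- h may call g, which jumps back into f
        l: ;
       }

   Each such nonlocal jump lands on a handler label emitted above, which
   restores f's frame (and possibly argument) pointer before jumping to
   the real label `l'.  */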
3312 /* Generate RTL code to terminate a binding contour.
3314 VARS is the chain of VAR_DECL nodes for the variables bound in this
3315 contour. There may actually be other nodes in this chain, but any
3316 nodes other than VAR_DECLS are ignored.
3318 MARK_ENDS is nonzero if we should put a note at the beginning
3319 and end of this binding contour.
3321 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3322 (That is true automatically if the contour has a saved stack level.) */
3325 expand_end_bindings (vars, mark_ends, dont_jump_in)
3330 register struct nesting *thisblock;
3333 while (block_stack->data.block.exception_region)
3335 /* Because we don't need or want a new temporary level and
3336 because we didn't create one in expand_eh_region_start,
3337 create a fake one now to avoid removing one in
3338 expand_end_bindings. */
3341 block_stack->data.block.exception_region = 0;
3343 expand_end_bindings (NULL_TREE, 0, 0);
3346 /* Since expand_eh_region_start does an expand_start_bindings, we
3347 have to first end all the bindings that were created by
3348 expand_eh_region_start. */
3350 thisblock = block_stack;
3353 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3354 if (TREE_CODE (decl) == VAR_DECL
3355 && ! TREE_USED (decl)
3356 && ! DECL_IN_SYSTEM_HEADER (decl)
3357 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3358 warning_with_decl (decl, "unused variable `%s'");
3360 if (thisblock->exit_label)
3362 do_pending_stack_adjust ();
3363 emit_label (thisblock->exit_label);
3366 /* If necessary, make handlers for nonlocal gotos taking
3367 place in the function calls in this block. */
3368 if (function_call_count != thisblock->data.block.function_call_count
3370 /* Make handler for outermost block
3371 if there were any nonlocal gotos to this function. */
3372 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3373 /* Make handler for inner block if it has something
3374 special to do when you jump out of it. */
3375 : (thisblock->data.block.cleanups != 0
3376 || thisblock->data.block.stack_level != 0)))
3377 expand_nl_goto_receivers (thisblock);
3379 /* Don't allow jumping into a block that has a stack level.
3380 Cleanups are allowed, though. */
3382 || thisblock->data.block.stack_level != 0)
3384 struct label_chain *chain;
3386 /* Any labels in this block are no longer valid to go to.
3387 Mark them to cause an error message. */
3388 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3390 DECL_TOO_LATE (chain->label) = 1;
3391 /* If any goto without a fixup came to this label,
3392 that must be an error, because gotos without fixups
3393 come from outside all saved stack-levels. */
3394 if (TREE_ADDRESSABLE (chain->label))
3395 error_with_decl (chain->label,
3396 "label `%s' used before containing binding contour");
3400 /* Restore stack level in effect before the block
3401 (only if variable-size objects allocated). */
3402 /* Perform any cleanups associated with the block. */
3404 if (thisblock->data.block.stack_level != 0
3405 || thisblock->data.block.cleanups != 0)
3407 /* Only clean up here if this point can actually be reached. */
3408 int reachable = GET_CODE (get_last_insn ()) != BARRIER;
3410 /* Don't let cleanups affect ({...}) constructs. */
3411 int old_expr_stmts_for_value = expr_stmts_for_value;
3412 rtx old_last_expr_value = last_expr_value;
3413 tree old_last_expr_type = last_expr_type;
3414 expr_stmts_for_value = 0;
3416 /* Do the cleanups. */
3417 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3419 do_pending_stack_adjust ();
3421 expr_stmts_for_value = old_expr_stmts_for_value;
3422 last_expr_value = old_last_expr_value;
3423 last_expr_type = old_last_expr_type;
3425 /* Restore the stack level. */
3427 if (reachable && thisblock->data.block.stack_level != 0)
3429 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3430 thisblock->data.block.stack_level, NULL_RTX);
3431 if (nonlocal_goto_handler_slots != 0)
3432 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3436 /* Any gotos out of this block must also do these things.
Also report any gotos with fixups that came to labels in this
block.  */
3439 fixup_gotos (thisblock,
3440 thisblock->data.block.stack_level,
3441 thisblock->data.block.cleanups,
3442 thisblock->data.block.first_insn,
3446 /* Mark the beginning and end of the scope if requested.
3447 We do this now, after running cleanups on the variables
3448 just going out of scope, so they are in scope for their cleanups. */
3451 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3453 /* Get rid of the beginning-mark if we don't make an end-mark. */
3454 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3456 /* If doing stupid register allocation, make sure lives of all
3457 register variables declared here extend thru end of scope. */
3460 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3461 if (TREE_CODE (decl) == VAR_DECL && DECL_RTL (decl))
3462 use_variable (DECL_RTL (decl));
3464 /* Restore the temporary level of TARGET_EXPRs. */
3465 target_temp_slot_level = thisblock->data.block.target_temp_slot_level;
3467 /* Restore block_stack level for containing block. */
3469 stack_block_stack = thisblock->data.block.innermost_stack_block;
3470 POPSTACK (block_stack);
3472 /* Pop the stack slot nesting and free any slots at this level. */
3476 /* Generate RTL for the automatic variable declaration DECL.
3477 (Other kinds of declarations are simply ignored if seen here.) */
3483 struct nesting *thisblock = block_stack;
3486 type = TREE_TYPE (decl);
3488 /* Only automatic variables need any expansion done.
3489 Static and external variables, and external functions,
3490 will be handled by `assemble_variable' (called from finish_decl).
3491 TYPE_DECL and CONST_DECL require nothing.
3492 PARM_DECLs are handled in `assign_parms'. */
3494 if (TREE_CODE (decl) != VAR_DECL)
3496 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3499 /* Create the RTL representation for the variable. */
3501 if (type == error_mark_node)
3502 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, const0_rtx);
3503 else if (DECL_SIZE (decl) == 0)
3504 /* Variable with incomplete type. */
3506 if (DECL_INITIAL (decl) == 0)
3507 /* Error message was already done; now avoid a crash. */
3508 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
3510 /* An initializer is going to decide the size of this array.
3511 Until we know the size, represent its address with a reg. */
3512 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3513 MEM_SET_IN_STRUCT_P (DECL_RTL (decl), AGGREGATE_TYPE_P (type));
3515 else if (DECL_MODE (decl) != BLKmode
/* If -ffloat-store, don't put explicit float vars
in registers.  */
3518 && !(flag_float_store
3519 && TREE_CODE (type) == REAL_TYPE)
3520 && ! TREE_THIS_VOLATILE (decl)
3521 && ! TREE_ADDRESSABLE (decl)
3522 && (DECL_REGISTER (decl) || ! obey_regdecls)
3523 /* if -fcheck-memory-usage, check all variables. */
3524 && ! current_function_check_memory_usage)
3526 /* Automatic variable that can go in a register. */
3527 int unsignedp = TREE_UNSIGNED (type);
3528 enum machine_mode reg_mode
3529 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3531 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3532 mark_user_reg (DECL_RTL (decl));
3534 if (POINTER_TYPE_P (type))
3535 mark_reg_pointer (DECL_RTL (decl),
3536 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl)))
3540 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST
3541 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3542 && (TREE_INT_CST_HIGH (DECL_SIZE (decl)) != 0
3543 || (TREE_INT_CST_LOW (DECL_SIZE (decl))
3544 > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
3546 /* Variable of fixed size that goes on the stack. */
3550 /* If we previously made RTL for this decl, it must be an array
3551 whose size was determined by the initializer.
3552 The old address was a register; set that register now
3553 to the proper address. */
3554 if (DECL_RTL (decl) != 0)
3556 if (GET_CODE (DECL_RTL (decl)) != MEM
3557 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3559 oldaddr = XEXP (DECL_RTL (decl), 0);
3562 DECL_RTL (decl) = assign_temp (TREE_TYPE (decl), 1, 1, 1);
3563 MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
3564 AGGREGATE_TYPE_P (TREE_TYPE (decl)));
3566 /* Set alignment we actually gave this decl. */
3567 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3568 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3572 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3573 if (addr != oldaddr)
3574 emit_move_insn (oldaddr, addr);
3577 /* If this is a memory ref that contains aggregate components,
3578 mark it as such for cse and loop optimize. */
3579 MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
3580 AGGREGATE_TYPE_P (TREE_TYPE (decl)));
3582 /* If this is in memory because of -ffloat-store,
3583 set the volatile bit, to prevent optimizations from
3584 undoing the effects. */
3585 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
3586 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3589 MEM_ALIAS_SET (DECL_RTL (decl)) = get_alias_set (decl);
3592 /* Dynamic-size object: must push space on the stack. */
/* Record the stack pointer on entry to block, if we have
not already done so.  */
3598 if (thisblock->data.block.stack_level == 0)
3600 do_pending_stack_adjust ();
3601 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3602 &thisblock->data.block.stack_level,
3603 thisblock->data.block.first_insn);
3604 stack_block_stack = thisblock;
3607 /* Compute the variable's size, in bytes. */
3608 size = expand_expr (size_binop (CEIL_DIV_EXPR,
3610 size_int (BITS_PER_UNIT)),
3611 NULL_RTX, VOIDmode, 0);
3614 /* Allocate space on the stack for the variable. Note that
3615 DECL_ALIGN says how the variable is to be aligned and we
cannot use it to conclude anything about the alignment of
the size.  */
3618 address = allocate_dynamic_stack_space (size, NULL_RTX,
3619 TYPE_ALIGN (TREE_TYPE (decl)));
/* Reference the variable indirectly through that rtx.  */
3622 DECL_RTL (decl) = gen_rtx_MEM (DECL_MODE (decl), address);
3624 /* If this is a memory ref that contains aggregate components,
3625 mark it as such for cse and loop optimize. */
3626 MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
3627 AGGREGATE_TYPE_P (TREE_TYPE (decl)));
3629 /* Indicate the alignment we actually gave this variable. */
3630 #ifdef STACK_BOUNDARY
3631 DECL_ALIGN (decl) = STACK_BOUNDARY;
3633 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3637 if (TREE_THIS_VOLATILE (decl))
3638 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3639 #if 0 /* A variable is not necessarily unchanging
3640 just because it is const. RTX_UNCHANGING_P
3641 means no change in the function,
3642 not merely no change in the variable's scope.
3643 It is correct to set RTX_UNCHANGING_P if the variable's scope
3644 is the whole function. There's no convenient way to test that. */
3645 if (TREE_READONLY (decl))
3646 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
3649 /* If doing stupid register allocation, make sure life of any
3650 register variable starts here, at the start of its scope. */
3653 use_variable (DECL_RTL (decl));
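
/* Illustrative outcomes of the classification above (a sketch only):

       int i;           -- fits a machine mode, not volatile or
                           addressable: gets a pseudo REG
       double d;        -- with -ffloat-store: forced into a stack slot
                           and marked volatile
       struct s buf;    -- fixed-size BLKmode: stack slot via assign_temp
       char vla[n];     -- variable size: space pushed at run time with
                           allocate_dynamic_stack_space                   */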
3658 /* Emit code to perform the initialization of a declaration DECL. */
3661 expand_decl_init (decl)
3664 int was_used = TREE_USED (decl);
3666 /* If this is a CONST_DECL, we don't have to generate any code, but
3667 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3668 to be set while in the obstack containing the constant. If we don't
3669 do this, we can lose if we have functions nested three deep and the middle
3670 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3671 the innermost function is the first to expand that STRING_CST. */
3672 if (TREE_CODE (decl) == CONST_DECL)
3674 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3675 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3676 EXPAND_INITIALIZER);
3680 if (TREE_STATIC (decl))
3683 /* Compute and store the initial value now. */
3685 if (DECL_INITIAL (decl) == error_mark_node)
3687 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3689 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3690 || code == POINTER_TYPE || code == REFERENCE_TYPE)
3691 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3695 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3697 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3698 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3702 /* Don't let the initialization count as "using" the variable. */
3703 TREE_USED (decl) = was_used;
3705 /* Free any temporaries we made while initializing the decl. */
3706 preserve_temp_slots (NULL_RTX);
3710 /* CLEANUP is an expression to be executed at exit from this binding contour;
3711 for example, in C++, it might call the destructor for this variable.
3713 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
3714 CLEANUP multiple times, and have the correct semantics. This
happens in exception handling, and for gotos, returns, and breaks
that leave the current scope.
3718 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3719 that is not associated with any particular variable. */
3722 expand_decl_cleanup (decl, cleanup)
3725 struct nesting *thisblock = block_stack;
3727 /* Error if we are not in any block. */
3731 /* Record the cleanup if there is one. */
3737 tree *cleanups = &thisblock->data.block.cleanups;
3738 int cond_context = conditional_context ();
3742 rtx flag = gen_reg_rtx (word_mode);
3747 emit_move_insn (flag, const0_rtx);
3748 set_flag_0 = get_insns ();
3751 thisblock->data.block.last_unconditional_cleanup
3752 = emit_insns_after (set_flag_0,
3753 thisblock->data.block.last_unconditional_cleanup);
3755 emit_move_insn (flag, const1_rtx);
3757 /* All cleanups must be on the function_obstack. */
3758 push_obstacks_nochange ();
3759 resume_temporary_allocation ();
3761 cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
3762 DECL_RTL (cond) = flag;
3764 /* Conditionalize the cleanup. */
3765 cleanup = build (COND_EXPR, void_type_node,
3766 truthvalue_conversion (cond),
3767 cleanup, integer_zero_node);
3768 cleanup = fold (cleanup);
3772 cleanups = thisblock->data.block.cleanup_ptr;
3775 /* All cleanups must be on the function_obstack. */
3776 push_obstacks_nochange ();
3777 resume_temporary_allocation ();
3778 cleanup = unsave_expr (cleanup);
3781 t = *cleanups = temp_tree_cons (decl, cleanup, *cleanups);
3784 /* If this block has a cleanup, it belongs in stack_block_stack. */
3785 stack_block_stack = thisblock;
3792 /* If this was optimized so that there is no exception region for the
3793 cleanup, then mark the TREE_LIST node, so that we can later tell
3794 if we need to call expand_eh_region_end. */
3795 if (! using_eh_for_cleanups_p
3796 || expand_eh_region_start_tree (decl, cleanup))
3797 TREE_ADDRESSABLE (t) = 1;
3798 /* If that started a new EH region, we're in a new block. */
3799 thisblock = block_stack;
3806 thisblock->data.block.last_unconditional_cleanup
3807 = emit_insns_after (seq,
3808 thisblock->data.block.last_unconditional_cleanup);
3812 thisblock->data.block.last_unconditional_cleanup
3814 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
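
/* Illustrative case (a sketch; make_temp/use_temp are hypothetical):
   if expanding `p ? (make_temp (), use_temp ()) : 0' registers a
   cleanup for the temporary while conditional_context () is true, the
   code above guards it with a flag kept in a pseudo: the flag is
   zeroed at the last unconditional point, set to 1 where the temporary
   is actually constructed, and the cleanup is wrapped in the COND_EXPR
   built above so that it runs only if the flag was set.  */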
3820 /* Like expand_decl_cleanup, but suppress generating an exception handler
3821 to perform the cleanup. */
3824 expand_decl_cleanup_no_eh (decl, cleanup)
3827 int save_eh = using_eh_for_cleanups_p;
3830 using_eh_for_cleanups_p = 0;
3831 result = expand_decl_cleanup (decl, cleanup);
3832 using_eh_for_cleanups_p = save_eh;
3837 /* Arrange for the top element of the dynamic cleanup chain to be
3838 popped if we exit the current binding contour. DECL is the
3839 associated declaration, if any, otherwise NULL_TREE. If the
3840 current contour is left via an exception, then __sjthrow will pop
3841 the top element off the dynamic cleanup chain. The code that
3842 avoids doing the action we push into the cleanup chain in the
3843 exceptional case is contained in expand_cleanups.
3845 This routine is only used by expand_eh_region_start, and that is
3846 the only way in which an exception region should be started. This
3847 routine is only used when using the setjmp/longjmp codegen method
3848 for exception handling. */
3851 expand_dcc_cleanup (decl)
3854 struct nesting *thisblock = block_stack;
3857 /* Error if we are not in any block. */
3861 /* Record the cleanup for the dynamic handler chain. */
3863 /* All cleanups must be on the function_obstack. */
3864 push_obstacks_nochange ();
3865 resume_temporary_allocation ();
3866 cleanup = make_node (POPDCC_EXPR);
3869 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
3870 thisblock->data.block.cleanups
3871 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3873 /* If this block has a cleanup, it belongs in stack_block_stack. */
3874 stack_block_stack = thisblock;
3878 /* Arrange for the top element of the dynamic handler chain to be
3879 popped if we exit the current binding contour. DECL is the
3880 associated declaration, if any, otherwise NULL_TREE. If the current
3881 contour is left via an exception, then __sjthrow will pop the top
3882 element off the dynamic handler chain. The code that avoids doing
3883 the action we push into the handler chain in the exceptional case
3884 is contained in expand_cleanups.
3886 This routine is only used by expand_eh_region_start, and that is
3887 the only way in which an exception region should be started. This
3888 routine is only used when using the setjmp/longjmp codegen method
3889 for exception handling. */
3892 expand_dhc_cleanup (decl)
3895 struct nesting *thisblock = block_stack;
3898 /* Error if we are not in any block. */
3902 /* Record the cleanup for the dynamic handler chain. */
3904 /* All cleanups must be on the function_obstack. */
3905 push_obstacks_nochange ();
3906 resume_temporary_allocation ();
3907 cleanup = make_node (POPDHC_EXPR);
3910 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
3911 thisblock->data.block.cleanups
3912 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3914 /* If this block has a cleanup, it belongs in stack_block_stack. */
3915 stack_block_stack = thisblock;
3919 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
3920 DECL_ELTS is the list of elements that belong to DECL's type.
3921 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
3924 expand_anon_union_decl (decl, cleanup, decl_elts)
3925 tree decl, cleanup, decl_elts;
3927 struct nesting *thisblock = block_stack;
3931 expand_decl_cleanup (decl, cleanup);
3932 x = DECL_RTL (decl);
3936 tree decl_elt = TREE_VALUE (decl_elts);
3937 tree cleanup_elt = TREE_PURPOSE (decl_elts);
3938 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
3940 /* Propagate the union's alignment to the elements. */
3941 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
3943 /* If the element has BLKmode and the union doesn't, the union is
3944 aligned such that the element doesn't need to have BLKmode, so
3945 change the element's mode to the appropriate one for its size. */
3946 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
3947 DECL_MODE (decl_elt) = mode
3948 = mode_for_size (TREE_INT_CST_LOW (DECL_SIZE (decl_elt)),
3951 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
3952 instead create a new MEM rtx with the proper mode. */
3953 if (GET_CODE (x) == MEM)
3955 if (mode == GET_MODE (x))
3956 DECL_RTL (decl_elt) = x;
3959 DECL_RTL (decl_elt) = gen_rtx_MEM (mode, copy_rtx (XEXP (x, 0)));
3960 MEM_COPY_ATTRIBUTES (DECL_RTL (decl_elt), x);
3961 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
3964 else if (GET_CODE (x) == REG)
3966 if (mode == GET_MODE (x))
3967 DECL_RTL (decl_elt) = x;
3969 DECL_RTL (decl_elt) = gen_rtx_SUBREG (mode, x, 0);
3974 /* Record the cleanup if there is one. */
3977 thisblock->data.block.cleanups
3978 = temp_tree_cons (decl_elt, cleanup_elt,
3979 thisblock->data.block.cleanups);
3981 decl_elts = TREE_CHAIN (decl_elts);
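
/* Illustrative case (a sketch): for an anonymous union

       union { int i; float f; };

   each member's DECL_RTL shares the union's storage: the same MEM
   narrowed to the member's mode, or a SUBREG of the union's register,
   exactly as arranged above.  */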
3985 /* Expand a list of cleanups LIST.
3986 Elements may be expressions or may be nested lists.
3988 If DONT_DO is nonnull, then any list-element
3989 whose TREE_PURPOSE matches DONT_DO is omitted.
3990 This is sometimes used to avoid a cleanup associated with
3991 a value that is being returned out of the scope.
3993 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
3994 goto and handle protection regions specially in that case.
3996 If REACHABLE, we emit code, otherwise just inform the exception handling
3997 code about this finalization. */
4000 expand_cleanups (list, dont_do, in_fixup, reachable)
4007 for (tail = list; tail; tail = TREE_CHAIN (tail))
4008 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
4010 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4011 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
4016 tree cleanup = TREE_VALUE (tail);
4018 /* See expand_d{h,c}c_cleanup for why we avoid this. */
4019 if (TREE_CODE (cleanup) != POPDHC_EXPR
4020 && TREE_CODE (cleanup) != POPDCC_EXPR
4021 /* See expand_eh_region_start_tree for this case. */
4022 && ! TREE_ADDRESSABLE (tail))
4024 cleanup = protect_with_terminate (cleanup);
4025 expand_eh_region_end (cleanup);
4031 /* Cleanups may be run multiple times. For example,
4032 when exiting a binding contour, we expand the
4033 cleanups associated with that contour. When a goto
4034 within that binding contour has a target outside that
4035 contour, it will expand all cleanups from its scope to
4036 the target. Though the cleanups are expanded multiple
4037 times, the control paths are non-overlapping so the
4038 cleanups will not be executed twice. */
4040 /* We may need to protect fixups with rethrow regions. */
4041 int protect = (in_fixup && ! TREE_ADDRESSABLE (tail));
4044 expand_fixup_region_start ();
4046 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4048 expand_fixup_region_end (TREE_VALUE (tail));
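/* ILLUSTRATION (a stand-alone sketch, not part of GNU CC):
   expand_cleanups above walks a list whose elements are either cleanup
   expressions or nested lists, skipping the entry whose purpose matches
   DONT_DO. The same shape of recursion on a plain cons list in ISO C;
   every name below is hypothetical. */
#if 0
#include <stdio.h>

struct cell
{
  const char *purpose;          /* analogue of TREE_PURPOSE  */
  const char *cleanup;          /* set: this element is a cleanup  */
  struct cell *sublist;         /* set: this element is a nested list  */
  struct cell *chain;           /* analogue of TREE_CHAIN  */
};

static void
run_cleanups (struct cell *list, const char *dont_do)
{
  struct cell *tail;
  for (tail = list; tail; tail = tail->chain)
    if (dont_do == 0 || tail->purpose != dont_do)
      {
        if (tail->sublist)
          run_cleanups (tail->sublist, dont_do);
        else
          printf ("expand cleanup: %s\n", tail->cleanup);
      }
}

int
main (void)
{
  static const char retval_tag[] = "retval";
  struct cell inner = { 0, "destroy temp", 0, 0 };
  struct cell b = { 0, 0, &inner, 0 };
  struct cell a = { retval_tag, "destroy retval", 0, &b };
  /* Skip the cleanup of the value being returned out of the scope,
     as the DONT_DO comment above describes.  */
  run_cleanups (&a, retval_tag);
  return 0;
}
#endif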
4055 /* Mark the context we are emitting RTL for as a conditional
4056 context, so that any cleanup actions we register with
4057 expand_decl_init will be properly conditionalized when those
4058 cleanup actions are later performed. Must be called before any
4059 expression (tree) is expanded that is within a conditional context. */
4062 start_cleanup_deferral ()
4064 /* block_stack can be NULL if we are inside the parameter list. It is
4065 OK to do nothing, because cleanups aren't possible here. */
4067 ++block_stack->data.block.conditional_code;
4070 /* Mark the end of a conditional region of code. Because cleanup
4071 deferrals may be nested, we may still be in a conditional region
4072 after we end the currently deferred cleanups; only after we end
4073 all deferred cleanups are we back in unconditional code. */
4076 end_cleanup_deferral ()
4078 /* block_stack can be NULL if we are inside the parameter list. It is
4079 OK to do nothing, because cleanups aren't possible here. */
4081 --block_stack->data.block.conditional_code;
4084 /* Move all cleanups from the current block_stack
4085 to the containing block_stack, where they are assumed to
4086 have been created. If anything can cause a temporary to
4087 be created, but not expanded for more than one level of
4088 block_stacks, then this code will have to change. */
4093 struct nesting *block = block_stack;
4094 struct nesting *outer = block->next;
4096 outer->data.block.cleanups
4097 = chainon (block->data.block.cleanups,
4098 outer->data.block.cleanups);
4099 block->data.block.cleanups = 0;
4103 last_cleanup_this_contour ()
4105 if (block_stack == 0)
4108 return block_stack->data.block.cleanups;
4111 /* Return 1 if there are any pending cleanups at this point.
4112 If THIS_CONTOUR is nonzero, check the current contour as well.
4113 Otherwise, look only at the contours that enclose this one. */
4116 any_pending_cleanups (this_contour)
4119 struct nesting *block;
4121 if (block_stack == 0)
4124 if (this_contour && block_stack->data.block.cleanups != NULL)
4126 if (block_stack->data.block.cleanups == 0
4127 && block_stack->data.block.outer_cleanups == 0)
4130 for (block = block_stack->next; block; block = block->next)
4131 if (block->data.block.cleanups != 0)
4137 /* Enter a case (Pascal) or switch (C) statement.
4138 Push a block onto case_stack and nesting_stack
4139 to accumulate the case-labels that are seen
4140 and to record the labels generated for the statement.
4142 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4143 Otherwise, this construct is transparent for `exit_something'.
4145 EXPR is the index-expression to be dispatched on.
4146 TYPE is its nominal type. We could simply convert EXPR to this type,
4147 but instead we take shortcuts. */
4150 expand_start_case (exit_flag, expr, type, printname)
4154 const char *printname;
4156 register struct nesting *thiscase = ALLOC_NESTING ();
4158 /* Make an entry on case_stack for the case we are entering. */
4160 thiscase->next = case_stack;
4161 thiscase->all = nesting_stack;
4162 thiscase->depth = ++nesting_depth;
4163 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4164 thiscase->data.case_stmt.case_list = 0;
4165 thiscase->data.case_stmt.index_expr = expr;
4166 thiscase->data.case_stmt.nominal_type = type;
4167 thiscase->data.case_stmt.default_label = 0;
4168 thiscase->data.case_stmt.num_ranges = 0;
4169 thiscase->data.case_stmt.printname = printname;
4170 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4171 case_stack = thiscase;
4172 nesting_stack = thiscase;
4174 do_pending_stack_adjust ();
4176 /* Make sure case_stmt.start points to something that won't
4177 need any transformation before expand_end_case. */
4178 if (GET_CODE (get_last_insn ()) != NOTE)
4179 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4181 thiscase->data.case_stmt.start = get_last_insn ();
4183 start_cleanup_deferral ();
4187 /* Start a "dummy case statement" within which case labels are invalid
4188 and are not connected to any larger real case statement.
4189 This can be used if you don't want to let a case statement jump
4190 into the middle of certain kinds of constructs. */
4193 expand_start_case_dummy ()
4195 register struct nesting *thiscase = ALLOC_NESTING ();
4197 /* Make an entry on case_stack for the dummy. */
4199 thiscase->next = case_stack;
4200 thiscase->all = nesting_stack;
4201 thiscase->depth = ++nesting_depth;
4202 thiscase->exit_label = 0;
4203 thiscase->data.case_stmt.case_list = 0;
4204 thiscase->data.case_stmt.start = 0;
4205 thiscase->data.case_stmt.nominal_type = 0;
4206 thiscase->data.case_stmt.default_label = 0;
4207 thiscase->data.case_stmt.num_ranges = 0;
4208 case_stack = thiscase;
4209 nesting_stack = thiscase;
4210 start_cleanup_deferral ();
4213 /* End a dummy case statement. */
4216 expand_end_case_dummy ()
4218 end_cleanup_deferral ();
4219 POPSTACK (case_stack);
4222 /* Return the data type of the index-expression
4223 of the innermost case statement, or null if none. */
4226 case_index_expr_type ()
4229 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4236 /* If this is the first label, warn if any insns have been emitted. */
4237 if (case_stack->data.case_stmt.line_number_status >= 0)
4241 restore_line_number_status
4242 (case_stack->data.case_stmt.line_number_status);
4243 case_stack->data.case_stmt.line_number_status = -1;
4245 for (insn = case_stack->data.case_stmt.start;
4247 insn = NEXT_INSN (insn))
4249 if (GET_CODE (insn) == CODE_LABEL)
4251 if (GET_CODE (insn) != NOTE
4252 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4255 insn = PREV_INSN (insn);
4256 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4258 /* If insn is zero, then there must have been a syntax error. */
4260 warning_with_file_and_line (NOTE_SOURCE_FILE (insn),
4261 NOTE_LINE_NUMBER (insn),
4262 "unreachable code at beginning of %s",
4263 case_stack->data.case_stmt.printname);
4270 /* Accumulate one case or default label inside a case or switch statement.
4271 VALUE is the value of the case (a null pointer, for a default label).
4272 The function CONVERTER, when applied to arguments T and V,
4273 converts the value V to the type T.
4275 If not currently inside a case or switch statement, return 1 and do
4276 nothing. The caller will print a language-specific error message.
4277 If VALUE is a duplicate or overlaps, return 2 and do nothing
4278 except store the (first) duplicate node in *DUPLICATE.
4279 If VALUE is out of range, return 3 and do nothing.
4280 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4281 Return 0 on success.
4283 Extended to handle range statements. */
4286 pushcase (value, converter, label, duplicate)
4287 register tree value;
4288 tree (*converter) PROTO((tree, tree));
4289 register tree label;
4295 /* Fail if not inside a real case statement. */
4296 if (! (case_stack && case_stack->data.case_stmt.start))
4299 if (stack_block_stack
4300 && stack_block_stack->depth > case_stack->depth)
4303 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4304 nominal_type = case_stack->data.case_stmt.nominal_type;
4306 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4307 if (index_type == error_mark_node)
4310 /* Convert VALUE to the type in which the comparisons are nominally done. */
4312 value = (*converter) (nominal_type, value);
4316 /* Fail if this value is out of range for the actual type of the index
4317 (which may be narrower than NOMINAL_TYPE). */
4318 if (value != 0 && ! int_fits_type_p (value, index_type))
4321 /* Fail if this is a duplicate or overlaps another entry. */
4324 if (case_stack->data.case_stmt.default_label != 0)
4326 *duplicate = case_stack->data.case_stmt.default_label;
4329 case_stack->data.case_stmt.default_label = label;
4332 return add_case_node (value, value, label, duplicate);
4334 expand_label (label);
4338 /* Like pushcase but this case applies to all values between VALUE1 and
4339 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4340 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4341 starts at VALUE1 and ends at the highest value of the index type.
4342 If both are NULL, this case applies to all values.
4344 The return value is the same as that of pushcase but there is one
4345 additional error code: 4 means the specified range was empty. */
4348 pushcase_range (value1, value2, converter, label, duplicate)
4349 register tree value1, value2;
4350 tree (*converter) PROTO((tree, tree));
4351 register tree label;
4357 /* Fail if not inside a real case statement. */
4358 if (! (case_stack && case_stack->data.case_stmt.start))
4361 if (stack_block_stack
4362 && stack_block_stack->depth > case_stack->depth)
4365 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4366 nominal_type = case_stack->data.case_stmt.nominal_type;
4368 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4369 if (index_type == error_mark_node)
4374 /* Convert VALUEs to type in which the comparisons are nominally done
4375 and replace any unspecified value with the corresponding bound. */
4377 value1 = TYPE_MIN_VALUE (index_type);
4379 value2 = TYPE_MAX_VALUE (index_type);
4381 /* Fail if the range is empty. Do this before any conversion since
4382 we want to allow out-of-range empty ranges. */
4383 if (value2 && tree_int_cst_lt (value2, value1))
4386 value1 = (*converter) (nominal_type, value1);
4388 /* If the max was unbounded, use the max of the nominal_type we are
4389 converting to. Do this after the < check above to suppress false positives. */
4392 value2 = TYPE_MAX_VALUE (nominal_type);
4393 value2 = (*converter) (nominal_type, value2);
4395 /* Fail if these values are out of range. */
4396 if (TREE_CONSTANT_OVERFLOW (value1)
4397 || ! int_fits_type_p (value1, index_type))
4400 if (TREE_CONSTANT_OVERFLOW (value2)
4401 || ! int_fits_type_p (value2, index_type))
4404 return add_case_node (value1, value2, label, duplicate);
4407 /* Do the actual insertion of a case label for pushcase and pushcase_range
4408 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4409 slowdown for large switch statements. */
4412 add_case_node (low, high, label, duplicate)
4417 struct case_node *p, **q, *r;
4419 q = &case_stack->data.case_stmt.case_list;
4426 /* Keep going past elements distinctly greater than HIGH. */
4427 if (tree_int_cst_lt (high, p->low))
4430 /* or distinctly less than LOW. */
4431 else if (tree_int_cst_lt (p->high, low))
4436 /* We have an overlap; this is an error. */
4437 *duplicate = p->code_label;
4442 /* Add this label to the chain, and succeed.
4443 Copy LOW, HIGH so they are on the temporary rather than the momentary
4444 obstack and will thus survive till the end of the case statement. */
4446 r = (struct case_node *) oballoc (sizeof (struct case_node));
4447 r->low = copy_node (low);
4449 /* If the bounds are equal, turn this into the one-value case. */
4451 if (tree_int_cst_equal (low, high))
4455 r->high = copy_node (high);
4456 case_stack->data.case_stmt.num_ranges++;
4459 r->code_label = label;
4460 expand_label (label);
4470 struct case_node *s;
4476 if (! (b = p->balance))
4477 /* Growth propagation from left side. */
4484 if ((p->left = s = r->right))
4493 if ((r->parent = s))
4501 case_stack->data.case_stmt.case_list = r;
4504 /* r->balance == +1 */
4509 struct case_node *t = r->right;
4511 if ((p->left = s = t->right))
4515 if ((r->right = s = t->left))
4529 if ((t->parent = s))
4537 case_stack->data.case_stmt.case_list = t;
4544 /* p->balance == +1; growth of left side balances the node. */
4554 if (! (b = p->balance))
4555 /* Growth propagation from right side. */
4563 if ((p->right = s = r->left))
4571 if ((r->parent = s))
4580 case_stack->data.case_stmt.case_list = r;
4584 /* r->balance == -1 */
4588 struct case_node *t = r->left;
4590 if ((p->right = s = t->left))
4595 if ((r->left = s = t->right))
4609 if ((t->parent = s))
4618 case_stack->data.case_stmt.case_list = t;
4624 /* p->balance == -1; growth of right side balances the node. */
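/* ILLUSTRATION (a stand-alone sketch, not part of GNU CC):
   add_case_node above keeps the case values ordered and reports any
   overlap as a duplicate. The same invariant, shown on a sorted singly
   linked list instead of an AVL tree, in ISO C; names are
   hypothetical. */
#if 0
#include <stdio.h>
#include <stdlib.h>

struct range { long low, high; struct range *right; };

/* Insert [LOW, HIGH]; return 0 on success, 2 on overlap,
   mirroring pushcase's error codes.  */
static int
add_range (struct range **head, long low, long high)
{
  struct range **q = head, *r;
  while (*q && (*q)->high < low)        /* skip nodes distinctly less  */
    q = &(*q)->right;
  if (*q && (*q)->low <= high)          /* neither less nor greater  */
    return 2;                           /* => overlap, i.e. duplicate  */
  r = (struct range *) malloc (sizeof *r);
  r->low = low;
  r->high = high;
  r->right = *q;
  *q = r;
  return 0;
}

int
main (void)
{
  struct range *cases = 0;
  printf ("%d\n", add_range (&cases, 1, 3));    /* 0  */
  printf ("%d\n", add_range (&cases, 10, 10));  /* 0  */
  printf ("%d\n", add_range (&cases, 2, 5));    /* 2: overlaps 1..3  */
  return 0;
}
#endif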
4638 /* Returns the number of possible values of TYPE.
4639 Returns -1 if the number is unknown or variable.
4640 Returns -2 if the number does not fit in a HOST_WIDE_INT.
4641 Sets *SPARENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4642 do not increase monotonically (there may be duplicates);
4643 to 1 if the values increase monotonically, but not always by 1;
4644 otherwise sets it to 0. */
4647 all_cases_count (type, spareness)
4651 HOST_WIDE_INT count;
4654 switch (TREE_CODE (type))
4661 count = 1 << BITS_PER_UNIT;
4665 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4666 || TYPE_MAX_VALUE (type) == NULL
4667 || TREE_CODE (TYPE_MAX_VALUE (type)) != INTEGER_CST)
4672 /* count = TREE_INT_CST_LOW (TYPE_MAX_VALUE (type))
4673 - TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + 1
4674 but with overflow checking. */
4675 tree mint = TYPE_MIN_VALUE (type);
4676 tree maxt = TYPE_MAX_VALUE (type);
4677 HOST_WIDE_INT lo, hi;
4678 neg_double (TREE_INT_CST_LOW (mint), TREE_INT_CST_HIGH (mint),
4680 add_double (TREE_INT_CST_LOW (maxt), TREE_INT_CST_HIGH (maxt),
4682 add_double (lo, hi, 1, 0, &lo, &hi);
4683 if (hi != 0 || lo < 0)
4690 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4692 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4693 || TREE_CODE (TREE_VALUE (t)) != INTEGER_CST
4694 || TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + count
4695 != TREE_INT_CST_LOW (TREE_VALUE (t)))
4699 if (*spareness == 1)
4701 tree prev = TREE_VALUE (TYPE_VALUES (type));
4702 for (t = TYPE_VALUES (type); t = TREE_CHAIN (t), t != NULL_TREE; )
4704 if (! tree_int_cst_lt (prev, TREE_VALUE (t)))
4709 prev = TREE_VALUE (t);
4718 #define BITARRAY_TEST(ARRAY, INDEX) \
4719 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4720 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
4721 #define BITARRAY_SET(ARRAY, INDEX) \
4722 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4723 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
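/* ILLUSTRATION (a stand-alone sketch, not part of GNU CC): an ISO C
   demonstration of the two macros above, which pack one bit per case
   value into a char array; it assumes HOST_BITS_PER_CHAR is CHAR_BIT,
   as on the usual hosts. */
#if 0
#include <limits.h>
#include <stdio.h>
#include <string.h>

#define HOST_BITS_PER_CHAR CHAR_BIT
#define BITARRAY_TEST(ARRAY, INDEX) \
  ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
   & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
#define BITARRAY_SET(ARRAY, INDEX) \
  ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
   |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))

int
main (void)
{
  unsigned char cases_seen[4];          /* room for 32 case values  */
  int i;
  memset (cases_seen, 0, sizeof cases_seen);
  BITARRAY_SET (cases_seen, 0);
  BITARRAY_SET (cases_seen, 9);
  for (i = 0; i < 12; i++)
    if (! BITARRAY_TEST (cases_seen, i))
      printf ("value %d not handled\n", i);
  return 0;
}
#endif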
4725 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
4726 with the case values we have seen, assuming the case expression has the given TYPE.
4728 SPARSENESS is as determined by all_cases_count.
4730 The time needed is proportional to COUNT, unless
4731 SPARSENESS is 2, in which case quadratic time is needed. */
4734 mark_seen_cases (type, cases_seen, count, sparseness)
4736 unsigned char *cases_seen;
4740 tree next_node_to_try = NULL_TREE;
4741 long next_node_offset = 0;
4743 register struct case_node *n, *root = case_stack->data.case_stmt.case_list;
4744 tree val = make_node (INTEGER_CST);
4745 TREE_TYPE (val) = type;
4748 else if (sparseness == 2)
4753 /* This less efficient loop is only needed to handle
4754 duplicate case values (multiple enum constants
4755 with the same value). */
4756 TREE_TYPE (val) = TREE_TYPE (root->low);
4757 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
4758 t = TREE_CHAIN (t), xlo++)
4760 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
4761 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
4765 /* Keep going past elements distinctly greater than VAL. */
4766 if (tree_int_cst_lt (val, n->low))
4769 /* or distinctly less than VAL. */
4770 else if (tree_int_cst_lt (n->high, val))
4775 /* We have found a matching range. */
4776 BITARRAY_SET (cases_seen, xlo);
4786 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
4787 for (n = root; n; n = n->right)
4789 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
4790 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
4791 while ( ! tree_int_cst_lt (n->high, val))
4793 /* Calculate (into xlo) the "offset" of the integer (val).
4794 The element with lowest value has offset 0, the next smallest
4795 element has offset 1, etc. */
4797 HOST_WIDE_INT xlo, xhi;
4799 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
4801 /* The TYPE_VALUES will be in increasing order, so
4802 start searching where we last ended. */
4803 t = next_node_to_try;
4804 xlo = next_node_offset;
4810 t = TYPE_VALUES (type);
4813 if (tree_int_cst_equal (val, TREE_VALUE (t)))
4815 next_node_to_try = TREE_CHAIN (t);
4816 next_node_offset = xlo + 1;
4821 if (t == next_node_to_try)
4830 t = TYPE_MIN_VALUE (type);
4832 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
4836 add_double (xlo, xhi,
4837 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4841 if (xhi == 0 && xlo >= 0 && xlo < count)
4842 BITARRAY_SET (cases_seen, xlo);
4843 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4845 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
4851 /* Called when the index of a switch statement is an enumerated type
4852 and there is no default label.
4854 Checks that all enumeration literals are covered by the case
4855 expressions of a switch. Also, warn if there are any extra
4856 switch cases that are *not* elements of the enumerated type.
4858 If all enumeration literals were covered by the case expressions,
4859 turn one of the expressions into the default expression since it should
4860 not be possible to fall through such a switch. */
4863 check_for_full_enumeration_handling (type)
4866 register struct case_node *n;
4867 register tree chain;
4868 #if 0 /* variable used by 'if 0'ed code below. */
4869 register struct case_node **l;
4873 /* True iff the selector type is a numbered set mode. */
4876 /* The number of possible selector values. */
4879 /* For each possible selector value, a one iff it has been matched
4880 by a case value alternative. */
4881 unsigned char *cases_seen;
4883 /* The allocated size of cases_seen, in chars. */
4889 size = all_cases_count (type, &sparseness);
4890 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
4892 if (size > 0 && size < 600000
4893 /* We deliberately use malloc here - not xmalloc. */
4894 && (cases_seen = (unsigned char *) malloc (bytes_needed)) != NULL)
4897 tree v = TYPE_VALUES (type);
4898 bzero (cases_seen, bytes_needed);
4900 /* The time complexity of this code is normally O(N), where
4901 N is the number of members in the enumerated type.
4902 However, if TYPE is an ENUMERAL_TYPE whose values do not
4903 increase monotonically, O(N*log(N)) time may be needed. */
4905 mark_seen_cases (type, cases_seen, size, sparseness);
4907 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
4909 if (BITARRAY_TEST (cases_seen, i) == 0)
4910 warning ("enumeration value `%s' not handled in switch",
4911 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
4917 /* Now we go the other way around; we warn if there are case
4918 expressions that don't correspond to enumerators. This can
4919 occur since C and C++ don't enforce type-checking of
4920 assignments to enumeration variables. */
4922 if (case_stack->data.case_stmt.case_list
4923 && case_stack->data.case_stmt.case_list->left)
4924 case_stack->data.case_stmt.case_list
4925 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
4927 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
4929 for (chain = TYPE_VALUES (type);
4930 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
4931 chain = TREE_CHAIN (chain))
4936 if (TYPE_NAME (type) == 0)
4937 warning ("case value `%ld' not in enumerated type",
4938 (long) TREE_INT_CST_LOW (n->low));
4940 warning ("case value `%ld' not in enumerated type `%s'",
4941 (long) TREE_INT_CST_LOW (n->low),
4942 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4945 : DECL_NAME (TYPE_NAME (type))));
4947 if (!tree_int_cst_equal (n->low, n->high))
4949 for (chain = TYPE_VALUES (type);
4950 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
4951 chain = TREE_CHAIN (chain))
4956 if (TYPE_NAME (type) == 0)
4957 warning ("case value `%ld' not in enumerated type",
4958 (long) TREE_INT_CST_LOW (n->high));
4960 warning ("case value `%ld' not in enumerated type `%s'",
4961 (long) TREE_INT_CST_LOW (n->high),
4962 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4965 : DECL_NAME (TYPE_NAME (type))));
4971 /* ??? This optimization is disabled because it causes valid programs to
4972 fail. ANSI C does not guarantee that an expression with enum type
4973 will have a value that is the same as one of the enumeration literals. */
4975 /* If all values were found as case labels, make one of them the default
4976 label. Thus, this switch will never fall through. We arbitrarily pick
4977 the last one to make the default since this is likely the most
4978 efficient choice. */
4982 for (l = &case_stack->data.case_stmt.case_list;
4987 case_stack->data.case_stmt.default_label = (*l)->code_label;
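/* ILLUSTRATION (a stand-alone sketch, not part of GNU CC): the
   function above warns in both directions -- enumerators with no
   matching case, and case values matching no enumerator. A model of
   both scans in ISO C, over plain arrays; names are hypothetical. */
#if 0
#include <stdio.h>

static const long enum_values[] = { 0, 1, 2, 4 };   /* the enum type  */
static const long case_values[] = { 0, 1, 3 };      /* the switch  */
static const int n_enum = 4, n_case = 3;

int
main (void)
{
  int i, j;
  for (i = 0; i < n_enum; i++)          /* unhandled enumerators  */
    {
      for (j = 0; j < n_case && case_values[j] != enum_values[i]; j++)
        ;
      if (j == n_case)
        printf ("enumeration value `%ld' not handled in switch\n",
                enum_values[i]);
    }
  for (j = 0; j < n_case; j++)          /* stray case values  */
    {
      for (i = 0; i < n_enum && enum_values[i] != case_values[j]; i++)
        ;
      if (i == n_enum)
        printf ("case value `%ld' not in enumerated type\n",
                case_values[j]);
    }
  return 0;
}
#endif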
4994 /* Terminate a case (Pascal) or switch (C) statement
4995 in which ORIG_INDEX is the expression to be tested.
4996 Generate the code to test it and jump to the right place. */
4999 expand_end_case (orig_index)
5002 tree minval = NULL_TREE, maxval = NULL_TREE, range, orig_minval;
5003 rtx default_label = 0;
5004 register struct case_node *n;
5012 register struct nesting *thiscase = case_stack;
5013 tree index_expr, index_type;
5016 table_label = gen_label_rtx ();
5017 index_expr = thiscase->data.case_stmt.index_expr;
5018 index_type = TREE_TYPE (index_expr);
5019 unsignedp = TREE_UNSIGNED (index_type);
5021 do_pending_stack_adjust ();
5023 /* This might get a spurious warning in the presence of a syntax error;
5024 it could be fixed by moving the call to check_seenlabel after the
5025 check for error_mark_node, and copying the code of check_seenlabel that
5026 deals with case_stack->data.case_stmt.line_number_status /
5027 restore_line_number_status in front of the call to end_cleanup_deferral;
5028 however, this might miss some useful warnings in the presence of
5029 non-syntax errors. */
5032 /* An ERROR_MARK occurs for various reasons including invalid data type. */
5033 if (index_type != error_mark_node)
5035 /* If switch expression was an enumerated type, check that all
5036 enumeration literals are covered by the cases.
5037 No sense trying this if there's a default case, however. */
5039 if (!thiscase->data.case_stmt.default_label
5040 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
5041 && TREE_CODE (index_expr) != INTEGER_CST)
5042 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
5044 /* If we don't have a default-label, create one here,
5045 after the body of the switch. */
5046 if (thiscase->data.case_stmt.default_label == 0)
5048 thiscase->data.case_stmt.default_label
5049 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5050 expand_label (thiscase->data.case_stmt.default_label);
5052 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5054 before_case = get_last_insn ();
5056 if (thiscase->data.case_stmt.case_list
5057 && thiscase->data.case_stmt.case_list->left)
5058 thiscase->data.case_stmt.case_list
5059 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
5061 /* Simplify the case-list before we count it. */
5062 group_case_nodes (thiscase->data.case_stmt.case_list);
5064 /* Get upper and lower bounds of case values.
5065 Also convert all the case values to the index expr's data type. */
5068 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5070 /* Check low and high label values are integers. */
5071 if (TREE_CODE (n->low) != INTEGER_CST)
5073 if (TREE_CODE (n->high) != INTEGER_CST)
5076 n->low = convert (index_type, n->low);
5077 n->high = convert (index_type, n->high);
5079 /* Count the elements and track the largest and smallest
5080 of them (treating them as signed even if they are not). */
5088 if (INT_CST_LT (n->low, minval))
5090 if (INT_CST_LT (maxval, n->high))
5093 /* A range counts double, since it requires two compares. */
5094 if (! tree_int_cst_equal (n->low, n->high))
5098 orig_minval = minval;
5100 /* Compute span of values. */
5102 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5104 end_cleanup_deferral ();
5108 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5110 emit_jump (default_label);
5113 /* If range of values is much bigger than number of values,
5114 make a sequence of conditional branches instead of a dispatch.
5115 If the switch-index is a constant, do it this way
5116 because we can optimize it. */
5118 #ifndef CASE_VALUES_THRESHOLD
5119 #ifdef HAVE_casesi
5120 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
5121 #else
5122 /* If the machine does not have a case insn that compares the
5123 bounds, this means extra overhead for dispatch tables,
5124 which raises the threshold for using them. */
5125 #define CASE_VALUES_THRESHOLD 5
5126 #endif /* HAVE_casesi */
5127 #endif /* CASE_VALUES_THRESHOLD */
5129 else if (TREE_INT_CST_HIGH (range) != 0
5130 || count < (unsigned int) CASE_VALUES_THRESHOLD
5131 || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
5133 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5136 || TREE_CODE (index_expr) == INTEGER_CST
5137 /* These will reduce to a constant. */
5138 || (TREE_CODE (index_expr) == CALL_EXPR
5139 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
5140 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
5141 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
5142 || (TREE_CODE (index_expr) == COMPOUND_EXPR
5143 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
5145 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5147 /* If the index is a short or char for which we do not have
5148 an insn to handle comparisons directly, convert it to
5149 a full integer now, rather than letting each comparison
5150 generate the conversion. */
5152 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5153 && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
5154 == CODE_FOR_nothing))
5156 enum machine_mode wider_mode;
5157 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5158 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5159 if (cmp_optab->handlers[(int) wider_mode].insn_code
5160 != CODE_FOR_nothing)
5162 index = convert_to_mode (wider_mode, index, unsignedp);
5168 do_pending_stack_adjust ();
5170 index = protect_from_queue (index, 0);
5171 if (GET_CODE (index) == MEM)
5172 index = copy_to_reg (index);
5173 if (GET_CODE (index) == CONST_INT
5174 || TREE_CODE (index_expr) == INTEGER_CST)
5176 /* Make a tree node with the proper constant value
5177 if we don't already have one. */
5178 if (TREE_CODE (index_expr) != INTEGER_CST)
5181 = build_int_2 (INTVAL (index),
5182 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5183 index_expr = convert (index_type, index_expr);
5186 /* For constant index expressions we need only
5187 issue an unconditional branch to the appropriate
5188 target code. The job of removing any unreachable
5189 code is left to the optimization phase if the
5190 "-O" option is specified. */
5191 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5192 if (! tree_int_cst_lt (index_expr, n->low)
5193 && ! tree_int_cst_lt (n->high, index_expr))
5197 emit_jump (label_rtx (n->code_label));
5199 emit_jump (default_label);
5203 /* If the index expression is not constant we generate
5204 a binary decision tree to select the appropriate
5205 target code. This is done as follows:
5207 The list of cases is rearranged into a binary tree,
5208 nearly optimal assuming equal probability for each case.
5210 The tree is transformed into RTL, eliminating
5211 redundant test conditions at the same time.
5213 If program flow could reach the end of the
5214 decision tree an unconditional jump to the
5215 default code is emitted. */
5218 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
5219 && estimate_case_costs (thiscase->data.case_stmt.case_list));
5220 balance_case_nodes (&thiscase->data.case_stmt.case_list,
5222 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5223 default_label, index_type);
5224 emit_jump_if_reachable (default_label);
5233 enum machine_mode index_mode = SImode;
5234 int index_bits = GET_MODE_BITSIZE (index_mode);
5236 enum machine_mode op_mode;
5238 /* Convert the index to SImode. */
5239 if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
5240 > GET_MODE_BITSIZE (index_mode))
5242 enum machine_mode omode = TYPE_MODE (index_type);
5243 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
5245 /* We must handle the endpoints in the original mode. */
5246 index_expr = build (MINUS_EXPR, index_type,
5247 index_expr, minval);
5248 minval = integer_zero_node;
5249 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5250 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
5251 omode, 1, 0, default_label);
5252 /* Now we can safely truncate. */
5253 index = convert_to_mode (index_mode, index, 0);
5257 if (TYPE_MODE (index_type) != index_mode)
5259 index_expr = convert (type_for_size (index_bits, 0),
5261 index_type = TREE_TYPE (index_expr);
5264 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5267 index = protect_from_queue (index, 0);
5268 do_pending_stack_adjust ();
5270 op_mode = insn_operand_mode[(int) CODE_FOR_casesi][0];
5271 if (! (*insn_operand_predicate[(int) CODE_FOR_casesi][0])
5273 index = copy_to_mode_reg (op_mode, index);
5275 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
5277 op_mode = insn_operand_mode[(int) CODE_FOR_casesi][1];
5278 if (! (*insn_operand_predicate[(int) CODE_FOR_casesi][1])
5280 op1 = copy_to_mode_reg (op_mode, op1);
5282 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
5284 op_mode = insn_operand_mode[(int) CODE_FOR_casesi][2];
5285 if (! (*insn_operand_predicate[(int) CODE_FOR_casesi][2])
5287 op2 = copy_to_mode_reg (op_mode, op2);
5289 emit_jump_insn (gen_casesi (index, op1, op2,
5290 table_label, default_label));
5294 #ifdef HAVE_tablejump
5295 if (! win && HAVE_tablejump)
5297 index_expr = convert (thiscase->data.case_stmt.nominal_type,
5298 fold (build (MINUS_EXPR, index_type,
5299 index_expr, minval)));
5300 index_type = TREE_TYPE (index_expr);
5301 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5303 index = protect_from_queue (index, 0);
5304 do_pending_stack_adjust ();
5306 do_tablejump (index, TYPE_MODE (index_type),
5307 expand_expr (range, NULL_RTX, VOIDmode, 0),
5308 table_label, default_label);
5315 /* Get table of labels to jump to, in order of case index. */
5317 ncases = TREE_INT_CST_LOW (range) + 1;
5318 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
5319 bzero ((char *) labelvec, ncases * sizeof (rtx));
5321 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5323 register HOST_WIDE_INT i
5324 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);
5329 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
5330 if (i + TREE_INT_CST_LOW (orig_minval)
5331 == TREE_INT_CST_LOW (n->high))
5337 /* Fill in the gaps with the default. */
5338 for (i = 0; i < ncases; i++)
5339 if (labelvec[i] == 0)
5340 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
5342 /* Output the table */
5343 emit_label (table_label);
5345 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
5346 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5347 gen_rtx_LABEL_REF (Pmode, table_label),
5348 gen_rtvec_v (ncases, labelvec),
5349 const0_rtx, const0_rtx, 0));
5351 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5352 gen_rtvec_v (ncases, labelvec)));
5354 /* If the case insn drops through the table,
5355 after the table we must jump to the default-label.
5356 Otherwise record no drop-through after the table. */
5357 #ifdef CASE_DROPS_THROUGH
5358 emit_jump (default_label);
5364 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
5365 reorder_insns (before_case, get_last_insn (),
5366 thiscase->data.case_stmt.start);
5369 end_cleanup_deferral ();
5371 if (thiscase->exit_label)
5372 emit_label (thiscase->exit_label);
5374 POPSTACK (case_stack);
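/* ILLUSTRATION (a stand-alone sketch, not part of GNU CC):
   expand_end_case above chooses between a sequence of conditional
   branches and a dispatch table based on the label count and on how
   densely the values fill their range. A caricature of that decision
   in ISO C; the threshold of 5 matches the default
   CASE_VALUES_THRESHOLD with no casesi insn, while the 10-to-1 density
   bound is an assumption here, since the exact test above is partly
   elided. */
#if 0
#include <stdio.h>

static const char *
pick_strategy (unsigned long count, unsigned long range)
{
  if (count < 5)                /* too few labels to pay for a table  */
    return "compare sequence";
  if (range > 10 * count)       /* table would be too sparse  */
    return "compare sequence";
  return "dispatch table";
}

int
main (void)
{
  printf ("%s\n", pick_strategy (3, 10));       /* compare sequence  */
  printf ("%s\n", pick_strategy (8, 20));       /* dispatch table  */
  printf ("%s\n", pick_strategy (8, 1000));     /* compare sequence  */
  return 0;
}
#endif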
5379 /* Convert the tree NODE into a list linked by the right field, with the left
5380 field zeroed. RIGHT is used for recursion; it is a list to be placed
5381 rightmost in the resulting list. */
5383 static struct case_node *
5384 case_tree2list (node, right)
5385 struct case_node *node, *right;
5387 struct case_node *left;
5390 right = case_tree2list (node->right, right);
5392 node->right = right;
5393 if ((left = node->left))
5396 return case_tree2list (left, node);
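/* ILLUSTRATION (a stand-alone sketch, not part of GNU CC): the
   flattening above is an in-order walk that reuses the RIGHT links and
   zeroes the LEFT ones. An ISO C model on a three-node tree; names are
   hypothetical. */
#if 0
#include <stdio.h>

struct cnode { int key; struct cnode *left, *right; };

static struct cnode *
tree2list (struct cnode *node, struct cnode *right)
{
  struct cnode *left;
  if (node->right)
    right = tree2list (node->right, right);
  node->right = right;
  if ((left = node->left))
    {
      node->left = 0;
      return tree2list (left, node);
    }
  return node;
}

int
main (void)
{
  struct cnode a = { 1, 0, 0 }, c = { 3, 0, 0 };
  struct cnode b = { 2, &a, &c };       /* b is the root  */
  struct cnode *n;
  for (n = tree2list (&b, 0); n; n = n->right)
    printf ("%d\n", n->key);            /* prints 1 2 3  */
  return 0;
}
#endif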
5402 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5405 do_jump_if_equal (op1, op2, label, unsignedp)
5406 rtx op1, op2, label;
5409 if (GET_CODE (op1) == CONST_INT
5410 && GET_CODE (op2) == CONST_INT)
5412 if (INTVAL (op1) == INTVAL (op2))
5417 enum machine_mode mode = GET_MODE (op1);
5418 if (mode == VOIDmode)
5419 mode = GET_MODE (op2);
5420 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX, mode, unsignedp,
5425 /* Not all case values are encountered equally. This function
5426 uses a heuristic to weight case labels, in cases where that
5427 looks like a reasonable thing to do.
5429 Right now, all we try to guess is text, and we establish the following weights:
5432 chars above space: 16
5441 If we find any cases in the switch that are not either -1 or in the range
5442 of valid ASCII characters, or are control characters other than those
5443 commonly used with "\", don't treat this switch as scanning text.
5445 Return 1 if these nodes are suitable for cost estimation; otherwise return 0. */
5449 estimate_case_costs (node)
5452 tree min_ascii = build_int_2 (-1, -1);
5453 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5457 /* If we haven't already made the cost table, make it now. Note that the
5458 lower bound of the table is -1, not zero. */
5460 if (cost_table == NULL)
5462 cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
5463 bzero ((char *) (cost_table - 1), 129 * sizeof (short));
5465 for (i = 0; i < 128; i++)
5469 else if (ISPUNCT (i))
5471 else if (ISCNTRL (i))
5475 cost_table[' '] = 8;
5476 cost_table['\t'] = 4;
5477 cost_table['\0'] = 4;
5478 cost_table['\n'] = 2;
5479 cost_table['\f'] = 1;
5480 cost_table['\v'] = 1;
5481 cost_table['\b'] = 1;
5484 /* See if all the case expressions look like text. It is text if the
5485 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5486 as signed arithmetic since we don't want to ever access cost_table with a
5487 value less than -1. Also check that none of the constants in a range
5488 are strange control characters. */
5490 for (n = node; n; n = n->right)
5492 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5495 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
5496 if (cost_table[i] < 0)
5500 /* All interesting values are within the range of interesting
5501 ASCII characters. */
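/* ILLUSTRATION (a stand-alone sketch, not part of GNU CC): the cost
   table above is allocated with one extra slot and the pointer
   advanced past it, so that index -1 -- the lowest value the heuristic
   accepts -- is legal. The same pointer-offset trick in ISO C. */
#if 0
#include <stdio.h>
#include <stdlib.h>

int
main (void)
{
  short *base = (short *) calloc (129, sizeof (short));
  short *cost_table;
  if (base == 0)
    return 1;
  cost_table = base + 1;                /* cost_table[-1] is base[0]  */
  cost_table[-1] = 0;
  cost_table['a'] = 16;                 /* chars above space weigh most  */
  cost_table[' '] = 8;
  printf ("%d %d\n", cost_table['a'], cost_table[-1]);
  free (base);                          /* free the original pointer  */
  return 0;
}
#endif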
5505 /* Scan an ordered list of case nodes
5506 combining those with consecutive values or ranges.
5508 E.g., three separate entries 1: 2: 3: become one entry 1..3: */
5511 group_case_nodes (head)
5514 case_node_ptr node = head;
5518 rtx lb = next_real_insn (label_rtx (node->code_label));
5520 case_node_ptr np = node;
5522 /* Try to group the successors of NODE with NODE. */
5523 while (((np = np->right) != 0)
5524 /* Do they jump to the same place? */
5525 && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
5526 || (lb != 0 && lb2 != 0
5527 && simplejump_p (lb)
5528 && simplejump_p (lb2)
5529 && rtx_equal_p (SET_SRC (PATTERN (lb)),
5530 SET_SRC (PATTERN (lb2)))))
5531 /* Are their ranges consecutive? */
5532 && tree_int_cst_equal (np->low,
5533 fold (build (PLUS_EXPR,
5534 TREE_TYPE (node->high),
5537 /* An overflow is not consecutive. */
5538 && tree_int_cst_lt (node->high,
5539 fold (build (PLUS_EXPR,
5540 TREE_TYPE (node->high),
5542 integer_one_node))))
5544 node->high = np->high;
5546 /* NP is the first node after NODE which can't be grouped with it.
5547 Delete the nodes in between, and move on to that node. */
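/* ILLUSTRATION (a stand-alone sketch, not part of GNU CC): the
   grouping above fuses a node with each successor that jumps to the
   same place and whose range starts right after the node's ends.
   Modeled on an ordered list in ISO C, with the overflow check
   omitted; names are hypothetical. */
#if 0
#include <stdio.h>

struct cnode { long low, high; int target; struct cnode *right; };

static void
group_nodes (struct cnode *node)
{
  while (node)
    {
      struct cnode *np = node;
      while ((np = np->right) != 0
             && np->target == node->target      /* same place?  */
             && np->low == node->high + 1)      /* consecutive?  */
        {
          node->high = np->high;                /* absorb NP's range  */
          node->right = np->right;              /* and unlink it  */
        }
      node = node->right;
    }
}

int
main (void)
{
  struct cnode c = { 5, 5, 2, 0 };
  struct cnode b = { 2, 3, 1, &c };
  struct cnode a = { 1, 1, 1, &b };
  struct cnode *n;
  group_nodes (&a);
  for (n = &a; n; n = n->right)
    printf ("%ld..%ld -> label %d\n", n->low, n->high, n->target);
  return 0;
}
#endif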
5553 /* Take an ordered list of case nodes
5554 and transform them into a near optimal binary tree,
5555 on the assumption that any target code selection value is as
5556 likely as any other.
5558 The transformation is performed by splitting the ordered
5559 list into two equal sections plus a pivot. The parts are
5560 then attached to the pivot as left and right branches. Each
5561 branch is then transformed recursively. */
5564 balance_case_nodes (head, parent)
5565 case_node_ptr *head;
5566 case_node_ptr parent;
5568 register case_node_ptr np;
5576 register case_node_ptr *npp;
5579 /* Count the number of entries on branch. Also count the ranges. */
5583 if (!tree_int_cst_equal (np->low, np->high))
5587 cost += cost_table[TREE_INT_CST_LOW (np->high)];
5591 cost += cost_table[TREE_INT_CST_LOW (np->low)];
5599 /* Split this list if it is long enough for that to help. */
5604 /* Find the place in the list that bisects the list's total cost;
5605 here I gets half the total cost. */
5610 /* Skip nodes while their cost does not reach that amount. */
5611 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5612 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
5613 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
5616 npp = &(*npp)->right;
5621 /* Leave this branch lopsided, but optimize left-hand
5622 side and fill in `parent' fields for right-hand side. */
5624 np->parent = parent;
5625 balance_case_nodes (&np->left, np);
5626 for (; np->right; np = np->right)
5627 np->right->parent = np;
5631 /* If there are just three nodes, split at the middle one. */
5633 npp = &(*npp)->right;
5636 /* Find the place in the list that bisects the list's total cost,
5637 where ranges count as 2.
5638 Here I gets half the total cost. */
5639 i = (i + ranges + 1) / 2;
5642 /* Skip nodes while their cost does not reach that amount. */
5643 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5648 npp = &(*npp)->right;
5653 np->parent = parent;
5656 /* Optimize each of the two split parts. */
5657 balance_case_nodes (&np->left, np);
5658 balance_case_nodes (&np->right, np);
5662 /* Else leave this branch as one level,
5663 but fill in `parent' fields. */
5665 np->parent = parent;
5666 for (; np->right; np = np->right)
5667 np->right->parent = np;
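/* ILLUSTRATION (a stand-alone sketch, not part of GNU CC): the
   balancing above splits the ordered list at the node that bisects the
   total cost and recurses on the halves, leaving short tails linear.
   A model in ISO C using equal weights; names are hypothetical. */
#if 0
#include <stdio.h>

struct cnode { long low; struct cnode *left, *right, *parent; };

static void
balance (struct cnode **head, struct cnode *parent)
{
  int i = 0;
  struct cnode **npp, *np;
  for (np = *head; np; np = np->right)
    i++;                        /* count entries, weight 1 each  */
  if (i > 2)
    {
      npp = head;
      i /= 2;                   /* half the total cost  */
      while (i-- > 0)           /* skip to the pivot  */
        npp = &(*npp)->right;
      np = *npp;                /* pivot becomes the subtree root  */
      *npp = 0;                 /* detach the left half  */
      np->left = *head;
      *head = np;
      np->parent = parent;
      balance (&np->left, np);
      balance (&np->right, np);
    }
  else
    for (np = *head; np; np = np->right)
      np->parent = parent;      /* short branch: leave it linear  */
}

static void
print_inorder (struct cnode *n)
{
  if (n == 0)
    return;
  print_inorder (n->left);
  printf ("%ld ", n->low);
  print_inorder (n->right);
}

int
main (void)
{
  struct cnode n[5], *head = 0;
  int i;
  for (i = 4; i >= 0; i--)
    {
      n[i].low = i + 1;
      n[i].left = 0;
      n[i].parent = 0;
      n[i].right = head;
      head = &n[i];
    }
  balance (&head, 0);
  print_inorder (head);         /* prints 1 2 3 4 5  */
  printf ("\n");
  return 0;
}
#endif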
5672 /* Search the parent sections of the case node tree
5673 to see if a test for the lower bound of NODE would be redundant.
5674 INDEX_TYPE is the type of the index expression.
5676 The instructions to generate the case decision tree are
5677 output in the same order as nodes are processed so it is
5678 known that if a parent node checks the range of the current
5679 node minus one, then the current node is bounded at its lower
5680 span. Thus the test would be redundant. */
5683 node_has_low_bound (node, index_type)
5688 case_node_ptr pnode;
5690 /* If the lower bound of this node is the lowest value in the index type,
5691 we need not test it. */
5693 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5696 /* If this node has a left branch, the value at the left must be less
5697 than that at this node, so it cannot be bounded at the bottom and
5698 we need not bother testing any further. */
5703 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5704 node->low, integer_one_node));
5706 /* If the subtraction above overflowed, we can't verify anything.
5707 Otherwise, look for a parent that tests our value - 1. */
5709 if (! tree_int_cst_lt (low_minus_one, node->low))
5712 for (pnode = node->parent; pnode; pnode = pnode->parent)
5713 if (tree_int_cst_equal (low_minus_one, pnode->high))
5719 /* Search the parent sections of the case node tree
5720 to see if a test for the upper bound of NODE would be redundant.
5721 INDEX_TYPE is the type of the index expression.
5723 The instructions to generate the case decision tree are
5724 output in the same order as nodes are processed so it is
5725 known that if a parent node checks the range of the current
5726 node plus one, then the current node is bounded at its upper
5727 span. Thus the test would be redundant. */
5730 node_has_high_bound (node, index_type)
5735 case_node_ptr pnode;
5737 /* If there is no upper bound, obviously no test is needed. */
5739 if (TYPE_MAX_VALUE (index_type) == NULL)
5742 /* If the upper bound of this node is the highest value in the type
5743 of the index expression, we need not test against it. */
5745 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5748 /* If this node has a right branch, the value at the right must be greater
5749 than that at this node, so it cannot be bounded at the top and
5750 we need not bother testing any further. */
5755 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5756 node->high, integer_one_node));
5758 /* If the addition above overflowed, we can't verify anything.
5759 Otherwise, look for a parent that tests our value + 1. */
5761 if (! tree_int_cst_lt (node->high, high_plus_one))
5764 for (pnode = node->parent; pnode; pnode = pnode->parent)
5765 if (tree_int_cst_equal (high_plus_one, pnode->low))
5771 /* Search the parent sections of the
5772 case node tree to see if both tests for the upper and lower
5773 bounds of NODE would be redundant. */
5776 node_is_bounded (node, index_type)
5780 return (node_has_low_bound (node, index_type)
5781 && node_has_high_bound (node, index_type));
5784 /* Emit an unconditional jump to LABEL unless it would be dead code. */
5787 emit_jump_if_reachable (label)
5790 if (GET_CODE (get_last_insn ()) != BARRIER)
5794 /* Emit step-by-step code to select a case for the value of INDEX.
5795 The thus generated decision tree follows the form of the
5796 case-node binary tree NODE, whose nodes represent test conditions.
5797 INDEX_TYPE is the type of the index of the switch.
5799 Care is taken to prune redundant tests from the decision tree
5800 by detecting any boundary conditions already checked by
5801 emitted rtx. (See node_has_high_bound, node_has_low_bound
5802 and node_is_bounded, above.)
5804 Where the test conditions can be shown to be redundant, we emit
5805 an unconditional jump to the target code. As a further
5806 optimization, the subordinates of a tree node are examined to
5807 check for bounded nodes. In this case conditional and/or
5808 unconditional jumps resulting from the boundary check for the
5809 current node are arranged to target the subordinates' associated
5810 code for out-of-bound conditions on the current node.
5812 We can assume that when control reaches the code generated here,
5813 the index value has already been compared with the parents
5814 of this node, and determined to be on the same side of each parent
5815 as this node is. Thus, if this node tests for the value 51,
5816 and a parent tested for 52, we don't need to consider
5817 the possibility of a value greater than 51. If another parent
5818 tests for the value 50, then this node need not test anything. */
5821 emit_case_nodes (index, node, default_label, index_type)
5827 /* If INDEX has an unsigned type, we must make unsigned branches. */
5828 int unsignedp = TREE_UNSIGNED (index_type);
5829 typedef rtx rtx_fn ();
5830 enum machine_mode mode = GET_MODE (index);
5832 /* See if our parents have already tested everything for us.
5833 If they have, emit an unconditional jump for this node. */
5834 if (node_is_bounded (node, index_type))
5835 emit_jump (label_rtx (node->code_label));
5837 else if (tree_int_cst_equal (node->low, node->high))
5839 /* Node is single valued. First see if the index expression matches
5840 this node and then check our children, if any. */
5842 do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
5843 label_rtx (node->code_label), unsignedp);
5845 if (node->right != 0 && node->left != 0)
5847 /* This node has children on both sides.
5848 Dispatch to one side or the other
5849 by comparing the index value with this node's value.
5850 If one subtree is bounded, check that one first,
5851 so we can avoid real branches in the tree. */
5853 if (node_is_bounded (node->right, index_type))
5855 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
5857 GT, NULL_RTX, mode, unsignedp, 0,
5858 label_rtx (node->right->code_label));
5859 emit_case_nodes (index, node->left, default_label, index_type);
5862 else if (node_is_bounded (node->left, index_type))
5864 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
5866 LT, NULL_RTX, mode, unsignedp, 0,
5867 label_rtx (node->left->code_label));
5868 emit_case_nodes (index, node->right, default_label, index_type);
5873 /* Neither node is bounded. First distinguish the two sides;
5874 then emit the code for one side at a time. */
5877 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5879 /* See if the value is on the right. */
5880 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
5882 GT, NULL_RTX, mode, unsignedp, 0,
5883 label_rtx (test_label));
5885 /* Value must be on the left.
5886 Handle the left-hand subtree. */
5887 emit_case_nodes (index, node->left, default_label, index_type);
5888 /* If left-hand subtree does nothing, go to default. */
5890 emit_jump_if_reachable (default_label);
5892 /* Code branches here for the right-hand subtree. */
5893 expand_label (test_label);
5894 emit_case_nodes (index, node->right, default_label, index_type);
5898 else if (node->right != 0 && node->left == 0)
5900 /* Here we have a right child but no left so we issue conditional
5901 branch to default and process the right child.
5903 Omit the conditional branch to default if it would avoid only one
5904 right child; it costs too much space to save so little time. */
5906 if (node->right->right || node->right->left
5907 || !tree_int_cst_equal (node->right->low, node->right->high))
5909 if (!node_has_low_bound (node, index_type))
5911 emit_cmp_and_jump_insns (index, expand_expr (node->high,
5914 LT, NULL_RTX, mode, unsignedp, 0,
5918 emit_case_nodes (index, node->right, default_label, index_type);
5921 /* We cannot process node->right normally
5922 since we haven't ruled out the numbers less than
5923 this node's value. So handle node->right explicitly. */
5924 do_jump_if_equal (index,
5925 expand_expr (node->right->low, NULL_RTX,
5927 label_rtx (node->right->code_label), unsignedp);
5930 else if (node->right == 0 && node->left != 0)
5932 /* Just one subtree, on the left. */
5934 #if 0 /* The following code and comment were formerly part
5935 of the condition here, but they didn't work
5936 and I don't understand what the idea was. -- rms. */
5937 /* If our "most probable entry" is less probable
5938 than the default label, emit a jump to
5939 the default label using condition codes
5940 already lying around. With no right branch,
5941 a branch-greater-than will get us to the default
5944 && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
5947 if (node->left->left || node->left->right
5948 || !tree_int_cst_equal (node->left->low, node->left->high))
5950 if (!node_has_high_bound (node, index_type))
5952 emit_cmp_and_jump_insns (index, expand_expr (node->high,
5955 GT, NULL_RTX, mode, unsignedp, 0,
5959 emit_case_nodes (index, node->left, default_label, index_type);
5962 /* We cannot process node->left normally
5963 since we haven't ruled out the numbers greater than
5964 this node's value. So handle node->left explicitly. */
5965 do_jump_if_equal (index,
5966 expand_expr (node->left->low, NULL_RTX,
5968 label_rtx (node->left->code_label), unsignedp);
5973 /* Node is a range. These cases are very similar to those for a single
5974 value, except that we do not start by testing whether this node
5975 is the one to branch to. */
5977 if (node->right != 0 && node->left != 0)
5979 /* Node has subtrees on both sides.
5980 If the right-hand subtree is bounded,
5981 test for it first, since we can go straight there.
5982 Otherwise, we need to make a branch in the control structure,
5983 then handle the two subtrees. */
5984 tree test_label = 0;
5987 if (node_is_bounded (node->right, index_type))
5988 /* Right hand node is fully bounded so we can eliminate any
5989 testing and branch directly to the target code. */
5990 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
5992 GT, NULL_RTX, mode, unsignedp, 0,
5993 label_rtx (node->right->code_label));
5996 /* Right hand node requires testing.
5997 Branch to a label where we will handle it later. */
5999 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6000 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6002 GT, NULL_RTX, mode, unsignedp, 0,
6003 label_rtx (test_label));
6006 /* Value belongs to this node or to the left-hand subtree. */
6008 emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
6010 GE, NULL_RTX, mode, unsignedp, 0,
6011 label_rtx (node->code_label));
6013 /* Handle the left-hand subtree. */
6014 emit_case_nodes (index, node->left, default_label, index_type);
6016 /* If right node had to be handled later, do that now. */
6020 /* If the left-hand subtree fell through,
6021 don't let it fall into the right-hand subtree. */
6022 emit_jump_if_reachable (default_label);
6024 expand_label (test_label);
6025 emit_case_nodes (index, node->right, default_label, index_type);
6029 else if (node->right != 0 && node->left == 0)
6031 /* Deal with values to the left of this node,
6032 if they are possible. */
6033 if (!node_has_low_bound (node, index_type))
6035 emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
6037 LT, NULL_RTX, mode, unsignedp, 0,
6041 /* Value belongs to this node or to the right-hand subtree. */
6043 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6045 LE, NULL_RTX, mode, unsignedp, 0,
6046 label_rtx (node->code_label));
6048 emit_case_nodes (index, node->right, default_label, index_type);
6051 else if (node->right == 0 && node->left != 0)
6053 /* Deal with values to the right of this node,
6054 if they are possible. */
6055 if (!node_has_high_bound (node, index_type))
6057 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6059 GT, NULL_RTX, mode, unsignedp, 0,
6063 /* Value belongs to this node or to the left-hand subtree. */
6065 emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
6067 GE, NULL_RTX, mode, unsignedp, 0,
6068 label_rtx (node->code_label));
6070 emit_case_nodes (index, node->left, default_label, index_type);
6075 /* Node has no children so we check low and high bounds to remove
6076 redundant tests. Only one of the bounds can exist,
6077 since otherwise this node is bounded--a case tested already. */
6079 if (!node_has_high_bound (node, index_type))
6081 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6083 GT, NULL_RTX, mode, unsignedp, 0,
6087 if (!node_has_low_bound (node, index_type))
6089 emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
6091 LT, NULL_RTX, mode, unsignedp, 0,
6095 emit_jump (label_rtx (node->code_label));
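/* ILLUSTRATION (a stand-alone sketch, not part of GNU CC): for a small
   switch such as  switch (i) { case 1: case 5: case 9: }  the routine
   above emits roughly the compare-and-branch tree spelled out by hand
   in the ISO C below: test the root for equality, branch on greater to
   the right subtree, and fall into the left one, with no redundant
   bound tests inside the subtrees. */
#if 0
#include <stdio.h>

static const char *
dispatch (int index)
{
  if (index == 5)               /* root: the middle case  */
    return "case 5";
  if (index > 5)                /* right subtree  */
    {
      if (index == 9)
        return "case 9";
      return "default";
    }
  if (index == 1)               /* left subtree  */
    return "case 1";
  return "default";
}

int
main (void)
{
  int i;
  for (i = 0; i <= 10; i++)
    printf ("%d -> %s\n", i, dispatch (i));
  return 0;
}
#endif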
6100 /* These routines are used by the loop unrolling code. They copy BLOCK trees
6101 so that the debugging info will be correct for the unrolled loop. */
6103 /* Indexed by block number, contains a pointer to the N'th block node.
6105 Allocated by the call to identify_blocks, then released after the call
6106 to reorder_blocks in the function unroll_block_trees. */
6108 static tree *block_vector;
6111 find_loop_tree_blocks ()
6113 tree block = DECL_INITIAL (current_function_decl);
6115 block_vector = identify_blocks (block, get_insns ());
6119 unroll_block_trees ()
6121 tree block = DECL_INITIAL (current_function_decl);
6123 reorder_blocks (block_vector, block, get_insns ());
6125 /* Release any memory allocated by identify_blocks. */
6127 free (block_vector);