1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option) any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 /* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 It also creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
27 The functions whose names start with `expand_' are called by the
28 parser to generate RTL instructions for various kinds of constructs.
30 Some control and binding constructs require calling several such
31 functions at different times. For example, a simple if-then
32 is expanded by calling `expand_start_cond' (with the condition-expression
33 as argument) before parsing the then-clause and calling `expand_end_cond'
34 after parsing the then-clause. */
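/* For illustration only (a rough sketch of typical front-end usage, not
   code taken from this file): `if (cond) stmt;' might be expanded as

	expand_start_cond (cond_expr, 0);
	... expand RTL for the then-clause ...
	expand_end_cond ();

   with `expand_start_else' called between the two when an else-clause
   is present.  */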
45 #include "insn-config.h"
47 #include "hard-reg-set.h"
56 #define obstack_chunk_alloc xmalloc
57 #define obstack_chunk_free free
58 struct obstack stmt_obstack;
60 /* Assume that case vectors are not pc-relative. */
61 #ifndef CASE_VECTOR_PC_RELATIVE
62 #define CASE_VECTOR_PC_RELATIVE 0
65 /* Functions and data structures for expanding case statements. */
67 /* Case label structure, used to hold info on labels within case
68 statements. We handle "range" labels; for a single-value label
69 as in C, the high and low limits are the same.
71 An AVL tree of case nodes is initially created, and later transformed
72 to a list linked via the RIGHT fields in the nodes. Nodes with
73 higher case values are later in the list.
75 Switch statements can be output in one of two forms. A branch table
76 is used if there are more than a few labels and the labels are dense
77 within the range between the smallest and largest case value. If a
78 branch table is used, no further manipulations are done with the case node chain.
81 The alternative to the use of a branch table is to generate a series
82 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
83 and PARENT fields to hold a binary tree. Initially the tree is
84 totally unbalanced, with everything on the right. We balance the tree
85 with nodes on the left having lower case values than the parent
86 and nodes on the right having higher values. We then output the tree in order. */
91 struct case_node *left; /* Left son in binary tree */
92 struct case_node *right; /* Right son in binary tree; also node chain */
93 struct case_node *parent; /* Parent of node in binary tree */
94 tree low; /* Lowest index value for this label */
95 tree high; /* Highest index value for this label */
96 tree code_label; /* Label to jump to when node matches */
100 typedef struct case_node case_node;
101 typedef struct case_node *case_node_ptr;
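/* An illustrative example of the data layout (not code from this file):
   for the GNU C statement `switch (i) { case 1: ...; case 4 ... 6: ...; }'
   the case list holds one node with low == high == 1 and another with
   low == 4 and high == 6, each with code_label pointing at the label to
   jump to.  */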
103 /* These are used by estimate_case_costs and balance_case_nodes. */
105 /* This must be a signed type, and non-ANSI compilers lack signed char. */
106 static short cost_table_[129];
107 static int use_cost_table;
108 static int cost_table_initialized;
110 /* Special care is needed because we allow -1, but TREE_INT_CST_LOW is unsigned. */
112 #define COST_TABLE(I) cost_table_[(unsigned HOST_WIDE_INT)((I) + 1)]
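/* For example, COST_TABLE (-1) refers to cost_table_[0] and
   COST_TABLE (127) refers to cost_table_[128], so an index of -1 is
   valid even though the underlying array subscript must be
   nonnegative.  */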
114 /* Stack of control and binding constructs we are currently inside.
116 These constructs begin when you call `expand_start_WHATEVER'
117 and end when you call `expand_end_WHATEVER'. This stack records
118 info about how the construct began that tells the end-function
119 what to do. It also may provide information about the construct
120 to alter the behavior of other constructs within the body.
121 For example, they may affect the behavior of C `break' and `continue'.
123 Each construct gets one `struct nesting' object.
124 All of these objects are chained through the `all' field.
125 `nesting_stack' points to the first object (innermost construct).
126 The position of an entry on `nesting_stack' is in its `depth' field.
128 Each type of construct has its own individual stack.
129 For example, loops have `loop_stack'. Each object points to the
130 next object of the same type through the `next' field.
132 Some constructs are visible to `break' exit-statements and others
133 are not. Which constructs are visible depends on the language.
134 Therefore, the data structure allows each construct to be visible
135 or not, according to the args given when the construct is started.
136 The construct is visible if the `exit_label' field is non-null.
137 In that case, the value should be a CODE_LABEL rtx. */
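/* As an illustrative example (not code from this file): while expanding
   the body of `while (a) { if (b) ...; }', nesting_stack (chained through
   the `all' fields) has the entry for the `if' innermost, then the entry
   for the `while', then any enclosing binding contours; cond_stack points
   at the `if' entry and loop_stack at the `while' entry.  */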
142 struct nesting *next;
147 /* For conds (if-then and if-then-else statements). */
150 /* Label for the end of the if construct.
151 There is none if EXITFLAG was not set
152 and no `else' has been seen yet. */
154 /* Label for the end of this alternative.
155 This may be the end of the if or the next else/elseif. */
161 /* Label at the top of the loop; place to loop back to. */
163 /* Label at the end of the whole construct. */
165 /* Label before a jump that branches to the end of the whole
166 construct. This is where destructors go if any. */
168 /* Label for `continue' statement to jump to;
169 this is in front of the stepper of the loop. */
172 /* For variable binding contours. */
175 /* Sequence number of this binding contour within the function,
176 in order of entry. */
177 int block_start_count;
178 /* Nonzero => value to restore stack to on exit. */
180 /* The NOTE that starts this contour.
181 Used by expand_goto to check whether the destination
182 is within each contour or not. */
184 /* Innermost containing binding contour that has a stack level. */
185 struct nesting *innermost_stack_block;
186 /* List of cleanups to be run on exit from this contour.
187 This is a list of expressions to be evaluated.
188 The TREE_PURPOSE of each link is the ..._DECL node
189 which the cleanup pertains to. */
191 /* List of cleanup-lists of blocks containing this block,
192 as they were at the locus where this block appears.
193 There is an element for each containing block,
194 ordered innermost containing block first.
195 The tail of this list can be 0,
196 if all remaining elements would be empty lists.
197 The element's TREE_VALUE is the cleanup-list of that block,
198 which may be null. */
200 /* Chain of labels defined inside this binding contour.
201 For contours that have stack levels or cleanups. */
202 struct label_chain *label_chain;
203 /* Number of function calls seen, as of start of this block. */
204 int n_function_calls;
206 /* Nonzero if this is associated with an EH region. */
206 int exception_region;
207 /* The saved target_temp_slot_level from our outer block.
208 We may reset target_temp_slot_level to be the level of
209 this block; if that is done, target_temp_slot_level
210 reverts to the saved target_temp_slot_level at the very end of the block. */
212 int block_target_temp_slot_level;
213 /* True if we are currently emitting insns in an area of
214 output code that is controlled by a conditional
215 expression. This is used by the cleanup handling code to
216 generate conditional cleanup actions. */
217 int conditional_code;
218 /* A place to move the start of the exception region for any
219 of the conditional cleanups; it must be at the end or after
220 the start of the last unconditional cleanup, and before any
221 conditional branch points. */
222 rtx last_unconditional_cleanup;
223 /* When in a conditional context, this is the specific
224 cleanup list associated with last_unconditional_cleanup,
225 where we place the conditionalized cleanups. */
228 /* For switch (C) or case (Pascal) statements,
229 and also for dummies (see `expand_start_case_dummy'). */
232 /* The insn after which the case dispatch should finally
233 be emitted. Zero for a dummy. */
235 /* A list of case labels; it is first built as an AVL tree.
236 During expand_end_case, this is converted to a list, and may be
237 rearranged into a nearly balanced binary tree. */
238 struct case_node *case_list;
239 /* Label to jump to if no case matches. */
241 /* The expression to be dispatched on. */
243 /* Type that INDEX_EXPR should be converted to. */
245 /* Name of this kind of statement, for warnings. */
246 const char *printname;
247 /* Used to save no_line_numbers till we see the first case label.
248 We set this to -1 when we see the first case label in this case statement. */
250 int line_number_status;
255 /* Allocate and return a new `struct nesting'. */
257 #define ALLOC_NESTING() \
258 (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))
260 /* Pop the nesting stack element by element until we pop off
261 the element which is at the top of STACK.
262 Update all the other stacks, popping off elements from them
263 as we pop them from nesting_stack. */
265 #define POPSTACK(STACK) \
266 do { struct nesting *target = STACK; \
267 struct nesting *this; \
268 do { this = nesting_stack; \
269 if (loop_stack == this) \
270 loop_stack = loop_stack->next; \
271 if (cond_stack == this) \
272 cond_stack = cond_stack->next; \
273 if (block_stack == this) \
274 block_stack = block_stack->next; \
275 if (stack_block_stack == this) \
276 stack_block_stack = stack_block_stack->next; \
277 if (case_stack == this) \
278 case_stack = case_stack->next; \
279 nesting_depth = nesting_stack->depth - 1; \
280 nesting_stack = this->all; \
281 obstack_free (&stmt_obstack, this); } \
282 while (this != target); } while (0)
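/* For illustration: `POPSTACK (loop_stack);' frees every nesting object
   from the innermost one out to and including the object currently at the
   head of loop_stack, updating loop_stack, cond_stack, block_stack,
   stack_block_stack and case_stack along the way.  */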
284 /* In some cases it is impossible to generate code for a forward goto
285 until the label definition is seen. This happens when it may be necessary
286 for the goto to reset the stack pointer: we don't yet know how to do that.
287 So expand_goto puts an entry on this fixup list.
288 Each time a binding contour that resets the stack is exited, we check each fixup.
290 If the target label has now been defined, we can insert the proper code. */
294 /* Points to following fixup. */
295 struct goto_fixup *next;
296 /* Points to the insn before the jump insn.
297 If more code must be inserted, it goes after this insn. */
299 /* The LABEL_DECL that this jump is jumping to, or 0
300 for break, continue or return. */
302 /* The BLOCK for the place where this goto was found. */
304 /* The CODE_LABEL rtx that this is jumping to. */
306 /* Number of binding contours started in current function
307 before the label reference. */
308 int block_start_count;
309 /* The outermost stack level that should be restored for this jump.
310 Each time a binding contour that resets the stack is exited,
311 if the target label is *not* yet defined, this slot is updated. */
313 /* List of lists of cleanup expressions to be run by this goto.
314 There is one element for each block that this goto is within.
315 The tail of this list can be 0,
316 if all remaining elements would be empty.
317 The TREE_VALUE contains the cleanup list of that block as of the
318 time this goto was seen.
319 The TREE_ADDRESSABLE flag is 1 for a block that has been exited. */
320 tree cleanup_list_list;
323 /* Within any binding contour that must restore a stack level,
324 all labels are recorded with a chain of these structures. */
328 /* Points to the following label_chain. */
329 struct label_chain *next;
335 /* Chain of all pending binding contours. */
336 struct nesting *x_block_stack;
338 /* If any new stacks are added here, add them to POPSTACKS too. */
340 /* Chain of all pending binding contours that restore stack levels
342 struct nesting *x_stack_block_stack;
344 /* Chain of all pending conditional statements. */
345 struct nesting *x_cond_stack;
347 /* Chain of all pending loops. */
348 struct nesting *x_loop_stack;
350 /* Chain of all pending case or switch statements. */
351 struct nesting *x_case_stack;
353 /* Separate chain including all of the above,
354 chained through the `all' field. */
355 struct nesting *x_nesting_stack;
357 /* Number of entries on nesting_stack now. */
360 /* Number of binding contours started so far in this function. */
361 int x_block_start_count;
363 /* Each time we expand an expression-statement,
364 record the expr's type and its RTL value here. */
365 tree x_last_expr_type;
366 rtx x_last_expr_value;
368 /* Nonzero if within a ({...}) grouping, in which case we must
369 always compute a value for each expr-stmt in case it is the last one. */
370 int x_expr_stmts_for_value;
372 /* Filename and line number of last line-number note,
373 whether we actually emitted it or not. */
374 const char *x_emit_filename;
377 struct goto_fixup *x_goto_fixup_chain;
380 #define block_stack (cfun->stmt->x_block_stack)
381 #define stack_block_stack (cfun->stmt->x_stack_block_stack)
382 #define cond_stack (cfun->stmt->x_cond_stack)
383 #define loop_stack (cfun->stmt->x_loop_stack)
384 #define case_stack (cfun->stmt->x_case_stack)
385 #define nesting_stack (cfun->stmt->x_nesting_stack)
386 #define nesting_depth (cfun->stmt->x_nesting_depth)
387 #define current_block_start_count (cfun->stmt->x_block_start_count)
388 #define last_expr_type (cfun->stmt->x_last_expr_type)
389 #define last_expr_value (cfun->stmt->x_last_expr_value)
390 #define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
391 #define emit_filename (cfun->stmt->x_emit_filename)
392 #define emit_lineno (cfun->stmt->x_emit_lineno)
393 #define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)
395 /* Nonzero if we are using EH to handle cleanups. */
396 static int using_eh_for_cleanups_p = 0;
398 static int n_occurrences PARAMS ((int, const char *));
399 static void expand_goto_internal PARAMS ((tree, rtx, rtx));
400 static int expand_fixup PARAMS ((tree, rtx, rtx));
401 static rtx expand_nl_handler_label PARAMS ((rtx, rtx));
402 static void expand_nl_goto_receiver PARAMS ((void));
403 static void expand_nl_goto_receivers PARAMS ((struct nesting *));
404 static void fixup_gotos PARAMS ((struct nesting *, rtx, tree,
406 static void expand_null_return_1 PARAMS ((rtx));
407 static void expand_value_return PARAMS ((rtx));
408 static int tail_recursion_args PARAMS ((tree, tree));
409 static void expand_cleanups PARAMS ((tree, tree, int, int));
410 static void check_seenlabel PARAMS ((void));
411 static void do_jump_if_equal PARAMS ((rtx, rtx, rtx, int));
412 static int estimate_case_costs PARAMS ((case_node_ptr));
413 static void group_case_nodes PARAMS ((case_node_ptr));
414 static void balance_case_nodes PARAMS ((case_node_ptr *,
416 static int node_has_low_bound PARAMS ((case_node_ptr, tree));
417 static int node_has_high_bound PARAMS ((case_node_ptr, tree));
418 static int node_is_bounded PARAMS ((case_node_ptr, tree));
419 static void emit_jump_if_reachable PARAMS ((rtx));
420 static void emit_case_nodes PARAMS ((rtx, case_node_ptr, rtx, tree));
421 static struct case_node *case_tree2list PARAMS ((case_node *, case_node *));
422 static void mark_cond_nesting PARAMS ((struct nesting *));
423 static void mark_loop_nesting PARAMS ((struct nesting *));
424 static void mark_block_nesting PARAMS ((struct nesting *));
425 static void mark_case_nesting PARAMS ((struct nesting *));
426 static void mark_case_node PARAMS ((struct case_node *));
427 static void mark_goto_fixup PARAMS ((struct goto_fixup *));
428 static void free_case_nodes PARAMS ((case_node_ptr));
431 using_eh_for_cleanups ()
433 using_eh_for_cleanups_p = 1;
436 /* Mark N (known to be a cond-nesting) for GC. */
439 mark_cond_nesting (n)
444 ggc_mark_rtx (n->exit_label);
445 ggc_mark_rtx (n->data.cond.endif_label);
446 ggc_mark_rtx (n->data.cond.next_label);
452 /* Mark N (known to be a loop-nesting) for GC. */
455 mark_loop_nesting (n)
461 ggc_mark_rtx (n->exit_label);
462 ggc_mark_rtx (n->data.loop.start_label);
463 ggc_mark_rtx (n->data.loop.end_label);
464 ggc_mark_rtx (n->data.loop.alt_end_label);
465 ggc_mark_rtx (n->data.loop.continue_label);
471 /* Mark N (known to be a block-nesting) for GC. */
474 mark_block_nesting (n)
479 struct label_chain *l;
481 ggc_mark_rtx (n->exit_label);
482 ggc_mark_rtx (n->data.block.stack_level);
483 ggc_mark_rtx (n->data.block.first_insn);
484 ggc_mark_tree (n->data.block.cleanups);
485 ggc_mark_tree (n->data.block.outer_cleanups);
487 for (l = n->data.block.label_chain; l != NULL; l = l->next)
490 ggc_mark_tree (l->label);
493 ggc_mark_rtx (n->data.block.last_unconditional_cleanup);
495 /* ??? cleanup_ptr never points outside the stack, does it? */
501 /* Mark N (known to be a case-nesting) for GC. */
504 mark_case_nesting (n)
509 ggc_mark_rtx (n->exit_label);
510 ggc_mark_rtx (n->data.case_stmt.start);
512 ggc_mark_tree (n->data.case_stmt.default_label);
513 ggc_mark_tree (n->data.case_stmt.index_expr);
514 ggc_mark_tree (n->data.case_stmt.nominal_type);
516 mark_case_node (n->data.case_stmt.case_list);
529 ggc_mark_tree (c->low);
530 ggc_mark_tree (c->high);
531 ggc_mark_tree (c->code_label);
533 mark_case_node (c->right);
534 mark_case_node (c->left);
542 struct goto_fixup *g;
547 ggc_mark_rtx (g->before_jump);
548 ggc_mark_tree (g->target);
549 ggc_mark_tree (g->context);
550 ggc_mark_rtx (g->target_rtl);
551 ggc_mark_rtx (g->stack_level);
552 ggc_mark_tree (g->cleanup_list_list);
558 /* Clear out all parts of the state in F that can safely be discarded
559 after the function has been compiled, to let garbage collection
560 reclaim the memory. */
566 /* We're about to free the function obstack. If we hold pointers to
567 things allocated there, then we'll try to mark them when we do
568 GC. So, we clear them out here explicitly. */
578 struct stmt_status *p;
583 mark_block_nesting (p->x_block_stack);
584 mark_cond_nesting (p->x_cond_stack);
585 mark_loop_nesting (p->x_loop_stack);
586 mark_case_nesting (p->x_case_stack);
588 ggc_mark_tree (p->x_last_expr_type);
589 /* last_expr_value is only valid if last_expr_type is nonzero. */
590 if (p->x_last_expr_type)
591 ggc_mark_rtx (p->x_last_expr_value);
593 mark_goto_fixup (p->x_goto_fixup_chain);
599 gcc_obstack_init (&stmt_obstack);
603 init_stmt_for_function ()
605 cfun->stmt = (struct stmt_status *) xmalloc (sizeof (struct stmt_status));
607 /* We are not currently within any block, conditional, loop or case. */
609 stack_block_stack = 0;
616 current_block_start_count = 0;
618 /* No gotos have been expanded yet. */
619 goto_fixup_chain = 0;
621 /* We are not processing a ({...}) grouping. */
622 expr_stmts_for_value = 0;
624 last_expr_value = NULL_RTX;
627 /* Return nonzero if anything is pushed on the loop, condition, or case stack. */
632 return cond_stack || loop_stack || case_stack;
635 /* Record the current file and line. Called from emit_line_note. */
637 set_file_and_line_for_stmt (file, line)
641 /* If we're outputting an inline function, and we add a line note,
642 there may be no CFUN->STMT information. So, there's no need to update it. */
646 emit_filename = file;
651 /* Emit a no-op instruction. */
658 last_insn = get_last_insn ();
660 && (GET_CODE (last_insn) == CODE_LABEL
661 || (GET_CODE (last_insn) == NOTE
662 && prev_real_insn (last_insn) == 0)))
663 emit_insn (gen_nop ());
666 /* Return the rtx-label that corresponds to a LABEL_DECL,
667 creating it if necessary. */
673 if (TREE_CODE (label) != LABEL_DECL)
676 if (!DECL_RTL_SET_P (label))
677 SET_DECL_RTL (label, gen_label_rtx ());
679 return DECL_RTL (label);
683 /* Add an unconditional jump to LABEL as the next sequential instruction. */
689 do_pending_stack_adjust ();
690 emit_jump_insn (gen_jump (label));
694 /* Emit code to jump to the address
695 specified by the pointer expression EXP. */
698 expand_computed_goto (exp)
701 rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
703 #ifdef POINTERS_EXTEND_UNSIGNED
704 x = convert_memory_address (Pmode, x);
708 /* Be sure the function is executable. */
709 if (current_function_check_memory_usage)
710 emit_library_call (chkr_check_exec_libfunc, LCT_CONST_MAKE_BLOCK,
711 VOIDmode, 1, x, ptr_mode);
713 do_pending_stack_adjust ();
714 emit_indirect_jump (x);
716 current_function_has_computed_jump = 1;
719 /* Handle goto statements and the labels that they can go to. */
721 /* Specify the location in the RTL code of a label LABEL,
722 which is a LABEL_DECL tree node.
724 This is used for the kind of label that the user can jump to with a
725 goto statement, and for alternatives of a switch or case statement.
726 RTL labels generated for loops and conditionals don't go through here;
727 they are generated directly at the RTL level, by other functions below.
729 Note that this has nothing to do with defining label *names*.
730 Languages vary in how they do that and what that even means. */
736 struct label_chain *p;
738 do_pending_stack_adjust ();
739 emit_label (label_rtx (label));
740 if (DECL_NAME (label))
741 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
743 if (stack_block_stack != 0)
745 p = (struct label_chain *) ggc_alloc (sizeof (struct label_chain));
746 p->next = stack_block_stack->data.block.label_chain;
747 stack_block_stack->data.block.label_chain = p;
752 /* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
753 from nested functions. */
756 declare_nonlocal_label (label)
759 rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
761 nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
762 LABEL_PRESERVE_P (label_rtx (label)) = 1;
763 if (nonlocal_goto_handler_slots == 0)
765 emit_stack_save (SAVE_NONLOCAL,
766 &nonlocal_goto_stack_level,
767 PREV_INSN (tail_recursion_reentry));
769 nonlocal_goto_handler_slots
770 = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
773 /* Generate RTL code for a `goto' statement with target label LABEL.
774 LABEL should be a LABEL_DECL tree node that was or will later be
775 defined with `expand_label'. */
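/* For illustration (a sketch of typical front-end usage, not code from
   this file): for C code containing `goto l; ... l: ;' the parser calls
   expand_goto (decl) at the goto and expand_label (decl) at the
   definition of `l', where DECL is the LABEL_DECL node for `l'.  A label
   that may also be reached by a nonlocal goto from a nested function must
   additionally be passed to declare_nonlocal_label.  */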
783 /* Check for a nonlocal goto to a containing function. */
784 context = decl_function_context (label);
785 if (context != 0 && context != current_function_decl)
787 struct function *p = find_function_data (context);
788 rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
789 rtx handler_slot, static_chain, save_area, insn;
792 /* Find the corresponding handler slot for this label. */
793 handler_slot = p->x_nonlocal_goto_handler_slots;
794 for (link = p->x_nonlocal_labels; TREE_VALUE (link) != label;
795 link = TREE_CHAIN (link))
796 handler_slot = XEXP (handler_slot, 1);
797 handler_slot = XEXP (handler_slot, 0);
799 p->has_nonlocal_label = 1;
800 current_function_has_nonlocal_goto = 1;
801 LABEL_REF_NONLOCAL_P (label_ref) = 1;
803 /* Copy the rtl for the slots so that they won't be shared in
804 case the virtual stack vars register gets instantiated differently
805 in the parent than in the child. */
807 static_chain = copy_to_reg (lookup_static_chain (label));
809 /* Get addr of containing function's current nonlocal goto handler,
810 which will do any cleanups and then jump to the label. */
811 handler_slot = copy_to_reg (replace_rtx (copy_rtx (handler_slot),
812 virtual_stack_vars_rtx,
815 /* Get addr of containing function's nonlocal save area. */
816 save_area = p->x_nonlocal_goto_stack_level;
818 save_area = replace_rtx (copy_rtx (save_area),
819 virtual_stack_vars_rtx, static_chain);
821 #if HAVE_nonlocal_goto
822 if (HAVE_nonlocal_goto)
823 emit_insn (gen_nonlocal_goto (static_chain, handler_slot,
824 save_area, label_ref));
828 /* Restore frame pointer for containing function.
829 This sets the actual hard register used for the frame pointer
830 to the location of the function's incoming static chain info.
831 The non-local goto handler will then adjust it to contain the
832 proper value and reload the argument pointer, if needed. */
833 emit_move_insn (hard_frame_pointer_rtx, static_chain);
834 emit_stack_restore (SAVE_NONLOCAL, save_area, NULL_RTX);
836 /* USE of hard_frame_pointer_rtx added for consistency;
837 not clear if really needed. */
838 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
839 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
840 emit_indirect_jump (handler_slot);
843 /* Search backwards to the jump insn and mark it as a non-local goto. */
845 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
847 if (GET_CODE (insn) == JUMP_INSN)
849 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
850 const0_rtx, REG_NOTES (insn));
853 else if (GET_CODE (insn) == CALL_INSN)
858 expand_goto_internal (label, label_rtx (label), NULL_RTX);
861 /* Generate RTL code for a `goto' statement with target label BODY.
862 LABEL should be a LABEL_REF.
863 LAST_INSN, if non-0, is the rtx we should consider as the last
864 insn emitted (for the purposes of cleaning up a return). */
867 expand_goto_internal (body, label, last_insn)
872 struct nesting *block;
875 if (GET_CODE (label) != CODE_LABEL)
878 /* If label has already been defined, we can tell now
879 whether and how we must alter the stack level. */
881 if (PREV_INSN (label) != 0)
883 /* Find the innermost pending block that contains the label.
884 (Check containment by comparing insn-uids.)
885 Then restore the outermost stack level within that block,
886 and do cleanups of all blocks contained in it. */
887 for (block = block_stack; block; block = block->next)
889 if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
891 if (block->data.block.stack_level != 0)
892 stack_level = block->data.block.stack_level;
893 /* Execute the cleanups for blocks we are exiting. */
894 if (block->data.block.cleanups != 0)
896 expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
897 do_pending_stack_adjust ();
903 /* Ensure stack adjust isn't done by emit_jump, as this
904 would clobber the stack pointer. This one should be
905 deleted as dead by flow. */
906 clear_pending_stack_adjust ();
907 do_pending_stack_adjust ();
909 /* Don't do this adjust if it's to the end label and this function
910 is to return with a depressed stack pointer. */
911 if (label == return_label
912 && (((TREE_CODE (TREE_TYPE (current_function_decl))
914 && (TYPE_RETURNS_STACK_DEPRESSED
915 (TREE_TYPE (current_function_decl))))))
918 emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
921 if (body != 0 && DECL_TOO_LATE (body))
922 error ("jump to `%s' invalidly jumps into binding contour",
923 IDENTIFIER_POINTER (DECL_NAME (body)));
925 /* Label not yet defined: may need to put this goto
926 on the fixup list. */
927 else if (! expand_fixup (body, label, last_insn))
929 /* No fixup needed. Record that the label is the target
930 of at least one goto that has no fixup. */
932 TREE_ADDRESSABLE (body) = 1;
938 /* Generate if necessary a fixup for a goto
939 whose target label in tree structure (if any) is TREE_LABEL
940 and whose target in rtl is RTL_LABEL.
942 If LAST_INSN is nonzero, we pretend that the jump appears
943 after insn LAST_INSN instead of at the current point in the insn stream.
945 The fixup will be used later to insert insns just before the goto.
946 Those insns will restore the stack level as appropriate for the
947 target label, and will (in the case of C++) also invoke any object
948 destructors which have to be invoked when we exit the scopes which
949 are exited by the goto.
951 Value is nonzero if a fixup is made. */
954 expand_fixup (tree_label, rtl_label, last_insn)
959 struct nesting *block, *end_block;
961 /* See if we can recognize which block the label will be output in.
962 This is possible in some very common cases.
963 If we succeed, set END_BLOCK to that block.
964 Otherwise, set it to 0. */
967 && (rtl_label == cond_stack->data.cond.endif_label
968 || rtl_label == cond_stack->data.cond.next_label))
969 end_block = cond_stack;
970 /* If we are in a loop, recognize certain labels which
971 are likely targets. This reduces the number of fixups
972 we need to create. */
974 && (rtl_label == loop_stack->data.loop.start_label
975 || rtl_label == loop_stack->data.loop.end_label
976 || rtl_label == loop_stack->data.loop.continue_label))
977 end_block = loop_stack;
981 /* Now set END_BLOCK to the binding level to which we will return. */
985 struct nesting *next_block = end_block->all;
988 /* First see if the END_BLOCK is inside the innermost binding level.
989 If so, then no cleanups or stack levels are relevant. */
990 while (next_block && next_block != block)
991 next_block = next_block->all;
996 /* Otherwise, set END_BLOCK to the innermost binding level
997 which is outside the relevant control-structure nesting. */
998 next_block = block_stack->next;
999 for (block = block_stack; block != end_block; block = block->all)
1000 if (block == next_block)
1001 next_block = next_block->next;
1002 end_block = next_block;
1005 /* Does any containing block have a stack level or cleanups?
1006 If not, no fixup is needed, and that is the normal case
1007 (the only case, for standard C). */
1008 for (block = block_stack; block != end_block; block = block->next)
1009 if (block->data.block.stack_level != 0
1010 || block->data.block.cleanups != 0)
1013 if (block != end_block)
1015 /* Ok, a fixup is needed. Add a fixup to the list of such. */
1016 struct goto_fixup *fixup
1017 = (struct goto_fixup *) ggc_alloc (sizeof (struct goto_fixup));
1018 /* In case an old stack level is restored, make sure that comes
1019 after any pending stack adjust. */
1020 /* ?? If the fixup isn't to come at the present position,
1021 doing the stack adjust here isn't useful. Doing it with our
1022 settings at that location isn't useful either. Let's hope someone does something with it. */
1025 do_pending_stack_adjust ();
1026 fixup->target = tree_label;
1027 fixup->target_rtl = rtl_label;
1029 /* Create a BLOCK node and a corresponding matched set of
1030 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
1031 this point. The notes will encapsulate any and all fixup
1032 code which we might later insert at this point in the insn
1033 stream. Also, the BLOCK node will be the parent (i.e. the
1034 `SUPERBLOCK') of any other BLOCK nodes which we might create
1035 later on when we are expanding the fixup code.
1037 Note that optimization passes (including expand_end_loop)
1038 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
1039 as a placeholder. */
1042 register rtx original_before_jump
1043 = last_insn ? last_insn : get_last_insn ();
1048 block = make_node (BLOCK);
1049 TREE_USED (block) = 1;
1051 if (!cfun->x_whole_function_mode_p)
1052 insert_block (block);
1056 = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
1057 BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
1062 start = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
1063 if (cfun->x_whole_function_mode_p)
1064 NOTE_BLOCK (start) = block;
1065 fixup->before_jump = emit_note (NULL, NOTE_INSN_DELETED);
1066 end = emit_note (NULL, NOTE_INSN_BLOCK_END);
1067 if (cfun->x_whole_function_mode_p)
1068 NOTE_BLOCK (end) = block;
1069 fixup->context = block;
1071 emit_insns_after (start, original_before_jump);
1074 fixup->block_start_count = current_block_start_count;
1075 fixup->stack_level = 0;
1076 fixup->cleanup_list_list
1077 = ((block->data.block.outer_cleanups
1078 || block->data.block.cleanups)
1079 ? tree_cons (NULL_TREE, block->data.block.cleanups,
1080 block->data.block.outer_cleanups)
1082 fixup->next = goto_fixup_chain;
1083 goto_fixup_chain = fixup;
1089 /* Expand any needed fixups in the outermost binding level of the
1090 function. FIRST_INSN is the first insn in the function. */
1093 expand_fixups (first_insn)
1096 fixup_gotos (NULL, NULL_RTX, NULL_TREE, first_insn, 0);
1099 /* When exiting a binding contour, process all pending gotos requiring fixups.
1100 THISBLOCK is the structure that describes the block being exited.
1101 STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
1102 CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
1103 FIRST_INSN is the insn that began this contour.
1105 Gotos that jump out of this contour must restore the
1106 stack level and do the cleanups before actually jumping.
1108 DONT_JUMP_IN nonzero means report an error if there is a jump into this
1109 contour from before the beginning of the contour.
1110 This is also done if STACK_LEVEL is nonzero. */
1113 fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
1114 struct nesting *thisblock;
1120 register struct goto_fixup *f, *prev;
1122 /* F is the fixup we are considering; PREV is the previous one. */
1123 /* We run this loop in two passes so that cleanups of exited blocks
1124 are run first, and blocks that are exited are marked so afterwards. */
1127 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1129 /* Test for a fixup that is inactive because it is already handled. */
1130 if (f->before_jump == 0)
1132 /* Delete inactive fixup from the chain, if that is easy to do. */
1134 prev->next = f->next;
1136 /* Has this fixup's target label been defined?
1137 If so, we can finalize it. */
1138 else if (PREV_INSN (f->target_rtl) != 0)
1140 register rtx cleanup_insns;
1142 /* If this fixup jumped into this contour from before the beginning
1143 of this contour, report an error. This code used to use
1144 the first non-label insn after f->target_rtl, but that's
1145 wrong since such insns can be added by things like put_var_into_stack
1146 and can have INSN_UIDs that are out of the range of the block.
1147 /* ??? Bug: this does not detect jumping in through intermediate
1148 blocks that have stack levels or cleanups.
1149 It detects only a problem with the innermost block
1150 around the label. */
1152 && (dont_jump_in || stack_level || cleanup_list)
1153 && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
1154 && INSN_UID (first_insn) > INSN_UID (f->before_jump)
1155 && ! DECL_ERROR_ISSUED (f->target))
1157 error_with_decl (f->target,
1158 "label `%s' used before containing binding contour");
1159 /* Prevent multiple errors for one label. */
1160 DECL_ERROR_ISSUED (f->target) = 1;
1163 /* We will expand the cleanups into a sequence of their own and
1164 then later on we will attach this new sequence to the insn
1165 stream just ahead of the actual jump insn. */
1169 /* Temporarily restore the lexical context where we will
1170 logically be inserting the fixup code. We do this for the
1171 sake of getting the debugging information right. */
1174 set_block (f->context);
1176 /* Expand the cleanups for blocks this jump exits. */
1177 if (f->cleanup_list_list)
1180 for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
1181 /* Marked elements correspond to blocks that have been closed.
1182 Do their cleanups. */
1183 if (TREE_ADDRESSABLE (lists)
1184 && TREE_VALUE (lists) != 0)
1186 expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
1187 /* Pop any pushes done in the cleanups,
1188 in case function is about to return. */
1189 do_pending_stack_adjust ();
1193 /* Restore stack level for the biggest contour that this
1194 jump jumps out of. */
1196 && ! (f->target_rtl == return_label
1197 && ((TREE_CODE (TREE_TYPE (current_function_decl))
1199 && (TYPE_RETURNS_STACK_DEPRESSED
1200 (TREE_TYPE (current_function_decl))))))
1201 emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);
1203 /* Finish up the sequence containing the insns which implement the
1204 necessary cleanups, and then attach that whole sequence to the
1205 insn stream just ahead of the actual jump insn. Attaching it
1206 at that point ensures that any cleanups which are in fact
1207 implicit C++ object destructions (which must be executed upon
1208 leaving the block) appear (to the debugger) to be taking place
1209 in an area of the generated code where the object(s) being
1210 destructed are still "in scope". */
1212 cleanup_insns = get_insns ();
1216 emit_insns_after (cleanup_insns, f->before_jump);
1222 /* For any still-undefined labels, do the cleanups for this block now.
1223 We must do this now since items in the cleanup list may go out
1224 of scope when the block ends. */
1225 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1226 if (f->before_jump != 0
1227 && PREV_INSN (f->target_rtl) == 0
1228 /* Label has still not appeared. If we are exiting a block with
1229 a stack level to restore, that started before the fixup,
1230 mark this stack level as needing restoration
1231 when the fixup is later finalized. */
1233 /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
1234 means the label is undefined. That's erroneous, but possible. */
1235 && (thisblock->data.block.block_start_count
1236 <= f->block_start_count))
1238 tree lists = f->cleanup_list_list;
1241 for (; lists; lists = TREE_CHAIN (lists))
1242 /* If the following elt. corresponds to our containing block
1243 then the elt. must be for this block. */
1244 if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
1248 set_block (f->context);
1249 expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
1250 do_pending_stack_adjust ();
1251 cleanup_insns = get_insns ();
1254 if (cleanup_insns != 0)
1256 = emit_insns_after (cleanup_insns, f->before_jump);
1258 f->cleanup_list_list = TREE_CHAIN (lists);
1262 f->stack_level = stack_level;
1266 /* Return the number of times character C occurs in string S. */
1268 n_occurrences (c, s)
1278 /* Generate RTL for an asm statement (explicit assembler code).
1279 BODY is a STRING_CST node containing the assembler code text,
1280 or an ADDR_EXPR containing a STRING_CST. */
1286 if (current_function_check_memory_usage)
1288 error ("`asm' cannot be used in function where memory usage is checked");
1292 if (TREE_CODE (body) == ADDR_EXPR)
1293 body = TREE_OPERAND (body, 0);
1295 emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
1296 TREE_STRING_POINTER (body)));
1300 /* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
1301 OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
1302 inputs and NOUTPUTS outputs to this extended-asm. Upon return,
1303 *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
1304 memory operand. Similarly, *ALLOWS_REG will be TRUE iff the
1305 constraint allows the use of a register operand. And, *IS_INOUT
1306 will be true if the operand is read-write, i.e., if it is used as
1307 an input as well as an output. If *CONSTRAINT_P is not in
1308 canonical form, it will be made canonical. (Note that `+' will be
1309 replaced with `=' as part of this process.)
1311 Returns TRUE if all went well; FALSE if an error occurred. */
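/* A few illustrative cases (a sketch of the intended behavior, not an
   exhaustive list): "=r" is left unchanged and sets *ALLOWS_REG; "+m"
   sets *IS_INOUT and *ALLOWS_MEM and is canonicalized to "=m"; "r=" is
   canonicalized to "=r" after a warning that the `=' was not at the
   beginning of the constraint.  */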
1314 parse_output_constraint (constraint_p,
1321 const char **constraint_p;
1329 const char *constraint = *constraint_p;
1332 /* Assume the constraint doesn't allow the use of either a register or memory. */
1334 *allows_mem = false;
1335 *allows_reg = false;
1337 /* Allow the `=' or `+' to not be at the beginning of the string,
1338 since it wasn't explicitly documented that way, and there is a
1339 large body of code that puts it last. Swap the character to
1340 the front, so as not to uglify any place else. */
1341 p = strchr (constraint, '=');
1343 p = strchr (constraint, '+');
1345 /* If the string doesn't contain an `=', issue an error message. */
1349 error ("output operand constraint lacks `='");
1353 /* If the constraint begins with `+', then the operand is both read
1354 from and written to. */
1355 *is_inout = (*p == '+');
1357 /* Make sure we can specify the matching operand. */
1358 if (*is_inout && operand_num > 9)
1360 error ("output operand constraint %d contains `+'",
1365 /* Canonicalize the output constraint so that it begins with `='. */
1366 if (p != constraint || is_inout)
1369 size_t c_len = strlen (constraint);
1371 if (p != constraint)
1372 warning ("output constraint `%c' for operand %d is not at the beginning",
1375 /* Make a copy of the constraint. */
1376 buf = alloca (c_len + 1);
1377 strcpy (buf, constraint);
1378 /* Swap the first character and the `=' or `+'. */
1379 buf[p - constraint] = buf[0];
1380 /* Make sure the first character is an `='. (Until we do this,
1381 it might be a `+'.) */
1383 /* Replace the constraint with the canonicalized string. */
1384 *constraint_p = ggc_alloc_string (buf, c_len);
1385 constraint = *constraint_p;
1388 /* Loop through the constraint string. */
1389 for (p = constraint + 1; *p; ++p)
1394 error ("operand constraint contains '+' or '=' at illegal position.");
1398 if (operand_num + 1 == ninputs + noutputs)
1400 error ("`%%' constraint used with last operand");
1405 case 'V': case 'm': case 'o':
1409 case '?': case '!': case '*': case '&': case '#':
1410 case 'E': case 'F': case 'G': case 'H':
1411 case 's': case 'i': case 'n':
1412 case 'I': case 'J': case 'K': case 'L': case 'M':
1413 case 'N': case 'O': case 'P': case ',':
1416 case '0': case '1': case '2': case '3': case '4':
1417 case '5': case '6': case '7': case '8': case '9':
1418 error ("matching constraint not valid in output operand");
1422 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1423 excepting those that expand_call created. So match memory and hope. */
1440 if (REG_CLASS_FROM_LETTER (*p) != NO_REGS)
1442 #ifdef EXTRA_CONSTRAINT
1445 /* Otherwise we can't assume anything about the nature of
1446 the constraint except that it isn't purely registers.
1447 Treat it like "g" and hope for the best. */
1458 /* Generate RTL for an asm statement with arguments.
1459 STRING is the instruction template.
1460 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
1461 Each output or input has an expression in the TREE_VALUE and
1462 a constraint-string in the TREE_PURPOSE.
1463 CLOBBERS is a list of STRING_CST nodes each naming a hard register
1464 that is clobbered by this insn.
1466 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
1467 Some elements of OUTPUTS may be replaced with trees representing temporary
1468 values. The caller should copy those temporary values to the originally specified lvalues.
1471 VOL nonzero means the insn is volatile; don't optimize it. */
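/* For illustration (a hedged sketch of the tree-level interface, not
   code from this file): for the C statement

	asm volatile ("add %1,%0" : "=r" (x) : "r" (y) : "cc");

   STRING is the STRING_CST "add %1,%0"; OUTPUTS is a one-element
   TREE_LIST whose TREE_PURPOSE is the constraint string "=r" and whose
   TREE_VALUE is the expression for `x'; INPUTS likewise describes
   ("r", y); CLOBBERS contains the STRING_CST "cc"; and VOL is nonzero
   because of `volatile'.  */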
1474 expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
1475 tree string, outputs, inputs, clobbers;
1477 const char *filename;
1480 rtvec argvec, constraints;
1482 int ninputs = list_length (inputs);
1483 int noutputs = list_length (outputs);
1488 /* Vector of RTX's of evaluated output operands. */
1489 rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1490 int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
1491 rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1492 enum machine_mode *inout_mode
1493 = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
1494 const char **output_constraints
1495 = alloca (noutputs * sizeof (const char *));
1496 /* The insn we have emitted. */
1498 int old_generating_concat_p = generating_concat_p;
1500 /* An ASM with no outputs needs to be treated as volatile, for now. */
1504 if (current_function_check_memory_usage)
1506 error ("`asm' cannot be used with `-fcheck-memory-usage'");
1510 #ifdef MD_ASM_CLOBBERS
1511 /* Sometimes we wish to automatically clobber registers across an asm.
1512 Case in point is when the i386 backend moved from cc0 to a hard reg --
1513 maintaining source-level compatibility means automatically clobbering
1514 the flags register. */
1515 MD_ASM_CLOBBERS (clobbers);
1518 if (current_function_check_memory_usage)
1520 error ("`asm' cannot be used in function where memory usage is checked");
1524 /* Count the number of meaningful clobbered registers, ignoring what
1525 we would ignore later. */
1527 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1529 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1531 i = decode_reg_name (regname);
1532 if (i >= 0 || i == -4)
1535 error ("unknown register name `%s' in `asm'", regname);
1540 /* Check that the number of alternatives is constant across all operands. */
1542 if (outputs || inputs)
1544 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1545 int nalternatives = n_occurrences (',', TREE_STRING_POINTER (tmp));
1548 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1550 error ("too many alternatives in `asm'");
1557 const char *constraint = TREE_STRING_POINTER (TREE_PURPOSE (tmp));
1559 if (n_occurrences (',', constraint) != nalternatives)
1561 error ("operand constraints for `asm' differ in number of alternatives");
1565 if (TREE_CHAIN (tmp))
1566 tmp = TREE_CHAIN (tmp);
1568 tmp = next, next = 0;
1572 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1574 tree val = TREE_VALUE (tail);
1575 tree type = TREE_TYPE (val);
1576 const char *constraint;
1581 /* If there's an erroneous arg, emit no insn. */
1582 if (type == error_mark_node)
1585 /* Make sure constraint has `=' and does not have `+'. Also, see
1586 if it allows any register. Be liberal on the latter test, since
1587 the worst that happens if we get it wrong is we issue an error message. */
1590 constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));
1591 output_constraints[i] = constraint;
1593 /* Try to parse the output constraint. If that fails, there's
1594 no point in going further. */
1595 if (!parse_output_constraint (&output_constraints[i],
1604 /* If an output operand is not a decl or indirect ref and our constraint
1605 allows a register, make a temporary to act as an intermediate.
1606 Make the asm insn write into that, then our caller will copy it to
1607 the real output operand. Likewise for promoted variables. */
1609 generating_concat_p = 0;
1611 real_output_rtx[i] = NULL_RTX;
1612 if ((TREE_CODE (val) == INDIRECT_REF
1615 && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
1616 && ! (GET_CODE (DECL_RTL (val)) == REG
1617 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
1622 mark_addressable (TREE_VALUE (tail));
1625 = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode,
1626 EXPAND_MEMORY_USE_WO);
1628 if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
1629 error ("output number %d not directly addressable", i);
1630 if ((! allows_mem && GET_CODE (output_rtx[i]) == MEM)
1631 || GET_CODE (output_rtx[i]) == CONCAT)
1633 real_output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1634 output_rtx[i] = gen_reg_rtx (GET_MODE (output_rtx[i]));
1636 emit_move_insn (output_rtx[i], real_output_rtx[i]);
1641 output_rtx[i] = assign_temp (type, 0, 0, 1);
1642 TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
1645 generating_concat_p = old_generating_concat_p;
1649 inout_mode[ninout] = TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)));
1650 inout_opnum[ninout++] = i;
1655 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1657 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1661 /* Make vectors for the expression-rtx and constraint strings. */
1663 argvec = rtvec_alloc (ninputs);
1664 constraints = rtvec_alloc (ninputs);
1666 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
1667 : GET_MODE (output_rtx[0])),
1668 TREE_STRING_POINTER (string),
1669 empty_string, 0, argvec, constraints,
1672 MEM_VOLATILE_P (body) = vol;
1674 /* Eval the inputs and put them into ARGVEC.
1675 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1678 for (tail = inputs; tail; tail = TREE_CHAIN (tail))
1681 int allows_reg = 0, allows_mem = 0;
1682 const char *constraint, *orig_constraint;
1686 /* If there's an erroneous arg, emit no insn,
1687 because the ASM_INPUT would get VOIDmode
1688 and that could cause a crash in reload. */
1689 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1692 /* ??? Can this happen, and does the error message make any sense? */
1693 if (TREE_PURPOSE (tail) == NULL_TREE)
1695 error ("hard register `%s' listed as input operand to `asm'",
1696 TREE_STRING_POINTER (TREE_VALUE (tail)) );
1700 constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));
1701 c_len = strlen (constraint);
1702 orig_constraint = constraint;
1704 /* Make sure constraint has neither `=', `+', nor '&'. */
1706 for (j = 0; j < c_len; j++)
1707 switch (constraint[j])
1709 case '+': case '=': case '&':
1710 if (constraint == orig_constraint)
1712 error ("input operand constraint contains `%c'",
1719 if (constraint == orig_constraint
1720 && i + 1 == ninputs - ninout)
1722 error ("`%%' constraint used with last operand");
1727 case 'V': case 'm': case 'o':
1732 case '?': case '!': case '*': case '#':
1733 case 'E': case 'F': case 'G': case 'H':
1734 case 's': case 'i': case 'n':
1735 case 'I': case 'J': case 'K': case 'L': case 'M':
1736 case 'N': case 'O': case 'P': case ',':
1739 /* Whether or not a numeric constraint allows a register is
1740 decided by the matching constraint, and so there is no need
1741 to do anything special with them. We must handle them in
1742 the default case, so that we don't unnecessarily force
1743 operands to memory. */
1744 case '0': case '1': case '2': case '3': case '4':
1745 case '5': case '6': case '7': case '8': case '9':
1746 if (constraint[j] >= '0' + noutputs)
1749 ("matching constraint references invalid operand number");
1753 /* Try and find the real constraint for this dup. */
1754 if ((j == 0 && c_len == 1)
1755 || (j == 1 && c_len == 2 && constraint[0] == '%'))
1759 for (j = constraint[j] - '0'; j > 0; --j)
1762 constraint = TREE_STRING_POINTER (TREE_PURPOSE (o));
1763 c_len = strlen (constraint);
1780 if (! ISALPHA (constraint[j]))
1782 error ("invalid punctuation `%c' in constraint",
1786 if (REG_CLASS_FROM_LETTER (constraint[j]) != NO_REGS)
1788 #ifdef EXTRA_CONSTRAINT
1791 /* Otherwise we can't assume anything about the nature of
1792 the constraint except that it isn't purely registers.
1793 Treat it like "g" and hope for the best. */
1801 if (! allows_reg && allows_mem)
1802 mark_addressable (TREE_VALUE (tail));
1804 op = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
1806 /* Never pass a CONCAT to an ASM. */
1807 generating_concat_p = 0;
1808 if (GET_CODE (op) == CONCAT)
1809 op = force_reg (GET_MODE (op), op);
1811 if (asm_operand_ok (op, constraint) <= 0)
1814 op = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))), op);
1815 else if (!allows_mem)
1816 warning ("asm operand %d probably doesn't match constraints", i);
1817 else if (CONSTANT_P (op))
1818 op = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1820 else if (GET_CODE (op) == REG
1821 || GET_CODE (op) == SUBREG
1822 || GET_CODE (op) == ADDRESSOF
1823 || GET_CODE (op) == CONCAT)
1825 tree type = TREE_TYPE (TREE_VALUE (tail));
1826 tree qual_type = build_qualified_type (type,
1828 | TYPE_QUAL_CONST));
1829 rtx memloc = assign_temp (qual_type, 1, 1, 1);
1831 emit_move_insn (memloc, op);
1835 else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
1836 /* We won't recognize volatile memory as an available
1837 memory_operand at this point. Ignore it. */
1839 else if (queued_subexp_p (op))
1842 /* ??? Leave this only until we have experience with what
1843 happens in combine and elsewhere when constraints are not satisfied. */
1845 warning ("asm operand %d probably doesn't match constraints", i);
1847 generating_concat_p = old_generating_concat_p;
1848 ASM_OPERANDS_INPUT (body, i) = op;
1850 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
1851 = gen_rtx_ASM_INPUT (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1856 /* Protect all the operands from the queue now that they have all been evaluated. */
1859 generating_concat_p = 0;
1861 for (i = 0; i < ninputs - ninout; i++)
1862 ASM_OPERANDS_INPUT (body, i)
1863 = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
1865 for (i = 0; i < noutputs; i++)
1866 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1868 /* For in-out operands, copy output rtx to input rtx. */
1869 for (i = 0; i < ninout; i++)
1871 int j = inout_opnum[i];
1873 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
1875 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
1876 = gen_rtx_ASM_INPUT (inout_mode[i], digit_string (j));
1879 generating_concat_p = old_generating_concat_p;
1881 /* Now, for each output, construct an rtx
1882 (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
1883 ARGVEC CONSTRAINTS))
1884 If there is more than one, put them inside a PARALLEL. */
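/* A hedged sketch of the resulting RTL shape (not literal compiler
   output): with two outputs and one clobbered hard register the emitted
   body is roughly

	(parallel [(set (out0) (asm_operands ... 0 ...))
		   (set (out1) (asm_operands ... 1 ...))
		   (clobber (reg:QI clobbered-regno))])

   where each ASM_OPERANDS records the template string, its own output
   constraint and operand number, and shares the input ARGVEC and
   CONSTRAINTS vectors.  */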
1886 if (noutputs == 1 && nclobbers == 0)
1888 ASM_OPERANDS_OUTPUT_CONSTRAINT (body)
1889 = output_constraints[0];
1890 insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1893 else if (noutputs == 0 && nclobbers == 0)
1895 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1896 insn = emit_insn (body);
1907 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1909 /* For each output operand, store a SET. */
1910 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1912 XVECEXP (body, 0, i)
1913 = gen_rtx_SET (VOIDmode,
1915 gen_rtx_ASM_OPERANDS
1916 (GET_MODE (output_rtx[i]),
1917 TREE_STRING_POINTER (string),
1918 output_constraints[i],
1919 i, argvec, constraints,
1922 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1925 /* If there are no outputs (but there are some clobbers)
1926 store the bare ASM_OPERANDS into the PARALLEL. */
1929 XVECEXP (body, 0, i++) = obody;
1931 /* Store (clobber REG) for each clobbered register specified. */
1933 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1935 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1936 int j = decode_reg_name (regname);
1940 if (j == -3) /* `cc', which is not a register */
1943 if (j == -4) /* `memory', don't cache memory across asm */
1945 XVECEXP (body, 0, i++)
1946 = gen_rtx_CLOBBER (VOIDmode,
1949 gen_rtx_SCRATCH (VOIDmode)));
1953 /* Ignore unknown register, error already signaled. */
1957 /* Use QImode since that's guaranteed to clobber just one reg. */
1958 XVECEXP (body, 0, i++)
1959 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (QImode, j));
1962 insn = emit_insn (body);
1965 /* For any outputs that needed reloading into registers, spill them
1966 back to where they belong. */
1967 for (i = 0; i < noutputs; ++i)
1968 if (real_output_rtx[i])
1969 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1974 /* Generate RTL to evaluate the expression EXP
1975 and remember it in case this is the VALUE in a ({... VALUE; }) construct. */
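/* For example, in the GNU C statement expression
   `({ int i = foo (); i + 1; })' each statement is expanded through this
   function, and the value of the last one, `i + 1', is the value that
   must be remembered (in last_expr_value and last_expr_type) as the
   value of the whole construct.  */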
1978 expand_expr_stmt (exp)
1981 /* If -W, warn about statements with no side effects,
1982 except for an explicit cast to void (e.g. for assert()), and
1983 except inside a ({...}) where they may be useful. */
1984 if (expr_stmts_for_value == 0 && exp != error_mark_node)
1986 if (! TREE_SIDE_EFFECTS (exp))
1988 if ((extra_warnings || warn_unused_value)
1989 && !(TREE_CODE (exp) == CONVERT_EXPR
1990 && VOID_TYPE_P (TREE_TYPE (exp))))
1991 warning_with_file_and_line (emit_filename, emit_lineno,
1992 "statement with no effect");
1994 else if (warn_unused_value)
1995 warn_if_unused_value (exp);
1998 /* If EXP is of function type and we are expanding statements for
1999 value, convert it to pointer-to-function. */
2000 if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
2001 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
2003 /* The call to `expand_expr' could cause last_expr_type and
2004 last_expr_value to get reset. Therefore, we set last_expr_value
2005 and last_expr_type *after* calling expand_expr. */
2006 last_expr_value = expand_expr (exp,
2007 (expr_stmts_for_value
2008 ? NULL_RTX : const0_rtx),
2010 last_expr_type = TREE_TYPE (exp);
2012 /* If all we do is reference a volatile value in memory,
2013 copy it to a register to be sure it is actually touched. */
2014 if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
2015 && TREE_THIS_VOLATILE (exp))
2017 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
2019 else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
2020 copy_to_reg (last_expr_value);
2023 rtx lab = gen_label_rtx ();
2025 /* Compare the value with itself to reference it. */
2026 emit_cmp_and_jump_insns (last_expr_value, last_expr_value, EQ,
2027 expand_expr (TYPE_SIZE (last_expr_type),
2028 NULL_RTX, VOIDmode, 0),
2030 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT,
2036 /* If this expression is part of a ({...}) and is in memory, we may have
2037 to preserve temporaries. */
2038 preserve_temp_slots (last_expr_value);
2040 /* Free any temporaries used to evaluate this expression. Any temporary
2041 used as a result of this expression will already have been preserved above. */
2048 /* Warn if EXP contains any computations whose results are not used.
2049 Return 1 if a warning is printed; 0 otherwise. */
2052 warn_if_unused_value (exp)
2055 if (TREE_USED (exp))
2058 /* Don't warn about void constructs. This includes casting to void,
2059 void function calls, and statement expressions with a final cast to void. */
2061 if (VOID_TYPE_P (TREE_TYPE (exp)))
2064 /* If this is an expression with side effects, don't warn. */
2065 if (TREE_SIDE_EFFECTS (exp))
2068 switch (TREE_CODE (exp))
2070 case PREINCREMENT_EXPR:
2071 case POSTINCREMENT_EXPR:
2072 case PREDECREMENT_EXPR:
2073 case POSTDECREMENT_EXPR:
2078 case METHOD_CALL_EXPR:
2080 case TRY_CATCH_EXPR:
2081 case WITH_CLEANUP_EXPR:
2086 /* For a binding, warn if no side effect within it. */
2087 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2090 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2092 case TRUTH_ORIF_EXPR:
2093 case TRUTH_ANDIF_EXPR:
2094 /* In && or ||, warn if 2nd operand has no side effect. */
2095 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2098 if (TREE_NO_UNUSED_WARNING (exp))
2100 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
2102 /* Let people do `(foo (), 0)' without a warning. */
2103 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2105 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2109 case NON_LVALUE_EXPR:
2110 /* Don't warn about conversions not explicit in the user's program. */
2111 if (TREE_NO_UNUSED_WARNING (exp))
2113 /* Assignment to a cast usually results in a cast of a modify.
2114 Don't complain about that. There can be an arbitrary number of
2115 casts before the modify, so we must loop until we find the first
2116 non-cast expression and then test to see if that is a modify. */
2118 tree tem = TREE_OPERAND (exp, 0);
2120 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2121 tem = TREE_OPERAND (tem, 0);
2123 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2124 || TREE_CODE (tem) == CALL_EXPR)
2130 /* Don't warn about automatic dereferencing of references, since
2131 the user cannot control it. */
2132 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2133 return warn_if_unused_value (TREE_OPERAND (exp, 0));
2137 /* Referencing a volatile value is a side effect, so don't warn. */
2139 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2140 && TREE_THIS_VOLATILE (exp))
2143 /* If this is an expression which has no operands, there is no value
2144 to be unused. There are no such language-independent codes,
2145 but front ends may define such. */
2146 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2147 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
2151 warning_with_file_and_line (emit_filename, emit_lineno,
2152 "value computed is not used");
2157 /* Clear out the memory of the last expression evaluated. */
2165 /* Begin a statement which will return a value.
2166 Return the RTL_EXPR for this statement expr.
2167 The caller must save that value and pass it to expand_end_stmt_expr. */
2170 expand_start_stmt_expr ()
2174 /* Make the RTL_EXPR node temporary, not momentary,
2175 so that rtl_expr_chain doesn't become garbage. */
2176 t = make_node (RTL_EXPR);
2177 do_pending_stack_adjust ();
2178 start_sequence_for_rtl_expr (t);
2180 expr_stmts_for_value++;
2184 /* Restore the previous state at the end of a statement that returns a value.
2185 Returns a tree node representing the statement's value and the
2186 insns to compute the value.
2188 The nodes of that expression have been freed by now, so we cannot use them.
2189 But we don't want to do that anyway; the expression has already been
2190 evaluated and now we just want to use the value. So generate an RTL_EXPR
2191 with the proper type and RTL value.
2193 If the last substatement was not an expression,
2194 return something with type `void'. */
2197 expand_end_stmt_expr (t)
2202 if (last_expr_type == 0)
2204 last_expr_type = void_type_node;
2205 last_expr_value = const0_rtx;
2207 else if (last_expr_value == 0)
2208 /* There are some cases where this can happen, such as when the
2209 statement is void type. */
2210 last_expr_value = const0_rtx;
2211 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2212 /* Remove any possible QUEUED. */
2213 last_expr_value = protect_from_queue (last_expr_value, 0);
2217 TREE_TYPE (t) = last_expr_type;
2218 RTL_EXPR_RTL (t) = last_expr_value;
2219 RTL_EXPR_SEQUENCE (t) = get_insns ();
2221 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2225 /* Don't consider deleting this expr or containing exprs at tree level. */
2226 TREE_SIDE_EFFECTS (t) = 1;
2227 /* Propagate volatility of the actual RTL expr. */
2228 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2231 expr_stmts_for_value--;
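/* A hedged sketch of the construct these two functions bracket.  For a
   GNU C statement expression such as

	int y = ({ int t = f (x); t * t; });

   the front end calls expand_start_stmt_expr () before expanding the
   enclosed statements and expand_end_stmt_expr (t) afterwards; the value
   of the last substatement (t * t here) is what ends up in
   last_expr_value/last_expr_type and becomes the RTL_EXPR's RTL.  */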
2236 /* Generate RTL for the start of an if-then. COND is the expression
2237 whose truth should be tested.
2239 If EXITFLAG is nonzero, this conditional is visible to
2240 `exit_something'. */
2243 expand_start_cond (cond, exitflag)
2247 struct nesting *thiscond = ALLOC_NESTING ();
2249 /* Make an entry on cond_stack for the cond we are entering. */
2251 thiscond->next = cond_stack;
2252 thiscond->all = nesting_stack;
2253 thiscond->depth = ++nesting_depth;
2254 thiscond->data.cond.next_label = gen_label_rtx ();
2255 /* Before we encounter an `else', we don't need a separate exit label
2256 unless there are supposed to be exit statements
2257 to exit this conditional. */
2258 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2259 thiscond->data.cond.endif_label = thiscond->exit_label;
2260 cond_stack = thiscond;
2261 nesting_stack = thiscond;
2263 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2266 /* Generate RTL between the then-clause and the elseif-clause
2267 of an if-then-elseif-.... */
2270 expand_start_elseif (cond)
2273 if (cond_stack->data.cond.endif_label == 0)
2274 cond_stack->data.cond.endif_label = gen_label_rtx ();
2275 emit_jump (cond_stack->data.cond.endif_label);
2276 emit_label (cond_stack->data.cond.next_label);
2277 cond_stack->data.cond.next_label = gen_label_rtx ();
2278 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2281 /* Generate RTL between the then-clause and the else-clause
2282 of an if-then-else. */
2285 expand_start_else ()
2287 if (cond_stack->data.cond.endif_label == 0)
2288 cond_stack->data.cond.endif_label = gen_label_rtx ();
2290 emit_jump (cond_stack->data.cond.endif_label);
2291 emit_label (cond_stack->data.cond.next_label);
2292 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2295 /* After calling expand_start_else, turn this "else" into an "else if"
2296 by providing another condition. */
2299 expand_elseif (cond)
2302 cond_stack->data.cond.next_label = gen_label_rtx ();
2303 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2306 /* Generate RTL for the end of an if-then.
2307 Pop the record for it off of cond_stack. */
2312 struct nesting *thiscond = cond_stack;
2314 do_pending_stack_adjust ();
2315 if (thiscond->data.cond.next_label)
2316 emit_label (thiscond->data.cond.next_label);
2317 if (thiscond->data.cond.endif_label)
2318 emit_label (thiscond->data.cond.endif_label);
2320 POPSTACK (cond_stack);
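/* A hedged sketch of how a front end might drive the routines above for
   an if/else-if/else chain (in practice these calls are interleaved with
   parsing the individual clauses):

	expand_start_cond (cond1, 0);
	  ... expand the statements of the first arm ...
	expand_start_elseif (cond2);
	  ... expand the statements of the second arm ...
	expand_start_else ();
	  ... expand the statements of the final arm ...
	expand_end_cond ();
 */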
2324 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2325 loop should be exited by `exit_something'. This is a loop for which
2326 `expand_continue' will jump to the top of the loop.
2328 Make an entry on loop_stack to record the labels associated with this loop. */
2332 expand_start_loop (exit_flag)
2335 register struct nesting *thisloop = ALLOC_NESTING ();
2337 /* Make an entry on loop_stack for the loop we are entering. */
2339 thisloop->next = loop_stack;
2340 thisloop->all = nesting_stack;
2341 thisloop->depth = ++nesting_depth;
2342 thisloop->data.loop.start_label = gen_label_rtx ();
2343 thisloop->data.loop.end_label = gen_label_rtx ();
2344 thisloop->data.loop.alt_end_label = 0;
2345 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2346 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2347 loop_stack = thisloop;
2348 nesting_stack = thisloop;
2350 do_pending_stack_adjust ();
2352 emit_note (NULL, NOTE_INSN_LOOP_BEG);
2353 emit_label (thisloop->data.loop.start_label);
2358 /* Like expand_start_loop but for a loop where the continuation point
2359 (for expand_continue_loop) will be specified explicitly. */
2362 expand_start_loop_continue_elsewhere (exit_flag)
2365 struct nesting *thisloop = expand_start_loop (exit_flag);
2366 loop_stack->data.loop.continue_label = gen_label_rtx ();
2370 /* Begin a null, aka do { } while (0) "loop". But since the contents
2371 of said loop can still contain a break, we must frob the loop nest. */
2374 expand_start_null_loop ()
2376 register struct nesting *thisloop = ALLOC_NESTING ();
2378 /* Make an entry on loop_stack for the loop we are entering. */
2380 thisloop->next = loop_stack;
2381 thisloop->all = nesting_stack;
2382 thisloop->depth = ++nesting_depth;
2383 thisloop->data.loop.start_label = emit_note (NULL, NOTE_INSN_DELETED);
2384 thisloop->data.loop.end_label = gen_label_rtx ();
2385 thisloop->data.loop.alt_end_label = NULL_RTX;
2386 thisloop->data.loop.continue_label = thisloop->data.loop.end_label;
2387 thisloop->exit_label = thisloop->data.loop.end_label;
2388 loop_stack = thisloop;
2389 nesting_stack = thisloop;
2394 /* Specify the continuation point for a loop started with
2395 expand_start_loop_continue_elsewhere.
2396 Use this at the point in the code to which a continue statement should jump. */
2400 expand_loop_continue_here ()
2402 do_pending_stack_adjust ();
2403 emit_note (NULL, NOTE_INSN_LOOP_CONT);
2404 emit_label (loop_stack->data.loop.continue_label);
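/* A hedged sketch of how the loop routines above fit together for a
   `while (cond) body;' statement (details vary by front end):

	expand_start_loop (1);
	expand_exit_loop_if_false (0, cond);	passing 0 selects the
						innermost loop on loop_stack
	  ... expand body; a `continue' calls expand_continue_loop (0) ...
	expand_end_loop ();

   A `for' statement whose continue point is its increment expression uses
   expand_start_loop_continue_elsewhere and marks that point with
   expand_loop_continue_here instead.  */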
2407 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2408 Pop the block off of loop_stack. */
2413 rtx start_label = loop_stack->data.loop.start_label;
2414 rtx insn = get_last_insn ();
2415 int needs_end_jump = 1;
2417 /* Mark the continue-point at the top of the loop if none elsewhere. */
2418 if (start_label == loop_stack->data.loop.continue_label)
2419 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2421 do_pending_stack_adjust ();
2423 /* If optimizing, perhaps reorder the loop.
2424 First, try to use a condjump near the end.
2425 expand_exit_loop_if_false ends loops with unconditional jumps, like this:
2428 if (test) goto label;
2430 goto loop_stack->data.loop.end_label
2434 If we find such a pattern, we can end the loop earlier. */
2437 && GET_CODE (insn) == CODE_LABEL
2438 && LABEL_NAME (insn) == NULL
2439 && GET_CODE (PREV_INSN (insn)) == BARRIER)
2442 rtx jump = PREV_INSN (PREV_INSN (label));
2444 if (GET_CODE (jump) == JUMP_INSN
2445 && GET_CODE (PATTERN (jump)) == SET
2446 && SET_DEST (PATTERN (jump)) == pc_rtx
2447 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
2448 && (XEXP (SET_SRC (PATTERN (jump)), 0)
2449 == loop_stack->data.loop.end_label))
2453 /* The test might be complex and reference LABEL multiple times,
2454 like the loop in loop_iterations to set vtop. To handle this, we move LABEL. */
2456 insn = PREV_INSN (label);
2457 reorder_insns (label, label, start_label);
2459 for (prev = PREV_INSN (jump);; prev = PREV_INSN (prev))
2461 /* We ignore line number notes, but if we see any other note,
2462 in particular NOTE_INSN_BLOCK_*, NOTE_INSN_EH_REGION_*,
2463 NOTE_INSN_LOOP_*, we disable this optimization. */
2464 if (GET_CODE (prev) == NOTE)
2466 if (NOTE_LINE_NUMBER (prev) < 0)
2470 if (GET_CODE (prev) == CODE_LABEL)
2472 if (GET_CODE (prev) == JUMP_INSN)
2474 if (GET_CODE (PATTERN (prev)) == SET
2475 && SET_DEST (PATTERN (prev)) == pc_rtx
2476 && GET_CODE (SET_SRC (PATTERN (prev))) == IF_THEN_ELSE
2477 && (GET_CODE (XEXP (SET_SRC (PATTERN (prev)), 1))
2479 && XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0) == label)
2481 XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0)
2483 emit_note_after (NOTE_INSN_LOOP_END, prev);
2492 /* If the loop starts with a loop exit, roll that to the end where
2493 it will optimize together with the jump back.
2495 We look for the conditional branch to the exit, except that once
2496 we find such a branch, we don't look past 30 instructions.
2498 In more detail, if the loop presently looks like this (in pseudo-C):
2501 if (test) goto end_label;
2506 transform it to look like:
2512 if (test) goto end_label;
2513 goto newstart_label;
2516 Here, the `test' may actually consist of some reasonably complex
2517 code, terminating in a test. */
2522 ! (GET_CODE (insn) == JUMP_INSN
2523 && GET_CODE (PATTERN (insn)) == SET
2524 && SET_DEST (PATTERN (insn)) == pc_rtx
2525 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2529 rtx last_test_insn = NULL_RTX;
2531 /* Scan insns from the top of the loop looking for a qualified
2532 conditional exit. */
2533 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2534 insn = NEXT_INSN (insn))
2536 if (GET_CODE (insn) == NOTE)
2539 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2540 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2541 /* The code that actually moves the exit test will
2542 carefully leave BLOCK notes in their original
2543 location. That means, however, that we can't debug
2544 the exit test itself. So, we refuse to move code
2545 containing BLOCK notes at low optimization levels. */
2548 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2550 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
2554 /* We've come to the end of an EH region, but
2555 never saw the beginning of that region. That
2556 means that an EH region begins before the top
2557 of the loop, and ends in the middle of it. The
2558 existence of such a situation violates a basic
2559 assumption in this code, since that would imply
2560 that even when EH_REGIONS is zero, we might
2561 move code out of an exception region. */
2565 /* We must not walk into a nested loop. */
2566 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
2569 /* We already know this INSN is a NOTE, so there's no
2570 point in looking at it to see if it's a JUMP. */
2574 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2577 if (last_test_insn && num_insns > 30)
2581 /* We don't want to move a partial EH region. Consider:
2595 This isn't legal C++, but here's what it's supposed to
2596 mean: if cond() is true, stop looping. Otherwise,
2597 call bar, and keep looping. In addition, if cond
2598 throws an exception, catch it and keep looping. Such
2599 constructs are certainly legal in LISP.
2601 We should not move the `if (cond()) 0' test since then
2602 the EH-region for the try-block would be broken up.
2603 (In this case we would move the EH_BEG note for the `try'
2604 and `if cond()' but not the call to bar() or the
2607 So we don't look for tests within an EH region. */
2610 if (GET_CODE (insn) == JUMP_INSN
2611 && GET_CODE (PATTERN (insn)) == SET
2612 && SET_DEST (PATTERN (insn)) == pc_rtx)
2614 /* This is indeed a jump. */
2615 rtx dest1 = NULL_RTX;
2616 rtx dest2 = NULL_RTX;
2617 rtx potential_last_test;
2618 if (GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE)
2620 /* A conditional jump. */
2621 dest1 = XEXP (SET_SRC (PATTERN (insn)), 1);
2622 dest2 = XEXP (SET_SRC (PATTERN (insn)), 2);
2623 potential_last_test = insn;
2627 /* An unconditional jump. */
2628 dest1 = SET_SRC (PATTERN (insn));
2629 /* Include the BARRIER after the JUMP. */
2630 potential_last_test = NEXT_INSN (insn);
2634 if (dest1 && GET_CODE (dest1) == LABEL_REF
2635 && ((XEXP (dest1, 0)
2636 == loop_stack->data.loop.alt_end_label)
2638 == loop_stack->data.loop.end_label)))
2640 last_test_insn = potential_last_test;
2644 /* If this was a conditional jump, there may be
2645 another label at which we should look. */
2652 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2654 /* We found one. Move everything from there up
2655 to the end of the loop, and add a jump into the loop
2656 to jump to there. */
2657 register rtx newstart_label = gen_label_rtx ();
2658 register rtx start_move = start_label;
2661 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2662 then we want to move this note also. */
2663 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2664 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2665 == NOTE_INSN_LOOP_CONT))
2666 start_move = PREV_INSN (start_move);
2668 emit_label_after (newstart_label, PREV_INSN (start_move));
2670 /* Actually move the insns. Start at the beginning, and
2671 keep copying insns until we've copied the last_test_insn. */
2673 for (insn = start_move; insn; insn = next_insn)
2675 /* Figure out which insn comes after this one. We have
2676 to do this before we move INSN. */
2677 if (insn == last_test_insn)
2678 /* We've moved all the insns. */
2679 next_insn = NULL_RTX;
2681 next_insn = NEXT_INSN (insn);
2683 if (GET_CODE (insn) == NOTE
2684 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2685 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2686 /* We don't want to move NOTE_INSN_BLOCK_BEGs or
2687 NOTE_INSN_BLOCK_ENDs because the correct generation
2688 of debugging information depends on these appearing
2689 in the same order in the RTL and in the tree
2690 structure, where they are represented as BLOCKs.
2691 So, we don't move block notes. Of course, moving
2692 the code inside the block is likely to make it
2693 impossible to debug the instructions in the exit
2694 test, but such is the price of optimization. */
2697 /* Move the INSN. */
2698 reorder_insns (insn, insn, get_last_insn ());
2701 emit_jump_insn_after (gen_jump (start_label),
2702 PREV_INSN (newstart_label));
2703 emit_barrier_after (PREV_INSN (newstart_label));
2704 start_label = newstart_label;
2710 emit_jump (start_label);
2711 emit_note (NULL, NOTE_INSN_LOOP_END);
2713 emit_label (loop_stack->data.loop.end_label);
2715 POPSTACK (loop_stack);
2720 /* Finish a null loop, aka do { } while (0). */
2723 expand_end_null_loop ()
2725 do_pending_stack_adjust ();
2726 emit_label (loop_stack->data.loop.end_label);
2728 POPSTACK (loop_stack);
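/* Hedged illustration: a degenerate statement such as

	do { if (p == 0) break; *p = 0; } while (0);

   goes through expand_start_null_loop / expand_end_null_loop.  No start
   label or backward jump is emitted, but the `break' still needs an
   end_label to target, which is why a loop_stack entry is pushed and
   popped for it.  */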
2733 /* Generate a jump to the current loop's continue-point.
2734 This is usually the top of the loop, but may be specified
2735 explicitly elsewhere. If not currently inside a loop,
2736 return 0 and do nothing; caller will print an error message. */
2739 expand_continue_loop (whichloop)
2740 struct nesting *whichloop;
2744 whichloop = loop_stack;
2747 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2752 /* Generate a jump to exit the current loop. If not currently inside a loop,
2753 return 0 and do nothing; caller will print an error message. */
2756 expand_exit_loop (whichloop)
2757 struct nesting *whichloop;
2761 whichloop = loop_stack;
2764 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2768 /* Generate a conditional jump to exit the current loop if COND
2769 evaluates to zero. If not currently inside a loop,
2770 return 0 and do nothing; caller will print an error message. */
2773 expand_exit_loop_if_false (whichloop, cond)
2774 struct nesting *whichloop;
2777 rtx label = gen_label_rtx ();
2782 whichloop = loop_stack;
2785 /* In order to handle fixups, we actually create a conditional jump
2786 around an unconditional branch to exit the loop. If fixups are
2787 necessary, they go before the unconditional branch. */
2789 do_jump (cond, NULL_RTX, label);
2790 last_insn = get_last_insn ();
2791 if (GET_CODE (last_insn) == CODE_LABEL)
2792 whichloop->data.loop.alt_end_label = last_insn;
2793 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2800 /* Return nonzero if the loop nest is empty. Else return zero. */
2803 stmt_loop_nest_empty ()
2805 /* cfun->stmt can be NULL if we are building a call to get the
2806 EH context for a setjmp/longjmp EH target and the current
2807 function was a deferred inline function. */
2808 return (cfun->stmt == NULL || loop_stack == NULL);
2811 /* Return non-zero if we should preserve sub-expressions as separate
2812 pseudos. We never do so if we aren't optimizing. We always do so
2813 if -fexpensive-optimizations.
2815 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2816 the loop may still be a small one. */
2819 preserve_subexpressions_p ()
2823 if (flag_expensive_optimizations)
2826 if (optimize == 0 || cfun == 0 || cfun->stmt == 0 || loop_stack == 0)
2829 insn = get_last_insn_anywhere ();
2832 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2833 < n_non_fixed_regs * 3));
2837 /* Generate a jump to exit the current loop, conditional, binding contour
2838 or case statement. Not all such constructs are visible to this function,
2839 only those started with EXIT_FLAG nonzero. Individual languages use
2840 the EXIT_FLAG parameter to control which kinds of constructs you can exit this way.
2843 If not currently inside anything that can be exited,
2844 return 0 and do nothing; caller will print an error message. */
2847 expand_exit_something ()
2851 for (n = nesting_stack; n; n = n->all)
2852 if (n->exit_label != 0)
2854 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2861 /* Generate RTL to return from the current function, with no value.
2862 (That is, we do not do anything about returning any value.) */
2865 expand_null_return ()
2867 rtx last_insn = get_last_insn ();
2869 /* If this function was declared to return a value, but we
2870 didn't, clobber the return registers so that they are not
2871 propagated live to the rest of the function. */
2872 clobber_return_register ();
2874 expand_null_return_1 (last_insn);
2877 /* Generate RTL to return from the current function, with value VAL. */
2880 expand_value_return (val)
2883 rtx last_insn = get_last_insn ();
2884 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2886 /* Copy the value to the return location
2887 unless it's already there. */
2889 if (return_reg != val)
2891 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2892 #ifdef PROMOTE_FUNCTION_RETURN
2893 int unsignedp = TREE_UNSIGNED (type);
2894 enum machine_mode old_mode
2895 = DECL_MODE (DECL_RESULT (current_function_decl));
2896 enum machine_mode mode
2897 = promote_mode (type, old_mode, &unsignedp, 1);
2899 if (mode != old_mode)
2900 val = convert_modes (mode, old_mode, val, unsignedp);
2902 if (GET_CODE (return_reg) == PARALLEL)
2903 emit_group_load (return_reg, val, int_size_in_bytes (type),
2906 emit_move_insn (return_reg, val);
2909 expand_null_return_1 (last_insn);
2912 /* Output a return with no value. If LAST_INSN is nonzero,
2913 pretend that the return takes place after LAST_INSN. */
2916 expand_null_return_1 (last_insn)
2919 rtx end_label = cleanup_label ? cleanup_label : return_label;
2921 clear_pending_stack_adjust ();
2922 do_pending_stack_adjust ();
2926 end_label = return_label = gen_label_rtx ();
2927 expand_goto_internal (NULL_TREE, end_label, last_insn);
2930 /* Generate RTL to evaluate the expression RETVAL and return it
2931 from the current function. */
2934 expand_return (retval)
2937 /* If there are any cleanups to be performed, then they will
2938 be inserted following LAST_INSN. It is desirable
2939 that the last_insn, for such purposes, should be the
2940 last insn before computing the return value. Otherwise, cleanups
2941 which call functions can clobber the return value. */
2942 /* ??? rms: I think that is erroneous, because in C++ it would
2943 run destructors on variables that might be used in the subsequent
2944 computation of the return value. */
2947 register rtx val = 0;
2950 /* If function wants no value, give it none. */
2951 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2953 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2955 expand_null_return ();
2959 if (retval == error_mark_node)
2961 /* Treat this like a return of no value from a function that returned a value. */
2963 expand_null_return ();
2966 else if (TREE_CODE (retval) == RESULT_DECL)
2967 retval_rhs = retval;
2968 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2969 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2970 retval_rhs = TREE_OPERAND (retval, 1);
2971 else if (VOID_TYPE_P (TREE_TYPE (retval)))
2972 /* Recognize tail-recursive call to void function. */
2973 retval_rhs = retval;
2975 retval_rhs = NULL_TREE;
2977 last_insn = get_last_insn ();
2979 /* Distribute return down conditional expr if either of the sides
2980 may involve tail recursion (see test below). This enhances the number
2981 of tail recursions we see. Don't do this always since it can produce
2982 sub-optimal code in some cases, and assignments are already distributed
2983 into conditional expressions when that would help. */
2985 if (optimize && retval_rhs != 0
2986 && frame_offset == 0
2987 && TREE_CODE (retval_rhs) == COND_EXPR
2988 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2989 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2991 rtx label = gen_label_rtx ();
2994 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2995 start_cleanup_deferral ();
2996 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2997 DECL_RESULT (current_function_decl),
2998 TREE_OPERAND (retval_rhs, 1));
2999 TREE_SIDE_EFFECTS (expr) = 1;
3000 expand_return (expr);
3003 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3004 DECL_RESULT (current_function_decl),
3005 TREE_OPERAND (retval_rhs, 2));
3006 TREE_SIDE_EFFECTS (expr) = 1;
3007 expand_return (expr);
3008 end_cleanup_deferral ();
3012 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3014 /* If the result is an aggregate that is being returned in one (or more)
3015 registers, load the registers here. The compiler currently can't handle
3016 copying a BLKmode value into registers. We could put this code in a
3017 more general area (for use by everyone instead of just function
3018 call/return), but until this feature is generally usable it is kept here
3019 (and in expand_call). The value must go into a pseudo in case there
3020 are cleanups that will clobber the real return register. */
3023 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3024 && GET_CODE (result_rtl) == REG)
3027 unsigned HOST_WIDE_INT bitpos, xbitpos;
3028 unsigned HOST_WIDE_INT big_endian_correction = 0;
3029 unsigned HOST_WIDE_INT bytes
3030 = int_size_in_bytes (TREE_TYPE (retval_rhs));
3031 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3032 unsigned int bitsize
3033 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
3034 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
3035 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
3036 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
3037 enum machine_mode tmpmode, result_reg_mode;
3041 expand_null_return ();
3045 /* Structures whose size is not a multiple of a word are aligned
3046 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
3047 machine, this means we must skip the empty high order bytes when
3048 calculating the bit offset. */
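/* Worked example (hedged; assumes 32-bit words and 8-bit units): for a
   6-byte structure, bytes % UNITS_PER_WORD == 2, so
   big_endian_correction = 32 - 2 * 8 = 16 and xbitpos below starts at 16,
   skipping the 16 bits that correspond to the empty high-order bytes.  */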
3049 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
3050 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
3053 /* Copy the structure BITSIZE bits at a time. */
3054 for (bitpos = 0, xbitpos = big_endian_correction;
3055 bitpos < bytes * BITS_PER_UNIT;
3056 bitpos += bitsize, xbitpos += bitsize)
3058 /* We need a new destination pseudo each time xbitpos is
3059 on a word boundary and when xbitpos == big_endian_correction
3060 (the first time through). */
3061 if (xbitpos % BITS_PER_WORD == 0
3062 || xbitpos == big_endian_correction)
3064 /* Generate an appropriate register. */
3065 dst = gen_reg_rtx (word_mode);
3066 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
3068 /* Clobber the destination before we move anything into it. */
3069 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
3072 /* We need a new source operand each time bitpos is on a word boundary. */
3074 if (bitpos % BITS_PER_WORD == 0)
3075 src = operand_subword_force (result_val,
3076 bitpos / BITS_PER_WORD,
3079 /* Use bitpos for the source extraction (left justified) and
3080 xbitpos for the destination store (right justified). */
3081 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
3082 extract_bit_field (src, bitsize,
3083 bitpos % BITS_PER_WORD, 1,
3084 NULL_RTX, word_mode, word_mode,
3085 bitsize, BITS_PER_WORD),
3086 bitsize, BITS_PER_WORD);
3089 /* Find the smallest integer mode large enough to hold the
3090 entire structure and use that mode instead of BLKmode
3091 on the USE insn for the return register. */
3092 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3093 tmpmode != VOIDmode;
3094 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
3095 /* Have we found a large enough mode? */
3096 if (GET_MODE_SIZE (tmpmode) >= bytes)
3099 /* No suitable mode found. */
3100 if (tmpmode == VOIDmode)
3103 PUT_MODE (result_rtl, tmpmode);
3105 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
3106 result_reg_mode = word_mode;
3108 result_reg_mode = tmpmode;
3109 result_reg = gen_reg_rtx (result_reg_mode);
3112 for (i = 0; i < n_regs; i++)
3113 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
3116 if (tmpmode != result_reg_mode)
3117 result_reg = gen_lowpart (tmpmode, result_reg);
3119 expand_value_return (result_reg);
3121 else if (retval_rhs != 0
3122 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3123 && (GET_CODE (result_rtl) == REG
3124 || (GET_CODE (result_rtl) == PARALLEL)))
3126 /* Calculate the return value into a temporary (usually a pseudo reg). */
3128 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
3129 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
3131 val = assign_temp (nt, 0, 0, 1);
3132 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
3133 val = force_not_mem (val);
3135 /* Return the calculated value, doing cleanups first. */
3136 expand_value_return (val);
3140 /* No cleanups or no hard reg used;
3141 calculate value into hard return reg. */
3142 expand_expr (retval, const0_rtx, VOIDmode, 0);
3144 expand_value_return (result_rtl);
3148 /* Return 1 if the end of the generated RTX is not a barrier.
3149 This means code already compiled can drop through. */
3152 drop_through_at_end_p ()
3154 rtx insn = get_last_insn ();
3155 while (insn && GET_CODE (insn) == NOTE)
3156 insn = PREV_INSN (insn);
3157 return insn && GET_CODE (insn) != BARRIER;
3160 /* Attempt to optimize a potential tail recursion call into a goto.
3161 ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
3162 where to place the jump to the tail recursion label.
3164 Return TRUE if the call was optimized into a goto. */
3167 optimize_tail_recursion (arguments, last_insn)
3171 /* Finish checking validity, and if valid emit code to set the
3172 argument variables for the new call. */
3173 if (tail_recursion_args (arguments, DECL_ARGUMENTS (current_function_decl)))
3175 if (tail_recursion_label == 0)
3177 tail_recursion_label = gen_label_rtx ();
3178 emit_label_after (tail_recursion_label,
3179 tail_recursion_reentry);
3182 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
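/* A hedged source-level illustration of the call being optimized:

	int
	gcd (int a, int b)
	{
	  if (b == 0)
	    return a;
	  return gcd (b, a % b);
	}

   For the self-call in tail position, tail_recursion_args below checks
   that the actuals are compatible with the formals and, if so, stores the
   new argument values into the formals so that the call can be replaced
   by the jump to tail_recursion_label emitted above.  */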
3189 /* Emit code to alter this function's formal parms for a tail-recursive call.
3190 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3191 FORMALS is the chain of decls of formals.
3192 Return 1 if this can be done;
3193 otherwise return 0 and do not emit any code. */
3196 tail_recursion_args (actuals, formals)
3197 tree actuals, formals;
3199 register tree a = actuals, f = formals;
3201 register rtx *argvec;
3203 /* Check that number and types of actuals are compatible
3204 with the formals. This is not always true in valid C code.
3205 Also check that no formal needs to be addressable
3206 and that all formals are scalars. */
3208 /* Also count the args. */
3210 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3212 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3213 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
3215 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3218 if (a != 0 || f != 0)
3221 /* Compute all the actuals. */
3223 argvec = (rtx *) alloca (i * sizeof (rtx));
3225 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3226 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
3228 /* Find which actual values refer to current values of previous formals.
3229 Copy each of them now, before any formal is changed. */
3231 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3235 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3236 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3242 argvec[i] = copy_to_reg (argvec[i]);
3245 /* Store the values of the actuals into the formals. */
3247 for (f = formals, a = actuals, i = 0; f;
3248 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3250 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3251 emit_move_insn (DECL_RTL (f), argvec[i]);
3253 convert_move (DECL_RTL (f), argvec[i],
3254 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3261 /* Generate the RTL code for entering a binding contour.
3262 The variables are declared one by one, by calls to `expand_decl'.
3264 FLAGS is a bitwise or of the following flags:
3266 1 - Nonzero if this construct should be visible to `exit_something'.
3269 2 - Nonzero if this contour does not require a
3270 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
3271 language-independent code should set this flag because they
3272 will not create corresponding BLOCK nodes. (There should be
3273 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
3274 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
3275 when expand_end_bindings is called.
3277 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
3278 optionally be supplied. If so, it becomes the NOTE_BLOCK for the note. */
3282 expand_start_bindings_and_block (flags, block)
3286 struct nesting *thisblock = ALLOC_NESTING ();
3288 int exit_flag = ((flags & 1) != 0);
3289 int block_flag = ((flags & 2) == 0);
3291 /* If a BLOCK is supplied, then the caller should be requesting a
3292 NOTE_INSN_BLOCK_BEG note. */
3293 if (!block_flag && block)
3296 /* Create a note to mark the beginning of the block. */
3299 note = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
3300 NOTE_BLOCK (note) = block;
3303 note = emit_note (NULL, NOTE_INSN_DELETED);
3305 /* Make an entry on block_stack for the block we are entering. */
3307 thisblock->next = block_stack;
3308 thisblock->all = nesting_stack;
3309 thisblock->depth = ++nesting_depth;
3310 thisblock->data.block.stack_level = 0;
3311 thisblock->data.block.cleanups = 0;
3312 thisblock->data.block.n_function_calls = 0;
3313 thisblock->data.block.exception_region = 0;
3314 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
3316 thisblock->data.block.conditional_code = 0;
3317 thisblock->data.block.last_unconditional_cleanup = note;
3318 /* When we insert instructions after the last unconditional cleanup,
3319 we don't adjust last_insn. That means that a later add_insn will
3320 clobber the instructions we've just added. The easiest way to
3321 fix this is to just insert another instruction here, so that the
3322 instructions inserted after the last unconditional cleanup are
3323 never the last instruction. */
3324 emit_note (NULL, NOTE_INSN_DELETED);
3325 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3328 && !(block_stack->data.block.cleanups == NULL_TREE
3329 && block_stack->data.block.outer_cleanups == NULL_TREE))
3330 thisblock->data.block.outer_cleanups
3331 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3332 block_stack->data.block.outer_cleanups);
3334 thisblock->data.block.outer_cleanups = 0;
3335 thisblock->data.block.label_chain = 0;
3336 thisblock->data.block.innermost_stack_block = stack_block_stack;
3337 thisblock->data.block.first_insn = note;
3338 thisblock->data.block.block_start_count = ++current_block_start_count;
3339 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3340 block_stack = thisblock;
3341 nesting_stack = thisblock;
3343 /* Make a new level for allocating stack slots. */
3347 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3348 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3349 expand_expr are made. After we end the region, we know that all
3350 space for all temporaries that were created by TARGET_EXPRs will be
3351 destroyed and their space freed for reuse. */
3354 expand_start_target_temps ()
3356 /* This is so that even if the result is preserved, the space
3357 allocated will be freed, as we know that it is no longer in use. */
3360 /* Start a new binding layer that will keep track of all cleanup
3361 actions to be performed. */
3362 expand_start_bindings (2);
3364 target_temp_slot_level = temp_slot_level;
3368 expand_end_target_temps ()
3370 expand_end_bindings (NULL_TREE, 0, 0);
3372 /* This is so that even if the result is preserved, the space
3373 allocated will be freed, as we know that it is no longer in use. */
3377 /* Given a pointer to a BLOCK node return non-zero if (and only if) the node
3378 in question represents the outermost pair of curly braces (i.e. the "body
3379 block") of a function or method.
3381 For any BLOCK node representing a "body block" of a function or method, the
3382 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
3383 represents the outermost (function) scope for the function or method (i.e.
3384 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
3385 *that* node in turn will point to the relevant FUNCTION_DECL node. */
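/* Sketch of the nesting being tested for (hedged):

	FUNCTION_DECL
	  BLOCK		outermost scope, includes the formal parameters
	    BLOCK	the "body block": the function's outer curly braces

   Only the inner BLOCK, whose grandparent is the FUNCTION_DECL, makes the
   predicate below return nonzero.  */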
3388 is_body_block (stmt)
3391 if (TREE_CODE (stmt) == BLOCK)
3393 tree parent = BLOCK_SUPERCONTEXT (stmt);
3395 if (parent && TREE_CODE (parent) == BLOCK)
3397 tree grandparent = BLOCK_SUPERCONTEXT (parent);
3399 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
3407 /* True if we are currently emitting insns in an area of output code
3408 that is controlled by a conditional expression. This is used by
3409 the cleanup handling code to generate conditional cleanup actions. */
3412 conditional_context ()
3414 return block_stack && block_stack->data.block.conditional_code;
3417 /* Return an opaque pointer to the current nesting level, so frontend code
3418 can check its own sanity. */
3421 current_nesting_level ()
3423 return cfun ? block_stack : 0;
3426 /* Emit a handler label for a nonlocal goto handler.
3427 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3430 expand_nl_handler_label (slot, before_insn)
3431 rtx slot, before_insn;
3434 rtx handler_label = gen_label_rtx ();
3436 /* Don't let cleanup_cfg delete the handler. */
3437 LABEL_PRESERVE_P (handler_label) = 1;
3440 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3441 insns = get_insns ();
3443 emit_insns_before (insns, before_insn);
3445 emit_label (handler_label);
3447 return handler_label;
3450 /* Emit code to restore vital registers at the beginning of a nonlocal goto handler. */
3453 expand_nl_goto_receiver ()
3455 #ifdef HAVE_nonlocal_goto
3456 if (! HAVE_nonlocal_goto)
3458 /* First adjust our frame pointer to its actual value. It was
3459 previously set to the start of the virtual area corresponding to
3460 the stacked variables when we branched here and now needs to be
3461 adjusted to the actual hardware fp value.
3463 Assignments to virtual registers are converted by
3464 instantiate_virtual_regs into the corresponding assignment
3465 to the underlying register (fp in this case) that makes
3466 the original assignment true.
3467 So the following insn will actually be
3468 decrementing fp by STARTING_FRAME_OFFSET. */
3469 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3471 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3472 if (fixed_regs[ARG_POINTER_REGNUM])
3474 #ifdef ELIMINABLE_REGS
3475 /* If the argument pointer can be eliminated in favor of the
3476 frame pointer, we don't need to restore it. We assume here
3477 that if such an elimination is present, it can always be used.
3478 This is the case on all known machines; if we don't make this
3479 assumption, we do unnecessary saving on many machines. */
3480 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3483 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
3484 if (elim_regs[i].from == ARG_POINTER_REGNUM
3485 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3488 if (i == ARRAY_SIZE (elim_regs))
3491 /* Now restore our arg pointer from the address at which it
3492 was saved in our stack frame.
3493 If there hasn't been space allocated for it yet, make some now. */
3495 if (arg_pointer_save_area == 0)
3496 arg_pointer_save_area
3497 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3498 emit_move_insn (virtual_incoming_args_rtx,
3499 /* We need a pseudo here, or else
3500 instantiate_virtual_regs_1 complains. */
3501 copy_to_reg (arg_pointer_save_area));
3506 #ifdef HAVE_nonlocal_goto_receiver
3507 if (HAVE_nonlocal_goto_receiver)
3508 emit_insn (gen_nonlocal_goto_receiver ());
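/* A hedged GNU C illustration of what these receivers support: a nested
   function doing a nonlocal goto to a label declared in its containing
   function (apply_to_each is a hypothetical caller):

	void
	outer (int n)
	{
	  __label__ done;
	  void inner (int i) { if (i == n) goto done; }
	  apply_to_each (n, inner);
	 done:
	  return;
	}

   When `inner' jumps to `done', control re-enters `outer' through one of
   the handler labels emitted by expand_nl_handler_label, and the code
   above first restores the frame pointer (and, where needed, the argument
   pointer) before going to the real label.  */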
3512 /* Make handlers for nonlocal gotos taking place in the function calls in block THISBLOCK. */
3516 expand_nl_goto_receivers (thisblock)
3517 struct nesting *thisblock;
3520 rtx afterward = gen_label_rtx ();
3525 /* Record the handler address in the stack slot for that purpose,
3526 during this block, saving and restoring the outer value. */
3527 if (thisblock->next != 0)
3528 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3530 rtx save_receiver = gen_reg_rtx (Pmode);
3531 emit_move_insn (XEXP (slot, 0), save_receiver);
3534 emit_move_insn (save_receiver, XEXP (slot, 0));
3535 insns = get_insns ();
3537 emit_insns_before (insns, thisblock->data.block.first_insn);
3540 /* Jump around the handlers; they run only when specially invoked. */
3541 emit_jump (afterward);
3543 /* Make a separate handler for each label. */
3544 link = nonlocal_labels;
3545 slot = nonlocal_goto_handler_slots;
3546 label_list = NULL_RTX;
3547 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3548 /* Skip any labels we shouldn't be able to jump to from here;
3549 we generate one special handler for all of them below, which just calls abort. */
3551 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3554 lab = expand_nl_handler_label (XEXP (slot, 0),
3555 thisblock->data.block.first_insn);
3556 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3558 expand_nl_goto_receiver ();
3560 /* Jump to the "real" nonlocal label. */
3561 expand_goto (TREE_VALUE (link));
3564 /* A second pass over all nonlocal labels; this time we handle those
3565 we should not be able to jump to at this point. */
3566 link = nonlocal_labels;
3567 slot = nonlocal_goto_handler_slots;
3569 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3570 if (DECL_TOO_LATE (TREE_VALUE (link)))
3573 lab = expand_nl_handler_label (XEXP (slot, 0),
3574 thisblock->data.block.first_insn);
3575 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3581 expand_nl_goto_receiver ();
3582 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "abort"), 0,
3587 nonlocal_goto_handler_labels = label_list;
3588 emit_label (afterward);
3591 /* Warn about any unused VARS (which may contain nodes other than
3592 VAR_DECLs, but such nodes are ignored). The nodes are connected
3593 via the TREE_CHAIN field. */
3596 warn_about_unused_variables (vars)
3601 if (warn_unused_variable)
3602 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3603 if (TREE_CODE (decl) == VAR_DECL
3604 && ! TREE_USED (decl)
3605 && ! DECL_IN_SYSTEM_HEADER (decl)
3606 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3607 warning_with_decl (decl, "unused variable `%s'");
3610 /* Generate RTL code to terminate a binding contour.
3612 VARS is the chain of VAR_DECL nodes for the variables bound in this
3613 contour. There may actually be other nodes in this chain, but any
3614 nodes other than VAR_DECLS are ignored.
3616 MARK_ENDS is nonzero if we should put a note at the beginning
3617 and end of this binding contour.
3619 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3620 (That is true automatically if the contour has a saved stack level.) */
3623 expand_end_bindings (vars, mark_ends, dont_jump_in)
3628 register struct nesting *thisblock = block_stack;
3630 /* If any of the variables in this scope were not used, warn the user. */
3632 warn_about_unused_variables (vars);
3634 if (thisblock->exit_label)
3636 do_pending_stack_adjust ();
3637 emit_label (thisblock->exit_label);
3640 /* If necessary, make handlers for nonlocal gotos taking
3641 place in the function calls in this block. */
3642 if (function_call_count != thisblock->data.block.n_function_calls
3644 /* Make handler for outermost block
3645 if there were any nonlocal gotos to this function. */
3646 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3647 /* Make handler for inner block if it has something
3648 special to do when you jump out of it. */
3649 : (thisblock->data.block.cleanups != 0
3650 || thisblock->data.block.stack_level != 0)))
3651 expand_nl_goto_receivers (thisblock);
3653 /* Don't allow jumping into a block that has a stack level.
3654 Cleanups are allowed, though. */
3656 || thisblock->data.block.stack_level != 0)
3658 struct label_chain *chain;
3660 /* Any labels in this block are no longer valid to go to.
3661 Mark them to cause an error message. */
3662 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3664 DECL_TOO_LATE (chain->label) = 1;
3665 /* If any goto without a fixup came to this label,
3666 that must be an error, because gotos without fixups
3667 come from outside all saved stack-levels. */
3668 if (TREE_ADDRESSABLE (chain->label))
3669 error_with_decl (chain->label,
3670 "label `%s' used before containing binding contour");
3674 /* Restore stack level in effect before the block
3675 (only if variable-size objects allocated). */
3676 /* Perform any cleanups associated with the block. */
3678 if (thisblock->data.block.stack_level != 0
3679 || thisblock->data.block.cleanups != 0)
3684 /* Don't let cleanups affect ({...}) constructs. */
3685 int old_expr_stmts_for_value = expr_stmts_for_value;
3686 rtx old_last_expr_value = last_expr_value;
3687 tree old_last_expr_type = last_expr_type;
3688 expr_stmts_for_value = 0;
3690 /* Only clean up here if this point can actually be reached. */
3691 insn = get_last_insn ();
3692 if (GET_CODE (insn) == NOTE)
3693 insn = prev_nonnote_insn (insn);
3694 reachable = (! insn || GET_CODE (insn) != BARRIER);
3696 /* Do the cleanups. */
3697 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3699 do_pending_stack_adjust ();
3701 expr_stmts_for_value = old_expr_stmts_for_value;
3702 last_expr_value = old_last_expr_value;
3703 last_expr_type = old_last_expr_type;
3705 /* Restore the stack level. */
3707 if (reachable && thisblock->data.block.stack_level != 0)
3709 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3710 thisblock->data.block.stack_level, NULL_RTX);
3711 if (nonlocal_goto_handler_slots != 0)
3712 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3716 /* Any gotos out of this block must also do these things.
3717 Also report any gotos with fixups that came to labels in this block. */
3719 fixup_gotos (thisblock,
3720 thisblock->data.block.stack_level,
3721 thisblock->data.block.cleanups,
3722 thisblock->data.block.first_insn,
3726 /* Mark the beginning and end of the scope if requested.
3727 We do this now, after running cleanups on the variables
3728 just going out of scope, so they are in scope for their cleanups. */
3732 rtx note = emit_note (NULL, NOTE_INSN_BLOCK_END);
3733 NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
3736 /* Get rid of the beginning-mark if we don't make an end-mark. */
3737 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3739 /* Restore the temporary level of TARGET_EXPRs. */
3740 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
3742 /* Restore block_stack level for containing block. */
3744 stack_block_stack = thisblock->data.block.innermost_stack_block;
3745 POPSTACK (block_stack);
3747 /* Pop the stack slot nesting and free any slots at this level. */
3751 /* Generate code to save the stack pointer at the start of the current block
3752 and set up to restore it on exit. */
3755 save_stack_pointer ()
3757 struct nesting *thisblock = block_stack;
3759 if (thisblock->data.block.stack_level == 0)
3761 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3762 &thisblock->data.block.stack_level,
3763 thisblock->data.block.first_insn);
3764 stack_block_stack = thisblock;
3768 /* Generate RTL for the automatic variable declaration DECL.
3769 (Other kinds of declarations are simply ignored if seen here.) */
3775 struct nesting *thisblock;
3778 type = TREE_TYPE (decl);
3780 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
3781 type in case this node is used in a reference. */
3782 if (TREE_CODE (decl) == CONST_DECL)
3784 DECL_MODE (decl) = TYPE_MODE (type);
3785 DECL_ALIGN (decl) = TYPE_ALIGN (type);
3786 DECL_SIZE (decl) = TYPE_SIZE (type);
3787 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
3791 /* Otherwise, only automatic variables need any expansion done. Static and
3792 external variables, and external functions, will be handled by
3793 `assemble_variable' (called from finish_decl). TYPE_DECL requires
3794 nothing. PARM_DECLs are handled in `assign_parms'. */
3795 if (TREE_CODE (decl) != VAR_DECL)
3798 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3801 thisblock = block_stack;
3803 /* Create the RTL representation for the variable. */
3805 if (type == error_mark_node)
3806 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
3808 else if (DECL_SIZE (decl) == 0)
3809 /* Variable with incomplete type. */
3811 if (DECL_INITIAL (decl) == 0)
3812 /* Error message was already done; now avoid a crash. */
3813 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
3815 /* An initializer is going to decide the size of this array.
3816 Until we know the size, represent its address with a reg. */
3817 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode)));
3819 set_mem_attributes (DECL_RTL (decl), decl, 1);
3821 else if (DECL_MODE (decl) != BLKmode
3822 /* If -ffloat-store, don't put explicit float vars into registers. */
3824 && !(flag_float_store
3825 && TREE_CODE (type) == REAL_TYPE)
3826 && ! TREE_THIS_VOLATILE (decl)
3827 && (DECL_REGISTER (decl) || optimize)
3828 /* if -fcheck-memory-usage, check all variables. */
3829 && ! current_function_check_memory_usage)
3831 /* Automatic variable that can go in a register. */
3832 int unsignedp = TREE_UNSIGNED (type);
3833 enum machine_mode reg_mode
3834 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3836 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
3837 mark_user_reg (DECL_RTL (decl));
3839 if (POINTER_TYPE_P (type))
3840 mark_reg_pointer (DECL_RTL (decl),
3841 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
3843 maybe_set_unchanging (DECL_RTL (decl), decl);
3845 /* If something wants our address, try to use ADDRESSOF. */
3846 if (TREE_ADDRESSABLE (decl))
3847 put_var_into_stack (decl);
3850 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
3851 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3852 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
3853 STACK_CHECK_MAX_VAR_SIZE)))
3855 /* Variable of fixed size that goes on the stack. */
3859 /* If we previously made RTL for this decl, it must be an array
3860 whose size was determined by the initializer.
3861 The old address was a register; set that register now
3862 to the proper address. */
3863 if (DECL_RTL_SET_P (decl))
3865 if (GET_CODE (DECL_RTL (decl)) != MEM
3866 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3868 oldaddr = XEXP (DECL_RTL (decl), 0);
3872 assign_temp (TREE_TYPE (decl), 1, 1, 1));
3874 /* Set alignment we actually gave this decl. */
3875 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3876 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3877 DECL_USER_ALIGN (decl) = 0;
3881 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3882 if (addr != oldaddr)
3883 emit_move_insn (oldaddr, addr);
3887 /* Dynamic-size object: must push space on the stack. */
3891 /* Record the stack pointer on entry to block, if we have
3892 not already done so. */
3893 do_pending_stack_adjust ();
3894 save_stack_pointer ();
3896 /* In function-at-a-time mode, variable_size doesn't expand this, so do it now. */
3898 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
3899 expand_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
3900 const0_rtx, VOIDmode, 0);
3902 /* Compute the variable's size, in bytes. */
3903 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
3906 /* Allocate space on the stack for the variable. Note that
3907 DECL_ALIGN says how the variable is to be aligned and we
3908 cannot use it to conclude anything about the alignment of the size. */
3910 address = allocate_dynamic_stack_space (size, NULL_RTX,
3911 TYPE_ALIGN (TREE_TYPE (decl)));
3913 /* Reference the variable indirectly through that rtx. */
3914 SET_DECL_RTL (decl, gen_rtx_MEM (DECL_MODE (decl), address));
3916 set_mem_attributes (DECL_RTL (decl), decl, 1);
3918 /* Indicate the alignment we actually gave this variable. */
3919 #ifdef STACK_BOUNDARY
3920 DECL_ALIGN (decl) = STACK_BOUNDARY;
3922 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3924 DECL_USER_ALIGN (decl) = 0;
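/* A hedged illustration of the dynamic path above: a block-scope
   declaration whose size is only known at run time, e.g.

	void
	f (int n)
	{
	  char buf[n];
	  buf[0] = 0;
	}

   saves the stack pointer on entry to the block, calls
   allocate_dynamic_stack_space for the size of BUF, and makes
   DECL_RTL (buf) a MEM at the returned address; the saved stack pointer
   is restored when the block is exited.  */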
3928 /* Emit code to perform the initialization of a declaration DECL. */
3931 expand_decl_init (decl)
3934 int was_used = TREE_USED (decl);
3936 /* If this is a CONST_DECL, we don't have to generate any code, but
3937 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3938 to be set while in the obstack containing the constant. If we don't
3939 do this, we can lose if we have functions nested three deep and the middle
3940 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3941 the innermost function is the first to expand that STRING_CST. */
3942 if (TREE_CODE (decl) == CONST_DECL)
3944 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3945 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3946 EXPAND_INITIALIZER);
3950 if (TREE_STATIC (decl))
3953 /* Compute and store the initial value now. */
3955 if (DECL_INITIAL (decl) == error_mark_node)
3957 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3959 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3960 || code == POINTER_TYPE || code == REFERENCE_TYPE)
3961 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3965 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3967 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3968 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3972 /* Don't let the initialization count as "using" the variable. */
3973 TREE_USED (decl) = was_used;
3975 /* Free any temporaries we made while initializing the decl. */
3976 preserve_temp_slots (NULL_RTX);
3980 /* CLEANUP is an expression to be executed at exit from this binding contour;
3981 for example, in C++, it might call the destructor for this variable.
3983 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
3984 CLEANUP multiple times, and have the correct semantics. This
3985 happens in exception handling, for gotos, returns, and breaks that
3986 leave the current scope.
3988 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3989 that is not associated with any particular variable. */
3992 expand_decl_cleanup (decl, cleanup)
3995 struct nesting *thisblock;
3997 /* Error if we are not in any block. */
3998 if (cfun == 0 || block_stack == 0)
4001 thisblock = block_stack;
4003 /* Record the cleanup if there is one. */
4009 tree *cleanups = &thisblock->data.block.cleanups;
4010 int cond_context = conditional_context ();
4014 rtx flag = gen_reg_rtx (word_mode);
4019 emit_move_insn (flag, const0_rtx);
4020 set_flag_0 = get_insns ();
4023 thisblock->data.block.last_unconditional_cleanup
4024 = emit_insns_after (set_flag_0,
4025 thisblock->data.block.last_unconditional_cleanup);
4027 emit_move_insn (flag, const1_rtx);
4029 cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
4030 SET_DECL_RTL (cond, flag);
4032 /* Conditionalize the cleanup. */
4033 cleanup = build (COND_EXPR, void_type_node,
4034 truthvalue_conversion (cond),
4035 cleanup, integer_zero_node);
4036 cleanup = fold (cleanup);
4038 cleanups = thisblock->data.block.cleanup_ptr;
4041 cleanup = unsave_expr (cleanup);
4043 t = *cleanups = tree_cons (decl, cleanup, *cleanups);
4046 /* If this block has a cleanup, it belongs in stack_block_stack. */
4047 stack_block_stack = thisblock;
4054 if (! using_eh_for_cleanups_p)
4055 TREE_ADDRESSABLE (t) = 1;
4057 expand_eh_region_start ();
4064 thisblock->data.block.last_unconditional_cleanup
4065 = emit_insns_after (seq,
4066 thisblock->data.block.last_unconditional_cleanup);
4070 thisblock->data.block.last_unconditional_cleanup
4072 /* When we insert instructions after the last unconditional cleanup,
4073 we don't adjust last_insn. That means that a later add_insn will
4074 clobber the instructions we've just added. The easiest way to
4075 fix this is to just insert another instruction here, so that the
4076 instructions inserted after the last unconditional cleanup are
4077 never the last instruction. */
4078 emit_note (NULL, NOTE_INSN_DELETED);
4079 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
4085 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
4086 DECL_ELTS is the list of elements that belong to DECL's type.
4087 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
4090 expand_anon_union_decl (decl, cleanup, decl_elts)
4091 tree decl, cleanup, decl_elts;
4093 struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
4097 /* If any of the elements are addressable, so is the entire union. */
4098 for (t = decl_elts; t; t = TREE_CHAIN (t))
4099 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
4101 TREE_ADDRESSABLE (decl) = 1;
4106 expand_decl_cleanup (decl, cleanup);
4107 x = DECL_RTL (decl);
4109 /* Go through the elements, assigning RTL to each. */
4110 for (t = decl_elts; t; t = TREE_CHAIN (t))
4112 tree decl_elt = TREE_VALUE (t);
4113 tree cleanup_elt = TREE_PURPOSE (t);
4114 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4116 /* Propagate the union's alignment to the elements. */
4117 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
4118 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
4120 /* If the element has BLKmode and the union doesn't, the union is
4121 aligned such that the element doesn't need to have BLKmode, so
4122 change the element's mode to the appropriate one for its size. */
4123 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4124 DECL_MODE (decl_elt) = mode
4125 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
4127 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4128 instead create a new MEM rtx with the proper mode. */
4129 if (GET_CODE (x) == MEM)
4131 if (mode == GET_MODE (x))
4132 SET_DECL_RTL (decl_elt, x);
4134 SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
4136 else if (GET_CODE (x) == REG)
4138 if (mode == GET_MODE (x))
4139 SET_DECL_RTL (decl_elt, x);
4141 SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
4146 /* Record the cleanup if there is one. */
4149 thisblock->data.block.cleanups
4150 = tree_cons (decl_elt, cleanup_elt,
4151 thisblock->data.block.cleanups);
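/* Illustrative sketch only: for a C/C++ anonymous union such as
   `union { int i; float f; };', the front end builds one VAR_DECL for
   the union object and one per member, then hands them to
   expand_anon_union_decl so every member shares the union's storage.
   DECL, I_DECL and F_DECL below are assumed to have been created
   already; no cleanups are attached (TREE_PURPOSE is left null).  */
#if 0
  {
    tree elts = tree_cons (NULL_TREE, i_decl,
			   tree_cons (NULL_TREE, f_decl, NULL_TREE));
    expand_anon_union_decl (decl, NULL_TREE, elts);
  }
#endif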
4155 /* Expand a list of cleanups LIST.
4156 Elements may be expressions or may be nested lists.
4158 If DONT_DO is nonnull, then any list-element
4159 whose TREE_PURPOSE matches DONT_DO is omitted.
4160 This is sometimes used to avoid a cleanup associated with
4161 a value that is being returned out of the scope.
4163 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
4164 goto and handle protection regions specially in that case.
4166 If REACHABLE, we emit code, otherwise just inform the exception handling
4167 code about this finalization. */
4170 expand_cleanups (list, dont_do, in_fixup, reachable)
4177 for (tail = list; tail; tail = TREE_CHAIN (tail))
4178 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
4180 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4181 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
4184 if (! in_fixup && using_eh_for_cleanups_p)
4185 expand_eh_region_end_cleanup (TREE_VALUE (tail));
4189 /* Cleanups may be run multiple times. For example,
4190 when exiting a binding contour, we expand the
4191 cleanups associated with that contour. When a goto
4192 within that binding contour has a target outside that
4193 contour, it will expand all cleanups from its scope to
4194 the target. Though the cleanups are expanded multiple
4195 times, the control paths are non-overlapping so the
4196 cleanups will not be executed twice. */
4198 /* We may need to protect from outer cleanups. */
4199 if (in_fixup && using_eh_for_cleanups_p)
4201 expand_eh_region_start ();
4203 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4205 expand_eh_region_end_fixup (TREE_VALUE (tail));
4208 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4216 /* Mark the context we are emitting RTL for as a conditional
4217 context, so that any cleanup actions we register with
4218 expand_decl_init will be properly conditionalized when those
4219 cleanup actions are later performed. Must be called before any
4220 expression (tree) is expanded that is within a conditional context. */
4223 start_cleanup_deferral ()
4225 /* block_stack can be NULL if we are inside the parameter list. It is
4226 OK to do nothing, because cleanups aren't possible here. */
4228 ++block_stack->data.block.conditional_code;
4231 /* Mark the end of a conditional region of code. Because cleanup
4232 deferrals may be nested, we may still be in a conditional region
4233 after we end the currently deferred cleanups; only after we end all
4234 deferred cleanups are we back in unconditional code. */
4237 end_cleanup_deferral ()
4239 /* block_stack can be NULL if we are inside the parameter list. It is
4240 OK to do nothing, because cleanups aren't possible here. */
4242 --block_stack->data.block.conditional_code;
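/* Illustrative sketch only: a front end expanding one arm of a
   conditional expression brackets that expansion so that any cleanups
   registered while it is expanded are guarded by a run-time flag.
   THEN_ARM is a placeholder for the tree being expanded.  */
#if 0
  {
    start_cleanup_deferral ();
    expand_expr (then_arm, const0_rtx, VOIDmode, 0);
    end_cleanup_deferral ();
  }
#endif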
4245 /* Move all cleanups from the current block_stack
4246 to the containing block_stack, where they are assumed to
4247 have been created. If anything can cause a temporary to
4248 be created, but not expanded for more than one level of
4249 block_stacks, then this code will have to change. */
4254 struct nesting *block = block_stack;
4255 struct nesting *outer = block->next;
4257 outer->data.block.cleanups
4258 = chainon (block->data.block.cleanups,
4259 outer->data.block.cleanups);
4260 block->data.block.cleanups = 0;
4264 last_cleanup_this_contour ()
4266 if (block_stack == 0)
4269 return block_stack->data.block.cleanups;
4272 /* Return 1 if there are any pending cleanups at this point.
4273 If THIS_CONTOUR is nonzero, check the current contour as well.
4274 Otherwise, look only at the contours that enclose this one. */
4277 any_pending_cleanups (this_contour)
4280 struct nesting *block;
4282 if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
4285 if (this_contour && block_stack->data.block.cleanups != NULL)
4287 if (block_stack->data.block.cleanups == 0
4288 && block_stack->data.block.outer_cleanups == 0)
4291 for (block = block_stack->next; block; block = block->next)
4292 if (block->data.block.cleanups != 0)
4298 /* Enter a case (Pascal) or switch (C) statement.
4299 Push a block onto case_stack and nesting_stack
4300 to accumulate the case-labels that are seen
4301 and to record the labels generated for the statement.
4303 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4304 Otherwise, this construct is transparent for `exit_something'.
4306 EXPR is the index-expression to be dispatched on.
4307 TYPE is its nominal type. We could simply convert EXPR to this type,
4308 but instead we take short cuts. */
4311 expand_start_case (exit_flag, expr, type, printname)
4315 const char *printname;
4317 register struct nesting *thiscase = ALLOC_NESTING ();
4319 /* Make an entry on case_stack for the case we are entering. */
4321 thiscase->next = case_stack;
4322 thiscase->all = nesting_stack;
4323 thiscase->depth = ++nesting_depth;
4324 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4325 thiscase->data.case_stmt.case_list = 0;
4326 thiscase->data.case_stmt.index_expr = expr;
4327 thiscase->data.case_stmt.nominal_type = type;
4328 thiscase->data.case_stmt.default_label = 0;
4329 thiscase->data.case_stmt.printname = printname;
4330 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4331 case_stack = thiscase;
4332 nesting_stack = thiscase;
4334 do_pending_stack_adjust ();
4336 /* Make sure case_stmt.start points to something that won't
4337 need any transformation before expand_end_case. */
4338 if (GET_CODE (get_last_insn ()) != NOTE)
4339 emit_note (NULL, NOTE_INSN_DELETED);
4341 thiscase->data.case_stmt.start = get_last_insn ();
4343 start_cleanup_deferral ();
4346 /* Start a "dummy case statement" within which case labels are invalid
4347 and are not connected to any larger real case statement.
4348 This can be used if you don't want to let a case statement jump
4349 into the middle of certain kinds of constructs. */
4352 expand_start_case_dummy ()
4354 register struct nesting *thiscase = ALLOC_NESTING ();
4356 /* Make an entry on case_stack for the dummy. */
4358 thiscase->next = case_stack;
4359 thiscase->all = nesting_stack;
4360 thiscase->depth = ++nesting_depth;
4361 thiscase->exit_label = 0;
4362 thiscase->data.case_stmt.case_list = 0;
4363 thiscase->data.case_stmt.start = 0;
4364 thiscase->data.case_stmt.nominal_type = 0;
4365 thiscase->data.case_stmt.default_label = 0;
4366 case_stack = thiscase;
4367 nesting_stack = thiscase;
4368 start_cleanup_deferral ();
4371 /* End a dummy case statement. */
4374 expand_end_case_dummy ()
4376 end_cleanup_deferral ();
4377 POPSTACK (case_stack);
4380 /* Return the data type of the index-expression
4381 of the innermost case statement, or null if none. */
4384 case_index_expr_type ()
4387 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4394 /* If this is the first label, warn if any insns have been emitted. */
4395 if (case_stack->data.case_stmt.line_number_status >= 0)
4399 restore_line_number_status
4400 (case_stack->data.case_stmt.line_number_status);
4401 case_stack->data.case_stmt.line_number_status = -1;
4403 for (insn = case_stack->data.case_stmt.start;
4405 insn = NEXT_INSN (insn))
4407 if (GET_CODE (insn) == CODE_LABEL)
4409 if (GET_CODE (insn) != NOTE
4410 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4413 insn = PREV_INSN (insn);
4414 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4416 /* If insn is zero, then there must have been a syntax error. */
4418 warning_with_file_and_line (NOTE_SOURCE_FILE (insn),
4419 NOTE_LINE_NUMBER (insn),
4420 "unreachable code at beginning of %s",
4421 case_stack->data.case_stmt.printname);
4428 /* Accumulate one case or default label inside a case or switch statement.
4429 VALUE is the value of the case (a null pointer, for a default label).
4430 The function CONVERTER, when applied to arguments T and V,
4431 converts the value V to the type T.
4433 If not currently inside a case or switch statement, return 1 and do
4434 nothing. The caller will print a language-specific error message.
4435 If VALUE is a duplicate or overlaps, return 2 and do nothing
4436 except store the (first) duplicate node in *DUPLICATE.
4437 If VALUE is out of range, return 3 and do nothing.
4438 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4439 Return 0 on success.
4441 Extended to handle range statements. */
4444 pushcase (value, converter, label, duplicate)
4445 register tree value;
4446 tree (*converter) PARAMS ((tree, tree));
4447 register tree label;
4453 /* Fail if not inside a real case statement. */
4454 if (! (case_stack && case_stack->data.case_stmt.start))
4457 if (stack_block_stack
4458 && stack_block_stack->depth > case_stack->depth)
4461 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4462 nominal_type = case_stack->data.case_stmt.nominal_type;
4464 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4465 if (index_type == error_mark_node)
4468 /* Convert VALUE to the type in which the comparisons are nominally done. */
4470 value = (*converter) (nominal_type, value);
4474 /* Fail if this value is out of range for the actual type of the index
4475 (which may be narrower than NOMINAL_TYPE). */
4477 && (TREE_CONSTANT_OVERFLOW (value)
4478 || ! int_fits_type_p (value, index_type)))
4481 return add_case_node (value, value, label, duplicate);
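/* Illustrative sketch only of how a caller might map the return codes
   documented above onto diagnostics.  The message texts are invented,
   and `convert_for_case' is a placeholder for the language's converter
   function.  */
#if 0
  {
    tree duplicate;

    switch (pushcase (case_value, convert_for_case, case_label, &duplicate))
      {
      case 1:
	error ("case label not within a switch statement");
	break;
      case 2:
	error ("duplicate case value");
	break;
      case 3:
	error ("case value out of range");
	break;
      }
  }
#endif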
4484 /* Like pushcase but this case applies to all values between VALUE1 and
4485 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4486 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4487 starts at VALUE1 and ends at the highest value of the index type.
4488 If both are NULL, this case applies to all values.
4490 The return value is the same as that of pushcase but there is one
4491 additional error code: 4 means the specified range was empty. */
4494 pushcase_range (value1, value2, converter, label, duplicate)
4495 register tree value1, value2;
4496 tree (*converter) PARAMS ((tree, tree));
4497 register tree label;
4503 /* Fail if not inside a real case statement. */
4504 if (! (case_stack && case_stack->data.case_stmt.start))
4507 if (stack_block_stack
4508 && stack_block_stack->depth > case_stack->depth)
4511 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4512 nominal_type = case_stack->data.case_stmt.nominal_type;
4514 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4515 if (index_type == error_mark_node)
4520 /* Convert VALUEs to type in which the comparisons are nominally done
4521 and replace any unspecified value with the corresponding bound. */
4523 value1 = TYPE_MIN_VALUE (index_type);
4525 value2 = TYPE_MAX_VALUE (index_type);
4527 /* Fail if the range is empty. Do this before any conversion since
4528 we want to allow out-of-range empty ranges. */
4529 if (value2 != 0 && tree_int_cst_lt (value2, value1))
4532 /* If the max was unbounded, use the max of the nominal_type we are
4533 converting to. Do this after the < check above to suppress false positives. */
4536 value2 = TYPE_MAX_VALUE (nominal_type);
4538 value1 = (*converter) (nominal_type, value1);
4539 value2 = (*converter) (nominal_type, value2);
4541 /* Fail if these values are out of range. */
4542 if (TREE_CONSTANT_OVERFLOW (value1)
4543 || ! int_fits_type_p (value1, index_type))
4546 if (TREE_CONSTANT_OVERFLOW (value2)
4547 || ! int_fits_type_p (value2, index_type))
4550 return add_case_node (value1, value2, label, duplicate);
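/* Illustrative sketch only: for the GNU C case-range extension
   `case 1 ... 5:' a front end registers the whole range in one call.
   Passing a null VALUE1 or VALUE2 extends the range to the matching
   bound of the index type, as described above.  `convert_for_case' is
   again a placeholder converter.  */
#if 0
  {
    tree duplicate;

    if (pushcase_range (low_value, high_value, convert_for_case,
			case_label, &duplicate) == 4)
      error ("empty range specified");
  }
#endif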
4553 /* Do the actual insertion of a case label for pushcase and pushcase_range
4554 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4555 slowdown for large switch statements. */
4558 add_case_node (low, high, label, duplicate)
4563 struct case_node *p, **q, *r;
4565 /* If there's no HIGH value, then this is not a case range; it's
4566 just a simple case label. But that's just a degenerate case range. */
4571 /* Handle default labels specially. */
4574 if (case_stack->data.case_stmt.default_label != 0)
4576 *duplicate = case_stack->data.case_stmt.default_label;
4579 case_stack->data.case_stmt.default_label = label;
4580 expand_label (label);
4584 q = &case_stack->data.case_stmt.case_list;
4591 /* Keep going past elements distinctly greater than HIGH. */
4592 if (tree_int_cst_lt (high, p->low))
4595 /* or distinctly less than LOW. */
4596 else if (tree_int_cst_lt (p->high, low))
4601 /* We have an overlap; this is an error. */
4602 *duplicate = p->code_label;
4607 /* Add this label to the chain, and succeed. */
4609 r = (struct case_node *) xmalloc (sizeof (struct case_node));
4612 /* If the bounds are equal, turn this into the one-value case. */
4613 if (tree_int_cst_equal (low, high))
4618 r->code_label = label;
4619 expand_label (label);
4629 struct case_node *s;
4635 if (! (b = p->balance))
4636 /* Growth propagation from left side. */
4643 if ((p->left = s = r->right))
4652 if ((r->parent = s))
4660 case_stack->data.case_stmt.case_list = r;
4663 /* r->balance == +1 */
4668 struct case_node *t = r->right;
4670 if ((p->left = s = t->right))
4674 if ((r->right = s = t->left))
4688 if ((t->parent = s))
4696 case_stack->data.case_stmt.case_list = t;
4703 /* p->balance == +1; growth of left side balances the node. */
4713 if (! (b = p->balance))
4714 /* Growth propagation from right side. */
4722 if ((p->right = s = r->left))
4730 if ((r->parent = s))
4739 case_stack->data.case_stmt.case_list = r;
4743 /* r->balance == -1 */
4747 struct case_node *t = r->left;
4749 if ((p->right = s = t->left))
4754 if ((r->left = s = t->right))
4768 if ((t->parent = s))
4777 case_stack->data.case_stmt.case_list = t;
4783 /* p->balance == -1; growth of right side balances the node. */
4796 /* Returns the number of possible values of TYPE.
4797 Returns -1 if the number is unknown, variable, or if the number does not
4798 fit in a HOST_WIDE_INT.
4799 Sets *SPARENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4800 do not increase monotonically (there may be duplicates);
4801 to 1 if the values increase monotonically, but not always by 1;
4802 otherwise sets it to 0. */
4805 all_cases_count (type, spareness)
4810 HOST_WIDE_INT count, minval, lastval;
4814 switch (TREE_CODE (type))
4821 count = 1 << BITS_PER_UNIT;
4826 if (TYPE_MAX_VALUE (type) != 0
4827 && 0 != (t = fold (build (MINUS_EXPR, type, TYPE_MAX_VALUE (type),
4828 TYPE_MIN_VALUE (type))))
4829 && 0 != (t = fold (build (PLUS_EXPR, type, t,
4830 convert (type, integer_zero_node))))
4831 && host_integerp (t, 1))
4832 count = tree_low_cst (t, 1);
4838 /* Don't waste time with enumeral types with huge values. */
4839 if (! host_integerp (TYPE_MIN_VALUE (type), 0)
4840 || TYPE_MAX_VALUE (type) == 0
4841 || ! host_integerp (TYPE_MAX_VALUE (type), 0))
4844 lastval = minval = tree_low_cst (TYPE_MIN_VALUE (type), 0);
4847 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4849 HOST_WIDE_INT thisval = tree_low_cst (TREE_VALUE (t), 0);
4851 if (*spareness == 2 || thisval < lastval)
4853 else if (thisval != minval + count)
4863 #define BITARRAY_TEST(ARRAY, INDEX) \
4864 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4865 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
4866 #define BITARRAY_SET(ARRAY, INDEX) \
4867 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4868 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
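/* Example (illustrative only) of how these macros are used: one bit per
   possible case value, in a buffer that starts out cleared.  */
#if 0
  {
    HOST_WIDE_INT count = 10;
    unsigned char *seen
      = (unsigned char *) xcalloc ((count + HOST_BITS_PER_CHAR - 1)
				   / HOST_BITS_PER_CHAR, 1);

    BITARRAY_SET (seen, 0);
    BITARRAY_SET (seen, 9);
    if (! BITARRAY_TEST (seen, 3))
      ;				/* Case value 3 was never seen.  */
    free (seen);
  }
#endif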
4870 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
4871 with the case values we have seen, assuming the case expression
4873 SPARSENESS is as determined by all_cases_count.
4875 The time needed is proportional to COUNT, unless
4876 SPARSENESS is 2, in which case quadratic time is needed. */
4879 mark_seen_cases (type, cases_seen, count, sparseness)
4881 unsigned char *cases_seen;
4882 HOST_WIDE_INT count;
4885 tree next_node_to_try = NULL_TREE;
4886 HOST_WIDE_INT next_node_offset = 0;
4888 register struct case_node *n, *root = case_stack->data.case_stmt.case_list;
4889 tree val = make_node (INTEGER_CST);
4891 TREE_TYPE (val) = type;
4895 else if (sparseness == 2)
4898 unsigned HOST_WIDE_INT xlo;
4900 /* This less efficient loop is only needed to handle
4901 duplicate case values (multiple enum constants
4902 with the same value). */
4903 TREE_TYPE (val) = TREE_TYPE (root->low);
4904 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
4905 t = TREE_CHAIN (t), xlo++)
4907 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
4908 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
4912 /* Keep going past elements distinctly greater than VAL. */
4913 if (tree_int_cst_lt (val, n->low))
4916 /* or distinctly less than VAL. */
4917 else if (tree_int_cst_lt (n->high, val))
4922 /* We have found a matching range. */
4923 BITARRAY_SET (cases_seen, xlo);
4933 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
4935 for (n = root; n; n = n->right)
4937 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
4938 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
4939 while (! tree_int_cst_lt (n->high, val))
4941 /* Calculate (into xlo) the "offset" of the integer (val).
4942 The element with lowest value has offset 0, the next smallest
4943 element has offset 1, etc. */
4945 unsigned HOST_WIDE_INT xlo;
4949 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
4951 /* The TYPE_VALUES will be in increasing order, so
4952 start searching where we last ended. */
4953 t = next_node_to_try;
4954 xlo = next_node_offset;
4960 t = TYPE_VALUES (type);
4963 if (tree_int_cst_equal (val, TREE_VALUE (t)))
4965 next_node_to_try = TREE_CHAIN (t);
4966 next_node_offset = xlo + 1;
4971 if (t == next_node_to_try)
4980 t = TYPE_MIN_VALUE (type);
4982 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
4986 add_double (xlo, xhi,
4987 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4991 if (xhi == 0 && xlo < (unsigned HOST_WIDE_INT) count)
4992 BITARRAY_SET (cases_seen, xlo);
4994 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4996 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
5002 /* Called when the index of a switch statement is an enumerated type
5003 and there is no default label.
5005 Checks that all enumeration literals are covered by the case
5006 expressions of a switch. Also warns if there are any extra
5007 switch cases that are *not* elements of the enumerated type.
5009 If all enumeration literals were covered by the case expressions,
5010 turn one of the expressions into the default expression since it should
5011 not be possible to fall through such a switch. */
5014 check_for_full_enumeration_handling (type)
5017 register struct case_node *n;
5018 register tree chain;
5019 #if 0 /* variable used by 'if 0'ed code below. */
5020 register struct case_node **l;
5024 /* True iff the selector type is a numbered set mode. */
5027 /* The number of possible selector values. */
5030 /* For each possible selector value, a one iff it has been matched
5031 by a case value alternative. */
5032 unsigned char *cases_seen;
5034 /* The allocated size of cases_seen, in chars. */
5035 HOST_WIDE_INT bytes_needed;
5040 size = all_cases_count (type, &sparseness);
5041 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
5043 if (size > 0 && size < 600000
5044 /* We deliberately use calloc here, not xcalloc, so that we can suppress
5045 this optimization if we don't have enough memory rather than
5046 aborting, as xmalloc would do. */
5048 (unsigned char *) really_call_calloc (bytes_needed, 1)) != NULL)
5051 tree v = TYPE_VALUES (type);
5053 /* The time complexity of this code is normally O(N), where
5054 N is the number of members in the enumerated type.
5055 However, if type is an ENUMERAL_TYPE whose values do not
5056 increase monotonically, O(N*log(N)) time may be needed. */
5058 mark_seen_cases (type, cases_seen, size, sparseness);
5060 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
5061 if (BITARRAY_TEST (cases_seen, i) == 0)
5062 warning ("enumeration value `%s' not handled in switch",
5063 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
5068 /* Now we go the other way around; we warn if there are case
5069 expressions that don't correspond to enumerators. This can
5070 occur since C and C++ don't enforce type-checking of
5071 assignments to enumeration variables. */
5073 if (case_stack->data.case_stmt.case_list
5074 && case_stack->data.case_stmt.case_list->left)
5075 case_stack->data.case_stmt.case_list
5076 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
5078 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
5080 for (chain = TYPE_VALUES (type);
5081 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
5082 chain = TREE_CHAIN (chain))
5087 if (TYPE_NAME (type) == 0)
5088 warning ("case value `%ld' not in enumerated type",
5089 (long) TREE_INT_CST_LOW (n->low));
5091 warning ("case value `%ld' not in enumerated type `%s'",
5092 (long) TREE_INT_CST_LOW (n->low),
5093 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5096 : DECL_NAME (TYPE_NAME (type))));
5098 if (!tree_int_cst_equal (n->low, n->high))
5100 for (chain = TYPE_VALUES (type);
5101 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
5102 chain = TREE_CHAIN (chain))
5107 if (TYPE_NAME (type) == 0)
5108 warning ("case value `%ld' not in enumerated type",
5109 (long) TREE_INT_CST_LOW (n->high));
5111 warning ("case value `%ld' not in enumerated type `%s'",
5112 (long) TREE_INT_CST_LOW (n->high),
5113 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5116 : DECL_NAME (TYPE_NAME (type))));
5122 /* ??? This optimization is disabled because it causes valid programs to
5123 fail. ANSI C does not guarantee that an expression with enum type
5124 will have a value that is the same as one of the enumeration literals. */
5126 /* If all values were found as case labels, make one of them the default
5127 label. Thus, this switch will never fall through. We arbitrarily pick
5128 the last one to make the default since this is likely the most
5129 efficient choice. */
5133 for (l = &case_stack->data.case_stmt.case_list;
5138 case_stack->data.case_stmt.default_label = (*l)->code_label;
5144 /* Free CN, and its children. */
5147 free_case_nodes (cn)
5152 free_case_nodes (cn->left);
5153 free_case_nodes (cn->right);
5159 /* Terminate a case (Pascal) or switch (C) statement
5160 in which ORIG_INDEX is the expression to be tested.
5161 Generate the code to test it and jump to the right place. */
5164 expand_end_case (orig_index)
5167 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE, orig_minval;
5168 rtx default_label = 0;
5169 register struct case_node *n;
5177 register struct nesting *thiscase = case_stack;
5178 tree index_expr, index_type;
5181 /* Don't crash due to previous errors. */
5182 if (thiscase == NULL)
5185 table_label = gen_label_rtx ();
5186 index_expr = thiscase->data.case_stmt.index_expr;
5187 index_type = TREE_TYPE (index_expr);
5188 unsignedp = TREE_UNSIGNED (index_type);
5190 do_pending_stack_adjust ();
5192 /* This might get a spurious warning in the presence of a syntax error;
5193 it could be fixed by moving the call to check_seenlabel after the
5194 check for error_mark_node, and copying the code of check_seenlabel that
5195 deals with case_stack->data.case_stmt.line_number_status /
5196 restore_line_number_status in front of the call to end_cleanup_deferral.
5197 However, this might miss some useful warnings in the presence of
5198 non-syntax errors. */
5201 /* An ERROR_MARK occurs for various reasons including invalid data type. */
5202 if (index_type != error_mark_node)
5204 /* If the switch expression was an enumerated type, check that all
5205 enumeration literals are covered by the cases.
5206 No sense trying this if there's a default case, however. */
5208 if (!thiscase->data.case_stmt.default_label
5209 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
5210 && TREE_CODE (index_expr) != INTEGER_CST)
5211 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
5213 /* If we don't have a default-label, create one here,
5214 after the body of the switch. */
5215 if (thiscase->data.case_stmt.default_label == 0)
5217 thiscase->data.case_stmt.default_label
5218 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5219 expand_label (thiscase->data.case_stmt.default_label);
5221 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5223 before_case = get_last_insn ();
5225 if (thiscase->data.case_stmt.case_list
5226 && thiscase->data.case_stmt.case_list->left)
5227 thiscase->data.case_stmt.case_list
5228 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
5230 /* Simplify the case-list before we count it. */
5231 group_case_nodes (thiscase->data.case_stmt.case_list);
5233 /* Get upper and lower bounds of case values.
5234 Also convert all the case values to the index expr's data type. */
5237 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5239 /* Check low and high label values are integers. */
5240 if (TREE_CODE (n->low) != INTEGER_CST)
5242 if (TREE_CODE (n->high) != INTEGER_CST)
5245 n->low = convert (index_type, n->low);
5246 n->high = convert (index_type, n->high);
5248 /* Count the elements and track the largest and smallest
5249 of them (treating them as signed even if they are not). */
5257 if (INT_CST_LT (n->low, minval))
5259 if (INT_CST_LT (maxval, n->high))
5262 /* A range counts double, since it requires two compares. */
5263 if (! tree_int_cst_equal (n->low, n->high))
5267 orig_minval = minval;
5269 /* Compute span of values. */
5271 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5273 end_cleanup_deferral ();
5277 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5279 emit_jump (default_label);
5282 /* If the range of values is much bigger than the number of values,
5283 make a sequence of conditional branches instead of a dispatch.
5284 If the switch-index is a constant, do it this way
5285 because we can optimize it. */
5287 #ifndef CASE_VALUES_THRESHOLD
5289 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
5291 /* If the machine does not have a case insn that compares the
5292 bounds, this means extra overhead for dispatch tables,
5293 which raises the threshold for using them. */
5294 #define CASE_VALUES_THRESHOLD 5
5295 #endif /* HAVE_casesi */
5296 #endif /* CASE_VALUES_THRESHOLD */
5298 else if (count < CASE_VALUES_THRESHOLD
5299 || compare_tree_int (range, 10 * count) > 0
5300 /* RANGE may be signed, and really large ranges will show up
5301 as negative numbers. */
5302 || compare_tree_int (range, 0) < 0
5303 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5306 || TREE_CODE (index_expr) == INTEGER_CST
5307 /* These will reduce to a constant. */
5308 || (TREE_CODE (index_expr) == CALL_EXPR
5309 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
5310 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
5311 && DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_NORMAL
5312 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
5313 || (TREE_CODE (index_expr) == COMPOUND_EXPR
5314 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
5316 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5318 /* If the index is a short or char for which we do not have
5319 an insn to handle comparisons directly, convert it to
5320 a full integer now, rather than letting each comparison
5321 generate the conversion. */
5323 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5324 && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
5325 == CODE_FOR_nothing))
5327 enum machine_mode wider_mode;
5328 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5329 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5330 if (cmp_optab->handlers[(int) wider_mode].insn_code
5331 != CODE_FOR_nothing)
5333 index = convert_to_mode (wider_mode, index, unsignedp);
5339 do_pending_stack_adjust ();
5341 index = protect_from_queue (index, 0);
5342 if (GET_CODE (index) == MEM)
5343 index = copy_to_reg (index);
5344 if (GET_CODE (index) == CONST_INT
5345 || TREE_CODE (index_expr) == INTEGER_CST)
5347 /* Make a tree node with the proper constant value
5348 if we don't already have one. */
5349 if (TREE_CODE (index_expr) != INTEGER_CST)
5352 = build_int_2 (INTVAL (index),
5353 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5354 index_expr = convert (index_type, index_expr);
5357 /* For constant index expressions we need only
5358 issue an unconditional branch to the appropriate
5359 target code. The job of removing any unreachable
5360 code is left to the optimisation phase if the
5361 "-O" option is specified. */
5362 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5363 if (! tree_int_cst_lt (index_expr, n->low)
5364 && ! tree_int_cst_lt (n->high, index_expr))
5368 emit_jump (label_rtx (n->code_label));
5370 emit_jump (default_label);
5374 /* If the index expression is not constant we generate
5375 a binary decision tree to select the appropriate
5376 target code. This is done as follows:
5378 The list of cases is rearranged into a binary tree,
5379 nearly optimal assuming equal probability for each case.
5381 The tree is transformed into RTL, eliminating
5382 redundant test conditions at the same time.
5384 If program flow could reach the end of the
5385 decision tree an unconditional jump to the
5386 default code is emitted. */
5389 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
5390 && estimate_case_costs (thiscase->data.case_stmt.case_list));
5391 balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
5392 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5393 default_label, index_type);
5394 emit_jump_if_reachable (default_label);
5403 enum machine_mode index_mode = SImode;
5404 int index_bits = GET_MODE_BITSIZE (index_mode);
5406 enum machine_mode op_mode;
5408 /* Convert the index to SImode. */
5409 if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
5410 > GET_MODE_BITSIZE (index_mode))
5412 enum machine_mode omode = TYPE_MODE (index_type);
5413 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
5415 /* We must handle the endpoints in the original mode. */
5416 index_expr = build (MINUS_EXPR, index_type,
5417 index_expr, minval);
5418 minval = integer_zero_node;
5419 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5420 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
5421 omode, 1, 0, default_label);
5422 /* Now we can safely truncate. */
5423 index = convert_to_mode (index_mode, index, 0);
5427 if (TYPE_MODE (index_type) != index_mode)
5429 index_expr = convert (type_for_size (index_bits, 0),
5431 index_type = TREE_TYPE (index_expr);
5434 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5437 index = protect_from_queue (index, 0);
5438 do_pending_stack_adjust ();
5440 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
5441 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
5443 index = copy_to_mode_reg (op_mode, index);
5445 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
5447 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
5448 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
5449 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
5450 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
5452 op1 = copy_to_mode_reg (op_mode, op1);
5454 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
5456 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
5457 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
5458 op2, TREE_UNSIGNED (TREE_TYPE (range)));
5459 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
5461 op2 = copy_to_mode_reg (op_mode, op2);
5463 emit_jump_insn (gen_casesi (index, op1, op2,
5464 table_label, default_label));
5468 #ifdef HAVE_tablejump
5469 if (! win && HAVE_tablejump)
5471 index_type = thiscase->data.case_stmt.nominal_type;
5472 index_expr = fold (build (MINUS_EXPR, index_type,
5473 convert (index_type, index_expr),
5474 convert (index_type, minval)));
5475 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5477 index = protect_from_queue (index, 0);
5478 do_pending_stack_adjust ();
5480 do_tablejump (index, TYPE_MODE (index_type),
5481 convert_modes (TYPE_MODE (index_type),
5482 TYPE_MODE (TREE_TYPE (range)),
5483 expand_expr (range, NULL_RTX,
5485 TREE_UNSIGNED (TREE_TYPE (range))),
5486 table_label, default_label);
5493 /* Get table of labels to jump to, in order of case index. */
5495 ncases = TREE_INT_CST_LOW (range) + 1;
5496 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
5497 memset ((char *) labelvec, 0, ncases * sizeof (rtx));
5499 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5501 register HOST_WIDE_INT i
5502 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);
5507 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
5508 if (i + TREE_INT_CST_LOW (orig_minval)
5509 == TREE_INT_CST_LOW (n->high))
5515 /* Fill in the gaps with the default. */
5516 for (i = 0; i < ncases; i++)
5517 if (labelvec[i] == 0)
5518 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
5520 /* Output the table */
5521 emit_label (table_label);
5523 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
5524 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5525 gen_rtx_LABEL_REF (Pmode, table_label),
5526 gen_rtvec_v (ncases, labelvec),
5527 const0_rtx, const0_rtx));
5529 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5530 gen_rtvec_v (ncases, labelvec)));
5532 /* If the case insn drops through the table,
5533 after the table we must jump to the default-label.
5534 Otherwise record no drop-through after the table. */
5535 #ifdef CASE_DROPS_THROUGH
5536 emit_jump (default_label);
5542 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
5543 reorder_insns (before_case, get_last_insn (),
5544 thiscase->data.case_stmt.start);
5547 end_cleanup_deferral ();
5549 if (thiscase->exit_label)
5550 emit_label (thiscase->exit_label);
5552 free_case_nodes (case_stack->data.case_stmt.case_list);
5553 POPSTACK (case_stack);
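/* Illustrative sketch only (not copied from any front end) of the
   calling sequence a parser would use for

       switch (c) { case 'a': ...; default: ...; }

   COND, A_LABEL and DEFAULT_LABEL are assumed to have been built by
   the front end; the PRINTNAME string and `convert_for_case' are
   placeholders.  */
#if 0
  {
    tree duplicate;

    expand_start_case (1, cond, TREE_TYPE (cond), "switch statement");
    /* ... expand the body; at each case label: ... */
    pushcase (build_int_2 ('a', 0), convert_for_case, a_label, &duplicate);
    /* A null VALUE registers the default label.  */
    pushcase (NULL_TREE, convert_for_case, default_label, &duplicate);
    /* ... after the body: ... */
    expand_end_case (cond);
  }
#endif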
5558 /* Convert the tree NODE into a list linked by the right field, with the left
5559 field zeroed. RIGHT is used for recursion; it is a list to be placed
5560 rightmost in the resulting list. */
5562 static struct case_node *
5563 case_tree2list (node, right)
5564 struct case_node *node, *right;
5566 struct case_node *left;
5569 right = case_tree2list (node->right, right);
5571 node->right = right;
5572 if ((left = node->left))
5575 return case_tree2list (left, node);
5581 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5584 do_jump_if_equal (op1, op2, label, unsignedp)
5585 rtx op1, op2, label;
5588 if (GET_CODE (op1) == CONST_INT
5589 && GET_CODE (op2) == CONST_INT)
5591 if (INTVAL (op1) == INTVAL (op2))
5596 enum machine_mode mode = GET_MODE (op1);
5597 if (mode == VOIDmode)
5598 mode = GET_MODE (op2);
5599 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX, mode, unsignedp,
5604 /* Not all case values are encountered equally. This function
5605 uses a heuristic to weight case labels, in cases where that
5606 looks like a reasonable thing to do.
5608 Right now, all we try to guess is text, and we establish the following weights:
5611 chars above space: 16
5620 If we find any cases in the switch that are not either -1 or in the range
5621 of valid ASCII characters, or are control characters other than those
5622 commonly used with "\", don't treat this switch as scanning text.
5624 Return 1 if these nodes are suitable for cost estimation, otherwise return 0. */
5628 estimate_case_costs (node)
5631 tree min_ascii = integer_minus_one_node;
5632 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5636 /* If we haven't already made the cost table, make it now. Note that the
5637 lower bound of the table is -1, not zero. */
5639 if (! cost_table_initialized)
5641 cost_table_initialized = 1;
5643 for (i = 0; i < 128; i++)
5646 COST_TABLE (i) = 16;
5647 else if (ISPUNCT (i))
5649 else if (ISCNTRL (i))
5650 COST_TABLE (i) = -1;
5653 COST_TABLE (' ') = 8;
5654 COST_TABLE ('\t') = 4;
5655 COST_TABLE ('\0') = 4;
5656 COST_TABLE ('\n') = 2;
5657 COST_TABLE ('\f') = 1;
5658 COST_TABLE ('\v') = 1;
5659 COST_TABLE ('\b') = 1;
5662 /* See if all the case expressions look like text. It is text if the
5663 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5664 as signed arithmetic since we don't want to ever access cost_table with a
5665 value less than -1. Also check that none of the constants in a range
5666 are strange control characters. */
5668 for (n = node; n; n = n->right)
5670 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5673 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
5674 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
5675 if (COST_TABLE (i) < 0)
5679 /* All interesting values are within the range of interesting
5680 ASCII characters. */
5684 /* Scan an ordered list of case nodes
5685 combining those with consecutive values or ranges.
5687 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
5690 group_case_nodes (head)
5693 case_node_ptr node = head;
5697 rtx lb = next_real_insn (label_rtx (node->code_label));
5699 case_node_ptr np = node;
5701 /* Try to group the successors of NODE with NODE. */
5702 while (((np = np->right) != 0)
5703 /* Do they jump to the same place? */
5704 && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
5705 || (lb != 0 && lb2 != 0
5706 && simplejump_p (lb)
5707 && simplejump_p (lb2)
5708 && rtx_equal_p (SET_SRC (PATTERN (lb)),
5709 SET_SRC (PATTERN (lb2)))))
5710 /* Are their ranges consecutive? */
5711 && tree_int_cst_equal (np->low,
5712 fold (build (PLUS_EXPR,
5713 TREE_TYPE (node->high),
5716 /* An overflow is not consecutive. */
5717 && tree_int_cst_lt (node->high,
5718 fold (build (PLUS_EXPR,
5719 TREE_TYPE (node->high),
5721 integer_one_node))))
5723 node->high = np->high;
5725 /* NP is the first node after NODE which can't be grouped with it.
5726 Delete the nodes in between, and move on to that node. */
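/* Illustrative example only: in the fragment below the three case
   nodes jump to the same insns and have consecutive values, so the
   loop above merges them into a single node covering 1..3 before any
   dispatch code is generated.  */
#if 0
  switch (x)
    {
    case 1:
    case 2:
    case 3:
      foo ();
      break;
    }
#endif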
5732 /* Take an ordered list of case nodes
5733 and transform them into a near optimal binary tree,
5734 on the assumption that any target code selection value is as
5735 likely as any other.
5737 The transformation is performed by splitting the ordered
5738 list into two equal sections plus a pivot. The parts are
5739 then attached to the pivot as left and right branches. Each
5740 branch is then transformed recursively. */
5743 balance_case_nodes (head, parent)
5744 case_node_ptr *head;
5745 case_node_ptr parent;
5747 register case_node_ptr np;
5755 register case_node_ptr *npp;
5758 /* Count the number of entries on branch. Also count the ranges. */
5762 if (!tree_int_cst_equal (np->low, np->high))
5766 cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
5770 cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
5778 /* Split this list if it is long enough for that to help. */
5783 /* Find the place in the list that bisects the list's total cost.
5784 Here I gets half the total cost. */
5789 /* Skip nodes while their cost does not reach that amount. */
5790 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5791 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
5792 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
5795 npp = &(*npp)->right;
5800 /* Leave this branch lopsided, but optimize left-hand
5801 side and fill in `parent' fields for right-hand side. */
5803 np->parent = parent;
5804 balance_case_nodes (&np->left, np);
5805 for (; np->right; np = np->right)
5806 np->right->parent = np;
5810 /* If there are just three nodes, split at the middle one. */
5812 npp = &(*npp)->right;
5815 /* Find the place in the list that bisects the list's total cost,
5816 where ranges count as 2.
5817 Here I gets half the total cost. */
5818 i = (i + ranges + 1) / 2;
5821 /* Skip nodes while their cost does not reach that amount. */
5822 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5827 npp = &(*npp)->right;
5832 np->parent = parent;
5835 /* Optimize each of the two split parts. */
5836 balance_case_nodes (&np->left, np);
5837 balance_case_nodes (&np->right, np);
5841 /* Else leave this branch as one level,
5842 but fill in `parent' fields. */
5844 np->parent = parent;
5845 for (; np->right; np = np->right)
5846 np->right->parent = np;
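/* Illustrative example only: given seven equal-cost single-value nodes
   1 2 3 4 5 6 7 chained on the right, the bisection above picks 4 as
   the pivot, then splits each half at its middle node, giving

		 4
	       /   \
	      2     6
	     / \   / \
	    1   3 5   7

   which is the shape emit_case_nodes will walk when emitting the
   compare-and-branch sequence.  */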
5851 /* Search the parent sections of the case node tree
5852 to see if a test for the lower bound of NODE would be redundant.
5853 INDEX_TYPE is the type of the index expression.
5855 The instructions to generate the case decision tree are
5856 output in the same order as nodes are processed so it is
5857 known that if a parent node checks the range of the current
5858 node minus one, then the current node is bounded at its lower
5859 span. Thus the test would be redundant. */
5862 node_has_low_bound (node, index_type)
5867 case_node_ptr pnode;
5869 /* If the lower bound of this node is the lowest value in the index type,
5870 we need not test it. */
5872 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5875 /* If this node has a left branch, the value at the left must be less
5876 than that at this node, so it cannot be bounded at the bottom and
5877 we need not bother testing any further. */
5882 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5883 node->low, integer_one_node));
5885 /* If the subtraction above overflowed, we can't verify anything.
5886 Otherwise, look for a parent that tests our value - 1. */
5888 if (! tree_int_cst_lt (low_minus_one, node->low))
5891 for (pnode = node->parent; pnode; pnode = pnode->parent)
5892 if (tree_int_cst_equal (low_minus_one, pnode->high))
5898 /* Search the parent sections of the case node tree
5899 to see if a test for the upper bound of NODE would be redundant.
5900 INDEX_TYPE is the type of the index expression.
5902 The instructions to generate the case decision tree are
5903 output in the same order as nodes are processed so it is
5904 known that if a parent node checks the range of the current
5905 node plus one, then the current node is bounded at its upper
5906 span. Thus the test would be redundant. */
5909 node_has_high_bound (node, index_type)
5914 case_node_ptr pnode;
5916 /* If there is no upper bound, obviously no test is needed. */
5918 if (TYPE_MAX_VALUE (index_type) == NULL)
5921 /* If the upper bound of this node is the highest value in the type
5922 of the index expression, we need not test against it. */
5924 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5927 /* If this node has a right branch, the value at the right must be greater
5928 than that at this node, so it cannot be bounded at the top and
5929 we need not bother testing any further. */
5934 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5935 node->high, integer_one_node));
5937 /* If the addition above overflowed, we can't verify anything.
5938 Otherwise, look for a parent that tests our value + 1. */
5940 if (! tree_int_cst_lt (node->high, high_plus_one))
5943 for (pnode = node->parent; pnode; pnode = pnode->parent)
5944 if (tree_int_cst_equal (high_plus_one, pnode->low))
5950 /* Search the parent sections of the
5951 case node tree to see if both tests for the upper and lower
5952 bounds of NODE would be redundant. */
5955 node_is_bounded (node, index_type)
5959 return (node_has_low_bound (node, index_type)
5960 && node_has_high_bound (node, index_type));
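/* Illustrative example only: suppose a node covers the range 51..53 and
   one of its parents already tested the value 50 (the parent's HIGH is
   this node's LOW minus one).  Control can then reach this node only
   with INDEX >= 51, so node_has_low_bound returns nonzero and
   emit_case_nodes omits the lower-bound comparison.  */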
5963 /* Emit an unconditional jump to LABEL unless it would be dead code. */
5966 emit_jump_if_reachable (label)
5969 if (GET_CODE (get_last_insn ()) != BARRIER)
5973 /* Emit step-by-step code to select a case for the value of INDEX.
5974 The thus generated decision tree follows the form of the
5975 case-node binary tree NODE, whose nodes represent test conditions.
5976 INDEX_TYPE is the type of the index of the switch.
5978 Care is taken to prune redundant tests from the decision tree
5979 by detecting any boundary conditions already checked by
5980 emitted rtx. (See node_has_high_bound, node_has_low_bound
5981 and node_is_bounded, above.)
5983 Where the test conditions can be shown to be redundant we emit
5984 an unconditional jump to the target code. As a further
5985 optimization, the subordinates of a tree node are examined to
5986 check for bounded nodes. In this case, conditional and/or
5987 unconditional jumps as a result of the boundary check for the
5988 current node are arranged to target the subordinates' associated
5989 code for out-of-bound conditions on the current node.
5991 We can assume that when control reaches the code generated here,
5992 the index value has already been compared with the parents
5993 of this node, and determined to be on the same side of each parent
5994 as this node is. Thus, if this node tests for the value 51,
5995 and a parent tested for 52, we don't need to consider
5996 the possibility of a value greater than 51. If another parent
5997 tests for the value 50, then this node need not test anything. */
6000 emit_case_nodes (index, node, default_label, index_type)
6006 /* If INDEX has an unsigned type, we must make unsigned branches. */
6007 int unsignedp = TREE_UNSIGNED (index_type);
6008 enum machine_mode mode = GET_MODE (index);
6009 enum machine_mode imode = TYPE_MODE (index_type);
6011 /* See if our parents have already tested everything for us.
6012 If they have, emit an unconditional jump for this node. */
6013 if (node_is_bounded (node, index_type))
6014 emit_jump (label_rtx (node->code_label));
6016 else if (tree_int_cst_equal (node->low, node->high))
6018 /* Node is single valued. First see if the index expression matches
6019 this node and then check our children, if any. */
6021 do_jump_if_equal (index,
6022 convert_modes (mode, imode,
6023 expand_expr (node->low, NULL_RTX,
6026 label_rtx (node->code_label), unsignedp);
6028 if (node->right != 0 && node->left != 0)
6030 /* This node has children on both sides.
6031 Dispatch to one side or the other
6032 by comparing the index value with this node's value.
6033 If one subtree is bounded, check that one first,
6034 so we can avoid real branches in the tree. */
6036 if (node_is_bounded (node->right, index_type))
6038 emit_cmp_and_jump_insns (index,
6041 expand_expr (node->high, NULL_RTX,
6044 GT, NULL_RTX, mode, unsignedp, 0,
6045 label_rtx (node->right->code_label));
6046 emit_case_nodes (index, node->left, default_label, index_type);
6049 else if (node_is_bounded (node->left, index_type))
6051 emit_cmp_and_jump_insns (index,
6054 expand_expr (node->high, NULL_RTX,
6057 LT, NULL_RTX, mode, unsignedp, 0,
6058 label_rtx (node->left->code_label));
6059 emit_case_nodes (index, node->right, default_label, index_type);
6064 /* Neither node is bounded. First distinguish the two sides;
6065 then emit the code for one side at a time. */
6067 tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6069 /* See if the value is on the right. */
6070 emit_cmp_and_jump_insns (index,
6073 expand_expr (node->high, NULL_RTX,
6076 GT, NULL_RTX, mode, unsignedp, 0,
6077 label_rtx (test_label));
6079 /* Value must be on the left.
6080 Handle the left-hand subtree. */
6081 emit_case_nodes (index, node->left, default_label, index_type);
6082 /* If left-hand subtree does nothing, go to default. */
6084 emit_jump_if_reachable (default_label);
6086 /* Code branches here for the right-hand subtree. */
6087 expand_label (test_label);
6088 emit_case_nodes (index, node->right, default_label, index_type);
6092 else if (node->right != 0 && node->left == 0)
6094 /* Here we have a right child but no left, so we issue a conditional
6095 branch to default and process the right child.
6097 Omit the conditional branch to default if it would avoid only one
6098 right child; it costs too much space to save so little time. */
6100 if (node->right->right || node->right->left
6101 || !tree_int_cst_equal (node->right->low, node->right->high))
6103 if (!node_has_low_bound (node, index_type))
6105 emit_cmp_and_jump_insns (index,
6108 expand_expr (node->high, NULL_RTX,
6111 LT, NULL_RTX, mode, unsignedp, 0,
6115 emit_case_nodes (index, node->right, default_label, index_type);
6118 /* We cannot process node->right normally
6119 since we haven't ruled out the numbers less than
6120 this node's value. So handle node->right explicitly. */
6121 do_jump_if_equal (index,
6124 expand_expr (node->right->low, NULL_RTX,
6127 label_rtx (node->right->code_label), unsignedp);
6130 else if (node->right == 0 && node->left != 0)
6132 /* Just one subtree, on the left. */
6134 #if 0 /* The following code and comment were formerly part
6135 of the condition here, but they didn't work
6136 and I don't understand what the idea was. -- rms. */
6137 /* If our "most probable entry" is less probable
6138 than the default label, emit a jump to
6139 the default label using condition codes
6140 already lying around. With no right branch,
6141 a branch-greater-than will get us to the default
6144 && COST_TABLE (TREE_INT_CST_LOW (node->high)) < 12)
6147 if (node->left->left || node->left->right
6148 || !tree_int_cst_equal (node->left->low, node->left->high))
6150 if (!node_has_high_bound (node, index_type))
6152 emit_cmp_and_jump_insns (index,
6155 expand_expr (node->high, NULL_RTX,
6158 GT, NULL_RTX, mode, unsignedp, 0,
6162 emit_case_nodes (index, node->left, default_label, index_type);
6165 /* We cannot process node->left normally
6166 since we haven't ruled out the numbers greater than
6167 this node's value. So handle node->left explicitly. */
6168 do_jump_if_equal (index,
6171 expand_expr (node->left->low, NULL_RTX,
6174 label_rtx (node->left->code_label), unsignedp);
6179 /* Node is a range. These cases are very similar to those for a single
6180 value, except that we do not start by testing whether this node
6181 is the one to branch to. */
6183 if (node->right != 0 && node->left != 0)
6185 /* Node has subtrees on both sides.
6186 If the right-hand subtree is bounded,
6187 test for it first, since we can go straight there.
6188 Otherwise, we need to make a branch in the control structure,
6189 then handle the two subtrees. */
6190 tree test_label = 0;
6192 if (node_is_bounded (node->right, index_type))
6193 /* Right hand node is fully bounded so we can eliminate any
6194 testing and branch directly to the target code. */
6195 emit_cmp_and_jump_insns (index,
6198 expand_expr (node->high, NULL_RTX,
6201 GT, NULL_RTX, mode, unsignedp, 0,
6202 label_rtx (node->right->code_label));
6205 /* Right hand node requires testing.
6206 Branch to a label where we will handle it later. */
6208 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6209 emit_cmp_and_jump_insns (index,
6212 expand_expr (node->high, NULL_RTX,
6215 GT, NULL_RTX, mode, unsignedp, 0,
6216 label_rtx (test_label));
6219 /* Value belongs to this node or to the left-hand subtree. */
6221 emit_cmp_and_jump_insns (index,
6224 expand_expr (node->low, NULL_RTX,
6227 GE, NULL_RTX, mode, unsignedp, 0,
6228 label_rtx (node->code_label));
6230 /* Handle the left-hand subtree. */
6231 emit_case_nodes (index, node->left, default_label, index_type);
6233 /* If right node had to be handled later, do that now. */
6237 /* If the left-hand subtree fell through,
6238 don't let it fall into the right-hand subtree. */
6239 emit_jump_if_reachable (default_label);
6241 expand_label (test_label);
6242 emit_case_nodes (index, node->right, default_label, index_type);
6246 else if (node->right != 0 && node->left == 0)
6248 /* Deal with values to the left of this node,
6249 if they are possible. */
6250 if (!node_has_low_bound (node, index_type))
6252 emit_cmp_and_jump_insns (index,
6255 expand_expr (node->low, NULL_RTX,
6258 LT, NULL_RTX, mode, unsignedp, 0,
6262 /* Value belongs to this node or to the right-hand subtree. */
6264 emit_cmp_and_jump_insns (index,
6267 expand_expr (node->high, NULL_RTX,
6270 LE, NULL_RTX, mode, unsignedp, 0,
6271 label_rtx (node->code_label));
6273 emit_case_nodes (index, node->right, default_label, index_type);
6276 else if (node->right == 0 && node->left != 0)
6278 /* Deal with values to the right of this node,
6279 if they are possible. */
6280 if (!node_has_high_bound (node, index_type))
6282 emit_cmp_and_jump_insns (index,
6285 expand_expr (node->high, NULL_RTX,
6288 GT, NULL_RTX, mode, unsignedp, 0,
6292 /* Value belongs to this node or to the left-hand subtree. */
6294 emit_cmp_and_jump_insns (index,
6297 expand_expr (node->low, NULL_RTX,
6300 GE, NULL_RTX, mode, unsignedp, 0,
6301 label_rtx (node->code_label));
6303 emit_case_nodes (index, node->left, default_label, index_type);
6308 /* Node has no children so we check low and high bounds to remove
6309 redundant tests. Only one of the bounds can exist,
6310 since otherwise this node is bounded--a case tested already. */
6311 int high_bound = node_has_high_bound (node, index_type);
6312 int low_bound = node_has_low_bound (node, index_type);
6314 if (!high_bound && low_bound)
6316 emit_cmp_and_jump_insns (index,
6319 expand_expr (node->high, NULL_RTX,
6322 GT, NULL_RTX, mode, unsignedp, 0,
6326 else if (!low_bound && high_bound)
6328 emit_cmp_and_jump_insns (index,
6331 expand_expr (node->low, NULL_RTX,
6334 LT, NULL_RTX, mode, unsignedp, 0,
6337 else if (!low_bound && !high_bound)
6339 /* Widen LOW and HIGH to the same width as INDEX. */
6340 tree type = type_for_mode (mode, unsignedp);
6341 tree low = build1 (CONVERT_EXPR, type, node->low);
6342 tree high = build1 (CONVERT_EXPR, type, node->high);
6343 rtx new_index, new_bound;
6345 /* Instead of doing two branches, emit one unsigned branch for
6346 (index-low) > (high-low). */
6347 new_index = expand_binop (mode, sub_optab, index,
6348 expand_expr (low, NULL_RTX, mode, 0),
6349 NULL_RTX, unsignedp, OPTAB_WIDEN);
6350 new_bound = expand_expr (fold (build (MINUS_EXPR, type,
6354 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
6355 mode, 1, 0, default_label);
6358 emit_jump (label_rtx (node->code_label));