1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 /* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 It also creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
27 The functions whose names start with `expand_' are called by the
28 parser to generate RTL instructions for various kinds of constructs.
30 Some control and binding constructs require calling several such
31 functions at different times. For example, a simple if-then
32 is expanded by calling `expand_start_cond' (with the condition-expression
33 as argument) before parsing the then-clause and calling `expand_end_cond'
34 after parsing the then-clause. */
45 #include "insn-flags.h"
46 #include "insn-config.h"
47 #include "insn-codes.h"
49 #include "hard-reg-set.h"
/* Obstack used to allocate the `struct nesting' objects in this file
   (see ALLOC_NESTING below); chunks are obtained with xmalloc and
   released with free.  */
58 #define obstack_chunk_alloc xmalloc
59 #define obstack_chunk_free free
60 struct obstack stmt_obstack;
62 /* Assume that case vectors are not pc-relative. */
63 #ifndef CASE_VECTOR_PC_RELATIVE
64 #define CASE_VECTOR_PC_RELATIVE 0
67 /* Functions and data structures for expanding case statements. */
69 /* Case label structure, used to hold info on labels within case
70 statements. We handle "range" labels; for a single-value label
71 as in C, the high and low limits are the same.
73 An AVL tree of case nodes is initially created, and later transformed
74 to a list linked via the RIGHT fields in the nodes. Nodes with
75 higher case values are later in the list.
77 Switch statements can be output in one of two forms. A branch table
78 is used if there are more than a few labels and the labels are dense
79 within the range between the smallest and largest case value. If a
80 branch table is used, no further manipulations are done with the case node chain.
83 The alternative to the use of a branch table is to generate a series
84 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
85 and PARENT fields to hold a binary tree. Initially the tree is
86 totally unbalanced, with everything on the right. We balance the tree
87 with nodes on the left having lower case values than the parent
88 and nodes on the right having higher values. We then output the tree in order. */
93 struct case_node *left; /* Left son in binary tree */
94 struct case_node *right; /* Right son in binary tree; also node chain */
95 struct case_node *parent; /* Parent of node in binary tree */
96 tree low; /* Lowest index value for this label */
97 tree high; /* Highest index value for this label */
98 tree code_label; /* Label to jump to when node matches */
/* Convenience typedefs for nodes of the case-label AVL tree / list.  */
102 typedef struct case_node case_node;
103 typedef struct case_node *case_node_ptr;
105 /* These are used by estimate_case_costs and balance_case_nodes. */
107 /* This must be a signed type, and non-ANSI compilers lack signed char. */
/* Cost estimates used when expanding case statements; the array has
   129 slots so that indices -1 through 127 can all be stored (see the
   biased COST_TABLE accessor below).  */
108 static short cost_table_[129];
/* Nonzero when cost_table_ should be consulted
   (presumably set by estimate_case_costs -- caller not visible here).  */
109 static int use_cost_table;
/* Nonzero once cost_table_ has been filled in.  */
110 static int cost_table_initialized;
112 /* Special care is needed because we allow -1, but TREE_INT_CST_LOW
   is unsigned; the index is therefore biased by 1, so that I == -1
   maps to slot 0.  */
114 #define COST_TABLE(I) cost_table_[(unsigned HOST_WIDE_INT)((I) + 1)]
116 /* Stack of control and binding constructs we are currently inside.
118 These constructs begin when you call `expand_start_WHATEVER'
119 and end when you call `expand_end_WHATEVER'. This stack records
120 info about how the construct began that tells the end-function
121 what to do. It also may provide information about the construct
122 to alter the behavior of other constructs within the body.
123 For example, they may affect the behavior of C `break' and `continue'.
125 Each construct gets one `struct nesting' object.
126 All of these objects are chained through the `all' field.
127 `nesting_stack' points to the first object (innermost construct).
128 The position of an entry on `nesting_stack' is in its `depth' field.
130 Each type of construct has its own individual stack.
131 For example, loops have `loop_stack'. Each object points to the
132 next object of the same type through the `next' field.
134 Some constructs are visible to `break' exit-statements and others
135 are not. Which constructs are visible depends on the language.
136 Therefore, the data structure allows each construct to be visible
137 or not, according to the args given when the construct is started.
138 The construct is visible if the `exit_label' field is non-null.
139 In that case, the value should be a CODE_LABEL rtx. */
144 struct nesting *next;
149 /* For conds (if-then and if-then-else statements). */
152 /* Label for the end of the if construct.
153 There is none if EXITFLAG was not set
154 and no `else' has been seen yet. */
156 /* Label for the end of this alternative.
157 This may be the end of the if or the next else/elseif. */
163 /* Label at the top of the loop; place to loop back to. */
165 /* Label at the end of the whole construct. */
167 /* Label before a jump that branches to the end of the whole
168 construct. This is where destructors go if any. */
170 /* Label for `continue' statement to jump to;
171 this is in front of the stepper of the loop. */
174 /* For variable binding contours. */
177 /* Sequence number of this binding contour within the function,
178 in order of entry. */
179 int block_start_count;
180 /* Nonzero => value to restore stack to on exit. */
182 /* The NOTE that starts this contour.
183 Used by expand_goto to check whether the destination
184 is within each contour or not. */
186 /* Innermost containing binding contour that has a stack level. */
187 struct nesting *innermost_stack_block;
188 /* List of cleanups to be run on exit from this contour.
189 This is a list of expressions to be evaluated.
190 The TREE_PURPOSE of each link is the ..._DECL node
191 which the cleanup pertains to. */
193 /* List of cleanup-lists of blocks containing this block,
194 as they were at the locus where this block appears.
195 There is an element for each containing block,
196 ordered innermost containing block first.
197 The tail of this list can be 0,
198 if all remaining elements would be empty lists.
199 The element's TREE_VALUE is the cleanup-list of that block,
200 which may be null. */
202 /* Chain of labels defined inside this binding contour.
203 For contours that have stack levels or cleanups. */
204 struct label_chain *label_chain;
205 /* Number of function calls seen, as of start of this block. */
206 int n_function_calls;
207 /* Nonzero if this is associated with a EH region. */
208 int exception_region;
209 /* The saved target_temp_slot_level from our outer block.
210 We may reset target_temp_slot_level to be the level of
211 this block, if that is done, target_temp_slot_level
212 reverts to the saved target_temp_slot_level at the very end of the block. */
214 int block_target_temp_slot_level;
215 /* True if we are currently emitting insns in an area of
216 output code that is controlled by a conditional
217 expression. This is used by the cleanup handling code to
218 generate conditional cleanup actions. */
219 int conditional_code;
220 /* A place to move the start of the exception region for any
221 of the conditional cleanups, must be at the end or after
222 the start of the last unconditional cleanup, and before any
223 conditional branch points. */
224 rtx last_unconditional_cleanup;
225 /* When in a conditional context, this is the specific
226 cleanup list associated with last_unconditional_cleanup,
227 where we place the conditionalized cleanups. */
230 /* For switch (C) or case (Pascal) statements,
231 and also for dummies (see `expand_start_case_dummy'). */
234 /* The insn after which the case dispatch should finally
235 be emitted. Zero for a dummy. */
237 /* A list of case labels; it is first built as an AVL tree.
238 During expand_end_case, this is converted to a list, and may be
239 rearranged into a nearly balanced binary tree. */
240 struct case_node *case_list;
241 /* Label to jump to if no case matches. */
243 /* The expression to be dispatched on. */
245 /* Type that INDEX_EXPR should be converted to. */
247 /* Name of this kind of statement, for warnings. */
248 const char *printname;
249 /* Used to save no_line_numbers till we see the first case label.
250 We set this to -1 when we see the first case label in this case statement. */
252 int line_number_status;
257 /* Allocate and return a new `struct nesting' from stmt_obstack.
   The memory is reclaimed again by POPSTACK below via obstack_free.  */
259 #define ALLOC_NESTING() \
260 (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))
262 /* Pop the nesting stack element by element until we pop off
263 the element which is at the top of STACK.
264 Update all the other stacks, popping off elements from them
265 as we pop them from nesting_stack.

   Each popped element is returned to stmt_obstack with obstack_free,
   which also releases anything allocated on that obstack after it.
   NOTE(review): the local variable is named `this', so this macro is
   not usable from C++.  */
267 #define POPSTACK(STACK) \
268 do { struct nesting *target = STACK; \
269 struct nesting *this; \
270 do { this = nesting_stack; \
271 if (loop_stack == this) \
272 loop_stack = loop_stack->next; \
273 if (cond_stack == this) \
274 cond_stack = cond_stack->next; \
275 if (block_stack == this) \
276 block_stack = block_stack->next; \
277 if (stack_block_stack == this) \
278 stack_block_stack = stack_block_stack->next; \
279 if (case_stack == this) \
280 case_stack = case_stack->next; \
281 nesting_depth = nesting_stack->depth - 1; \
282 nesting_stack = this->all; \
283 obstack_free (&stmt_obstack, this); } \
284 while (this != target); } while (0)
286 /* In some cases it is impossible to generate code for a forward goto
287 until the label definition is seen. This happens when it may be necessary
288 for the goto to reset the stack pointer: we don't yet know how to do that.
289 So expand_goto puts an entry on this fixup list.
290 Each time a binding contour that resets the stack is exited,
292 If the target label has now been defined, we can insert the proper code. */
296 /* Points to following fixup. */
297 struct goto_fixup *next;
298 /* Points to the insn before the jump insn.
299 If more code must be inserted, it goes after this insn. */
301 /* The LABEL_DECL that this jump is jumping to, or 0
302 for break, continue or return. */
304 /* The BLOCK for the place where this goto was found. */
306 /* The CODE_LABEL rtx that this is jumping to. */
308 /* Number of binding contours started in current function
309 before the label reference. */
310 int block_start_count;
311 /* The outermost stack level that should be restored for this jump.
312 Each time a binding contour that resets the stack is exited,
313 if the target label is *not* yet defined, this slot is updated. */
315 /* List of lists of cleanup expressions to be run by this goto.
316 There is one element for each block that this goto is within.
317 The tail of this list can be 0,
318 if all remaining elements would be empty.
319 The TREE_VALUE contains the cleanup list of that block as of the
320 time this goto was seen.
321 The TREE_ADDRESSABLE flag is 1 for a block that has been exited. */
322 tree cleanup_list_list;
325 /* Within any binding contour that must restore a stack level,
326 all labels are recorded with a chain of these structures. */
330 /* Points to following fixup. */
331 struct label_chain *next;
337 /* Chain of all pending binding contours. */
338 struct nesting *x_block_stack;
340 /* If any new stacks are added here, add them to POPSTACKS too. */
342 /* Chain of all pending binding contours that restore stack levels or have cleanups. */
344 struct nesting *x_stack_block_stack;
346 /* Chain of all pending conditional statements. */
347 struct nesting *x_cond_stack;
349 /* Chain of all pending loops. */
350 struct nesting *x_loop_stack;
352 /* Chain of all pending case or switch statements. */
353 struct nesting *x_case_stack;
355 /* Separate chain including all of the above,
356 chained through the `all' field. */
357 struct nesting *x_nesting_stack;
359 /* Number of entries on nesting_stack now. */
362 /* Number of binding contours started so far in this function. */
363 int x_block_start_count;
365 /* Each time we expand an expression-statement,
366 record the expr's type and its RTL value here. */
367 tree x_last_expr_type;
368 rtx x_last_expr_value;
370 /* Nonzero if within a ({...}) grouping, in which case we must
371 always compute a value for each expr-stmt in case it is the last one. */
372 int x_expr_stmts_for_value;
374 /* Filename and line number of last line-number note,
375 whether we actually emitted it or not. */
376 const char *x_emit_filename;
379 struct goto_fixup *x_goto_fixup_chain;
/* Accessors for the statement-expansion state of the function
   currently being compiled (cfun->stmt, a `struct stmt_status').  */
382 #define block_stack (cfun->stmt->x_block_stack)
383 #define stack_block_stack (cfun->stmt->x_stack_block_stack)
384 #define cond_stack (cfun->stmt->x_cond_stack)
385 #define loop_stack (cfun->stmt->x_loop_stack)
386 #define case_stack (cfun->stmt->x_case_stack)
387 #define nesting_stack (cfun->stmt->x_nesting_stack)
388 #define nesting_depth (cfun->stmt->x_nesting_depth)
389 #define current_block_start_count (cfun->stmt->x_block_start_count)
390 #define last_expr_type (cfun->stmt->x_last_expr_type)
391 #define last_expr_value (cfun->stmt->x_last_expr_value)
392 #define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
393 #define emit_filename (cfun->stmt->x_emit_filename)
394 #define emit_lineno (cfun->stmt->x_emit_lineno)
395 #define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)
397 /* Non-zero if we are using EH (exception handling) to run cleanups;
   set to 1 by using_eh_for_cleanups below.  */
398 static int using_eh_for_cleanups_p = 0;
/* Forward declarations of the static helper functions defined later
   in this file.  (Two of the declarations below are continued on
   lines elided from this extract.)  */
400 static int n_occurrences PARAMS ((int, const char *));
401 static void expand_goto_internal PARAMS ((tree, rtx, rtx));
402 static int expand_fixup PARAMS ((tree, rtx, rtx));
403 static rtx expand_nl_handler_label PARAMS ((rtx, rtx));
404 static void expand_nl_goto_receiver PARAMS ((void));
405 static void expand_nl_goto_receivers PARAMS ((struct nesting *));
406 static void fixup_gotos PARAMS ((struct nesting *, rtx, tree,
408 static void expand_null_return_1 PARAMS ((rtx, int));
409 static void expand_value_return PARAMS ((rtx));
410 static int tail_recursion_args PARAMS ((tree, tree));
411 static void expand_cleanups PARAMS ((tree, tree, int, int));
412 static void check_seenlabel PARAMS ((void));
413 static void do_jump_if_equal PARAMS ((rtx, rtx, rtx, int));
414 static int estimate_case_costs PARAMS ((case_node_ptr));
415 static void group_case_nodes PARAMS ((case_node_ptr));
416 static void balance_case_nodes PARAMS ((case_node_ptr *,
418 static int node_has_low_bound PARAMS ((case_node_ptr, tree));
419 static int node_has_high_bound PARAMS ((case_node_ptr, tree));
420 static int node_is_bounded PARAMS ((case_node_ptr, tree));
421 static void emit_jump_if_reachable PARAMS ((rtx));
422 static void emit_case_nodes PARAMS ((rtx, case_node_ptr, rtx, tree));
423 static struct case_node *case_tree2list PARAMS ((case_node *, case_node *));
424 static void mark_cond_nesting PARAMS ((struct nesting *));
425 static void mark_loop_nesting PARAMS ((struct nesting *));
426 static void mark_block_nesting PARAMS ((struct nesting *));
427 static void mark_case_nesting PARAMS ((struct nesting *));
428 static void mark_case_node PARAMS ((struct case_node *));
429 static void mark_goto_fixup PARAMS ((struct goto_fixup *));
430 static void free_case_nodes PARAMS ((case_node_ptr));
433 using_eh_for_cleanups ()
435 using_eh_for_cleanups_p = 1;
438 /* Mark N (known to be a cond-nesting) for GC. */
441 mark_cond_nesting (n)
446 ggc_mark_rtx (n->exit_label);
447 ggc_mark_rtx (n->data.cond.endif_label);
448 ggc_mark_rtx (n->data.cond.next_label);
454 /* Mark N (known to be a loop-nesting) for GC. */
457 mark_loop_nesting (n)
463 ggc_mark_rtx (n->exit_label);
464 ggc_mark_rtx (n->data.loop.start_label);
465 ggc_mark_rtx (n->data.loop.end_label);
466 ggc_mark_rtx (n->data.loop.alt_end_label);
467 ggc_mark_rtx (n->data.loop.continue_label);
473 /* Mark N (known to be a block-nesting) for GC. */
476 mark_block_nesting (n)
481 struct label_chain *l;
483 ggc_mark_rtx (n->exit_label);
484 ggc_mark_rtx (n->data.block.stack_level);
485 ggc_mark_rtx (n->data.block.first_insn);
486 ggc_mark_tree (n->data.block.cleanups);
487 ggc_mark_tree (n->data.block.outer_cleanups);
489 for (l = n->data.block.label_chain; l != NULL; l = l->next)
492 ggc_mark_tree (l->label);
495 ggc_mark_rtx (n->data.block.last_unconditional_cleanup);
497 /* ??? cleanup_ptr never points outside the stack, does it? */
503 /* Mark N (known to be a case-nesting) for GC. */
506 mark_case_nesting (n)
511 ggc_mark_rtx (n->exit_label);
512 ggc_mark_rtx (n->data.case_stmt.start);
514 ggc_mark_tree (n->data.case_stmt.default_label);
515 ggc_mark_tree (n->data.case_stmt.index_expr);
516 ggc_mark_tree (n->data.case_stmt.nominal_type);
518 mark_case_node (n->data.case_stmt.case_list);
531 ggc_mark_tree (c->low);
532 ggc_mark_tree (c->high);
533 ggc_mark_tree (c->code_label);
535 mark_case_node (c->right);
536 mark_case_node (c->left);
544 struct goto_fixup *g;
549 ggc_mark_rtx (g->before_jump);
550 ggc_mark_tree (g->target);
551 ggc_mark_tree (g->context);
552 ggc_mark_rtx (g->target_rtl);
553 ggc_mark_rtx (g->stack_level);
554 ggc_mark_tree (g->cleanup_list_list);
560 /* Clear out all parts of the state in F that can safely be discarded
561 after the function has been compiled, to let garbage collection
562 reclaim the memory. */
568 /* We're about to free the function obstack. If we hold pointers to
569 things allocated there, then we'll try to mark them when we do
570 GC. So, we clear them out here explicitly. */
580 struct stmt_status *p;
585 mark_block_nesting (p->x_block_stack);
586 mark_cond_nesting (p->x_cond_stack);
587 mark_loop_nesting (p->x_loop_stack);
588 mark_case_nesting (p->x_case_stack);
590 ggc_mark_tree (p->x_last_expr_type);
591 /* last_expr_value is only valid if last_expr_type is nonzero. */
592 if (p->x_last_expr_type)
593 ggc_mark_rtx (p->x_last_expr_value);
595 mark_goto_fixup (p->x_goto_fixup_chain);
601 gcc_obstack_init (&stmt_obstack);
605 init_stmt_for_function ()
607 cfun->stmt = (struct stmt_status *) xmalloc (sizeof (struct stmt_status));
609 /* We are not currently within any block, conditional, loop or case. */
611 stack_block_stack = 0;
618 current_block_start_count = 0;
620 /* No gotos have been expanded yet. */
621 goto_fixup_chain = 0;
623 /* We are not processing a ({...}) grouping. */
624 expr_stmts_for_value = 0;
626 last_expr_value = NULL_RTX;
629 /* Return nonzero if anything is pushed on the loop, condition, or case
634 return cond_stack || loop_stack || case_stack;
637 /* Record the current file and line. Called from emit_line_note. */
639 set_file_and_line_for_stmt (file, line)
643 /* If we're outputting an inline function, and we add a line note,
644 there may be no CFUN->STMT information. So, there's no need to
648 emit_filename = file;
653 /* Emit a no-op instruction. */
660 last_insn = get_last_insn ();
662 && (GET_CODE (last_insn) == CODE_LABEL
663 || (GET_CODE (last_insn) == NOTE
664 && prev_real_insn (last_insn) == 0)))
665 emit_insn (gen_nop ());
668 /* Return the rtx-label that corresponds to a LABEL_DECL,
669 creating it if necessary. */
675 if (TREE_CODE (label) != LABEL_DECL)
678 if (!DECL_RTL_SET_P (label))
679 SET_DECL_RTL (label, gen_label_rtx ());
681 return DECL_RTL (label);
685 /* Add an unconditional jump to LABEL as the next sequential instruction. */
691 do_pending_stack_adjust ();
692 emit_jump_insn (gen_jump (label));
696 /* Emit code to jump to the address
697 specified by the pointer expression EXP. */
700 expand_computed_goto (exp)
703 rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
705 #ifdef POINTERS_EXTEND_UNSIGNED
706 x = convert_memory_address (Pmode, x);
710 /* Be sure the function is executable. */
711 if (current_function_check_memory_usage)
712 emit_library_call (chkr_check_exec_libfunc, LCT_CONST_MAKE_BLOCK,
713 VOIDmode, 1, x, ptr_mode);
715 do_pending_stack_adjust ();
716 emit_indirect_jump (x);
718 current_function_has_computed_jump = 1;
721 /* Handle goto statements and the labels that they can go to. */
723 /* Specify the location in the RTL code of a label LABEL,
724 which is a LABEL_DECL tree node.
726 This is used for the kind of label that the user can jump to with a
727 goto statement, and for alternatives of a switch or case statement.
728 RTL labels generated for loops and conditionals don't go through here;
729 they are generated directly at the RTL level, by other functions below.
731 Note that this has nothing to do with defining label *names*.
732 Languages vary in how they do that and what that even means. */
738 struct label_chain *p;
740 do_pending_stack_adjust ();
741 emit_label (label_rtx (label));
742 if (DECL_NAME (label))
743 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
745 if (stack_block_stack != 0)
747 p = (struct label_chain *) ggc_alloc (sizeof (struct label_chain));
748 p->next = stack_block_stack->data.block.label_chain;
749 stack_block_stack->data.block.label_chain = p;
754 /* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
755 from nested functions. */
758 declare_nonlocal_label (label)
761 rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
763 nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
764 LABEL_PRESERVE_P (label_rtx (label)) = 1;
765 if (nonlocal_goto_handler_slots == 0)
767 emit_stack_save (SAVE_NONLOCAL,
768 &nonlocal_goto_stack_level,
769 PREV_INSN (tail_recursion_reentry));
771 nonlocal_goto_handler_slots
772 = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
775 /* Generate RTL code for a `goto' statement with target label LABEL.
776 LABEL should be a LABEL_DECL tree node that was or will later be
777 defined with `expand_label'. */
785 /* Check for a nonlocal goto to a containing function. */
786 context = decl_function_context (label);
787 if (context != 0 && context != current_function_decl)
789 struct function *p = find_function_data (context);
790 rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
791 rtx handler_slot, static_chain, save_area, insn;
794 /* Find the corresponding handler slot for this label. */
795 handler_slot = p->x_nonlocal_goto_handler_slots;
796 for (link = p->x_nonlocal_labels; TREE_VALUE (link) != label;
797 link = TREE_CHAIN (link))
798 handler_slot = XEXP (handler_slot, 1);
799 handler_slot = XEXP (handler_slot, 0);
801 p->has_nonlocal_label = 1;
802 current_function_has_nonlocal_goto = 1;
803 LABEL_REF_NONLOCAL_P (label_ref) = 1;
805 /* Copy the rtl for the slots so that they won't be shared in
806 case the virtual stack vars register gets instantiated differently
807 in the parent than in the child. */
809 static_chain = copy_to_reg (lookup_static_chain (label));
811 /* Get addr of containing function's current nonlocal goto handler,
812 which will do any cleanups and then jump to the label. */
813 handler_slot = copy_to_reg (replace_rtx (copy_rtx (handler_slot),
814 virtual_stack_vars_rtx,
817 /* Get addr of containing function's nonlocal save area. */
818 save_area = p->x_nonlocal_goto_stack_level;
820 save_area = replace_rtx (copy_rtx (save_area),
821 virtual_stack_vars_rtx, static_chain);
823 #if HAVE_nonlocal_goto
824 if (HAVE_nonlocal_goto)
825 emit_insn (gen_nonlocal_goto (static_chain, handler_slot,
826 save_area, label_ref));
830 /* Restore frame pointer for containing function.
831 This sets the actual hard register used for the frame pointer
832 to the location of the function's incoming static chain info.
833 The non-local goto handler will then adjust it to contain the
834 proper value and reload the argument pointer, if needed. */
835 emit_move_insn (hard_frame_pointer_rtx, static_chain);
836 emit_stack_restore (SAVE_NONLOCAL, save_area, NULL_RTX);
838 /* USE of hard_frame_pointer_rtx added for consistency;
839 not clear if really needed. */
840 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
841 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
842 emit_indirect_jump (handler_slot);
845 /* Search backwards to the jump insn and mark it as a
847 for (insn = get_last_insn ();
848 GET_CODE (insn) != JUMP_INSN;
849 insn = PREV_INSN (insn))
851 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
855 expand_goto_internal (label, label_rtx (label), NULL_RTX);
858 /* Generate RTL code for a `goto' statement with target label BODY.
859 LABEL should be a LABEL_REF.
860 LAST_INSN, if non-0, is the rtx we should consider as the last
861 insn emitted (for the purposes of cleaning up a return). */
864 expand_goto_internal (body, label, last_insn)
869 struct nesting *block;
872 if (GET_CODE (label) != CODE_LABEL)
875 /* If label has already been defined, we can tell now
876 whether and how we must alter the stack level. */
878 if (PREV_INSN (label) != 0)
880 /* Find the innermost pending block that contains the label.
881 (Check containment by comparing insn-uids.)
882 Then restore the outermost stack level within that block,
883 and do cleanups of all blocks contained in it. */
884 for (block = block_stack; block; block = block->next)
886 if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
888 if (block->data.block.stack_level != 0)
889 stack_level = block->data.block.stack_level;
890 /* Execute the cleanups for blocks we are exiting. */
891 if (block->data.block.cleanups != 0)
893 expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
894 do_pending_stack_adjust ();
900 /* Ensure stack adjust isn't done by emit_jump, as this
901 would clobber the stack pointer. This one should be
902 deleted as dead by flow. */
903 clear_pending_stack_adjust ();
904 do_pending_stack_adjust ();
906 /* Don't do this adjust if it's to the end label and this function
907 is to return with a depressed stack pointer. */
908 if (label == return_label
909 && (((TREE_CODE (TREE_TYPE (current_function_decl))
911 && (TYPE_RETURNS_STACK_DEPRESSED
912 (TREE_TYPE (current_function_decl))))))
915 emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
918 if (body != 0 && DECL_TOO_LATE (body))
919 error ("jump to `%s' invalidly jumps into binding contour",
920 IDENTIFIER_POINTER (DECL_NAME (body)));
922 /* Label not yet defined: may need to put this goto
923 on the fixup list. */
924 else if (! expand_fixup (body, label, last_insn))
926 /* No fixup needed. Record that the label is the target
927 of at least one goto that has no fixup. */
929 TREE_ADDRESSABLE (body) = 1;
935 /* Generate if necessary a fixup for a goto
936 whose target label in tree structure (if any) is TREE_LABEL
937 and whose target in rtl is RTL_LABEL.
939 If LAST_INSN is nonzero, we pretend that the jump appears
940 after insn LAST_INSN instead of at the current point in the insn stream.
942 The fixup will be used later to insert insns just before the goto.
943 Those insns will restore the stack level as appropriate for the
944 target label, and will (in the case of C++) also invoke any object
945 destructors which have to be invoked when we exit the scopes which
946 are exited by the goto.
948 Value is nonzero if a fixup is made. */
951 expand_fixup (tree_label, rtl_label, last_insn)
956 struct nesting *block, *end_block;
958 /* See if we can recognize which block the label will be output in.
959 This is possible in some very common cases.
960 If we succeed, set END_BLOCK to that block.
961 Otherwise, set it to 0. */
964 && (rtl_label == cond_stack->data.cond.endif_label
965 || rtl_label == cond_stack->data.cond.next_label))
966 end_block = cond_stack;
967 /* If we are in a loop, recognize certain labels which
968 are likely targets. This reduces the number of fixups
969 we need to create. */
971 && (rtl_label == loop_stack->data.loop.start_label
972 || rtl_label == loop_stack->data.loop.end_label
973 || rtl_label == loop_stack->data.loop.continue_label))
974 end_block = loop_stack;
978 /* Now set END_BLOCK to the binding level to which we will return. */
982 struct nesting *next_block = end_block->all;
985 /* First see if the END_BLOCK is inside the innermost binding level.
986 If so, then no cleanups or stack levels are relevant. */
987 while (next_block && next_block != block)
988 next_block = next_block->all;
993 /* Otherwise, set END_BLOCK to the innermost binding level
994 which is outside the relevant control-structure nesting. */
995 next_block = block_stack->next;
996 for (block = block_stack; block != end_block; block = block->all)
997 if (block == next_block)
998 next_block = next_block->next;
999 end_block = next_block;
1002 /* Does any containing block have a stack level or cleanups?
1003 If not, no fixup is needed, and that is the normal case
1004 (the only case, for standard C). */
1005 for (block = block_stack; block != end_block; block = block->next)
1006 if (block->data.block.stack_level != 0
1007 || block->data.block.cleanups != 0)
1010 if (block != end_block)
1012 /* Ok, a fixup is needed. Add a fixup to the list of such. */
1013 struct goto_fixup *fixup
1014 = (struct goto_fixup *) ggc_alloc (sizeof (struct goto_fixup));
1015 /* In case an old stack level is restored, make sure that comes
1016 after any pending stack adjust. */
1017 /* ?? If the fixup isn't to come at the present position,
1018 doing the stack adjust here isn't useful. Doing it with our
1019 settings at that location isn't useful either. Let's hope
1022 do_pending_stack_adjust ();
1023 fixup->target = tree_label;
1024 fixup->target_rtl = rtl_label;
1026 /* Create a BLOCK node and a corresponding matched set of
1027 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
1028 this point. The notes will encapsulate any and all fixup
1029 code which we might later insert at this point in the insn
1030 stream. Also, the BLOCK node will be the parent (i.e. the
1031 `SUPERBLOCK') of any other BLOCK nodes which we might create
1032 later on when we are expanding the fixup code.
1034 Note that optimization passes (including expand_end_loop)
1035 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
1036 as a placeholder. */
1039 register rtx original_before_jump
1040 = last_insn ? last_insn : get_last_insn ();
1045 block = make_node (BLOCK);
1046 TREE_USED (block) = 1;
1048 if (!cfun->x_whole_function_mode_p)
1049 insert_block (block);
1053 = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
1054 BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
1059 start = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
1060 if (cfun->x_whole_function_mode_p)
1061 NOTE_BLOCK (start) = block;
1062 fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_DELETED);
1063 end = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
1064 if (cfun->x_whole_function_mode_p)
1065 NOTE_BLOCK (end) = block;
1066 fixup->context = block;
1068 emit_insns_after (start, original_before_jump);
1071 fixup->block_start_count = current_block_start_count;
1072 fixup->stack_level = 0;
1073 fixup->cleanup_list_list
1074 = ((block->data.block.outer_cleanups
1075 || block->data.block.cleanups)
1076 ? tree_cons (NULL_TREE, block->data.block.cleanups,
1077 block->data.block.outer_cleanups)
1079 fixup->next = goto_fixup_chain;
1080 goto_fixup_chain = fixup;
1086 /* Expand any needed fixups in the outputmost binding level of the
1087 function. FIRST_INSN is the first insn in the function. */
1090 expand_fixups (first_insn)
1093 fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
1096 /* When exiting a binding contour, process all pending gotos requiring fixups.
1097 THISBLOCK is the structure that describes the block being exited.
1098 STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
1099 CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
1100 FIRST_INSN is the insn that began this contour.
1102 Gotos that jump out of this contour must restore the
1103 stack level and do the cleanups before actually jumping.
1105 DONT_JUMP_IN nonzero means report error there is a jump into this
1106 contour from before the beginning of the contour.
1107 This is also done if STACK_LEVEL is nonzero. */
1110 fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
1111 struct nesting *thisblock;
1117 register struct goto_fixup *f, *prev;
1119 /* F is the fixup we are considering; PREV is the previous one. */
1120 /* We run this loop in two passes so that cleanups of exited blocks
1121 are run first, and blocks that are exited are marked so
/* Pass 1: walk the global goto_fixup_chain and finalize any fixup whose
   target label has now been emitted.  */
1124 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1126 /* Test for a fixup that is inactive because it is already handled. */
1127 if (f->before_jump == 0)
1129 /* Delete inactive fixup from the chain, if that is easy to do. */
1131 prev->next = f->next;
1133 /* Has this fixup's target label been defined?
1134 If so, we can finalize it. */
/* PREV_INSN nonzero means the label rtx has been emitted into the
   insn stream, i.e. the label's definition has been seen.  */
1135 else if (PREV_INSN (f->target_rtl) != 0)
1137 register rtx cleanup_insns;
1139 /* If this fixup jumped into this contour from before the beginning
1140 of this contour, report an error. This code used to use
1141 the first non-label insn after f->target_rtl, but that's
1142 wrong since such can be added, by things like put_var_into_stack
1143 and have INSN_UIDs that are out of the range of the block. */
1144 /* ??? Bug: this does not detect jumping in through intermediate
1145 blocks that have stack levels or cleanups.
1146 It detects only a problem with the innermost block
1147 around the label. */
/* The INSN_UID comparisons check that the label lies inside this
   contour while the jump was emitted before the contour began.  */
1149 && (dont_jump_in || stack_level || cleanup_list)
1150 && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
1151 && INSN_UID (first_insn) > INSN_UID (f->before_jump)
1152 && ! DECL_ERROR_ISSUED (f->target))
1154 error_with_decl (f->target,
1155 "label `%s' used before containing binding contour");
1156 /* Prevent multiple errors for one label. */
1157 DECL_ERROR_ISSUED (f->target) = 1;
1160 /* We will expand the cleanups into a sequence of their own and
1161 then later on we will attach this new sequence to the insn
1162 stream just ahead of the actual jump insn. */
1166 /* Temporarily restore the lexical context where we will
1167 logically be inserting the fixup code. We do this for the
1168 sake of getting the debugging information right. */
1171 set_block (f->context);
1173 /* Expand the cleanups for blocks this jump exits. */
1174 if (f->cleanup_list_list)
1177 for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
1178 /* Marked elements correspond to blocks that have been closed.
1179 Do their cleanups. */
1180 if (TREE_ADDRESSABLE (lists)
1181 && TREE_VALUE (lists) != 0)
1183 expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
1184 /* Pop any pushes done in the cleanups,
1185 in case function is about to return. */
1186 do_pending_stack_adjust ();
1190 /* Restore stack level for the biggest contour that this
1191 jump jumps out of. */
/* A function whose type returns stack-depressed must not have its
   stack level restored on the path to the return label.  */
1193 && ! (f->target_rtl == return_label
1194 && ((TREE_CODE (TREE_TYPE (current_function_decl))
1196 && (TYPE_RETURNS_STACK_DEPRESSED
1197 (TREE_TYPE (current_function_decl))))))
1198 emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);
1200 /* Finish up the sequence containing the insns which implement the
1201 necessary cleanups, and then attach that whole sequence to the
1202 insn stream just ahead of the actual jump insn. Attaching it
1203 at that point insures that any cleanups which are in fact
1204 implicit C++ object destructions (which must be executed upon
1205 leaving the block) appear (to the debugger) to be taking place
1206 in an area of the generated code where the object(s) being
1207 destructed are still "in scope". */
1209 cleanup_insns = get_insns ();
1213 emit_insns_after (cleanup_insns, f->before_jump);
/* Pass 2: fixups whose label is still undefined.  Run the cleanups
   that belong to the block being exited now, before its locals go
   out of scope, and record the stack level for later restoration.  */
1219 /* For any still-undefined labels, do the cleanups for this block now.
1220 We must do this now since items in the cleanup list may go out
1221 of scope when the block ends. */
1222 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1223 if (f->before_jump != 0
1224 && PREV_INSN (f->target_rtl) == 0
1225 /* Label has still not appeared. If we are exiting a block with
1226 a stack level to restore, that started before the fixup,
1227 mark this stack level as needing restoration
1228 when the fixup is later finalized. */
1230 /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
1231 means the label is undefined. That's erroneous, but possible. */
1232 && (thisblock->data.block.block_start_count
1233 <= f->block_start_count))
1235 tree lists = f->cleanup_list_list;
1238 for (; lists; lists = TREE_CHAIN (lists))
1239 /* If the following elt. corresponds to our containing block
1240 then the elt. must be for this block. */
1241 if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
1245 set_block (f->context);
1246 expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
1247 do_pending_stack_adjust ();
1248 cleanup_insns = get_insns ();
1251 if (cleanup_insns != 0)
1253 = emit_insns_after (cleanup_insns, f->before_jump);
1255 f->cleanup_list_list = TREE_CHAIN (lists);
/* NOTE(review): presumably only reached when STACK_LEVEL is nonzero
   (guard elided in this excerpt) -- confirm against the full source.  */
1259 f->stack_level = stack_level;
1263 /* Return the number of times character C occurs in string S. */
/* Helper used by expand_asm_operands below to count ',' in constraint
   strings when checking that all operands agree on the number of
   alternatives.  */
1265 n_occurrences (c, s)
1275 /* Generate RTL for an asm statement (explicit assembler code).
1276 BODY is a STRING_CST node containing the assembler code text,
1277 or an ADDR_EXPR containing a STRING_CST. */
/* Plain asm is rejected under -fcheck-memory-usage, presumably because
   its memory accesses cannot be instrumented.  */
1283 if (current_function_check_memory_usage)
1285 error ("`asm' cannot be used in function where memory usage is checked");
/* The front end may hand us the STRING_CST wrapped in an ADDR_EXPR;
   strip the wrapper before reading the string.  */
1289 if (TREE_CODE (body) == ADDR_EXPR)
1290 body = TREE_OPERAND (body, 0);
/* Emit the assembler text verbatim as a bare ASM_INPUT insn.  */
1292 emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
1293 TREE_STRING_POINTER (body)));
1297 /* Generate RTL for an asm statement with arguments.
1298 STRING is the instruction template.
1299 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
1300 Each output or input has an expression in the TREE_VALUE and
1301 a constraint-string in the TREE_PURPOSE.
1302 CLOBBERS is a list of STRING_CST nodes each naming a hard register
1303 that is clobbered by this insn.
1305 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
1306 Some elements of OUTPUTS may be replaced with trees representing temporary
1307 values. The caller should copy those temporary values to the originally
1310 VOL nonzero means the insn is volatile; don't optimize it. */
1313 expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
1314 tree string, outputs, inputs, clobbers;
1316 const char *filename;
1319 rtvec argvec, constraints;
1321 int ninputs = list_length (inputs);
1322 int noutputs = list_length (outputs);
1327 /* Vector of RTX's of evaluated output operands. */
1328 rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1329 int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
1330 rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1331 enum machine_mode *inout_mode
1332 = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
1333 /* The insn we have emitted. */
/* Saved so CONCAT generation can be disabled and restored around
   operand expansion; ASM operands must never be CONCATs.  */
1335 int old_generating_concat_p = generating_concat_p;
1337 /* An ASM with no outputs needs to be treated as volatile, for now. */
1341 if (current_function_check_memory_usage)
1343 error ("`asm' cannot be used with `-fcheck-memory-usage'");
1347 #ifdef MD_ASM_CLOBBERS
1348 /* Sometimes we wish to automatically clobber registers across an asm.
1349 Case in point is when the i386 backend moved from cc0 to a hard reg --
1350 maintaining source-level compatability means automatically clobbering
1351 the flags register. */
1352 MD_ASM_CLOBBERS (clobbers);
1355 if (current_function_check_memory_usage)
1357 error ("`asm' cannot be used in function where memory usage is checked");
1361 /* Count the number of meaningful clobbered registers, ignoring what
1362 we would ignore later. */
1364 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1366 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
/* decode_reg_name: >= 0 is a hard register number; -4 is `memory'
   (see the clobber-emission loop near the end of this function).  */
1368 i = decode_reg_name (regname);
1369 if (i >= 0 || i == -4)
1372 error ("unknown register name `%s' in `asm'", regname);
1377 /* Check that the number of alternatives is constant across all
1379 if (outputs || inputs)
/* The alternative count is taken from the first operand's constraint;
   every other operand must have the same number of ','-separated
   alternatives.  */
1381 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1382 int nalternatives = n_occurrences (',', TREE_STRING_POINTER (tmp));
1385 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1387 error ("too many alternatives in `asm'");
1394 const char *constraint = TREE_STRING_POINTER (TREE_PURPOSE (tmp));
1396 if (n_occurrences (',', constraint) != nalternatives)
1398 error ("operand constraints for `asm' differ in number of alternatives");
/* Walk outputs first, then continue into the inputs list (`next'
   presumably holds the inputs chain -- elided in this excerpt).  */
1402 if (TREE_CHAIN (tmp))
1403 tmp = TREE_CHAIN (tmp);
1405 tmp = next, next = 0;
/* First pass over OUTPUTS: validate each constraint string and decide
   whether the operand may live in a register, in memory, or needs an
   intermediate temporary.  */
1409 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1411 tree val = TREE_VALUE (tail);
1412 tree type = TREE_TYPE (val);
1413 const char *constraint;
1421 /* If there's an erroneous arg, emit no insn. */
1422 if (TREE_TYPE (val) == error_mark_node)
1425 /* Make sure constraint has `=' and does not have `+'. Also, see
1426 if it allows any register. Be liberal on the latter test, since
1427 the worst that happens if we get it wrong is we issue an error
1430 constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));
1431 c_len = strlen (constraint);
1433 /* Allow the `=' or `+' to not be at the beginning of the string,
1434 since it wasn't explicitly documented that way, and there is a
1435 large body of code that puts it last. Swap the character to
1436 the front, so as not to uglify any place else. */
1440 if ((p = strchr (constraint, '=')) != NULL)
1442 if ((p = strchr (constraint, '+')) != NULL)
1445 error ("output operand constraint lacks `='");
1449 is_inout = *p == '+';
1453 /* Have to throw away this constraint string and get a new one. */
1454 char *buf = alloca (c_len + 1)；
1457 memcpy (buf + 1, constraint, j);
1458 memcpy (buf + 1 + j, p + 1, c_len - j); /* not -j-1 - copy null */
1459 constraint = ggc_alloc_string (buf, c_len);
1463 "output constraint `%c' for operand %d is not at the beginning",
1467 /* Make sure we can specify the matching operand. */
/* Matching constraints are single digits, so only operands 0-9 can
   be referenced by an in-out (`+') output.  */
1468 if (is_inout && i > 9)
1470 error ("output operand constraint %d contains `+'", i);
1474 for (j = 1; j < c_len; j++)
1475 switch (constraint[j])
1479 error ("operand constraint contains '+' or '=' at illegal position.");
1483 if (i + 1 == ninputs + noutputs)
1485 error ("`%%' constraint used with last operand");
1490 case '?': case '!': case '*': case '&': case '#':
1491 case 'E': case 'F': case 'G': case 'H':
1492 case 's': case 'i': case 'n':
1493 case 'I': case 'J': case 'K': case 'L': case 'M':
1494 case 'N': case 'O': case 'P': case ',':
1497 case '0': case '1': case '2': case '3': case '4':
1498 case '5': case '6': case '7': case '8': case '9':
1499 error ("matching constraint not valid in output operand");
1502 case 'V': case 'm': case 'o':
1507 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1508 excepting those that expand_call created. So match memory
1523 if (! ISALPHA (constraint[j]))
1525 error ("invalid punctuation `%c' in constraint",
1529 if (REG_CLASS_FROM_LETTER (constraint[j]) != NO_REGS)
1531 #ifdef EXTRA_CONSTRAINT
1534 /* Otherwise we can't assume anything about the nature of
1535 the constraint except that it isn't purely registers.
1536 Treat it like "g" and hope for the best. */
1544 /* If an output operand is not a decl or indirect ref and our constraint
1545 allows a register, make a temporary to act as an intermediate.
1546 Make the asm insn write into that, then our caller will copy it to
1547 the real output operand. Likewise for promoted variables. */
1549 generating_concat_p = 0;
1551 real_output_rtx[i] = NULL_RTX;
1552 if ((TREE_CODE (val) == INDIRECT_REF
1555 && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
1556 && ! (GET_CODE (DECL_RTL (val)) == REG
1557 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
1562 mark_addressable (TREE_VALUE (tail));
/* EXPAND_MEMORY_USE_WO: expand for write-only access.  */
1565 = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode,
1566 EXPAND_MEMORY_USE_WO);
1568 if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
1569 error ("output number %d not directly addressable", i);
1570 if ((! allows_mem && GET_CODE (output_rtx[i]) == MEM)
1571 || GET_CODE (output_rtx[i]) == CONCAT)
/* Write into a fresh pseudo instead, and remember the real
   destination so we can copy back after the asm is emitted.  */
1573 real_output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1574 output_rtx[i] = gen_reg_rtx (GET_MODE (output_rtx[i]));
1576 emit_move_insn (output_rtx[i], real_output_rtx[i]);
1581 output_rtx[i] = assign_temp (type, 0, 0, 1);
1582 TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
1585 generating_concat_p = old_generating_concat_p;
/* Record in-out (`+') operands so they can be duplicated as inputs
   further below.  */
1589 inout_mode[ninout] = TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)));
1590 inout_opnum[ninout++] = i;
1595 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1597 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1601 /* Make vectors for the expression-rtx and constraint strings. */
1603 argvec = rtvec_alloc (ninputs);
1604 constraints = rtvec_alloc (ninputs);
/* Build the ASM_OPERANDS rtx; ARGVEC and CONSTRAINTS are filled in by
   the input loop below.  Its mode is that of output 0, or VOIDmode
   when there are no outputs.  */
1606 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
1607 : GET_MODE (output_rtx[0])),
1608 TREE_STRING_POINTER (string),
1609 empty_string, 0, argvec, constraints,
1612 MEM_VOLATILE_P (body) = vol;
1614 /* Eval the inputs and put them into ARGVEC.
1615 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1618 for (tail = inputs; tail; tail = TREE_CHAIN (tail))
1621 int allows_reg = 0, allows_mem = 0;
1622 const char *constraint, *orig_constraint;
1626 /* If there's an erroneous arg, emit no insn,
1627 because the ASM_INPUT would get VOIDmode
1628 and that could cause a crash in reload. */
1629 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1632 /* ??? Can this happen, and does the error message make any sense? */
1633 if (TREE_PURPOSE (tail) == NULL_TREE)
1635 error ("hard register `%s' listed as input operand to `asm'",
1636 TREE_STRING_POINTER (TREE_VALUE (tail)) );
1640 constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));
1641 c_len = strlen (constraint);
1642 orig_constraint = constraint;
1644 /* Make sure constraint has neither `=', `+', nor '&'. */
1646 for (j = 0; j < c_len; j++)
1647 switch (constraint[j])
1649 case '+': case '=': case '&':
1650 if (constraint == orig_constraint)
1652 error ("input operand constraint contains `%c'",
1659 if (constraint == orig_constraint
1660 && i + 1 == ninputs - ninout)
1662 error ("`%%' constraint used with last operand");
1667 case 'V': case 'm': case 'o':
1672 case '?': case '!': case '*': case '#':
1673 case 'E': case 'F': case 'G': case 'H':
1674 case 's': case 'i': case 'n':
1675 case 'I': case 'J': case 'K': case 'L': case 'M':
1676 case 'N': case 'O': case 'P': case ',':
1679 /* Whether or not a numeric constraint allows a register is
1680 decided by the matching constraint, and so there is no need
1681 to do anything special with them. We must handle them in
1682 the default case, so that we don't unnecessarily force
1683 operands to memory. */
1684 case '0': case '1': case '2': case '3': case '4':
1685 case '5': case '6': case '7': case '8': case '9':
1686 if (constraint[j] >= '0' + noutputs)
1689 ("matching constraint references invalid operand number");
1693 /* Try and find the real constraint for this dup. */
1694 if ((j == 0 && c_len == 1)
1695 || (j == 1 && c_len == 2 && constraint[0] == '%'))
/* Walk the OUTPUTS list to the operand this digit refers to and
   continue scanning with its constraint string instead.  */
1699 for (j = constraint[j] - '0'; j > 0; --j)
1702 constraint = TREE_STRING_POINTER (TREE_PURPOSE (o));
1703 c_len = strlen (constraint);
1720 if (! ISALPHA (constraint[j]))
1722 error ("invalid punctuation `%c' in constraint",
1726 if (REG_CLASS_FROM_LETTER (constraint[j]) != NO_REGS)
1728 #ifdef EXTRA_CONSTRAINT
1731 /* Otherwise we can't assume anything about the nature of
1732 the constraint except that it isn't purely registers.
1733 Treat it like "g" and hope for the best. */
1741 if (! allows_reg && allows_mem)
1742 mark_addressable (TREE_VALUE (tail));
1744 op = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
1746 /* Never pass a CONCAT to an ASM. */
1747 generating_concat_p = 0;
1748 if (GET_CODE (op) == CONCAT)
1749 op = force_reg (GET_MODE (op), op);
/* If the expanded operand doesn't satisfy its constraint, coerce
   it: into a register if allowed, otherwise into memory.  */
1751 if (asm_operand_ok (op, constraint) <= 0)
1754 op = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))), op);
1755 else if (!allows_mem)
1756 warning ("asm operand %d probably doesn't match constraints", i);
1757 else if (CONSTANT_P (op))
1758 op = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1760 else if (GET_CODE (op) == REG
1761 || GET_CODE (op) == SUBREG
1762 || GET_CODE (op) == CONCAT)
/* Spill a register operand to a const-qualified stack temporary
   so it can be used as a memory operand.  */
1764 tree type = TREE_TYPE (TREE_VALUE (tail));
1765 tree qual_type = build_qualified_type (type,
1767 | TYPE_QUAL_CONST));
1768 rtx memloc = assign_temp (qual_type, 1, 1, 1);
1770 emit_move_insn (memloc, op);
1774 else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
1775 /* We won't recognize volatile memory as available a
1776 memory_operand at this point. Ignore it. */
1778 else if (queued_subexp_p (op))
1781 /* ??? Leave this only until we have experience with what
1782 happens in combine and elsewhere when constraints are
1784 warning ("asm operand %d probably doesn't match constraints", i);
1786 generating_concat_p = old_generating_concat_p;
1787 ASM_OPERANDS_INPUT (body, i) = op;
1789 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
1790 = gen_rtx_ASM_INPUT (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1795 /* Protect all the operands from the queue now that they have all been
1798 generating_concat_p = 0;
1800 for (i = 0; i < ninputs - ninout; i++)
1801 ASM_OPERANDS_INPUT (body, i)
1802 = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
1804 for (i = 0; i < noutputs; i++)
1805 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1807 /* For in-out operands, copy output rtx to input rtx. */
/* The matching inputs for `+' outputs occupy the trailing NINOUT
   slots of the input vector; their constraint is the digit string
   of the matched output operand.  */
1808 for (i = 0; i < ninout; i++)
1810 int j = inout_opnum[i];
1812 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
1814 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
1815 = gen_rtx_ASM_INPUT (inout_mode[i], digit_string (j));
1818 generating_concat_p = old_generating_concat_p;
1820 /* Now, for each output, construct an rtx
1821 (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
1822 ARGVEC CONSTRAINTS))
1823 If there is more than one, put them inside a PARALLEL. */
/* Simple case: exactly one SET, no PARALLEL wrapper needed.  */
1825 if (noutputs == 1 && nclobbers == 0)
1827 ASM_OPERANDS_OUTPUT_CONSTRAINT (body)
1828 = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
1829 insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1832 else if (noutputs == 0 && nclobbers == 0)
1834 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1835 insn = emit_insn (body);
/* General case: a PARALLEL holding one SET per output followed by
   one CLOBBER per clobbered register.  */
1846 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1848 /* For each output operand, store a SET. */
1849 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1851 XVECEXP (body, 0, i)
1852 = gen_rtx_SET (VOIDmode,
1854 gen_rtx_ASM_OPERANDS
1855 (GET_MODE (output_rtx[i]),
1856 TREE_STRING_POINTER (string),
1857 TREE_STRING_POINTER (TREE_PURPOSE (tail)),
1858 i, argvec, constraints,
1861 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1864 /* If there are no outputs (but there are some clobbers)
1865 store the bare ASM_OPERANDS into the PARALLEL. */
1868 XVECEXP (body, 0, i++) = obody;
1870 /* Store (clobber REG) for each clobbered register specified. */
1872 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1874 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1875 int j = decode_reg_name (regname);
1879 if (j == -3) /* `cc', which is not a register */
1882 if (j == -4) /* `memory', don't cache memory across asm */
/* A memory clobber is modeled as clobbering a SCRATCH MEM.  */
1884 XVECEXP (body, 0, i++)
1885 = gen_rtx_CLOBBER (VOIDmode,
1888 gen_rtx_SCRATCH (VOIDmode)));
1892 /* Ignore unknown register, error already signaled. */
1896 /* Use QImode since that's guaranteed to clobber just one reg. */
1897 XVECEXP (body, 0, i++)
1898 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (QImode, j));
1901 insn = emit_insn (body);
1904 /* For any outputs that needed reloading into registers, spill them
1905 back to where they belong. */
1906 for (i = 0; i < noutputs; ++i)
1907 if (real_output_rtx[i])
1908 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1913 /* Generate RTL to evaluate the expression EXP
1914 and remember it in case this is the VALUE in a ({... VALUE; }) constr. */
/* expr_stmts_for_value counts enclosing ({...}) statement expressions
   (incremented in expand_start_stmt_expr, decremented in
   expand_end_stmt_expr); nonzero means EXP's value may be needed.  */
1917 expand_expr_stmt (exp)
1920 /* If -W, warn about statements with no side effects,
1921 except for an explicit cast to void (e.g. for assert()), and
1922 except inside a ({...}) where they may be useful. */
1923 if (expr_stmts_for_value == 0 && exp != error_mark_node)
1925 if (! TREE_SIDE_EFFECTS (exp))
1927 if ((extra_warnings || warn_unused_value)
1928 && !(TREE_CODE (exp) == CONVERT_EXPR
1929 && VOID_TYPE_P (TREE_TYPE (exp))))
1930 warning_with_file_and_line (emit_filename, emit_lineno,
1931 "statement with no effect");
1933 else if (warn_unused_value)
1934 warn_if_unused_value (exp);
1937 /* If EXP is of function type and we are expanding statements for
1938 value, convert it to pointer-to-function. */
1939 if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
1940 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
1942 /* The call to `expand_expr' could cause last_expr_type and
1943 last_expr_value to get reset. Therefore, we set last_expr_value
1944 and last_expr_type *after* calling expand_expr. */
/* Target const0_rtx tells expand_expr we only want side effects when
   the value is not needed.  */
1945 last_expr_value = expand_expr (exp,
1946 (expr_stmts_for_value
1947 ? NULL_RTX : const0_rtx),
1949 last_expr_type = TREE_TYPE (exp);
1951 /* If all we do is reference a volatile value in memory,
1952 copy it to a register to be sure it is actually touched. */
1953 if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
1954 && TREE_THIS_VOLATILE (exp))
1956 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
1958 else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
1959 copy_to_reg (last_expr_value);
/* BLKmode volatile: can't copy to a register, so force a reference
   via a self-comparison that reload cannot delete.  */
1962 rtx lab = gen_label_rtx ();
1964 /* Compare the value with itself to reference it. */
1965 emit_cmp_and_jump_insns (last_expr_value, last_expr_value, EQ,
1966 expand_expr (TYPE_SIZE (last_expr_type),
1967 NULL_RTX, VOIDmode, 0),
1969 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT,
1975 /* If this expression is part of a ({...}) and is in memory, we may have
1976 to preserve temporaries. */
1977 preserve_temp_slots (last_expr_value);
1979 /* Free any temporaries used to evaluate this expression. Any temporary
1980 used as a result of this expression will already have been preserved
1987 /* Warn if EXP contains any computations whose results are not used.
1988 Return 1 if a warning is printed; 0 otherwise. */
1991 warn_if_unused_value (exp)
/* TREE_USED marks nodes already accounted for; no warning then.  */
1994 if (TREE_USED (exp))
1997 /* Don't warn about void constructs. This includes casting to void,
1998 void function calls, and statement expressions with a final cast
2000 if (VOID_TYPE_P (TREE_TYPE (exp)))
2003 /* If this is an expression with side effects, don't warn. */
2004 if (TREE_SIDE_EFFECTS (exp))
/* Dispatch on the tree code; codes grouped below are either
   inherently effectful or recurse into the operand whose value
   is the one that could be unused.  */
2007 switch (TREE_CODE (exp))
2009 case PREINCREMENT_EXPR:
2010 case POSTINCREMENT_EXPR:
2011 case PREDECREMENT_EXPR:
2012 case POSTDECREMENT_EXPR:
2017 case METHOD_CALL_EXPR:
2019 case TRY_CATCH_EXPR:
2020 case WITH_CLEANUP_EXPR:
2025 /* For a binding, warn if no side effect within it. */
2026 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2029 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2031 case TRUTH_ORIF_EXPR:
2032 case TRUTH_ANDIF_EXPR:
2033 /* In && or ||, warn if 2nd operand has no side effect. */
2034 return warn_if_unused_value (TREE_OPERAND (exp, 1));
/* COMPOUND_EXPR, presumably: check both halves of the comma
   expression (case label elided in this excerpt).  */
2037 if (TREE_NO_UNUSED_WARNING (exp))
2039 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
2041 /* Let people do `(foo (), 0)' without a warning. */
2042 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2044 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2048 case NON_LVALUE_EXPR:
2049 /* Don't warn about conversions not explicit in the user's program. */
2050 if (TREE_NO_UNUSED_WARNING (exp))
2052 /* Assignment to a cast usually results in a cast of a modify.
2053 Don't complain about that. There can be an arbitrary number of
2054 casts before the modify, so we must loop until we find the first
2055 non-cast expression and then test to see if that is a modify. */
2057 tree tem = TREE_OPERAND (exp, 0);
2059 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2060 tem = TREE_OPERAND (tem, 0);
2062 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2063 || TREE_CODE (tem) == CALL_EXPR)
2069 /* Don't warn about automatic dereferencing of references, since
2070 the user cannot control it. */
2071 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2072 return warn_if_unused_value (TREE_OPERAND (exp, 0));
2076 /* Referencing a volatile value is a side effect, so don't warn. */
2078 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2079 && TREE_THIS_VOLATILE (exp))
2082 /* If this is an expression which has no operands, there is no value
2083 to be unused. There are no such language-independent codes,
2084 but front ends may define such. */
2085 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2086 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
/* Fell through every exemption: emit the warning and report it.  */
2090 warning_with_file_and_line (emit_filename, emit_lineno,
2091 "value computed is not used");
2096 /* Clear out the memory of the last expression evaluated. */
2104 /* Begin a statement which will return a value.
2105 Return the RTL_EXPR for this statement expr.
2106 The caller must save that value and pass it to expand_end_stmt_expr. */
2109 expand_start_stmt_expr ()
2113 /* Make the RTL_EXPR node temporary, not momentary,
2114 so that rtl_expr_chain doesn't become garbage. */
2115 t = make_node (RTL_EXPR);
/* Flush pending stack adjustments before starting the sequence so the
   statement expression's insns begin from a clean stack state.  */
2116 do_pending_stack_adjust ();
2117 start_sequence_for_rtl_expr (t);
/* Nested ({...}) counter; read by expand_expr_stmt, decremented by
   expand_end_stmt_expr.  */
2119 expr_stmts_for_value++;
2123 /* Restore the previous state at the end of a statement that returns a value.
2124 Returns a tree node representing the statement's value and the
2125 insns to compute the value.
2127 The nodes of that expression have been freed by now, so we cannot use them.
2128 But we don't want to do that anyway; the expression has already been
2129 evaluated and now we just want to use the value. So generate a RTL_EXPR
2130 with the proper type and RTL value.
2132 If the last substatement was not an expression,
2133 return something with type `void'. */
2136 expand_end_stmt_expr (t)
/* Supply a void/constant-zero result when the statement expression
   produced no usable value.  */
2141 if (last_expr_type == 0)
2143 last_expr_type = void_type_node;
2144 last_expr_value = const0_rtx;
2146 else if (last_expr_value == 0)
2147 /* There are some cases where this can happen, such as when the
2148 statement is void type. */
2149 last_expr_value = const0_rtx;
2150 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2151 /* Remove any possible QUEUED. */
2152 last_expr_value = protect_from_queue (last_expr_value, 0);
/* Fill in T (the RTL_EXPR made by expand_start_stmt_expr) with the
   value's type, rtx, and the insn sequence that computes it.  */
2156 TREE_TYPE (t) = last_expr_type;
2157 RTL_EXPR_RTL (t) = last_expr_value;
2158 RTL_EXPR_SEQUENCE (t) = get_insns ();
2160 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2164 /* Don't consider deleting this expr or containing exprs at tree level. */
2165 TREE_SIDE_EFFECTS (t) = 1;
2166 /* Propagate volatility of the actual RTL expr. */
2167 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
/* Leave the statement-expression context entered by
   expand_start_stmt_expr.  */
2170 expr_stmts_for_value--;
2175 /* Generate RTL for the start of an if-then. COND is the expression
2176 whose truth should be tested.
2178 If EXITFLAG is nonzero, this conditional is visible to
2179 `exit_something'. */
2182 expand_start_cond (cond, exitflag)
2186 struct nesting *thiscond = ALLOC_NESTING ();
2188 /* Make an entry on cond_stack for the cond we are entering. */
2190 thiscond->next = cond_stack;
2191 thiscond->all = nesting_stack;
2192 thiscond->depth = ++nesting_depth;
/* next_label is where control goes when COND is false (the start of
   the else/elseif part, or the end of the if).  */
2193 thiscond->data.cond.next_label = gen_label_rtx ();
2194 /* Before we encounter an `else', we don't need a separate exit label
2195 unless there are supposed to be exit statements
2196 to exit this conditional. */
2197 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2198 thiscond->data.cond.endif_label = thiscond->exit_label;
2199 cond_stack = thiscond;
2200 nesting_stack = thiscond;
/* do_jump branches to next_label when COND is false and falls through
   into the then-clause otherwise.  */
2202 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2205 /* Generate RTL between then-clause and the elseif-clause
2206 of an if-then-elseif-.... */
2209 expand_start_elseif (cond)
/* Lazily create the shared endif label the first time any else/elseif
   needs to jump past the remaining clauses.  */
2212 if (cond_stack->data.cond.endif_label == 0)
2213 cond_stack->data.cond.endif_label = gen_label_rtx ();
/* End the previous then-clause by jumping to the endif, then define
   the label where the previous condition branched when false.  */
2214 emit_jump (cond_stack->data.cond.endif_label);
2215 emit_label (cond_stack->data.cond.next_label);
/* Fresh false-target for the new condition.  */
2216 cond_stack->data.cond.next_label = gen_label_rtx ();
2217 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2220 /* Generate RTL between the then-clause and the else-clause
2221 of an if-then-else. */
2224 expand_start_else ()
/* Make sure an endif label exists for the then-clause to jump over
   the else-clause.  */
2226 if (cond_stack->data.cond.endif_label == 0)
2227 cond_stack->data.cond.endif_label = gen_label_rtx ();
2229 emit_jump (cond_stack->data.cond.endif_label);
2230 emit_label (cond_stack->data.cond.next_label);
2231 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2234 /* After calling expand_start_else, turn this "else" into an "else if"
2235 by providing another condition. */
2238 expand_elseif (cond)
/* Re-arm next_label (cleared by expand_start_else) as the false target
   of the new condition.  */
2241 cond_stack->data.cond.next_label = gen_label_rtx ();
2242 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2245 /* Generate RTL for the end of an if-then.
2246 Pop the record for it off of cond_stack. */
2251 struct nesting *thiscond = cond_stack;
2253 do_pending_stack_adjust ();
/* Define whichever of the pending labels were created; either may be
   zero depending on whether an else/elseif or exit was seen.  */
2254 if (thiscond->data.cond.next_label)
2255 emit_label (thiscond->data.cond.next_label);
2256 if (thiscond->data.cond.endif_label)
2257 emit_label (thiscond->data.cond.endif_label);
2259 POPSTACK (cond_stack);
2263 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2264 loop should be exited by `exit_something'. This is a loop for which
2265 `expand_continue' will jump to the top of the loop.
2267 Make an entry on loop_stack to record the labels associated with
2271 expand_start_loop (exit_flag)
2274 register struct nesting *thisloop = ALLOC_NESTING ();
2276 /* Make an entry on loop_stack for the loop we are entering. */
2278 thisloop->next = loop_stack;
2279 thisloop->all = nesting_stack;
2280 thisloop->depth = ++nesting_depth;
2281 thisloop->data.loop.start_label = gen_label_rtx ();
2282 thisloop->data.loop.end_label = gen_label_rtx ();
2283 thisloop->data.loop.alt_end_label = 0;
/* By default `continue' targets the loop top; see
   expand_start_loop_continue_elsewhere for the other case.  */
2284 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2285 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2286 loop_stack = thisloop;
2287 nesting_stack = thisloop;
2289 do_pending_stack_adjust ();
/* NOTE_INSN_LOOP_BEG marks the loop for the loop optimizer.  */
2291 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
2292 emit_label (thisloop->data.loop.start_label);
2297 /* Like expand_start_loop but for a loop where the continuation point
2298 (for expand_continue_loop) will be specified explicitly. */
2301 expand_start_loop_continue_elsewhere (exit_flag)
2304 struct nesting *thisloop = expand_start_loop (exit_flag);
/* Replace the default continue target (the start label) with a fresh
   label; the caller places it via expand_loop_continue_here.  */
2305 loop_stack->data.loop.continue_label = gen_label_rtx ();
2309 /* Begin a null, aka do { } while (0) "loop". But since the contents
2310 of said loop can still contain a break, we must frob the loop nest. */
2313 expand_start_null_loop ()
2315 register struct nesting *thisloop = ALLOC_NESTING ();
2317 /* Make an entry on loop_stack for the loop we are entering. */
2319 thisloop->next = loop_stack;
2320 thisloop->all = nesting_stack;
2321 thisloop->depth = ++nesting_depth;
/* No real loop top: a deleted note serves as the start "label".  */
2322 thisloop->data.loop.start_label = emit_note (NULL, NOTE_INSN_DELETED);
2323 thisloop->data.loop.end_label = gen_label_rtx ();
2324 thisloop->data.loop.alt_end_label = NULL_RTX;
/* continue and break both target the end label, so the body executes
   exactly once.  */
2325 thisloop->data.loop.continue_label = thisloop->data.loop.end_label;
2326 thisloop->exit_label = thisloop->data.loop.end_label;
2327 loop_stack = thisloop;
2328 nesting_stack = thisloop;
2333 /* Specify the continuation point for a loop started with
2334 expand_start_loop_continue_elsewhere.
2335 Use this at the point in the code to which a continue statement
/* Emit the continue point for a loop started with
   expand_start_loop_continue_elsewhere: a NOTE_INSN_LOOP_CONT note
   followed by the loop's continue label.  */
2339 expand_loop_continue_here ()
2341 do_pending_stack_adjust ();
2342 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
2343 emit_label (loop_stack->data.loop.continue_label);
2346 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2347 Pop the block off of loop_stack. */
/* Body of expand_end_loop: finish the current loop.  Emits the jump
   back to start_label, a NOTE_INSN_LOOP_END note and the loop's end
   label, then pops loop_stack.  When optimizing, two transformations
   may first reorder the loop (see comments below).  NOTE(review):
   the function header and many interior lines are elided in this
   chunk; comments describe only what is visible.  */
2352 rtx start_label = loop_stack->data.loop.start_label;
2353 rtx insn = get_last_insn ();
2354 int needs_end_jump = 1;
2356 /* Mark the continue-point at the top of the loop if none elsewhere. */
2357 if (start_label == loop_stack->data.loop.continue_label)
2358 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2360 do_pending_stack_adjust ();
2362 /* If optimizing, perhaps reorder the loop.
2363 First, try to use a condjump near the end.
2364 expand_exit_loop_if_false ends loops with unconditional jumps,
2367 if (test) goto label;
2369 goto loop_stack->data.loop.end_label
2373 If we find such a pattern, we can end the loop earlier. */
/* Optimization 1: the loop ends with a conditional jump around an
   unconditional jump to the end label -- fold the exit test into
   the bottom of the loop.  */
2376 && GET_CODE (insn) == CODE_LABEL
2377 && LABEL_NAME (insn) == NULL
2378 && GET_CODE (PREV_INSN (insn)) == BARRIER)
2381 rtx jump = PREV_INSN (PREV_INSN (label));
2383 if (GET_CODE (jump) == JUMP_INSN
2384 && GET_CODE (PATTERN (jump)) == SET
2385 && SET_DEST (PATTERN (jump)) == pc_rtx
2386 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
2387 && (XEXP (SET_SRC (PATTERN (jump)), 0)
2388 == loop_stack->data.loop.end_label))
2392 /* The test might be complex and reference LABEL multiple times,
2393 like the loop in loop_iterations to set vtop. To handle this,
2395 insn = PREV_INSN (label);
2396 reorder_insns (label, label, start_label);
/* Walk backwards from the exit jump looking for the conditional
   branch to LABEL; line-number notes are ignored, and any other
   note, label, or non-matching jump disables the optimization.  */
2398 for (prev = PREV_INSN (jump);; prev = PREV_INSN (prev))
2400 /* We ignore line number notes, but if we see any other note,
2401 in particular NOTE_INSN_BLOCK_*, NOTE_INSN_EH_REGION_*,
2402 NOTE_INSN_LOOP_*, we disable this optimization. */
2403 if (GET_CODE (prev) == NOTE)
2405 if (NOTE_LINE_NUMBER (prev) < 0)
2409 if (GET_CODE (prev) == CODE_LABEL)
2411 if (GET_CODE (prev) == JUMP_INSN)
2413 if (GET_CODE (PATTERN (prev)) == SET
2414 && SET_DEST (PATTERN (prev)) == pc_rtx
2415 && GET_CODE (SET_SRC (PATTERN (prev))) == IF_THEN_ELSE
2416 && (GET_CODE (XEXP (SET_SRC (PATTERN (prev)), 1))
2418 && XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0) == label)
/* Found it: retarget the conditional branch and end the loop
   right after it.  */
2420 XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0)
2422 emit_note_after (NOTE_INSN_LOOP_END, prev);
2431 /* If the loop starts with a loop exit, roll that to the end where
2432 it will optimize together with the jump back.
2434 We look for the conditional branch to the exit, except that once
2435 we find such a branch, we don't look past 30 instructions.
2437 In more detail, if the loop presently looks like this (in pseudo-C):
2440 if (test) goto end_label;
2445 transform it to look like:
2451 if (test) goto end_label;
2452 goto newstart_label;
2455 Here, the `test' may actually consist of some reasonably complex
2456 code, terminating in a test. */
/* Optimization 2 entry condition (partially elided in this chunk).  */
2461 ! (GET_CODE (insn) == JUMP_INSN
2462 && GET_CODE (PATTERN (insn)) == SET
2463 && SET_DEST (PATTERN (insn)) == pc_rtx
2464 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2468 rtx last_test_insn = NULL_RTX;
2470 /* Scan insns from the top of the loop looking for a qualified
2471 conditional exit. */
2472 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2473 insn = NEXT_INSN (insn))
2475 if (GET_CODE (insn) == NOTE)
2478 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2479 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2480 /* The code that actually moves the exit test will
2481 carefully leave BLOCK notes in their original
2482 location. That means, however, that we can't debug
2483 the exit test itself. So, we refuse to move code
2484 containing BLOCK notes at low optimization levels. */
/* Track EH region nesting so a partial region is never moved.  */
2487 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2489 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
2493 /* We've come to the end of an EH region, but
2494 never saw the beginning of that region. That
2495 means that an EH region begins before the top
2496 of the loop, and ends in the middle of it. The
2497 existence of such a situation violates a basic
2498 assumption in this code, since that would imply
2499 that even when EH_REGIONS is zero, we might
2500 move code out of an exception region. */
2504 /* We must not walk into a nested loop. */
2505 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
2508 /* We already know this INSN is a NOTE, so there's no
2509 point in looking at it to see if it's a JUMP. */
/* Count real insns; stop scanning 30 insns past a candidate test.  */
2513 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2516 if (last_test_insn && num_insns > 30)
2520 /* We don't want to move a partial EH region. Consider:
2534 This isn't legal C++, but here's what it's supposed to
2535 mean: if cond() is true, stop looping. Otherwise,
2536 call bar, and keep looping. In addition, if cond
2537 throws an exception, catch it and keep looping. Such
2538 constructs are certainly legal in LISP.
2540 We should not move the `if (cond()) 0' test since then
2541 the EH-region for the try-block would be broken up.
2542 (In this case we would move the EH_BEG note for the `try'
2543 and `if cond()' but not the call to bar() or the
2546 So we don't look for tests within an EH region. */
2549 if (GET_CODE (insn) == JUMP_INSN
2550 && GET_CODE (PATTERN (insn)) == SET
2551 && SET_DEST (PATTERN (insn)) == pc_rtx)
2553 /* This is indeed a jump. */
2554 rtx dest1 = NULL_RTX;
2555 rtx dest2 = NULL_RTX;
2556 rtx potential_last_test;
2557 if (GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE)
2559 /* A conditional jump. */
2560 dest1 = XEXP (SET_SRC (PATTERN (insn)), 1);
2561 dest2 = XEXP (SET_SRC (PATTERN (insn)), 2);
2562 potential_last_test = insn;
2566 /* An unconditional jump. */
2567 dest1 = SET_SRC (PATTERN (insn));
2568 /* Include the BARRIER after the JUMP. */
2569 potential_last_test = NEXT_INSN (insn);
/* A jump to the loop's end label (or alternate end label set by
   expand_exit_loop_if_false) qualifies as the exit test.  */
2573 if (dest1 && GET_CODE (dest1) == LABEL_REF
2574 && ((XEXP (dest1, 0)
2575 == loop_stack->data.loop.alt_end_label)
2577 == loop_stack->data.loop.end_label)))
2579 last_test_insn = potential_last_test;
2583 /* If this was a conditional jump, there may be
2584 another label at which we should look. */
2591 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2593 /* We found one. Move everything from there up
2594 to the end of the loop, and add a jump into the loop
2595 to jump to there. */
2596 register rtx newstart_label = gen_label_rtx ();
2597 register rtx start_move = start_label;
2600 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2601 then we want to move this note also. */
2602 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2603 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2604 == NOTE_INSN_LOOP_CONT))
2605 start_move = PREV_INSN (start_move);
2607 emit_label_after (newstart_label, PREV_INSN (start_move));
2609 /* Actually move the insns. Start at the beginning, and
2610 keep copying insns until we've copied the
2612 for (insn = start_move; insn; insn = next_insn)
2614 /* Figure out which insn comes after this one. We have
2615 to do this before we move INSN. */
2616 if (insn == last_test_insn)
2617 /* We've moved all the insns. */
2618 next_insn = NULL_RTX;
2620 next_insn = NEXT_INSN (insn);
2622 if (GET_CODE (insn) == NOTE
2623 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2624 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2625 /* We don't want to move NOTE_INSN_BLOCK_BEGs or
2626 NOTE_INSN_BLOCK_ENDs because the correct generation
2627 of debugging information depends on these appearing
2628 in the same order in the RTL and in the tree
2629 structure, where they are represented as BLOCKs.
2630 So, we don't move block notes. Of course, moving
2631 the code inside the block is likely to make it
2632 impossible to debug the instructions in the exit
2633 test, but such is the price of optimization. */
2636 /* Move the INSN. */
2637 reorder_insns (insn, insn, get_last_insn ());
/* Jump into the moved exit test, then fall through to the new
   loop start.  */
2640 emit_jump_insn_after (gen_jump (start_label),
2641 PREV_INSN (newstart_label));
2642 emit_barrier_after (PREV_INSN (newstart_label));
2643 start_label = newstart_label;
/* Standard loop ending: jump back to the top, emit the LOOP_END
   note and the end label, pop the nesting entry.  */
2649 emit_jump (start_label);
2650 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2652 emit_label (loop_stack->data.loop.end_label);
2654 POPSTACK (loop_stack);
2659 /* Finish a null loop, aka do { } while (0). */
/* Finish a null loop (do { } while (0)): emit only the end label
   that `break' targets, then pop loop_stack.  No back-jump or
   LOOP_END note is needed since the loop never iterates.  */
2662 expand_end_null_loop ()
2664 do_pending_stack_adjust ();
2665 emit_label (loop_stack->data.loop.end_label);
2667 POPSTACK (loop_stack);
2672 /* Generate a jump to the current loop's continue-point.
2673 This is usually the top of the loop, but may be specified
2674 explicitly elsewhere. If not currently inside a loop,
2675 return 0 and do nothing; caller will print an error message. */
/* Jump to WHICHLOOP's continue point (the innermost loop if
   WHICHLOOP is not supplied -- the guarding `if' is elided in this
   chunk).  Per the comment above, returns 0 when no loop is active
   so the caller can report an error; the return statements are also
   elided here.  */
2678 expand_continue_loop (whichloop)
2679 struct nesting *whichloop;
2683 whichloop = loop_stack;
2686 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2691 /* Generate a jump to exit the current loop. If not currently inside a loop,
2692 return 0 and do nothing; caller will print an error message. */
/* Jump to WHICHLOOP's end label, exiting the loop (the innermost
   loop if WHICHLOOP is not supplied -- the guarding `if' and the
   return statements are elided in this chunk).  */
2695 expand_exit_loop (whichloop)
2696 struct nesting *whichloop;
2700 whichloop = loop_stack;
2703 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2707 /* Generate a conditional jump to exit the current loop if COND
2708 evaluates to zero. If not currently inside a loop,
2709 return 0 and do nothing; caller will print an error message. */
/* Emit a conditional exit from WHICHLOOP (innermost loop if not
   supplied) taken when COND is false: a jump over an unconditional
   branch to the loop's end label.  NOTE(review): the `if
   (whichloop == 0)' guard, return statements and closing brace are
   elided in this chunk.  */
2712 expand_exit_loop_if_false (whichloop, cond)
2713 struct nesting *whichloop;
2716 rtx label = gen_label_rtx ();
2721 whichloop = loop_stack;
2724 /* In order to handle fixups, we actually create a conditional jump
2725 around an unconditional branch to exit the loop. If fixups are
2726 necessary, they go before the unconditional branch. */
2728 do_jump (cond, NULL_RTX, label);
2729 last_insn = get_last_insn ();
/* If do_jump ended on a label, remember it as an alternate end
   label so expand_end_loop can still recognize the exit jump.  */
2730 if (GET_CODE (last_insn) == CODE_LABEL)
2731 whichloop->data.loop.alt_end_label = last_insn;
2732 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2739 /* Return nonzero if the loop nest is empty. Else return zero. */
/* Return nonzero if there is no loop currently on loop_stack
   (or no statement state at all).  */
2742 stmt_loop_nest_empty ()
2744 /* cfun->stmt can be NULL if we are building a call to get the
2745 EH context for a setjmp/longjmp EH target and the current
2746 function was a deferred inline function. */
2747 return (cfun->stmt == NULL || loop_stack == NULL);
2750 /* Return non-zero if we should preserve sub-expressions as separate
2751 pseudos. We never do so if we aren't optimizing. We always do so
2752 if -fexpensive-optimizations.
2754 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2755 the loop may still be a small one. */
/* Decide whether sub-expressions should be kept in separate pseudos:
   always under -fexpensive-optimizations, never when not optimizing
   or not inside a loop; otherwise only near the top of the current
   loop, judged by INSN_UID distance from the loop start label.
   NOTE(review): declarations, braces and early returns are elided
   in this chunk.  */
2758 preserve_subexpressions_p ()
2762 if (flag_expensive_optimizations)
2765 if (optimize == 0 || cfun == 0 || cfun->stmt == 0 || loop_stack == 0)
2768 insn = get_last_insn_anywhere ();
/* Heuristic: "early in the loop" means within ~3 insns per
   non-fixed register of the loop's start label.  */
2771 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2772 < n_non_fixed_regs * 3));
2776 /* Generate a jump to exit the current loop, conditional, binding contour
2777 or case statement. Not all such constructs are visible to this function,
2778 only those started with EXIT_FLAG nonzero. Individual languages use
2779 the EXIT_FLAG parameter to control which kinds of constructs you can
2782 If not currently inside anything that can be exited,
2783 return 0 and do nothing; caller will print an error message. */
/* Exit the innermost construct on nesting_stack that was started
   with EXIT_FLAG nonzero (loop, conditional, binding contour or
   case) by jumping to its exit_label.  Per the comment above,
   returns 0 when nothing is exitable; the return statements are
   elided in this chunk.  */
2786 expand_exit_something ()
2790 for (n = nesting_stack; n; n = n->all)
2791 if (n->exit_label != 0)
2793 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2800 /* Generate RTL to return from the current function, with no value.
2801 (That is, we do not do anything about returning any value.) */
/* Generate RTL to return from the current function with no value.
   Runs cleanups of any pending blocks by returning via goto when
   such cleanups exist.  */
2804 expand_null_return ()
2806 struct nesting *block = block_stack;
2807 rtx last_insn = get_last_insn ();
2809 /* If this function was declared to return a value, but we
2810 didn't, clobber the return registers so that they are not
2811 propagated live to the rest of the function. */
2812 clobber_return_register ();
2814 /* Does any pending block have cleanups? */
2815 while (block && block->data.block.cleanups == 0)
2816 block = block->next;
2818 /* If yes, use a goto to return, since that runs cleanups. */
2820 expand_null_return_1 (last_insn, block != 0);
2823 /* Generate RTL to return from the current function, with value VAL. */
/* Generate RTL to return value VAL from the current function.
   Copies VAL into the function's return location (promoting its
   mode if PROMOTE_FUNCTION_RETURN applies), then returns via
   expand_null_return_1 so pending cleanups run first.
   NOTE(review): the parameter declaration line, braces and an
   `#endif' appear elided in this chunk.  */
2826 expand_value_return (val)
2829 struct nesting *block = block_stack;
2830 rtx last_insn = get_last_insn ();
2831 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2833 /* Copy the value to the return location
2834 unless it's already there. */
2836 if (return_reg != val)
2838 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2839 #ifdef PROMOTE_FUNCTION_RETURN
/* Promote VAL to the mode the target expects for return values.  */
2840 int unsignedp = TREE_UNSIGNED (type);
2841 enum machine_mode old_mode
2842 = DECL_MODE (DECL_RESULT (current_function_decl));
2843 enum machine_mode mode
2844 = promote_mode (type, old_mode, &unsignedp, 1);
2846 if (mode != old_mode)
2847 val = convert_modes (mode, old_mode, val, unsignedp);
/* A PARALLEL return location (value split across registers) needs
   a group load rather than a single move.  */
2849 if (GET_CODE (return_reg) == PARALLEL)
2850 emit_group_load (return_reg, val, int_size_in_bytes (type),
2853 emit_move_insn (return_reg, val);
2856 /* Does any pending block have cleanups? */
2858 while (block && block->data.block.cleanups == 0)
2859 block = block->next;
2861 /* If yes, use a goto to return, since that runs cleanups.
2862 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2864 expand_null_return_1 (last_insn, block != 0);
2867 /* Output a return with no value. If LAST_INSN is nonzero,
2868 pretend that the return takes place after LAST_INSN.
2869 If USE_GOTO is nonzero then don't use a return instruction;
2870 go to the return label instead. This causes any cleanups
2871 of pending blocks to be executed normally. */
/* Shared tail of the return expanders.  If LAST_INSN is nonzero the
   return conceptually takes place after it (so cleanups emitted by
   expand_goto_internal land before the value move).  USE_GOTO
   nonzero forces a jump to the return label instead of a return
   insn, so cleanups of pending blocks run normally.
   NOTE(review): parameter declarations, braces and return
   statements are elided in this chunk.  */
2874 expand_null_return_1 (last_insn, use_goto)
2878 rtx end_label = cleanup_label ? cleanup_label : return_label;
2880 clear_pending_stack_adjust ();
2881 do_pending_stack_adjust ();
2884 /* PCC-struct return always uses an epilogue. */
2885 if (current_function_returns_pcc_struct || use_goto)
2888 end_label = return_label = gen_label_rtx ();
2889 expand_goto_internal (NULL_TREE, end_label, last_insn);
2893 /* Otherwise output a simple return-insn if one is available,
2894 unless it won't do the job. */
2896 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2898 emit_jump_insn (gen_return ());
2904 /* Otherwise jump to the epilogue. */
2905 expand_goto_internal (NULL_TREE, end_label, last_insn);
2908 /* Generate RTL to evaluate the expression RETVAL and return it
2909 from the current function. */
/* Generate RTL to evaluate RETVAL and return it from the current
   function, running pending cleanups first.  Handles: void returns,
   error nodes, distributing `return' into COND_EXPR arms to expose
   tail recursion, BLKmode aggregates returned in registers, and
   computing into a temporary when cleanups could clobber a hard
   return register.  NOTE(review): many lines of this function
   (declarations, braces, `if' headers, else-arms) are elided in
   this chunk; comments describe only what is visible.  */
2912 expand_return (retval)
2915 /* If there are any cleanups to be performed, then they will
2916 be inserted following LAST_INSN. It is desirable
2917 that the last_insn, for such purposes, should be the
2918 last insn before computing the return value. Otherwise, cleanups
2919 which call functions can clobber the return value. */
2920 /* ??? rms: I think that is erroneous, because in C++ it would
2921 run destructors on variables that might be used in the subsequent
2922 computation of the return value. */
2925 register rtx val = 0;
2929 /* If function wants no value, give it none. */
2930 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2932 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2934 expand_null_return ();
2938 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2939 /* This is not sufficient. We also need to watch for cleanups of the
2940 expression we are about to expand. Unfortunately, we cannot know
2941 if it has cleanups until we expand it, and we want to change how we
2942 expand it depending upon if we need cleanups. We can't win. */
2944 cleanups = any_pending_cleanups (1);
/* Extract the value expression (RETVAL_RHS) from the RETVAL tree.  */
2949 if (retval == error_mark_node)
2951 /* Treat this like a return of no value from a function that
2953 expand_null_return ();
2956 else if (TREE_CODE (retval) == RESULT_DECL)
2957 retval_rhs = retval;
2958 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2959 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2960 retval_rhs = TREE_OPERAND (retval, 1);
2961 else if (VOID_TYPE_P (TREE_TYPE (retval)))
2962 /* Recognize tail-recursive call to void function. */
2963 retval_rhs = retval;
2965 retval_rhs = NULL_TREE;
2967 /* Only use `last_insn' if there are cleanups which must be run. */
2968 if (cleanups || cleanup_label != 0)
2969 last_insn = get_last_insn ();
2971 /* Distribute return down conditional expr if either of the sides
2972 may involve tail recursion (see test below). This enhances the number
2973 of tail recursions we see. Don't do this always since it can produce
2974 sub-optimal code in some cases and we distribute assignments into
2975 conditional expressions when it would help. */
2977 if (optimize && retval_rhs != 0
2978 && frame_offset == 0
2979 && TREE_CODE (retval_rhs) == COND_EXPR
2980 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2981 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2983 rtx label = gen_label_rtx ();
/* Expand `return c ? a : b' as two separate returns, recursing on
   each arm so a tail call in either arm can be optimized.  */
2986 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2987 start_cleanup_deferral ();
2988 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2989 DECL_RESULT (current_function_decl),
2990 TREE_OPERAND (retval_rhs, 1));
2991 TREE_SIDE_EFFECTS (expr) = 1;
2992 expand_return (expr);
2995 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2996 DECL_RESULT (current_function_decl),
2997 TREE_OPERAND (retval_rhs, 2));
2998 TREE_SIDE_EFFECTS (expr) = 1;
2999 expand_return (expr);
3000 end_cleanup_deferral ();
3004 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3006 /* If the result is an aggregate that is being returned in one (or more)
3007 registers, load the registers here. The compiler currently can't handle
3008 copying a BLKmode value into registers. We could put this code in a
3009 more general area (for use by everyone instead of just function
3010 call/return), but until this feature is generally usable it is kept here
3011 (and in expand_call). The value must go into a pseudo in case there
3012 are cleanups that will clobber the real return register. */
3015 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3016 && GET_CODE (result_rtl) == REG)
3019 unsigned HOST_WIDE_INT bitpos, xbitpos;
3020 unsigned HOST_WIDE_INT big_endian_correction = 0;
3021 unsigned HOST_WIDE_INT bytes
3022 = int_size_in_bytes (TREE_TYPE (retval_rhs));
3023 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3024 unsigned int bitsize
3025 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
3026 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
3027 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
3028 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
3029 enum machine_mode tmpmode, result_reg_mode;
3033 expand_null_return ();
3037 /* Structures whose size is not a multiple of a word are aligned
3038 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
3039 machine, this means we must skip the empty high order bytes when
3040 calculating the bit offset. */
3041 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
3042 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
3045 /* Copy the structure BITSIZE bits at a time. */
3046 for (bitpos = 0, xbitpos = big_endian_correction;
3047 bitpos < bytes * BITS_PER_UNIT;
3048 bitpos += bitsize, xbitpos += bitsize)
3050 /* We need a new destination pseudo each time xbitpos is
3051 on a word boundary and when xbitpos == big_endian_correction
3052 (the first time through). */
3053 if (xbitpos % BITS_PER_WORD == 0
3054 || xbitpos == big_endian_correction)
3056 /* Generate an appropriate register. */
3057 dst = gen_reg_rtx (word_mode);
3058 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
3060 /* Clobber the destination before we move anything into it. */
3061 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
3064 /* We need a new source operand each time bitpos is on a word
3066 if (bitpos % BITS_PER_WORD == 0)
3067 src = operand_subword_force (result_val,
3068 bitpos / BITS_PER_WORD,
3071 /* Use bitpos for the source extraction (left justified) and
3072 xbitpos for the destination store (right justified). */
3073 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
3074 extract_bit_field (src, bitsize,
3075 bitpos % BITS_PER_WORD, 1,
3076 NULL_RTX, word_mode, word_mode,
3077 bitsize, BITS_PER_WORD),
3078 bitsize, BITS_PER_WORD);
3081 /* Find the smallest integer mode large enough to hold the
3082 entire structure and use that mode instead of BLKmode
3083 on the USE insn for the return register. */
3084 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3085 tmpmode != VOIDmode;
3086 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
3087 /* Have we found a large enough mode? */
3088 if (GET_MODE_SIZE (tmpmode) >= bytes)
3091 /* No suitable mode found. */
3092 if (tmpmode == VOIDmode)
3095 PUT_MODE (result_rtl, tmpmode);
3097 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
3098 result_reg_mode = word_mode;
3100 result_reg_mode = tmpmode;
3101 result_reg = gen_reg_rtx (result_reg_mode);
/* Assemble the word-sized pseudos into the final result register.  */
3104 for (i = 0; i < n_regs; i++)
3105 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
3108 if (tmpmode != result_reg_mode)
3109 result_reg = gen_lowpart (tmpmode, result_reg);
3111 expand_value_return (result_reg);
/* Cleanups pending and the result lives in a hard reg or PARALLEL:
   compute the value into a temporary first so the cleanups cannot
   clobber it, then return it.  */
3115 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3116 && (GET_CODE (result_rtl) == REG
3117 || (GET_CODE (result_rtl) == PARALLEL)))
3119 /* Calculate the return value into a temporary (usually a pseudo
3121 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
3122 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
3124 val = assign_temp (nt, 0, 0, 1);
3125 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
3126 val = force_not_mem (val);
3128 /* Return the calculated value, doing cleanups first. */
3129 expand_value_return (val);
3133 /* No cleanups or no hard reg used;
3134 calculate value into hard return reg. */
3135 expand_expr (retval, const0_rtx, VOIDmode, 0);
3137 expand_value_return (result_rtl);
3141 /* Return 1 if the end of the generated RTX is not a barrier.
3142 This means code already compiled can drop through. */
/* Return 1 if control can drop through the end of the RTL generated
   so far, i.e. the last non-NOTE insn is not a BARRIER.  */
3145 drop_through_at_end_p ()
3147 rtx insn = get_last_insn ();
/* Skip trailing notes; only a real insn ending in a BARRIER blocks
   fall-through.  */
3148 while (insn && GET_CODE (insn) == NOTE)
3149 insn = PREV_INSN (insn);
3150 return insn && GET_CODE (insn) != BARRIER;
3153 /* Attempt to optimize a potential tail recursion call into a goto.
3154 ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
3155 where to place the jump to the tail recursion label.
3157 Return TRUE if the call was optimized into a goto. */
/* Try to turn a tail-recursive call with ARGUMENTS into a goto to
   tail_recursion_label, placing the jump after LAST_INSN.  Returns
   nonzero on success (the return statements are elided in this
   chunk).  */
3160 optimize_tail_recursion (arguments, last_insn)
3164 /* Finish checking validity, and if valid emit code to set the
3165 argument variables for the new call. */
3166 if (tail_recursion_args (arguments, DECL_ARGUMENTS (current_function_decl)))
/* Lazily create the label at the function's re-entry point the
   first time a tail recursion is found.  */
3168 if (tail_recursion_label == 0)
3170 tail_recursion_label = gen_label_rtx ();
3171 emit_label_after (tail_recursion_label,
3172 tail_recursion_reentry);
3175 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
3182 /* Emit code to alter this function's formal parms for a tail-recursive call.
3183 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3184 FORMALS is the chain of decls of formals.
3185 Return 1 if this can be done;
3186 otherwise return 0 and do not emit any code. */
/* Emit code assigning ACTUALS (a TREE_LIST chain of actual argument
   expressions) to FORMALS (a chain of PARM_DECLs) for a
   tail-recursive call.  Returns 1 on success; returns 0 without
   emitting anything if counts or types mismatch, or a formal is not
   a scalar in a register.  NOTE(review): declarations, braces and
   return statements are elided in this chunk.  */
3189 tail_recursion_args (actuals, formals)
3190 tree actuals, formals;
3192 register tree a = actuals, f = formals;
3194 register rtx *argvec;
3196 /* Check that number and types of actuals are compatible
3197 with the formals. This is not always true in valid C code.
3198 Also check that no formal needs to be addressable
3199 and that all formals are scalars. */
3201 /* Also count the args. */
3203 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3205 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3206 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
3208 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
/* Chains of unequal length mean an argument-count mismatch.  */
3211 if (a != 0 || f != 0)
3214 /* Compute all the actuals. */
3216 argvec = (rtx *) alloca (i * sizeof (rtx));
3218 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3219 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
3221 /* Find which actual values refer to current values of previous formals.
3222 Copy each of them now, before any formal is changed. */
3224 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3228 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3229 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3235 argvec[i] = copy_to_reg (argvec[i]);
3238 /* Store the values of the actuals into the formals. */
3240 for (f = formals, a = actuals, i = 0; f;
3241 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
/* Same mode: plain move; otherwise convert between modes.  */
3243 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3244 emit_move_insn (DECL_RTL (f), argvec[i]);
3246 convert_move (DECL_RTL (f), argvec[i],
3247 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3254 /* Generate the RTL code for entering a binding contour.
3255 The variables are declared one by one, by calls to `expand_decl'.
3257 FLAGS is a bitwise or of the following flags:
3259 1 - Nonzero if this construct should be visible to
3262 2 - Nonzero if this contour does not require a
3263 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
3264 language-independent code should set this flag because they
3265 will not create corresponding BLOCK nodes. (There should be
3266 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
3267 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
3268 when expand_end_bindings is called.
3270 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
3271 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
/* Enter a binding contour.  FLAGS bit 1 = contour is exitable via
   `exit_something'; bit 2 = suppress the NOTE_INSN_BLOCK_BEG note.
   BLOCK, if supplied, becomes the NOTE_BLOCK of the begin note.
   Pushes a `struct nesting' on block_stack.  NOTE(review): return
   type, braces and some `if' headers are elided in this chunk.  */
3275 expand_start_bindings_and_block (flags, block)
3279 struct nesting *thisblock = ALLOC_NESTING ();
3281 int exit_flag = ((flags & 1) != 0);
3282 int block_flag = ((flags & 2) == 0);
3284 /* If a BLOCK is supplied, then the caller should be requesting a
3285 NOTE_INSN_BLOCK_BEG note. */
3286 if (!block_flag && block)
3289 /* Create a note to mark the beginning of the block. */
3292 note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
3293 NOTE_BLOCK (note) = block;
/* No BLOCK_BEG wanted: a deleted note still marks the first insn.  */
3296 note = emit_note (NULL_PTR, NOTE_INSN_DELETED);
3298 /* Make an entry on block_stack for the block we are entering. */
3300 thisblock->next = block_stack;
3301 thisblock->all = nesting_stack;
3302 thisblock->depth = ++nesting_depth;
3303 thisblock->data.block.stack_level = 0;
3304 thisblock->data.block.cleanups = 0;
3305 thisblock->data.block.n_function_calls = 0;
3306 thisblock->data.block.exception_region = 0;
3307 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
3309 thisblock->data.block.conditional_code = 0;
3310 thisblock->data.block.last_unconditional_cleanup = note;
3311 /* When we insert instructions after the last unconditional cleanup,
3312 we don't adjust last_insn. That means that a later add_insn will
3313 clobber the instructions we've just added. The easiest way to
3314 fix this is to just insert another instruction here, so that the
3315 instructions inserted after the last unconditional cleanup are
3316 never the last instruction. */
3317 emit_note (NULL_PTR, NOTE_INSN_DELETED);
3318 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
/* Inherit the enclosing block's cleanup chain, if it has one.  */
3321 && !(block_stack->data.block.cleanups == NULL_TREE
3322 && block_stack->data.block.outer_cleanups == NULL_TREE))
3323 thisblock->data.block.outer_cleanups
3324 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3325 block_stack->data.block.outer_cleanups);
3327 thisblock->data.block.outer_cleanups = 0;
3328 thisblock->data.block.label_chain = 0;
3329 thisblock->data.block.innermost_stack_block = stack_block_stack;
3330 thisblock->data.block.first_insn = note;
3331 thisblock->data.block.block_start_count = ++current_block_start_count;
3332 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3333 block_stack = thisblock;
3334 nesting_stack = thisblock;
3336 /* Make a new level for allocating stack slots. */
3340 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3341 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3342 expand_expr are made. After we end the region, we know that all
3343 space for all temporaries that were created by TARGET_EXPRs will be
3344 destroyed and their space freed for reuse. */
/* Open a region bounding the lifetime of temporaries created by
   TARGET_EXPRs: start a binding layer (flag 2 = no BLOCK_BEG note)
   and mark the current temp-slot level as the target level.  */
3347 expand_start_target_temps ()
3349 /* This is so that even if the result is preserved, the space
3350 allocated will be freed, as we know that it is no longer in use. */
3353 /* Start a new binding layer that will keep track of all cleanup
3354 actions to be performed. */
3355 expand_start_bindings (2);
3357 target_temp_slot_level = temp_slot_level;
/* Close the region opened by expand_start_target_temps, ending the
   binding layer so its cleanups run and TARGET_EXPR temporaries can
   be freed.  */
3361 expand_end_target_temps ()
3363 expand_end_bindings (NULL_TREE, 0, 0);
3365 /* This is so that even if the result is preserved, the space
3366 allocated will be freed, as we know that it is no longer in use. */
3370 /* Given a pointer to a BLOCK node return non-zero if (and only if) the node
3371 in question represents the outermost pair of curly braces (i.e. the "body
3372 block") of a function or method.
3374 For any BLOCK node representing a "body block" of a function or method, the
3375 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
3376 represents the outermost (function) scope for the function or method (i.e.
3377 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
3378 *that* node in turn will point to the relevant FUNCTION_DECL node. */
/* Return non-zero iff STMT is the "body block" of a function: a
   BLOCK whose supercontext is another BLOCK whose supercontext is a
   FUNCTION_DECL.  NOTE(review): parameter declaration, braces and
   return statements are elided in this chunk.  */
3381 is_body_block (stmt)
3384 if (TREE_CODE (stmt) == BLOCK)
3386 tree parent = BLOCK_SUPERCONTEXT (stmt);
3388 if (parent && TREE_CODE (parent) == BLOCK)
3390 tree grandparent = BLOCK_SUPERCONTEXT (parent);
3392 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
3400 /* Mark top block of block_stack as an implicit binding for an
3401 exception region. This is used to prevent infinite recursion when
3402 ending a binding with expand_end_bindings. It is only ever called
3403 by expand_eh_region_start, as that is the only way to create a
3404 block stack for an exception region. */
/* Mark the top block of block_stack as an implicit binding for an
   exception region (used by expand_eh_region_start).  If the
   enclosing block is conditional, propagate its conditional-cleanup
   state into this block so conditional cleanups keep working.  */
3407 mark_block_as_eh_region ()
3409 block_stack->data.block.exception_region = 1;
3410 if (block_stack->next
3411 && block_stack->next->data.block.conditional_code)
3413 block_stack->data.block.conditional_code
3414 = block_stack->next->data.block.conditional_code;
3415 block_stack->data.block.last_unconditional_cleanup
3416 = block_stack->next->data.block.last_unconditional_cleanup;
3417 block_stack->data.block.cleanup_ptr
3418 = block_stack->next->data.block.cleanup_ptr;
3422 /* True if we are currently emitting insns in an area of output code
3423 that is controlled by a conditional expression. This is used by
3424 the cleanup handling code to generate conditional cleanup actions. */
/* Return nonzero if insns are currently being emitted under control
   of a conditional expression (used to decide whether cleanups must
   themselves be conditional).  */
3427 conditional_context ()
3429 return block_stack && block_stack->data.block.conditional_code;
3432 /* Mark top block of block_stack as not for an implicit binding for an
3433 exception region. This is only ever done by expand_eh_region_end
3434 to let expand_end_bindings know that it is being called explicitly
3435 to end the binding layer for just the binding layer associated with
3436 the exception region, otherwise expand_end_bindings would try and
3437 end all implicit binding layers for exceptions regions, and then
3438 one normal binding layer. */
/* Clear the exception-region mark on the top block of block_stack
   (used by expand_eh_region_end; see the comment above).  */
3441 mark_block_as_not_eh_region ()
3443 block_stack->data.block.exception_region = 0;
3446 /* True if the top block of block_stack was marked as for an exception
3447 region by mark_block_as_eh_region. */
/* NOTE(review): the `is_eh_region' declarator line is elided in this
   excerpt.  The predicate guards against a missing function context
   (cfun) or an empty block stack before reading the flag.  */
3452 return cfun && block_stack && block_stack->data.block.exception_region;
3455 /* Emit a handler label for a nonlocal goto handler.
3456 Also emit code to store the handler label in SLOT before BEFORE_INSN.
   Returns the new label rtx.  */
3459 expand_nl_handler_label (slot, before_insn)
3460 rtx slot, before_insn;
3463 rtx handler_label = gen_label_rtx ();
3465 /* Don't let jump_optimize delete the handler. */
3466 LABEL_PRESERVE_P (handler_label) = 1;
/* Build the store of the label's address into SLOT in a separate
   insn sequence, then splice it in ahead of BEFORE_INSN.  */
3469 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3470 insns = get_insns ();
3472 emit_insns_before (insns, before_insn);
3474 emit_label (handler_label);
3476 return handler_label;
3479 /* Emit code to restore vital registers at the beginning of a nonlocal goto
   handler: the frame pointer, and (when distinct and fixed) the arg
   pointer.  Uses the target's nonlocal_goto_receiver pattern if one
   exists.  */
3482 expand_nl_goto_receiver ()
3484 #ifdef HAVE_nonlocal_goto
3485 if (! HAVE_nonlocal_goto)
3487 /* First adjust our frame pointer to its actual value. It was
3488 previously set to the start of the virtual area corresponding to
3489 the stacked variables when we branched here and now needs to be
3490 adjusted to the actual hardware fp value.
3492 Assignments to virtual registers are converted by
3493 instantiate_virtual_regs into the corresponding assignment
3494 to the underlying register (fp in this case) that makes
3495 the original assignment true.
3496 So the following insn will actually be
3497 decrementing fp by STARTING_FRAME_OFFSET. */
3498 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3500 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3501 if (fixed_regs[ARG_POINTER_REGNUM])
3503 #ifdef ELIMINABLE_REGS
3504 /* If the argument pointer can be eliminated in favor of the
3505 frame pointer, we don't need to restore it. We assume here
3506 that if such an elimination is present, it can always be used.
3507 This is the case on all known machines; if we don't make this
3508 assumption, we do unnecessary saving on many machines. */
3509 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
/* Scan the elimination table for arg-pointer -> hard-frame-pointer.  */
3512 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
3513 if (elim_regs[i].from == ARG_POINTER_REGNUM
3514 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
/* i == ARRAY_SIZE means no such elimination; must restore by hand.  */
3517 if (i == ARRAY_SIZE (elim_regs))
3520 /* Now restore our arg pointer from the address at which it
3521 was saved in our stack frame.
3522 If there hasn't been space allocated for it yet, make
   some now.  */
3524 if (arg_pointer_save_area == 0)
3525 arg_pointer_save_area
3526 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3527 emit_move_insn (virtual_incoming_args_rtx,
3528 /* We need a pseudo here, or else
3529 instantiate_virtual_regs_1 complains. */
3530 copy_to_reg (arg_pointer_save_area));
3535 #ifdef HAVE_nonlocal_goto_receiver
3536 if (HAVE_nonlocal_goto_receiver)
3537 emit_insn (gen_nonlocal_goto_receiver ())
3541 /* Make handlers for nonlocal gotos taking place in the function calls in
   block THISBLOCK: one handler per reachable nonlocal label, plus one
   shared aborting handler for labels that are DECL_TOO_LATE.  Records
   all handler labels in nonlocal_goto_handler_labels.  */
3545 expand_nl_goto_receivers (thisblock)
3546 struct nesting *thisblock;
3549 rtx afterward = gen_label_rtx ();
3554 /* Record the handler address in the stack slot for that purpose,
3555 during this block, saving and restoring the outer value. */
3556 if (thisblock->next != 0)
3557 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3559 rtx save_receiver = gen_reg_rtx (Pmode);
3560 emit_move_insn (XEXP (slot, 0), save_receiver);
/* The save of the outer value is emitted as a separate sequence and
   spliced in before the block's first insn.  */
3563 emit_move_insn (save_receiver, XEXP (slot, 0));
3564 insns = get_insns ();
3566 emit_insns_before (insns, thisblock->data.block.first_insn);
3569 /* Jump around the handlers; they run only when specially invoked. */
3570 emit_jump (afterward);
3572 /* Make a separate handler for each label. */
3573 link = nonlocal_labels;
3574 slot = nonlocal_goto_handler_slots;
3575 label_list = NULL_RTX;
3576 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3577 /* Skip any labels we shouldn't be able to jump to from here,
3578 we generate one special handler for all of them below which just calls
   abort.  */
3580 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3583 lab = expand_nl_handler_label (XEXP (slot, 0),
3584 thisblock->data.block.first_insn);
3585 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3587 expand_nl_goto_receiver ();
3589 /* Jump to the "real" nonlocal label. */
3590 expand_goto (TREE_VALUE (link));
3593 /* A second pass over all nonlocal labels; this time we handle those
3594 we should not be able to jump to at this point. */
3595 link = nonlocal_labels;
3596 slot = nonlocal_goto_handler_slots;
3598 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3599 if (DECL_TOO_LATE (TREE_VALUE (link)))
3602 lab = expand_nl_handler_label (XEXP (slot, 0),
3603 thisblock->data.block.first_insn);
3604 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
/* All too-late labels share one handler that simply aborts.  */
3610 expand_nl_goto_receiver ();
3611 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "abort"), 0,
3616 nonlocal_goto_handler_labels = label_list;
3617 emit_label (afterward);
3620 /* Warn about any unused VARS (which may contain nodes other than
3621 VAR_DECLs, but such nodes are ignored). The nodes are connected
3622 via the TREE_CHAIN field. */
3625 warn_about_unused_variables (vars)
/* Only VAR_DECLs that are named, not artificial, not from system
   headers, and never TREE_USED trigger the warning, and only when
   -Wunused-variable is in effect.  */
3630 if (warn_unused_variable)
3631 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3632 if (TREE_CODE (decl) == VAR_DECL
3633 && ! TREE_USED (decl)
3634 && ! DECL_IN_SYSTEM_HEADER (decl)
3635 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3636 warning_with_decl (decl, "unused variable `%s'");
3639 /* Generate RTL code to terminate a binding contour.
3641 VARS is the chain of VAR_DECL nodes for the variables bound in this
3642 contour. There may actually be other nodes in this chain, but any
3643 nodes other than VAR_DECLS are ignored.
3645 MARK_ENDS is nonzero if we should put a note at the beginning
3646 and end of this binding contour.
3648 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3649 (That is true automatically if the contour has a saved stack level.) */
3652 expand_end_bindings (vars, mark_ends, dont_jump_in)
3657 register struct nesting *thisblock;
/* First pop any implicit EH-region bindings sitting on top of the
   real one; recursing with the flag cleared avoids infinite
   recursion (see mark_block_as_eh_region).  */
3659 while (block_stack->data.block.exception_region)
3661 /* Because we don't need or want a new temporary level and
3662 because we didn't create one in expand_eh_region_start,
3663 create a fake one now to avoid removing one in
3664 expand_end_bindings. */
3667 block_stack->data.block.exception_region = 0;
3669 expand_end_bindings (NULL_TREE, 0, 0);
3672 /* Since expand_eh_region_start does an expand_start_bindings, we
3673 have to first end all the bindings that were created by
3674 expand_eh_region_start. */
3676 thisblock = block_stack;
3678 /* If any of the variables in this scope were not used, warn the
   user.  */
3680 warn_about_unused_variables (vars);
3682 if (thisblock->exit_label)
3684 do_pending_stack_adjust ();
3685 emit_label (thisblock->exit_label);
3688 /* If necessary, make handlers for nonlocal gotos taking
3689 place in the function calls in this block. */
3690 if (function_call_count != thisblock->data.block.n_function_calls
3692 /* Make handler for outermost block
3693 if there were any nonlocal gotos to this function. */
3694 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3695 /* Make handler for inner block if it has something
3696 special to do when you jump out of it. */
3697 : (thisblock->data.block.cleanups != 0
3698 || thisblock->data.block.stack_level != 0)))
3699 expand_nl_goto_receivers (thisblock);
3701 /* Don't allow jumping into a block that has a stack level.
3702 Cleanups are allowed, though. */
3704 || thisblock->data.block.stack_level != 0)
3706 struct label_chain *chain;
3708 /* Any labels in this block are no longer valid to go to.
3709 Mark them to cause an error message. */
3710 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3712 DECL_TOO_LATE (chain->label) = 1;
3713 /* If any goto without a fixup came to this label,
3714 that must be an error, because gotos without fixups
3715 come from outside all saved stack-levels. */
3716 if (TREE_ADDRESSABLE (chain->label))
3717 error_with_decl (chain->label,
3718 "label `%s' used before containing binding contour");
3722 /* Restore stack level in effect before the block
3723 (only if variable-size objects allocated). */
3724 /* Perform any cleanups associated with the block. */
3726 if (thisblock->data.block.stack_level != 0
3727 || thisblock->data.block.cleanups != 0)
3732 /* Don't let cleanups affect ({...}) constructs. */
3733 int old_expr_stmts_for_value = expr_stmts_for_value;
3734 rtx old_last_expr_value = last_expr_value;
3735 tree old_last_expr_type = last_expr_type;
3736 expr_stmts_for_value = 0;
3738 /* Only clean up here if this point can actually be reached. */
3739 insn = get_last_insn ();
3740 if (GET_CODE (insn) == NOTE)
3741 insn = prev_nonnote_insn (insn);
3742 reachable = (! insn || GET_CODE (insn) != BARRIER);
3744 /* Do the cleanups. */
3745 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3747 do_pending_stack_adjust ();
3749 expr_stmts_for_value = old_expr_stmts_for_value;
3750 last_expr_value = old_last_expr_value;
3751 last_expr_type = old_last_expr_type;
3753 /* Restore the stack level. */
3755 if (reachable && thisblock->data.block.stack_level != 0)
3757 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3758 thisblock->data.block.stack_level, NULL_RTX);
3759 if (nonlocal_goto_handler_slots != 0)
3760 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3764 /* Any gotos out of this block must also do these things.
3765 Also report any gotos with fixups that came to labels in this
   block.  */
3767 fixup_gotos (thisblock,
3768 thisblock->data.block.stack_level,
3769 thisblock->data.block.cleanups,
3770 thisblock->data.block.first_insn,
3774 /* Mark the beginning and end of the scope if requested.
3775 We do this now, after running cleanups on the variables
3776 just going out of scope, so they are in scope for their cleanups. */
3780 rtx note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3781 NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
3784 /* Get rid of the beginning-mark if we don't make an end-mark. */
3785 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3787 /* Restore the temporary level of TARGET_EXPRs. */
3788 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
3790 /* Restore block_stack level for containing block. */
3792 stack_block_stack = thisblock->data.block.innermost_stack_block;
3793 POPSTACK (block_stack);
3795 /* Pop the stack slot nesting and free any slots at this level. */
3799 /* Generate code to save the stack pointer at the start of the current block
3800 and set up to restore it on exit.
   Idempotent: does nothing if this block already has a saved level.  */
3803 save_stack_pointer ()
3805 struct nesting *thisblock = block_stack;
3807 if (thisblock->data.block.stack_level == 0)
3809 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3810 &thisblock->data.block.stack_level,
3811 thisblock->data.block.first_insn);
/* A block with a stack level belongs in stack_block_stack.  */
3812 stack_block_stack = thisblock;
3816 /* Generate RTL for the automatic variable declaration DECL.
3817 (Other kinds of declarations are simply ignored if seen here.)
   Chooses, in order: an error placeholder MEM, a register-held
   address for incomplete arrays, a pseudo register, a fixed stack
   slot, or dynamically allocated stack space.  */
3823 struct nesting *thisblock;
3826 type = TREE_TYPE (decl);
3828 /* Only automatic variables need any expansion done.
3829 Static and external variables, and external functions,
3830 will be handled by `assemble_variable' (called from finish_decl).
3831 TYPE_DECL and CONST_DECL require nothing.
3832 PARM_DECLs are handled in `assign_parms'. */
3834 if (TREE_CODE (decl) != VAR_DECL)
3836 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3839 thisblock = block_stack;
3841 /* Create the RTL representation for the variable. */
3843 if (type == error_mark_node)
3844 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
3846 else if (DECL_SIZE (decl) == 0)
3847 /* Variable with incomplete type. */
3849 if (DECL_INITIAL (decl) == 0)
3850 /* Error message was already done; now avoid a crash. */
3851 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
3853 /* An initializer is going to decide the size of this array.
3854 Until we know the size, represent its address with a reg. */
3855 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode)));
3857 set_mem_attributes (DECL_RTL (decl), decl, 1);
3859 else if (DECL_MODE (decl) != BLKmode
3860 /* If -ffloat-store, don't put explicit float vars
   into registers.  */
3862 && !(flag_float_store
3863 && TREE_CODE (type) == REAL_TYPE)
3864 && ! TREE_THIS_VOLATILE (decl)
3865 && (DECL_REGISTER (decl) || optimize)
3866 /* if -fcheck-memory-usage, check all variables. */
3867 && ! current_function_check_memory_usage)
3869 /* Automatic variable that can go in a register. */
3870 int unsignedp = TREE_UNSIGNED (type);
3871 enum machine_mode reg_mode
3872 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3874 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
3875 mark_user_reg (DECL_RTL (decl));
3877 if (POINTER_TYPE_P (type))
3878 mark_reg_pointer (DECL_RTL (decl),
3879 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
3881 maybe_set_unchanging (DECL_RTL (decl), decl);
3883 /* If something wants our address, try to use ADDRESSOF. */
3884 if (TREE_ADDRESSABLE (decl))
3885 put_var_into_stack (decl);
/* Fixed-size stack variable, unless stack checking forces a
   dynamic allocation for variables above STACK_CHECK_MAX_VAR_SIZE.  */
3888 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
3889 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3890 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
3891 STACK_CHECK_MAX_VAR_SIZE)))
3893 /* Variable of fixed size that goes on the stack. */
3897 /* If we previously made RTL for this decl, it must be an array
3898 whose size was determined by the initializer.
3899 The old address was a register; set that register now
3900 to the proper address. */
3901 if (DECL_RTL_SET_P (decl))
3903 if (GET_CODE (DECL_RTL (decl)) != MEM
3904 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3906 oldaddr = XEXP (DECL_RTL (decl), 0);
3910 assign_temp (TREE_TYPE (decl), 1, 1, 1));
3912 /* Set alignment we actually gave this decl. */
3913 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3914 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3915 DECL_USER_ALIGN (decl) = 0;
/* Point the placeholder address register at the real slot.  */
3919 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3920 if (addr != oldaddr)
3921 emit_move_insn (oldaddr, addr);
3925 /* Dynamic-size object: must push space on the stack. */
3929 /* Record the stack pointer on entry to block, if have
3930 not already done so. */
3931 do_pending_stack_adjust ();
3932 save_stack_pointer ();
3934 /* In function-at-a-time mode, variable_size doesn't expand this,
   so do it now.  */
3936 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
3937 expand_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
3938 const0_rtx, VOIDmode, 0);
3940 /* Compute the variable's size, in bytes. */
3941 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
3944 /* Allocate space on the stack for the variable. Note that
3945 DECL_ALIGN says how the variable is to be aligned and we
3946 cannot use it to conclude anything about the alignment of
   the type.  */
3948 address = allocate_dynamic_stack_space (size, NULL_RTX,
3949 TYPE_ALIGN (TREE_TYPE (decl)));
3951 /* Reference the variable indirect through that rtx. */
3952 SET_DECL_RTL (decl, gen_rtx_MEM (DECL_MODE (decl), address));
3954 set_mem_attributes (DECL_RTL (decl), decl, 1);
3956 /* Indicate the alignment we actually gave this variable. */
3957 #ifdef STACK_BOUNDARY
3958 DECL_ALIGN (decl) = STACK_BOUNDARY;
3960 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3962 DECL_USER_ALIGN (decl) = 0;
3966 /* Emit code to perform the initialization of a declaration DECL. */
3969 expand_decl_init (decl)
/* Remember TREE_USED so the initialization itself does not count
   as a use of the variable.  */
3972 int was_used = TREE_USED (decl);
3974 /* If this is a CONST_DECL, we don't have to generate any code, but
3975 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3976 to be set while in the obstack containing the constant. If we don't
3977 do this, we can lose if we have functions nested three deep and the middle
3978 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3979 the innermost function is the first to expand that STRING_CST. */
3980 if (TREE_CODE (decl) == CONST_DECL)
3982 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3983 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3984 EXPAND_INITIALIZER);
/* Static variables are initialized elsewhere (assemble_variable).  */
3988 if (TREE_STATIC (decl))
3991 /* Compute and store the initial value now. */
3993 if (DECL_INITIAL (decl) == error_mark_node)
3995 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
/* For erroneous initializers of scalar-like types, store zero so
   later code does not crash on an uninitialized value.  */
3997 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3998 || code == POINTER_TYPE || code == REFERENCE_TYPE)
3999 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
4003 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
4005 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
4006 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
4010 /* Don't let the initialization count as "using" the variable. */
4011 TREE_USED (decl) = was_used;
4013 /* Free any temporaries we made while initializing the decl. */
4014 preserve_temp_slots (NULL_RTX);
4018 /* CLEANUP is an expression to be executed at exit from this binding contour;
4019 for example, in C++, it might call the destructor for this variable.
4021 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
4022 CLEANUP multiple times, and have the correct semantics. This
4023 happens in exception handling, for gotos, returns, breaks that
4024 leave the current scope.
4026 If CLEANUP is nonzero and DECL is zero, we record a cleanup
4027 that is not associated with any particular variable. */
4030 expand_decl_cleanup (decl, cleanup)
4033 struct nesting *thisblock;
4035 /* Error if we are not in any block. */
4036 if (cfun == 0 || block_stack == 0)
4039 thisblock = block_stack;
4041 /* Record the cleanup if there is one. */
4047 tree *cleanups = &thisblock->data.block.cleanups;
4048 int cond_context = conditional_context ();
/* In a conditional context, guard the cleanup with a runtime flag:
   zero-initialized at the last unconditional point, set to one when
   control actually reaches this declaration.  */
4052 rtx flag = gen_reg_rtx (word_mode);
4057 emit_move_insn (flag, const0_rtx);
4058 set_flag_0 = get_insns ();
4061 thisblock->data.block.last_unconditional_cleanup
4062 = emit_insns_after (set_flag_0,
4063 thisblock->data.block.last_unconditional_cleanup);
4065 emit_move_insn (flag, const1_rtx);
4067 cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
4068 SET_DECL_RTL (cond, flag);
4070 /* Conditionalize the cleanup. */
4071 cleanup = build (COND_EXPR, void_type_node,
4072 truthvalue_conversion (cond),
4073 cleanup, integer_zero_node);
4074 cleanup = fold (cleanup);
4076 cleanups = thisblock->data.block.cleanup_ptr;
/* Wrap in UNSAVE_EXPR so the cleanup can be expanded repeatedly.  */
4079 cleanup = unsave_expr (cleanup);
4081 t = *cleanups = tree_cons (decl, cleanup, *cleanups);
4084 /* If this block has a cleanup, it belongs in stack_block_stack. */
4085 stack_block_stack = thisblock;
4092 /* If this was optimized so that there is no exception region for the
4093 cleanup, then mark the TREE_LIST node, so that we can later tell
4094 if we need to call expand_eh_region_end. */
4095 if (! using_eh_for_cleanups_p
4096 || expand_eh_region_start_tree (decl, cleanup))
4097 TREE_ADDRESSABLE (t) = 1;
4098 /* If that started a new EH region, we're in a new block. */
4099 thisblock = block_stack;
4106 thisblock->data.block.last_unconditional_cleanup
4107 = emit_insns_after (seq,
4108 thisblock->data.block.last_unconditional_cleanup);
4112 thisblock->data.block.last_unconditional_cleanup
4114 /* When we insert instructions after the last unconditional cleanup,
4115 we don't adjust last_insn. That means that a later add_insn will
4116 clobber the instructions we've just added. The easiest way to
4117 fix this is to just insert another instruction here, so that the
4118 instructions inserted after the last unconditional cleanup are
4119 never the last instruction. */
4120 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4121 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
4127 /* Like expand_decl_cleanup, but suppress generating an exception handler
4128 to perform the cleanup.
   Temporarily clears using_eh_for_cleanups_p around the call and
   restores it afterward; returns expand_decl_cleanup's result.  */
4132 expand_decl_cleanup_no_eh (decl, cleanup)
4135 int save_eh = using_eh_for_cleanups_p;
4138 using_eh_for_cleanups_p = 0;
4139 result = expand_decl_cleanup (decl, cleanup);
4140 using_eh_for_cleanups_p = save_eh;
4146 /* Arrange for the top element of the dynamic cleanup chain to be
4147 popped if we exit the current binding contour. DECL is the
4148 associated declaration, if any, otherwise NULL_TREE. If the
4149 current contour is left via an exception, then __sjthrow will pop
4150 the top element off the dynamic cleanup chain. The code that
4151 avoids doing the action we push into the cleanup chain in the
4152 exceptional case is contained in expand_cleanups.
4154 This routine is only used by expand_eh_region_start, and that is
4155 the only way in which an exception region should be started. This
4156 routine is only used when using the setjmp/longjmp codegen method
4157 for exception handling. */
4160 expand_dcc_cleanup (decl)
4163 struct nesting *thisblock;
4166 /* Error if we are not in any block. */
4167 if (cfun == 0 || block_stack == 0)
4169 thisblock = block_stack;
4171 /* Record the cleanup for the dynamic handler chain. */
/* POPDCC_EXPR is recognized specially by expand_cleanups.  */
4173 cleanup = make_node (POPDCC_EXPR);
4175 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
4176 thisblock->data.block.cleanups
4177 = tree_cons (decl, cleanup, thisblock->data.block.cleanups);
4179 /* If this block has a cleanup, it belongs in stack_block_stack. */
4180 stack_block_stack = thisblock;
4184 /* Arrange for the top element of the dynamic handler chain to be
4185 popped if we exit the current binding contour. DECL is the
4186 associated declaration, if any, otherwise NULL_TREE. If the current
4187 contour is left via an exception, then __sjthrow will pop the top
4188 element off the dynamic handler chain. The code that avoids doing
4189 the action we push into the handler chain in the exceptional case
4190 is contained in expand_cleanups.
4192 This routine is only used by expand_eh_region_start, and that is
4193 the only way in which an exception region should be started. This
4194 routine is only used when using the setjmp/longjmp codegen method
4195 for exception handling. */
4198 expand_dhc_cleanup (decl)
4201 struct nesting *thisblock;
4204 /* Error if we are not in any block. */
4205 if (cfun == 0 || block_stack == 0)
4207 thisblock = block_stack;
4209 /* Record the cleanup for the dynamic handler chain. */
/* POPDHC_EXPR is recognized specially by expand_cleanups.  */
4211 cleanup = make_node (POPDHC_EXPR);
4213 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
4214 thisblock->data.block.cleanups
4215 = tree_cons (decl, cleanup, thisblock->data.block.cleanups);
4217 /* If this block has a cleanup, it belongs in stack_block_stack. */
4218 stack_block_stack = thisblock;
4222 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
4223 DECL_ELTS is the list of elements that belong to DECL's type.
4224 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup.
   Expands DECL itself, then makes each element share DECL's rtl
   (adjusting the mode per element) and records element cleanups.  */
4227 expand_anon_union_decl (decl, cleanup, decl_elts)
4228 tree decl, cleanup, decl_elts;
4230 struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
4234 /* If any of the elements are addressable, so is the entire union. */
4235 for (t = decl_elts; t; t = TREE_CHAIN (t))
4236 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
4238 TREE_ADDRESSABLE (decl) = 1;
4243 expand_decl_cleanup (decl, cleanup);
4244 x = DECL_RTL (decl);
4246 /* Go through the elements, assigning RTL to each. */
4247 for (t = decl_elts; t; t = TREE_CHAIN (t))
4249 tree decl_elt = TREE_VALUE (t);
4250 tree cleanup_elt = TREE_PURPOSE (t);
4251 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4253 /* Propagate the union's alignment to the elements. */
4254 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
4255 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
4257 /* If the element has BLKmode and the union doesn't, the union is
4258 aligned such that the element doesn't need to have BLKmode, so
4259 change the element's mode to the appropriate one for its size. */
4260 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4261 DECL_MODE (decl_elt) = mode
4262 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
4264 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4265 instead create a new MEM rtx with the proper mode. */
4266 if (GET_CODE (x) == MEM)
4268 if (mode == GET_MODE (x))
4269 SET_DECL_RTL (decl_elt, x);
4272 SET_DECL_RTL (decl_elt,
4273 gen_rtx_MEM (mode, copy_rtx (XEXP (x, 0))));
4274 MEM_COPY_ATTRIBUTES (DECL_RTL (decl_elt), x);
4277 else if (GET_CODE (x) == REG)
4279 if (mode == GET_MODE (x))
4280 SET_DECL_RTL (decl_elt, x);
/* For a register, a SUBREG of the union's reg is fine.  */
4282 SET_DECL_RTL (decl_elt, gen_rtx_SUBREG (mode, x, 0));
4287 /* Record the cleanup if there is one. */
4290 thisblock->data.block.cleanups
4291 = tree_cons (decl_elt, cleanup_elt,
4292 thisblock->data.block.cleanups);
4296 /* Expand a list of cleanups LIST.
4297 Elements may be expressions or may be nested lists.
4299 If DONT_DO is nonnull, then any list-element
4300 whose TREE_PURPOSE matches DONT_DO is omitted.
4301 This is sometimes used to avoid a cleanup associated with
4302 a value that is being returned out of the scope.
4304 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
4305 goto and handle protection regions specially in that case.
4307 If REACHABLE, we emit code, otherwise just inform the exception handling
4308 code about this finalization. */
4311 expand_cleanups (list, dont_do, in_fixup, reachable)
4318 for (tail = list; tail; tail = TREE_CHAIN (tail))
4319 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
/* Nested TREE_LISTs are expanded recursively.  */
4321 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4322 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
4327 tree cleanup = TREE_VALUE (tail);
4329 /* See expand_d{h,c}c_cleanup for why we avoid this. */
4330 if (TREE_CODE (cleanup) != POPDHC_EXPR
4331 && TREE_CODE (cleanup) != POPDCC_EXPR
4332 /* See expand_eh_region_start_tree for this case. */
4333 && ! TREE_ADDRESSABLE (tail))
4335 cleanup = protect_with_terminate (cleanup);
4336 expand_eh_region_end (cleanup);
4342 /* Cleanups may be run multiple times. For example,
4343 when exiting a binding contour, we expand the
4344 cleanups associated with that contour. When a goto
4345 within that binding contour has a target outside that
4346 contour, it will expand all cleanups from its scope to
4347 the target. Though the cleanups are expanded multiple
4348 times, the control paths are non-overlapping so the
4349 cleanups will not be executed twice. */
4351 /* We may need to protect fixups with rethrow regions. */
4352 int protect = (in_fixup && ! TREE_ADDRESSABLE (tail));
4355 expand_fixup_region_start ();
4357 /* The cleanup might contain try-blocks, so we have to
4358 preserve our current queue. */
4360 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4363 expand_fixup_region_end (TREE_VALUE (tail));
4370 /* Mark when the context we are emitting RTL for as a conditional
4371 context, so that any cleanup actions we register with
4372 expand_decl_init will be properly conditionalized when those
4373 cleanup actions are later performed. Must be called before any
4374 expression (tree) is expanded that is within a conditional context. */
4377 start_cleanup_deferral ()
4379 /* block_stack can be NULL if we are inside the parameter list. It is
4380 OK to do nothing, because cleanups aren't possible here. */
/* conditional_code is a nesting count, incremented/decremented in
   pairs with end_cleanup_deferral.  */
4382 ++block_stack->data.block.conditional_code;
4385 /* Mark the end of a conditional region of code. Because cleanup
4386 deferrals may be nested, we may still be in a conditional region
4387 after we end the currently deferred cleanups, only after we end all
4388 deferred cleanups, are we back in unconditional code. */
4391 end_cleanup_deferral ()
4393 /* block_stack can be NULL if we are inside the parameter list. It is
4394 OK to do nothing, because cleanups aren't possible here. */
4396 --block_stack->data.block.conditional_code;
4399 /* Move all cleanups from the current block_stack
4400 to the containing block_stack, where they are assumed to
4401 have been created. If anything can cause a temporary to
4402 be created, but not expanded for more than one level of
4403 block_stacks, then this code will have to change. */
4408 struct nesting *block = block_stack;
4409 struct nesting *outer = block->next;
/* Prepend this block's cleanups onto the outer block's list, then
   empty this block's list.  */
4411 outer->data.block.cleanups
4412 = chainon (block->data.block.cleanups,
4413 outer->data.block.cleanups);
4414 block->data.block.cleanups = 0;
/* Return the cleanup list of the innermost binding contour, or 0 if
   there is no contour.  */
4418 last_cleanup_this_contour ()
4420 if (block_stack == 0)
4423 return block_stack->data.block.cleanups;
4426 /* Return 1 if there are any pending cleanups at this point.
4427 If THIS_CONTOUR is nonzero, check the current contour as well.
4428 Otherwise, look only at the contours that enclose this one. */
4431 any_pending_cleanups (this_contour)
4434 struct nesting *block;
4436 if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
4439 if (this_contour && block_stack->data.block.cleanups != NULL)
/* outer_cleanups == 0 on the top block means no enclosing contour
   can have cleanups either, so we can stop early.  */
4441 if (block_stack->data.block.cleanups == 0
4442 && block_stack->data.block.outer_cleanups == 0)
4445 for (block = block_stack->next; block; block = block->next)
4446 if (block->data.block.cleanups != 0)
4452 /* Enter a case (Pascal) or switch (C) statement.
4453 Push a block onto case_stack and nesting_stack
4454 to accumulate the case-labels that are seen
4455 and to record the labels generated for the statement.
4457 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4458 Otherwise, this construct is transparent for `exit_something'.
4460 EXPR is the index-expression to be dispatched on.
4461 TYPE is its nominal type. We could simply convert EXPR to this type,
4462 but instead we take short cuts. */
4465 expand_start_case (exit_flag, expr, type, printname)
4469 const char *printname;
4471 register struct nesting *thiscase = ALLOC_NESTING ();
4473 /* Make an entry on case_stack for the case we are entering. */
4475 thiscase->next = case_stack;
4476 thiscase->all = nesting_stack;
4477 thiscase->depth = ++nesting_depth;
4478 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4479 thiscase->data.case_stmt.case_list = 0;
4480 thiscase->data.case_stmt.index_expr = expr;
4481 thiscase->data.case_stmt.nominal_type = type;
4482 thiscase->data.case_stmt.default_label = 0;
4483 thiscase->data.case_stmt.printname = printname;
4484 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4485 case_stack = thiscase;
4486 nesting_stack = thiscase;
4488 do_pending_stack_adjust ();
4490 /* Make sure case_stmt.start points to something that won't
4491 need any transformation before expand_end_case. */
4492 if (GET_CODE (get_last_insn ()) != NOTE)
4493 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4495 thiscase->data.case_stmt.start = get_last_insn ();
/* The body of a switch is conditional code for cleanup purposes.  */
4497 start_cleanup_deferral ();
4500 /* Start a "dummy case statement" within which case labels are invalid
4501 and are not connected to any larger real case statement.
4502 This can be used if you don't want to let a case statement jump
4503 into the middle of certain kinds of constructs. */
4506 expand_start_case_dummy ()
4508 register struct nesting *thiscase = ALLOC_NESTING ();
4510 /* Make an entry on case_stack for the dummy. */
4512 thiscase->next = case_stack;
4513 thiscase->all = nesting_stack;
4514 thiscase->depth = ++nesting_depth;
4515 thiscase->exit_label = 0;
4516 thiscase->data.case_stmt.case_list = 0;
/* start == 0 marks this entry as a dummy; pushcase checks it.  */
4517 thiscase->data.case_stmt.start = 0;
4518 thiscase->data.case_stmt.nominal_type = 0;
4519 thiscase->data.case_stmt.default_label = 0;
4520 case_stack = thiscase;
4521 nesting_stack = thiscase;
4522 start_cleanup_deferral ();
4525 /* End a dummy case statement.
   Undoes expand_start_case_dummy: ends the cleanup deferral and pops
   the case_stack entry.  */
4528 expand_end_case_dummy ()
4530 end_cleanup_deferral ();
4531 POPSTACK (case_stack);
4534 /* Return the data type of the index-expression
4535 of the innermost case statement, or null if none. */
4538 case_index_expr_type ()
4541 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
/* NOTE(review): the declarator of this function (`check_seenlabel' in
   the original source) is elided in this excerpt; this is its body,
   run when the first case label of a switch is seen.  */
4548 /* If this is the first label, warn if any insns have been emitted. */
4549 if (case_stack->data.case_stmt.line_number_status >= 0)
4553 restore_line_number_status
4554 (case_stack->data.case_stmt.line_number_status);
/* -1 marks the status as already restored, so this runs only once.  */
4555 case_stack->data.case_stmt.line_number_status = -1;
4557 for (insn = case_stack->data.case_stmt.start;
4559 insn = NEXT_INSN (insn))
4561 if (GET_CODE (insn) == CODE_LABEL)
/* Any real insn (other than a USE) before the first label means
   unreachable code at the head of the switch body.  */
4563 if (GET_CODE (insn) != NOTE
4564 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
/* Back up to the nearest line-number note to report a location.  */
4567 insn = PREV_INSN (insn);
4568 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4570 /* If insn is zero, then there must have been a syntax error. */
4572 warning_with_file_and_line (NOTE_SOURCE_FILE (insn),
4573 NOTE_LINE_NUMBER (insn),
4574 "unreachable code at beginning of %s",
4575 case_stack->data.case_stmt.printname);
4582 /* Accumulate one case or default label inside a case or switch statement.
4583 VALUE is the value of the case (a null pointer, for a default label).
4584 The function CONVERTER, when applied to arguments T and V,
4585 converts the value V to the type T.
4587 If not currently inside a case or switch statement, return 1 and do
4588 nothing. The caller will print a language-specific error message.
4589 If VALUE is a duplicate or overlaps, return 2 and do nothing
4590 except store the (first) duplicate node in *DUPLICATE.
4591 If VALUE is out of range, return 3 and do nothing.
4592 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4593 Return 0 on success.
4595 Extended to handle range statements. */
4598 pushcase (value, converter, label, duplicate)
4599 register tree value;
4600 tree (*converter) PARAMS ((tree, tree));
4601 register tree label;
4607 /* Fail if not inside a real case statement. */
4608 if (! (case_stack && case_stack->data.case_stmt.start))
/* A stack block nested deeper than the case statement means this label
   would jump into the scope of a cleanup or variable-sized object
   (the error-5 condition documented above).  */
4611 if (stack_block_stack
4612 && stack_block_stack->depth > case_stack->depth)
4615 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4616 nominal_type = case_stack->data.case_stmt.nominal_type;
4618 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4619 if (index_type == error_mark_node)
4622 /* Convert VALUE to the type in which the comparisons are nominally done. */
4624 value = (*converter) (nominal_type, value);
4628 /* Fail if this value is out of range for the actual type of the index
4629 (which may be narrower than NOMINAL_TYPE). */
4631 && (TREE_CONSTANT_OVERFLOW (value)
4632 || ! int_fits_type_p (value, index_type)))
/* A single case value is the degenerate range [value, value].  */
4635 return add_case_node (value, value, label, duplicate);
4638 /* Like pushcase but this case applies to all values between VALUE1 and
4639 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4640 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4641 starts at VALUE1 and ends at the highest value of the index type.
4642 If both are NULL, this case applies to all values.
4644 The return value is the same as that of pushcase but there is one
4645 additional error code: 4 means the specified range was empty. */
4648 pushcase_range (value1, value2, converter, label, duplicate)
4649 register tree value1, value2;
4650 tree (*converter) PARAMS ((tree, tree));
4651 register tree label;
4657 /* Fail if not inside a real case statement. */
4658 if (! (case_stack && case_stack->data.case_stmt.start))
/* Jumping into a cleanup/var-sized-object scope: error code 5.  */
4661 if (stack_block_stack
4662 && stack_block_stack->depth > case_stack->depth)
4665 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4666 nominal_type = case_stack->data.case_stmt.nominal_type;
4668 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4669 if (index_type == error_mark_node)
4674 /* Convert VALUEs to type in which the comparisons are nominally done
4675 and replace any unspecified value with the corresponding bound. */
4677 value1 = TYPE_MIN_VALUE (index_type);
4679 value2 = TYPE_MAX_VALUE (index_type);
4681 /* Fail if the range is empty. Do this before any conversion since
4682 we want to allow out-of-range empty ranges. */
4683 if (value2 != 0 && tree_int_cst_lt (value2, value1))
4686 /* If the max was unbounded, use the max of the nominal_type we are
4687 converting to. Do this after the < check above to suppress false
4690 value2 = TYPE_MAX_VALUE (nominal_type);
4692 value1 = (*converter) (nominal_type, value1);
4693 value2 = (*converter) (nominal_type, value2);
4695 /* Fail if these values are out of range. */
4696 if (TREE_CONSTANT_OVERFLOW (value1)
4697 || ! int_fits_type_p (value1, index_type))
4700 if (TREE_CONSTANT_OVERFLOW (value2)
4701 || ! int_fits_type_p (value2, index_type))
4704 return add_case_node (value1, value2, label, duplicate);
4707 /* Do the actual insertion of a case label for pushcase and pushcase_range
4708 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4709 slowdown for large switch statements. */
/* NOTE(review): this listing elides large parts of the AVL insertion and
   rebalancing code (rotation bodies, balance-factor updates, loop
   headers).  Comments below annotate only what is visible.  */
4712 add_case_node (low, high, label, duplicate)
4717 struct case_node *p, **q, *r;
4719 /* If there's no HIGH value, then this is not a case range; it's
4720 just a simple case label. But that's just a degenerate case
4725 /* Handle default labels specially. */
4728 if (case_stack->data.case_stmt.default_label != 0)
/* Second default label: report the first one as the duplicate.  */
4730 *duplicate = case_stack->data.case_stmt.default_label;
4733 case_stack->data.case_stmt.default_label = label;
4734 expand_label (label);
/* Search the AVL tree for a node overlapping [LOW, HIGH].  */
4738 q = &case_stack->data.case_stmt.case_list;
4745 /* Keep going past elements distinctly greater than HIGH. */
4746 if (tree_int_cst_lt (high, p->low))
4749 /* or distinctly less than LOW. */
4750 else if (tree_int_cst_lt (p->high, low))
4755 /* We have an overlap; this is an error. */
4756 *duplicate = p->code_label;
4761 /* Add this label to the chain, and succeed. */
4763 r = (struct case_node *) xmalloc (sizeof (struct case_node))
4766 /* If the bounds are equal, turn this into the one-value case. */
4767 if (tree_int_cst_equal (low, high))
4772 r->code_label = label;
4773 expand_label (label);
/* Rebalance the AVL tree after the insertion: walk back up from the
   new node, updating balance factors and rotating where needed.  */
4783 struct case_node *s;
4789 if (! (b = p->balance))
4790 /* Growth propagation from left side. */
/* Left-left case: single right rotation around P.  */
4797 if ((p->left = s = r->right))
4806 if ((r->parent = s))
4814 case_stack->data.case_stmt.case_list = r;
4817 /* r->balance == +1 */
/* Left-right case: double rotation through T = R's right child.  */
4822 struct case_node *t = r->right;
4824 if ((p->left = s = t->right))
4828 if ((r->right = s = t->left))
4842 if ((t->parent = s))
4850 case_stack->data.case_stmt.case_list = t;
4857 /* p->balance == +1; growth of left side balances the node. */
4867 if (! (b = p->balance))
4868 /* Growth propagation from right side. */
/* Right-right case: single left rotation around P.  */
4876 if ((p->right = s = r->left))
4884 if ((r->parent = s))
4893 case_stack->data.case_stmt.case_list = r;
4897 /* r->balance == -1 */
/* Right-left case: double rotation through T = R's left child.  */
4901 struct case_node *t = r->left;
4903 if ((p->right = s = t->left))
4908 if ((r->left = s = t->right))
4922 if ((t->parent = s))
4931 case_stack->data.case_stmt.case_list = t;
4937 /* p->balance == -1; growth of right side balances the node. */
4950 /* Returns the number of possible values of TYPE.
4951 Returns -1 if the number is unknown, variable, or if the number does not
4952 fit in a HOST_WIDE_INT.
4953 Sets *SPARENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4954 do not increase monotonically (there may be duplicates);
4955 to 1 if the values increase monotonically, but not always by 1;
4956 otherwise sets it to 0. */
4959 all_cases_count (type, spareness)
4964 HOST_WIDE_INT count, minval, lastval;
4968 switch (TREE_CODE (type))
/* A char-like type has one value per bit pattern in a byte.  */
4975 count = 1 << BITS_PER_UNIT;
/* Integer types: count = (max - min) + 1, computed with tree folding
   so it works for any host/target width; bail out (count stays -1 —
   TODO confirm default against full source) if unrepresentable.  */
4980 if (TYPE_MAX_VALUE (type) != 0
4981 && 0 != (t = fold (build (MINUS_EXPR, type, TYPE_MAX_VALUE (type),
4982 TYPE_MIN_VALUE (type))))
4983 && 0 != (t = fold (build (PLUS_EXPR, type, t,
4984 convert (type, integer_zero_node))))
4985 && host_integerp (t, 1))
4986 count = tree_low_cst (t, 1);
4992 /* Don't waste time with enumeral types with huge values. */
4993 if (! host_integerp (TYPE_MIN_VALUE (type), 0)
4994 || TYPE_MAX_VALUE (type) == 0
4995 || ! host_integerp (TYPE_MAX_VALUE (type), 0))
4998 lastval = minval = tree_low_cst (TYPE_MIN_VALUE (type), 0);
/* Walk the enumerators, classifying the sequence: any decrease means
   sparseness 2 (non-monotonic); a gap means sparseness 1.  */
5001 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
5003 HOST_WIDE_INT thisval = tree_low_cst (TREE_VALUE (t), 0);
5005 if (*spareness == 2 || thisval < lastval)
5007 else if (thisval != minval + count)
/* A simple bitmap over an array of chars: BITARRAY_TEST reads and
   BITARRAY_SET sets bit INDEX.  Callers allocate the array with
   (count + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR bytes (see
   bytes_needed in check_for_full_enumeration_handling).  */
5017 #define BITARRAY_TEST(ARRAY, INDEX) \
5018 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
5019 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
5020 #define BITARRAY_SET(ARRAY, INDEX) \
5021 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
5022 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
5024 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
5025 with the case values we have seen, assuming the case expression
5027 SPARSENESS is as determined by all_cases_count.
5029 The time needed is proportional to COUNT, unless
5030 SPARSENESS is 2, in which case quadratic time is needed. */
5033 mark_seen_cases (type, cases_seen, count, sparseness)
5035 unsigned char *cases_seen;
5036 HOST_WIDE_INT count;
5039 tree next_node_to_try = NULL_TREE;
5040 HOST_WIDE_INT next_node_offset = 0;
5042 register struct case_node *n, *root = case_stack->data.case_stmt.case_list;
/* Scratch INTEGER_CST node reused as the probe value throughout.  */
5043 tree val = make_node (INTEGER_CST);
5045 TREE_TYPE (val) = type;
5049 else if (sparseness == 2)
5052 unsigned HOST_WIDE_INT xlo;
5054 /* This less efficient loop is only needed to handle
5055 duplicate case values (multiple enum constants
5056 with the same value). */
5057 TREE_TYPE (val) = TREE_TYPE (root->low);
/* For each enumerator (offset xlo), search the AVL tree of case
   nodes for a range containing its value.  */
5058 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
5059 t = TREE_CHAIN (t), xlo++)
5061 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
5062 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
5066 /* Keep going past elements distinctly greater than VAL. */
5067 if (tree_int_cst_lt (val, n->low))
5070 /* or distinctly less than VAL. */
5071 else if (tree_int_cst_lt (n->high, val))
5076 /* We have found a matching range. */
5077 BITARRAY_SET (cases_seen, xlo);
/* Linear strategy: flatten the case tree into a right-linked list,
   then walk every value of every case range.  */
5087 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
5089 for (n = root; n; n = n->right)
5091 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
5092 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
5093 while (! tree_int_cst_lt (n->high, val))
5095 /* Calculate (into xlo) the "offset" of the integer (val).
5096 The element with lowest value has offset 0, the next smallest
5097 element has offset 1, etc. */
5099 unsigned HOST_WIDE_INT xlo;
5103 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
5105 /* The TYPE_VALUES will be in increasing order, so
5106 starting searching where we last ended. */
5107 t = next_node_to_try;
5108 xlo = next_node_offset;
/* Wrap around to the start of the enumerator list if needed.  */
5114 t = TYPE_VALUES (type);
5117 if (tree_int_cst_equal (val, TREE_VALUE (t)))
5119 next_node_to_try = TREE_CHAIN (t);
5120 next_node_offset = xlo + 1;
5125 if (t == next_node_to_try)
/* Dense case: offset is val - TYPE_MIN_VALUE, computed with
   double-precision host arithmetic (neg_double/add_double).  */
5134 t = TYPE_MIN_VALUE (type);
5136 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
5140 add_double (xlo, xhi,
5141 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
/* Only mark offsets that fit inside the bitmap.  */
5145 if (xhi == 0 && xlo < (unsigned HOST_WIDE_INT) count)
5146 BITARRAY_SET (cases_seen, xlo);
/* Advance VAL by one and continue through the range.  */
5148 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5150 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
5156 /* Called when the index of a switch statement is an enumerated type
5157 and there is no default label.
5159 Checks that all enumeration literals are covered by the case
5160 expressions of a switch. Also, warn if there are any extra
5161 switch cases that are *not* elements of the enumerated type.
5163 If all enumeration literals were covered by the case expressions,
5164 turn one of the expressions into the default expression since it should
5165 not be possible to fall through such a switch. */
5168 check_for_full_enumeration_handling (type)
5171 register struct case_node *n;
5172 register tree chain;
5173 #if 0 /* variable used by 'if 0'ed code below. */
5174 register struct case_node **l;
5178 /* True iff the selector type is a numbered set mode. */
5181 /* The number of possible selector values. */
5184 /* For each possible selector value, a one iff it has been matched
5185 by a case value alternative. */
5186 unsigned char *cases_seen;
5188 /* The allocated size of cases_seen, in chars. */
5189 HOST_WIDE_INT bytes_needed;
5194 size = all_cases_count (type, &sparseness);
5195 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
/* Skip the coverage check entirely for unknown sizes (size < 0),
   empty types, or very large enums (>= 600000 values).  */
5197 if (size > 0 && size < 600000
5198 /* We deliberately use calloc here, not cmalloc, so that we can suppress
5199 this optimization if we don't have enough memory rather than
5200 aborting, as xmalloc would do. */
5202 (unsigned char *) really_call_calloc (bytes_needed, 1)) != NULL)
5205 tree v = TYPE_VALUES (type);
5207 /* The time complexity of this code is normally O(N), where
5208 N being the number of members in the enumerated type.
5209 However, if type is a ENUMERAL_TYPE whose values do not
5210 increase monotonically, O(N*log(N)) time may be needed. */
5212 mark_seen_cases (type, cases_seen, size, sparseness);
/* Warn once per enumerator whose bit never got set.  */
5214 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
5215 if (BITARRAY_TEST (cases_seen, i) == 0)
5216 warning ("enumeration value `%s' not handled in switch",
5217 IDENTIFIER_POINTER (TREE_PURPOSE (v)))
5222 /* Now we go the other way around; we warn if there are case
5223 expressions that don't correspond to enumerators. This can
5224 occur since C and C++ don't enforce type-checking of
5225 assignments to enumeration variables. */
/* Flatten the AVL tree into a list first, if still a tree.  */
5227 if (case_stack->data.case_stmt.case_list
5228 && case_stack->data.case_stmt.case_list->left)
5229 case_stack->data.case_stmt.case_list
5230 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
5232 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
/* Look for an enumerator equal to this case's low bound.  */
5234 for (chain = TYPE_VALUES (type);
5235 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
5236 chain = TREE_CHAIN (chain))
5241 if (TYPE_NAME (type) == 0)
5242 warning ("case value `%ld' not in enumerated type",
5243 (long) TREE_INT_CST_LOW (n->low));
5245 warning ("case value `%ld' not in enumerated type `%s'",
5246 (long) TREE_INT_CST_LOW (n->low),
5247 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5250 : DECL_NAME (TYPE_NAME (type))));
/* For a case range, also check the high bound separately.  */
5252 if (!tree_int_cst_equal (n->low, n->high))
5254 for (chain = TYPE_VALUES (type);
5255 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
5256 chain = TREE_CHAIN (chain))
5261 if (TYPE_NAME (type) == 0)
5262 warning ("case value `%ld' not in enumerated type",
5263 (long) TREE_INT_CST_LOW (n->high));
5265 warning ("case value `%ld' not in enumerated type `%s'",
5266 (long) TREE_INT_CST_LOW (n->high),
5267 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5270 : DECL_NAME (TYPE_NAME (type))));
5276 /* ??? This optimization is disabled because it causes valid programs to
5277 fail. ANSI C does not guarantee that an expression with enum type
5278 will have a value that is the same as one of the enumeration literals. */
5280 /* If all values were found as case labels, make one of them the default
5281 label. Thus, this switch will never fall through. We arbitrarily pick
5282 the last one to make the default since this is likely the most
5283 efficient choice. */
5287 for (l = &case_stack->data.case_stmt.case_list;
5292 case_stack->data.case_stmt.default_label = (*l)->code_label;
5298 /* Free CN, and its children. */
/* NOTE(review): recursive post-order free of the case tree/list.  The
   null-pointer guard and the `free (cn)' call itself are elided from
   this listing — confirm against the full source.  */
5301 free_case_nodes (cn)
5306 free_case_nodes (cn->left);
5307 free_case_nodes (cn->right);
5313 /* Terminate a case (Pascal) or switch (C) statement
5314 in which ORIG_INDEX is the expression to be tested.
5315 Generate the code to test it and jump to the right place. */
/* NOTE(review): this listing elides many lines of the function body
   (braces, declarations such as `count', `index', `win', `before_case',
   `ncases', `labelvec', and several statements).  Comments below
   annotate only the visible structure.  */
5318 expand_end_case (orig_index)
5321 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE, orig_minval;
5322 rtx default_label = 0;
5323 register struct case_node *n;
5331 register struct nesting *thiscase = case_stack;
5332 tree index_expr, index_type;
5335 /* Don't crash due to previous errors. */
5336 if (thiscase == NULL)
5339 table_label = gen_label_rtx ();
5340 index_expr = thiscase->data.case_stmt.index_expr;
5341 index_type = TREE_TYPE (index_expr);
5342 unsignedp = TREE_UNSIGNED (index_type);
5344 do_pending_stack_adjust ();
5346 /* This might get a spurious warning in the presence of a syntax error;
5347 it could be fixed by moving the call to check_seenlabel after the
5348 check for error_mark_node, and copying the code of check_seenlabel that
5349 deals with case_stack->data.case_stmt.line_number_status /
5350 restore_line_number_status in front of the call to end_cleanup_deferral;
5351 However, this might miss some useful warnings in the presence of
5352 non-syntax errors. */
5355 /* An ERROR_MARK occurs for various reasons including invalid data type. */
5356 if (index_type != error_mark_node)
5358 /* If switch expression was an enumerated type, check that all
5359 enumeration literals are covered by the cases.
5360 No sense trying this if there's a default case, however. */
5362 if (!thiscase->data.case_stmt.default_label
5363 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
5364 && TREE_CODE (index_expr) != INTEGER_CST)
5365 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
5367 /* If we don't have a default-label, create one here,
5368 after the body of the switch. */
5369 if (thiscase->data.case_stmt.default_label == 0)
5371 thiscase->data.case_stmt.default_label
5372 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5373 expand_label (thiscase->data.case_stmt.default_label);
5375 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5377 before_case = get_last_insn ();
/* Flatten the AVL tree of case nodes into a right-linked list.  */
5379 if (thiscase->data.case_stmt.case_list
5380 && thiscase->data.case_stmt.case_list->left)
5381 thiscase->data.case_stmt.case_list
5382 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
5384 /* Simplify the case-list before we count it. */
5385 group_case_nodes (thiscase->data.case_stmt.case_list);
5387 /* Get upper and lower bounds of case values.
5388 Also convert all the case values to the index expr's data type. */
5391 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5393 /* Check low and high label values are integers. */
5394 if (TREE_CODE (n->low) != INTEGER_CST)
5396 if (TREE_CODE (n->high) != INTEGER_CST)
5399 n->low = convert (index_type, n->low);
5400 n->high = convert (index_type, n->high);
5402 /* Count the elements and track the largest and smallest
5403 of them (treating them as signed even if they are not). */
5411 if (INT_CST_LT (n->low, minval))
5413 if (INT_CST_LT (maxval, n->high))
5416 /* A range counts double, since it requires two compares. */
5417 if (! tree_int_cst_equal (n->low, n->high))
/* Remember the original minimum before MINVAL is rewritten below
   (the table-build code indexes relative to ORIG_MINVAL).  */
5421 orig_minval = minval;
5423 /* Compute span of values. */
5425 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5427 end_cleanup_deferral ();
/* With no case labels at all: evaluate the index for side effects
   only, then jump straight to the default label.  */
5431 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5433 emit_jump (default_label);
5436 /* If range of values is much bigger than number of values,
5437 make a sequence of conditional branches instead of a dispatch.
5438 If the switch-index is a constant, do it this way
5439 because we can optimize it. */
5441 #ifndef CASE_VALUES_THRESHOLD
5443 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
5445 /* If machine does not have a case insn that compares the
5446 bounds, this means extra overhead for dispatch tables
5447 which raises the threshold for using them. */
5448 #define CASE_VALUES_THRESHOLD 5
5449 #endif /* HAVE_casesi */
5450 #endif /* CASE_VALUES_THRESHOLD */
/* Decision-tree strategy: few cases, sparse range, or an index that
   folds to a constant.  */
5452 else if (count < CASE_VALUES_THRESHOLD
5453 || compare_tree_int (range, 10 * count) > 0
5454 /* RANGE may be signed, and really large ranges will show up
5455 as negative numbers. */
5456 || compare_tree_int (range, 0) < 0
5457 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5460 || TREE_CODE (index_expr) == INTEGER_CST
5461 /* These will reduce to a constant. */
5462 || (TREE_CODE (index_expr) == CALL_EXPR
5463 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
5464 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
5465 && DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_NORMAL
5466 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
5467 || (TREE_CODE (index_expr) == COMPOUND_EXPR
5468 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
5470 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5472 /* If the index is a short or char that we do not have
5473 an insn to handle comparisons directly, convert it to
5474 a full integer now, rather than letting each comparison
5475 generate the conversion. */
5477 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5478 && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
5479 == CODE_FOR_nothing))
5481 enum machine_mode wider_mode;
5482 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5483 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5484 if (cmp_optab->handlers[(int) wider_mode].insn_code
5485 != CODE_FOR_nothing)
5487 index = convert_to_mode (wider_mode, index, unsignedp);
5493 do_pending_stack_adjust ();
5495 index = protect_from_queue (index, 0);
5496 if (GET_CODE (index) == MEM)
5497 index = copy_to_reg (index);
5498 if (GET_CODE (index) == CONST_INT
5499 || TREE_CODE (index_expr) == INTEGER_CST)
5501 /* Make a tree node with the proper constant value
5502 if we don't already have one. */
5503 if (TREE_CODE (index_expr) != INTEGER_CST)
5506 = build_int_2 (INTVAL (index),
5507 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5508 index_expr = convert (index_type, index_expr);
5511 /* For constant index expressions we need only
5512 issue an unconditional branch to the appropriate
5513 target code. The job of removing any unreachable
5514 code is left to the optimisation phase if the
5515 "-O" option is specified. */
5516 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5517 if (! tree_int_cst_lt (index_expr, n->low)
5518 && ! tree_int_cst_lt (n->high, index_expr))
5522 emit_jump (label_rtx (n->code_label));
5524 emit_jump (default_label);
5528 /* If the index expression is not constant we generate
5529 a binary decision tree to select the appropriate
5530 target code. This is done as follows:
5532 The list of cases is rearranged into a binary tree,
5533 nearly optimal assuming equal probability for each case.
5535 The tree is transformed into RTL, eliminating
5536 redundant test conditions at the same time.
5538 If program flow could reach the end of the
5539 decision tree an unconditional jump to the
5540 default code is emitted. */
/* Cost weighting applies only to non-enum indexes that look like
   text (see estimate_case_costs).  */
5543 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
5544 && estimate_case_costs (thiscase->data.case_stmt.case_list));
5545 balance_case_nodes (&thiscase->data.case_stmt.case_list,
5547 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5548 default_label, index_type);
5549 emit_jump_if_reachable (default_label);
/* Jump-table strategy, first via the target's `casesi' pattern.  */
5558 enum machine_mode index_mode = SImode;
5559 int index_bits = GET_MODE_BITSIZE (index_mode);
5561 enum machine_mode op_mode;
5563 /* Convert the index to SImode. */
5564 if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
5565 > GET_MODE_BITSIZE (index_mode))
5567 enum machine_mode omode = TYPE_MODE (index_type);
5568 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
5570 /* We must handle the endpoints in the original mode. */
5571 index_expr = build (MINUS_EXPR, index_type,
5572 index_expr, minval);
5573 minval = integer_zero_node;
5574 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
/* Out-of-range (unsigned compare: range < index) goes to default.  */
5575 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
5576 omode, 1, 0, default_label);
5577 /* Now we can safely truncate. */
5578 index = convert_to_mode (index_mode, index, 0);
5582 if (TYPE_MODE (index_type) != index_mode)
5584 index_expr = convert (type_for_size (index_bits, 0),
5586 index_type = TREE_TYPE (index_expr);
5589 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5592 index = protect_from_queue (index, 0);
5593 do_pending_stack_adjust ();
/* Force each casesi operand into the mode its predicate demands.  */
5595 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
5596 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
5598 index = copy_to_mode_reg (op_mode, index);
5600 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
5602 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
5603 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
5605 op1 = copy_to_mode_reg (op_mode, op1);
5607 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
5609 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
5610 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
5612 op2 = copy_to_mode_reg (op_mode, op2);
5614 emit_jump_insn (gen_casesi (index, op1, op2,
5615 table_label, default_label));
/* Fallback: the `tablejump' pattern with an explicit index rebase.  */
5619 #ifdef HAVE_tablejump
5620 if (! win && HAVE_tablejump)
5622 index_type = thiscase->data.case_stmt.nominal_type;
5623 index_expr = fold (build (MINUS_EXPR, index_type,
5624 convert (index_type, index_expr),
5625 convert (index_type, minval)));
5626 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5628 index = protect_from_queue (index, 0);
5629 do_pending_stack_adjust ();
5631 do_tablejump (index, TYPE_MODE (index_type),
5632 expand_expr (range, NULL_RTX, VOIDmode, 0),
5633 table_label, default_label);
5640 /* Get table of labels to jump to, in order of case index. */
5642 ncases = TREE_INT_CST_LOW (range) + 1;
5643 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
5644 memset ((char *) labelvec, 0, ncases * sizeof (rtx));
5646 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
/* Table slot = case value minus the original minimum.  */
5648 register HOST_WIDE_INT i
5649 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);
5654 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
5655 if (i + TREE_INT_CST_LOW (orig_minval)
5656 == TREE_INT_CST_LOW (n->high))
5662 /* Fill in the gaps with the default. */
5663 for (i = 0; i < ncases; i++)
5664 if (labelvec[i] == 0)
5665 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
5667 /* Output the table */
5668 emit_label (table_label);
/* PC-relative (or PIC) tables use ADDR_DIFF_VEC; absolute tables use
   ADDR_VEC.  */
5670 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
5671 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5672 gen_rtx_LABEL_REF (Pmode, table_label),
5673 gen_rtvec_v (ncases, labelvec),
5674 const0_rtx, const0_rtx));
5676 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5677 gen_rtvec_v (ncases, labelvec)));
5679 /* If the case insn drops through the table,
5680 after the table we must jump to the default-label.
5681 Otherwise record no drop-through after the table. */
5682 #ifdef CASE_DROPS_THROUGH
5683 emit_jump (default_label);
/* Move the generated dispatch code to sit before the switch body.  */
5689 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
5690 reorder_insns (before_case, get_last_insn (),
5691 thiscase->data.case_stmt.start);
5694 end_cleanup_deferral ();
5696 if (thiscase->exit_label)
5697 emit_label (thiscase->exit_label);
5699 free_case_nodes (case_stack->data.case_stmt.case_list);
5700 POPSTACK (case_stack);
5705 /* Convert the tree NODE into a list linked by the right field, with the left
5706 field zeroed. RIGHT is used for recursion; it is a list to be placed
5707 rightmost in the resulting list. */
5709 static struct case_node *
5710 case_tree2list (node, right)
5711 struct case_node *node, *right;
5713 struct case_node *left;
/* First flatten the right subtree; it becomes the list that follows
   NODE.  */
5716 right = case_tree2list (node->right, right);
5718 node->right = right;
/* Then tail-recurse into the left subtree, with NODE (plus its flattened
   right side) as the list to append on the right.  */
5719 if ((left = node->left))
5722 return case_tree2list (left, node);
5728 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5731 do_jump_if_equal (op1, op2, label, unsignedp)
5732 rtx op1, op2, label;
/* If both operands are compile-time integer constants, decide the
   comparison now instead of emitting a runtime compare.  */
5735 if (GET_CODE (op1) == CONST_INT
5736 && GET_CODE (op2) == CONST_INT)
5738 if (INTVAL (op1) == INTVAL (op2))
/* Otherwise emit a real compare-and-branch.  A CONST_INT has
   VOIDmode, so fall back to OP2's mode in that case.  */
5743 enum machine_mode mode = GET_MODE (op1);
5744 if (mode == VOIDmode)
5745 mode = GET_MODE (op2);
5746 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX, mode, unsignedp,
5751 /* Not all case values are encountered equally. This function
5752 uses a heuristic to weight case labels, in cases where that
5753 looks like a reasonable thing to do.
5755 Right now, all we try to guess is text, and we establish the
5758 chars above space: 16
5767 If we find any cases in the switch that are not either -1 or in the range
5768 of valid ASCII characters, or are control characters other than those
5769 commonly used with "\", don't treat this switch scanning text.
5771 Return 1 if these nodes are suitable for cost estimation, otherwise
5775 estimate_case_costs (node)
/* Valid "text" values run from -1 (typically EOF) through 127.  */
5778 tree min_ascii = integer_minus_one_node;
5779 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5783 /* If we haven't already made the cost table, make it now. Note that the
5784 lower bound of the table is -1, not zero. */
5786 if (! cost_table_initialized)
5788 cost_table_initialized = 1;
5790 for (i = 0; i < 128; i++)
5793 COST_TABLE (i) = 16;
5794 else if (ISPUNCT (i))
/* Unusual control characters get a negative cost, which later
   marks the whole switch as "not text" (see the scan below).  */
5796 else if (ISCNTRL (i))
5797 COST_TABLE (i) = -1;
/* Common whitespace/escape characters get modest weights.  */
5800 COST_TABLE (' ') = 8;
5801 COST_TABLE ('\t') = 4;
5802 COST_TABLE ('\0') = 4;
5803 COST_TABLE ('\n') = 2;
5804 COST_TABLE ('\f') = 1;
5805 COST_TABLE ('\v') = 1;
5806 COST_TABLE ('\b') = 1;
5809 /* See if all the case expressions look like text. It is text if the
5810 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5811 as signed arithmetic since we don't want to ever access cost_table with a
5812 value less than -1. Also check that none of the constants in a range
5813 are strange control characters. */
5815 for (n = node; n; n = n->right)
5817 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
/* Any value inside the range with a negative cost disqualifies
   the whole switch.  */
5820 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
5821 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
5822 if (COST_TABLE (i) < 0)
5826 /* All interesting values are within the range of interesting
5827 ASCII characters. */
5831 /* Scan an ordered list of case nodes
5832 combining those with consecutive values or ranges.
5834 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
5837 group_case_nodes (head)
5840 case_node_ptr node = head;
/* LB is the first real insn after NODE's label; used to decide whether
   two labels ultimately reach the same code.  */
5844 rtx lb = next_real_insn (label_rtx (node->code_label));
5846 case_node_ptr np = node;
5848 /* Try to group the successors of NODE with NODE. */
5849 while (((np = np->right) != 0)
5850 /* Do they jump to the same place? */
5851 && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
5852 || (lb != 0 && lb2 != 0
5853 && simplejump_p (lb)
5854 && simplejump_p (lb2)
5855 && rtx_equal_p (SET_SRC (PATTERN (lb)),
5856 SET_SRC (PATTERN (lb2)))))
5857 /* Are their ranges consecutive? */
5858 && tree_int_cst_equal (np->low,
5859 fold (build (PLUS_EXPR,
5860 TREE_TYPE (node->high),
5863 /* An overflow is not consecutive. */
5864 && tree_int_cst_lt (node->high,
5865 fold (build (PLUS_EXPR,
5866 TREE_TYPE (node->high),
5868 integer_one_node))))
/* Absorb NP's range into NODE by extending the high bound.  */
5870 node->high = np->high;
5872 /* NP is the first node after NODE which can't be grouped with it.
5873 Delete the nodes in between, and move on to that node. */
5879 /* Take an ordered list of case nodes
5880 and transform them into a near optimal binary tree,
5881 on the assumption that any target code selection value is as
5882 likely as any other.
5884 The transformation is performed by splitting the ordered
5885 list into two equal sections plus a pivot. The parts are
5886 then attached to the pivot as left and right branches. Each
5887 branch is then transformed recursively. */
5890 balance_case_nodes (head, parent)
5891 case_node_ptr *head;
5892 case_node_ptr parent;
5894 register case_node_ptr np;
5902 register case_node_ptr *npp;
5905 /* Count the number of entries on branch. Also count the ranges. */
5909 if (!tree_int_cst_equal (np->low, np->high))
/* A range contributes the costs of both of its endpoints.  */
5913 cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
5917 cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
5925 /* Split this list if it is long enough for that to help. */
/* NOTE(review): the cost-weighted branch below runs when the case
   values were judged text-like (estimate_case_costs); the unweighted
   branch further down counts ranges as 2.  The selecting condition is
   elided from this listing — confirm against the full source.  */
5930 /* Find the place in the list that bisects the list's total cost,
5931 Here I gets half the total cost. */
5936 /* Skip nodes while their cost does not reach that amount. */
5937 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5938 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
5939 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
5942 npp = &(*npp)->right;
5947 /* Leave this branch lopsided, but optimize left-hand
5948 side and fill in `parent' fields for right-hand side. */
5950 np->parent = parent;
5951 balance_case_nodes (&np->left, np);
5952 for (; np->right; np = np->right)
5953 np->right->parent = np;
5957 /* If there are just three nodes, split at the middle one. */
5959 npp = &(*npp)->right;
5962 /* Find the place in the list that bisects the list's total cost,
5963 where ranges count as 2.
5964 Here I gets half the total cost. */
5965 i = (i + ranges + 1) / 2;
5968 /* Skip nodes while their cost does not reach that amount. */
5969 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5974 npp = &(*npp)->right;
/* NP becomes the pivot; everything before it is the left branch and
   everything after it the right branch.  */
5979 np->parent = parent;
5982 /* Optimize each of the two split parts. */
5983 balance_case_nodes (&np->left, np);
5984 balance_case_nodes (&np->right, np);
5988 /* Else leave this branch as one level,
5989 but fill in `parent' fields. */
5991 np->parent = parent;
5992 for (; np->right; np = np->right)
5993 np->right->parent = np;
5998 /* Search the parent sections of the case node tree
5999 to see if a test for the lower bound of NODE would be redundant.
6000 INDEX_TYPE is the type of the index expression.
6002 The instructions to generate the case decision tree are
6003 output in the same order as nodes are processed so it is
6004 known that if a parent node checks the range of the current
6005 node minus one that the current node is bounded at its lower
6006 span. Thus the test would be redundant. */
/* Return nonzero if a runtime test for NODE's lower bound would be
   redundant.  (Parameter declarations are elided in this view; NODE is a
   case_node_ptr and INDEX_TYPE the tree type of the switch index.)  */
6009 node_has_low_bound (node, index_type)
6014   case_node_ptr pnode;
6016   /* If the lower bound of this node is the lowest value in the index type,
6017      we need not test it. */
6019   if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
6022   /* If this node has a left branch, the value at the left must be less
6023      than that at this node, so it cannot be bounded at the bottom and
6024      we need not bother testing any further. */
6029   low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
6030 			       node->low, integer_one_node));
6032   /* If the subtraction above overflowed, we can't verify anything.
6033      Otherwise, look for a parent that tests our value - 1. */
6035   if (! tree_int_cst_lt (low_minus_one, node->low))
     /* Walk up the ancestor chain: an ancestor whose high bound equals
        low - 1 proves values below NODE->low were already dispatched.  */
6038   for (pnode = node->parent; pnode; pnode = pnode->parent)
6039     if (tree_int_cst_equal (low_minus_one, pnode->high))
6045 /* Search the parent sections of the case node tree
6046 to see if a test for the upper bound of NODE would be redundant.
6047 INDEX_TYPE is the type of the index expression.
6049 The instructions to generate the case decision tree are
6050 output in the same order as nodes are processed so it is
6051 known that if a parent node checks the range of the current
6052 node plus one that the current node is bounded at its upper
6053 span. Thus the test would be redundant. */
/* Return nonzero if a runtime test for NODE's upper bound would be
   redundant.  Mirror image of node_has_low_bound.  (Parameter declarations
   are elided in this view.)  */
6056 node_has_high_bound (node, index_type)
6061   case_node_ptr pnode;
6063   /* If there is no upper bound, obviously no test is needed. */
6065   if (TYPE_MAX_VALUE (index_type) == NULL)
6068   /* If the upper bound of this node is the highest value in the type
6069      of the index expression, we need not test against it. */
6071   if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
6074   /* If this node has a right branch, the value at the right must be greater
6075      than that at this node, so it cannot be bounded at the top and
6076      we need not bother testing any further. */
6081   high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
6082 			       node->high, integer_one_node));
6084   /* If the addition above overflowed, we can't verify anything.
6085      Otherwise, look for a parent that tests our value + 1. */
6087   if (! tree_int_cst_lt (node->high, high_plus_one))
     /* Walk up the ancestor chain: an ancestor whose low bound equals
        high + 1 proves values above NODE->high were already dispatched.  */
6090   for (pnode = node->parent; pnode; pnode = pnode->parent)
6091     if (tree_int_cst_equal (high_plus_one, pnode->low))
6097 /* Search the parent sections of the
6098 case node tree to see if both tests for the upper and lower
6099 bounds of NODE would be redundant. */
/* Return nonzero iff both the lower- and upper-bound tests for NODE are
   redundant, i.e. ancestors have already constrained the index to NODE's
   range on both sides.  */
6102 node_is_bounded (node, index_type)
6106   return (node_has_low_bound (node, index_type)
6107 	  && node_has_high_bound (node, index_type));
6110 /* Emit an unconditional jump to LABEL unless it would be dead code. */
/* Emit an unconditional jump to LABEL, but only when the last emitted insn
   is not a BARRIER — a barrier means control cannot reach this point, so
   the jump would be dead code.  (The emit_jump call itself is on a line
   elided from this view.)  */
6113 emit_jump_if_reachable (label)
6116   if (GET_CODE (get_last_insn ()) != BARRIER)
6120 /* Emit step-by-step code to select a case for the value of INDEX.
6121 The thus generated decision tree follows the form of the
6122 case-node binary tree NODE, whose nodes represent test conditions.
6123 INDEX_TYPE is the type of the index of the switch.
6125 Care is taken to prune redundant tests from the decision tree
6126 by detecting any boundary conditions already checked by
6127 emitted rtx. (See node_has_high_bound, node_has_low_bound
6128 and node_is_bounded, above.)
6130 Where the test conditions can be shown to be redundant we emit
6131 an unconditional jump to the target code. As a further
6132 optimization, the subordinates of a tree node are examined to
6133 check for bounded nodes. In this case conditional and/or
6134 unconditional jumps as a result of the boundary check for the
6135 current node are arranged to target the subordinate's associated
6136 code for out of bound conditions on the current node.
6138 We can assume that when control reaches the code generated here,
6139 the index value has already been compared with the parents
6140 of this node, and determined to be on the same side of each parent
6141 as this node is. Thus, if this node tests for the value 51,
6142 and a parent tested for 52, we don't need to consider
6143 the possibility of a value greater than 51. If another parent
6144 tests for the value 50, then this node need not test anything. */
6147 emit_case_nodes (index, node, default_label, index_type)
6153 /* If INDEX has an unsigned type, we must make unsigned branches. */
6154 int unsignedp = TREE_UNSIGNED (index_type);
6155 enum machine_mode mode = GET_MODE (index);
6157 /* See if our parents have already tested everything for us.
6158 If they have, emit an unconditional jump for this node. */
6159 if (node_is_bounded (node, index_type))
6160 emit_jump (label_rtx (node->code_label));
6162 else if (tree_int_cst_equal (node->low, node->high))
6164 /* Node is single valued. First see if the index expression matches
6165 this node and then check our children, if any. */
6167 do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
6168 label_rtx (node->code_label), unsignedp);
6170 if (node->right != 0 && node->left != 0)
6172 /* This node has children on both sides.
6173 Dispatch to one side or the other
6174 by comparing the index value with this node's value.
6175 If one subtree is bounded, check that one first,
6176 so we can avoid real branches in the tree. */
6178 if (node_is_bounded (node->right, index_type))
6180 emit_cmp_and_jump_insns (index,
6181 expand_expr (node->high, NULL_RTX,
6183 GT, NULL_RTX, mode, unsignedp, 0,
6184 label_rtx (node->right->code_label));
6185 emit_case_nodes (index, node->left, default_label, index_type);
6188 else if (node_is_bounded (node->left, index_type))
6190 emit_cmp_and_jump_insns (index,
6191 expand_expr (node->high, NULL_RTX,
6193 LT, NULL_RTX, mode, unsignedp, 0,
6194 label_rtx (node->left->code_label));
6195 emit_case_nodes (index, node->right, default_label, index_type);
6200 /* Neither node is bounded. First distinguish the two sides;
6201 then emit the code for one side at a time. */
6203 tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6205 /* See if the value is on the right. */
6206 emit_cmp_and_jump_insns (index,
6207 expand_expr (node->high, NULL_RTX,
6209 GT, NULL_RTX, mode, unsignedp, 0,
6210 label_rtx (test_label));
6212 /* Value must be on the left.
6213 Handle the left-hand subtree. */
6214 emit_case_nodes (index, node->left, default_label, index_type);
6215 /* If left-hand subtree does nothing,
6217 emit_jump_if_reachable (default_label);
6219 /* Code branches here for the right-hand subtree. */
6220 expand_label (test_label);
6221 emit_case_nodes (index, node->right, default_label, index_type);
6225 else if (node->right != 0 && node->left == 0)
6227 /* Here we have a right child but no left so we issue conditional
6228 branch to default and process the right child.
6230 Omit the conditional branch to default if we it avoid only one
6231 right child; it costs too much space to save so little time. */
6233 if (node->right->right || node->right->left
6234 || !tree_int_cst_equal (node->right->low, node->right->high))
6236 if (!node_has_low_bound (node, index_type))
6238 emit_cmp_and_jump_insns (index,
6239 expand_expr (node->high, NULL_RTX,
6241 LT, NULL_RTX, mode, unsignedp, 0,
6245 emit_case_nodes (index, node->right, default_label, index_type);
6248 /* We cannot process node->right normally
6249 since we haven't ruled out the numbers less than
6250 this node's value. So handle node->right explicitly. */
6251 do_jump_if_equal (index,
6252 expand_expr (node->right->low, NULL_RTX,
6254 label_rtx (node->right->code_label), unsignedp);
6257 else if (node->right == 0 && node->left != 0)
6259 /* Just one subtree, on the left. */
6261 #if 0 /* The following code and comment were formerly part
6262 of the condition here, but they didn't work
6263 and I don't understand what the idea was. -- rms. */
6264 /* If our "most probable entry" is less probable
6265 than the default label, emit a jump to
6266 the default label using condition codes
6267 already lying around. With no right branch,
6268 a branch-greater-than will get us to the default
6271 && COST_TABLE (TREE_INT_CST_LOW (node->high)) < 12)
6274 if (node->left->left || node->left->right
6275 || !tree_int_cst_equal (node->left->low, node->left->high))
6277 if (!node_has_high_bound (node, index_type))
6279 emit_cmp_and_jump_insns (index, expand_expr (node->high,
6282 GT, NULL_RTX, mode, unsignedp, 0,
6286 emit_case_nodes (index, node->left, default_label, index_type);
6289 /* We cannot process node->left normally
6290 since we haven't ruled out the numbers less than
6291 this node's value. So handle node->left explicitly. */
6292 do_jump_if_equal (index,
6293 expand_expr (node->left->low, NULL_RTX,
6295 label_rtx (node->left->code_label), unsignedp);
6300 /* Node is a range. These cases are very similar to those for a single
6301 value, except that we do not start by testing whether this node
6302 is the one to branch to. */
6304 if (node->right != 0 && node->left != 0)
6306 /* Node has subtrees on both sides.
6307 If the right-hand subtree is bounded,
6308 test for it first, since we can go straight there.
6309 Otherwise, we need to make a branch in the control structure,
6310 then handle the two subtrees. */
6311 tree test_label = 0;
6313 if (node_is_bounded (node->right, index_type))
6314 /* Right hand node is fully bounded so we can eliminate any
6315 testing and branch directly to the target code. */
6316 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6318 GT, NULL_RTX, mode, unsignedp, 0,
6319 label_rtx (node->right->code_label));
6322 /* Right hand node requires testing.
6323 Branch to a label where we will handle it later. */
6325 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6326 emit_cmp_and_jump_insns (index,
6327 expand_expr (node->high, NULL_RTX,
6329 GT, NULL_RTX, mode, unsignedp, 0,
6330 label_rtx (test_label));
6333 /* Value belongs to this node or to the left-hand subtree. */
6335 emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
6337 GE, NULL_RTX, mode, unsignedp, 0,
6338 label_rtx (node->code_label));
6340 /* Handle the left-hand subtree. */
6341 emit_case_nodes (index, node->left, default_label, index_type);
6343 /* If right node had to be handled later, do that now. */
6347 /* If the left-hand subtree fell through,
6348 don't let it fall into the right-hand subtree. */
6349 emit_jump_if_reachable (default_label);
6351 expand_label (test_label);
6352 emit_case_nodes (index, node->right, default_label, index_type);
6356 else if (node->right != 0 && node->left == 0)
6358 /* Deal with values to the left of this node,
6359 if they are possible. */
6360 if (!node_has_low_bound (node, index_type))
6362 emit_cmp_and_jump_insns (index,
6363 expand_expr (node->low, NULL_RTX,
6365 LT, NULL_RTX, mode, unsignedp, 0,
6369 /* Value belongs to this node or to the right-hand subtree. */
6371 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6373 LE, NULL_RTX, mode, unsignedp, 0,
6374 label_rtx (node->code_label));
6376 emit_case_nodes (index, node->right, default_label, index_type);
6379 else if (node->right == 0 && node->left != 0)
6381 /* Deal with values to the right of this node,
6382 if they are possible. */
6383 if (!node_has_high_bound (node, index_type))
6385 emit_cmp_and_jump_insns (index,
6386 expand_expr (node->high, NULL_RTX,
6388 GT, NULL_RTX, mode, unsignedp, 0,
6392 /* Value belongs to this node or to the left-hand subtree. */
6394 emit_cmp_and_jump_insns (index,
6395 expand_expr (node->low, NULL_RTX,
6397 GE, NULL_RTX, mode, unsignedp, 0,
6398 label_rtx (node->code_label));
6400 emit_case_nodes (index, node->left, default_label, index_type);
6405 /* Node has no children so we check low and high bounds to remove
6406 redundant tests. Only one of the bounds can exist,
6407 since otherwise this node is bounded--a case tested already. */
6409 if (!node_has_high_bound (node, index_type))
6411 emit_cmp_and_jump_insns (index,
6412 expand_expr (node->high, NULL_RTX,
6414 GT, NULL_RTX, mode, unsignedp, 0,
6418 if (!node_has_low_bound (node, index_type))
6420 emit_cmp_and_jump_insns (index,
6421 expand_expr (node->low, NULL_RTX,
6423 LT, NULL_RTX, mode, unsignedp, 0,
6427 emit_jump (label_rtx (node->code_label));