1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 It also creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
27 The functions whose names start with `expand_' are called by the
28 parser to generate RTL instructions for various kinds of constructs.
30 Some control and binding constructs require calling several such
31 functions at different times. For example, a simple if-then
32 is expanded by calling `expand_start_cond' (with the condition-expression
33 as argument) before parsing the then-clause and calling `expand_end_cond'
34 after parsing the then-clause. */
45 #include "insn-config.h"
48 #include "hard-reg-set.h"
56 #include "langhooks.h"
/* Obstack hooks: chunks for stmt_obstack come from xmalloc and are
   released with free.  */
59 #define obstack_chunk_alloc xmalloc
60 #define obstack_chunk_free free
/* Obstack from which `struct nesting' objects are allocated (see
   ALLOC_NESTING) and freed again when popped (see POPSTACK).  */
61 struct obstack stmt_obstack;
63 /* Assume that case vectors are not pc-relative. */
64 #ifndef CASE_VECTOR_PC_RELATIVE
65 #define CASE_VECTOR_PC_RELATIVE 0
68 /* Functions and data structures for expanding case statements. */
70 /* Case label structure, used to hold info on labels within case
71 statements. We handle "range" labels; for a single-value label
72 as in C, the high and low limits are the same.
74 An AVL tree of case nodes is initially created, and later transformed
75 to a list linked via the RIGHT fields in the nodes. Nodes with
76 higher case values are later in the list.
78 Switch statements can be output in one of two forms. A branch table
79 is used if there are more than a few labels and the labels are dense
80 within the range between the smallest and largest case value. If a
81 branch table is used, no further manipulations are done with the case
84 The alternative to the use of a branch table is to generate a series
85 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
86 and PARENT fields to hold a binary tree. Initially the tree is
87 totally unbalanced, with everything on the right. We balance the tree
88 with nodes on the left having lower case values than the parent
89 and nodes on the right having higher values. We then output the tree
94 struct case_node *left; /* Left son in binary tree */
95 struct case_node *right; /* Right son in binary tree; also node chain */
96 struct case_node *parent; /* Parent of node in binary tree */
97 tree low; /* Lowest index value for this label */
98 tree high; /* Highest index value for this label */
99 tree code_label; /* Label to jump to when node matches */
103 typedef struct case_node case_node;
104 typedef struct case_node *case_node_ptr;
106 /* These are used by estimate_case_costs and balance_case_nodes. */
108 /* This must be a signed type, and non-ANSI compilers lack signed char. */
109 static short cost_table_[129];
/* Nonzero when cost_table_ should be consulted when weighing case
   nodes -- presumably set by estimate_case_costs (its body is not
   visible in this chunk); TODO confirm.  */
110 static int use_cost_table;
/* Nonzero once cost_table_ has been filled in, so the table is
   presumably built only once per run -- TODO confirm against
   estimate_case_costs.  */
111 static int cost_table_initialized;
/* COST_TABLE(I) biases its index by 1, so I = -1 maps to slot 0 of the
   129-entry cost_table_.  */
113 /* Special care is needed because we allow -1, but TREE_INT_CST_LOW
115 #define COST_TABLE(I) cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
117 /* Stack of control and binding constructs we are currently inside.
119 These constructs begin when you call `expand_start_WHATEVER'
120 and end when you call `expand_end_WHATEVER'. This stack records
121 info about how the construct began that tells the end-function
122 what to do. It also may provide information about the construct
123 to alter the behavior of other constructs within the body.
124 For example, they may affect the behavior of C `break' and `continue'.
126 Each construct gets one `struct nesting' object.
127 All of these objects are chained through the `all' field.
128 `nesting_stack' points to the first object (innermost construct).
129 The position of an entry on `nesting_stack' is in its `depth' field.
131 Each type of construct has its own individual stack.
132 For example, loops have `loop_stack'. Each object points to the
133 next object of the same type through the `next' field.
135 Some constructs are visible to `break' exit-statements and others
136 are not. Which constructs are visible depends on the language.
137 Therefore, the data structure allows each construct to be visible
138 or not, according to the args given when the construct is started.
139 The construct is visible if the `exit_label' field is non-null.
140 In that case, the value should be a CODE_LABEL rtx. */
145 struct nesting *next;
150 /* For conds (if-then and if-then-else statements). */
153 /* Label for the end of the if construct.
154 There is none if EXITFLAG was not set
155 and no `else' has been seen yet. */
157 /* Label for the end of this alternative.
158 This may be the end of the if or the next else/elseif. */
164 /* Label at the top of the loop; place to loop back to. */
166 /* Label at the end of the whole construct. */
168 /* Label before a jump that branches to the end of the whole
169 construct. This is where destructors go if any. */
171 /* Label for `continue' statement to jump to;
172 this is in front of the stepper of the loop. */
175 /* For variable binding contours. */
178 /* Sequence number of this binding contour within the function,
179 in order of entry. */
180 int block_start_count;
181 /* Nonzero => value to restore stack to on exit. */
183 /* The NOTE that starts this contour.
184 Used by expand_goto to check whether the destination
185 is within each contour or not. */
187 /* Innermost containing binding contour that has a stack level. */
188 struct nesting *innermost_stack_block;
189 /* List of cleanups to be run on exit from this contour.
190 This is a list of expressions to be evaluated.
191 The TREE_PURPOSE of each link is the ..._DECL node
192 which the cleanup pertains to. */
194 /* List of cleanup-lists of blocks containing this block,
195 as they were at the locus where this block appears.
196 There is an element for each containing block,
197 ordered innermost containing block first.
198 The tail of this list can be 0,
199 if all remaining elements would be empty lists.
200 The element's TREE_VALUE is the cleanup-list of that block,
201 which may be null. */
203 /* Chain of labels defined inside this binding contour.
204 For contours that have stack levels or cleanups. */
205 struct label_chain *label_chain;
206 /* Number of function calls seen, as of start of this block. */
207 int n_function_calls;
208 /* Nonzero if this is associated with an EH region. */
209 int exception_region;
210 /* The saved target_temp_slot_level from our outer block.
211 We may reset target_temp_slot_level to be the level of
212 this block, if that is done, target_temp_slot_level
213 reverts to the saved target_temp_slot_level at the very
215 int block_target_temp_slot_level;
216 /* True if we are currently emitting insns in an area of
217 output code that is controlled by a conditional
218 expression. This is used by the cleanup handling code to
219 generate conditional cleanup actions. */
220 int conditional_code;
221 /* A place to move the start of the exception region for any
222 of the conditional cleanups, must be at the end or after
223 the start of the last unconditional cleanup, and before any
224 conditional branch points. */
225 rtx last_unconditional_cleanup;
226 /* When in a conditional context, this is the specific
227 cleanup list associated with last_unconditional_cleanup,
228 where we place the conditionalized cleanups. */
231 /* For switch (C) or case (Pascal) statements,
232 and also for dummies (see `expand_start_case_dummy'). */
235 /* The insn after which the case dispatch should finally
236 be emitted. Zero for a dummy. */
238 /* A list of case labels; it is first built as an AVL tree.
239 During expand_end_case, this is converted to a list, and may be
240 rearranged into a nearly balanced binary tree. */
241 struct case_node *case_list;
242 /* Label to jump to if no case matches. */
244 /* The expression to be dispatched on. */
246 /* Type that INDEX_EXPR should be converted to. */
248 /* Name of this kind of statement, for warnings. */
249 const char *printname;
250 /* Used to save no_line_numbers till we see the first case label.
251 We set this to -1 when we see the first case label in this
253 int line_number_status;
258 /* Allocate and return a new `struct nesting'. */
/* Allocate a `struct nesting' from stmt_obstack.  The whole expansion
   is parenthesized so that a postfix operator applied to
   ALLOC_NESTING () binds to the cast pointer, not to the raw result of
   obstack_alloc (member access binds tighter than a cast).  */
260 #define ALLOC_NESTING() \
261   ((struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting)))
263 /* Pop the nesting stack element by element until we pop off
264 the element which is at the top of STACK.
265 Update all the other stacks, popping off elements from them
266 as we pop them from nesting_stack. */
/* NOTE(review): the macro's scratch variable was named `this', a C++
   keyword, which prevents this file from being compiled by a C++
   compiler and confuses tooling; it is renamed to `t' here.  The
   expansion is otherwise unchanged: pop nesting_stack entries one by
   one, unlinking each from whichever per-construct stack heads it,
   until the entry TARGET itself has been popped and freed.  */
268 #define POPSTACK(STACK)					\
269 do { struct nesting *target = STACK;			\
270      struct nesting *t;					\
271      do { t = nesting_stack;				\
272	      if (loop_stack == t)				\
273	        loop_stack = loop_stack->next;			\
274	      if (cond_stack == t)				\
275	        cond_stack = cond_stack->next;			\
276	      if (block_stack == t)				\
277	        block_stack = block_stack->next;		\
278	      if (stack_block_stack == t)			\
279	        stack_block_stack = stack_block_stack->next;	\
280	      if (case_stack == t)				\
281	        case_stack = case_stack->next;			\
282	      nesting_depth = nesting_stack->depth - 1;		\
283	      nesting_stack = t->all;				\
284	      obstack_free (&stmt_obstack, t); }		\
285	while (t != target); } while (0)
287 /* In some cases it is impossible to generate code for a forward goto
288 until the label definition is seen. This happens when it may be necessary
289 for the goto to reset the stack pointer: we don't yet know how to do that.
290 So expand_goto puts an entry on this fixup list.
291 Each time a binding contour that resets the stack is exited,
293 If the target label has now been defined, we can insert the proper code. */
297 /* Points to following fixup. */
298 struct goto_fixup *next;
299 /* Points to the insn before the jump insn.
300 If more code must be inserted, it goes after this insn. */
302 /* The LABEL_DECL that this jump is jumping to, or 0
303 for break, continue or return. */
305 /* The BLOCK for the place where this goto was found. */
307 /* The CODE_LABEL rtx that this is jumping to. */
309 /* Number of binding contours started in current function
310 before the label reference. */
311 int block_start_count;
312 /* The outermost stack level that should be restored for this jump.
313 Each time a binding contour that resets the stack is exited,
314 if the target label is *not* yet defined, this slot is updated. */
316 /* List of lists of cleanup expressions to be run by this goto.
317 There is one element for each block that this goto is within.
318 The tail of this list can be 0,
319 if all remaining elements would be empty.
320 The TREE_VALUE contains the cleanup list of that block as of the
321 time this goto was seen.
322 The TREE_ADDRESSABLE flag is 1 for a block that has been exited. */
323 tree cleanup_list_list;
326 /* Within any binding contour that must restore a stack level,
327 all labels are recorded with a chain of these structures. */
331 /* Points to following fixup. */
332 struct label_chain *next;
338 /* Chain of all pending binding contours. */
339 struct nesting *x_block_stack;
341 /* If any new stacks are added here, add them to POPSTACKS too. */
343 /* Chain of all pending binding contours that restore stack levels
345 struct nesting *x_stack_block_stack;
347 /* Chain of all pending conditional statements. */
348 struct nesting *x_cond_stack;
350 /* Chain of all pending loops. */
351 struct nesting *x_loop_stack;
353 /* Chain of all pending case or switch statements. */
354 struct nesting *x_case_stack;
356 /* Separate chain including all of the above,
357 chained through the `all' field. */
358 struct nesting *x_nesting_stack;
360 /* Number of entries on nesting_stack now. */
363 /* Number of binding contours started so far in this function. */
364 int x_block_start_count;
366 /* Each time we expand an expression-statement,
367 record the expr's type and its RTL value here. */
368 tree x_last_expr_type;
369 rtx x_last_expr_value;
371 /* Nonzero if within a ({...}) grouping, in which case we must
372 always compute a value for each expr-stmt in case it is the last one. */
373 int x_expr_stmts_for_value;
375 /* Filename and line number of last line-number note,
376 whether we actually emitted it or not. */
377 const char *x_emit_filename;
380 struct goto_fixup *x_goto_fixup_chain;
/* Accessors for the statement-expansion state of the function being
   compiled.  Each macro names one x_* field of the current function's
   stmt_status record (cfun->stmt), so all of this state is saved and
   restored automatically with the function context.  */
383 #define block_stack (cfun->stmt->x_block_stack)
384 #define stack_block_stack (cfun->stmt->x_stack_block_stack)
385 #define cond_stack (cfun->stmt->x_cond_stack)
386 #define loop_stack (cfun->stmt->x_loop_stack)
387 #define case_stack (cfun->stmt->x_case_stack)
388 #define nesting_stack (cfun->stmt->x_nesting_stack)
389 #define nesting_depth (cfun->stmt->x_nesting_depth)
390 #define current_block_start_count (cfun->stmt->x_block_start_count)
391 #define last_expr_type (cfun->stmt->x_last_expr_type)
392 #define last_expr_value (cfun->stmt->x_last_expr_value)
393 #define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
394 #define emit_filename (cfun->stmt->x_emit_filename)
395 #define emit_lineno (cfun->stmt->x_emit_lineno)
396 #define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)
398 /* Non-zero if we are using EH to handle cleanups. */
/* Set to 1 by using_eh_for_cleanups below.  */
399 static int using_eh_for_cleanups_p = 0;
401 static int n_occurrences PARAMS ((int, const char *));
402 static bool parse_input_constraint PARAMS ((const char **, int, int, int,
403 int, const char * const *,
405 static void expand_goto_internal PARAMS ((tree, rtx, rtx));
406 static int expand_fixup PARAMS ((tree, rtx, rtx));
407 static rtx expand_nl_handler_label PARAMS ((rtx, rtx));
408 static void expand_nl_goto_receiver PARAMS ((void));
409 static void expand_nl_goto_receivers PARAMS ((struct nesting *));
410 static void fixup_gotos PARAMS ((struct nesting *, rtx, tree,
412 static bool check_operand_nalternatives PARAMS ((tree, tree));
413 static bool check_unique_operand_names PARAMS ((tree, tree));
414 static tree resolve_operand_names PARAMS ((tree, tree, tree,
416 static char *resolve_operand_name_1 PARAMS ((char *, tree, tree));
417 static void expand_null_return_1 PARAMS ((rtx));
418 static enum br_predictor return_prediction PARAMS ((rtx));
419 static void expand_value_return PARAMS ((rtx));
420 static int tail_recursion_args PARAMS ((tree, tree));
421 static void expand_cleanups PARAMS ((tree, tree, int, int));
422 static void check_seenlabel PARAMS ((void));
423 static void do_jump_if_equal PARAMS ((rtx, rtx, rtx, int));
424 static int estimate_case_costs PARAMS ((case_node_ptr));
425 static void group_case_nodes PARAMS ((case_node_ptr));
426 static void balance_case_nodes PARAMS ((case_node_ptr *,
428 static int node_has_low_bound PARAMS ((case_node_ptr, tree));
429 static int node_has_high_bound PARAMS ((case_node_ptr, tree));
430 static int node_is_bounded PARAMS ((case_node_ptr, tree));
431 static void emit_jump_if_reachable PARAMS ((rtx));
432 static void emit_case_nodes PARAMS ((rtx, case_node_ptr, rtx, tree));
433 static struct case_node *case_tree2list PARAMS ((case_node *, case_node *));
434 static void mark_cond_nesting PARAMS ((struct nesting *));
435 static void mark_loop_nesting PARAMS ((struct nesting *));
436 static void mark_block_nesting PARAMS ((struct nesting *));
437 static void mark_case_nesting PARAMS ((struct nesting *));
438 static void mark_case_node PARAMS ((struct case_node *));
439 static void mark_goto_fixup PARAMS ((struct goto_fixup *));
440 static void free_case_nodes PARAMS ((case_node_ptr));
443 using_eh_for_cleanups ()
445 using_eh_for_cleanups_p = 1;
448 /* Mark N (known to be a cond-nesting) for GC. */
451 mark_cond_nesting (n)
456 ggc_mark_rtx (n->exit_label);
457 ggc_mark_rtx (n->data.cond.endif_label);
458 ggc_mark_rtx (n->data.cond.next_label);
464 /* Mark N (known to be a loop-nesting) for GC. */
467 mark_loop_nesting (n)
473 ggc_mark_rtx (n->exit_label);
474 ggc_mark_rtx (n->data.loop.start_label);
475 ggc_mark_rtx (n->data.loop.end_label);
476 ggc_mark_rtx (n->data.loop.alt_end_label);
477 ggc_mark_rtx (n->data.loop.continue_label);
483 /* Mark N (known to be a block-nesting) for GC. */
486 mark_block_nesting (n)
491 struct label_chain *l;
493 ggc_mark_rtx (n->exit_label);
494 ggc_mark_rtx (n->data.block.stack_level);
495 ggc_mark_rtx (n->data.block.first_insn);
496 ggc_mark_tree (n->data.block.cleanups);
497 ggc_mark_tree (n->data.block.outer_cleanups);
499 for (l = n->data.block.label_chain; l != NULL; l = l->next)
502 ggc_mark_tree (l->label);
505 ggc_mark_rtx (n->data.block.last_unconditional_cleanup);
507 /* ??? cleanup_ptr never points outside the stack, does it? */
513 /* Mark N (known to be a case-nesting) for GC. */
516 mark_case_nesting (n)
521 ggc_mark_rtx (n->exit_label);
522 ggc_mark_rtx (n->data.case_stmt.start);
524 ggc_mark_tree (n->data.case_stmt.default_label);
525 ggc_mark_tree (n->data.case_stmt.index_expr);
526 ggc_mark_tree (n->data.case_stmt.nominal_type);
528 mark_case_node (n->data.case_stmt.case_list);
541 ggc_mark_tree (c->low);
542 ggc_mark_tree (c->high);
543 ggc_mark_tree (c->code_label);
545 mark_case_node (c->right);
546 mark_case_node (c->left);
554 struct goto_fixup *g;
559 ggc_mark_rtx (g->before_jump);
560 ggc_mark_tree (g->target);
561 ggc_mark_tree (g->context);
562 ggc_mark_rtx (g->target_rtl);
563 ggc_mark_rtx (g->stack_level);
564 ggc_mark_tree (g->cleanup_list_list);
570 /* Clear out all parts of the state in F that can safely be discarded
571 after the function has been compiled, to let garbage collection
572 reclaim the memory. */
578 /* We're about to free the function obstack. If we hold pointers to
579 things allocated there, then we'll try to mark them when we do
580 GC. So, we clear them out here explicitly. */
590 struct stmt_status *p;
595 mark_block_nesting (p->x_block_stack);
596 mark_cond_nesting (p->x_cond_stack);
597 mark_loop_nesting (p->x_loop_stack);
598 mark_case_nesting (p->x_case_stack);
600 ggc_mark_tree (p->x_last_expr_type);
601 /* last_expr_value is only valid if last_expr_type is nonzero. */
602 if (p->x_last_expr_type)
603 ggc_mark_rtx (p->x_last_expr_value);
605 mark_goto_fixup (p->x_goto_fixup_chain);
611 gcc_obstack_init (&stmt_obstack);
615 init_stmt_for_function ()
617 cfun->stmt = (struct stmt_status *) xmalloc (sizeof (struct stmt_status));
619 /* We are not currently within any block, conditional, loop or case. */
621 stack_block_stack = 0;
628 current_block_start_count = 0;
630 /* No gotos have been expanded yet. */
631 goto_fixup_chain = 0;
633 /* We are not processing a ({...}) grouping. */
634 expr_stmts_for_value = 0;
636 last_expr_value = NULL_RTX;
639 /* Return nonzero if anything is pushed on the loop, condition, or case
644 return cond_stack || loop_stack || case_stack;
647 /* Record the current file and line. Called from emit_line_note. */
649 set_file_and_line_for_stmt (file, line)
653 /* If we're outputting an inline function, and we add a line note,
654 there may be no CFUN->STMT information. So, there's no need to
658 emit_filename = file;
663 /* Emit a no-op instruction. */
670 last_insn = get_last_insn ();
672 && (GET_CODE (last_insn) == CODE_LABEL
673 || (GET_CODE (last_insn) == NOTE
674 && prev_real_insn (last_insn) == 0)))
675 emit_insn (gen_nop ());
678 /* Return the rtx-label that corresponds to a LABEL_DECL,
679 creating it if necessary. */
685 if (TREE_CODE (label) != LABEL_DECL)
688 if (!DECL_RTL_SET_P (label))
689 SET_DECL_RTL (label, gen_label_rtx ());
691 return DECL_RTL (label);
695 /* Add an unconditional jump to LABEL as the next sequential instruction. */
701 do_pending_stack_adjust ();
702 emit_jump_insn (gen_jump (label));
706 /* Emit code to jump to the address
707 specified by the pointer expression EXP. */
710 expand_computed_goto (exp)
713 rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
715 #ifdef POINTERS_EXTEND_UNSIGNED
716 if (GET_MODE (x) != Pmode)
717 x = convert_memory_address (Pmode, x);
721 do_pending_stack_adjust ();
722 emit_indirect_jump (x);
724 current_function_has_computed_jump = 1;
727 /* Handle goto statements and the labels that they can go to. */
729 /* Specify the location in the RTL code of a label LABEL,
730 which is a LABEL_DECL tree node.
732 This is used for the kind of label that the user can jump to with a
733 goto statement, and for alternatives of a switch or case statement.
734 RTL labels generated for loops and conditionals don't go through here;
735 they are generated directly at the RTL level, by other functions below.
737 Note that this has nothing to do with defining label *names*.
738 Languages vary in how they do that and what that even means. */
744 struct label_chain *p;
746 do_pending_stack_adjust ();
747 emit_label (label_rtx (label));
748 if (DECL_NAME (label))
749 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
751 if (stack_block_stack != 0)
753 p = (struct label_chain *) ggc_alloc (sizeof (struct label_chain));
754 p->next = stack_block_stack->data.block.label_chain;
755 stack_block_stack->data.block.label_chain = p;
760 /* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
761 from nested functions. */
764 declare_nonlocal_label (label)
767 rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
769 nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
770 LABEL_PRESERVE_P (label_rtx (label)) = 1;
771 if (nonlocal_goto_handler_slots == 0)
773 emit_stack_save (SAVE_NONLOCAL,
774 &nonlocal_goto_stack_level,
775 PREV_INSN (tail_recursion_reentry));
777 nonlocal_goto_handler_slots
778 = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
781 /* Generate RTL code for a `goto' statement with target label LABEL.
782 LABEL should be a LABEL_DECL tree node that was or will later be
783 defined with `expand_label'. */
791 /* Check for a nonlocal goto to a containing function. */
792 context = decl_function_context (label);
793 if (context != 0 && context != current_function_decl)
795 struct function *p = find_function_data (context);
796 rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
797 rtx handler_slot, static_chain, save_area, insn;
800 /* Find the corresponding handler slot for this label. */
801 handler_slot = p->x_nonlocal_goto_handler_slots;
802 for (link = p->x_nonlocal_labels; TREE_VALUE (link) != label;
803 link = TREE_CHAIN (link))
804 handler_slot = XEXP (handler_slot, 1);
805 handler_slot = XEXP (handler_slot, 0);
807 p->has_nonlocal_label = 1;
808 current_function_has_nonlocal_goto = 1;
809 LABEL_REF_NONLOCAL_P (label_ref) = 1;
811 /* Copy the rtl for the slots so that they won't be shared in
812 case the virtual stack vars register gets instantiated differently
813 in the parent than in the child. */
815 static_chain = copy_to_reg (lookup_static_chain (label));
817 /* Get addr of containing function's current nonlocal goto handler,
818 which will do any cleanups and then jump to the label. */
819 handler_slot = copy_to_reg (replace_rtx (copy_rtx (handler_slot),
820 virtual_stack_vars_rtx,
823 /* Get addr of containing function's nonlocal save area. */
824 save_area = p->x_nonlocal_goto_stack_level;
826 save_area = replace_rtx (copy_rtx (save_area),
827 virtual_stack_vars_rtx, static_chain);
829 #if HAVE_nonlocal_goto
830 if (HAVE_nonlocal_goto)
831 emit_insn (gen_nonlocal_goto (static_chain, handler_slot,
832 save_area, label_ref));
836 /* Restore frame pointer for containing function.
837 This sets the actual hard register used for the frame pointer
838 to the location of the function's incoming static chain info.
839 The non-local goto handler will then adjust it to contain the
840 proper value and reload the argument pointer, if needed. */
841 emit_move_insn (hard_frame_pointer_rtx, static_chain);
842 emit_stack_restore (SAVE_NONLOCAL, save_area, NULL_RTX);
844 /* USE of hard_frame_pointer_rtx added for consistency;
845 not clear if really needed. */
846 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
847 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
848 emit_indirect_jump (handler_slot);
851 /* Search backwards to the jump insn and mark it as a
853 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
855 if (GET_CODE (insn) == JUMP_INSN)
857 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
858 const0_rtx, REG_NOTES (insn));
861 else if (GET_CODE (insn) == CALL_INSN)
866 expand_goto_internal (label, label_rtx (label), NULL_RTX);
869 /* Generate RTL code for a `goto' statement with target label BODY.
870 LABEL should be a LABEL_REF.
871 LAST_INSN, if non-0, is the rtx we should consider as the last
872 insn emitted (for the purposes of cleaning up a return). */
875 expand_goto_internal (body, label, last_insn)
880 struct nesting *block;
883 if (GET_CODE (label) != CODE_LABEL)
886 /* If label has already been defined, we can tell now
887 whether and how we must alter the stack level. */
889 if (PREV_INSN (label) != 0)
891 /* Find the innermost pending block that contains the label.
892 (Check containment by comparing insn-uids.)
893 Then restore the outermost stack level within that block,
894 and do cleanups of all blocks contained in it. */
895 for (block = block_stack; block; block = block->next)
897 if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
899 if (block->data.block.stack_level != 0)
900 stack_level = block->data.block.stack_level;
901 /* Execute the cleanups for blocks we are exiting. */
902 if (block->data.block.cleanups != 0)
904 expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
905 do_pending_stack_adjust ();
911 /* Ensure stack adjust isn't done by emit_jump, as this
912 would clobber the stack pointer. This one should be
913 deleted as dead by flow. */
914 clear_pending_stack_adjust ();
915 do_pending_stack_adjust ();
917 /* Don't do this adjust if it's to the end label and this function
918 is to return with a depressed stack pointer. */
919 if (label == return_label
920 && (((TREE_CODE (TREE_TYPE (current_function_decl))
922 && (TYPE_RETURNS_STACK_DEPRESSED
923 (TREE_TYPE (current_function_decl))))))
926 emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
929 if (body != 0 && DECL_TOO_LATE (body))
930 error ("jump to `%s' invalidly jumps into binding contour",
931 IDENTIFIER_POINTER (DECL_NAME (body)));
933 /* Label not yet defined: may need to put this goto
934 on the fixup list. */
935 else if (! expand_fixup (body, label, last_insn))
937 /* No fixup needed. Record that the label is the target
938 of at least one goto that has no fixup. */
940 TREE_ADDRESSABLE (body) = 1;
946 /* Generate if necessary a fixup for a goto
947 whose target label in tree structure (if any) is TREE_LABEL
948 and whose target in rtl is RTL_LABEL.
950 If LAST_INSN is nonzero, we pretend that the jump appears
951 after insn LAST_INSN instead of at the current point in the insn stream.
953 The fixup will be used later to insert insns just before the goto.
954 Those insns will restore the stack level as appropriate for the
955 target label, and will (in the case of C++) also invoke any object
956 destructors which have to be invoked when we exit the scopes which
957 are exited by the goto.
959 Value is nonzero if a fixup is made. */
962 expand_fixup (tree_label, rtl_label, last_insn)
967 struct nesting *block, *end_block;
969 /* See if we can recognize which block the label will be output in.
970 This is possible in some very common cases.
971 If we succeed, set END_BLOCK to that block.
972 Otherwise, set it to 0. */
975 && (rtl_label == cond_stack->data.cond.endif_label
976 || rtl_label == cond_stack->data.cond.next_label))
977 end_block = cond_stack;
978 /* If we are in a loop, recognize certain labels which
979 are likely targets. This reduces the number of fixups
980 we need to create. */
982 && (rtl_label == loop_stack->data.loop.start_label
983 || rtl_label == loop_stack->data.loop.end_label
984 || rtl_label == loop_stack->data.loop.continue_label))
985 end_block = loop_stack;
989 /* Now set END_BLOCK to the binding level to which we will return. */
993 struct nesting *next_block = end_block->all;
996 /* First see if the END_BLOCK is inside the innermost binding level.
997 If so, then no cleanups or stack levels are relevant. */
998 while (next_block && next_block != block)
999 next_block = next_block->all;
1004 /* Otherwise, set END_BLOCK to the innermost binding level
1005 which is outside the relevant control-structure nesting. */
1006 next_block = block_stack->next;
1007 for (block = block_stack; block != end_block; block = block->all)
1008 if (block == next_block)
1009 next_block = next_block->next;
1010 end_block = next_block;
1013 /* Does any containing block have a stack level or cleanups?
1014 If not, no fixup is needed, and that is the normal case
1015 (the only case, for standard C). */
1016 for (block = block_stack; block != end_block; block = block->next)
1017 if (block->data.block.stack_level != 0
1018 || block->data.block.cleanups != 0)
1021 if (block != end_block)
1023 /* Ok, a fixup is needed. Add a fixup to the list of such. */
1024 struct goto_fixup *fixup
1025 = (struct goto_fixup *) ggc_alloc (sizeof (struct goto_fixup));
1026 /* In case an old stack level is restored, make sure that comes
1027 after any pending stack adjust. */
1028 /* ?? If the fixup isn't to come at the present position,
1029 doing the stack adjust here isn't useful. Doing it with our
1030 settings at that location isn't useful either. Let's hope
1033 do_pending_stack_adjust ();
1034 fixup->target = tree_label;
1035 fixup->target_rtl = rtl_label;
1037 /* Create a BLOCK node and a corresponding matched set of
1038 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
1039 this point. The notes will encapsulate any and all fixup
1040 code which we might later insert at this point in the insn
1041 stream. Also, the BLOCK node will be the parent (i.e. the
1042 `SUPERBLOCK') of any other BLOCK nodes which we might create
1043 later on when we are expanding the fixup code.
1045 Note that optimization passes (including expand_end_loop)
1046 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
1047 as a placeholder. */
1050 rtx original_before_jump
1051 = last_insn ? last_insn : get_last_insn ();
1056 block = make_node (BLOCK);
1057 TREE_USED (block) = 1;
1059 if (!cfun->x_whole_function_mode_p)
1060 (*lang_hooks.decls.insert_block) (block);
1064 = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
1065 BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
1070 start = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
1071 if (cfun->x_whole_function_mode_p)
1072 NOTE_BLOCK (start) = block;
1073 fixup->before_jump = emit_note (NULL, NOTE_INSN_DELETED);
1074 end = emit_note (NULL, NOTE_INSN_BLOCK_END);
1075 if (cfun->x_whole_function_mode_p)
1076 NOTE_BLOCK (end) = block;
1077 fixup->context = block;
1079 emit_insns_after (start, original_before_jump);
1082 fixup->block_start_count = current_block_start_count;
1083 fixup->stack_level = 0;
1084 fixup->cleanup_list_list
1085 = ((block->data.block.outer_cleanups
1086 || block->data.block.cleanups)
1087 ? tree_cons (NULL_TREE, block->data.block.cleanups,
1088 block->data.block.outer_cleanups)
1090 fixup->next = goto_fixup_chain;
1091 goto_fixup_chain = fixup;
1097 /* Expand any needed fixups in the outermost binding level of the
1098 function. FIRST_INSN is the first insn in the function. */
1101 expand_fixups (first_insn)
     /* Finalize every fixup still on goto_fixup_chain: a null THISBLOCK,
        stack level, and cleanup list means "no contour remains", so
        fixup_gotos runs all remaining cleanups and finishes each jump.  */
1104 fixup_gotos (NULL, NULL_RTX, NULL_TREE, first_insn, 0);
1107 /* When exiting a binding contour, process all pending gotos requiring fixups.
1108 THISBLOCK is the structure that describes the block being exited.
1109 STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
1110 CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
1111 FIRST_INSN is the insn that began this contour.
1113 Gotos that jump out of this contour must restore the
1114 stack level and do the cleanups before actually jumping.
1116 DONT_JUMP_IN nonzero means report error there is a jump into this
1117 contour from before the beginning of the contour.
1118 This is also done if STACK_LEVEL is nonzero. */
1121 fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
1122 struct nesting *thisblock;
1128 struct goto_fixup *f, *prev;
1130 /* F is the fixup we are considering; PREV is the previous one. */
1131 /* We run this loop in two passes so that cleanups of exited blocks
1132 are run first, and blocks that are exited are marked so
     /* Pass 1: walk the goto_fixup_chain, pruning fixups already handled
        and finalizing those whose target label has now been defined.  */
1135 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1137 /* Test for a fixup that is inactive because it is already handled. */
1138 if (f->before_jump == 0)
1140 /* Delete inactive fixup from the chain, if that is easy to do. */
1142 prev->next = f->next;
1144 /* Has this fixup's target label been defined?
1145 If so, we can finalize it. */
1146 else if (PREV_INSN (f->target_rtl) != 0)
1150 /* If this fixup jumped into this contour from before the beginning
1151 of this contour, report an error. This code used to use
1152 the first non-label insn after f->target_rtl, but that's
1153 wrong since such can be added, by things like put_var_into_stack
1154 and have INSN_UIDs that are out of the range of the block. */
1155 /* ??? Bug: this does not detect jumping in through intermediate
1156 blocks that have stack levels or cleanups.
1157 It detects only a problem with the innermost block
1158 around the label. */
     /* The UID comparisons below test whether the jump originates before
        this contour began while its label lies inside the contour.  */
1160 && (dont_jump_in || stack_level || cleanup_list)
1161 && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
1162 && INSN_UID (first_insn) > INSN_UID (f->before_jump)
1163 && ! DECL_ERROR_ISSUED (f->target))
1165 error_with_decl (f->target,
1166 "label `%s' used before containing binding contour");
1167 /* Prevent multiple errors for one label. */
1168 DECL_ERROR_ISSUED (f->target) = 1;
1171 /* We will expand the cleanups into a sequence of their own and
1172 then later on we will attach this new sequence to the insn
1173 stream just ahead of the actual jump insn. */
1177 /* Temporarily restore the lexical context where we will
1178 logically be inserting the fixup code. We do this for the
1179 sake of getting the debugging information right. */
1181 (*lang_hooks.decls.pushlevel) (0);
1182 (*lang_hooks.decls.set_block) (f->context);
1184 /* Expand the cleanups for blocks this jump exits. */
1185 if (f->cleanup_list_list)
1188 for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
1189 /* Marked elements correspond to blocks that have been closed.
1190 Do their cleanups. */
1191 if (TREE_ADDRESSABLE (lists)
1192 && TREE_VALUE (lists) != 0)
1194 expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
1195 /* Pop any pushes done in the cleanups,
1196 in case function is about to return. */
1197 do_pending_stack_adjust ();
1201 /* Restore stack level for the biggest contour that this
1202 jump jumps out of. */
     /* Exception: jumping to the return label of a function whose type
        returns with a depressed stack must NOT restore the stack.  */
1204 && ! (f->target_rtl == return_label
1205 && ((TREE_CODE (TREE_TYPE (current_function_decl))
1207 && (TYPE_RETURNS_STACK_DEPRESSED
1208 (TREE_TYPE (current_function_decl))))))
1209 emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);
1211 /* Finish up the sequence containing the insns which implement the
1212 necessary cleanups, and then attach that whole sequence to the
1213 insn stream just ahead of the actual jump insn. Attaching it
1214 at that point ensures that any cleanups which are in fact
1215 implicit C++ object destructions (which must be executed upon
1216 leaving the block) appear (to the debugger) to be taking place
1217 in an area of the generated code where the object(s) being
1218 destructed are still "in scope". */
1220 cleanup_insns = get_insns ();
1221 (*lang_hooks.decls.poplevel) (1, 0, 0);
1224 emit_insns_after (cleanup_insns, f->before_jump);
1230 /* For any still-undefined labels, do the cleanups for this block now.
1231 We must do this now since items in the cleanup list may go out
1232 of scope when the block ends. */
     /* Pass 2: for fixups whose label has not yet appeared, run this
        block's cleanups immediately and record the stack level so the
        fixup can be finished when the label is finally defined.  */
1233 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1234 if (f->before_jump != 0
1235 && PREV_INSN (f->target_rtl) == 0
1236 /* Label has still not appeared. If we are exiting a block with
1237 a stack level to restore, that started before the fixup,
1238 mark this stack level as needing restoration
1239 when the fixup is later finalized. */
1241 /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
1242 means the label is undefined. That's erroneous, but possible. */
1243 && (thisblock->data.block.block_start_count
1244 <= f->block_start_count))
1246 tree lists = f->cleanup_list_list;
1249 for (; lists; lists = TREE_CHAIN (lists))
1250 /* If the following elt. corresponds to our containing block
1251 then the elt. must be for this block. */
1252 if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
1255 (*lang_hooks.decls.pushlevel) (0);
1256 (*lang_hooks.decls.set_block) (f->context);
1257 expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
1258 do_pending_stack_adjust ();
1259 cleanup_insns = get_insns ();
1260 (*lang_hooks.decls.poplevel) (1, 0, 0);
1262 if (cleanup_insns != 0)
1264 = emit_insns_after (cleanup_insns, f->before_jump);
1266 f->cleanup_list_list = TREE_CHAIN (lists);
1270 f->stack_level = stack_level;
1274 /* Return the number of times character C occurs in string S.
     Straightforward linear scan; S must be NUL-terminated.  */
1276 n_occurrences (c, s)
1286 /* Generate RTL for an asm statement (explicit assembler code).
1287 BODY is a STRING_CST node containing the assembler code text,
1288 or an ADDR_EXPR containing a STRING_CST. */
     /* Strip the ADDR_EXPR wrapper, if any, to get at the STRING_CST.  */
1294 if (TREE_CODE (body) == ADDR_EXPR)
1295 body = TREE_OPERAND (body, 0);
     /* A no-operand asm becomes a bare ASM_INPUT rtx holding the text.  */
1297 emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
1298 TREE_STRING_POINTER (body)));
1302 /* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
1303 OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
1304 inputs and NOUTPUTS outputs to this extended-asm. Upon return,
1305 *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
1306 memory operand. Similarly, *ALLOWS_REG will be TRUE iff the
1307 constraint allows the use of a register operand. And, *IS_INOUT
1308 will be true if the operand is read-write, i.e., if it is used as
1309 an input as well as an output. If *CONSTRAINT_P is not in
1310 canonical form, it will be made canonical. (Note that `+' will be
1311 replaced with `=' as part of this process.)
1313 Returns TRUE if all went well; FALSE if an error occurred. */
1316 parse_output_constraint (constraint_p, operand_num, ninputs, noutputs,
1317 allows_mem, allows_reg, is_inout)
1318 const char **constraint_p;
1326 const char *constraint = *constraint_p;
1329 /* Assume the constraint doesn't allow the use of either a register
1331 *allows_mem = false;
1332 *allows_reg = false;
1334 /* Allow the `=' or `+' to not be at the beginning of the string,
1335 since it wasn't explicitly documented that way, and there is a
1336 large body of code that puts it last. Swap the character to
1337 the front, so as not to uglify any place else. */
1338 p = strchr (constraint, '=');
1340 p = strchr (constraint, '+');
1342 /* If the string doesn't contain an `=', issue an error
1346 error ("output operand constraint lacks `='");
1350 /* If the constraint begins with `+', then the operand is both read
1351 from and written to. */
1352 *is_inout = (*p == '+');
1354 /* Canonicalize the output constraint so that it begins with `='. */
     /* NOTE(review): `is_inout' here is a pointer parameter, so this
        condition is always true; it likely intends `*is_inout'.  Harmless
        (the copy is redundant, not wrong) -- confirm against upstream.  */
1355 if (p != constraint || is_inout)
1358 size_t c_len = strlen (constraint);
1360 if (p != constraint)
1361 warning ("output constraint `%c' for operand %d is not at the beginning",
1364 /* Make a copy of the constraint. */
1365 buf = alloca (c_len + 1);
1366 strcpy (buf, constraint);
1367 /* Swap the first character and the `=' or `+'. */
1368 buf[p - constraint] = buf[0];
1369 /* Make sure the first character is an `='. (Until we do this,
1370 it might be a `+'.) */
1372 /* Replace the constraint with the canonicalized string. */
1373 *constraint_p = ggc_alloc_string (buf, c_len);
1374 constraint = *constraint_p;
1377 /* Loop through the constraint string. */
1378 for (p = constraint + 1; *p; ++p)
1383 error ("operand constraint contains incorrectly positioned '+' or '='");
1387 if (operand_num + 1 == ninputs + noutputs)
1389 error ("`%%' constraint used with last operand")/* `%' applies to the NEXT operand, so it is invalid on the last */;
1394 case 'V': case 'm': case 'o':
1398 case '?': case '!': case '*': case '&': case '#':
1399 case 'E': case 'F': case 'G': case 'H':
1400 case 's': case 'i': case 'n':
1401 case 'I': case 'J': case 'K': case 'L': case 'M':
1402 case 'N': case 'O': case 'P': case ',':
1405 case '0': case '1': case '2': case '3': case '4':
1406 case '5': case '6': case '7': case '8': case '9':
1408 error ("matching constraint not valid in output operand");
1412 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1413 excepting those that expand_call created. So match memory
     /* Machine-specific register-class letters: any recognized class
        letter means a register operand is acceptable.  */
1430 if (REG_CLASS_FROM_LETTER (*p) != NO_REGS)
1432 #ifdef EXTRA_CONSTRAINT
1435 /* Otherwise we can't assume anything about the nature of
1436 the constraint except that it isn't purely registers.
1437 Treat it like "g" and hope for the best. */
1448 /* Similar, but for input constraints.
     Parses the INPUT_NUMth input constraint; CONSTRAINTS is the full
     array (outputs first) so a matching-digit constraint can be
     resolved to the constraint of the referenced output operand.
     Sets *ALLOWS_MEM / *ALLOWS_REG like parse_output_constraint.  */
1451 parse_input_constraint (constraint_p, input_num, ninputs, noutputs, ninout,
1452 constraints, allows_mem, allows_reg)
1453 const char **constraint_p;
1458 const char * const * constraints;
1462 const char *constraint = *constraint_p;
1463 const char *orig_constraint = constraint;
1464 size_t c_len = strlen (constraint);
1467 /* Assume the constraint doesn't allow the use of either
1468 a register or memory. */
1469 *allows_mem = false;
1470 *allows_reg = false;
1472 /* Make sure constraint has neither `=', `+', nor '&'. */
1474 for (j = 0; j < c_len; j++)
1475 switch (constraint[j])
1477 case '+': case '=': case '&':
     /* Only diagnose on the operand's own constraint, not on one we
        substituted from a matched output below.  */
1478 if (constraint == orig_constraint)
1480 error ("input operand constraint contains `%c'", constraint[j]);
1486 if (constraint == orig_constraint
1487 && input_num + 1 == ninputs - ninout)
1489 error ("`%%' constraint used with last operand");
1494 case 'V': case 'm': case 'o':
1499 case '?': case '!': case '*': case '#':
1500 case 'E': case 'F': case 'G': case 'H':
1501 case 's': case 'i': case 'n':
1502 case 'I': case 'J': case 'K': case 'L': case 'M':
1503 case 'N': case 'O': case 'P': case ',':
1506 /* Whether or not a numeric constraint allows a register is
1507 decided by the matching constraint, and so there is no need
1508 to do anything special with them. We must handle them in
1509 the default case, so that we don't unnecessarily force
1510 operands to memory. */
1511 case '0': case '1': case '2': case '3': case '4':
1512 case '5': case '6': case '7': case '8': case '9':
1515 unsigned long match;
1517 match = strtoul (constraint + j, &end, 10);
1518 if (match >= (unsigned long) noutputs)
1520 error ("matching constraint references invalid operand number");
1524 /* Try and find the real constraint for this dup. Only do this
1525 if the matching constraint is the only alternative. */
1527 && (j == 0 || (j == 1 && constraint[0] == '%')))
     /* Re-scan using the matched output's constraint string.  */
1529 constraint = constraints[match];
1530 *constraint_p = constraint;
1531 c_len = strlen (constraint);
     /* Skip past the digits just consumed by strtoul.  */
1536 j = end - constraint;
1550 if (! ISALPHA (constraint[j]))
1552 error ("invalid punctuation `%c' in constraint", constraint[j]);
1555 if (REG_CLASS_FROM_LETTER (constraint[j]) != NO_REGS)
1557 #ifdef EXTRA_CONSTRAINT
1560 /* Otherwise we can't assume anything about the nature of
1561 the constraint except that it isn't purely registers.
1562 Treat it like "g" and hope for the best. */
1573 /* Generate RTL for an asm statement with arguments.
1574 STRING is the instruction template.
1575 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
1576 Each output or input has an expression in the TREE_VALUE
1577 and a tree list in TREE_PURPOSE which in turn contains a constraint
1578 name in TREE_VALUE (or NULL_TREE) and a constraint string
1580 CLOBBERS is a list of STRING_CST nodes each naming a hard register
1581 that is clobbered by this insn.
1583 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
1584 Some elements of OUTPUTS may be replaced with trees representing temporary
1585 values. The caller should copy those temporary values to the originally
1588 VOL nonzero means the insn is volatile; don't optimize it. */
1591 expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
1592 tree string, outputs, inputs, clobbers;
1594 const char *filename;
1597 rtvec argvec, constraintvec;
1599 int ninputs = list_length (inputs);
1600 int noutputs = list_length (outputs);
1605 /* Vector of RTX's of evaluated output operands. */
1606 rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1607 int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
1608 rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1609 enum machine_mode *inout_mode
1610 = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
1611 const char **constraints
1612 = (const char **) alloca ((noutputs + ninputs) * sizeof (const char *));
1613 /* The insn we have emitted. */
1615 int old_generating_concat_p = generating_concat_p;
1617 /* An ASM with no outputs needs to be treated as volatile, for now. */
     /* Up-front validation: alternative counts must agree, operand names
        must be unique, and %[name] references resolve to numbers.  */
1621 if (! check_operand_nalternatives (outputs, inputs))
1624 if (! check_unique_operand_names (outputs, inputs))
1627 string = resolve_operand_names (string, outputs, inputs, constraints);
1629 #ifdef MD_ASM_CLOBBERS
1630 /* Sometimes we wish to automatically clobber registers across an asm.
1631 Case in point is when the i386 backend moved from cc0 to a hard reg --
1632 maintaining source-level compatibility means automatically clobbering
1633 the flags register. */
1634 MD_ASM_CLOBBERS (clobbers);
1637 /* Count the number of meaningful clobbered registers, ignoring what
1638 we would ignore later. */
1640 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1642 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1644 i = decode_reg_name (regname);
     /* decode_reg_name: >= 0 is a hard register, -4 is "memory";
        both are meaningful clobbers.  Other negatives are ignored
        or erroneous.  */
1645 if (i >= 0 || i == -4)
1648 error ("unknown register name `%s' in `asm'", regname);
1653 /* First pass over inputs and outputs checks validity and sets
1654 mark_addressable if needed. */
1657 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1659 tree val = TREE_VALUE (tail);
1660 tree type = TREE_TYPE (val);
1661 const char *constraint;
1666 /* If there's an erroneous arg, emit no insn. */
1667 if (type == error_mark_node)
1670 /* Try to parse the output constraint. If that fails, there's
1671 no point in going further. */
1672 constraint = constraints[i];
1673 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
1674 &allows_mem, &allows_reg, &is_inout))
1681 && GET_CODE (DECL_RTL (val)) == REG
1682 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
1683 (*lang_hooks.mark_addressable) (val);
1690 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1692 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1696 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
1698 bool allows_reg, allows_mem;
1699 const char *constraint;
1701 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
1702 would get VOIDmode and that could cause a crash in reload. */
1703 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1706 constraint = constraints[i + noutputs];
1707 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1708 constraints, &allows_mem, &allows_reg))
1711 if (! allows_reg && allows_mem)
1712 (*lang_hooks.mark_addressable) (TREE_VALUE (tail));
1715 /* Second pass evaluates arguments. */
1718 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1720 tree val = TREE_VALUE (tail);
1721 tree type = TREE_TYPE (val);
1726 if (!parse_output_constraint (&constraints[i], i, ninputs,
1727 noutputs, &allows_mem, &allows_reg,
1731 /* If an output operand is not a decl or indirect ref and our constraint
1732 allows a register, make a temporary to act as an intermediate.
1733 Make the asm insn write into that, then our caller will copy it to
1734 the real output operand. Likewise for promoted variables. */
1736 generating_concat_p = 0;
1738 real_output_rtx[i] = NULL_RTX;
1739 if ((TREE_CODE (val) == INDIRECT_REF
1742 && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
1743 && ! (GET_CODE (DECL_RTL (val)) == REG
1744 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
1748 output_rtx[i] = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
1750 if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
1751 error ("output number %d not directly addressable", i);
1752 if ((! allows_mem && GET_CODE (output_rtx[i]) == MEM)
1753 || GET_CODE (output_rtx[i]) == CONCAT)
     /* The asm cannot write the real location directly: write into a
        fresh pseudo and remember the real place for the final copy-back
        loop at the bottom of this function.  */
1755 real_output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1756 output_rtx[i] = gen_reg_rtx (GET_MODE (output_rtx[i]));
1758 emit_move_insn (output_rtx[i], real_output_rtx[i]);
1763 output_rtx[i] = assign_temp (type, 0, 0, 1);
1764 TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
1767 generating_concat_p = old_generating_concat_p;
     /* Record read-write ("+") operands so they can be appended to the
        input vector later.  */
1771 inout_mode[ninout] = TYPE_MODE (type);
1772 inout_opnum[ninout++] = i;
1776 /* Make vectors for the expression-rtx, constraint strings,
1777 and named operands. */
1779 argvec = rtvec_alloc (ninputs);
1780 constraintvec = rtvec_alloc (ninputs);
1782 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
1783 : GET_MODE (output_rtx[0])),
1784 TREE_STRING_POINTER (string),
1785 empty_string, 0, argvec, constraintvec,
1788 MEM_VOLATILE_P (body) = vol;
1790 /* Eval the inputs and put them into ARGVEC.
1791 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1793 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
1795 bool allows_reg, allows_mem;
1796 const char *constraint;
1800 constraint = constraints[i + noutputs];
1801 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1802 constraints, &allows_mem, &allows_reg))
1805 generating_concat_p = 0;
1807 val = TREE_VALUE (tail);
1808 type = TREE_TYPE (val);
1809 op = expand_expr (val, NULL_RTX, VOIDmode, 0);
1811 /* Never pass a CONCAT to an ASM. */
1812 if (GET_CODE (op) == CONCAT)
1813 op = force_reg (GET_MODE (op), op);
     /* If the operand as expanded does not satisfy its constraint, try
        progressively: force to register, spill constants to memory,
        copy register-ish things into a stack temporary, or warn.  */
1815 if (asm_operand_ok (op, constraint) <= 0)
1818 op = force_reg (TYPE_MODE (type), op);
1819 else if (!allows_mem)
1820 warning ("asm operand %d probably doesn't match constraints",
1822 else if (CONSTANT_P (op))
1823 op = force_const_mem (TYPE_MODE (type), op);
1824 else if (GET_CODE (op) == REG
1825 || GET_CODE (op) == SUBREG
1826 || GET_CODE (op) == ADDRESSOF
1827 || GET_CODE (op) == CONCAT)
1829 tree qual_type = build_qualified_type (type,
1831 | TYPE_QUAL_CONST));
1832 rtx memloc = assign_temp (qual_type, 1, 1, 1);
1834 emit_move_insn (memloc, op);
1838 else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
1840 /* We won't recognize volatile memory as available a
1841 memory_operand at this point. Ignore it. */
1843 else if (queued_subexp_p (op))
1846 /* ??? Leave this only until we have experience with what
1847 happens in combine and elsewhere when constraints are
1849 warning ("asm operand %d probably doesn't match constraints",
1853 generating_concat_p = old_generating_concat_p;
1854 ASM_OPERANDS_INPUT (body, i) = op;
1856 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
1857 = gen_rtx_ASM_INPUT (TYPE_MODE (type), constraints[i + noutputs]);
1860 /* Protect all the operands from the queue now that they have all been
1863 generating_concat_p = 0;
1865 for (i = 0; i < ninputs - ninout; i++)
1866 ASM_OPERANDS_INPUT (body, i)
1867 = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
1869 for (i = 0; i < noutputs; i++)
1870 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1872 /* For in-out operands, copy output rtx to input rtx. */
     /* Each "+" output also appears as a trailing input whose constraint
        is the decimal number of the matching output operand.  */
1873 for (i = 0; i < ninout; i++)
1875 int j = inout_opnum[i];
1878 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
1881 sprintf (buffer, "%d", j);
1882 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
1883 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_alloc_string (buffer, -1));
1886 generating_concat_p = old_generating_concat_p;
1888 /* Now, for each output, construct an rtx
1889 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
1890 ARGVEC CONSTRAINTS OPNAMES))
1891 If there is more than one, put them inside a PARALLEL. */
1893 if (noutputs == 1 && nclobbers == 0)
1895 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
1896 insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1899 else if (noutputs == 0 && nclobbers == 0)
1901 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1902 insn = emit_insn (body);
     /* General case: a PARALLEL containing one SET per output followed
        by one CLOBBER per clobbered register.  */
1913 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1915 /* For each output operand, store a SET. */
1916 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1918 XVECEXP (body, 0, i)
1919 = gen_rtx_SET (VOIDmode,
1921 gen_rtx_ASM_OPERANDS
1922 (GET_MODE (output_rtx[i]),
1923 TREE_STRING_POINTER (string),
1924 constraints[i], i, argvec, constraintvec,
1927 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1930 /* If there are no outputs (but there are some clobbers)
1931 store the bare ASM_OPERANDS into the PARALLEL. */
1934 XVECEXP (body, 0, i++) = obody;
1936 /* Store (clobber REG) for each clobbered register specified. */
1938 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1940 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1941 int j = decode_reg_name (regname);
1945 if (j == -3) /* `cc', which is not a register */
1948 if (j == -4) /* `memory', don't cache memory across asm */
1950 XVECEXP (body, 0, i++)
1951 = gen_rtx_CLOBBER (VOIDmode,
1954 gen_rtx_SCRATCH (VOIDmode)));
1958 /* Ignore unknown register, error already signaled. */
1962 /* Use QImode since that's guaranteed to clobber just one reg. */
1963 XVECEXP (body, 0, i++)
1964 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (QImode, j));
1967 insn = emit_insn (body);
1970 /* For any outputs that needed reloading into registers, spill them
1971 back to where they belong. */
1972 for (i = 0; i < noutputs; ++i)
1973 if (real_output_rtx[i])
1974 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1979 /* A subroutine of expand_asm_operands. Check that all operands have
1980 the same number of alternatives. Return true if so. */
1983 check_operand_nalternatives (outputs, inputs)
1984 tree outputs, inputs;
1986 if (outputs || inputs)
     /* Take the comma count of the first operand's constraint as the
        reference number of alternatives.  */
1988 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1990 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1993 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1995 error ("too many alternatives in `asm'");
2002 const char *constraint
2003 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
2005 if (n_occurrences (',', constraint) != nalternatives)
2007 error ("operand constraints for `asm' differ in number of alternatives");
     /* Walk outputs first, then switch to the inputs list.  */
2011 if (TREE_CHAIN (tmp))
2012 tmp = TREE_CHAIN (tmp);
2014 tmp = next, next = 0;
2021 /* A subroutine of expand_asm_operands. Check that all operand names
2022 are unique. Return true if so. We rely on the fact that these names
2023 are identifiers, and so have been canonicalized by get_identifier,
2024 so all we need are pointer comparisons. */
2027 check_unique_operand_names (outputs, inputs)
2028 tree outputs, inputs;
     /* Compare every named output against the outputs after it.  */
2032 for (i = outputs; i ; i = TREE_CHAIN (i))
2034 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
2038 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
2039 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
     /* Compare every named input against later inputs AND all outputs.  */
2043 for (i = inputs; i ; i = TREE_CHAIN (i))
2045 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
2049 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
2050 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
2052 for (j = outputs; j ; j = TREE_CHAIN (j))
2053 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
2060 error ("duplicate asm operand name '%s'",
2061 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
2065 /* A subroutine of expand_asm_operands. Resolve the names of the operands
2066 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
2067 STRING and in the constraints to those numbers. */
2070 resolve_operand_names (string, outputs, inputs, pconstraints)
2072 tree outputs, inputs;
2073 const char **pconstraints;
2075 char *buffer = xstrdup (TREE_STRING_POINTER (string));
2079 /* Assume that we will not need extra space to perform the substitution.
2080 This because we get to remove '[' and ']', which means we cannot have
2081 a problem until we have more than 999 operands. */
     /* Scan the template for %[name] (and %c[name]) references and
        rewrite each in place to its operand number.  */
2084 while ((p = strchr (p, '%')) != NULL)
2088 else if (ISALPHA (p[1]) && p[2] == '[')
2096 p = resolve_operand_name_1 (p, outputs, inputs);
2099 string = build_string (strlen (buffer), buffer);
2102 /* Collect output constraints here because it's convenient.
2103 There should be no named operands here; this is verified
2104 in expand_asm_operand. */
2105 for (t = outputs; t ; t = TREE_CHAIN (t), pconstraints++)
2106 *pconstraints = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2108 /* Substitute [<name>] in input constraint strings. */
2109 for (t = inputs; t ; t = TREE_CHAIN (t), pconstraints++)
2111 const char *c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2112 if (strchr (c, '[') == NULL)
2116 p = buffer = xstrdup (c);
2117 while ((p = strchr (p, '[')) != NULL)
2118 p = resolve_operand_name_1 (p, outputs, inputs);
2120 *pconstraints = ggc_alloc_string (buffer, -1);
2128 /* A subroutine of resolve_operand_names. P points to the '[' for a
2129 potential named operand of the form [<name>]. In place, replace
2130 the name and brackets with a number. Return a pointer to the
2131 balance of the string after substitution. */
2134 resolve_operand_name_1 (p, outputs, inputs)
2136 tree outputs, inputs;
2143 /* Collect the operand name. */
2144 q = strchr (p, ']');
2147 error ("missing close brace for named operand");
2148 return strchr (p, '\0');
2152 /* Resolve the name to a number. */
     /* Operand numbering: outputs first, then inputs, so OP keeps
        counting across both lists.  */
2153 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
2155 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2158 const char *c = TREE_STRING_POINTER (name);
2159 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2163 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
2165 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2168 const char *c = TREE_STRING_POINTER (name);
2169 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2175 error ("undefined named operand '%s'", p + 1);
2179 /* Replace the name with the number. Unfortunately, not all libraries
2180 get the return value of sprintf correct, so search for the end of the
2181 generated string by hand. */
2182 sprintf (p, "%d", op);
2183 p = strchr (p, '\0');
2185 /* Verify the no extra buffer space assumption. */
2189 /* Shift the rest of the buffer down to fill the gap. */
2190 memmove (p, q + 1, strlen (q + 1) + 1);
2195 /* Generate RTL to evaluate the expression EXP
2196 and remember it in case this is the VALUE in a ({... VALUE; }) constr.
2197 Provided just for backward-compatibility. expand_expr_stmt_value()
2198 should be used for new code. */
2201 expand_expr_stmt (exp)
     /* want_value == -1: let expr_stmts_for_value decide (deprecated).  */
2204 expand_expr_stmt_value (exp, -1, 1);
2207 /* Generate RTL to evaluate the expression EXP. WANT_VALUE tells
2208 whether to (1) save the value of the expression, (0) discard it or
2209 (-1) use expr_stmts_for_value to tell. The use of -1 is
2210 deprecated, and retained only for backward compatibility. */
2213 expand_expr_stmt_value (exp, want_value, maybe_last)
2215 int want_value, maybe_last;
2220 if (want_value == -1)
2221 want_value = expr_stmts_for_value != 0;
2223 /* If -W, warn about statements with no side effects,
2224 except for an explicit cast to void (e.g. for assert()), and
2225 except for last statement in ({...}) where they may be useful. */
2227 && (expr_stmts_for_value == 0 || ! maybe_last)
2228 && exp != error_mark_node)
2230 if (! TREE_SIDE_EFFECTS (exp))
2232 if ((extra_warnings || warn_unused_value)
2233 && !(TREE_CODE (exp) == CONVERT_EXPR
2234 && VOID_TYPE_P (TREE_TYPE (exp))))
2235 warning_with_file_and_line (emit_filename, emit_lineno,
2236 "statement with no effect");
2238 else if (warn_unused_value)
2239 warn_if_unused_value (exp);
2242 /* If EXP is of function type and we are expanding statements for
2243 value, convert it to pointer-to-function. */
2244 if (want_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
2245 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
2247 /* The call to `expand_expr' could cause last_expr_type and
2248 last_expr_value to get reset. Therefore, we set last_expr_value
2249 and last_expr_type *after* calling expand_expr. */
2250 value = expand_expr (exp, want_value ? NULL_RTX : const0_rtx,
2252 type = TREE_TYPE (exp);
2254 /* If all we do is reference a volatile value in memory,
2255 copy it to a register to be sure it is actually touched. */
2256 if (value && GET_CODE (value) == MEM && TREE_THIS_VOLATILE (exp))
2258 if (TYPE_MODE (type) == VOIDmode)
2260 else if (TYPE_MODE (type) != BLKmode)
2261 value = copy_to_reg (value);
     /* BLKmode volatile reference: force a touch via a self-compare
        branch, since there is no single register to copy it to.  */
2264 rtx lab = gen_label_rtx ();
2266 /* Compare the value with itself to reference it. */
2267 emit_cmp_and_jump_insns (value, value, EQ,
2268 expand_expr (TYPE_SIZE (type),
2269 NULL_RTX, VOIDmode, 0),
2275 /* If this expression is part of a ({...}) and is in memory, we may have
2276 to preserve temporaries. */
2277 preserve_temp_slots (value);
2279 /* Free any temporaries used to evaluate this expression. Any temporary
2280 used as a result of this expression will already have been preserved
2286 last_expr_value = value;
2287 last_expr_type = type;
2293 /* Warn if EXP contains any computations whose results are not used.
2294 Return 1 if a warning is printed; 0 otherwise. */
2297 warn_if_unused_value (exp)
     /* TREE_USED doubles as a "warning already issued" marker here.  */
2300 if (TREE_USED (exp))
2303 /* Don't warn about void constructs. This includes casting to void,
2304 void function calls, and statement expressions with a final cast
2306 if (VOID_TYPE_P (TREE_TYPE (exp)))
2309 switch (TREE_CODE (exp))
2311 case PREINCREMENT_EXPR:
2312 case POSTINCREMENT_EXPR:
2313 case PREDECREMENT_EXPR:
2314 case POSTDECREMENT_EXPR:
2319 case METHOD_CALL_EXPR:
2321 case TRY_CATCH_EXPR:
2322 case WITH_CLEANUP_EXPR:
2327 /* For a binding, warn if no side effect within it. */
2328 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2331 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2333 case TRUTH_ORIF_EXPR:
2334 case TRUTH_ANDIF_EXPR:
2335 /* In && or ||, warn if 2nd operand has no side effect. */
2336 return warn_if_unused_value (TREE_OPERAND (exp, 1));
     /* COMPOUND_EXPR: check both halves, allowing `(foo (), 0)'.  */
2339 if (TREE_NO_UNUSED_WARNING (exp))
2341 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
2343 /* Let people do `(foo (), 0)' without a warning. */
2344 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2346 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2350 case NON_LVALUE_EXPR:
2351 /* Don't warn about conversions not explicit in the user's program. */
2352 if (TREE_NO_UNUSED_WARNING (exp))
2354 /* Assignment to a cast usually results in a cast of a modify.
2355 Don't complain about that. There can be an arbitrary number of
2356 casts before the modify, so we must loop until we find the first
2357 non-cast expression and then test to see if that is a modify. */
2359 tree tem = TREE_OPERAND (exp, 0);
2361 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2362 tem = TREE_OPERAND (tem, 0);
2364 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2365 || TREE_CODE (tem) == CALL_EXPR)
2371 /* Don't warn about automatic dereferencing of references, since
2372 the user cannot control it. */
2373 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2374 return warn_if_unused_value (TREE_OPERAND (exp, 0));
2378 /* Referencing a volatile value is a side effect, so don't warn. */
2380 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2381 && TREE_THIS_VOLATILE (exp))
2384 /* If this is an expression which has no operands, there is no value
2385 to be unused. There are no such language-independent codes,
2386 but front ends may define such. */
2387 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2388 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
2392 /* If this is an expression with side effects, don't warn. */
2393 if (TREE_SIDE_EFFECTS (exp))
2396 warning_with_file_and_line (emit_filename, emit_lineno,
2397 "value computed is not used");
2402 /* Clear out the memory of the last expression evaluated. */
2410 /* Begin a statement-expression, i.e., a series of statements which
2411 may return a value. Return the RTL_EXPR for this statement expr.
2412 The caller must save that value and pass it to
2413 expand_end_stmt_expr. If HAS_SCOPE is nonzero, temporaries created
2414 in the statement-expression are deallocated at the end of the
2418 expand_start_stmt_expr (has_scope)
2423 /* Make the RTL_EXPR node temporary, not momentary,
2424 so that rtl_expr_chain doesn't become garbage. */
2425 t = make_node (RTL_EXPR);
2426 do_pending_stack_adjust ();
2428 start_sequence_for_rtl_expr (t);
2432 expr_stmts_for_value++;
2433 last_expr_value = NULL_RTX;
2437 /* Restore the previous state at the end of a statement that returns a value.
2438 Returns a tree node representing the statement's value and the
2439 insns to compute the value.
2441 The nodes of that expression have been freed by now, so we cannot use them.
2442 But we don't want to do that anyway; the expression has already been
2443 evaluated and now we just want to use the value. So generate a RTL_EXPR
2444 with the proper type and RTL value.
2446 If the last substatement was not an expression,
2447 return something with type `void'. */
2450 expand_end_stmt_expr (t)
2455 if (! last_expr_value || ! last_expr_type)
2457 last_expr_value = const0_rtx;
2458 last_expr_type = void_type_node;
2460 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2461 /* Remove any possible QUEUED. */
2462 last_expr_value = protect_from_queue (last_expr_value, 0);
2466 TREE_TYPE (t) = last_expr_type;
2467 RTL_EXPR_RTL (t) = last_expr_value;
2468 RTL_EXPR_SEQUENCE (t) = get_insns ();
2470 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2474 /* Don't consider deleting this expr or containing exprs at tree level. */
2475 TREE_SIDE_EFFECTS (t) = 1;
2476 /* Propagate volatility of the actual RTL expr. */
2477 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2480 expr_stmts_for_value--;
2485 /* Generate RTL for the start of an if-then. COND is the expression
2486 whose truth should be tested.
2488 If EXITFLAG is nonzero, this conditional is visible to
2489 `exit_something'. */
2492 expand_start_cond (cond, exitflag)
2496 struct nesting *thiscond = ALLOC_NESTING ();
2498 /* Make an entry on cond_stack for the cond we are entering. */
2500 thiscond->next = cond_stack;
2501 thiscond->all = nesting_stack;
2502 thiscond->depth = ++nesting_depth;
2503 thiscond->data.cond.next_label = gen_label_rtx ();
2504 /* Before we encounter an `else', we don't need a separate exit label
2505 unless there are supposed to be exit statements
2506 to exit this conditional. */
2507 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2508 thiscond->data.cond.endif_label = thiscond->exit_label;
2509 cond_stack = thiscond;
2510 nesting_stack = thiscond;
2512 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2515 /* Generate RTL between then-clause and the elseif-clause
2516 of an if-then-elseif-.... */
2519 expand_start_elseif (cond)
2522 if (cond_stack->data.cond.endif_label == 0)
2523 cond_stack->data.cond.endif_label = gen_label_rtx ();
2524 emit_jump (cond_stack->data.cond.endif_label);
2525 emit_label (cond_stack->data.cond.next_label);
2526 cond_stack->data.cond.next_label = gen_label_rtx ();
2527 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2530 /* Generate RTL between the then-clause and the else-clause
2531 of an if-then-else. */
2534 expand_start_else ()
2536 if (cond_stack->data.cond.endif_label == 0)
2537 cond_stack->data.cond.endif_label = gen_label_rtx ();
2539 emit_jump (cond_stack->data.cond.endif_label);
2540 emit_label (cond_stack->data.cond.next_label);
2541 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2544 /* After calling expand_start_else, turn this "else" into an "else if"
2545 by providing another condition. */
2548 expand_elseif (cond)
2551 cond_stack->data.cond.next_label = gen_label_rtx ();
2552 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2555 /* Generate RTL for the end of an if-then.
2556 Pop the record for it off of cond_stack. */
2561 struct nesting *thiscond = cond_stack;
2563 do_pending_stack_adjust ();
2564 if (thiscond->data.cond.next_label)
2565 emit_label (thiscond->data.cond.next_label);
2566 if (thiscond->data.cond.endif_label)
2567 emit_label (thiscond->data.cond.endif_label);
2569 POPSTACK (cond_stack);
2573 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2574 loop should be exited by `exit_something'. This is a loop for which
2575 `expand_continue' will jump to the top of the loop.
2577 Make an entry on loop_stack to record the labels associated with
2581 expand_start_loop (exit_flag)
2584 struct nesting *thisloop = ALLOC_NESTING ();
2586 /* Make an entry on loop_stack for the loop we are entering. */
2588 thisloop->next = loop_stack;
2589 thisloop->all = nesting_stack;
2590 thisloop->depth = ++nesting_depth;
2591 thisloop->data.loop.start_label = gen_label_rtx ();
2592 thisloop->data.loop.end_label = gen_label_rtx ();
2593 thisloop->data.loop.alt_end_label = 0;
2594 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2595 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2596 loop_stack = thisloop;
2597 nesting_stack = thisloop;
2599 do_pending_stack_adjust ();
2601 emit_note (NULL, NOTE_INSN_LOOP_BEG);
2602 emit_label (thisloop->data.loop.start_label);
2607 /* Like expand_start_loop but for a loop where the continuation point
2608 (for expand_continue_loop) will be specified explicitly. */
2611 expand_start_loop_continue_elsewhere (exit_flag)
2614 struct nesting *thisloop = expand_start_loop (exit_flag);
2615 loop_stack->data.loop.continue_label = gen_label_rtx ();
2619 /* Begin a null, aka do { } while (0) "loop". But since the contents
2620 of said loop can still contain a break, we must frob the loop nest. */
2623 expand_start_null_loop ()
2625 struct nesting *thisloop = ALLOC_NESTING ();
2627 /* Make an entry on loop_stack for the loop we are entering. */
2629 thisloop->next = loop_stack;
2630 thisloop->all = nesting_stack;
2631 thisloop->depth = ++nesting_depth;
2632 thisloop->data.loop.start_label = emit_note (NULL, NOTE_INSN_DELETED);
2633 thisloop->data.loop.end_label = gen_label_rtx ();
2634 thisloop->data.loop.alt_end_label = NULL_RTX;
2635 thisloop->data.loop.continue_label = thisloop->data.loop.end_label;
2636 thisloop->exit_label = thisloop->data.loop.end_label;
2637 loop_stack = thisloop;
2638 nesting_stack = thisloop;
2643 /* Specify the continuation point for a loop started with
2644 expand_start_loop_continue_elsewhere.
2645 Use this at the point in the code to which a continue statement
2649 expand_loop_continue_here ()
2651 do_pending_stack_adjust ();
2652 emit_note (NULL, NOTE_INSN_LOOP_CONT);
2653 emit_label (loop_stack->data.loop.continue_label);
2656 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2657 Pop the block off of loop_stack. */
2662 rtx start_label = loop_stack->data.loop.start_label;
2664 int eh_regions, debug_blocks;
2666 /* Mark the continue-point at the top of the loop if none elsewhere. */
2667 if (start_label == loop_stack->data.loop.continue_label)
2668 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2670 do_pending_stack_adjust ();
2672 /* If the loop starts with a loop exit, roll that to the end where
2673 it will optimize together with the jump back.
2675 If the loop presently looks like this (in pseudo-C):
2679 if (test) goto end_label;
2685 transform it to look like:
2692 if (test) goto end_label;
2696 We rely on the presence of NOTE_INSN_LOOP_END_TOP_COND to mark
2697 the end of the entry condtional. Without this, our lexical scan
2698 can't tell the difference between an entry conditional and a
2699 body conditional that exits the loop. Mistaking the two means
2700 that we can misplace the NOTE_INSN_LOOP_CONT note, which can
2701 screw up loop unrolling.
2703 Things will be oh so much better when loop optimization is done
2704 off of a proper control flow graph... */
2706 /* Scan insns from the top of the loop looking for the END_TOP_COND note. */
2708 eh_regions = debug_blocks = 0;
2709 for (etc_note = start_label; etc_note ; etc_note = NEXT_INSN (etc_note))
2710 if (GET_CODE (etc_note) == NOTE)
2712 if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_END_TOP_COND)
2715 /* We must not walk into a nested loop. */
2716 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_BEG)
2718 etc_note = NULL_RTX;
2722 /* At the same time, scan for EH region notes, as we don't want
2723 to scrog region nesting. This shouldn't happen, but... */
2724 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_BEG)
2726 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_END)
2728 if (--eh_regions < 0)
2729 /* We've come to the end of an EH region, but never saw the
2730 beginning of that region. That means that an EH region
2731 begins before the top of the loop, and ends in the middle
2732 of it. The existence of such a situation violates a basic
2733 assumption in this code, since that would imply that even
2734 when EH_REGIONS is zero, we might move code out of an
2735 exception region. */
2739 /* Likewise for debug scopes. In this case we'll either (1) move
2740 all of the notes if they are properly nested or (2) leave the
2741 notes alone and only rotate the loop at high optimization
2742 levels when we expect to scrog debug info. */
2743 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_BEG)
2745 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_END)
2752 && (debug_blocks == 0 || optimize >= 2)
2753 && NEXT_INSN (etc_note) != NULL_RTX
2754 && ! any_condjump_p (get_last_insn ()))
2756 /* We found one. Move everything from START to ETC to the end
2757 of the loop, and add a jump from the top of the loop. */
2758 rtx top_label = gen_label_rtx ();
2759 rtx start_move = start_label;
2761 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2762 then we want to move this note also. */
2763 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2764 && NOTE_LINE_NUMBER (PREV_INSN (start_move)) == NOTE_INSN_LOOP_CONT)
2765 start_move = PREV_INSN (start_move);
2767 emit_label_before (top_label, start_move);
2769 /* Actually move the insns. If the debug scopes are nested, we
2770 can move everything at once. Otherwise we have to move them
2771 one by one and squeeze out the block notes. */
2772 if (debug_blocks == 0)
2773 reorder_insns (start_move, etc_note, get_last_insn ());
2776 rtx insn, next_insn;
2777 for (insn = start_move; insn; insn = next_insn)
2779 /* Figure out which insn comes after this one. We have
2780 to do this before we move INSN. */
2781 next_insn = (insn == etc_note ? NULL : NEXT_INSN (insn));
2783 if (GET_CODE (insn) == NOTE
2784 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2785 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2788 reorder_insns (insn, insn, get_last_insn ());
2792 /* Add the jump from the top of the loop. */
2793 emit_jump_insn_before (gen_jump (start_label), top_label);
2794 emit_barrier_before (top_label);
2795 start_label = top_label;
2798 emit_jump (start_label);
2799 emit_note (NULL, NOTE_INSN_LOOP_END);
2800 emit_label (loop_stack->data.loop.end_label);
2802 POPSTACK (loop_stack);
2807 /* Finish a null loop, aka do { } while (0). */
2810 expand_end_null_loop ()
2812 do_pending_stack_adjust ();
2813 emit_label (loop_stack->data.loop.end_label);
2815 POPSTACK (loop_stack);
2820 /* Generate a jump to the current loop's continue-point.
2821 This is usually the top of the loop, but may be specified
2822 explicitly elsewhere. If not currently inside a loop,
2823 return 0 and do nothing; caller will print an error message. */
2826 expand_continue_loop (whichloop)
2827 struct nesting *whichloop;
2829 /* Emit information for branch prediction. */
2832 note = emit_note (NULL, NOTE_INSN_PREDICTION);
2833 NOTE_PREDICTION (note) = NOTE_PREDICT (PRED_CONTINUE, IS_TAKEN);
2836 whichloop = loop_stack;
2839 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2844 /* Generate a jump to exit the current loop. If not currently inside a loop,
2845 return 0 and do nothing; caller will print an error message. */
2848 expand_exit_loop (whichloop)
2849 struct nesting *whichloop;
2853 whichloop = loop_stack;
2856 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2860 /* Generate a conditional jump to exit the current loop if COND
2861 evaluates to zero. If not currently inside a loop,
2862 return 0 and do nothing; caller will print an error message. */
2865 expand_exit_loop_if_false (whichloop, cond)
2866 struct nesting *whichloop;
2869 rtx label = gen_label_rtx ();
2874 whichloop = loop_stack;
2877 /* In order to handle fixups, we actually create a conditional jump
2878 around an unconditional branch to exit the loop. If fixups are
2879 necessary, they go before the unconditional branch. */
2881 do_jump (cond, NULL_RTX, label);
2882 last_insn = get_last_insn ();
2883 if (GET_CODE (last_insn) == CODE_LABEL)
2884 whichloop->data.loop.alt_end_label = last_insn;
2885 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2892 /* Like expand_exit_loop_if_false except also emit a note marking
2893 the end of the conditional. Should only be used immediately
2894 after expand_loop_start. */
2897 expand_exit_loop_top_cond (whichloop, cond)
2898 struct nesting *whichloop;
2901 if (! expand_exit_loop_if_false (whichloop, cond))
2904 emit_note (NULL, NOTE_INSN_LOOP_END_TOP_COND);
2908 /* Return nonzero if the loop nest is empty. Else return zero. */
2911 stmt_loop_nest_empty ()
2913 /* cfun->stmt can be NULL if we are building a call to get the
2914 EH context for a setjmp/longjmp EH target and the current
2915 function was a deferred inline function. */
2916 return (cfun->stmt == NULL || loop_stack == NULL);
2919 /* Return non-zero if we should preserve sub-expressions as separate
2920 pseudos. We never do so if we aren't optimizing. We always do so
2921 if -fexpensive-optimizations.
2923 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2924 the loop may still be a small one. */
2927 preserve_subexpressions_p ()
2931 if (flag_expensive_optimizations)
2934 if (optimize == 0 || cfun == 0 || cfun->stmt == 0 || loop_stack == 0)
2937 insn = get_last_insn_anywhere ();
2940 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2941 < n_non_fixed_regs * 3));
2945 /* Generate a jump to exit the current loop, conditional, binding contour
2946 or case statement. Not all such constructs are visible to this function,
2947 only those started with EXIT_FLAG nonzero. Individual languages use
2948 the EXIT_FLAG parameter to control which kinds of constructs you can
2951 If not currently inside anything that can be exited,
2952 return 0 and do nothing; caller will print an error message. */
2955 expand_exit_something ()
2959 for (n = nesting_stack; n; n = n->all)
2960 if (n->exit_label != 0)
2962 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2969 /* Generate RTL to return from the current function, with no value.
2970 (That is, we do not do anything about returning any value.) */
2973 expand_null_return ()
2977 last_insn = get_last_insn ();
2979 /* If this function was declared to return a value, but we
2980 didn't, clobber the return registers so that they are not
2981 propagated live to the rest of the function. */
2982 clobber_return_register ();
2984 expand_null_return_1 (last_insn);
2987 /* Try to guess whether the value of return means error code. */
2988 static enum br_predictor
2989 return_prediction (val)
2992 /* Different heuristics for pointers and scalars. */
2993 if (POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
2995 /* NULL is usually not returned. */
2996 if (val == const0_rtx)
2997 return PRED_NULL_RETURN;
3001 /* Negative return values are often used to indicate
3003 if (GET_CODE (val) == CONST_INT
3004 && INTVAL (val) < 0)
3005 return PRED_NEGATIVE_RETURN;
3006 /* Constant return values are also usually erors,
3007 zero/one often mean booleans so exclude them from the
3009 if (CONSTANT_P (val)
3010 && (val != const0_rtx && val != const1_rtx))
3011 return PRED_CONST_RETURN;
3013 return PRED_NO_PREDICTION;
3016 /* Generate RTL to return from the current function, with value VAL. */
3019 expand_value_return (val)
3024 enum br_predictor pred;
3026 if ((pred = return_prediction (val)) != PRED_NO_PREDICTION)
3028 /* Emit information for branch prediction. */
3031 note = emit_note (NULL, NOTE_INSN_PREDICTION);
3033 NOTE_PREDICTION (note) = NOTE_PREDICT (pred, NOT_TAKEN);
3037 last_insn = get_last_insn ();
3038 return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
3040 /* Copy the value to the return location
3041 unless it's already there. */
3043 if (return_reg != val)
3045 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
3046 #ifdef PROMOTE_FUNCTION_RETURN
3047 int unsignedp = TREE_UNSIGNED (type);
3048 enum machine_mode old_mode
3049 = DECL_MODE (DECL_RESULT (current_function_decl));
3050 enum machine_mode mode
3051 = promote_mode (type, old_mode, &unsignedp, 1);
3053 if (mode != old_mode)
3054 val = convert_modes (mode, old_mode, val, unsignedp);
3056 if (GET_CODE (return_reg) == PARALLEL)
3057 emit_group_load (return_reg, val, int_size_in_bytes (type));
3059 emit_move_insn (return_reg, val);
3062 expand_null_return_1 (last_insn);
3065 /* Output a return with no value. If LAST_INSN is nonzero,
3066 pretend that the return takes place after LAST_INSN. */
3069 expand_null_return_1 (last_insn)
3072 rtx end_label = cleanup_label ? cleanup_label : return_label;
3074 clear_pending_stack_adjust ();
3075 do_pending_stack_adjust ();
3079 end_label = return_label = gen_label_rtx ();
3080 expand_goto_internal (NULL_TREE, end_label, last_insn);
3083 /* Generate RTL to evaluate the expression RETVAL and return it
3084 from the current function. */
3087 expand_return (retval)
3090 /* If there are any cleanups to be performed, then they will
3091 be inserted following LAST_INSN. It is desirable
3092 that the last_insn, for such purposes, should be the
3093 last insn before computing the return value. Otherwise, cleanups
3094 which call functions can clobber the return value. */
3095 /* ??? rms: I think that is erroneous, because in C++ it would
3096 run destructors on variables that might be used in the subsequent
3097 computation of the return value. */
3103 /* If function wants no value, give it none. */
3104 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3106 expand_expr (retval, NULL_RTX, VOIDmode, 0);
3108 expand_null_return ();
3112 if (retval == error_mark_node)
3114 /* Treat this like a return of no value from a function that
3116 expand_null_return ();
3119 else if (TREE_CODE (retval) == RESULT_DECL)
3120 retval_rhs = retval;
3121 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
3122 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3123 retval_rhs = TREE_OPERAND (retval, 1);
3124 else if (VOID_TYPE_P (TREE_TYPE (retval)))
3125 /* Recognize tail-recursive call to void function. */
3126 retval_rhs = retval;
3128 retval_rhs = NULL_TREE;
3130 last_insn = get_last_insn ();
3132 /* Distribute return down conditional expr if either of the sides
3133 may involve tail recursion (see test below). This enhances the number
3134 of tail recursions we see. Don't do this always since it can produce
3135 sub-optimal code in some cases and we distribute assignments into
3136 conditional expressions when it would help. */
3138 if (optimize && retval_rhs != 0
3139 && frame_offset == 0
3140 && TREE_CODE (retval_rhs) == COND_EXPR
3141 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
3142 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
3144 rtx label = gen_label_rtx ();
3147 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
3148 start_cleanup_deferral ();
3149 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3150 DECL_RESULT (current_function_decl),
3151 TREE_OPERAND (retval_rhs, 1));
3152 TREE_SIDE_EFFECTS (expr) = 1;
3153 expand_return (expr);
3156 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3157 DECL_RESULT (current_function_decl),
3158 TREE_OPERAND (retval_rhs, 2));
3159 TREE_SIDE_EFFECTS (expr) = 1;
3160 expand_return (expr);
3161 end_cleanup_deferral ();
3165 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3167 /* If the result is an aggregate that is being returned in one (or more)
3168 registers, load the registers here. The compiler currently can't handle
3169 copying a BLKmode value into registers. We could put this code in a
3170 more general area (for use by everyone instead of just function
3171 call/return), but until this feature is generally usable it is kept here
3172 (and in expand_call). The value must go into a pseudo in case there
3173 are cleanups that will clobber the real return register. */
3176 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3177 && GET_CODE (result_rtl) == REG)
3180 unsigned HOST_WIDE_INT bitpos, xbitpos;
3181 unsigned HOST_WIDE_INT big_endian_correction = 0;
3182 unsigned HOST_WIDE_INT bytes
3183 = int_size_in_bytes (TREE_TYPE (retval_rhs));
3184 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3185 unsigned int bitsize
3186 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
3187 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
3188 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
3189 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
3190 enum machine_mode tmpmode, result_reg_mode;
3194 expand_null_return ();
3198 /* Structures whose size is not a multiple of a word are aligned
3199 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
3200 machine, this means we must skip the empty high order bytes when
3201 calculating the bit offset. */
3202 if (BYTES_BIG_ENDIAN
3203 && !FUNCTION_ARG_REG_LITTLE_ENDIAN
3204 && bytes % UNITS_PER_WORD)
3205 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
3208 /* Copy the structure BITSIZE bits at a time. */
3209 for (bitpos = 0, xbitpos = big_endian_correction;
3210 bitpos < bytes * BITS_PER_UNIT;
3211 bitpos += bitsize, xbitpos += bitsize)
3213 /* We need a new destination pseudo each time xbitpos is
3214 on a word boundary and when xbitpos == big_endian_correction
3215 (the first time through). */
3216 if (xbitpos % BITS_PER_WORD == 0
3217 || xbitpos == big_endian_correction)
3219 /* Generate an appropriate register. */
3220 dst = gen_reg_rtx (word_mode);
3221 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
3223 /* Clear the destination before we move anything into it. */
3224 emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
3227 /* We need a new source operand each time bitpos is on a word
3229 if (bitpos % BITS_PER_WORD == 0)
3230 src = operand_subword_force (result_val,
3231 bitpos / BITS_PER_WORD,
3234 /* Use bitpos for the source extraction (left justified) and
3235 xbitpos for the destination store (right justified). */
3236 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
3237 extract_bit_field (src, bitsize,
3238 bitpos % BITS_PER_WORD, 1,
3239 NULL_RTX, word_mode, word_mode,
3244 /* Find the smallest integer mode large enough to hold the
3245 entire structure and use that mode instead of BLKmode
3246 on the USE insn for the return register. */
3247 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3248 tmpmode != VOIDmode;
3249 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
3250 /* Have we found a large enough mode? */
3251 if (GET_MODE_SIZE (tmpmode) >= bytes)
3254 /* No suitable mode found. */
3255 if (tmpmode == VOIDmode)
3258 PUT_MODE (result_rtl, tmpmode);
3260 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
3261 result_reg_mode = word_mode;
3263 result_reg_mode = tmpmode;
3264 result_reg = gen_reg_rtx (result_reg_mode);
3267 for (i = 0; i < n_regs; i++)
3268 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
3271 if (tmpmode != result_reg_mode)
3272 result_reg = gen_lowpart (tmpmode, result_reg);
3274 expand_value_return (result_reg);
3276 else if (retval_rhs != 0
3277 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3278 && (GET_CODE (result_rtl) == REG
3279 || (GET_CODE (result_rtl) == PARALLEL)))
3281 /* Calculate the return value into a temporary (usually a pseudo
3283 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
3284 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
3286 val = assign_temp (nt, 0, 0, 1);
3287 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
3288 val = force_not_mem (val);
3290 /* Return the calculated value, doing cleanups first. */
3291 expand_value_return (val);
3295 /* No cleanups or no hard reg used;
3296 calculate value into hard return reg. */
3297 expand_expr (retval, const0_rtx, VOIDmode, 0);
3299 expand_value_return (result_rtl);
3303 /* Return 1 if the end of the generated RTX is not a barrier.
3304 This means code already compiled can drop through. */
3307 drop_through_at_end_p ()
3309 rtx insn = get_last_insn ();
3310 while (insn && GET_CODE (insn) == NOTE)
3311 insn = PREV_INSN (insn);
3312 return insn && GET_CODE (insn) != BARRIER;
3315 /* Attempt to optimize a potential tail recursion call into a goto.
3316 ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
3317 where to place the jump to the tail recursion label.
3319 Return TRUE if the call was optimized into a goto. */
3322 optimize_tail_recursion (arguments, last_insn)
3326 /* Finish checking validity, and if valid emit code to set the
3327 argument variables for the new call. */
3328 if (tail_recursion_args (arguments, DECL_ARGUMENTS (current_function_decl)))
3330 if (tail_recursion_label == 0)
3332 tail_recursion_label = gen_label_rtx ();
3333 emit_label_after (tail_recursion_label,
3334 tail_recursion_reentry);
3337 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
3344 /* Emit code to alter this function's formal parms for a tail-recursive call.
3345 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3346 FORMALS is the chain of decls of formals.
3347 Return 1 if this can be done;
3348 otherwise return 0 and do not emit any code. */
3351 tail_recursion_args (actuals, formals)
3352 tree actuals, formals;
3354 tree a = actuals, f = formals;
3358 /* Check that number and types of actuals are compatible
3359 with the formals. This is not always true in valid C code.
3360 Also check that no formal needs to be addressable
3361 and that all formals are scalars. */
3363 /* Also count the args. */
3365 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3367 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3368 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
3370 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3373 if (a != 0 || f != 0)
3376 /* Compute all the actuals. */
3378 argvec = (rtx *) alloca (i * sizeof (rtx));
3380 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3381 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
3383 /* Find which actual values refer to current values of previous formals.
3384 Copy each of them now, before any formal is changed. */
3386 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3390 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3391 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3397 argvec[i] = copy_to_reg (argvec[i]);
3400 /* Store the values of the actuals into the formals. */
3402 for (f = formals, a = actuals, i = 0; f;
3403 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3405 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3406 emit_move_insn (DECL_RTL (f), argvec[i]);
3408 convert_move (DECL_RTL (f), argvec[i],
3409 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3416 /* Generate the RTL code for entering a binding contour.
3417 The variables are declared one by one, by calls to `expand_decl'.
3419 FLAGS is a bitwise or of the following flags:
3421 1 - Nonzero if this construct should be visible to
3424 2 - Nonzero if this contour does not require a
3425 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
3426 language-independent code should set this flag because they
3427 will not create corresponding BLOCK nodes. (There should be
3428 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
3429 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
3430 when expand_end_bindings is called.
3432 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
3433 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
3437 expand_start_bindings_and_block (flags, block)
3441 struct nesting *thisblock = ALLOC_NESTING ();
3443 int exit_flag = ((flags & 1) != 0);
3444 int block_flag = ((flags & 2) == 0);
3446 /* If a BLOCK is supplied, then the caller should be requesting a
3447 NOTE_INSN_BLOCK_BEG note. */
3448 if (!block_flag && block)
3451 /* Create a note to mark the beginning of the block. */
3454 note = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
3455 NOTE_BLOCK (note) = block;
3458 note = emit_note (NULL, NOTE_INSN_DELETED);
3460 /* Make an entry on block_stack for the block we are entering. */
3462 thisblock->next = block_stack;
3463 thisblock->all = nesting_stack;
3464 thisblock->depth = ++nesting_depth;
3465 thisblock->data.block.stack_level = 0;
3466 thisblock->data.block.cleanups = 0;
3467 thisblock->data.block.n_function_calls = 0;
3468 thisblock->data.block.exception_region = 0;
3469 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
3471 thisblock->data.block.conditional_code = 0;
3472 thisblock->data.block.last_unconditional_cleanup = note;
3473 /* When we insert instructions after the last unconditional cleanup,
3474 we don't adjust last_insn. That means that a later add_insn will
3475 clobber the instructions we've just added. The easiest way to
3476 fix this is to just insert another instruction here, so that the
3477 instructions inserted after the last unconditional cleanup are
3478 never the last instruction. */
3479 emit_note (NULL, NOTE_INSN_DELETED);
3480 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3483 && !(block_stack->data.block.cleanups == NULL_TREE
3484 && block_stack->data.block.outer_cleanups == NULL_TREE))
3485 thisblock->data.block.outer_cleanups
3486 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3487 block_stack->data.block.outer_cleanups);
3489 thisblock->data.block.outer_cleanups = 0;
3490 thisblock->data.block.label_chain = 0;
3491 thisblock->data.block.innermost_stack_block = stack_block_stack;
3492 thisblock->data.block.first_insn = note;
3493 thisblock->data.block.block_start_count = ++current_block_start_count;
3494 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3495 block_stack = thisblock;
3496 nesting_stack = thisblock;
3498 /* Make a new level for allocating stack slots. */
3502 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3503 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3504 expand_expr are made. After we end the region, we know that all
3505 space for all temporaries that were created by TARGET_EXPRs will be
3506 destroyed and their space freed for reuse. */
/* Open a temporary-slot region for TARGET_EXPR temporaries: push a new
   cleanup binding level and record the temp-slot level so that the
   matching expand_end_target_temps can free everything created inside.
   NOTE(review): lines of this definition are elided in this view; only
   comments were added.  */
3509 expand_start_target_temps ()
3511   /* This is so that even if the result is preserved, the space
3512      allocated will be freed, as we know that it is no longer in use.  */
3515   /* Start a new binding layer that will keep track of all cleanup
3516      actions to be performed.  */
3517   expand_start_bindings (2);
     /* Temporaries made from here on belong to this region.  */
3519   target_temp_slot_level = temp_slot_level;
/* Close the region opened by expand_start_target_temps: end the cleanup
   binding level so all TARGET_EXPR temporaries made inside are destroyed
   and their stack space is released for reuse.
   NOTE(review): lines of this definition are elided in this view.  */
3523 expand_end_target_temps ()
3525   expand_end_bindings (NULL_TREE, 0, 0);
3527   /* This is so that even if the result is preserved, the space
3528      allocated will be freed, as we know that it is no longer in use.  */
3532 /* Given a pointer to a BLOCK node return non-zero if (and only if) the node
3533 in question represents the outermost pair of curly braces (i.e. the "body
3534 block") of a function or method.
3536 For any BLOCK node representing a "body block" of a function or method, the
3537 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
3538 represents the outermost (function) scope for the function or method (i.e.
3539 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
3540 *that* node in turn will point to the relevant FUNCTION_DECL node. */
/* Predicate: does STMT represent a function's "body block" (the outermost
   pair of curly braces)?  Recognized by the chain
   BLOCK -> BLOCK (function scope) -> FUNCTION_DECL via BLOCK_SUPERCONTEXT.
   NOTE(review): the return statements of this definition are elided in
   this view; only comments were added.  */
3543 is_body_block (stmt)
3546   if (TREE_CODE (stmt) == BLOCK)
3548       tree parent = BLOCK_SUPERCONTEXT (stmt);
3550       if (parent && TREE_CODE (parent) == BLOCK)
3552 	  tree grandparent = BLOCK_SUPERCONTEXT (parent);
3554 	  if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
3562 /* True if we are currently emitting insns in an area of output code
3563 that is controlled by a conditional expression. This is used by
3564 the cleanup handling code to generate conditional cleanup actions. */
/* Return non-zero when insns are currently being emitted under control of
   a conditional expression, so cleanup registration must itself be made
   conditional (see expand_decl_cleanup).  */
3567 conditional_context ()
3569   return block_stack && block_stack->data.block.conditional_code;
3572 /* Return an opaque pointer to the current nesting level, so frontend code
3573 can check its own sanity. */
/* Return an opaque handle for the current nesting level (the block_stack
   head), or 0 when no function is being compiled; frontends use this only
   for sanity checks.  */
3576 current_nesting_level ()
3578   return cfun ? block_stack : 0;
3581 /* Emit a handler label for a nonlocal goto handler.
3582 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
/* Emit a handler label for a nonlocal goto handler and return it.
   SLOT receives the label's address (as a LABEL_REF) via insns inserted
   before BEFORE_INSN; the label itself is emitted at the current point.
   NOTE(review): some lines of this definition are elided in this view.  */
3585 expand_nl_handler_label (slot, before_insn)
3586      rtx slot, before_insn;
3589   rtx handler_label = gen_label_rtx ();
3591   /* Don't let cleanup_cfg delete the handler.  */
3592   LABEL_PRESERVE_P (handler_label) = 1;
     /* Build the store of the label address into SLOT in a detached
	sequence, then splice it in ahead of BEFORE_INSN.  */
3595   emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3596   insns = get_insns ();
3598   emit_insns_before (insns, before_insn);
3600   emit_label (handler_label);
3602   return handler_label;
3605 /* Emit code to restore vital registers at the beginning of a nonlocal goto
/* Emit code to restore vital registers (frame pointer, and possibly the
   arg pointer) at the point where a nonlocal goto lands, unless the target
   provides a nonlocal_goto pattern that handles this itself.
   NOTE(review): several lines of this definition are elided in this view;
   only comments were added.  */
3608 expand_nl_goto_receiver ()
3610 #ifdef HAVE_nonlocal_goto
3611   if (! HAVE_nonlocal_goto)
3613     /* First adjust our frame pointer to its actual value.  It was
3614        previously set to the start of the virtual area corresponding to
3615        the stacked variables when we branched here and now needs to be
3616        adjusted to the actual hardware fp value.
3618        Assignments are to virtual registers are converted by
3619        instantiate_virtual_regs into the corresponding assignment
3620        to the underlying register (fp in this case) that makes
3621        the original assignment true.
3622        So the following insn will actually be
3623        decrementing fp by STARTING_FRAME_OFFSET.  */
3624     emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3626 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3627   if (fixed_regs[ARG_POINTER_REGNUM])
3629 #ifdef ELIMINABLE_REGS
3630       /* If the argument pointer can be eliminated in favor of the
3631 	 frame pointer, we don't need to restore it.  We assume here
3632 	 that if such an elimination is present, it can always be used.
3633 	 This is the case on all known machines; if we don't make this
3634 	 assumption, we do unnecessary saving on many machines.  */
3635       static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
     /* Scan the elimination table for arg-pointer -> hard-frame-pointer.  */
3638       for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
3639 	if (elim_regs[i].from == ARG_POINTER_REGNUM
3640 	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
     /* i == ARRAY_SIZE means no such elimination: restore by hand.  */
3643       if (i == ARRAY_SIZE (elim_regs))
3646 	  /* Now restore our arg pointer from the address at which it
3647 	     was saved in our stack frame.  */
3648 	  emit_move_insn (virtual_incoming_args_rtx,
3649 			  copy_to_reg (get_arg_pointer_save_area (cfun)));
3654 #ifdef HAVE_nonlocal_goto_receiver
3655   if (HAVE_nonlocal_goto_receiver)
3656     emit_insn (gen_nonlocal_goto_receiver ());
3660 /* Make handlers for nonlocal gotos taking place in the function calls in
/* Emit the nonlocal-goto handlers for THISBLOCK: save/restore the handler
   slots around the block, build one handler per reachable nonlocal label,
   and one trap handler shared by labels that are no longer valid targets.
   All handlers are bypassed in normal control flow via the AFTERWARD label.
   NOTE(review): several lines of this definition are elided in this view;
   only comments were added.  */
3664 expand_nl_goto_receivers (thisblock)
3665      struct nesting *thisblock;
3668   rtx afterward = gen_label_rtx ();
3673   /* Record the handler address in the stack slot for that purpose,
3674      during this block, saving and restoring the outer value.  */
3675   if (thisblock->next != 0)
3676     for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3678 	rtx save_receiver = gen_reg_rtx (Pmode);
3679 	emit_move_insn (XEXP (slot, 0), save_receiver);
     /* The save of the outer value is emitted as a detached sequence and
	spliced in before the block's first insn.  */
3682 	emit_move_insn (save_receiver, XEXP (slot, 0));
3683 	insns = get_insns ();
3685 	emit_insns_before (insns, thisblock->data.block.first_insn);
3688   /* Jump around the handlers; they run only when specially invoked.  */
3689   emit_jump (afterward);
3691   /* Make a separate handler for each label.  */
3692   link = nonlocal_labels;
3693   slot = nonlocal_goto_handler_slots;
3694   label_list = NULL_RTX;
3695   for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3696     /* Skip any labels we shouldn't be able to jump to from here,
3697        we generate one special handler for all of them below which just calls
3699     if (! DECL_TOO_LATE (TREE_VALUE (link)))
3702 	lab = expand_nl_handler_label (XEXP (slot, 0),
3703 				       thisblock->data.block.first_insn);
3704 	label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3706 	expand_nl_goto_receiver ();
3708 	/* Jump to the "real" nonlocal label.  */
3709 	expand_goto (TREE_VALUE (link));
3712   /* A second pass over all nonlocal labels; this time we handle those
3713      we should not be able to jump to at this point.  */
3714   link = nonlocal_labels;
3715   slot = nonlocal_goto_handler_slots;
3717   for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3718     if (DECL_TOO_LATE (TREE_VALUE (link)))
3721 	lab = expand_nl_handler_label (XEXP (slot, 0),
3722 				       thisblock->data.block.first_insn);
3723 	label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
     /* Shared handler for too-late labels: restore registers, then trap.  */
3729       expand_nl_goto_receiver ();
3730       expand_builtin_trap ();
3733   nonlocal_goto_handler_labels = label_list;
3734   emit_label (afterward);
3737 /* Warn about any unused VARS (which may contain nodes other than
3738 VAR_DECLs, but such nodes are ignored). The nodes are connected
3739 via the TREE_CHAIN field. */
/* Issue -Wunused-variable warnings for each unused VAR_DECL on the chain
   VARS; skips non-VAR_DECL nodes, system-header decls, artificial decls,
   and decls with no name.
   NOTE(review): some lines of this definition are elided in this view.  */
3742 warn_about_unused_variables (vars)
3747   if (warn_unused_variable)
3748     for (decl = vars; decl; decl = TREE_CHAIN (decl))
3749       if (TREE_CODE (decl) == VAR_DECL
3750 	  && ! TREE_USED (decl)
3751 	  && ! DECL_IN_SYSTEM_HEADER (decl)
3752 	  && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3753 	warning_with_decl (decl, "unused variable `%s'");
3756 /* Generate RTL code to terminate a binding contour.
3758 VARS is the chain of VAR_DECL nodes for the variables bound in this
3759 contour. There may actually be other nodes in this chain, but any
3760 nodes other than VAR_DECLS are ignored.
3762 MARK_ENDS is nonzero if we should put a note at the beginning
3763 and end of this binding contour.
3765 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3766 (That is true automatically if the contour has a saved stack level.) */
/* Terminate the current binding contour (see the comment above for the
   VARS/MARK_ENDS/DONT_JUMP_IN contract).  In order: warn about unused
   vars, emit the exit label, build nonlocal-goto receivers if needed,
   invalidate labels that may no longer be jumped to, run cleanups and
   restore the saved stack level, fix up pending gotos, emit the
   BLOCK_END note, and pop this nesting level.
   NOTE(review): many lines of this definition are elided in this view;
   only comments were added.  */
3769 expand_end_bindings (vars, mark_ends, dont_jump_in)
3774   struct nesting *thisblock = block_stack;
3776   /* If any of the variables in this scope were not used, warn the
3778   warn_about_unused_variables (vars);
3780   if (thisblock->exit_label)
3782       do_pending_stack_adjust ();
3783       emit_label (thisblock->exit_label);
3786   /* If necessary, make handlers for nonlocal gotos taking
3787      place in the function calls in this block.  */
3788   if (function_call_count != thisblock->data.block.n_function_calls
3790       /* Make handler for outermost block
3791 	 if there were any nonlocal gotos to this function.  */
3792       && (thisblock->next == 0 ? current_function_has_nonlocal_label
3793 	  /* Make handler for inner block if it has something
3794 	     special to do when you jump out of it.  */
3795 	  : (thisblock->data.block.cleanups != 0
3796 	     || thisblock->data.block.stack_level != 0)))
3797     expand_nl_goto_receivers (thisblock);
3799   /* Don't allow jumping into a block that has a stack level.
3800      Cleanups are allowed, though.  */
3802       || thisblock->data.block.stack_level != 0)
3804       struct label_chain *chain;
3806       /* Any labels in this block are no longer valid to go to.
3807 	 Mark them to cause an error message.  */
3808       for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3810 	  DECL_TOO_LATE (chain->label) = 1;
3811 	  /* If any goto without a fixup came to this label,
3812 	     that must be an error, because gotos without fixups
3813 	     come from outside all saved stack-levels.  */
3814 	  if (TREE_ADDRESSABLE (chain->label))
3815 	    error_with_decl (chain->label,
3816 			     "label `%s' used before containing binding contour");
3820   /* Restore stack level in effect before the block
3821      (only if variable-size objects allocated).  */
3822   /* Perform any cleanups associated with the block.  */
3824   if (thisblock->data.block.stack_level != 0
3825       || thisblock->data.block.cleanups != 0)
3830       /* Don't let cleanups affect ({...}) constructs.  */
3831       int old_expr_stmts_for_value = expr_stmts_for_value;
3832       rtx old_last_expr_value = last_expr_value;
3833       tree old_last_expr_type = last_expr_type;
3834       expr_stmts_for_value = 0;
3836       /* Only clean up here if this point can actually be reached.  */
3837       insn = get_last_insn ();
3838       if (GET_CODE (insn) == NOTE)
3839 	insn = prev_nonnote_insn (insn);
     /* A trailing BARRIER means control cannot fall through to here.  */
3840       reachable = (! insn || GET_CODE (insn) != BARRIER);
3842       /* Do the cleanups.  */
3843       expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3845       do_pending_stack_adjust ();
     /* Restore the ({...}) statement-expression bookkeeping.  */
3847       expr_stmts_for_value = old_expr_stmts_for_value;
3848       last_expr_value = old_last_expr_value;
3849       last_expr_type = old_last_expr_type;
3851       /* Restore the stack level.  */
3853       if (reachable && thisblock->data.block.stack_level != 0)
3855 	  emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3856 			      thisblock->data.block.stack_level, NULL_RTX);
3857 	  if (nonlocal_goto_handler_slots != 0)
3858 	    emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3862       /* Any gotos out of this block must also do these things.
3863 	 Also report any gotos with fixups that came to labels in this
3865       fixup_gotos (thisblock,
3866 		   thisblock->data.block.stack_level,
3867 		   thisblock->data.block.cleanups,
3868 		   thisblock->data.block.first_insn,
3872   /* Mark the beginning and end of the scope if requested.
3873      We do this now, after running cleanups on the variables
3874      just going out of scope, so they are in scope for their cleanups.  */
3878       rtx note = emit_note (NULL, NOTE_INSN_BLOCK_END);
3879       NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
3882     /* Get rid of the beginning-mark if we don't make an end-mark.  */
3883     NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3885   /* Restore the temporary level of TARGET_EXPRs.  */
3886   target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
3888   /* Restore block_stack level for containing block.  */
3890   stack_block_stack = thisblock->data.block.innermost_stack_block;
3891   POPSTACK (block_stack);
3893   /* Pop the stack slot nesting and free any slots at this level.  */
3897 /* Generate code to save the stack pointer at the start of the current block
3898 and set up to restore it on exit. */
/* Save the stack pointer at the start of the current block, if not
   already done, so expand_end_bindings can restore it on exit; also mark
   this block as the innermost one with a saved stack level.
   NOTE(review): some lines of this definition are elided in this view.  */
3901 save_stack_pointer ()
3903   struct nesting *thisblock = block_stack;
     /* Only save once per block; stack_level != 0 means already saved.  */
3905   if (thisblock->data.block.stack_level == 0)
3907       emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3908 		       &thisblock->data.block.stack_level,
3909 		       thisblock->data.block.first_insn);
3910       stack_block_stack = thisblock;
3914 /* Generate RTL for the automatic variable declaration DECL.
3915 (Other kinds of declarations are simply ignored if seen here.) */
/* Body of the automatic-variable expander (presumably expand_decl — the
   signature line is elided in this view).  Chooses the RTL home for DECL:
   error placeholder, incomplete-type placeholder, pseudo register,
   fixed-size stack slot, or dynamically allocated stack space.
   NOTE(review): many lines are elided; only comments were added.  */
3921   struct nesting *thisblock;
3924   type = TREE_TYPE (decl);
3926   /* For a CONST_DECL, set mode, alignment, and sizes from those of the
3927      type in case this node is used in a reference.  */
3928   if (TREE_CODE (decl) == CONST_DECL)
3930       DECL_MODE (decl) = TYPE_MODE (type);
3931       DECL_ALIGN (decl) = TYPE_ALIGN (type);
3932       DECL_SIZE (decl) = TYPE_SIZE (type);
3933       DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
3937   /* Otherwise, only automatic variables need any expansion done.  Static and
3938      external variables, and external functions, will be handled by
3939      `assemble_variable' (called from finish_decl).  TYPE_DECL requires
3940      nothing.  PARM_DECLs are handled in `assign_parms'.  */
3941   if (TREE_CODE (decl) != VAR_DECL)
3944   if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3947   thisblock = block_stack;
3949   /* Create the RTL representation for the variable.  */
3951   if (type == error_mark_node)
3952     SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
3954   else if (DECL_SIZE (decl) == 0)
3955     /* Variable with incomplete type.  */
3958       if (DECL_INITIAL (decl) == 0)
3959 	/* Error message was already done; now avoid a crash.  */
3960 	x = gen_rtx_MEM (BLKmode, const0_rtx);
3962 	/* An initializer is going to decide the size of this array.
3963 	   Until we know the size, represent its address with a reg.  */
3964 	x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3966       set_mem_attributes (x, decl, 1);
3967       SET_DECL_RTL (decl, x);
3969   else if (DECL_MODE (decl) != BLKmode
3970 	   /* If -ffloat-store, don't put explicit float vars
3972 	   && !(flag_float_store
3973 		&& TREE_CODE (type) == REAL_TYPE)
3974 	   && ! TREE_THIS_VOLATILE (decl)
3975 	   && (DECL_REGISTER (decl) || optimize))
3977       /* Automatic variable that can go in a register.  */
3978       int unsignedp = TREE_UNSIGNED (type);
3979       enum machine_mode reg_mode
3980 	= promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3982       SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
     /* Record the decl on the pseudo(s) for debug/dump purposes; a CONCAT
	is used for complex-valued decls (two component registers).  */
3984       if (GET_CODE (DECL_RTL (decl)) == REG)
3985 	REGNO_DECL (REGNO (DECL_RTL (decl))) = decl;
3986       else if (GET_CODE (DECL_RTL (decl)) == CONCAT)
3988 	  REGNO_DECL (REGNO (XEXP (DECL_RTL (decl), 0))) = decl;
3989 	  REGNO_DECL (REGNO (XEXP (DECL_RTL (decl), 1))) = decl;
3992       mark_user_reg (DECL_RTL (decl));
3994       if (POINTER_TYPE_P (type))
3995 	mark_reg_pointer (DECL_RTL (decl),
3996 			  TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
3998       maybe_set_unchanging (DECL_RTL (decl), decl);
4000       /* If something wants our address, try to use ADDRESSOF.  */
4001       if (TREE_ADDRESSABLE (decl))
4002 	put_var_into_stack (decl);
4005   else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
4006 	   && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
4007 		 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
4008 					  STACK_CHECK_MAX_VAR_SIZE)))
4010       /* Variable of fixed size that goes on the stack.  */
4015       /* If we previously made RTL for this decl, it must be an array
4016 	 whose size was determined by the initializer.
4017 	 The old address was a register; set that register now
4018 	 to the proper address.  */
4019       if (DECL_RTL_SET_P (decl))
4021 	  if (GET_CODE (DECL_RTL (decl)) != MEM
4022 	      || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
4024 	  oldaddr = XEXP (DECL_RTL (decl), 0);
4027       /* Set alignment we actually gave this decl.  */
4028       DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
4029 			   : GET_MODE_BITSIZE (DECL_MODE (decl)));
4030       DECL_USER_ALIGN (decl) = 0;
4032       x = assign_temp (decl, 1, 1, 1);
4033       set_mem_attributes (x, decl, 1);
4034       SET_DECL_RTL (decl, x);
     /* Point the placeholder address register at the real slot.  */
4038 	  addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
4039 	  if (addr != oldaddr)
4040 	    emit_move_insn (oldaddr, addr);
4044       /* Dynamic-size object: must push space on the stack.  */
4046       rtx address, size, x;
4048       /* Record the stack pointer on entry to block, if have
4049 	 not already done so.  */
4050       do_pending_stack_adjust ();
4051       save_stack_pointer ();
4053       /* In function-at-a-time mode, variable_size doesn't expand this,
4055       if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
4056 	expand_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
4057 		     const0_rtx, VOIDmode, 0);
4059       /* Compute the variable's size, in bytes.  */
4060       size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
4063       /* Allocate space on the stack for the variable.  Note that
4064 	 DECL_ALIGN says how the variable is to be aligned and we
4065 	 cannot use it to conclude anything about the alignment of
4067       address = allocate_dynamic_stack_space (size, NULL_RTX,
4068 					      TYPE_ALIGN (TREE_TYPE (decl)));
4070       /* Reference the variable indirect through that rtx.  */
4071       x = gen_rtx_MEM (DECL_MODE (decl), address);
4072       set_mem_attributes (x, decl, 1);
4073       SET_DECL_RTL (decl, x);
4076       /* Indicate the alignment we actually gave this variable.  */
4077 #ifdef STACK_BOUNDARY
4078       DECL_ALIGN (decl) = STACK_BOUNDARY;
4080       DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
4082       DECL_USER_ALIGN (decl) = 0;
4086 /* Emit code to perform the initialization of a declaration DECL. */
/* Emit code performing DECL's initialization.  CONST_DECLs and static
   decls need no code; an error_mark_node initializer zero-fills scalar
   types to limit cascading errors; otherwise the initializer is assigned.
   NOTE(review): some lines of this definition are elided in this view.  */
4089 expand_decl_init (decl)
4092   int was_used = TREE_USED (decl);
4094   /* If this is a CONST_DECL, we don't have to generate any code.  Likewise
4095      for static decls.  */
4096   if (TREE_CODE (decl) == CONST_DECL
4097       || TREE_STATIC (decl))
4100   /* Compute and store the initial value now.  */
4102   if (DECL_INITIAL (decl) == error_mark_node)
4104       enum tree_code code = TREE_CODE (TREE_TYPE (decl));
4106       if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
4107 	  || code == POINTER_TYPE || code == REFERENCE_TYPE)
4108 	expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
4112   else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
4114       emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
4115       expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
4119   /* Don't let the initialization count as "using" the variable.  */
4120   TREE_USED (decl) = was_used;
4122   /* Free any temporaries we made while initializing the decl.  */
4123   preserve_temp_slots (NULL_RTX);
4127 /* CLEANUP is an expression to be executed at exit from this binding contour;
4128 for example, in C++, it might call the destructor for this variable.
4130 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
4131 CLEANUP multiple times, and have the correct semantics. This
4132 happens in exception handling, for gotos, returns, breaks that
4133 leave the current scope.
4135 If CLEANUP is nonzero and DECL is zero, we record a cleanup
4136 that is not associated with any particular variable. */
/* Register CLEANUP to run when the current binding contour exits (see the
   contract in the comment above).  In a conditional context, the cleanup
   is guarded by a runtime flag set only when this point is reached.
   NOTE(review): many lines of this definition are elided in this view;
   only comments were added.  */
4139 expand_decl_cleanup (decl, cleanup)
4142   struct nesting *thisblock;
4144   /* Error if we are not in any block.  */
4145   if (cfun == 0 || block_stack == 0)
4148   thisblock = block_stack;
4150   /* Record the cleanup if there is one.  */
4156       tree *cleanups = &thisblock->data.block.cleanups;
4157       int cond_context = conditional_context ();
     /* Conditional context: materialize a word-mode flag, cleared at the
	last unconditional point and set here, to guard the cleanup.  */
4161 	  rtx flag = gen_reg_rtx (word_mode);
4166 	  emit_move_insn (flag, const0_rtx);
4167 	  set_flag_0 = get_insns ();
4170 	  thisblock->data.block.last_unconditional_cleanup
4171 	    = emit_insns_after (set_flag_0,
4172 				thisblock->data.block.last_unconditional_cleanup);
4174 	  emit_move_insn (flag, const1_rtx);
4176 	  cond = build_decl (VAR_DECL, NULL_TREE,
4177 			     (*lang_hooks.types.type_for_mode) (word_mode, 1));
4178 	  SET_DECL_RTL (cond, flag);
4180 	  /* Conditionalize the cleanup.  */
4181 	  cleanup = build (COND_EXPR, void_type_node,
4182 			   (*lang_hooks.truthvalue_conversion) (cond),
4183 			   cleanup, integer_zero_node);
4184 	  cleanup = fold (cleanup);
4186 	  cleanups = thisblock->data.block.cleanup_ptr;
     /* Wrap in UNSAVE_EXPR so the cleanup can be expanded repeatedly
	(per the comment above this function).  */
4189       cleanup = unsave_expr (cleanup);
4191       t = *cleanups = tree_cons (decl, cleanup, *cleanups);
4194 	/* If this block has a cleanup, it belongs in stack_block_stack.  */
4195 	stack_block_stack = thisblock;
4202 	  if (! using_eh_for_cleanups_p)
4203 	    TREE_ADDRESSABLE (t) = 1;
4205 	  expand_eh_region_start ();
     /* In conditional context, splice the just-emitted sequence in at the
	last unconditional cleanup point.  */
4212 	      thisblock->data.block.last_unconditional_cleanup
4213 		= emit_insns_after (seq,
4214 				    thisblock->data.block.last_unconditional_cleanup);
4218 	      thisblock->data.block.last_unconditional_cleanup
4220 	      /* When we insert instructions after the last unconditional cleanup,
4221 		 we don't adjust last_insn.  That means that a later add_insn will
4222 		 clobber the instructions we've just added.  The easiest way to
4223 		 fix this is to just insert another instruction here, so that the
4224 		 instructions inserted after the last unconditional cleanup are
4225 		 never the last instruction.  */
4226 	      emit_note (NULL, NOTE_INSN_DELETED);
4227 	      thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
4233 /* Like expand_decl_cleanup, but maybe only run the cleanup if an exception
/* Like expand_decl_cleanup, but additionally mark the recorded cleanup as
   EH-only when EH_ONLY is set, so it runs only on exception paths.
   NOTE(review): some lines of this definition are elided in this view.  */
4237 expand_decl_cleanup_eh (decl, cleanup, eh_only)
4241   int ret = expand_decl_cleanup (decl, cleanup);
     /* The cleanup just registered is at the head of the block's list.  */
4244       tree node = block_stack->data.block.cleanups;
4245       CLEANUP_EH_ONLY (node) = eh_only;
4250 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
4251 DECL_ELTS is the list of elements that belong to DECL's type.
4252 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
/* Expand anonymous union DECL and give each member in DECL_ELTS an RTL
   home aliasing the union's storage (same MEM/REG, possibly re-moded),
   registering CLEANUP and the per-element cleanups as appropriate.
   NOTE(review): several lines of this definition are elided in this view;
   only comments were added.  */
4255 expand_anon_union_decl (decl, cleanup, decl_elts)
4256      tree decl, cleanup, decl_elts;
4258   struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
4262   /* If any of the elements are addressable, so is the entire union.  */
4263   for (t = decl_elts; t; t = TREE_CHAIN (t))
4264     if (TREE_ADDRESSABLE (TREE_VALUE (t)))
4266 	TREE_ADDRESSABLE (decl) = 1;
4271   expand_decl_cleanup (decl, cleanup);
4272   x = DECL_RTL (decl);
4274   /* Go through the elements, assigning RTL to each.  */
4275   for (t = decl_elts; t; t = TREE_CHAIN (t))
4277       tree decl_elt = TREE_VALUE (t);
4278       tree cleanup_elt = TREE_PURPOSE (t);
4279       enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4281       /* If any of the elements are addressable, so is the entire
4283       if (TREE_USED (decl_elt))
4284 	TREE_USED (decl) = 1;
4286       /* Propagate the union's alignment to the elements.  */
4287       DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
4288       DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
4290       /* If the element has BLKmode and the union doesn't, the union is
4291          aligned such that the element doesn't need to have BLKmode, so
4292          change the element's mode to the appropriate one for its size.  */
4293       if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4294 	DECL_MODE (decl_elt) = mode
4295 	  = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
4297       /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4298 	 instead create a new MEM rtx with the proper mode.  */
4299       if (GET_CODE (x) == MEM)
4301 	  if (mode == GET_MODE (x))
4302 	    SET_DECL_RTL (decl_elt, x);
4304 	    SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
4306       else if (GET_CODE (x) == REG)
4308 	  if (mode == GET_MODE (x))
4309 	    SET_DECL_RTL (decl_elt, x);
4311 	    SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
4316       /* Record the cleanup if there is one.  */
4319 	thisblock->data.block.cleanups
4320 	  = tree_cons (decl_elt, cleanup_elt,
4321 		       thisblock->data.block.cleanups);
4325 /* Expand a list of cleanups LIST.
4326 Elements may be expressions or may be nested lists.
4328 If DONT_DO is nonnull, then any list-element
4329 whose TREE_PURPOSE matches DONT_DO is omitted.
4330 This is sometimes used to avoid a cleanup associated with
4331 a value that is being returned out of the scope.
4333 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
4334 goto and handle protection regions specially in that case.
4336 If REACHABLE, we emit code, otherwise just inform the exception handling
4337 code about this finalization. */
/* Expand the cleanup LIST (see the DONT_DO/IN_FIXUP/REACHABLE contract in
   the comment above), recursing into nested TREE_LIST elements and
   coordinating with the EH machinery.
   NOTE(review): some lines of this definition are elided in this view.  */
4340 expand_cleanups (list, dont_do, in_fixup, reachable)
4347   for (tail = list; tail; tail = TREE_CHAIN (tail))
4348     if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
4350 	if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4351 	  expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
4354 	    if (! in_fixup && using_eh_for_cleanups_p)
4355 	      expand_eh_region_end_cleanup (TREE_VALUE (tail));
     /* CLEANUP_EH_ONLY cleanups run only on exception paths, so they are
	not expanded inline here.  */
4357 	    if (reachable && !CLEANUP_EH_ONLY (tail))
4359 		/* Cleanups may be run multiple times.  For example,
4360 		   when exiting a binding contour, we expand the
4361 		   cleanups associated with that contour.  When a goto
4362 		   within that binding contour has a target outside that
4363 		   contour, it will expand all cleanups from its scope to
4364 		   the target.  Though the cleanups are expanded multiple
4365 		   times, the control paths are non-overlapping so the
4366 		   cleanups will not be executed twice.  */
4368 		/* We may need to protect from outer cleanups.  */
4369 		if (in_fixup && using_eh_for_cleanups_p)
4371 		    expand_eh_region_start ();
4373 		    expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4375 		    expand_eh_region_end_fixup (TREE_VALUE (tail));
4378 		  expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4386 /* Mark when the context we are emitting RTL for as a conditional
4387 context, so that any cleanup actions we register with
4388 expand_decl_init will be properly conditionalized when those
4389 cleanup actions are later performed. Must be called before any
4390 expression (tree) is expanded that is within a conditional context. */
/* Enter a conditional region: bump the current block's conditional_code
   counter so cleanups registered inside are properly conditionalized.
   Counter-based because these regions nest (see end_cleanup_deferral).  */
4393 start_cleanup_deferral ()
4395   /* block_stack can be NULL if we are inside the parameter list.  It is
4396      OK to do nothing, because cleanups aren't possible here.  */
4398     ++block_stack->data.block.conditional_code;
4401 /* Mark the end of a conditional region of code. Because cleanup
4402 deferrals may be nested, we may still be in a conditional region
4403 after we end the currently deferred cleanups, only after we end all
4404 deferred cleanups, are we back in unconditional code. */
/* Leave one level of conditional region; code becomes unconditional again
   only when the counter returns to zero (regions may nest).  */
4407 end_cleanup_deferral ()
4409   /* block_stack can be NULL if we are inside the parameter list.  It is
4410      OK to do nothing, because cleanups aren't possible here.  */
4412     --block_stack->data.block.conditional_code;
4415 /* Move all cleanups from the current block_stack
4416 to the containing block_stack, where they are assumed to
4417 have been created. If anything can cause a temporary to
4418 be created, but not expanded for more than one level of
4419 block_stacks, then this code will have to change. */
/* Body of the cleanup-hoisting helper described in the comment above
   (presumably move_cleanups_up — the header line is elided in this view):
   chain the current block's cleanups onto the containing block's list and
   clear them here.  */
4424   struct nesting *block = block_stack;
4425   struct nesting *outer = block->next;
4427   outer->data.block.cleanups
4428     = chainon (block->data.block.cleanups,
4429 	       outer->data.block.cleanups);
4430   block->data.block.cleanups = 0;
/* Return the cleanup list of the innermost binding contour, or 0 when
   there is no contour.
   NOTE(review): some lines of this definition are elided in this view.  */
4434 last_cleanup_this_contour ()
4436   if (block_stack == 0)
4439   return block_stack->data.block.cleanups;
4442 /* Return 1 if there are any pending cleanups at this point.
4443 If THIS_CONTOUR is nonzero, check the current contour as well.
4444 Otherwise, look only at the contours that enclose this one. */
/* Return 1 if any cleanups are pending: the current contour is consulted
   only when THIS_CONTOUR is non-zero, otherwise only enclosing contours.
   NOTE(review): some lines (returns) of this definition are elided in
   this view; only comments were added.  */
4447 any_pending_cleanups (this_contour)
4450   struct nesting *block;
4452   if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
4455   if (this_contour && block_stack->data.block.cleanups != NULL)
     /* Fast path: no cleanups here and none recorded from outer blocks.  */
4457   if (block_stack->data.block.cleanups == 0
4458       && block_stack->data.block.outer_cleanups == 0)
4461   for (block = block_stack->next; block; block = block->next)
4462     if (block->data.block.cleanups != 0)
4468 /* Enter a case (Pascal) or switch (C) statement.
4469 Push a block onto case_stack and nesting_stack
4470 to accumulate the case-labels that are seen
4471 and to record the labels generated for the statement.
4473 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4474 Otherwise, this construct is transparent for `exit_something'.
4476 EXPR is the index-expression to be dispatched on.
4477 TYPE is its nominal type. We could simply convert EXPR to this type,
4478 but instead we take short cuts. */
/* Enter a case/switch statement (see the EXIT_FLAG/EXPR/TYPE/PRINTNAME
   contract above): push a case nesting level recording the index
   expression and bookkeeping for the labels to come.
   NOTE(review): some lines of this definition are elided in this view.  */
4481 expand_start_case (exit_flag, expr, type, printname)
4485      const char *printname;
4487   struct nesting *thiscase = ALLOC_NESTING ();
4489   /* Make an entry on case_stack for the case we are entering.  */
4491   thiscase->next = case_stack;
4492   thiscase->all = nesting_stack;
4493   thiscase->depth = ++nesting_depth;
4494   thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4495   thiscase->data.case_stmt.case_list = 0;
4496   thiscase->data.case_stmt.index_expr = expr;
4497   thiscase->data.case_stmt.nominal_type = type;
4498   thiscase->data.case_stmt.default_label = 0;
4499   thiscase->data.case_stmt.printname = printname;
4500   thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4501   case_stack = thiscase;
4502   nesting_stack = thiscase;
4504   do_pending_stack_adjust ();
4506   /* Make sure case_stmt.start points to something that won't
4507      need any transformation before expand_end_case.  */
4508   if (GET_CODE (get_last_insn ()) != NOTE)
4509     emit_note (NULL, NOTE_INSN_DELETED);
4511   thiscase->data.case_stmt.start = get_last_insn ();
     /* The controlling expression puts us in a conditional context.  */
4513   start_cleanup_deferral ();
4516 /* Start a "dummy case statement" within which case labels are invalid
4517 and are not connected to any larger real case statement.
4518 This can be used if you don't want to let a case statement jump
4519 into the middle of certain kinds of constructs. */
/* Start a dummy case statement: case labels seen inside are invalid and
   not attached to any real switch (start == 0 marks it as a dummy; see
   the check in pushcase).  */
4522 expand_start_case_dummy ()
4524   struct nesting *thiscase = ALLOC_NESTING ();
4526   /* Make an entry on case_stack for the dummy.  */
4528   thiscase->next = case_stack;
4529   thiscase->all = nesting_stack;
4530   thiscase->depth = ++nesting_depth;
4531   thiscase->exit_label = 0;
4532   thiscase->data.case_stmt.case_list = 0;
4533   thiscase->data.case_stmt.start = 0;
4534   thiscase->data.case_stmt.nominal_type = 0;
4535   thiscase->data.case_stmt.default_label = 0;
4536   case_stack = thiscase;
4537   nesting_stack = thiscase;
4538   start_cleanup_deferral ();
4541 /* End a dummy case statement. */
/* End a dummy case statement started by expand_start_case_dummy:
   close the conditional region and pop the case nesting level.  */
4544 expand_end_case_dummy ()
4546   end_cleanup_deferral ();
4547   POPSTACK (case_stack);
4550 /* Return the data type of the index-expression
4551 of the innermost case statement, or null if none. */
/* Return the type of the innermost case statement's index expression,
   or null if there is no enclosing case statement.
   NOTE(review): the null-return path of this definition is elided in
   this view.  */
4554 case_index_expr_type ()
4557     return TREE_TYPE (case_stack->data.case_stmt.index_expr);
/* Body of the first-label check (presumably check_seenlabel — the header
   is elided in this view): on the first label of a switch, warn if any
   real insns were emitted before it, i.e. unreachable code at the start
   of the switch body.  */
4564   /* If this is the first label, warn if any insns have been emitted.  */
4565   if (case_stack->data.case_stmt.line_number_status >= 0)
4569       restore_line_number_status
4570 	(case_stack->data.case_stmt.line_number_status);
     /* Mark the first-label check as done for this switch.  */
4571       case_stack->data.case_stmt.line_number_status = -1;
4573       for (insn = case_stack->data.case_stmt.start;
4575 	   insn = NEXT_INSN (insn))
4577 	  if (GET_CODE (insn) == CODE_LABEL)
4579 	  if (GET_CODE (insn) != NOTE
4580 	      && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
     /* Walk back to a line-number note to attribute the warning.  */
4583 		insn = PREV_INSN (insn);
4584 	      while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4586 	      /* If insn is zero, then there must have been a syntax error.  */
4588 		warning_with_file_and_line (NOTE_SOURCE_FILE (insn),
4589 					    NOTE_LINE_NUMBER (insn),
4590 					    "unreachable code at beginning of %s",
4591 					    case_stack->data.case_stmt.printname);
4598 /* Accumulate one case or default label inside a case or switch statement.
4599 VALUE is the value of the case (a null pointer, for a default label).
4600 The function CONVERTER, when applied to arguments T and V,
4601 converts the value V to the type T.
4603 If not currently inside a case or switch statement, return 1 and do
4604 nothing. The caller will print a language-specific error message.
4605 If VALUE is a duplicate or overlaps, return 2 and do nothing
4606 except store the (first) duplicate node in *DUPLICATE.
4607 If VALUE is out of range, return 3 and do nothing.
4608 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4609 Return 0 on success.
4611 Extended to handle range statements. */
4614 pushcase (value, converter, label, duplicate)
4616 tree (*converter) PARAMS ((tree, tree));
4623 /* Fail if not inside a real case statement. */
4624 if (! (case_stack && case_stack->data.case_stmt.start))
/* A stack block deeper than the case means the label would jump into
   the scope of a cleanup or variable-sized array (return code 5).  */
4627 if (stack_block_stack
4628 && stack_block_stack->depth > case_stack->depth)
/* Pick up the index type and the nominal comparison type recorded
   when the case statement was started.  */
4631 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4632 nominal_type = case_stack->data.case_stmt.nominal_type;
4634 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4635 if (index_type == error_mark_node)
4638 /* Convert VALUE to the type in which the comparisons are nominally done. */
4640 value = (*converter) (nominal_type, value);
4644 /* Fail if this value is out of range for the actual type of the index
4645 (which may be narrower than NOMINAL_TYPE). */
4647 && (TREE_CONSTANT_OVERFLOW (value)
4648 || ! int_fits_type_p (value, index_type)))
/* A single value is inserted as the degenerate range [VALUE, VALUE].  */
4651 return add_case_node (value, value, label, duplicate);
4654 /* Like pushcase but this case applies to all values between VALUE1 and
4655 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4656 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4657 starts at VALUE1 and ends at the highest value of the index type.
4658 If both are NULL, this case applies to all values.
4660 The return value is the same as that of pushcase but there is one
4661 additional error code: 4 means the specified range was empty. */
4664 pushcase_range (value1, value2, converter, label, duplicate)
4665 tree value1, value2;
4666 tree (*converter) PARAMS ((tree, tree));
4673 /* Fail if not inside a real case statement. */
4674 if (! (case_stack && case_stack->data.case_stmt.start))
/* Return 5 if the label would jump into the scope of a cleanup or
   variable-sized array (same check as pushcase).  */
4677 if (stack_block_stack
4678 && stack_block_stack->depth > case_stack->depth)
4681 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4682 nominal_type = case_stack->data.case_stmt.nominal_type;
4684 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4685 if (index_type == error_mark_node)
4690 /* Convert VALUEs to type in which the comparisons are nominally done
4691 and replace any unspecified value with the corresponding bound. */
4693 value1 = TYPE_MIN_VALUE (index_type);
4695 value2 = TYPE_MAX_VALUE (index_type);
4697 /* Fail if the range is empty. Do this before any conversion since
4698 we want to allow out-of-range empty ranges. */
4699 if (value2 != 0 && tree_int_cst_lt (value2, value1))
4702 /* If the max was unbounded, use the max of the nominal_type we are
4703 converting to. Do this after the < check above to suppress false
4706 value2 = TYPE_MAX_VALUE (nominal_type);
/* Normalize both bounds to the nominal comparison type.  */
4708 value1 = (*converter) (nominal_type, value1);
4709 value2 = (*converter) (nominal_type, value2);
4711 /* Fail if these values are out of range. */
4712 if (TREE_CONSTANT_OVERFLOW (value1)
4713 || ! int_fits_type_p (value1, index_type))
4716 if (TREE_CONSTANT_OVERFLOW (value2)
4717 || ! int_fits_type_p (value2, index_type))
4720 return add_case_node (value1, value2, label, duplicate);
4723 /* Do the actual insertion of a case label for pushcase and pushcase_range
4724 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4725 slowdown for large switch statements. */
4728 add_case_node (low, high, label, duplicate)
4733 struct case_node *p, **q, *r;
4735 /* If there's no HIGH value, then this is not a case range; it's
4736 just a simple case label. But that's just a degenerate case
4741 /* Handle default labels specially. */
/* A second default is a duplicate: report the first via *DUPLICATE.  */
4744 if (case_stack->data.case_stmt.default_label != 0)
4746 *duplicate = case_stack->data.case_stmt.default_label;
4749 case_stack->data.case_stmt.default_label = label;
4750 expand_label (label);
/* Walk down from the tree root to find the insertion point,
   comparing against each node's [low, high] range.  */
4754 q = &case_stack->data.case_stmt.case_list;
4761 /* Keep going past elements distinctly greater than HIGH. */
4762 if (tree_int_cst_lt (high, p->low))
4765 /* or distinctly less than LOW. */
4766 else if (tree_int_cst_lt (p->high, low))
4771 /* We have an overlap; this is an error. */
4772 *duplicate = p->code_label;
4777 /* Add this label to the chain, and succeed. */
4779 r = (struct case_node *) xmalloc (sizeof (struct case_node));
4782 /* If the bounds are equal, turn this into the one-value case. */
4783 if (tree_int_cst_equal (low, high))
4788 r->code_label = label;
4789 expand_label (label);
/* AVL rebalancing: walk back up toward the root adjusting balance
   factors, performing single or double rotations where a subtree
   has grown too tall.  NOTE(review): several lines of the rotation
   bookkeeping are not visible in this view -- verify against the
   full source before editing.  */
4799 struct case_node *s;
4805 if (! (b = p->balance))
4806 /* Growth propagation from left side. */
/* Single right rotation: R becomes the subtree root.  */
4813 if ((p->left = s = r->right))
4822 if ((r->parent = s))
4830 case_stack->data.case_stmt.case_list = r;
4833 /* r->balance == +1 */
/* Double (left-right) rotation: T = r->right becomes the root.  */
4838 struct case_node *t = r->right;
4840 if ((p->left = s = t->right))
4844 if ((r->right = s = t->left))
4858 if ((t->parent = s))
4866 case_stack->data.case_stmt.case_list = t;
4873 /* p->balance == +1; growth of left side balances the node. */
4883 if (! (b = p->balance))
4884 /* Growth propagation from right side. */
/* Mirror image of the above: single left rotation.  */
4892 if ((p->right = s = r->left))
4900 if ((r->parent = s))
4909 case_stack->data.case_stmt.case_list = r;
4913 /* r->balance == -1 */
/* Double (right-left) rotation: T = r->left becomes the root.  */
4917 struct case_node *t = r->left;
4919 if ((p->right = s = t->left))
4924 if ((r->left = s = t->right))
4938 if ((t->parent = s))
4947 case_stack->data.case_stmt.case_list = t;
4953 /* p->balance == -1; growth of right side balances the node. */
4966 /* Returns the number of possible values of TYPE.
4967 Returns -1 if the number is unknown, variable, or if the number does not
4968 fit in a HOST_WIDE_INT.
4969 Sets *SPARSENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4970 do not increase monotonically (there may be duplicates);
4971 to 1 if the values increase monotonically, but not always by 1;
4972 otherwise sets it to 0. */
4975 all_cases_count (type, sparseness)
4980 HOST_WIDE_INT count, minval, lastval;
4984 switch (TREE_CODE (type))
/* presumably the character-type case: one value per bit pattern
   of a byte -- TODO confirm against the full switch.  */
4991 count = 1 << BITS_PER_UNIT;
/* Integer-like case: count = MAX - MIN + 1, computed with folded
   trees so it works for values wider than HOST_WIDE_INT; bail out
   (leaving the default) unless the result fits.  */
4996 if (TYPE_MAX_VALUE (type) != 0
4997 && 0 != (t = fold (build (MINUS_EXPR, type, TYPE_MAX_VALUE (type),
4998 TYPE_MIN_VALUE (type))))
4999 && 0 != (t = fold (build (PLUS_EXPR, type, t,
5000 convert (type, integer_zero_node))))
5001 && host_integerp (t, 1))
5002 count = tree_low_cst (t, 1);
5008 /* Don't waste time with enumeral types with huge values. */
5009 if (! host_integerp (TYPE_MIN_VALUE (type), 0)
5010 || TYPE_MAX_VALUE (type) == 0
5011 || ! host_integerp (TYPE_MAX_VALUE (type), 0))
5014 lastval = minval = tree_low_cst (TYPE_MIN_VALUE (type), 0);
/* Scan the enumerators in declaration order, classifying the type as
   non-monotonic (2) or sparse-but-monotonic (1) as we go.  */
5017 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
5019 HOST_WIDE_INT thisval = tree_low_cst (TREE_VALUE (t), 0);
5021 if (*sparseness == 2 || thisval <= lastval)
5023 else if (thisval != minval + count)
/* Test bit INDEX of the byte-array bitstring ARRAY
   (HOST_BITS_PER_CHAR bits per array element).  */
5034 #define BITARRAY_TEST(ARRAY, INDEX) \
5035 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
5036 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
/* Set bit INDEX of the byte-array bitstring ARRAY.  */
5037 #define BITARRAY_SET(ARRAY, INDEX) \
5038 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
5039 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
5041 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
5042 with the case values we have seen, assuming the case expression
5044 SPARSENESS is as determined by all_cases_count.
5046 The time needed is proportional to COUNT, unless
5047 SPARSENESS is 2, in which case quadratic time is needed. */
5050 mark_seen_cases (type, cases_seen, count, sparseness)
5052 unsigned char *cases_seen;
5053 HOST_WIDE_INT count;
5056 tree next_node_to_try = NULL_TREE;
5057 HOST_WIDE_INT next_node_offset = 0;
/* VAL is a scratch INTEGER_CST reused for each value probed.  */
5059 struct case_node *n, *root = case_stack->data.case_stmt.case_list;
5060 tree val = make_node (INTEGER_CST);
5062 TREE_TYPE (val) = type;
5066 else if (sparseness == 2)
5069 unsigned HOST_WIDE_INT xlo;
5071 /* This less efficient loop is only needed to handle
5072 duplicate case values (multiple enum constants
5073 with the same value). */
5074 TREE_TYPE (val) = TREE_TYPE (root->low);
/* For each enumerator (offset xlo), search the case tree for a
   range containing its value.  */
5075 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
5076 t = TREE_CHAIN (t), xlo++)
5078 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
5079 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
5083 /* Keep going past elements distinctly greater than VAL. */
5084 if (tree_int_cst_lt (val, n->low))
5087 /* or distinctly less than VAL. */
5088 else if (tree_int_cst_lt (n->high, val))
5093 /* We have found a matching range. */
5094 BITARRAY_SET (cases_seen, xlo);
/* Non-quadratic path: flatten the AVL tree into a list linked by
   the right field, then walk each case range value by value.  */
5104 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
5106 for (n = root; n; n = n->right)
5108 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
5109 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
5110 while (! tree_int_cst_lt (n->high, val))
5112 /* Calculate (into xlo) the "offset" of the integer (val).
5113 The element with lowest value has offset 0, the next smallest
5114 element has offset 1, etc. */
5116 unsigned HOST_WIDE_INT xlo;
/* For sparse enums, find VAL's ordinal by scanning TYPE_VALUES,
   resuming where the previous search left off.  */
5120 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
5122 /* The TYPE_VALUES will be in increasing order, so
5123 starting searching where we last ended. */
5124 t = next_node_to_try;
5125 xlo = next_node_offset;
5131 t = TYPE_VALUES (type);
5134 if (tree_int_cst_equal (val, TREE_VALUE (t)))
5136 next_node_to_try = TREE_CHAIN (t);
5137 next_node_offset = xlo + 1;
/* Wrapped all the way around without a match.  */
5142 if (t == next_node_to_try)
/* Dense case: offset = VAL - TYPE_MIN_VALUE, computed in
   double-word arithmetic (neg_double + add_double).  */
5151 t = TYPE_MIN_VALUE (type);
5153 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
5157 add_double (xlo, xhi,
5158 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
/* Only record offsets that fit in the bitstring.  */
5162 if (xhi == 0 && xlo < (unsigned HOST_WIDE_INT) count)
5163 BITARRAY_SET (cases_seen, xlo);
/* Advance VAL by one and continue through the range.  */
5165 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5167 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
5173 /* Given a switch statement with an expression that is an enumeration
5174 type, warn if any of the enumeration type's literals are not
5175 covered by the case expressions of the switch. Also, warn if there
5176 are any extra switch cases that are *not* elements of the
5181 At one stage this function would: ``If all enumeration literals
5182 were covered by the case expressions, turn one of the expressions
5183 into the default expression since it should not be possible to fall
5184 through such a switch.''
5186 That code has since been removed as: ``This optimization is
5187 disabled because it causes valid programs to fail. ANSI C does not
5188 guarantee that an expression with enum type will have a value that
5189 is the same as one of the enumeration literals.'' */
5192 check_for_full_enumeration_handling (type)
5195 struct case_node *n;
5198 /* True iff the selector type is a numbered set mode. */
5201 /* The number of possible selector values. */
5204 /* For each possible selector value. a one iff it has been matched
5205 by a case value alternative. */
5206 unsigned char *cases_seen;
5208 /* The allocated size of cases_seen, in chars. */
5209 HOST_WIDE_INT bytes_needed;
5211 size = all_cases_count (type, &sparseness);
5212 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
/* Only warn for enums of a tractable size; give up silently when
   the bitstring would be huge or memory is short.  */
5214 if (size > 0 && size < 600000
5215 /* We deliberately use calloc here, not cmalloc, so that we can suppress
5216 this optimization if we don't have enough memory rather than
5217 aborting, as xmalloc would do. */
5219 (unsigned char *) really_call_calloc (bytes_needed, 1)) != NULL)
5222 tree v = TYPE_VALUES (type);
5224 /* The time complexity of this code is normally O(N), where
5225 N being the number of members in the enumerated type.
5226 However, if type is an ENUMERAL_TYPE whose values do not
5227 increase monotonically, O(N*log(N)) time may be needed. */
5229 mark_seen_cases (type, cases_seen, size, sparseness);
/* Any unset bit corresponds to an enumerator without a case.  */
5231 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
5232 if (BITARRAY_TEST (cases_seen, i) == 0)
5233 warning ("enumeration value `%s' not handled in switch",
5234 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
5239 /* Now we go the other way around; we warn if there are case
5240 expressions that don't correspond to enumerators. This can
5241 occur since C and C++ don't enforce type-checking of
5242 assignments to enumeration variables. */
/* Flatten the case tree into a right-linked list so it can be
   walked sequentially.  */
5244 if (case_stack->data.case_stmt.case_list
5245 && case_stack->data.case_stmt.case_list->left)
5246 case_stack->data.case_stmt.case_list
5247 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
5248 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
/* Look for an enumerator equal to this case's low bound.  */
5250 for (chain = TYPE_VALUES (type);
5251 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
5252 chain = TREE_CHAIN (chain))
5257 if (TYPE_NAME (type) == 0)
5258 warning ("case value `%ld' not in enumerated type",
5259 (long) TREE_INT_CST_LOW (n->low))
5261 warning ("case value `%ld' not in enumerated type `%s'",
5262 (long) TREE_INT_CST_LOW (n->low),
5263 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5266 : DECL_NAME (TYPE_NAME (type))));
/* For a range, check the high bound separately.  */
5268 if (!tree_int_cst_equal (n->low, n->high))
5270 for (chain = TYPE_VALUES (type);
5271 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
5272 chain = TREE_CHAIN (chain))
5277 if (TYPE_NAME (type) == 0)
5278 warning ("case value `%ld' not in enumerated type",
5279 (long) TREE_INT_CST_LOW (n->high));
5281 warning ("case value `%ld' not in enumerated type `%s'",
5282 (long) TREE_INT_CST_LOW (n->high),
5283 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5286 : DECL_NAME (TYPE_NAME (type))));
5292 /* Free CN, and its children. */
5295 free_case_nodes (cn)
/* Post-order traversal: free both subtrees before the node itself.  */
5300 free_case_nodes (cn->left);
5301 free_case_nodes (cn->right);
5308 /* Terminate a case (Pascal) or switch (C) statement
5309 in which ORIG_INDEX is the expression to be tested.
5310 If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
5311 type as given in the source before any compiler conversions.
5312 Generate the code to test it and jump to the right place. */
5315 expand_end_case_type (orig_index, orig_type)
5316 tree orig_index, orig_type;
5318 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
5319 rtx default_label = 0;
5320 struct case_node *n;
5327 rtx before_case, end;
5328 struct nesting *thiscase = case_stack;
5329 tree index_expr, index_type;
5332 /* Don't crash due to previous errors. */
5333 if (thiscase == NULL)
/* Fetch the index expression and comparison type recorded at
   expand_start_case time.  */
5336 table_label = gen_label_rtx ();
5337 index_expr = thiscase->data.case_stmt.index_expr;
5338 index_type = TREE_TYPE (index_expr);
5339 unsignedp = TREE_UNSIGNED (index_type);
5340 if (orig_type == NULL)
5341 orig_type = TREE_TYPE (orig_index);
5343 do_pending_stack_adjust ();
5345 /* This might get an spurious warning in the presence of a syntax error;
5346 it could be fixed by moving the call to check_seenlabel after the
5347 check for error_mark_node, and copying the code of check_seenlabel that
5348 deals with case_stack->data.case_stmt.line_number_status /
5349 restore_line_number_status in front of the call to end_cleanup_deferral;
5350 However, this might miss some useful warnings in the presence of
5351 non-syntax errors. */
5354 /* An ERROR_MARK occurs for various reasons including invalid data type. */
5355 if (index_type != error_mark_node)
5357 /* If the switch expression was an enumerated type, check that
5358 exactly all enumeration literals are covered by the cases.
5359 The check is made when -Wswitch was specified and there is no
5360 default case, or when -Wswitch-enum was specified. */
5361 if (((warn_switch && !thiscase->data.case_stmt.default_label)
5362 || warn_switch_enum)
5363 && TREE_CODE (orig_type) == ENUMERAL_TYPE
5364 && TREE_CODE (index_expr) != INTEGER_CST)
5365 check_for_full_enumeration_handling (orig_type);
5367 if (warn_switch_default && !thiscase->data.case_stmt.default_label)
5368 warning ("switch missing default case");
5370 /* If we don't have a default-label, create one here,
5371 after the body of the switch. */
5372 if (thiscase->data.case_stmt.default_label == 0)
5374 thiscase->data.case_stmt.default_label
5375 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5376 expand_label (thiscase->data.case_stmt.default_label);
5378 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5380 before_case = get_last_insn ();
/* Flatten the AVL case tree into a right-linked list.  */
5382 if (thiscase->data.case_stmt.case_list
5383 && thiscase->data.case_stmt.case_list->left)
5384 thiscase->data.case_stmt.case_list
5385 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
5387 /* Simplify the case-list before we count it. */
5388 group_case_nodes (thiscase->data.case_stmt.case_list);
5390 /* Get upper and lower bounds of case values.
5391 Also convert all the case values to the index expr's data type. */
5394 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5396 /* Check low and high label values are integers. */
5397 if (TREE_CODE (n->low) != INTEGER_CST)
5399 if (TREE_CODE (n->high) != INTEGER_CST)
5402 n->low = convert (index_type, n->low);
5403 n->high = convert (index_type, n->high);
5405 /* Count the elements and track the largest and smallest
5406 of them (treating them as signed even if they are not). */
5414 if (INT_CST_LT (n->low, minval))
5416 if (INT_CST_LT (maxval, n->high))
5419 /* A range counts double, since it requires two compares. */
5420 if (! tree_int_cst_equal (n->low, n->high))
5424 /* Compute span of values. */
5426 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5428 end_cleanup_deferral ();
/* Degenerate switch (presumably no cases): evaluate the index for
   side effects and jump straight to the default.  */
5432 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5434 emit_jump (default_label);
5437 /* If range of values is much bigger than number of values,
5438 make a sequence of conditional branches instead of a dispatch.
5439 If the switch-index is a constant, do it this way
5440 because we can optimize it. */
5442 else if (count < case_values_threshold ()
5443 || compare_tree_int (range, 10 * count) > 0
5444 /* RANGE may be signed, and really large ranges will show up
5445 as negative numbers. */
5446 || compare_tree_int (range, 0) < 0
5447 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5450 || TREE_CODE (index_expr) == INTEGER_CST
5451 || (TREE_CODE (index_expr) == COMPOUND_EXPR
5452 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
5454 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5456 /* If the index is a short or char that we do not have
5457 an insn to handle comparisons directly, convert it to
5458 a full integer now, rather than letting each comparison
5459 generate the conversion. */
5461 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5462 && ! have_insn_for (COMPARE, GET_MODE (index)))
5464 enum machine_mode wider_mode;
5465 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5466 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5467 if (have_insn_for (COMPARE, wider_mode))
5469 index = convert_to_mode (wider_mode, index, unsignedp);
5475 do_pending_stack_adjust ();
/* INDEX will be compared many times; get it out of memory.  */
5477 index = protect_from_queue (index, 0);
5478 if (GET_CODE (index) == MEM)
5479 index = copy_to_reg (index);
5480 if (GET_CODE (index) == CONST_INT
5481 || TREE_CODE (index_expr) == INTEGER_CST)
5483 /* Make a tree node with the proper constant value
5484 if we don't already have one. */
5485 if (TREE_CODE (index_expr) != INTEGER_CST)
5488 = build_int_2 (INTVAL (index),
5489 unsignedp || INTVAL (index) >= 0 ? 0 : -1)
5490 index_expr = convert (index_type, index_expr);
5493 /* For constant index expressions we need only
5494 issue an unconditional branch to the appropriate
5495 target code. The job of removing any unreachable
5496 code is left to the optimisation phase if the
5497 "-O" option is specified. */
5498 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5499 if (! tree_int_cst_lt (index_expr, n->low)
5500 && ! tree_int_cst_lt (n->high, index_expr))
5504 emit_jump (label_rtx (n->code_label));
5506 emit_jump (default_label);
5510 /* If the index expression is not constant we generate
5511 a binary decision tree to select the appropriate
5512 target code. This is done as follows:
5514 The list of cases is rearranged into a binary tree,
5515 nearly optimal assuming equal probability for each case.
5517 The tree is transformed into RTL, eliminating
5518 redundant test conditions at the same time.
5520 If program flow could reach the end of the
5521 decision tree an unconditional jump to the
5522 default code is emitted. */
/* Cost estimation only applies to non-enum (text-like) indices.  */
5525 = (TREE_CODE (orig_type) != ENUMERAL_TYPE
5526 && estimate_case_costs (thiscase->data.case_stmt.case_list));
5527 balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
5528 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5529 default_label, index_type);
5530 emit_jump_if_reachable (default_label);
/* Dispatch-table path: first try the casesi pattern, then
   tablejump, then emit the jump table by hand.  */
5535 if (! try_casesi (index_type, index_expr, minval, range,
5536 table_label, default_label))
5538 index_type = thiscase->data.case_stmt.nominal_type;
5540 /* Index jumptables from zero for suitable values of
5541 minval to avoid a subtraction. */
5543 && compare_tree_int (minval, 0) > 0
5544 && compare_tree_int (minval, 3) < 0)
5546 minval = integer_zero_node;
5550 if (! try_tablejump (index_type, index_expr, minval, range,
5551 table_label, default_label))
5555 /* Get table of labels to jump to, in order of case index. */
5557 ncases = tree_low_cst (range, 0) + 1;
5558 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
5559 memset ((char *) labelvec, 0, ncases * sizeof (rtx));
5561 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5563 /* Compute the low and high bounds relative to the minimum
5564 value since that should fit in a HOST_WIDE_INT while the
5565 actual values may not. */
5567 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5568 n->low, minval)), 1);
5569 HOST_WIDE_INT i_high
5570 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5571 n->high, minval)), 1);
5574 for (i = i_low; i <= i_high; i ++)
5576 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
5579 /* Fill in the gaps with the default. */
5580 for (i = 0; i < ncases; i++)
5581 if (labelvec[i] == 0)
5582 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
5584 /* Output the table */
5585 emit_label (table_label);
/* PIC / pc-relative targets use ADDR_DIFF_VEC, others ADDR_VEC.  */
5587 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
5588 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5589 gen_rtx_LABEL_REF (Pmode, table_label),
5590 gen_rtvec_v (ncases, labelvec),
5591 const0_rtx, const0_rtx));
5593 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5594 gen_rtvec_v (ncases, labelvec)));
5596 /* If the case insn drops through the table,
5597 after the table we must jump to the default-label.
5598 Otherwise record no drop-through after the table. */
5599 #ifdef CASE_DROPS_THROUGH
5600 emit_jump (default_label);
/* Move the dispatch code emitted above to before the switch body.  */
5606 before_case = NEXT_INSN (before_case);
5607 end = get_last_insn ();
5608 if (squeeze_notes (&before_case, &end))
5610 reorder_insns (before_case, end,
5611 thiscase->data.case_stmt.start);
5614 end_cleanup_deferral ();
5616 if (thiscase->exit_label)
5617 emit_label (thiscase->exit_label);
5619 free_case_nodes (case_stack->data.case_stmt.case_list);
5620 POPSTACK (case_stack);
5625 /* Convert the tree NODE into a list linked by the right field, with the left
5626 field zeroed. RIGHT is used for recursion; it is a list to be placed
5627 rightmost in the resulting list. */
5629 static struct case_node *
5630 case_tree2list (node, right)
5631 struct case_node *node, *right;
5633 struct case_node *left;
/* Flatten the right subtree first so it ends up after NODE.  */
5636 right = case_tree2list (node->right, right);
5638 node->right = right;
/* If there is a left subtree, flatten it in front of NODE.  */
5639 if ((left = node->left))
5642 return case_tree2list (left, node);
5648 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5651 do_jump_if_equal (op1, op2, label, unsignedp)
5652 rtx op1, op2, label;
/* Fold the comparison at compile time when both operands are
   CONST_INTs; otherwise emit a compare-and-branch.  */
5655 if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
5657 if (INTVAL (op1) == INTVAL (op2))
/* Use whichever operand carries a real machine mode; a CONST_INT
   has VOIDmode.  */
5661 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
5662 (GET_MODE (op1) == VOIDmode
5663 ? GET_MODE (op2) : GET_MODE (op1)),
5667 /* Not all case values are encountered equally. This function
5668 uses a heuristic to weight case labels, in cases where that
5669 looks like a reasonable thing to do.
5671 Right now, all we try to guess is text, and we establish the
5674 chars above space: 16
5683 If we find any cases in the switch that are not either -1 or in the range
5684 of valid ASCII characters, or are control characters other than those
5685 commonly used with "\", don't treat this switch scanning text.
5687 Return 1 if these nodes are suitable for cost estimation, otherwise
5691 estimate_case_costs (node)
5694 tree min_ascii = integer_minus_one_node;
5695 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5699 /* If we haven't already made the cost table, make it now. Note that the
5700 lower bound of the table is -1, not zero. */
5702 if (! cost_table_initialized)
5704 cost_table_initialized = 1;
/* Printable characters get weights by class; control characters
   other than the common escapes get -1, which disqualifies any
   switch that uses them.  */
5706 for (i = 0; i < 128; i++)
5709 COST_TABLE (i) = 16;
5710 else if (ISPUNCT (i))
5712 else if (ISCNTRL (i))
5713 COST_TABLE (i) = -1;
/* Whitespace and common "\"-escape characters keep small
   positive weights.  */
5716 COST_TABLE (' ') = 8;
5717 COST_TABLE ('\t') = 4;
5718 COST_TABLE ('\0') = 4;
5719 COST_TABLE ('\n') = 2;
5720 COST_TABLE ('\f') = 1;
5721 COST_TABLE ('\v') = 1;
5722 COST_TABLE ('\b') = 1;
5725 /* See if all the case expressions look like text. It is text if the
5726 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5727 as signed arithmetic since we don't want to ever access cost_table with a
5728 value less than -1. Also check that none of the constants in a range
5729 are strange control characters. */
5731 for (n = node; n; n = n->right)
5733 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
/* Reject the switch if any value in a range has a negative cost.  */
5736 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
5737 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
5738 if (COST_TABLE (i) < 0)
5742 /* All interesting values are within the range of interesting
5743 ASCII characters. */
5747 /* Scan an ordered list of case nodes
5748 combining those with consecutive values or ranges.
5750 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
5753 group_case_nodes (head)
5756 case_node_ptr node = head;
/* LB is the first real insn at NODE's label, used to decide whether
   two labels reach the same code.  */
5760 rtx lb = next_real_insn (label_rtx (node->code_label));
5762 case_node_ptr np = node;
5764 /* Try to group the successors of NODE with NODE. */
5765 while (((np = np->right) != 0)
5766 /* Do they jump to the same place? */
/* Same destination if the labels share a first real insn, or both
   labels are simple jumps to the same target.  */
5767 && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
5768 || (lb != 0 && lb2 != 0
5769 && simplejump_p (lb)
5770 && simplejump_p (lb2)
5771 && rtx_equal_p (SET_SRC (PATTERN (lb)),
5772 SET_SRC (PATTERN (lb2)))))
5773 /* Are their ranges consecutive? */
5774 && tree_int_cst_equal (np->low,
5775 fold (build (PLUS_EXPR,
5776 TREE_TYPE (node->high),
5779 /* An overflow is not consecutive. */
5780 && tree_int_cst_lt (node->high,
5781 fold (build (PLUS_EXPR,
5782 TREE_TYPE (node->high),
5784 integer_one_node))))
/* Absorb NP's range into NODE.  */
5786 node->high = np->high;
5788 /* NP is the first node after NODE which can't be grouped with it.
5789 Delete the nodes in between, and move on to that node. */
5795 /* Take an ordered list of case nodes
5796 and transform them into a near optimal binary tree,
5797 on the assumption that any target code selection value is as
5798 likely as any other.
5800 The transformation is performed by splitting the ordered
5801 list into two equal sections plus a pivot. The parts are
5802 then attached to the pivot as left and right branches. Each
5803 branch is then transformed recursively. */
5806 balance_case_nodes (head, parent)
5807 case_node_ptr *head;
5808 case_node_ptr parent;
5821 /* Count the number of entries on branch. Also count the ranges. */
/* Accumulate per-node costs from the character cost table; a range
   contributes the cost of both endpoints.  */
5825 if (!tree_int_cst_equal (np->low, np->high))
5829 cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
5833 cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
5841 /* Split this list if it is long enough for that to help. */
5846 /* Find the place in the list that bisects the list's total cost,
5847 Here I gets half the total cost. */
5852 /* Skip nodes while their cost does not reach that amount. */
5853 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5854 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
5855 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
5858 npp = &(*npp)->right;
5863 /* Leave this branch lopsided, but optimize left-hand
5864 side and fill in `parent' fields for right-hand side. */
5866 np->parent = parent;
5867 balance_case_nodes (&np->left, np);
5868 for (; np->right; np = np->right)
5869 np->right->parent = np;
5873 /* If there are just three nodes, split at the middle one. */
5875 npp = &(*npp)->right;
5878 /* Find the place in the list that bisects the list's total cost,
5879 where ranges count as 2.
5880 Here I gets half the total cost. */
5881 i = (i + ranges + 1) / 2;
5884 /* Skip nodes while their cost does not reach that amount. */
5885 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5890 npp = &(*npp)->right;
/* NP becomes the pivot; recurse on the two halves.  */
5895 np->parent = parent;
5898 /* Optimize each of the two split parts. */
5899 balance_case_nodes (&np->left, np);
5900 balance_case_nodes (&np->right, np);
5904 /* Else leave this branch as one level,
5905 but fill in `parent' fields. */
5907 np->parent = parent;
5908 for (; np->right; np = np->right)
5909 np->right->parent = np;
5914 /* Search the parent sections of the case node tree
5915 to see if a test for the lower bound of NODE would be redundant.
5916 INDEX_TYPE is the type of the index expression.
5918 The instructions to generate the case decision tree are
5919 output in the same order as nodes are processed so it is
5920 known that if a parent node checks the range of the current
5921 node minus one that the current node is bounded at its lower
5922 span. Thus the test would be redundant. */
5925 node_has_low_bound (node, index_type)
5930 case_node_ptr pnode;
5932 /* If the lower bound of this node is the lowest value in the index type,
5933 we need not test it. */
5935 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5938 /* If this node has a left branch, the value at the left must be less
5939 than that at this node, so it cannot be bounded at the bottom and
5940 we need not bother testing any further. */
5945 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5946 node->low, integer_one_node));
5948 /* If the subtraction above overflowed, we can't verify anything.
5949 Otherwise, look for a parent that tests our value - 1. */
5951 if (! tree_int_cst_lt (low_minus_one, node->low))
/* A parent whose high bound equals low-1 means the range below us
   was already dispatched, so our lower test is redundant.  */
5954 for (pnode = node->parent; pnode; pnode = pnode->parent)
5955 if (tree_int_cst_equal (low_minus_one, pnode->high))
5961 /* Search the parent sections of the case node tree
5962 to see if a test for the upper bound of NODE would be redundant.
5963 INDEX_TYPE is the type of the index expression.
5965 The instructions to generate the case decision tree are
5966 output in the same order as nodes are processed so it is
5967 known that if a parent node checks the range of the current
5968 node plus one that the current node is bounded at its upper
5969 span. Thus the test would be redundant. */
5972 node_has_high_bound (node, index_type)
5977 case_node_ptr pnode;
5979 /* If there is no upper bound, obviously no test is needed. */
5981 if (TYPE_MAX_VALUE (index_type) == NULL)
5984 /* If the upper bound of this node is the highest value in the type
5985 of the index expression, we need not test against it. */
5987 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5990 /* If this node has a right branch, the value at the right must be greater
5991 than that at this node, so it cannot be bounded at the top and
5992 we need not bother testing any further. */
5997 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5998 node->high, integer_one_node));
6000 /* If the addition above overflowed, we can't verify anything.
6001 Otherwise, look for a parent that tests our value + 1. */
6003 if (! tree_int_cst_lt (node->high, high_plus_one))
/* A parent whose low bound equals high+1 means the range above us
   was already dispatched, so our upper test is redundant.  */
6006 for (pnode = node->parent; pnode; pnode = pnode->parent)
6007 if (tree_int_cst_equal (high_plus_one, pnode->low))
6013 /* Search the parent sections of the
6014 case node tree to see if both tests for the upper and lower
6015 bounds of NODE would be redundant. */
6018 node_is_bounded (node, index_type)
/* Bounded on both sides iff both individual checks succeed.  */
6022 return (node_has_low_bound (node, index_type)
6023 && node_has_high_bound (node, index_type));
6026 /* Emit an unconditional jump to LABEL unless it would be dead code. */
6029 emit_jump_if_reachable (label)
/* A BARRIER as the last insn means control cannot fall through to
   this point, so the jump would be unreachable.  */
6032 if (GET_CODE (get_last_insn ()) != BARRIER)
/* Emit step-by-step code to select a case for the value of INDEX.
   The thus generated decision tree follows the form of the
   case-node binary tree NODE, whose nodes represent test conditions.
   INDEX_TYPE is the type of the index of the switch.

   Care is taken to prune redundant tests from the decision tree
   by detecting any boundary conditions already checked by
   emitted rtx.  (See node_has_high_bound, node_has_low_bound
   and node_is_bounded, above.)

   Where the test conditions can be shown to be redundant we emit
   an unconditional jump to the target code.  As a further
   optimization, the subordinates of a tree node are examined to
   check for bounded nodes.  In this case conditional and/or
   unconditional jumps as a result of the boundary check for the
   current node are arranged to target the subordinates associated
   code for out of bound conditions on the current node.

   We can assume that when control reaches the code generated here,
   the index value has already been compared with the parents
   of this node, and determined to be on the same side of each parent
   as this node is.  Thus, if this node tests for the value 51,
   and a parent tested for 52, we don't need to consider
   the possibility of a value greater than 51.  If another parent
   tests for the value 50, then this node need not test anything.  */
6063 emit_case_nodes (index, node, default_label, index_type)
6069 /* If INDEX has an unsigned type, we must make unsigned branches. */
6070 int unsignedp = TREE_UNSIGNED (index_type);
6071 enum machine_mode mode = GET_MODE (index);
6072 enum machine_mode imode = TYPE_MODE (index_type);
6074 /* See if our parents have already tested everything for us.
6075 If they have, emit an unconditional jump for this node. */
6076 if (node_is_bounded (node, index_type))
6077 emit_jump (label_rtx (node->code_label));
6079 else if (tree_int_cst_equal (node->low, node->high))
6081 /* Node is single valued. First see if the index expression matches
6082 this node and then check our children, if any. */
6084 do_jump_if_equal (index,
6085 convert_modes (mode, imode,
6086 expand_expr (node->low, NULL_RTX,
6089 label_rtx (node->code_label), unsignedp);
6091 if (node->right != 0 && node->left != 0)
6093 /* This node has children on both sides.
6094 Dispatch to one side or the other
6095 by comparing the index value with this node's value.
6096 If one subtree is bounded, check that one first,
6097 so we can avoid real branches in the tree. */
6099 if (node_is_bounded (node->right, index_type))
6101 emit_cmp_and_jump_insns (index,
6104 expand_expr (node->high, NULL_RTX,
6107 GT, NULL_RTX, mode, unsignedp,
6108 label_rtx (node->right->code_label));
6109 emit_case_nodes (index, node->left, default_label, index_type);
6112 else if (node_is_bounded (node->left, index_type))
6114 emit_cmp_and_jump_insns (index,
6117 expand_expr (node->high, NULL_RTX,
6120 LT, NULL_RTX, mode, unsignedp,
6121 label_rtx (node->left->code_label));
6122 emit_case_nodes (index, node->right, default_label, index_type);
6127 /* Neither node is bounded. First distinguish the two sides;
6128 then emit the code for one side at a time. */
6130 tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6132 /* See if the value is on the right. */
6133 emit_cmp_and_jump_insns (index,
6136 expand_expr (node->high, NULL_RTX,
6139 GT, NULL_RTX, mode, unsignedp,
6140 label_rtx (test_label));
6142 /* Value must be on the left.
6143 Handle the left-hand subtree. */
6144 emit_case_nodes (index, node->left, default_label, index_type);
6145 /* If left-hand subtree does nothing,
6147 emit_jump_if_reachable (default_label);
6149 /* Code branches here for the right-hand subtree. */
6150 expand_label (test_label);
6151 emit_case_nodes (index, node->right, default_label, index_type);
6155 else if (node->right != 0 && node->left == 0)
6157 /* Here we have a right child but no left so we issue conditional
6158 branch to default and process the right child.
6160 Omit the conditional branch to default if we it avoid only one
6161 right child; it costs too much space to save so little time. */
6163 if (node->right->right || node->right->left
6164 || !tree_int_cst_equal (node->right->low, node->right->high))
6166 if (!node_has_low_bound (node, index_type))
6168 emit_cmp_and_jump_insns (index,
6171 expand_expr (node->high, NULL_RTX,
6174 LT, NULL_RTX, mode, unsignedp,
6178 emit_case_nodes (index, node->right, default_label, index_type);
6181 /* We cannot process node->right normally
6182 since we haven't ruled out the numbers less than
6183 this node's value. So handle node->right explicitly. */
6184 do_jump_if_equal (index,
6187 expand_expr (node->right->low, NULL_RTX,
6190 label_rtx (node->right->code_label), unsignedp);
6193 else if (node->right == 0 && node->left != 0)
6195 /* Just one subtree, on the left. */
6196 if (node->left->left || node->left->right
6197 || !tree_int_cst_equal (node->left->low, node->left->high))
6199 if (!node_has_high_bound (node, index_type))
6201 emit_cmp_and_jump_insns (index,
6204 expand_expr (node->high, NULL_RTX,
6207 GT, NULL_RTX, mode, unsignedp,
6211 emit_case_nodes (index, node->left, default_label, index_type);
6214 /* We cannot process node->left normally
6215 since we haven't ruled out the numbers less than
6216 this node's value. So handle node->left explicitly. */
6217 do_jump_if_equal (index,
6220 expand_expr (node->left->low, NULL_RTX,
6223 label_rtx (node->left->code_label), unsignedp);
6228 /* Node is a range. These cases are very similar to those for a single
6229 value, except that we do not start by testing whether this node
6230 is the one to branch to. */
6232 if (node->right != 0 && node->left != 0)
6234 /* Node has subtrees on both sides.
6235 If the right-hand subtree is bounded,
6236 test for it first, since we can go straight there.
6237 Otherwise, we need to make a branch in the control structure,
6238 then handle the two subtrees. */
6239 tree test_label = 0;
6241 if (node_is_bounded (node->right, index_type))
6242 /* Right hand node is fully bounded so we can eliminate any
6243 testing and branch directly to the target code. */
6244 emit_cmp_and_jump_insns (index,
6247 expand_expr (node->high, NULL_RTX,
6250 GT, NULL_RTX, mode, unsignedp,
6251 label_rtx (node->right->code_label));
6254 /* Right hand node requires testing.
6255 Branch to a label where we will handle it later. */
6257 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6258 emit_cmp_and_jump_insns (index,
6261 expand_expr (node->high, NULL_RTX,
6264 GT, NULL_RTX, mode, unsignedp,
6265 label_rtx (test_label));
6268 /* Value belongs to this node or to the left-hand subtree. */
6270 emit_cmp_and_jump_insns (index,
6273 expand_expr (node->low, NULL_RTX,
6276 GE, NULL_RTX, mode, unsignedp,
6277 label_rtx (node->code_label));
6279 /* Handle the left-hand subtree. */
6280 emit_case_nodes (index, node->left, default_label, index_type);
6282 /* If right node had to be handled later, do that now. */
6286 /* If the left-hand subtree fell through,
6287 don't let it fall into the right-hand subtree. */
6288 emit_jump_if_reachable (default_label);
6290 expand_label (test_label);
6291 emit_case_nodes (index, node->right, default_label, index_type);
6295 else if (node->right != 0 && node->left == 0)
6297 /* Deal with values to the left of this node,
6298 if they are possible. */
6299 if (!node_has_low_bound (node, index_type))
6301 emit_cmp_and_jump_insns (index,
6304 expand_expr (node->low, NULL_RTX,
6307 LT, NULL_RTX, mode, unsignedp,
6311 /* Value belongs to this node or to the right-hand subtree. */
6313 emit_cmp_and_jump_insns (index,
6316 expand_expr (node->high, NULL_RTX,
6319 LE, NULL_RTX, mode, unsignedp,
6320 label_rtx (node->code_label));
6322 emit_case_nodes (index, node->right, default_label, index_type);
6325 else if (node->right == 0 && node->left != 0)
6327 /* Deal with values to the right of this node,
6328 if they are possible. */
6329 if (!node_has_high_bound (node, index_type))
6331 emit_cmp_and_jump_insns (index,
6334 expand_expr (node->high, NULL_RTX,
6337 GT, NULL_RTX, mode, unsignedp,
6341 /* Value belongs to this node or to the left-hand subtree. */
6343 emit_cmp_and_jump_insns (index,
6346 expand_expr (node->low, NULL_RTX,
6349 GE, NULL_RTX, mode, unsignedp,
6350 label_rtx (node->code_label));
6352 emit_case_nodes (index, node->left, default_label, index_type);
6357 /* Node has no children so we check low and high bounds to remove
6358 redundant tests. Only one of the bounds can exist,
6359 since otherwise this node is bounded--a case tested already. */
6360 int high_bound = node_has_high_bound (node, index_type);
6361 int low_bound = node_has_low_bound (node, index_type);
6363 if (!high_bound && low_bound)
6365 emit_cmp_and_jump_insns (index,
6368 expand_expr (node->high, NULL_RTX,
6371 GT, NULL_RTX, mode, unsignedp,
6375 else if (!low_bound && high_bound)
6377 emit_cmp_and_jump_insns (index,
6380 expand_expr (node->low, NULL_RTX,
6383 LT, NULL_RTX, mode, unsignedp,
6386 else if (!low_bound && !high_bound)
6388 /* Widen LOW and HIGH to the same width as INDEX. */
6389 tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
6390 tree low = build1 (CONVERT_EXPR, type, node->low);
6391 tree high = build1 (CONVERT_EXPR, type, node->high);
6392 rtx low_rtx, new_index, new_bound;
6394 /* Instead of doing two branches, emit one unsigned branch for
6395 (index-low) > (high-low). */
6396 low_rtx = expand_expr (low, NULL_RTX, mode, 0);
6397 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
6398 NULL_RTX, unsignedp,
6400 new_bound = expand_expr (fold (build (MINUS_EXPR, type,
6404 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
6405 mode, 1, default_label);
6408 emit_jump (label_rtx (node->code_label));