1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 It also creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
27 The functions whose names start with `expand_' are called by the
28 parser to generate RTL instructions for various kinds of constructs.
30 Some control and binding constructs require calling several such
31 functions at different times. For example, a simple if-then
32 is expanded by calling `expand_start_cond' (with the condition-expression
33 as argument) before parsing the then-clause and calling `expand_end_cond'
34 after parsing the then-clause. */
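/* A hedged illustration (not itself part of this file): a front end
   expanding the C statement `if (cond) stmt;' would make roughly this
   sequence of calls into this file:

	expand_start_cond (cond, 0);
	expand_expr_stmt (stmt);
	expand_end_cond ();

   The 0 is the EXITFLAG argument (no exit label is requested), and the
   middle call stands for whatever the front end uses to expand the
   then-clause.  */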
45 #include "insn-config.h"
48 #include "hard-reg-set.h"
55 #include "langhooks.h"
58 /* Assume that case vectors are not pc-relative. */
59 #ifndef CASE_VECTOR_PC_RELATIVE
60 #define CASE_VECTOR_PC_RELATIVE 0
63 /* Functions and data structures for expanding case statements. */
65 /* Case label structure, used to hold info on labels within case
66 statements. We handle "range" labels; for a single-value label
67 as in C, the high and low limits are the same.
69 An AVL tree of case nodes is initially created, and later transformed
70 to a list linked via the RIGHT fields in the nodes. Nodes with
71 higher case values are later in the list.
73 Switch statements can be output in one of two forms. A branch table
74 is used if there are more than a few labels and the labels are dense
75 within the range between the smallest and largest case value. If a
76 branch table is used, no further manipulations are done with the case node chain.
79 The alternative to the use of a branch table is to generate a series
80 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
81 and PARENT fields to hold a binary tree. Initially the tree is
82 totally unbalanced, with everything on the right. We balance the tree
83 with nodes on the left having lower case values than the parent
84 and nodes on the right having higher values. We then output the tree in order. */
87 struct case_node GTY(())
89 struct case_node *left; /* Left son in binary tree */
90 struct case_node *right; /* Right son in binary tree; also node chain */
91 struct case_node *parent; /* Parent of node in binary tree */
92 tree low; /* Lowest index value for this label */
93 tree high; /* Highest index value for this label */
94 tree code_label; /* Label to jump to when node matches */
98 typedef struct case_node case_node;
99 typedef struct case_node *case_node_ptr;
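/* A hedged example of the two output strategies described above.  For a
   dense switch such as

	switch (x) { case 1: f (); break; case 2: g (); break; case 3: h (); break; }

   a branch table indexed by the case value is normally emitted and the
   node chain is not touched further.  For sparse values such as 1, 40
   and 900, the nodes are instead chained 1 -> 40 -> 900 through their
   RIGHT fields and then balanced, so that 40 becomes the root with 1 as
   its LEFT child and 900 as its RIGHT child; emit_case_nodes emits a
   compare-and-branch against 40 first and recurses into whichever
   subtree can still match.  */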
101 /* These are used by estimate_case_costs and balance_case_nodes. */
103 /* This must be a signed type, and non-ANSI compilers lack signed char. */
104 static short cost_table_[129];
105 static int use_cost_table;
106 static int cost_table_initialized;
108 /* Special care is needed because we allow -1, but TREE_INT_CST_LOW is unsigned. */
110 #define COST_TABLE(I) cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
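/* For example (a sketch of the intended mapping, not a quotation from
   the sources): COST_TABLE (-1) refers to cost_table_[0] and
   COST_TABLE (127) refers to cost_table_[128], so the single 129-entry
   array covers the value -1 together with the 7-bit character range
   0 through 127.  */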
112 /* Stack of control and binding constructs we are currently inside.
114 These constructs begin when you call `expand_start_WHATEVER'
115 and end when you call `expand_end_WHATEVER'. This stack records
116 info about how the construct began that tells the end-function
117 what to do. It also may provide information about the construct
118 to alter the behavior of other constructs within the body.
119 For example, they may affect the behavior of C `break' and `continue'.
121 Each construct gets one `struct nesting' object.
122 All of these objects are chained through the `all' field.
123 `nesting_stack' points to the first object (innermost construct).
124 The position of an entry on `nesting_stack' is in its `depth' field.
126 Each type of construct has its own individual stack.
127 For example, loops have `loop_stack'. Each object points to the
128 next object of the same type through the `next' field.
130 Some constructs are visible to `break' exit-statements and others
131 are not. Which constructs are visible depends on the language.
132 Therefore, the data structure allows each construct to be visible
133 or not, according to the args given when the construct is started.
134 The construct is visible if the `exit_label' field is non-null.
135 In that case, the value should be a CODE_LABEL rtx. */
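/* A hedged illustration of the bookkeeping described above: while
   expanding

	while (a) { if (b) break; c; }

   the loop pushes one `struct nesting' that sits on both loop_stack and
   nesting_stack, and the `if' pushes another that sits on cond_stack
   and, above the first, on nesting_stack; `break' is then expanded as a
   jump to the exit/end label recorded in the innermost construct that
   is visible to it, here the loop's.  */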
137 struct nesting GTY(())
140 struct nesting *next;
151 /* For conds (if-then and if-then-else statements). */
154 /* Label for the end of the if construct.
155 There is none if EXITFLAG was not set
156 and no `else' has been seen yet. */
158 /* Label for the end of this alternative.
159 This may be the end of the if or the next else/elseif. */
161 } GTY ((tag ("COND_NESTING"))) cond;
165 /* Label at the top of the loop; place to loop back to. */
167 /* Label at the end of the whole construct. */
169 /* Label before a jump that branches to the end of the whole
170 construct. This is where destructors go if any. */
172 /* Label for `continue' statement to jump to;
173 this is in front of the stepper of the loop. */
175 } GTY ((tag ("LOOP_NESTING"))) loop;
176 /* For variable binding contours. */
179 /* Sequence number of this binding contour within the function,
180 in order of entry. */
181 int block_start_count;
182 /* Nonzero => value to restore stack to on exit. */
184 /* The NOTE that starts this contour.
185 Used by expand_goto to check whether the destination
186 is within each contour or not. */
188 /* Innermost containing binding contour that has a stack level. */
189 struct nesting *innermost_stack_block;
190 /* List of cleanups to be run on exit from this contour.
191 This is a list of expressions to be evaluated.
192 The TREE_PURPOSE of each link is the ..._DECL node
193 which the cleanup pertains to. */
195 /* List of cleanup-lists of blocks containing this block,
196 as they were at the locus where this block appears.
197 There is an element for each containing block,
198 ordered innermost containing block first.
199 The tail of this list can be 0,
200 if all remaining elements would be empty lists.
201 The element's TREE_VALUE is the cleanup-list of that block,
202 which may be null. */
204 /* Chain of labels defined inside this binding contour.
205 For contours that have stack levels or cleanups. */
206 struct label_chain *label_chain;
207 /* Number of function calls seen, as of start of this block. */
208 int n_function_calls;
209 /* Nonzero if this is associated with an EH region. */
210 int exception_region;
211 /* The saved target_temp_slot_level from our outer block.
212 We may reset target_temp_slot_level to be the level of
213 this block; if that is done, target_temp_slot_level
214 reverts to the saved target_temp_slot_level at the very end of the block. */
216 int block_target_temp_slot_level;
217 /* True if we are currently emitting insns in an area of
218 output code that is controlled by a conditional
219 expression. This is used by the cleanup handling code to
220 generate conditional cleanup actions. */
221 int conditional_code;
222 /* A place to move the start of the exception region for any
223 of the conditional cleanups, must be at the end or after
224 the start of the last unconditional cleanup, and before any
225 conditional branch points. */
226 rtx last_unconditional_cleanup;
227 } GTY ((tag ("BLOCK_NESTING"))) block;
228 /* For switch (C) or case (Pascal) statements,
229 and also for dummies (see `expand_start_case_dummy'). */
232 /* The insn after which the case dispatch should finally
233 be emitted. Zero for a dummy. */
235 /* A list of case labels; it is first built as an AVL tree.
236 During expand_end_case, this is converted to a list, and may be
237 rearranged into a nearly balanced binary tree. */
238 struct case_node *case_list;
239 /* Label to jump to if no case matches. */
241 /* The expression to be dispatched on. */
243 /* Type that INDEX_EXPR should be converted to. */
245 /* Name of this kind of statement, for warnings. */
246 const char *printname;
247 /* Used to save no_line_numbers till we see the first case label.
248 We set this to -1 when we see the first case label in this
250 int line_number_status;
251 } GTY ((tag ("CASE_NESTING"))) case_stmt;
252 } GTY ((desc ("%1.desc"))) data;
255 /* Allocate and return a new `struct nesting'. */
257 #define ALLOC_NESTING() \
258 (struct nesting *) ggc_alloc (sizeof (struct nesting))
260 /* Pop the nesting stack element by element until we pop off
261 the element which is at the top of STACK.
262 Update all the other stacks, popping off elements from them
263 as we pop them from nesting_stack. */
265 #define POPSTACK(STACK) \
266 do { struct nesting *target = STACK; \
267 struct nesting *this; \
268 do { this = nesting_stack; \
269 if (loop_stack == this) \
270 loop_stack = loop_stack->next; \
271 if (cond_stack == this) \
272 cond_stack = cond_stack->next; \
273 if (block_stack == this) \
274 block_stack = block_stack->next; \
275 if (stack_block_stack == this) \
276 stack_block_stack = stack_block_stack->next; \
277 if (case_stack == this) \
278 case_stack = case_stack->next; \
279 nesting_depth = nesting_stack->depth - 1; \
280 nesting_stack = this->all; } \
281 while (this != target); } while (0)
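/* A hedged sketch of how POPSTACK is used by the `expand_end_WHATEVER'
   functions (the local variable name is illustrative):

	struct nesting *thiscond = cond_stack;
	... emit the labels recorded in thiscond ...
	POPSTACK (cond_stack);

   This unwinds nesting_stack down through THISCOND while keeping every
   per-kind stack and nesting_depth in step.  */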
283 /* In some cases it is impossible to generate code for a forward goto
284 until the label definition is seen. This happens when it may be necessary
285 for the goto to reset the stack pointer: we don't yet know how to do that.
286 So expand_goto puts an entry on this fixup list.
287 Each time a binding contour that resets the stack is exited, we check each fixup.
289 If the target label has now been defined, we can insert the proper code. */
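/* A hedged source-level example of when such a fixup arises:

	{
	  char buf[n];
	  ...
	  goto out;
	}
      out: ;

   The contour declaring BUF has a stack level to restore, but at the
   point of the goto the label OUT has not yet been seen, so expand_goto
   cannot know what to restore; expand_fixup queues a goto_fixup
   instead, and fixup_gotos finalizes it once the contour is exited and
   the label is defined.  */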
291 struct goto_fixup GTY(())
293 /* Points to following fixup. */
294 struct goto_fixup *next;
295 /* Points to the insn before the jump insn.
296 If more code must be inserted, it goes after this insn. */
298 /* The LABEL_DECL that this jump is jumping to, or 0
299 for break, continue or return. */
301 /* The BLOCK for the place where this goto was found. */
303 /* The CODE_LABEL rtx that this is jumping to. */
305 /* Number of binding contours started in current function
306 before the label reference. */
307 int block_start_count;
308 /* The outermost stack level that should be restored for this jump.
309 Each time a binding contour that resets the stack is exited,
310 if the target label is *not* yet defined, this slot is updated. */
312 /* List of lists of cleanup expressions to be run by this goto.
313 There is one element for each block that this goto is within.
314 The tail of this list can be 0,
315 if all remaining elements would be empty.
316 The TREE_VALUE contains the cleanup list of that block as of the
317 time this goto was seen.
318 The TREE_ADDRESSABLE flag is 1 for a block that has been exited. */
319 tree cleanup_list_list;
322 /* Within any binding contour that must restore a stack level,
323 all labels are recorded with a chain of these structures. */
325 struct label_chain GTY(())
327 /* Points to the following label_chain entry. */
328 struct label_chain *next;
332 struct stmt_status GTY(())
334 /* Chain of all pending binding contours. */
335 struct nesting * x_block_stack;
337 /* If any new stacks are added here, add them to POPSTACKS too. */
339 /* Chain of all pending binding contours that restore stack levels
341 struct nesting * x_stack_block_stack;
343 /* Chain of all pending conditional statements. */
344 struct nesting * x_cond_stack;
346 /* Chain of all pending loops. */
347 struct nesting * x_loop_stack;
349 /* Chain of all pending case or switch statements. */
350 struct nesting * x_case_stack;
352 /* Separate chain including all of the above,
353 chained through the `all' field. */
354 struct nesting * x_nesting_stack;
356 /* Number of entries on nesting_stack now. */
359 /* Number of binding contours started so far in this function. */
360 int x_block_start_count;
362 /* Each time we expand an expression-statement,
363 record the expr's type and its RTL value here. */
364 tree x_last_expr_type;
365 rtx x_last_expr_value;
367 /* Nonzero if within a ({...}) grouping, in which case we must
368 always compute a value for each expr-stmt in case it is the last one. */
369 int x_expr_stmts_for_value;
371 /* Filename and line number of last line-number note,
372 whether we actually emitted it or not. */
373 const char *x_emit_filename;
376 struct goto_fixup *x_goto_fixup_chain;
379 #define block_stack (cfun->stmt->x_block_stack)
380 #define stack_block_stack (cfun->stmt->x_stack_block_stack)
381 #define cond_stack (cfun->stmt->x_cond_stack)
382 #define loop_stack (cfun->stmt->x_loop_stack)
383 #define case_stack (cfun->stmt->x_case_stack)
384 #define nesting_stack (cfun->stmt->x_nesting_stack)
385 #define nesting_depth (cfun->stmt->x_nesting_depth)
386 #define current_block_start_count (cfun->stmt->x_block_start_count)
387 #define last_expr_type (cfun->stmt->x_last_expr_type)
388 #define last_expr_value (cfun->stmt->x_last_expr_value)
389 #define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
390 #define emit_filename (cfun->stmt->x_emit_filename)
391 #define emit_lineno (cfun->stmt->x_emit_lineno)
392 #define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)
394 /* Nonzero if we are using EH to handle cleanups. */
395 static int using_eh_for_cleanups_p = 0;
397 static int n_occurrences PARAMS ((int, const char *));
398 static bool parse_input_constraint PARAMS ((const char **, int, int, int,
399 int, const char * const *,
401 static bool decl_conflicts_with_clobbers_p PARAMS ((tree, const HARD_REG_SET));
402 static void expand_goto_internal PARAMS ((tree, rtx, rtx));
403 static int expand_fixup PARAMS ((tree, rtx, rtx));
404 static rtx expand_nl_handler_label PARAMS ((rtx, rtx));
405 static void expand_nl_goto_receiver PARAMS ((void));
406 static void expand_nl_goto_receivers PARAMS ((struct nesting *));
407 static void fixup_gotos PARAMS ((struct nesting *, rtx, tree,
409 static bool check_operand_nalternatives PARAMS ((tree, tree));
410 static bool check_unique_operand_names PARAMS ((tree, tree));
411 static tree resolve_operand_names PARAMS ((tree, tree, tree,
413 static char *resolve_operand_name_1 PARAMS ((char *, tree, tree));
414 static void expand_null_return_1 PARAMS ((rtx));
415 static enum br_predictor return_prediction PARAMS ((rtx));
416 static void expand_value_return PARAMS ((rtx));
417 static int tail_recursion_args PARAMS ((tree, tree));
418 static void expand_cleanups PARAMS ((tree, tree, int, int));
419 static void check_seenlabel PARAMS ((void));
420 static void do_jump_if_equal PARAMS ((rtx, rtx, rtx, int));
421 static int estimate_case_costs PARAMS ((case_node_ptr));
422 static void group_case_nodes PARAMS ((case_node_ptr));
423 static void balance_case_nodes PARAMS ((case_node_ptr *,
425 static int node_has_low_bound PARAMS ((case_node_ptr, tree));
426 static int node_has_high_bound PARAMS ((case_node_ptr, tree));
427 static int node_is_bounded PARAMS ((case_node_ptr, tree));
428 static void emit_jump_if_reachable PARAMS ((rtx));
429 static void emit_case_nodes PARAMS ((rtx, case_node_ptr, rtx, tree));
430 static struct case_node *case_tree2list PARAMS ((case_node *, case_node *));
433 using_eh_for_cleanups ()
435 using_eh_for_cleanups_p = 1;
439 init_stmt_for_function ()
441 cfun->stmt = ((struct stmt_status *)ggc_alloc (sizeof (struct stmt_status)));
443 /* We are not currently within any block, conditional, loop or case. */
445 stack_block_stack = 0;
452 current_block_start_count = 0;
454 /* No gotos have been expanded yet. */
455 goto_fixup_chain = 0;
457 /* We are not processing a ({...}) grouping. */
458 expr_stmts_for_value = 0;
462 /* Return nonzero if anything is pushed on the loop, condition, or case stack. */
467 return cond_stack || loop_stack || case_stack;
470 /* Record the current file and line. Called from emit_line_note. */
472 set_file_and_line_for_stmt (file, line)
476 /* If we're outputting an inline function, and we add a line note,
477 there may be no CFUN->STMT information. So, there's no need to
481 emit_filename = file;
486 /* Emit a no-op instruction. */
493 last_insn = get_last_insn ();
495 && (GET_CODE (last_insn) == CODE_LABEL
496 || (GET_CODE (last_insn) == NOTE
497 && prev_real_insn (last_insn) == 0)))
498 emit_insn (gen_nop ());
501 /* Return the rtx-label that corresponds to a LABEL_DECL,
502 creating it if necessary. */
508 if (TREE_CODE (label) != LABEL_DECL)
511 if (!DECL_RTL_SET_P (label))
512 SET_DECL_RTL (label, gen_label_rtx ());
514 return DECL_RTL (label);
518 /* Add an unconditional jump to LABEL as the next sequential instruction. */
524 do_pending_stack_adjust ();
525 emit_jump_insn (gen_jump (label));
529 /* Emit code to jump to the address
530 specified by the pointer expression EXP. */
533 expand_computed_goto (exp)
536 rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
538 #ifdef POINTERS_EXTEND_UNSIGNED
539 if (GET_MODE (x) != Pmode)
540 x = convert_memory_address (Pmode, x);
544 do_pending_stack_adjust ();
545 emit_indirect_jump (x);
547 current_function_has_computed_jump = 1;
550 /* Handle goto statements and the labels that they can go to. */
552 /* Specify the location in the RTL code of a label LABEL,
553 which is a LABEL_DECL tree node.
555 This is used for the kind of label that the user can jump to with a
556 goto statement, and for alternatives of a switch or case statement.
557 RTL labels generated for loops and conditionals don't go through here;
558 they are generated directly at the RTL level, by other functions below.
560 Note that this has nothing to do with defining label *names*.
561 Languages vary in how they do that and what that even means. */
567 struct label_chain *p;
569 do_pending_stack_adjust ();
570 emit_label (label_rtx (label));
571 if (DECL_NAME (label))
572 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
574 if (stack_block_stack != 0)
576 p = (struct label_chain *) ggc_alloc (sizeof (struct label_chain));
577 p->next = stack_block_stack->data.block.label_chain;
578 stack_block_stack->data.block.label_chain = p;
583 /* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
584 from nested functions. */
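/* A hedged example of the GNU C construct this supports (the helper
   names are illustrative):

	void f (void)
	{
	  __label__ failure;
	  void g (int i) { if (i < 0) goto failure; }
	  ...
	  g (x);
	  return;
	 failure:
	  handle_error ();
	}

   A front end would call declare_nonlocal_label for FAILURE so that
   the goto in the nested function G has a handler slot and a saved
   stack level to come back to in F.  */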
587 declare_nonlocal_label (label)
590 rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
592 nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
593 LABEL_PRESERVE_P (label_rtx (label)) = 1;
594 if (nonlocal_goto_handler_slots == 0)
596 emit_stack_save (SAVE_NONLOCAL,
597 &nonlocal_goto_stack_level,
598 PREV_INSN (tail_recursion_reentry));
600 nonlocal_goto_handler_slots
601 = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
604 /* Generate RTL code for a `goto' statement with target label LABEL.
605 LABEL should be a LABEL_DECL tree node that was or will later be
606 defined with `expand_label'. */
614 /* Check for a nonlocal goto to a containing function. */
615 context = decl_function_context (label);
616 if (context != 0 && context != current_function_decl)
618 struct function *p = find_function_data (context);
619 rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
620 rtx handler_slot, static_chain, save_area, insn;
623 /* Find the corresponding handler slot for this label. */
624 handler_slot = p->x_nonlocal_goto_handler_slots;
625 for (link = p->x_nonlocal_labels; TREE_VALUE (link) != label;
626 link = TREE_CHAIN (link))
627 handler_slot = XEXP (handler_slot, 1);
628 handler_slot = XEXP (handler_slot, 0);
630 p->has_nonlocal_label = 1;
631 current_function_has_nonlocal_goto = 1;
632 LABEL_REF_NONLOCAL_P (label_ref) = 1;
634 /* Copy the rtl for the slots so that they won't be shared in
635 case the virtual stack vars register gets instantiated differently
636 in the parent than in the child. */
638 static_chain = copy_to_reg (lookup_static_chain (label));
640 /* Get addr of containing function's current nonlocal goto handler,
641 which will do any cleanups and then jump to the label. */
642 handler_slot = copy_to_reg (replace_rtx (copy_rtx (handler_slot),
643 virtual_stack_vars_rtx,
646 /* Get addr of containing function's nonlocal save area. */
647 save_area = p->x_nonlocal_goto_stack_level;
649 save_area = replace_rtx (copy_rtx (save_area),
650 virtual_stack_vars_rtx, static_chain);
652 #if HAVE_nonlocal_goto
653 if (HAVE_nonlocal_goto)
654 emit_insn (gen_nonlocal_goto (static_chain, handler_slot,
655 save_area, label_ref));
659 /* Restore frame pointer for containing function.
660 This sets the actual hard register used for the frame pointer
661 to the location of the function's incoming static chain info.
662 The non-local goto handler will then adjust it to contain the
663 proper value and reload the argument pointer, if needed. */
664 emit_move_insn (hard_frame_pointer_rtx, static_chain);
665 emit_stack_restore (SAVE_NONLOCAL, save_area, NULL_RTX);
667 /* USE of hard_frame_pointer_rtx added for consistency;
668 not clear if really needed. */
669 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
670 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
671 emit_indirect_jump (handler_slot);
674 /* Search backwards to the jump insn and mark it as a non-local goto. */
676 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
678 if (GET_CODE (insn) == JUMP_INSN)
680 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
681 const0_rtx, REG_NOTES (insn));
684 else if (GET_CODE (insn) == CALL_INSN)
689 expand_goto_internal (label, label_rtx (label), NULL_RTX);
692 /* Generate RTL code for a `goto' statement with target label BODY.
693 LABEL should be a LABEL_REF.
694 LAST_INSN, if non-0, is the rtx we should consider as the last
695 insn emitted (for the purposes of cleaning up a return). */
698 expand_goto_internal (body, label, last_insn)
703 struct nesting *block;
706 if (GET_CODE (label) != CODE_LABEL)
709 /* If label has already been defined, we can tell now
710 whether and how we must alter the stack level. */
712 if (PREV_INSN (label) != 0)
714 /* Find the innermost pending block that contains the label.
715 (Check containment by comparing insn-uids.)
716 Then restore the outermost stack level within that block,
717 and do cleanups of all blocks contained in it. */
718 for (block = block_stack; block; block = block->next)
720 if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
722 if (block->data.block.stack_level != 0)
723 stack_level = block->data.block.stack_level;
724 /* Execute the cleanups for blocks we are exiting. */
725 if (block->data.block.cleanups != 0)
727 expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
728 do_pending_stack_adjust ();
734 /* Ensure stack adjust isn't done by emit_jump, as this
735 would clobber the stack pointer. This one should be
736 deleted as dead by flow. */
737 clear_pending_stack_adjust ();
738 do_pending_stack_adjust ();
740 /* Don't do this adjust if it's to the end label and this function
741 is to return with a depressed stack pointer. */
742 if (label == return_label
743 && (((TREE_CODE (TREE_TYPE (current_function_decl))
745 && (TYPE_RETURNS_STACK_DEPRESSED
746 (TREE_TYPE (current_function_decl))))))
749 emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
752 if (body != 0 && DECL_TOO_LATE (body))
753 error ("jump to `%s' invalidly jumps into binding contour",
754 IDENTIFIER_POINTER (DECL_NAME (body)));
756 /* Label not yet defined: may need to put this goto
757 on the fixup list. */
758 else if (! expand_fixup (body, label, last_insn))
760 /* No fixup needed. Record that the label is the target
761 of at least one goto that has no fixup. */
763 TREE_ADDRESSABLE (body) = 1;
769 /* Generate if necessary a fixup for a goto
770 whose target label in tree structure (if any) is TREE_LABEL
771 and whose target in rtl is RTL_LABEL.
773 If LAST_INSN is nonzero, we pretend that the jump appears
774 after insn LAST_INSN instead of at the current point in the insn stream.
776 The fixup will be used later to insert insns just before the goto.
777 Those insns will restore the stack level as appropriate for the
778 target label, and will (in the case of C++) also invoke any object
779 destructors which have to be invoked when we exit the scopes which
780 are exited by the goto.
782 Value is nonzero if a fixup is made. */
785 expand_fixup (tree_label, rtl_label, last_insn)
790 struct nesting *block, *end_block;
792 /* See if we can recognize which block the label will be output in.
793 This is possible in some very common cases.
794 If we succeed, set END_BLOCK to that block.
795 Otherwise, set it to 0. */
798 && (rtl_label == cond_stack->data.cond.endif_label
799 || rtl_label == cond_stack->data.cond.next_label))
800 end_block = cond_stack;
801 /* If we are in a loop, recognize certain labels which
802 are likely targets. This reduces the number of fixups
803 we need to create. */
805 && (rtl_label == loop_stack->data.loop.start_label
806 || rtl_label == loop_stack->data.loop.end_label
807 || rtl_label == loop_stack->data.loop.continue_label))
808 end_block = loop_stack;
812 /* Now set END_BLOCK to the binding level to which we will return. */
816 struct nesting *next_block = end_block->all;
819 /* First see if the END_BLOCK is inside the innermost binding level.
820 If so, then no cleanups or stack levels are relevant. */
821 while (next_block && next_block != block)
822 next_block = next_block->all;
827 /* Otherwise, set END_BLOCK to the innermost binding level
828 which is outside the relevant control-structure nesting. */
829 next_block = block_stack->next;
830 for (block = block_stack; block != end_block; block = block->all)
831 if (block == next_block)
832 next_block = next_block->next;
833 end_block = next_block;
836 /* Does any containing block have a stack level or cleanups?
837 If not, no fixup is needed, and that is the normal case
838 (the only case, for standard C). */
839 for (block = block_stack; block != end_block; block = block->next)
840 if (block->data.block.stack_level != 0
841 || block->data.block.cleanups != 0)
844 if (block != end_block)
846 /* Ok, a fixup is needed. Add a fixup to the list of such. */
847 struct goto_fixup *fixup
848 = (struct goto_fixup *) ggc_alloc (sizeof (struct goto_fixup));
849 /* In case an old stack level is restored, make sure that comes
850 after any pending stack adjust. */
851 /* ?? If the fixup isn't to come at the present position,
852 doing the stack adjust here isn't useful. Doing it with our
853 settings at that location isn't useful either. Let's hope
856 do_pending_stack_adjust ();
857 fixup->target = tree_label;
858 fixup->target_rtl = rtl_label;
860 /* Create a BLOCK node and a corresponding matched set of
861 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
862 this point. The notes will encapsulate any and all fixup
863 code which we might later insert at this point in the insn
864 stream. Also, the BLOCK node will be the parent (i.e. the
865 `SUPERBLOCK') of any other BLOCK nodes which we might create
866 later on when we are expanding the fixup code.
868 Note that optimization passes (including expand_end_loop)
869 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
873 rtx original_before_jump
874 = last_insn ? last_insn : get_last_insn ();
879 block = make_node (BLOCK);
880 TREE_USED (block) = 1;
882 if (!cfun->x_whole_function_mode_p)
883 (*lang_hooks.decls.insert_block) (block);
887 = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
888 BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
893 start = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
894 if (cfun->x_whole_function_mode_p)
895 NOTE_BLOCK (start) = block;
896 fixup->before_jump = emit_note (NULL, NOTE_INSN_DELETED);
897 end = emit_note (NULL, NOTE_INSN_BLOCK_END);
898 if (cfun->x_whole_function_mode_p)
899 NOTE_BLOCK (end) = block;
900 fixup->context = block;
902 emit_insn_after (start, original_before_jump);
905 fixup->block_start_count = current_block_start_count;
906 fixup->stack_level = 0;
907 fixup->cleanup_list_list
908 = ((block->data.block.outer_cleanups
909 || block->data.block.cleanups)
910 ? tree_cons (NULL_TREE, block->data.block.cleanups,
911 block->data.block.outer_cleanups)
913 fixup->next = goto_fixup_chain;
914 goto_fixup_chain = fixup;
920 /* Expand any needed fixups in the outermost binding level of the
921 function. FIRST_INSN is the first insn in the function. */
924 expand_fixups (first_insn)
927 fixup_gotos (NULL, NULL_RTX, NULL_TREE, first_insn, 0);
930 /* When exiting a binding contour, process all pending gotos requiring fixups.
931 THISBLOCK is the structure that describes the block being exited.
932 STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
933 CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
934 FIRST_INSN is the insn that began this contour.
936 Gotos that jump out of this contour must restore the
937 stack level and do the cleanups before actually jumping.
939 DONT_JUMP_IN nonzero means report an error if there is a jump into this
940 contour from before the beginning of the contour.
941 This is also done if STACK_LEVEL is nonzero. */
944 fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
945 struct nesting *thisblock;
951 struct goto_fixup *f, *prev;
953 /* F is the fixup we are considering; PREV is the previous one. */
954 /* We run this loop in two passes so that cleanups of exited blocks
955 are run first, and blocks that are exited are marked so
958 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
960 /* Test for a fixup that is inactive because it is already handled. */
961 if (f->before_jump == 0)
963 /* Delete inactive fixup from the chain, if that is easy to do. */
965 prev->next = f->next;
967 /* Has this fixup's target label been defined?
968 If so, we can finalize it. */
969 else if (PREV_INSN (f->target_rtl) != 0)
973 /* If this fixup jumped into this contour from before the beginning
974 of this contour, report an error. This code used to use
975 the first non-label insn after f->target_rtl, but that's
976 wrong since such insns can be added by things like put_var_into_stack
977 and can have INSN_UIDs that are out of the range of the block.
978 /* ??? Bug: this does not detect jumping in through intermediate
979 blocks that have stack levels or cleanups.
980 It detects only a problem with the innermost block
983 && (dont_jump_in || stack_level || cleanup_list)
984 && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
985 && INSN_UID (first_insn) > INSN_UID (f->before_jump)
986 && ! DECL_ERROR_ISSUED (f->target))
988 error_with_decl (f->target,
989 "label `%s' used before containing binding contour");
990 /* Prevent multiple errors for one label. */
991 DECL_ERROR_ISSUED (f->target) = 1;
994 /* We will expand the cleanups into a sequence of their own and
995 then later on we will attach this new sequence to the insn
996 stream just ahead of the actual jump insn. */
1000 /* Temporarily restore the lexical context where we will
1001 logically be inserting the fixup code. We do this for the
1002 sake of getting the debugging information right. */
1004 (*lang_hooks.decls.pushlevel) (0);
1005 (*lang_hooks.decls.set_block) (f->context);
1007 /* Expand the cleanups for blocks this jump exits. */
1008 if (f->cleanup_list_list)
1011 for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
1012 /* Marked elements correspond to blocks that have been closed.
1013 Do their cleanups. */
1014 if (TREE_ADDRESSABLE (lists)
1015 && TREE_VALUE (lists) != 0)
1017 expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
1018 /* Pop any pushes done in the cleanups,
1019 in case function is about to return. */
1020 do_pending_stack_adjust ();
1024 /* Restore stack level for the biggest contour that this
1025 jump jumps out of. */
1027 && ! (f->target_rtl == return_label
1028 && ((TREE_CODE (TREE_TYPE (current_function_decl))
1030 && (TYPE_RETURNS_STACK_DEPRESSED
1031 (TREE_TYPE (current_function_decl))))))
1032 emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);
1034 /* Finish up the sequence containing the insns which implement the
1035 necessary cleanups, and then attach that whole sequence to the
1036 insn stream just ahead of the actual jump insn. Attaching it
1037 at that point ensures that any cleanups which are in fact
1038 implicit C++ object destructions (which must be executed upon
1039 leaving the block) appear (to the debugger) to be taking place
1040 in an area of the generated code where the object(s) being
1041 destructed are still "in scope". */
1043 cleanup_insns = get_insns ();
1044 (*lang_hooks.decls.poplevel) (1, 0, 0);
1047 emit_insn_after (cleanup_insns, f->before_jump);
1053 /* For any still-undefined labels, do the cleanups for this block now.
1054 We must do this now since items in the cleanup list may go out
1055 of scope when the block ends. */
1056 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1057 if (f->before_jump != 0
1058 && PREV_INSN (f->target_rtl) == 0
1059 /* Label has still not appeared. If we are exiting a block with
1060 a stack level to restore, that started before the fixup,
1061 mark this stack level as needing restoration
1062 when the fixup is later finalized. */
1064 /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
1065 means the label is undefined. That's erroneous, but possible. */
1066 && (thisblock->data.block.block_start_count
1067 <= f->block_start_count))
1069 tree lists = f->cleanup_list_list;
1072 for (; lists; lists = TREE_CHAIN (lists))
1073 /* If the following elt. corresponds to our containing block
1074 then the elt. must be for this block. */
1075 if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
1078 (*lang_hooks.decls.pushlevel) (0);
1079 (*lang_hooks.decls.set_block) (f->context);
1080 expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
1081 do_pending_stack_adjust ();
1082 cleanup_insns = get_insns ();
1083 (*lang_hooks.decls.poplevel) (1, 0, 0);
1085 if (cleanup_insns != 0)
1087 = emit_insn_after (cleanup_insns, f->before_jump);
1089 f->cleanup_list_list = TREE_CHAIN (lists);
1093 f->stack_level = stack_level;
1097 /* Return the number of times character C occurs in string S. */
1099 n_occurrences (c, s)
1109 /* Generate RTL for an asm statement (explicit assembler code).
1110 BODY is a STRING_CST node containing the assembler code text,
1111 or an ADDR_EXPR containing a STRING_CST. */
1117 if (TREE_CODE (body) == ADDR_EXPR)
1118 body = TREE_OPERAND (body, 0);
1120 emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
1121 TREE_STRING_POINTER (body)));
1125 /* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
1126 OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
1127 inputs and NOUTPUTS outputs to this extended-asm. Upon return,
1128 *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
1129 memory operand. Similarly, *ALLOWS_REG will be TRUE iff the
1130 constraint allows the use of a register operand. And, *IS_INOUT
1131 will be true if the operand is read-write, i.e., if it is used as
1132 an input as well as an output. If *CONSTRAINT_P is not in
1133 canonical form, it will be made canonical. (Note that `+' will be
1134 replaced with `=' as part of this process.)
1136 Returns TRUE if all went well; FALSE if an error occurred. */
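/* Hedged examples of the canonicalization performed here: a constraint
   written "+r" comes back as "=r" with *IS_INOUT set, and one written
   "r=" comes back as "=r" after a warning that the `=' was not at the
   beginning.  */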
1139 parse_output_constraint (constraint_p, operand_num, ninputs, noutputs,
1140 allows_mem, allows_reg, is_inout)
1141 const char **constraint_p;
1149 const char *constraint = *constraint_p;
1152 /* Assume the constraint doesn't allow the use of either a register
1154 *allows_mem = false;
1155 *allows_reg = false;
1157 /* Allow the `=' or `+' to not be at the beginning of the string,
1158 since it wasn't explicitly documented that way, and there is a
1159 large body of code that puts it last. Swap the character to
1160 the front, so as not to uglify any place else. */
1161 p = strchr (constraint, '=');
1163 p = strchr (constraint, '+');
1165 /* If the string doesn't contain an `=', issue an error
1169 error ("output operand constraint lacks `='");
1173 /* If the constraint begins with `+', then the operand is both read
1174 from and written to. */
1175 *is_inout = (*p == '+');
1177 /* Canonicalize the output constraint so that it begins with `='. */
1178 if (p != constraint || *is_inout)
1181 size_t c_len = strlen (constraint);
1183 if (p != constraint)
1184 warning ("output constraint `%c' for operand %d is not at the beginning",
1187 /* Make a copy of the constraint. */
1188 buf = alloca (c_len + 1);
1189 strcpy (buf, constraint);
1190 /* Swap the first character and the `=' or `+'. */
1191 buf[p - constraint] = buf[0];
1192 /* Make sure the first character is an `='. (Until we do this,
1193 it might be a `+'.) */
1195 /* Replace the constraint with the canonicalized string. */
1196 *constraint_p = ggc_alloc_string (buf, c_len);
1197 constraint = *constraint_p;
1200 /* Loop through the constraint string. */
1201 for (p = constraint + 1; *p; ++p)
1206 error ("operand constraint contains incorrectly positioned '+' or '='");
1210 if (operand_num + 1 == ninputs + noutputs)
1212 error ("`%%' constraint used with last operand");
1217 case 'V': case 'm': case 'o':
1221 case '?': case '!': case '*': case '&': case '#':
1222 case 'E': case 'F': case 'G': case 'H':
1223 case 's': case 'i': case 'n':
1224 case 'I': case 'J': case 'K': case 'L': case 'M':
1225 case 'N': case 'O': case 'P': case ',':
1228 case '0': case '1': case '2': case '3': case '4':
1229 case '5': case '6': case '7': case '8': case '9':
1231 error ("matching constraint not valid in output operand");
1235 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1236 excepting those that expand_call created. So match memory
1253 if (REG_CLASS_FROM_LETTER (*p) != NO_REGS)
1255 #ifdef EXTRA_CONSTRAINT
1256 else if (EXTRA_ADDRESS_CONSTRAINT (*p))
1258 else if (EXTRA_MEMORY_CONSTRAINT (*p))
1262 /* Otherwise we can't assume anything about the nature of
1263 the constraint except that it isn't purely registers.
1264 Treat it like "g" and hope for the best. */
1275 /* Similar, but for input constraints. */
1278 parse_input_constraint (constraint_p, input_num, ninputs, noutputs, ninout,
1279 constraints, allows_mem, allows_reg)
1280 const char **constraint_p;
1285 const char * const * constraints;
1289 const char *constraint = *constraint_p;
1290 const char *orig_constraint = constraint;
1291 size_t c_len = strlen (constraint);
1294 /* Assume the constraint doesn't allow the use of either
1295 a register or memory. */
1296 *allows_mem = false;
1297 *allows_reg = false;
1299 /* Make sure constraint has neither `=', `+', nor '&'. */
1301 for (j = 0; j < c_len; j++)
1302 switch (constraint[j])
1304 case '+': case '=': case '&':
1305 if (constraint == orig_constraint)
1307 error ("input operand constraint contains `%c'", constraint[j]);
1313 if (constraint == orig_constraint
1314 && input_num + 1 == ninputs - ninout)
1316 error ("`%%' constraint used with last operand");
1321 case 'V': case 'm': case 'o':
1326 case '?': case '!': case '*': case '#':
1327 case 'E': case 'F': case 'G': case 'H':
1328 case 's': case 'i': case 'n':
1329 case 'I': case 'J': case 'K': case 'L': case 'M':
1330 case 'N': case 'O': case 'P': case ',':
1333 /* Whether or not a numeric constraint allows a register is
1334 decided by the matching constraint, and so there is no need
1335 to do anything special with them. We must handle them in
1336 the default case, so that we don't unnecessarily force
1337 operands to memory. */
1338 case '0': case '1': case '2': case '3': case '4':
1339 case '5': case '6': case '7': case '8': case '9':
1342 unsigned long match;
1344 match = strtoul (constraint + j, &end, 10);
1345 if (match >= (unsigned long) noutputs)
1347 error ("matching constraint references invalid operand number");
1351 /* Try and find the real constraint for this dup. Only do this
1352 if the matching constraint is the only alternative. */
1354 && (j == 0 || (j == 1 && constraint[0] == '%')))
1356 constraint = constraints[match];
1357 *constraint_p = constraint;
1358 c_len = strlen (constraint);
1363 j = end - constraint;
1377 if (! ISALPHA (constraint[j]))
1379 error ("invalid punctuation `%c' in constraint", constraint[j]);
1382 if (REG_CLASS_FROM_LETTER (constraint[j]) != NO_REGS)
1384 #ifdef EXTRA_CONSTRAINT
1385 else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j]))
1387 else if (EXTRA_MEMORY_CONSTRAINT (constraint[j]))
1391 /* Otherwise we can't assume anything about the nature of
1392 the constraint except that it isn't purely registers.
1393 Treat it like "g" and hope for the best. */
1404 /* Check for overlap between registers marked in CLOBBERED_REGS and
1405 anything inappropriate in DECL. Emit error and return TRUE for error,
1409 decl_conflicts_with_clobbers_p (decl, clobbered_regs)
1411 const HARD_REG_SET clobbered_regs;
1413 /* Conflicts between asm-declared register variables and the clobber
1414 list are not allowed. */
1415 if ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
1416 && DECL_REGISTER (decl)
1417 && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
1419 rtx reg = DECL_RTL (decl);
1422 for (regno = REGNO (reg);
1423 regno < (REGNO (reg)
1424 + HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg)));
1426 if (TEST_HARD_REG_BIT (clobbered_regs, regno))
1428 error ("asm-specifier for variable `%s' conflicts with asm clobber list",
1429 IDENTIFIER_POINTER (DECL_NAME (decl)));
1431 /* Reset registerness to stop multiple errors emitted for a
1433 DECL_REGISTER (decl) = 0;
1440 /* Generate RTL for an asm statement with arguments.
1441 STRING is the instruction template.
1442 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
1443 Each output or input has an expression in the TREE_VALUE and
1444 a tree list in TREE_PURPOSE, which in turn contains a constraint
1445 name in TREE_VALUE (or NULL_TREE) and a constraint string
1447 CLOBBERS is a list of STRING_CST nodes each naming a hard register
1448 that is clobbered by this insn.
1450 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
1451 Some elements of OUTPUTS may be replaced with trees representing temporary
1452 values. The caller should copy those temporary values to the originally
1455 VOL nonzero means the insn is volatile; don't optimize it. */
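/* A hedged example of the source construct this expands (the mnemonic
   is a placeholder, not a real instruction):

	asm volatile ("some-insn %1, %0" : "=r" (x) : "r" (y) : "cc");

   Here STRING is the template, OUTPUTS describes X with constraint
   "=r", INPUTS describes Y with constraint "r", CLOBBERS holds the
   STRING_CST "cc", and VOL is nonzero because of the `volatile'
   keyword.  */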
1458 expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
1459 tree string, outputs, inputs, clobbers;
1461 const char *filename;
1464 rtvec argvec, constraintvec;
1466 int ninputs = list_length (inputs);
1467 int noutputs = list_length (outputs);
1470 HARD_REG_SET clobbered_regs;
1471 int clobber_conflict_found = 0;
1474 /* Vector of RTX's of evaluated output operands. */
1475 rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1476 int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
1477 rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1478 enum machine_mode *inout_mode
1479 = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
1480 const char **constraints
1481 = (const char **) alloca ((noutputs + ninputs) * sizeof (const char *));
1482 /* The insn we have emitted. */
1484 int old_generating_concat_p = generating_concat_p;
1486 /* An ASM with no outputs needs to be treated as volatile, for now. */
1490 if (! check_operand_nalternatives (outputs, inputs))
1493 if (! check_unique_operand_names (outputs, inputs))
1496 string = resolve_operand_names (string, outputs, inputs, constraints);
1498 #ifdef MD_ASM_CLOBBERS
1499 /* Sometimes we wish to automatically clobber registers across an asm.
1500 Case in point is when the i386 backend moved from cc0 to a hard reg --
1501 maintaining source-level compatibility means automatically clobbering
1502 the flags register. */
1503 MD_ASM_CLOBBERS (clobbers);
1506 /* Count the number of meaningful clobbered registers, ignoring what
1507 we would ignore later. */
1509 CLEAR_HARD_REG_SET (clobbered_regs);
1510 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1512 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1514 i = decode_reg_name (regname);
1515 if (i >= 0 || i == -4)
1518 error ("unknown register name `%s' in `asm'", regname);
1520 /* Mark clobbered registers. */
1522 SET_HARD_REG_BIT (clobbered_regs, i);
1527 /* First pass over inputs and outputs checks validity and sets
1528 mark_addressable if needed. */
1531 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1533 tree val = TREE_VALUE (tail);
1534 tree type = TREE_TYPE (val);
1535 const char *constraint;
1540 /* If there's an erroneous arg, emit no insn. */
1541 if (type == error_mark_node)
1544 /* Try to parse the output constraint. If that fails, there's
1545 no point in going further. */
1546 constraint = constraints[i];
1547 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
1548 &allows_mem, &allows_reg, &is_inout))
1555 && GET_CODE (DECL_RTL (val)) == REG
1556 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
1557 (*lang_hooks.mark_addressable) (val);
1564 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1566 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1570 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
1572 bool allows_reg, allows_mem;
1573 const char *constraint;
1575 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
1576 would get VOIDmode and that could cause a crash in reload. */
1577 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1580 constraint = constraints[i + noutputs];
1581 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1582 constraints, &allows_mem, &allows_reg))
1585 if (! allows_reg && allows_mem)
1586 (*lang_hooks.mark_addressable) (TREE_VALUE (tail));
1589 /* Second pass evaluates arguments. */
1592 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1594 tree val = TREE_VALUE (tail);
1595 tree type = TREE_TYPE (val);
1600 if (!parse_output_constraint (&constraints[i], i, ninputs,
1601 noutputs, &allows_mem, &allows_reg,
1605 /* If an output operand is not a decl or indirect ref and our constraint
1606 allows a register, make a temporary to act as an intermediate.
1607 Make the asm insn write into that, then our caller will copy it to
1608 the real output operand. Likewise for promoted variables. */
1610 generating_concat_p = 0;
1612 real_output_rtx[i] = NULL_RTX;
1613 if ((TREE_CODE (val) == INDIRECT_REF
1616 && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
1617 && ! (GET_CODE (DECL_RTL (val)) == REG
1618 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
1622 output_rtx[i] = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
1624 if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
1625 error ("output number %d not directly addressable", i);
1626 if ((! allows_mem && GET_CODE (output_rtx[i]) == MEM)
1627 || GET_CODE (output_rtx[i]) == CONCAT)
1629 real_output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1630 output_rtx[i] = gen_reg_rtx (GET_MODE (output_rtx[i]));
1632 emit_move_insn (output_rtx[i], real_output_rtx[i]);
1637 output_rtx[i] = assign_temp (type, 0, 0, 1);
1638 TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
1641 generating_concat_p = old_generating_concat_p;
1645 inout_mode[ninout] = TYPE_MODE (type);
1646 inout_opnum[ninout++] = i;
1649 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1650 clobber_conflict_found = 1;
1653 /* Make vectors for the expression-rtx, constraint strings,
1654 and named operands. */
1656 argvec = rtvec_alloc (ninputs);
1657 constraintvec = rtvec_alloc (ninputs);
1659 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
1660 : GET_MODE (output_rtx[0])),
1661 TREE_STRING_POINTER (string),
1662 empty_string, 0, argvec, constraintvec,
1665 MEM_VOLATILE_P (body) = vol;
1667 /* Eval the inputs and put them into ARGVEC.
1668 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1670 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
1672 bool allows_reg, allows_mem;
1673 const char *constraint;
1677 constraint = constraints[i + noutputs];
1678 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1679 constraints, &allows_mem, &allows_reg))
1682 generating_concat_p = 0;
1684 val = TREE_VALUE (tail);
1685 type = TREE_TYPE (val);
1686 op = expand_expr (val, NULL_RTX, VOIDmode, 0);
1688 /* Never pass a CONCAT to an ASM. */
1689 if (GET_CODE (op) == CONCAT)
1690 op = force_reg (GET_MODE (op), op);
1692 if (asm_operand_ok (op, constraint) <= 0)
1695 op = force_reg (TYPE_MODE (type), op);
1696 else if (!allows_mem)
1697 warning ("asm operand %d probably doesn't match constraints",
1699 else if (CONSTANT_P (op))
1700 op = force_const_mem (TYPE_MODE (type), op);
1701 else if (GET_CODE (op) == REG
1702 || GET_CODE (op) == SUBREG
1703 || GET_CODE (op) == ADDRESSOF
1704 || GET_CODE (op) == CONCAT)
1706 tree qual_type = build_qualified_type (type,
1708 | TYPE_QUAL_CONST));
1709 rtx memloc = assign_temp (qual_type, 1, 1, 1);
1711 emit_move_insn (memloc, op);
1715 else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
1717 /* We won't recognize volatile memory as an available
1718 memory_operand at this point. Ignore it. */
1720 else if (queued_subexp_p (op))
1723 /* ??? Leave this only until we have experience with what
1724 happens in combine and elsewhere when constraints are
1726 warning ("asm operand %d probably doesn't match constraints",
1730 generating_concat_p = old_generating_concat_p;
1731 ASM_OPERANDS_INPUT (body, i) = op;
1733 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
1734 = gen_rtx_ASM_INPUT (TYPE_MODE (type), constraints[i + noutputs]);
1736 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1737 clobber_conflict_found = 1;
1740 /* Protect all the operands from the queue now that they have all been
1743 generating_concat_p = 0;
1745 for (i = 0; i < ninputs - ninout; i++)
1746 ASM_OPERANDS_INPUT (body, i)
1747 = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
1749 for (i = 0; i < noutputs; i++)
1750 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1752 /* For in-out operands, copy output rtx to input rtx. */
1753 for (i = 0; i < ninout; i++)
1755 int j = inout_opnum[i];
1758 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
1761 sprintf (buffer, "%d", j);
1762 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
1763 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_alloc_string (buffer, -1));
1766 generating_concat_p = old_generating_concat_p;
1768 /* Now, for each output, construct an rtx
1769 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
1770 ARGVEC CONSTRAINTS OPNAMES))
1771 If there is more than one, put them inside a PARALLEL. */
1773 if (noutputs == 1 && nclobbers == 0)
1775 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
1776 insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1779 else if (noutputs == 0 && nclobbers == 0)
1781 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1782 insn = emit_insn (body);
1793 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1795 /* For each output operand, store a SET. */
1796 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1798 XVECEXP (body, 0, i)
1799 = gen_rtx_SET (VOIDmode,
1801 gen_rtx_ASM_OPERANDS
1802 (GET_MODE (output_rtx[i]),
1803 TREE_STRING_POINTER (string),
1804 constraints[i], i, argvec, constraintvec,
1807 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1810 /* If there are no outputs (but there are some clobbers)
1811 store the bare ASM_OPERANDS into the PARALLEL. */
1814 XVECEXP (body, 0, i++) = obody;
1816 /* Store (clobber REG) for each clobbered register specified. */
1818 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1820 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1821 int j = decode_reg_name (regname);
1826 if (j == -3) /* `cc', which is not a register */
1829 if (j == -4) /* `memory', don't cache memory across asm */
1831 XVECEXP (body, 0, i++)
1832 = gen_rtx_CLOBBER (VOIDmode,
1835 gen_rtx_SCRATCH (VOIDmode)));
1839 /* Ignore unknown register, error already signaled. */
1843 /* Use QImode since that's guaranteed to clobber just one reg. */
1844 clobbered_reg = gen_rtx_REG (QImode, j);
1846 /* Do a sanity check for any overlap between the clobbers and, respectively,
1847 the inputs and outputs that has not been handled. Such overlap
1848 should have been detected and reported above. */
1849 if (!clobber_conflict_found)
1853 /* We test the old body (obody) contents to avoid tripping
1854 over the under-construction body. */
1855 for (opno = 0; opno < noutputs; opno++)
1856 if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1857 internal_error ("asm clobber conflict with output operand");
1859 for (opno = 0; opno < ninputs - ninout; opno++)
1860 if (reg_overlap_mentioned_p (clobbered_reg,
1861 ASM_OPERANDS_INPUT (obody, opno)))
1862 internal_error ("asm clobber conflict with input operand");
1865 XVECEXP (body, 0, i++)
1866 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1869 insn = emit_insn (body);
1872 /* For any outputs that needed reloading into registers, spill them
1873 back to where they belong. */
1874 for (i = 0; i < noutputs; ++i)
1875 if (real_output_rtx[i])
1876 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1881 /* A subroutine of expand_asm_operands. Check that all operands have
1882 the same number of alternatives. Return true if so. */
1885 check_operand_nalternatives (outputs, inputs)
1886 tree outputs, inputs;
1888 if (outputs || inputs)
1890 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1892 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1895 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1897 error ("too many alternatives in `asm'");
1904 const char *constraint
1905 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1907 if (n_occurrences (',', constraint) != nalternatives)
1909 error ("operand constraints for `asm' differ in number of alternatives");
1913 if (TREE_CHAIN (tmp))
1914 tmp = TREE_CHAIN (tmp);
1916 tmp = next, next = 0;
1923 /* A subroutine of expand_asm_operands. Check that all operand names
1924 are unique. Return true if so. We rely on the fact that these names
1925 are identifiers, and so have been canonicalized by get_identifier,
1926 so all we need are pointer comparisons. */
1929 check_unique_operand_names (outputs, inputs)
1930 tree outputs, inputs;
1934 for (i = outputs; i ; i = TREE_CHAIN (i))
1936 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1940 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1941 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1945 for (i = inputs; i ; i = TREE_CHAIN (i))
1947 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1951 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1952 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1954 for (j = outputs; j ; j = TREE_CHAIN (j))
1955 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1962 error ("duplicate asm operand name '%s'",
1963 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
1967 /* A subroutine of expand_asm_operands. Resolve the names of the operands
1968 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1969 STRING and in the constraints to those numbers. */
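/* A hedged example of the substitution: given

	asm ("add %[src], %[dst]" : [dst] "=r" (x) : "[dst]" (x), [src] "r" (y));

   the template becomes "add %2, %0" (outputs are numbered before
   inputs) and the input constraint "[dst]" becomes "0"; the collected
   constraint strings are handed back through PCONSTRAINTS.  */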
1972 resolve_operand_names (string, outputs, inputs, pconstraints)
1974 tree outputs, inputs;
1975 const char **pconstraints;
1977 char *buffer = xstrdup (TREE_STRING_POINTER (string));
1981 /* Assume that we will not need extra space to perform the substitution.
1982 This is because we get to remove '[' and ']', which means we cannot have
1983 a problem until we have more than 999 operands. */
1986 while ((p = strchr (p, '%')) != NULL)
1990 else if (ISALPHA (p[1]) && p[2] == '[')
1998 p = resolve_operand_name_1 (p, outputs, inputs);
2001 string = build_string (strlen (buffer), buffer);
2004 /* Collect output constraints here because it's convenient.
2005 There should be no named operands here; this is verified
2006 in expand_asm_operands. */
2007 for (t = outputs; t ; t = TREE_CHAIN (t), pconstraints++)
2008 *pconstraints = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2010 /* Substitute [<name>] in input constraint strings. */
2011 for (t = inputs; t ; t = TREE_CHAIN (t), pconstraints++)
2013 const char *c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2014 if (strchr (c, '[') == NULL)
2018 p = buffer = xstrdup (c);
2019 while ((p = strchr (p, '[')) != NULL)
2020 p = resolve_operand_name_1 (p, outputs, inputs);
2022 *pconstraints = ggc_alloc_string (buffer, -1);
2030 /* A subroutine of resolve_operand_names. P points to the '[' for a
2031 potential named operand of the form [<name>]. In place, replace
2032 the name and brackets with a number. Return a pointer to the
2033 balance of the string after substitution. */
2036 resolve_operand_name_1 (p, outputs, inputs)
2038 tree outputs, inputs;
2045 /* Collect the operand name. */
2046 q = strchr (p, ']');
2049 error ("missing close brace for named operand");
2050 return strchr (p, '\0');
2054 /* Resolve the name to a number. */
2055 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
2057 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2060 const char *c = TREE_STRING_POINTER (name);
2061 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2065 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
2067 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2070 const char *c = TREE_STRING_POINTER (name);
2071 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2077 error ("undefined named operand '%s'", p + 1);
2081 /* Replace the name with the number. Unfortunately, not all libraries
2082 get the return value of sprintf correct, so search for the end of the
2083 generated string by hand. */
2084 sprintf (p, "%d", op);
2085 p = strchr (p, '\0');
2087 /* Verify the no extra buffer space assumption. */
2091 /* Shift the rest of the buffer down to fill the gap. */
2092 memmove (p, q + 1, strlen (q + 1) + 1);
2097 /* Generate RTL to evaluate the expression EXP
2098 and remember it in case this is the VALUE in a ({... VALUE; }) constr.
2099 Provided just for backward-compatibility. expand_expr_stmt_value()
2100 should be used for new code. */
2103 expand_expr_stmt (exp)
2106 expand_expr_stmt_value (exp, -1, 1);
2109 /* Generate RTL to evaluate the expression EXP. WANT_VALUE tells
2110 whether to (1) save the value of the expression, (0) discard it or
2111 (-1) use expr_stmts_for_value to tell. The use of -1 is
2112 deprecated, and retained only for backward compatibility. */
2115 expand_expr_stmt_value (exp, want_value, maybe_last)
2117 int want_value, maybe_last;
2122 if (want_value == -1)
2123 want_value = expr_stmts_for_value != 0;
2125 /* If -W, warn about statements with no side effects,
2126 except for an explicit cast to void (e.g. for assert()), and
2127 except for last statement in ({...}) where they may be useful. */
2129 && (expr_stmts_for_value == 0 || ! maybe_last)
2130 && exp != error_mark_node)
2132 if (! TREE_SIDE_EFFECTS (exp))
2134 if ((extra_warnings || warn_unused_value)
2135 && !(TREE_CODE (exp) == CONVERT_EXPR
2136 && VOID_TYPE_P (TREE_TYPE (exp))))
2137 warning_with_file_and_line (emit_filename, emit_lineno,
2138 "statement with no effect");
2140 else if (warn_unused_value)
2141 warn_if_unused_value (exp);
2144 /* If EXP is of function type and we are expanding statements for
2145 value, convert it to pointer-to-function. */
2146 if (want_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
2147 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
2149 /* The call to `expand_expr' could cause last_expr_type and
2150 last_expr_value to get reset. Therefore, we set last_expr_value
2151 and last_expr_type *after* calling expand_expr. */
2152 value = expand_expr (exp, want_value ? NULL_RTX : const0_rtx,
2154 type = TREE_TYPE (exp);
2156 /* If all we do is reference a volatile value in memory,
2157 copy it to a register to be sure it is actually touched. */
2158 if (value && GET_CODE (value) == MEM && TREE_THIS_VOLATILE (exp))
2160 if (TYPE_MODE (type) == VOIDmode)
2162 else if (TYPE_MODE (type) != BLKmode)
2163 value = copy_to_reg (value);
2166 rtx lab = gen_label_rtx ();
2168 /* Compare the value with itself to reference it. */
2169 emit_cmp_and_jump_insns (value, value, EQ,
2170 expand_expr (TYPE_SIZE (type),
2171 NULL_RTX, VOIDmode, 0),
2177 /* If this expression is part of a ({...}) and is in memory, we may have
2178 to preserve temporaries. */
2179 preserve_temp_slots (value);
2181 /* Free any temporaries used to evaluate this expression. Any temporary
2182 used as a result of this expression will already have been preserved
2188 last_expr_value = value;
2189 last_expr_type = type;
2195 /* Warn if EXP contains any computations whose results are not used.
2196 Return 1 if a warning is printed; 0 otherwise. */
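/* Illustrative cases (`foo' and `x' stand for arbitrary declarations,
   not anything in this file):

       (foo (), x + 1);       warns: value computed is not used
       (foo (), 0);           no warning: constant second operand
       (void) (foo () + 1);   no warning: explicit cast to void
       ++x;                   no warning: the increment is the effect  */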
2199 warn_if_unused_value (exp)
2202 if (TREE_USED (exp))
2205 /* Don't warn about void constructs. This includes casting to void,
2206 void function calls, and statement expressions with a final cast
2208 if (VOID_TYPE_P (TREE_TYPE (exp)))
2211 switch (TREE_CODE (exp))
2213 case PREINCREMENT_EXPR:
2214 case POSTINCREMENT_EXPR:
2215 case PREDECREMENT_EXPR:
2216 case POSTDECREMENT_EXPR:
2221 case METHOD_CALL_EXPR:
2223 case TRY_CATCH_EXPR:
2224 case WITH_CLEANUP_EXPR:
2229 /* For a binding, warn if no side effect within it. */
2230 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2233 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2235 case TRUTH_ORIF_EXPR:
2236 case TRUTH_ANDIF_EXPR:
2237 /* In && or ||, warn if 2nd operand has no side effect. */
2238 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2241 if (TREE_NO_UNUSED_WARNING (exp))
2243 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
2245 /* Let people do `(foo (), 0)' without a warning. */
2246 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2248 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2252 case NON_LVALUE_EXPR:
2253 /* Don't warn about conversions not explicit in the user's program. */
2254 if (TREE_NO_UNUSED_WARNING (exp))
2256 /* Assignment to a cast usually results in a cast of a modify.
2257 Don't complain about that. There can be an arbitrary number of
2258 casts before the modify, so we must loop until we find the first
2259 non-cast expression and then test to see if that is a modify. */
2261 tree tem = TREE_OPERAND (exp, 0);
2263 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2264 tem = TREE_OPERAND (tem, 0);
2266 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2267 || TREE_CODE (tem) == CALL_EXPR)
2273 /* Don't warn about automatic dereferencing of references, since
2274 the user cannot control it. */
2275 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2276 return warn_if_unused_value (TREE_OPERAND (exp, 0));
2280 /* Referencing a volatile value is a side effect, so don't warn. */
2282 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2283 && TREE_THIS_VOLATILE (exp))
2286 /* If this is an expression which has no operands, there is no value
2287 to be unused. There are no such language-independent codes,
2288 but front ends may define such. */
2289 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2290 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
2294 /* If this is an expression with side effects, don't warn. */
2295 if (TREE_SIDE_EFFECTS (exp))
2298 warning_with_file_and_line (emit_filename, emit_lineno,
2299 "value computed is not used");
2304 /* Clear out the memory of the last expression evaluated. */
2309 last_expr_type = NULL_TREE;
2310 last_expr_value = NULL_RTX;
2313 /* Begin a statement-expression, i.e., a series of statements which
2314 may return a value. Return the RTL_EXPR for this statement expr.
2315 The caller must save that value and pass it to
2316 expand_end_stmt_expr. If HAS_SCOPE is nonzero, temporaries created
2317 in the statement-expression are deallocated at the end of the
2321 expand_start_stmt_expr (has_scope)
2326 /* Make the RTL_EXPR node temporary, not momentary,
2327 so that rtl_expr_chain doesn't become garbage. */
2328 t = make_node (RTL_EXPR);
2329 do_pending_stack_adjust ();
2331 start_sequence_for_rtl_expr (t);
2335 expr_stmts_for_value++;
2339 /* Restore the previous state at the end of a statement that returns a value.
2340 Returns a tree node representing the statement's value and the
2341 insns to compute the value.
2343 The nodes of that expression have been freed by now, so we cannot use them.
2344 But we don't want to do that anyway; the expression has already been
2345 evaluated and now we just want to use the value. So generate a RTL_EXPR
2346 with the proper type and RTL value.
2348 If the last substatement was not an expression,
2349 return something with type `void'. */
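/* Illustrative sketch of the intended pairing: for the GNU C statement
   expression

       y = ({ int t = f (); t + 1; });

   a front end calls expand_start_stmt_expr before expanding the inner
   statements and hands the RTL_EXPR it returned to expand_end_stmt_expr
   afterwards; the value of the last statement, `t + 1', becomes the
   value of the whole construct.  */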
2352 expand_end_stmt_expr (t)
2357 if (! last_expr_value || ! last_expr_type)
2359 last_expr_value = const0_rtx;
2360 last_expr_type = void_type_node;
2362 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2363 /* Remove any possible QUEUED. */
2364 last_expr_value = protect_from_queue (last_expr_value, 0);
2368 TREE_TYPE (t) = last_expr_type;
2369 RTL_EXPR_RTL (t) = last_expr_value;
2370 RTL_EXPR_SEQUENCE (t) = get_insns ();
2372 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2376 /* Don't consider deleting this expr or containing exprs at tree level. */
2377 TREE_SIDE_EFFECTS (t) = 1;
2378 /* Propagate volatility of the actual RTL expr. */
2379 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2382 expr_stmts_for_value--;
2387 /* Generate RTL for the start of an if-then. COND is the expression
2388 whose truth should be tested.
2390 If EXITFLAG is nonzero, this conditional is visible to
2391 `exit_something'. */
2394 expand_start_cond (cond, exitflag)
2398 struct nesting *thiscond = ALLOC_NESTING ();
2400 /* Make an entry on cond_stack for the cond we are entering. */
2402 thiscond->desc = COND_NESTING;
2403 thiscond->next = cond_stack;
2404 thiscond->all = nesting_stack;
2405 thiscond->depth = ++nesting_depth;
2406 thiscond->data.cond.next_label = gen_label_rtx ();
2407 /* Before we encounter an `else', we don't need a separate exit label
2408 unless there are supposed to be exit statements
2409 to exit this conditional. */
2410 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2411 thiscond->data.cond.endif_label = thiscond->exit_label;
2412 cond_stack = thiscond;
2413 nesting_stack = thiscond;
2415 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2418 /* Generate RTL between then-clause and the elseif-clause
2419 of an if-then-elseif-.... */
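/* Illustrative call sequence (a sketch only; a front end may instead
   nest plain expand_start_cond calls) for

       if (a) s1; else if (b) s2; else s3;

       expand_start_cond (a, 0);    ... expand s1 ...
       expand_start_elseif (b);     ... expand s2 ...
       expand_start_else ();        ... expand s3 ...
       expand_end_cond ();  */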
2422 expand_start_elseif (cond)
2425 if (cond_stack->data.cond.endif_label == 0)
2426 cond_stack->data.cond.endif_label = gen_label_rtx ();
2427 emit_jump (cond_stack->data.cond.endif_label);
2428 emit_label (cond_stack->data.cond.next_label);
2429 cond_stack->data.cond.next_label = gen_label_rtx ();
2430 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2433 /* Generate RTL between the then-clause and the else-clause
2434 of an if-then-else. */
2437 expand_start_else ()
2439 if (cond_stack->data.cond.endif_label == 0)
2440 cond_stack->data.cond.endif_label = gen_label_rtx ();
2442 emit_jump (cond_stack->data.cond.endif_label);
2443 emit_label (cond_stack->data.cond.next_label);
2444 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2447 /* After calling expand_start_else, turn this "else" into an "else if"
2448 by providing another condition. */
2451 expand_elseif (cond)
2454 cond_stack->data.cond.next_label = gen_label_rtx ();
2455 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2458 /* Generate RTL for the end of an if-then.
2459 Pop the record for it off of cond_stack. */
2464 struct nesting *thiscond = cond_stack;
2466 do_pending_stack_adjust ();
2467 if (thiscond->data.cond.next_label)
2468 emit_label (thiscond->data.cond.next_label);
2469 if (thiscond->data.cond.endif_label)
2470 emit_label (thiscond->data.cond.endif_label);
2472 POPSTACK (cond_stack);
2476 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2477 loop should be exited by `exit_something'. This is a loop for which
2478 `expand_continue_loop' will jump to the top of the loop.
2480 Make an entry on loop_stack to record the labels associated with
2484 expand_start_loop (exit_flag)
2487 struct nesting *thisloop = ALLOC_NESTING ();
2489 /* Make an entry on loop_stack for the loop we are entering. */
2491 thisloop->desc = LOOP_NESTING;
2492 thisloop->next = loop_stack;
2493 thisloop->all = nesting_stack;
2494 thisloop->depth = ++nesting_depth;
2495 thisloop->data.loop.start_label = gen_label_rtx ();
2496 thisloop->data.loop.end_label = gen_label_rtx ();
2497 thisloop->data.loop.alt_end_label = 0;
2498 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2499 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2500 loop_stack = thisloop;
2501 nesting_stack = thisloop;
2503 do_pending_stack_adjust ();
2505 emit_note (NULL, NOTE_INSN_LOOP_BEG);
2506 emit_label (thisloop->data.loop.start_label);
2511 /* Like expand_start_loop but for a loop where the continuation point
2512 (for expand_continue_loop) will be specified explicitly. */
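/* Illustrative sketch of how a front end might expand
   `for (init; cond; incr) body' with these primitives (the exact
   sequence varies between front ends):

       expand_expr_stmt (init);
       expand_start_loop_continue_elsewhere (1);
       expand_exit_loop_top_cond (0, cond);
       ... expand body; `continue' jumps to the continuation point ...
       expand_loop_continue_here ();
       expand_expr_stmt (incr);
       expand_end_loop ();  */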
2515 expand_start_loop_continue_elsewhere (exit_flag)
2518 struct nesting *thisloop = expand_start_loop (exit_flag);
2519 loop_stack->data.loop.continue_label = gen_label_rtx ();
2523 /* Begin a null, aka do { } while (0) "loop". But since the contents
2524 of said loop can still contain a break, we must frob the loop nest. */
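/* For example, a front end may use this for `do { ... break; ... }
   while (0)': no test or backward jump is emitted, but the nesting
   entry still gives a contained `break' an end label to jump to
   (illustrative description only).  */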
2527 expand_start_null_loop ()
2529 struct nesting *thisloop = ALLOC_NESTING ();
2531 /* Make an entry on loop_stack for the loop we are entering. */
2533 thisloop->desc = LOOP_NESTING;
2534 thisloop->next = loop_stack;
2535 thisloop->all = nesting_stack;
2536 thisloop->depth = ++nesting_depth;
2537 thisloop->data.loop.start_label = emit_note (NULL, NOTE_INSN_DELETED);
2538 thisloop->data.loop.end_label = gen_label_rtx ();
2539 thisloop->data.loop.alt_end_label = NULL_RTX;
2540 thisloop->data.loop.continue_label = thisloop->data.loop.end_label;
2541 thisloop->exit_label = thisloop->data.loop.end_label;
2542 loop_stack = thisloop;
2543 nesting_stack = thisloop;
2548 /* Specify the continuation point for a loop started with
2549 expand_start_loop_continue_elsewhere.
2550 Use this at the point in the code to which a continue statement
2554 expand_loop_continue_here ()
2556 do_pending_stack_adjust ();
2557 emit_note (NULL, NOTE_INSN_LOOP_CONT);
2558 emit_label (loop_stack->data.loop.continue_label);
2561 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2562 Pop the block off of loop_stack. */
2567 rtx start_label = loop_stack->data.loop.start_label;
2569 int eh_regions, debug_blocks;
2571 /* Mark the continue-point at the top of the loop if none elsewhere. */
2572 if (start_label == loop_stack->data.loop.continue_label)
2573 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2575 do_pending_stack_adjust ();
2577 /* If the loop starts with a loop exit, roll that to the end where
2578 it will optimize together with the jump back.
If the loop presently looks like this (in pseudo-C):

	start_label:
	  if (test) goto end_label;
	  body;
	  goto start_label;
	end_label:

transform it to look like:

	  goto start_label;
	top_label:
	  body;
	start_label:
	  if (test) goto end_label;
	  goto top_label;
	end_label:
2601 We rely on the presence of NOTE_INSN_LOOP_END_TOP_COND to mark
2602 the end of the entry conditional. Without this, our lexical scan
2603 can't tell the difference between an entry conditional and a
2604 body conditional that exits the loop. Mistaking the two means
2605 that we can misplace the NOTE_INSN_LOOP_CONT note, which can
2606 screw up loop unrolling.
2608 Things will be oh so much better when loop optimization is done
2609 off of a proper control flow graph... */
2611 /* Scan insns from the top of the loop looking for the END_TOP_COND note. */
2613 eh_regions = debug_blocks = 0;
2614 for (etc_note = start_label; etc_note ; etc_note = NEXT_INSN (etc_note))
2615 if (GET_CODE (etc_note) == NOTE)
2617 if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_END_TOP_COND)
2620 /* We must not walk into a nested loop. */
2621 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_BEG)
2623 etc_note = NULL_RTX;
2627 /* At the same time, scan for EH region notes, as we don't want
2628 to scrog region nesting. This shouldn't happen, but... */
2629 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_BEG)
2631 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_END)
2633 if (--eh_regions < 0)
2634 /* We've come to the end of an EH region, but never saw the
2635 beginning of that region. That means that an EH region
2636 begins before the top of the loop, and ends in the middle
2637 of it. The existence of such a situation violates a basic
2638 assumption in this code, since that would imply that even
2639 when EH_REGIONS is zero, we might move code out of an
2640 exception region. */
2644 /* Likewise for debug scopes. In this case we'll either (1) move
2645 all of the notes if they are properly nested or (2) leave the
2646 notes alone and only rotate the loop at high optimization
2647 levels when we expect to scrog debug info. */
2648 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_BEG)
2650 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_END)
2657 && (debug_blocks == 0 || optimize >= 2)
2658 && NEXT_INSN (etc_note) != NULL_RTX
2659 && ! any_condjump_p (get_last_insn ()))
2661 /* We found one. Move everything from START to ETC to the end
2662 of the loop, and add a jump from the top of the loop. */
2663 rtx top_label = gen_label_rtx ();
2664 rtx start_move = start_label;
2666 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2667 then we want to move this note also. */
2668 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2669 && NOTE_LINE_NUMBER (PREV_INSN (start_move)) == NOTE_INSN_LOOP_CONT)
2670 start_move = PREV_INSN (start_move);
2672 emit_label_before (top_label, start_move);
2674 /* Actually move the insns. If the debug scopes are nested, we
2675 can move everything at once. Otherwise we have to move them
2676 one by one and squeeze out the block notes. */
2677 if (debug_blocks == 0)
2678 reorder_insns (start_move, etc_note, get_last_insn ());
2681 rtx insn, next_insn;
2682 for (insn = start_move; insn; insn = next_insn)
2684 /* Figure out which insn comes after this one. We have
2685 to do this before we move INSN. */
2686 next_insn = (insn == etc_note ? NULL : NEXT_INSN (insn));
2688 if (GET_CODE (insn) == NOTE
2689 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2690 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2693 reorder_insns (insn, insn, get_last_insn ());
2697 /* Add the jump from the top of the loop. */
2698 emit_jump_insn_before (gen_jump (start_label), top_label);
2699 emit_barrier_before (top_label);
2700 start_label = top_label;
2703 emit_jump (start_label);
2704 emit_note (NULL, NOTE_INSN_LOOP_END);
2705 emit_label (loop_stack->data.loop.end_label);
2707 POPSTACK (loop_stack);
2712 /* Finish a null loop, aka do { } while (0). */
2715 expand_end_null_loop ()
2717 do_pending_stack_adjust ();
2718 emit_label (loop_stack->data.loop.end_label);
2720 POPSTACK (loop_stack);
2725 /* Generate a jump to the current loop's continue-point.
2726 This is usually the top of the loop, but may be specified
2727 explicitly elsewhere. If not currently inside a loop,
2728 return 0 and do nothing; caller will print an error message. */
2731 expand_continue_loop (whichloop)
2732 struct nesting *whichloop;
2734 /* Emit information for branch prediction. */
2737 note = emit_note (NULL, NOTE_INSN_PREDICTION);
2738 NOTE_PREDICTION (note) = NOTE_PREDICT (PRED_CONTINUE, IS_TAKEN);
2741 whichloop = loop_stack;
2744 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2749 /* Generate a jump to exit the current loop. If not currently inside a loop,
2750 return 0 and do nothing; caller will print an error message. */
2753 expand_exit_loop (whichloop)
2754 struct nesting *whichloop;
2758 whichloop = loop_stack;
2761 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2765 /* Generate a conditional jump to exit the current loop if COND
2766 evaluates to zero. If not currently inside a loop,
2767 return 0 and do nothing; caller will print an error message. */
2770 expand_exit_loop_if_false (whichloop, cond)
2771 struct nesting *whichloop;
2774 rtx label = gen_label_rtx ();
2779 whichloop = loop_stack;
2782 /* In order to handle fixups, we actually create a conditional jump
2783 around an unconditional branch to exit the loop. If fixups are
2784 necessary, they go before the unconditional branch. */
2786 do_jump (cond, NULL_RTX, label);
2787 last_insn = get_last_insn ();
2788 if (GET_CODE (last_insn) == CODE_LABEL)
2789 whichloop->data.loop.alt_end_label = last_insn;
2790 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2797 /* Like expand_exit_loop_if_false except also emit a note marking
2798 the end of the conditional. Should only be used immediately
2799 after expand_start_loop. */
2802 expand_exit_loop_top_cond (whichloop, cond)
2803 struct nesting *whichloop;
2806 if (! expand_exit_loop_if_false (whichloop, cond))
2809 emit_note (NULL, NOTE_INSN_LOOP_END_TOP_COND);
2813 /* Return nonzero if the loop nest is empty. Else return zero. */
2816 stmt_loop_nest_empty ()
2818 /* cfun->stmt can be NULL if we are building a call to get the
2819 EH context for a setjmp/longjmp EH target and the current
2820 function was a deferred inline function. */
2821 return (cfun->stmt == NULL || loop_stack == NULL);
2824 /* Return nonzero if we should preserve sub-expressions as separate
2825 pseudos. We never do so if we aren't optimizing. We always do so
2826 if -fexpensive-optimizations.
2828 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2829 the loop may still be a small one. */
2832 preserve_subexpressions_p ()
2836 if (flag_expensive_optimizations)
2839 if (optimize == 0 || cfun == 0 || cfun->stmt == 0 || loop_stack == 0)
2842 insn = get_last_insn_anywhere ();
2845 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2846 < n_non_fixed_regs * 3));
2850 /* Generate a jump to exit the current loop, conditional, binding contour
2851 or case statement. Not all such constructs are visible to this function,
2852 only those started with EXIT_FLAG nonzero. Individual languages use
2853 the EXIT_FLAG parameter to control which kinds of constructs you can
2856 If not currently inside anything that can be exited,
2857 return 0 and do nothing; caller will print an error message. */
2860 expand_exit_something ()
2864 for (n = nesting_stack; n; n = n->all)
2865 if (n->exit_label != 0)
2867 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2874 /* Generate RTL to return from the current function, with no value.
2875 (That is, we do not do anything about returning any value.) */
2878 expand_null_return ()
2882 last_insn = get_last_insn ();
2884 /* If this function was declared to return a value, but we
2885 didn't, clobber the return registers so that they are not
2886 propagated live to the rest of the function. */
2887 clobber_return_register ();
2889 expand_null_return_1 (last_insn);
2892 /* Try to guess whether the return value means an error code. */
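/* For instance (illustrative only): `return NULL;' from a
   pointer-returning function yields PRED_NULL_RETURN; for an integer
   return, `return -1;' yields PRED_NEGATIVE_RETURN and `return 2;'
   PRED_CONST_RETURN, while `return 0;' and `return 1;' are left
   unpredicted because they commonly encode booleans.  */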
2893 static enum br_predictor
2894 return_prediction (val)
2897 /* Different heuristics for pointers and scalars. */
2898 if (POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
2900 /* NULL is usually not returned. */
2901 if (val == const0_rtx)
2902 return PRED_NULL_RETURN;
2906 /* Negative return values are often used to indicate errors. */
2908 if (GET_CODE (val) == CONST_INT
2909 && INTVAL (val) < 0)
2910 return PRED_NEGATIVE_RETURN;
2911 /* Constant return values are also usually errors;
2912 zero/one often mean booleans, so exclude them from the heuristics. */
2914 if (CONSTANT_P (val)
2915 && (val != const0_rtx && val != const1_rtx))
2916 return PRED_CONST_RETURN;
2918 return PRED_NO_PREDICTION;
2921 /* Generate RTL to return from the current function, with value VAL. */
2924 expand_value_return (val)
2929 enum br_predictor pred;
2931 if ((pred = return_prediction (val)) != PRED_NO_PREDICTION)
2933 /* Emit information for branch prediction. */
2936 note = emit_note (NULL, NOTE_INSN_PREDICTION);
2938 NOTE_PREDICTION (note) = NOTE_PREDICT (pred, NOT_TAKEN);
2942 last_insn = get_last_insn ();
2943 return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2945 /* Copy the value to the return location
2946 unless it's already there. */
2948 if (return_reg != val)
2950 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2951 #ifdef PROMOTE_FUNCTION_RETURN
2952 int unsignedp = TREE_UNSIGNED (type);
2953 enum machine_mode old_mode
2954 = DECL_MODE (DECL_RESULT (current_function_decl));
2955 enum machine_mode mode
2956 = promote_mode (type, old_mode, &unsignedp, 1);
2958 if (mode != old_mode)
2959 val = convert_modes (mode, old_mode, val, unsignedp);
2961 if (GET_CODE (return_reg) == PARALLEL)
2962 emit_group_load (return_reg, val, int_size_in_bytes (type));
2964 emit_move_insn (return_reg, val);
2967 expand_null_return_1 (last_insn);
2970 /* Output a return with no value. If LAST_INSN is nonzero,
2971 pretend that the return takes place after LAST_INSN. */
2974 expand_null_return_1 (last_insn)
2977 rtx end_label = cleanup_label ? cleanup_label : return_label;
2979 clear_pending_stack_adjust ();
2980 do_pending_stack_adjust ();
2984 end_label = return_label = gen_label_rtx ();
2985 expand_goto_internal (NULL_TREE, end_label, last_insn);
2988 /* Generate RTL to evaluate the expression RETVAL and return it
2989 from the current function. */
2992 expand_return (retval)
2995 /* If there are any cleanups to be performed, then they will
2996 be inserted following LAST_INSN. It is desirable
2997 that the last_insn, for such purposes, should be the
2998 last insn before computing the return value. Otherwise, cleanups
2999 which call functions can clobber the return value. */
3000 /* ??? rms: I think that is erroneous, because in C++ it would
3001 run destructors on variables that might be used in the subsequent
3002 computation of the return value. */
3008 /* If function wants no value, give it none. */
3009 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3011 expand_expr (retval, NULL_RTX, VOIDmode, 0);
3013 expand_null_return ();
3017 if (retval == error_mark_node)
3019 /* Treat this like a return of no value from a function that does return a value. */
3021 expand_null_return ();
3024 else if (TREE_CODE (retval) == RESULT_DECL)
3025 retval_rhs = retval;
3026 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
3027 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3028 retval_rhs = TREE_OPERAND (retval, 1);
3029 else if (VOID_TYPE_P (TREE_TYPE (retval)))
3030 /* Recognize tail-recursive call to void function. */
3031 retval_rhs = retval;
3033 retval_rhs = NULL_TREE;
3035 last_insn = get_last_insn ();
3037 /* Distribute return down conditional expr if either of the sides
3038 may involve tail recursion (see test below). This enhances the number
3039 of tail recursions we see. Don't do this always since it can produce
3040 sub-optimal code in some cases and we distribute assignments into
3041 conditional expressions when it would help. */
3043 if (optimize && retval_rhs != 0
3044 && frame_offset == 0
3045 && TREE_CODE (retval_rhs) == COND_EXPR
3046 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
3047 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
3049 rtx label = gen_label_rtx ();
3052 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
3053 start_cleanup_deferral ();
3054 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3055 DECL_RESULT (current_function_decl),
3056 TREE_OPERAND (retval_rhs, 1));
3057 TREE_SIDE_EFFECTS (expr) = 1;
3058 expand_return (expr);
3061 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3062 DECL_RESULT (current_function_decl),
3063 TREE_OPERAND (retval_rhs, 2));
3064 TREE_SIDE_EFFECTS (expr) = 1;
3065 expand_return (expr);
3066 end_cleanup_deferral ();
3070 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3072 /* If the result is an aggregate that is being returned in one (or more)
3073 registers, load the registers here. The compiler currently can't handle
3074 copying a BLKmode value into registers. We could put this code in a
3075 more general area (for use by everyone instead of just function
3076 call/return), but until this feature is generally usable it is kept here
3077 (and in expand_call). The value must go into a pseudo in case there
3078 are cleanups that will clobber the real return register. */
3081 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3082 && GET_CODE (result_rtl) == REG)
3085 unsigned HOST_WIDE_INT bitpos, xbitpos;
3086 unsigned HOST_WIDE_INT big_endian_correction = 0;
3087 unsigned HOST_WIDE_INT bytes
3088 = int_size_in_bytes (TREE_TYPE (retval_rhs));
3089 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3090 unsigned int bitsize
3091 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
3092 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
3093 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
3094 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
3095 enum machine_mode tmpmode, result_reg_mode;
3099 expand_null_return ();
3103 /* Structures whose size is not a multiple of a word are aligned
3104 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
3105 machine, this means we must skip the empty high order bytes when
3106 calculating the bit offset. */
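/* Worked example (assuming 32-bit words and 8-bit units): for a 6-byte
   structure, bytes % UNITS_PER_WORD == 2, so big_endian_correction is
   32 - 2 * 8 == 16 and copying into the destination words starts at
   bit offset 16, skipping the empty high-order bytes.  */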
3107 if (BYTES_BIG_ENDIAN
3108 && !FUNCTION_ARG_REG_LITTLE_ENDIAN
3109 && bytes % UNITS_PER_WORD)
3110 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
3113 /* Copy the structure BITSIZE bits at a time. */
3114 for (bitpos = 0, xbitpos = big_endian_correction;
3115 bitpos < bytes * BITS_PER_UNIT;
3116 bitpos += bitsize, xbitpos += bitsize)
3118 /* We need a new destination pseudo each time xbitpos is
3119 on a word boundary and when xbitpos == big_endian_correction
3120 (the first time through). */
3121 if (xbitpos % BITS_PER_WORD == 0
3122 || xbitpos == big_endian_correction)
3124 /* Generate an appropriate register. */
3125 dst = gen_reg_rtx (word_mode);
3126 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
3128 /* Clear the destination before we move anything into it. */
3129 emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
3132 /* We need a new source operand each time bitpos is on a word
3134 if (bitpos % BITS_PER_WORD == 0)
3135 src = operand_subword_force (result_val,
3136 bitpos / BITS_PER_WORD,
3139 /* Use bitpos for the source extraction (left justified) and
3140 xbitpos for the destination store (right justified). */
3141 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
3142 extract_bit_field (src, bitsize,
3143 bitpos % BITS_PER_WORD, 1,
3144 NULL_RTX, word_mode, word_mode,
3149 /* Find the smallest integer mode large enough to hold the
3150 entire structure and use that mode instead of BLKmode
3151 on the USE insn for the return register. */
3152 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3153 tmpmode != VOIDmode;
3154 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
3155 /* Have we found a large enough mode? */
3156 if (GET_MODE_SIZE (tmpmode) >= bytes)
3159 /* No suitable mode found. */
3160 if (tmpmode == VOIDmode)
3163 PUT_MODE (result_rtl, tmpmode);
3165 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
3166 result_reg_mode = word_mode;
3168 result_reg_mode = tmpmode;
3169 result_reg = gen_reg_rtx (result_reg_mode);
3172 for (i = 0; i < n_regs; i++)
3173 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
3176 if (tmpmode != result_reg_mode)
3177 result_reg = gen_lowpart (tmpmode, result_reg);
3179 expand_value_return (result_reg);
3181 else if (retval_rhs != 0
3182 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3183 && (GET_CODE (result_rtl) == REG
3184 || (GET_CODE (result_rtl) == PARALLEL)))
3186 /* Calculate the return value into a temporary (usually a pseudo
3188 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
3189 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
3191 val = assign_temp (nt, 0, 0, 1);
3192 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
3193 val = force_not_mem (val);
3195 /* Return the calculated value, doing cleanups first. */
3196 expand_value_return (val);
3200 /* No cleanups or no hard reg used;
3201 calculate value into hard return reg. */
3202 expand_expr (retval, const0_rtx, VOIDmode, 0);
3204 expand_value_return (result_rtl);
3208 /* Return 1 if the end of the generated RTX is not a barrier.
3209 This means code already compiled can drop through. */
3212 drop_through_at_end_p ()
3214 rtx insn = get_last_insn ();
3215 while (insn && GET_CODE (insn) == NOTE)
3216 insn = PREV_INSN (insn);
3217 return insn && GET_CODE (insn) != BARRIER;
3220 /* Attempt to optimize a potential tail recursion call into a goto.
3221 ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
3222 where to place the jump to the tail recursion label.
3224 Return TRUE if the call was optimized into a goto. */
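/* Illustrative example (a sketch only): if the body of `f' ends with
   `return f (n - 1);', ARGUMENTS is the argument list of that
   CALL_EXPR; on success, code is emitted that stores n - 1 into the
   formal `n' and jumps to tail_recursion_label instead of emitting a
   real call.  */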
3227 optimize_tail_recursion (arguments, last_insn)
3231 /* Finish checking validity, and if valid emit code to set the
3232 argument variables for the new call. */
3233 if (tail_recursion_args (arguments, DECL_ARGUMENTS (current_function_decl)))
3235 if (tail_recursion_label == 0)
3237 tail_recursion_label = gen_label_rtx ();
3238 emit_label_after (tail_recursion_label,
3239 tail_recursion_reentry);
3242 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
3249 /* Emit code to alter this function's formal parms for a tail-recursive call.
3250 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3251 FORMALS is the chain of decls of formals.
3252 Return 1 if this can be done;
3253 otherwise return 0 and do not emit any code. */
3256 tail_recursion_args (actuals, formals)
3257 tree actuals, formals;
3259 tree a = actuals, f = formals;
3263 /* Check that number and types of actuals are compatible
3264 with the formals. This is not always true in valid C code.
3265 Also check that no formal needs to be addressable
3266 and that all formals are scalars. */
3268 /* Also count the args. */
3270 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3272 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3273 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
3275 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3278 if (a != 0 || f != 0)
3281 /* Compute all the actuals. */
3283 argvec = (rtx *) alloca (i * sizeof (rtx));
3285 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3286 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
3288 /* Find which actual values refer to current values of previous formals.
3289 Copy each of them now, before any formal is changed. */
3291 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3295 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3296 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3302 argvec[i] = copy_to_reg (argvec[i]);
3305 /* Store the values of the actuals into the formals. */
3307 for (f = formals, a = actuals, i = 0; f;
3308 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3310 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3311 emit_move_insn (DECL_RTL (f), argvec[i]);
3313 convert_move (DECL_RTL (f), argvec[i],
3314 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3321 /* Generate the RTL code for entering a binding contour.
3322 The variables are declared one by one, by calls to `expand_decl'.
3324 FLAGS is a bitwise or of the following flags:
3326 1 - Nonzero if this construct should be visible to
3329 2 - Nonzero if this contour does not require a
3330 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
3331 language-independent code should set this flag because they
3332 will not create corresponding BLOCK nodes. (There should be
3333 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
3334 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
3335 when expand_end_bindings is called.
3337 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
3338 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
3342 expand_start_bindings_and_block (flags, block)
3346 struct nesting *thisblock = ALLOC_NESTING ();
3348 int exit_flag = ((flags & 1) != 0);
3349 int block_flag = ((flags & 2) == 0);
3351 /* If a BLOCK is supplied, then the caller should be requesting a
3352 NOTE_INSN_BLOCK_BEG note. */
3353 if (!block_flag && block)
3356 /* Create a note to mark the beginning of the block. */
3359 note = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
3360 NOTE_BLOCK (note) = block;
3363 note = emit_note (NULL, NOTE_INSN_DELETED);
3365 /* Make an entry on block_stack for the block we are entering. */
3367 thisblock->desc = BLOCK_NESTING;
3368 thisblock->next = block_stack;
3369 thisblock->all = nesting_stack;
3370 thisblock->depth = ++nesting_depth;
3371 thisblock->data.block.stack_level = 0;
3372 thisblock->data.block.cleanups = 0;
3373 thisblock->data.block.n_function_calls = 0;
3374 thisblock->data.block.exception_region = 0;
3375 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
3377 thisblock->data.block.conditional_code = 0;
3378 thisblock->data.block.last_unconditional_cleanup = note;
3379 /* When we insert instructions after the last unconditional cleanup,
3380 we don't adjust last_insn. That means that a later add_insn will
3381 clobber the instructions we've just added. The easiest way to
3382 fix this is to just insert another instruction here, so that the
3383 instructions inserted after the last unconditional cleanup are
3384 never the last instruction. */
3385 emit_note (NULL, NOTE_INSN_DELETED);
3388 && !(block_stack->data.block.cleanups == NULL_TREE
3389 && block_stack->data.block.outer_cleanups == NULL_TREE))
3390 thisblock->data.block.outer_cleanups
3391 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3392 block_stack->data.block.outer_cleanups);
3394 thisblock->data.block.outer_cleanups = 0;
3395 thisblock->data.block.label_chain = 0;
3396 thisblock->data.block.innermost_stack_block = stack_block_stack;
3397 thisblock->data.block.first_insn = note;
3398 thisblock->data.block.block_start_count = ++current_block_start_count;
3399 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3400 block_stack = thisblock;
3401 nesting_stack = thisblock;
3403 /* Make a new level for allocating stack slots. */
3407 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3408 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3409 expand_expr are made. After we end the region, we know that all
3410 space for all temporaries that were created by TARGET_EXPRs will be
3411 destroyed and their space freed for reuse. */
3414 expand_start_target_temps ()
3416 /* This is so that even if the result is preserved, the space
3417 allocated will be freed, as we know that it is no longer in use. */
3420 /* Start a new binding layer that will keep track of all cleanup
3421 actions to be performed. */
3422 expand_start_bindings (2);
3424 target_temp_slot_level = temp_slot_level;
3428 expand_end_target_temps ()
3430 expand_end_bindings (NULL_TREE, 0, 0);
3432 /* This is so that even if the result is preserved, the space
3433 allocated will be freed, as we know that it is no longer in use. */
3437 /* Given a pointer to a BLOCK node return nonzero if (and only if) the node
3438 in question represents the outermost pair of curly braces (i.e. the "body
3439 block") of a function or method.
3441 For any BLOCK node representing a "body block" of a function or method, the
3442 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
3443 represents the outermost (function) scope for the function or method (i.e.
3444 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
3445 *that* node in turn will point to the relevant FUNCTION_DECL node. */
3448 is_body_block (stmt)
3451 if (TREE_CODE (stmt) == BLOCK)
3453 tree parent = BLOCK_SUPERCONTEXT (stmt);
3455 if (parent && TREE_CODE (parent) == BLOCK)
3457 tree grandparent = BLOCK_SUPERCONTEXT (parent);
3459 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
3467 /* True if we are currently emitting insns in an area of output code
3468 that is controlled by a conditional expression. This is used by
3469 the cleanup handling code to generate conditional cleanup actions. */
3472 conditional_context ()
3474 return block_stack && block_stack->data.block.conditional_code;
3477 /* Return an opaque pointer to the current nesting level, so frontend code
3478 can check its own sanity. */
3481 current_nesting_level ()
3483 return cfun ? block_stack : 0;
3486 /* Emit a handler label for a nonlocal goto handler.
3487 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3490 expand_nl_handler_label (slot, before_insn)
3491 rtx slot, before_insn;
3494 rtx handler_label = gen_label_rtx ();
3496 /* Don't let cleanup_cfg delete the handler. */
3497 LABEL_PRESERVE_P (handler_label) = 1;
3500 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3501 insns = get_insns ();
3503 emit_insn_before (insns, before_insn);
3505 emit_label (handler_label);
3507 return handler_label;
3510 /* Emit code to restore vital registers at the beginning of a nonlocal goto
3513 expand_nl_goto_receiver ()
3515 #ifdef HAVE_nonlocal_goto
3516 if (! HAVE_nonlocal_goto)
3518 /* First adjust our frame pointer to its actual value. It was
3519 previously set to the start of the virtual area corresponding to
3520 the stacked variables when we branched here and now needs to be
3521 adjusted to the actual hardware fp value.
3523 Assignments to virtual registers are converted by
3524 instantiate_virtual_regs into the corresponding assignment
3525 to the underlying register (fp in this case) that makes
3526 the original assignment true.
3527 So the following insn will actually be
3528 decrementing fp by STARTING_FRAME_OFFSET. */
3529 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3531 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3532 if (fixed_regs[ARG_POINTER_REGNUM])
3534 #ifdef ELIMINABLE_REGS
3535 /* If the argument pointer can be eliminated in favor of the
3536 frame pointer, we don't need to restore it. We assume here
3537 that if such an elimination is present, it can always be used.
3538 This is the case on all known machines; if we don't make this
3539 assumption, we do unnecessary saving on many machines. */
3540 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
3543 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
3544 if (elim_regs[i].from == ARG_POINTER_REGNUM
3545 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3548 if (i == ARRAY_SIZE (elim_regs))
3551 /* Now restore our arg pointer from the address at which it
3552 was saved in our stack frame. */
3553 emit_move_insn (virtual_incoming_args_rtx,
3554 copy_to_reg (get_arg_pointer_save_area (cfun)));
3559 #ifdef HAVE_nonlocal_goto_receiver
3560 if (HAVE_nonlocal_goto_receiver)
3561 emit_insn (gen_nonlocal_goto_receiver ());
3565 /* Make handlers for nonlocal gotos taking place in the function calls in
3569 expand_nl_goto_receivers (thisblock)
3570 struct nesting *thisblock;
3573 rtx afterward = gen_label_rtx ();
3578 /* Record the handler address in the stack slot for that purpose,
3579 during this block, saving and restoring the outer value. */
3580 if (thisblock->next != 0)
3581 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3583 rtx save_receiver = gen_reg_rtx (Pmode);
3584 emit_move_insn (XEXP (slot, 0), save_receiver);
3587 emit_move_insn (save_receiver, XEXP (slot, 0));
3588 insns = get_insns ();
3590 emit_insn_before (insns, thisblock->data.block.first_insn);
3593 /* Jump around the handlers; they run only when specially invoked. */
3594 emit_jump (afterward);
3596 /* Make a separate handler for each label. */
3597 link = nonlocal_labels;
3598 slot = nonlocal_goto_handler_slots;
3599 label_list = NULL_RTX;
3600 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3601 /* Skip any labels we shouldn't be able to jump to from here,
3602 we generate one special handler for all of them below which just calls
3604 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3607 lab = expand_nl_handler_label (XEXP (slot, 0),
3608 thisblock->data.block.first_insn);
3609 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3611 expand_nl_goto_receiver ();
3613 /* Jump to the "real" nonlocal label. */
3614 expand_goto (TREE_VALUE (link));
3617 /* A second pass over all nonlocal labels; this time we handle those
3618 we should not be able to jump to at this point. */
3619 link = nonlocal_labels;
3620 slot = nonlocal_goto_handler_slots;
3622 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3623 if (DECL_TOO_LATE (TREE_VALUE (link)))
3626 lab = expand_nl_handler_label (XEXP (slot, 0),
3627 thisblock->data.block.first_insn);
3628 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3634 expand_nl_goto_receiver ();
3635 expand_builtin_trap ();
3638 nonlocal_goto_handler_labels = label_list;
3639 emit_label (afterward);
3642 /* Warn about any unused VARS (which may contain nodes other than
3643 VAR_DECLs, but such nodes are ignored). The nodes are connected
3644 via the TREE_CHAIN field. */
3647 warn_about_unused_variables (vars)
3652 if (warn_unused_variable)
3653 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3654 if (TREE_CODE (decl) == VAR_DECL
3655 && ! TREE_USED (decl)
3656 && ! DECL_IN_SYSTEM_HEADER (decl)
3657 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3658 warning_with_decl (decl, "unused variable `%s'");
3661 /* Generate RTL code to terminate a binding contour.
3663 VARS is the chain of VAR_DECL nodes for the variables bound in this
3664 contour. There may actually be other nodes in this chain, but any
3665 nodes other than VAR_DECLS are ignored.
3667 MARK_ENDS is nonzero if we should put a note at the beginning
3668 and end of this binding contour.
3670 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3671 (That is true automatically if the contour has a saved stack level.) */
3674 expand_end_bindings (vars, mark_ends, dont_jump_in)
3679 struct nesting *thisblock = block_stack;
3681 /* If any of the variables in this scope were not used, warn the
3683 warn_about_unused_variables (vars);
3685 if (thisblock->exit_label)
3687 do_pending_stack_adjust ();
3688 emit_label (thisblock->exit_label);
3691 /* If necessary, make handlers for nonlocal gotos taking
3692 place in the function calls in this block. */
3693 if (function_call_count != thisblock->data.block.n_function_calls
3695 /* Make handler for outermost block
3696 if there were any nonlocal gotos to this function. */
3697 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3698 /* Make handler for inner block if it has something
3699 special to do when you jump out of it. */
3700 : (thisblock->data.block.cleanups != 0
3701 || thisblock->data.block.stack_level != 0)))
3702 expand_nl_goto_receivers (thisblock);
3704 /* Don't allow jumping into a block that has a stack level.
3705 Cleanups are allowed, though. */
3707 || thisblock->data.block.stack_level != 0)
3709 struct label_chain *chain;
3711 /* Any labels in this block are no longer valid to go to.
3712 Mark them to cause an error message. */
3713 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3715 DECL_TOO_LATE (chain->label) = 1;
3716 /* If any goto without a fixup came to this label,
3717 that must be an error, because gotos without fixups
3718 come from outside all saved stack-levels. */
3719 if (TREE_ADDRESSABLE (chain->label))
3720 error_with_decl (chain->label,
3721 "label `%s' used before containing binding contour");
3725 /* Restore stack level in effect before the block
3726 (only if variable-size objects allocated). */
3727 /* Perform any cleanups associated with the block. */
3729 if (thisblock->data.block.stack_level != 0
3730 || thisblock->data.block.cleanups != 0)
3735 /* Don't let cleanups affect ({...}) constructs. */
3736 int old_expr_stmts_for_value = expr_stmts_for_value;
3737 rtx old_last_expr_value = last_expr_value;
3738 tree old_last_expr_type = last_expr_type;
3739 expr_stmts_for_value = 0;
3741 /* Only clean up here if this point can actually be reached. */
3742 insn = get_last_insn ();
3743 if (GET_CODE (insn) == NOTE)
3744 insn = prev_nonnote_insn (insn);
3745 reachable = (! insn || GET_CODE (insn) != BARRIER);
3747 /* Do the cleanups. */
3748 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3750 do_pending_stack_adjust ();
3752 expr_stmts_for_value = old_expr_stmts_for_value;
3753 last_expr_value = old_last_expr_value;
3754 last_expr_type = old_last_expr_type;
3756 /* Restore the stack level. */
3758 if (reachable && thisblock->data.block.stack_level != 0)
3760 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3761 thisblock->data.block.stack_level, NULL_RTX);
3762 if (nonlocal_goto_handler_slots != 0)
3763 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3767 /* Any gotos out of this block must also do these things.
3768 Also report any gotos with fixups that came to labels in this
3770 fixup_gotos (thisblock,
3771 thisblock->data.block.stack_level,
3772 thisblock->data.block.cleanups,
3773 thisblock->data.block.first_insn,
3777 /* Mark the beginning and end of the scope if requested.
3778 We do this now, after running cleanups on the variables
3779 just going out of scope, so they are in scope for their cleanups. */
3783 rtx note = emit_note (NULL, NOTE_INSN_BLOCK_END);
3784 NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
3787 /* Get rid of the beginning-mark if we don't make an end-mark. */
3788 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3790 /* Restore the temporary level of TARGET_EXPRs. */
3791 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
3793 /* Restore block_stack level for containing block. */
3795 stack_block_stack = thisblock->data.block.innermost_stack_block;
3796 POPSTACK (block_stack);
3798 /* Pop the stack slot nesting and free any slots at this level. */
3802 /* Generate code to save the stack pointer at the start of the current block
3803 and set up to restore it on exit. */
3806 save_stack_pointer ()
3808 struct nesting *thisblock = block_stack;
3810 if (thisblock->data.block.stack_level == 0)
3812 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3813 &thisblock->data.block.stack_level,
3814 thisblock->data.block.first_insn);
3815 stack_block_stack = thisblock;
3819 /* Generate RTL for the automatic variable declaration DECL.
3820 (Other kinds of declarations are simply ignored if seen here.) */
3826 struct nesting *thisblock;
3829 type = TREE_TYPE (decl);
3831 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
3832 type in case this node is used in a reference. */
3833 if (TREE_CODE (decl) == CONST_DECL)
3835 DECL_MODE (decl) = TYPE_MODE (type);
3836 DECL_ALIGN (decl) = TYPE_ALIGN (type);
3837 DECL_SIZE (decl) = TYPE_SIZE (type);
3838 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
3842 /* Otherwise, only automatic variables need any expansion done. Static and
3843 external variables, and external functions, will be handled by
3844 `assemble_variable' (called from finish_decl). TYPE_DECL requires
3845 nothing. PARM_DECLs are handled in `assign_parms'. */
3846 if (TREE_CODE (decl) != VAR_DECL)
3849 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3852 thisblock = block_stack;
3854 /* Create the RTL representation for the variable. */
3856 if (type == error_mark_node)
3857 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
3859 else if (DECL_SIZE (decl) == 0)
3860 /* Variable with incomplete type. */
3863 if (DECL_INITIAL (decl) == 0)
3864 /* Error message was already done; now avoid a crash. */
3865 x = gen_rtx_MEM (BLKmode, const0_rtx);
3867 /* An initializer is going to decide the size of this array.
3868 Until we know the size, represent its address with a reg. */
3869 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3871 set_mem_attributes (x, decl, 1);
3872 SET_DECL_RTL (decl, x);
3874 else if (DECL_MODE (decl) != BLKmode
3875 /* If -ffloat-store, don't put explicit float vars
3877 && !(flag_float_store
3878 && TREE_CODE (type) == REAL_TYPE)
3879 && ! TREE_THIS_VOLATILE (decl)
3880 && (DECL_REGISTER (decl) || optimize))
3882 /* Automatic variable that can go in a register. */
3883 int unsignedp = TREE_UNSIGNED (type);
3884 enum machine_mode reg_mode
3885 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3887 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
3889 if (GET_CODE (DECL_RTL (decl)) == REG)
3890 REGNO_DECL (REGNO (DECL_RTL (decl))) = decl;
3891 else if (GET_CODE (DECL_RTL (decl)) == CONCAT)
3893 REGNO_DECL (REGNO (XEXP (DECL_RTL (decl), 0))) = decl;
3894 REGNO_DECL (REGNO (XEXP (DECL_RTL (decl), 1))) = decl;
3897 mark_user_reg (DECL_RTL (decl));
3899 if (POINTER_TYPE_P (type))
3900 mark_reg_pointer (DECL_RTL (decl),
3901 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
3903 maybe_set_unchanging (DECL_RTL (decl), decl);
3905 /* If something wants our address, try to use ADDRESSOF. */
3906 if (TREE_ADDRESSABLE (decl))
3907 put_var_into_stack (decl);
3910 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
3911 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3912 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
3913 STACK_CHECK_MAX_VAR_SIZE)))
3915 /* Variable of fixed size that goes on the stack. */
3920 /* If we previously made RTL for this decl, it must be an array
3921 whose size was determined by the initializer.
3922 The old address was a register; set that register now
3923 to the proper address. */
3924 if (DECL_RTL_SET_P (decl))
3926 if (GET_CODE (DECL_RTL (decl)) != MEM
3927 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3929 oldaddr = XEXP (DECL_RTL (decl), 0);
3932 /* Set alignment we actually gave this decl. */
3933 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3934 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3935 DECL_USER_ALIGN (decl) = 0;
3937 x = assign_temp (decl, 1, 1, 1);
3938 set_mem_attributes (x, decl, 1);
3939 SET_DECL_RTL (decl, x);
3943 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3944 if (addr != oldaddr)
3945 emit_move_insn (oldaddr, addr);
3949 /* Dynamic-size object: must push space on the stack. */
3951 rtx address, size, x;
3953 /* Record the stack pointer on entry to block, if we have
3954 not already done so. */
3955 do_pending_stack_adjust ();
3956 save_stack_pointer ();
3958 /* In function-at-a-time mode, variable_size doesn't expand this,
3960 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
3961 expand_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
3962 const0_rtx, VOIDmode, 0);
3964 /* Compute the variable's size, in bytes. */
3965 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
3968 /* Allocate space on the stack for the variable. Note that
3969 DECL_ALIGN says how the variable is to be aligned and we
3970 cannot use it to conclude anything about the alignment of
3972 address = allocate_dynamic_stack_space (size, NULL_RTX,
3973 TYPE_ALIGN (TREE_TYPE (decl)));
3975 /* Reference the variable indirectly through that rtx. */
3976 x = gen_rtx_MEM (DECL_MODE (decl), address);
3977 set_mem_attributes (x, decl, 1);
3978 SET_DECL_RTL (decl, x);
3981 /* Indicate the alignment we actually gave this variable. */
3982 #ifdef STACK_BOUNDARY
3983 DECL_ALIGN (decl) = STACK_BOUNDARY;
3985 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3987 DECL_USER_ALIGN (decl) = 0;
3991 /* Emit code to perform the initialization of a declaration DECL. */
3994 expand_decl_init (decl)
3997 int was_used = TREE_USED (decl);
3999 /* If this is a CONST_DECL, we don't have to generate any code. Likewise
4000 for static decls. */
4001 if (TREE_CODE (decl) == CONST_DECL
4002 || TREE_STATIC (decl))
4005 /* Compute and store the initial value now. */
4007 if (DECL_INITIAL (decl) == error_mark_node)
4009 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
4011 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
4012 || code == POINTER_TYPE || code == REFERENCE_TYPE)
4013 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
4017 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
4019 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
4020 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
4024 /* Don't let the initialization count as "using" the variable. */
4025 TREE_USED (decl) = was_used;
4027 /* Free any temporaries we made while initializing the decl. */
4028 preserve_temp_slots (NULL_RTX);
4032 /* CLEANUP is an expression to be executed at exit from this binding contour;
4033 for example, in C++, it might call the destructor for this variable.
4035 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
4036 CLEANUP multiple times, and have the correct semantics. This
4037 happens in exception handling, for gotos, returns, breaks that
4038 leave the current scope.
4040 If CLEANUP is nonzero and DECL is zero, we record a cleanup
4041 that is not associated with any particular variable. */
4044 expand_decl_cleanup (decl, cleanup)
4047 struct nesting *thisblock;
4049 /* Error if we are not in any block. */
4050 if (cfun == 0 || block_stack == 0)
4053 thisblock = block_stack;
4055 /* Record the cleanup if there is one. */
4061 tree *cleanups = &thisblock->data.block.cleanups;
4062 int cond_context = conditional_context ();
4066 rtx flag = gen_reg_rtx (word_mode);
4071 emit_move_insn (flag, const0_rtx);
4072 set_flag_0 = get_insns ();
4075 thisblock->data.block.last_unconditional_cleanup
4076 = emit_insn_after (set_flag_0,
4077 thisblock->data.block.last_unconditional_cleanup);
4079 emit_move_insn (flag, const1_rtx);
4081 cond = build_decl (VAR_DECL, NULL_TREE,
4082 (*lang_hooks.types.type_for_mode) (word_mode, 1));
4083 SET_DECL_RTL (cond, flag);
4085 /* Conditionalize the cleanup. */
4086 cleanup = build (COND_EXPR, void_type_node,
4087 (*lang_hooks.truthvalue_conversion) (cond),
4088 cleanup, integer_zero_node);
4089 cleanup = fold (cleanup);
4091 cleanups = &thisblock->data.block.cleanups;
4094 cleanup = unsave_expr (cleanup);
4096 t = *cleanups = tree_cons (decl, cleanup, *cleanups);
4099 /* If this block has a cleanup, it belongs in stack_block_stack. */
4100 stack_block_stack = thisblock;
4107 if (! using_eh_for_cleanups_p)
4108 TREE_ADDRESSABLE (t) = 1;
4110 expand_eh_region_start ();
4117 thisblock->data.block.last_unconditional_cleanup
4118 = emit_insn_after (seq,
4119 thisblock->data.block.last_unconditional_cleanup);
4123 thisblock->data.block.last_unconditional_cleanup
4125 /* When we insert instructions after the last unconditional cleanup,
4126 we don't adjust last_insn. That means that a later add_insn will
4127 clobber the instructions we've just added. The easiest way to
4128 fix this is to just insert another instruction here, so that the
4129 instructions inserted after the last unconditional cleanup are
4130 never the last instruction. */
4131 emit_note (NULL, NOTE_INSN_DELETED);
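/* A minimal illustrative sketch (not GCC code) of the run-time behavior
   produced above for a cleanup registered in a conditional context: a
   flag is zeroed at the last unconditional point, set to one only on the
   path that actually reaches the declaration, and the cleanup itself is
   wrapped in a test of that flag (the COND_EXPR built above).  The names
   below are invented for the example.  */
#if 0
#include <stdlib.h>

extern int some_condition (void);

static void
conditional_cleanup_analogue (void)
{
  int flag = 0;                 /* cleared at the last unconditional cleanup point */
  char *p = 0;

  if (some_condition ())
    {
      p = malloc (64);          /* the conditionally reached declaration */
      flag = 1;                 /* set where the cleanup is registered */
    }

  /* ... rest of the binding contour ... */

  if (flag)                     /* the conditionalized cleanup */
    free (p);
}
#endif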
4137 /* Like expand_decl_cleanup, but maybe only run the cleanup if an exception is thrown. */
4141 expand_decl_cleanup_eh (decl, cleanup, eh_only)
4145 int ret = expand_decl_cleanup (decl, cleanup);
4148 tree node = block_stack->data.block.cleanups;
4149 CLEANUP_EH_ONLY (node) = eh_only;
4154 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
4155 DECL_ELTS is the list of elements that belong to DECL's type.
4156 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
4159 expand_anon_union_decl (decl, cleanup, decl_elts)
4160 tree decl, cleanup, decl_elts;
4162 struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
4166 /* If any of the elements are addressable, so is the entire union. */
4167 for (t = decl_elts; t; t = TREE_CHAIN (t))
4168 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
4170 TREE_ADDRESSABLE (decl) = 1;
4175 expand_decl_cleanup (decl, cleanup);
4176 x = DECL_RTL (decl);
4178 /* Go through the elements, assigning RTL to each. */
4179 for (t = decl_elts; t; t = TREE_CHAIN (t))
4181 tree decl_elt = TREE_VALUE (t);
4182 tree cleanup_elt = TREE_PURPOSE (t);
4183 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4185 /* If any of the elements are used, so is the entire union. */
4187 if (TREE_USED (decl_elt))
4188 TREE_USED (decl) = 1;
4190 /* Propagate the union's alignment to the elements. */
4191 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
4192 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
4194 /* If the element has BLKmode and the union doesn't, the union is
4195 aligned such that the element doesn't need to have BLKmode, so
4196 change the element's mode to the appropriate one for its size. */
4197 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4198 DECL_MODE (decl_elt) = mode
4199 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
4201 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4202 instead create a new MEM rtx with the proper mode. */
4203 if (GET_CODE (x) == MEM)
4205 if (mode == GET_MODE (x))
4206 SET_DECL_RTL (decl_elt, x);
4208 SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
4210 else if (GET_CODE (x) == REG)
4212 if (mode == GET_MODE (x))
4213 SET_DECL_RTL (decl_elt, x);
4215 SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
4220 /* Record the cleanup if there is one. */
4223 thisblock->data.block.cleanups
4224 = tree_cons (decl_elt, cleanup_elt,
4225 thisblock->data.block.cleanups);
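/* A minimal illustrative sketch (not GCC code) of what the RTL sharing
   above means at the source level: every element of an anonymous union
   lives in the union's own storage, so each element's DECL_RTL is just
   the union's MEM or REG viewed in the element's mode.  */
#if 0
#include <assert.h>

static void
anon_union_analogue (void)
{
  union { int i; float f; unsigned char bytes[4]; } u;

  /* All members alias the same storage; only the access type differs.  */
  assert ((void *) &u.i == (void *) &u.f);
  assert ((void *) &u.i == (void *) u.bytes);
}
#endif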
4229 /* Expand a list of cleanups LIST.
4230 Elements may be expressions or may be nested lists.
4232 If DONT_DO is nonnull, then any list-element
4233 whose TREE_PURPOSE matches DONT_DO is omitted.
4234 This is sometimes used to avoid a cleanup associated with
4235 a value that is being returned out of the scope.
4237 If IN_FIXUP is nonzero, we are generating this cleanup for a fixup
4238 goto and handle protection regions specially in that case.
4240 If REACHABLE, we emit code, otherwise just inform the exception handling
4241 code about this finalization. */
4244 expand_cleanups (list, dont_do, in_fixup, reachable)
4251 for (tail = list; tail; tail = TREE_CHAIN (tail))
4252 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
4254 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4255 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
4258 if (! in_fixup && using_eh_for_cleanups_p)
4259 expand_eh_region_end_cleanup (TREE_VALUE (tail));
4261 if (reachable && !CLEANUP_EH_ONLY (tail))
4263 /* Cleanups may be run multiple times. For example,
4264 when exiting a binding contour, we expand the
4265 cleanups associated with that contour. When a goto
4266 within that binding contour has a target outside that
4267 contour, it will expand all cleanups from its scope to
4268 the target. Though the cleanups are expanded multiple
4269 times, the control paths are non-overlapping so the
4270 cleanups will not be executed twice. */
4272 /* We may need to protect from outer cleanups. */
4273 if (in_fixup && using_eh_for_cleanups_p)
4275 expand_eh_region_start ();
4277 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4279 expand_eh_region_end_fixup (TREE_VALUE (tail));
4282 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4290 /* Mark the context we are emitting RTL for as a conditional
4291 context, so that any cleanup actions we register with
4292 expand_decl_init will be properly conditionalized when those
4293 cleanup actions are later performed. Must be called before any
4294 expression (tree) is expanded that is within a conditional context. */
4297 start_cleanup_deferral ()
4299 /* block_stack can be NULL if we are inside the parameter list. It is
4300 OK to do nothing, because cleanups aren't possible here. */
4302 ++block_stack->data.block.conditional_code;
4305 /* Mark the end of a conditional region of code. Because cleanup
4306 deferrals may be nested, we may still be in a conditional region
4307 after we end the currently deferred cleanups; only after we end all
4308 deferred cleanups are we back in unconditional code. */
4311 end_cleanup_deferral ()
4313 /* block_stack can be NULL if we are inside the parameter list. It is
4314 OK to do nothing, because cleanups aren't possible here. */
4316 --block_stack->data.block.conditional_code;
4319 /* Move all cleanups from the current block_stack
4320 to the containing block_stack, where they are assumed to
4321 have been created. If anything can cause a temporary to
4322 be created, but not expanded for more than one level of
4323 block_stacks, then this code will have to change. */
4328 struct nesting *block = block_stack;
4329 struct nesting *outer = block->next;
4331 outer->data.block.cleanups
4332 = chainon (block->data.block.cleanups,
4333 outer->data.block.cleanups);
4334 block->data.block.cleanups = 0;
4338 last_cleanup_this_contour ()
4340 if (block_stack == 0)
4343 return block_stack->data.block.cleanups;
4346 /* Return 1 if there are any pending cleanups at this point.
4347 If THIS_CONTOUR is nonzero, check the current contour as well.
4348 Otherwise, look only at the contours that enclose this one. */
4351 any_pending_cleanups (this_contour)
4354 struct nesting *block;
4356 if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
4359 if (this_contour && block_stack->data.block.cleanups != NULL)
4361 if (block_stack->data.block.cleanups == 0
4362 && block_stack->data.block.outer_cleanups == 0)
4365 for (block = block_stack->next; block; block = block->next)
4366 if (block->data.block.cleanups != 0)
4372 /* Enter a case (Pascal) or switch (C) statement.
4373 Push a block onto case_stack and nesting_stack
4374 to accumulate the case-labels that are seen
4375 and to record the labels generated for the statement.
4377 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4378 Otherwise, this construct is transparent for `exit_something'.
4380 EXPR is the index-expression to be dispatched on.
4381 TYPE is its nominal type. We could simply convert EXPR to this type,
4382 but instead we take short cuts. */
4385 expand_start_case (exit_flag, expr, type, printname)
4389 const char *printname;
4391 struct nesting *thiscase = ALLOC_NESTING ();
4393 /* Make an entry on case_stack for the case we are entering. */
4395 thiscase->desc = CASE_NESTING;
4396 thiscase->next = case_stack;
4397 thiscase->all = nesting_stack;
4398 thiscase->depth = ++nesting_depth;
4399 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4400 thiscase->data.case_stmt.case_list = 0;
4401 thiscase->data.case_stmt.index_expr = expr;
4402 thiscase->data.case_stmt.nominal_type = type;
4403 thiscase->data.case_stmt.default_label = 0;
4404 thiscase->data.case_stmt.printname = printname;
4405 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4406 case_stack = thiscase;
4407 nesting_stack = thiscase;
4409 do_pending_stack_adjust ();
4411 /* Make sure case_stmt.start points to something that won't
4412 need any transformation before expand_end_case. */
4413 if (GET_CODE (get_last_insn ()) != NOTE)
4414 emit_note (NULL, NOTE_INSN_DELETED);
4416 thiscase->data.case_stmt.start = get_last_insn ();
4418 start_cleanup_deferral ();
4421 /* Start a "dummy case statement" within which case labels are invalid
4422 and are not connected to any larger real case statement.
4423 This can be used if you don't want to let a case statement jump
4424 into the middle of certain kinds of constructs. */
4427 expand_start_case_dummy ()
4429 struct nesting *thiscase = ALLOC_NESTING ();
4431 /* Make an entry on case_stack for the dummy. */
4433 thiscase->desc = CASE_NESTING;
4434 thiscase->next = case_stack;
4435 thiscase->all = nesting_stack;
4436 thiscase->depth = ++nesting_depth;
4437 thiscase->exit_label = 0;
4438 thiscase->data.case_stmt.case_list = 0;
4439 thiscase->data.case_stmt.start = 0;
4440 thiscase->data.case_stmt.nominal_type = 0;
4441 thiscase->data.case_stmt.default_label = 0;
4442 case_stack = thiscase;
4443 nesting_stack = thiscase;
4444 start_cleanup_deferral ();
4447 /* End a dummy case statement. */
4450 expand_end_case_dummy ()
4452 end_cleanup_deferral ();
4453 POPSTACK (case_stack);
4456 /* Return the data type of the index-expression
4457 of the innermost case statement, or null if none. */
4460 case_index_expr_type ()
4463 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4470 /* If this is the first label, warn if any insns have been emitted. */
4471 if (case_stack->data.case_stmt.line_number_status >= 0)
4475 restore_line_number_status
4476 (case_stack->data.case_stmt.line_number_status);
4477 case_stack->data.case_stmt.line_number_status = -1;
4479 for (insn = case_stack->data.case_stmt.start;
4481 insn = NEXT_INSN (insn))
4483 if (GET_CODE (insn) == CODE_LABEL)
4485 if (GET_CODE (insn) != NOTE
4486 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4489 insn = PREV_INSN (insn);
4490 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4492 /* If insn is zero, then there must have been a syntax error. */
4494 warning_with_file_and_line (NOTE_SOURCE_FILE (insn),
4495 NOTE_LINE_NUMBER (insn),
4496 "unreachable code at beginning of %s",
4497 case_stack->data.case_stmt.printname);
4504 /* Accumulate one case or default label inside a case or switch statement.
4505 VALUE is the value of the case (a null pointer, for a default label).
4506 The function CONVERTER, when applied to arguments T and V,
4507 converts the value V to the type T.
4509 If not currently inside a case or switch statement, return 1 and do
4510 nothing. The caller will print a language-specific error message.
4511 If VALUE is a duplicate or overlaps, return 2 and do nothing
4512 except store the (first) duplicate node in *DUPLICATE.
4513 If VALUE is out of range, return 3 and do nothing.
4514 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4515 Return 0 on success.
4517 Extended to handle range statements. */
4520 pushcase (value, converter, label, duplicate)
4522 tree (*converter) PARAMS ((tree, tree));
4529 /* Fail if not inside a real case statement. */
4530 if (! (case_stack && case_stack->data.case_stmt.start))
4533 if (stack_block_stack
4534 && stack_block_stack->depth > case_stack->depth)
4537 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4538 nominal_type = case_stack->data.case_stmt.nominal_type;
4540 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4541 if (index_type == error_mark_node)
4544 /* Convert VALUE to the type in which the comparisons are nominally done. */
4546 value = (*converter) (nominal_type, value);
4550 /* Fail if this value is out of range for the actual type of the index
4551 (which may be narrower than NOMINAL_TYPE). */
4553 && (TREE_CONSTANT_OVERFLOW (value)
4554 || ! int_fits_type_p (value, index_type)))
4557 return add_case_node (value, value, label, duplicate);
4560 /* Like pushcase but this case applies to all values between VALUE1 and
4561 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4562 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4563 starts at VALUE1 and ends at the highest value of the index type.
4564 If both are NULL, this case applies to all values.
4566 The return value is the same as that of pushcase but there is one
4567 additional error code: 4 means the specified range was empty. */
4570 pushcase_range (value1, value2, converter, label, duplicate)
4571 tree value1, value2;
4572 tree (*converter) PARAMS ((tree, tree));
4579 /* Fail if not inside a real case statement. */
4580 if (! (case_stack && case_stack->data.case_stmt.start))
4583 if (stack_block_stack
4584 && stack_block_stack->depth > case_stack->depth)
4587 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4588 nominal_type = case_stack->data.case_stmt.nominal_type;
4590 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4591 if (index_type == error_mark_node)
4596 /* Convert VALUEs to type in which the comparisons are nominally done
4597 and replace any unspecified value with the corresponding bound. */
4599 value1 = TYPE_MIN_VALUE (index_type);
4601 value2 = TYPE_MAX_VALUE (index_type);
4603 /* Fail if the range is empty. Do this before any conversion since
4604 we want to allow out-of-range empty ranges. */
4605 if (value2 != 0 && tree_int_cst_lt (value2, value1))
4608 /* If the max was unbounded, use the max of the nominal_type we are
4609 converting to. Do this after the < check above to suppress false
4612 value2 = TYPE_MAX_VALUE (nominal_type);
4614 value1 = (*converter) (nominal_type, value1);
4615 value2 = (*converter) (nominal_type, value2);
4617 /* Fail if these values are out of range. */
4618 if (TREE_CONSTANT_OVERFLOW (value1)
4619 || ! int_fits_type_p (value1, index_type))
4622 if (TREE_CONSTANT_OVERFLOW (value2)
4623 || ! int_fits_type_p (value2, index_type))
4626 return add_case_node (value1, value2, label, duplicate);
4629 /* Do the actual insertion of a case label for pushcase and pushcase_range
4630 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4631 slowdown for large switch statements. */
4634 add_case_node (low, high, label, duplicate)
4639 struct case_node *p, **q, *r;
4641 /* If there's no HIGH value, then this is not a case range; it's
4642 just a simple case label. But that's just a degenerate case range. */
4647 /* Handle default labels specially. */
4650 if (case_stack->data.case_stmt.default_label != 0)
4652 *duplicate = case_stack->data.case_stmt.default_label;
4655 case_stack->data.case_stmt.default_label = label;
4656 expand_label (label);
4660 q = &case_stack->data.case_stmt.case_list;
4667 /* Keep going past elements distinctly greater than HIGH. */
4668 if (tree_int_cst_lt (high, p->low))
4671 /* or distinctly less than LOW. */
4672 else if (tree_int_cst_lt (p->high, low))
4677 /* We have an overlap; this is an error. */
4678 *duplicate = p->code_label;
4683 /* Add this label to the chain, and succeed. */
4685 r = (struct case_node *) ggc_alloc (sizeof (struct case_node));
4688 /* If the bounds are equal, turn this into the one-value case. */
4689 if (tree_int_cst_equal (low, high))
4694 r->code_label = label;
4695 expand_label (label);
4705 struct case_node *s;
4711 if (! (b = p->balance))
4712 /* Growth propagation from left side. */
4719 if ((p->left = s = r->right))
4728 if ((r->parent = s))
4736 case_stack->data.case_stmt.case_list = r;
4739 /* r->balance == +1 */
4744 struct case_node *t = r->right;
4746 if ((p->left = s = t->right))
4750 if ((r->right = s = t->left))
4764 if ((t->parent = s))
4772 case_stack->data.case_stmt.case_list = t;
4779 /* p->balance == +1; growth of left side balances the node. */
4789 if (! (b = p->balance))
4790 /* Growth propagation from right side. */
4798 if ((p->right = s = r->left))
4806 if ((r->parent = s))
4815 case_stack->data.case_stmt.case_list = r;
4819 /* r->balance == -1 */
4823 struct case_node *t = r->left;
4825 if ((p->right = s = t->left))
4830 if ((r->left = s = t->right))
4844 if ((t->parent = s))
4853 case_stack->data.case_stmt.case_list = t;
4859 /* p->balance == -1; growth of right side balances the node. */
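/* A minimal illustrative sketch (not GCC code) of the duplicate test used
   while walking the tree above: two inclusive case ranges are compatible
   only when one lies entirely below the other; anything else is reported
   through *DUPLICATE.  */
#if 0
static int
case_ranges_overlap (long low1, long high1, long low2, long high2)
{
  /* Disjoint iff one range lies entirely below the other.  */
  if (high1 < low2 || high2 < low1)
    return 0;
  return 1;                     /* overlap: a duplicate case value */
}
#endif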
4872 /* Returns the number of possible values of TYPE.
4873 Returns -1 if the number is unknown, variable, or if the number does not
4874 fit in a HOST_WIDE_INT.
4875 Sets *SPARSENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4876 do not increase monotonically (there may be duplicates);
4877 to 1 if the values increase monotonically, but not always by 1;
4878 otherwise sets it to 0. */
4881 all_cases_count (type, sparseness)
4886 HOST_WIDE_INT count, minval, lastval;
4890 switch (TREE_CODE (type))
4897 count = 1 << BITS_PER_UNIT;
4902 if (TYPE_MAX_VALUE (type) != 0
4903 && 0 != (t = fold (build (MINUS_EXPR, type, TYPE_MAX_VALUE (type),
4904 TYPE_MIN_VALUE (type))))
4905 && 0 != (t = fold (build (PLUS_EXPR, type, t,
4906 convert (type, integer_zero_node))))
4907 && host_integerp (t, 1))
4908 count = tree_low_cst (t, 1);
4914 /* Don't waste time with enumeral types with huge values. */
4915 if (! host_integerp (TYPE_MIN_VALUE (type), 0)
4916 || TYPE_MAX_VALUE (type) == 0
4917 || ! host_integerp (TYPE_MAX_VALUE (type), 0))
4920 lastval = minval = tree_low_cst (TYPE_MIN_VALUE (type), 0);
4923 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4925 HOST_WIDE_INT thisval = tree_low_cst (TREE_VALUE (t), 0);
4927 if (*sparseness == 2 || thisval <= lastval)
4929 else if (thisval != minval + count)
4940 #define BITARRAY_TEST(ARRAY, INDEX) \
4941 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4942 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
4943 #define BITARRAY_SET(ARRAY, INDEX) \
4944 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4945 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
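/* A minimal illustrative sketch (not GCC code) of how the two macros
   above pack one bit per possible case value, assuming
   HOST_BITS_PER_CHAR is 8.  */
#if 0
#include <stdio.h>
#include <string.h>

static void
bitarray_demo (void)
{
  unsigned char seen[4];                /* room for 32 values */
  memset (seen, 0, sizeof seen);

  seen[10 / 8] |= 1 << (10 % 8);        /* what BITARRAY_SET (seen, 10) expands to */

  if (seen[10 / 8] & (1 << (10 % 8)))   /* what BITARRAY_TEST (seen, 10) expands to */
    printf ("case value with offset 10 was seen\n");
}
#endif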
4947 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
4948 with the case values we have seen, assuming the case expression
4950 SPARSENESS is as determined by all_cases_count.
4952 The time needed is proportional to COUNT, unless
4953 SPARSENESS is 2, in which case quadratic time is needed. */
4956 mark_seen_cases (type, cases_seen, count, sparseness)
4958 unsigned char *cases_seen;
4959 HOST_WIDE_INT count;
4962 tree next_node_to_try = NULL_TREE;
4963 HOST_WIDE_INT next_node_offset = 0;
4965 struct case_node *n, *root = case_stack->data.case_stmt.case_list;
4966 tree val = make_node (INTEGER_CST);
4968 TREE_TYPE (val) = type;
4972 else if (sparseness == 2)
4975 unsigned HOST_WIDE_INT xlo;
4977 /* This less efficient loop is only needed to handle
4978 duplicate case values (multiple enum constants
4979 with the same value). */
4980 TREE_TYPE (val) = TREE_TYPE (root->low);
4981 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
4982 t = TREE_CHAIN (t), xlo++)
4984 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
4985 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
4989 /* Keep going past elements distinctly greater than VAL. */
4990 if (tree_int_cst_lt (val, n->low))
4993 /* or distinctly less than VAL. */
4994 else if (tree_int_cst_lt (n->high, val))
4999 /* We have found a matching range. */
5000 BITARRAY_SET (cases_seen, xlo);
5010 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
5012 for (n = root; n; n = n->right)
5014 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
5015 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
5016 while (! tree_int_cst_lt (n->high, val))
5018 /* Calculate (into xlo) the "offset" of the integer (val).
5019 The element with lowest value has offset 0, the next smallest
5020 element has offset 1, etc. */
5022 unsigned HOST_WIDE_INT xlo;
5026 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
5028 /* The TYPE_VALUES will be in increasing order, so
5029 start searching where we last ended. */
5030 t = next_node_to_try;
5031 xlo = next_node_offset;
5037 t = TYPE_VALUES (type);
5040 if (tree_int_cst_equal (val, TREE_VALUE (t)))
5042 next_node_to_try = TREE_CHAIN (t);
5043 next_node_offset = xlo + 1;
5048 if (t == next_node_to_try)
5057 t = TYPE_MIN_VALUE (type);
5059 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
5063 add_double (xlo, xhi,
5064 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5068 if (xhi == 0 && xlo < (unsigned HOST_WIDE_INT) count)
5069 BITARRAY_SET (cases_seen, xlo);
5071 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5073 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
5079 /* Given a switch statement with an expression that is an enumeration
5080 type, warn if any of the enumeration type's literals are not
5081 covered by the case expressions of the switch. Also, warn if there
5082 are any extra switch cases that are *not* elements of the enumerated type.
5087 At one stage this function would: ``If all enumeration literals
5088 were covered by the case expressions, turn one of the expressions
5089 into the default expression since it should not be possible to fall
5090 through such a switch.''
5092 That code has since been removed as: ``This optimization is
5093 disabled because it causes valid programs to fail. ANSI C does not
5094 guarantee that an expression with enum type will have a value that
5095 is the same as one of the enumeration literals.'' */
5098 check_for_full_enumeration_handling (type)
5101 struct case_node *n;
5104 /* True iff the selector type is a numbered set mode. */
5107 /* The number of possible selector values. */
5110 /* For each possible selector value, a one iff it has been matched
5111 by a case value alternative. */
5112 unsigned char *cases_seen;
5114 /* The allocated size of cases_seen, in chars. */
5115 HOST_WIDE_INT bytes_needed;
5117 size = all_cases_count (type, &sparseness);
5118 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
5120 if (size > 0 && size < 600000
5121 /* We deliberately use calloc here, not xcalloc, so that we can suppress
5122 this optimization if we don't have enough memory rather than
5123 aborting, as xmalloc would do. */
5125 (unsigned char *) really_call_calloc (bytes_needed, 1)) != NULL)
5128 tree v = TYPE_VALUES (type);
5130 /* The time complexity of this code is normally O(N), where
5131 N is the number of members in the enumerated type.
5132 However, if type is an ENUMERAL_TYPE whose values do not
5133 increase monotonically, O(N*log(N)) time may be needed. */
5135 mark_seen_cases (type, cases_seen, size, sparseness);
5137 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
5138 if (BITARRAY_TEST (cases_seen, i) == 0)
5139 warning ("enumeration value `%s' not handled in switch",
5140 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
5145 /* Now we go the other way around; we warn if there are case
5146 expressions that don't correspond to enumerators. This can
5147 occur since C and C++ don't enforce type-checking of
5148 assignments to enumeration variables. */
5150 if (case_stack->data.case_stmt.case_list
5151 && case_stack->data.case_stmt.case_list->left)
5152 case_stack->data.case_stmt.case_list
5153 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
5154 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
5156 for (chain = TYPE_VALUES (type);
5157 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
5158 chain = TREE_CHAIN (chain))
5163 if (TYPE_NAME (type) == 0)
5164 warning ("case value `%ld' not in enumerated type",
5165 (long) TREE_INT_CST_LOW (n->low));
5167 warning ("case value `%ld' not in enumerated type `%s'",
5168 (long) TREE_INT_CST_LOW (n->low),
5169 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5172 : DECL_NAME (TYPE_NAME (type))));
5174 if (!tree_int_cst_equal (n->low, n->high))
5176 for (chain = TYPE_VALUES (type);
5177 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
5178 chain = TREE_CHAIN (chain))
5183 if (TYPE_NAME (type) == 0)
5184 warning ("case value `%ld' not in enumerated type",
5185 (long) TREE_INT_CST_LOW (n->high));
5187 warning ("case value `%ld' not in enumerated type `%s'",
5188 (long) TREE_INT_CST_LOW (n->high),
5189 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5192 : DECL_NAME (TYPE_NAME (type))));
5200 /* Terminate a case (Pascal) or switch (C) statement
5201 in which ORIG_INDEX is the expression to be tested.
5202 If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
5203 type as given in the source before any compiler conversions.
5204 Generate the code to test it and jump to the right place. */
5207 expand_end_case_type (orig_index, orig_type)
5208 tree orig_index, orig_type;
5210 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
5211 rtx default_label = 0;
5212 struct case_node *n;
5219 rtx before_case, end;
5220 struct nesting *thiscase = case_stack;
5221 tree index_expr, index_type;
5224 /* Don't crash due to previous errors. */
5225 if (thiscase == NULL)
5228 table_label = gen_label_rtx ();
5229 index_expr = thiscase->data.case_stmt.index_expr;
5230 index_type = TREE_TYPE (index_expr);
5231 unsignedp = TREE_UNSIGNED (index_type);
5232 if (orig_type == NULL)
5233 orig_type = TREE_TYPE (orig_index);
5235 do_pending_stack_adjust ();
5237 /* This might get a spurious warning in the presence of a syntax error;
5238 it could be fixed by moving the call to check_seenlabel after the
5239 check for error_mark_node, and copying the code of check_seenlabel that
5240 deals with case_stack->data.case_stmt.line_number_status /
5241 restore_line_number_status in front of the call to end_cleanup_deferral.
5242 However, this might miss some useful warnings in the presence of
5243 non-syntax errors. */
5246 /* An ERROR_MARK occurs for various reasons including invalid data type. */
5247 if (index_type != error_mark_node)
5249 /* If the switch expression was an enumerated type, check that
5250 exactly all enumeration literals are covered by the cases.
5251 The check is made when -Wswitch was specified and there is no
5252 default case, or when -Wswitch-enum was specified. */
5253 if (((warn_switch && !thiscase->data.case_stmt.default_label)
5254 || warn_switch_enum)
5255 && TREE_CODE (orig_type) == ENUMERAL_TYPE
5256 && TREE_CODE (index_expr) != INTEGER_CST)
5257 check_for_full_enumeration_handling (orig_type);
5259 if (warn_switch_default && !thiscase->data.case_stmt.default_label)
5260 warning ("switch missing default case");
5262 /* If we don't have a default-label, create one here,
5263 after the body of the switch. */
5264 if (thiscase->data.case_stmt.default_label == 0)
5266 thiscase->data.case_stmt.default_label
5267 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5268 expand_label (thiscase->data.case_stmt.default_label);
5270 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5272 before_case = get_last_insn ();
5274 if (thiscase->data.case_stmt.case_list
5275 && thiscase->data.case_stmt.case_list->left)
5276 thiscase->data.case_stmt.case_list
5277 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
5279 /* Simplify the case-list before we count it. */
5280 group_case_nodes (thiscase->data.case_stmt.case_list);
5282 /* Get upper and lower bounds of case values.
5283 Also convert all the case values to the index expr's data type. */
5286 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5288 /* Check low and high label values are integers. */
5289 if (TREE_CODE (n->low) != INTEGER_CST)
5291 if (TREE_CODE (n->high) != INTEGER_CST)
5294 n->low = convert (index_type, n->low);
5295 n->high = convert (index_type, n->high);
5297 /* Count the elements and track the largest and smallest
5298 of them (treating them as signed even if they are not). */
5306 if (INT_CST_LT (n->low, minval))
5308 if (INT_CST_LT (maxval, n->high))
5311 /* A range counts double, since it requires two compares. */
5312 if (! tree_int_cst_equal (n->low, n->high))
5316 /* Compute span of values. */
5318 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5320 end_cleanup_deferral ();
5324 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5326 emit_jump (default_label);
5329 /* If range of values is much bigger than number of values,
5330 make a sequence of conditional branches instead of a dispatch.
5331 If the switch-index is a constant, do it this way
5332 because we can optimize it. */
5334 else if (count < case_values_threshold ()
5335 || compare_tree_int (range, 10 * count) > 0
5336 /* RANGE may be signed, and really large ranges will show up
5337 as negative numbers. */
5338 || compare_tree_int (range, 0) < 0
5339 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5342 || TREE_CODE (index_expr) == INTEGER_CST
5343 || (TREE_CODE (index_expr) == COMPOUND_EXPR
5344 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
5346 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5348 /* If the index is a short or char that we do not have
5349 an insn to handle comparisons directly, convert it to
5350 a full integer now, rather than letting each comparison
5351 generate the conversion. */
5353 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5354 && ! have_insn_for (COMPARE, GET_MODE (index)))
5356 enum machine_mode wider_mode;
5357 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5358 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5359 if (have_insn_for (COMPARE, wider_mode))
5361 index = convert_to_mode (wider_mode, index, unsignedp);
5367 do_pending_stack_adjust ();
5369 index = protect_from_queue (index, 0);
5370 if (GET_CODE (index) == MEM)
5371 index = copy_to_reg (index);
5372 if (GET_CODE (index) == CONST_INT
5373 || TREE_CODE (index_expr) == INTEGER_CST)
5375 /* Make a tree node with the proper constant value
5376 if we don't already have one. */
5377 if (TREE_CODE (index_expr) != INTEGER_CST)
5380 = build_int_2 (INTVAL (index),
5381 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5382 index_expr = convert (index_type, index_expr);
5385 /* For constant index expressions we need only
5386 issue an unconditional branch to the appropriate
5387 target code. The job of removing any unreachable
5388 code is left to the optimisation phase if the
5389 "-O" option is specified. */
5390 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5391 if (! tree_int_cst_lt (index_expr, n->low)
5392 && ! tree_int_cst_lt (n->high, index_expr))
5396 emit_jump (label_rtx (n->code_label));
5398 emit_jump (default_label);
5402 /* If the index expression is not constant we generate
5403 a binary decision tree to select the appropriate
5404 target code. This is done as follows:
5406 The list of cases is rearranged into a binary tree,
5407 nearly optimal assuming equal probability for each case.
5409 The tree is transformed into RTL, eliminating
5410 redundant test conditions at the same time.
5412 If program flow could reach the end of the
5413 decision tree an unconditional jump to the
5414 default code is emitted. */
5417 = (TREE_CODE (orig_type) != ENUMERAL_TYPE
5418 && estimate_case_costs (thiscase->data.case_stmt.case_list));
5419 balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
5420 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5421 default_label, index_type);
5422 emit_jump_if_reachable (default_label);
5427 if (! try_casesi (index_type, index_expr, minval, range,
5428 table_label, default_label))
5430 index_type = thiscase->data.case_stmt.nominal_type;
5432 /* Index jumptables from zero for suitable values of
5433 minval to avoid a subtraction. */
5435 && compare_tree_int (minval, 0) > 0
5436 && compare_tree_int (minval, 3) < 0)
5438 minval = integer_zero_node;
5442 if (! try_tablejump (index_type, index_expr, minval, range,
5443 table_label, default_label))
5447 /* Get table of labels to jump to, in order of case index. */
5449 ncases = tree_low_cst (range, 0) + 1;
5450 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
5451 memset ((char *) labelvec, 0, ncases * sizeof (rtx));
5453 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5455 /* Compute the low and high bounds relative to the minimum
5456 value since that should fit in a HOST_WIDE_INT while the
5457 actual values may not. */
5459 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5460 n->low, minval)), 1);
5461 HOST_WIDE_INT i_high
5462 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5463 n->high, minval)), 1);
5466 for (i = i_low; i <= i_high; i ++)
5468 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
5471 /* Fill in the gaps with the default. */
5472 for (i = 0; i < ncases; i++)
5473 if (labelvec[i] == 0)
5474 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
5476 /* Output the table */
5477 emit_label (table_label);
5479 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
5480 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5481 gen_rtx_LABEL_REF (Pmode, table_label),
5482 gen_rtvec_v (ncases, labelvec),
5483 const0_rtx, const0_rtx));
5485 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5486 gen_rtvec_v (ncases, labelvec)));
5488 /* If the case insn drops through the table,
5489 after the table we must jump to the default-label.
5490 Otherwise record no drop-through after the table. */
5491 #ifdef CASE_DROPS_THROUGH
5492 emit_jump (default_label);
5498 before_case = NEXT_INSN (before_case);
5499 end = get_last_insn ();
5500 if (squeeze_notes (&before_case, &end))
5502 reorder_insns (before_case, end,
5503 thiscase->data.case_stmt.start);
5506 end_cleanup_deferral ();
5508 if (thiscase->exit_label)
5509 emit_label (thiscase->exit_label);
5511 POPSTACK (case_stack);
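/* A minimal illustrative sketch (not GCC code) of the dispatch decision
   made above: a jump table is used only when there are enough case
   labels and they are dense within [minval, maxval]; otherwise a tree of
   compare-and-jump insns is emitted.  THRESHOLD stands in for
   case_values_threshold ().  */
#if 0
static int
use_jump_table_p (long count, long range, long threshold)
{
  if (count < threshold)        /* too few labels to pay for a table */
    return 0;
  if (range < 0)                /* range overflowed when treated as signed */
    return 0;
  if (range > 10 * count)       /* labels too sparse across the range */
    return 0;
  return 1;
}
#endif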
5516 /* Convert the tree NODE into a list linked by the right field, with the left
5517 field zeroed. RIGHT is used for recursion; it is a list to be placed
5518 rightmost in the resulting list. */
5520 static struct case_node *
5521 case_tree2list (node, right)
5522 struct case_node *node, *right;
5524 struct case_node *left;
5527 right = case_tree2list (node->right, right);
5529 node->right = right;
5530 if ((left = node->left))
5533 return case_tree2list (left, node);
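/* A minimal illustrative sketch (not GCC code) of the same flattening on
   a toy node type: the right links end up chaining the nodes in
   ascending order and the left links are cleared, exactly the shape the
   rest of this file expects after case_tree2list.  */
#if 0
struct toy_node
{
  struct toy_node *left, *right;
  long value;
};

static struct toy_node *
toy_tree2list (struct toy_node *node, struct toy_node *right)
{
  struct toy_node *left;

  if (node->right)
    right = toy_tree2list (node->right, right);

  node->right = right;
  if ((left = node->left))
    {
      node->left = 0;
      return toy_tree2list (left, node);
    }

  return node;
}
#endif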
5539 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5542 do_jump_if_equal (op1, op2, label, unsignedp)
5543 rtx op1, op2, label;
5546 if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
5548 if (INTVAL (op1) == INTVAL (op2))
5552 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
5553 (GET_MODE (op1) == VOIDmode
5554 ? GET_MODE (op2) : GET_MODE (op1)),
5558 /* Not all case values are encountered equally. This function
5559 uses a heuristic to weight case labels, in cases where that
5560 looks like a reasonable thing to do.
5562 Right now, all we try to guess is text, and we establish the following weights:
5565 chars above space: 16
5574 If we find any cases in the switch that are not either -1 or in the range
5575 of valid ASCII characters, or are control characters other than those
5576 commonly used with "\", don't treat this switch as scanning text.
5578 Return 1 if these nodes are suitable for cost estimation, otherwise return 0. */
5582 estimate_case_costs (node)
5585 tree min_ascii = integer_minus_one_node;
5586 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5590 /* If we haven't already made the cost table, make it now. Note that the
5591 lower bound of the table is -1, not zero. */
5593 if (! cost_table_initialized)
5595 cost_table_initialized = 1;
5597 for (i = 0; i < 128; i++)
5600 COST_TABLE (i) = 16;
5601 else if (ISPUNCT (i))
5603 else if (ISCNTRL (i))
5604 COST_TABLE (i) = -1;
5607 COST_TABLE (' ') = 8;
5608 COST_TABLE ('\t') = 4;
5609 COST_TABLE ('\0') = 4;
5610 COST_TABLE ('\n') = 2;
5611 COST_TABLE ('\f') = 1;
5612 COST_TABLE ('\v') = 1;
5613 COST_TABLE ('\b') = 1;
5616 /* See if all the case expressions look like text. It is text if the
5617 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5618 as signed arithmetic since we don't want to ever access cost_table with a
5619 value less than -1. Also check that none of the constants in a range
5620 are strange control characters. */
5622 for (n = node; n; n = n->right)
5624 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5627 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
5628 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
5629 if (COST_TABLE (i) < 0)
5633 /* All interesting values are within the range of interesting
5634 ASCII characters. */
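/* A minimal illustrative sketch (not GCC code) of building a weight
   table like the one above with <ctype.h>.  The alphanumeric and
   control-character weights and the per-escape entries mirror the
   assignments visible above; weights not visible in this excerpt
   (punctuation, default) are assumptions for the example.  */
#if 0
#include <ctype.h>

static void
init_toy_cost_table (int table[128])
{
  int i;

  for (i = 0; i < 128; i++)
    {
      if (isalnum (i))
        table[i] = 16;
      else if (ispunct (i))
        table[i] = 8;           /* assumed weight */
      else if (iscntrl (i))
        table[i] = -1;          /* "strange" control character */
      else
        table[i] = 12;          /* assumed default weight */
    }

  /* The escapes commonly written with "\" keep small positive weights.  */
  table[' '] = 8;
  table['\t'] = 4;
  table['\0'] = 4;
  table['\n'] = 2;
  table['\f'] = 1;
  table['\v'] = 1;
  table['\b'] = 1;
}
#endif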
5638 /* Scan an ordered list of case nodes
5639 combining those with consecutive values or ranges.
5641 E.g. three separate entries 1: 2: 3: become one entry 1..3: */
5644 group_case_nodes (head)
5647 case_node_ptr node = head;
5651 rtx lb = next_real_insn (label_rtx (node->code_label));
5653 case_node_ptr np = node;
5655 /* Try to group the successors of NODE with NODE. */
5656 while (((np = np->right) != 0)
5657 /* Do they jump to the same place? */
5658 && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
5659 || (lb != 0 && lb2 != 0
5660 && simplejump_p (lb)
5661 && simplejump_p (lb2)
5662 && rtx_equal_p (SET_SRC (PATTERN (lb)),
5663 SET_SRC (PATTERN (lb2)))))
5664 /* Are their ranges consecutive? */
5665 && tree_int_cst_equal (np->low,
5666 fold (build (PLUS_EXPR,
5667 TREE_TYPE (node->high),
5670 /* An overflow is not consecutive. */
5671 && tree_int_cst_lt (node->high,
5672 fold (build (PLUS_EXPR,
5673 TREE_TYPE (node->high),
5675 integer_one_node))))
5677 node->high = np->high;
5679 /* NP is the first node after NODE which can't be grouped with it.
5680 Delete the nodes in between, and move on to that node. */
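/* A minimal illustrative sketch (not GCC code) of the same grouping on a
   toy node type: adjacent entries whose ranges are consecutive and whose
   targets are identical collapse into a single range, as the loop above
   does for case labels that jump to the same place (the real code also
   guards against overflow, which this sketch omits).  */
#if 0
struct toy_range
{
  struct toy_range *right;      /* list link, as in case_node */
  long low, high;
  void *target;                 /* stands in for the jump destination */
};

static void
group_toy_ranges (struct toy_range *head)
{
  struct toy_range *node;

  for (node = head; node; node = node->right)
    while (node->right
           && node->right->target == node->target   /* same destination */
           && node->right->low == node->high + 1)   /* consecutive values */
      {
        node->high = node->right->high;             /* absorb the neighbor */
        node->right = node->right->right;           /* and unlink it */
      }
}
#endif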
5686 /* Take an ordered list of case nodes
5687 and transform them into a near optimal binary tree,
5688 on the assumption that any target code selection value is as
5689 likely as any other.
5691 The transformation is performed by splitting the ordered
5692 list into two equal sections plus a pivot. The parts are
5693 then attached to the pivot as left and right branches. Each
5694 branch is then transformed recursively. */
5697 balance_case_nodes (head, parent)
5698 case_node_ptr *head;
5699 case_node_ptr parent;
5712 /* Count the number of entries on branch. Also count the ranges. */
5716 if (!tree_int_cst_equal (np->low, np->high))
5720 cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
5724 cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
5732 /* Split this list if it is long enough for that to help. */
5737 /* Find the place in the list that bisects the list's total cost.
5738 Here I gets half the total cost. */
5743 /* Skip nodes while their cost does not reach that amount. */
5744 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5745 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
5746 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
5749 npp = &(*npp)->right;
5754 /* Leave this branch lopsided, but optimize left-hand
5755 side and fill in `parent' fields for right-hand side. */
5757 np->parent = parent;
5758 balance_case_nodes (&np->left, np);
5759 for (; np->right; np = np->right)
5760 np->right->parent = np;
5764 /* If there are just three nodes, split at the middle one. */
5766 npp = &(*npp)->right;
5769 /* Find the place in the list that bisects the list's total cost,
5770 where ranges count as 2.
5771 Here I gets half the total cost. */
5772 i = (i + ranges + 1) / 2;
5775 /* Skip nodes while their cost does not reach that amount. */
5776 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5781 npp = &(*npp)->right;
5786 np->parent = parent;
5789 /* Optimize each of the two split parts. */
5790 balance_case_nodes (&np->left, np);
5791 balance_case_nodes (&np->right, np);
5795 /* Else leave this branch as one level,
5796 but fill in `parent' fields. */
5798 np->parent = parent;
5799 for (; np->right; np = np->right)
5800 np->right->parent = np;
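/* A minimal illustrative sketch (not GCC code) of the transformation
   above without the cost weighting: split the sorted, right-linked list
   at its midpoint, make the middle node the pivot, and balance the two
   halves recursively.  Branches of one or two nodes are left as a single
   level with only the parent links filled in, as above.  */
#if 0
struct toy_case
{
  struct toy_case *left, *right, *parent;
  long low, high;
};

static void
balance_toy_cases (struct toy_case **head, struct toy_case *parent)
{
  struct toy_case *np = *head, *first, **npp;
  int count = 0, i;

  if (np == 0)
    return;

  for (; np; np = np->right)
    count++;

  if (count <= 2)
    {
      /* Leave short branches as one level; just fill in parent links.  */
      np = *head;
      np->parent = parent;
      for (; np->right; np = np->right)
        np->right->parent = np;
      return;
    }

  /* Walk halfway down the list; the node reached becomes the pivot.  */
  first = *head;
  npp = head;
  for (i = count / 2; i > 0; i--)
    npp = &(*npp)->right;

  np = *npp;                    /* the pivot */
  *npp = 0;                     /* terminate the left half */
  *head = np;
  np->parent = parent;
  np->left = first == np ? 0 : first;

  /* The right half already hangs off np->right.  */
  balance_toy_cases (&np->left, np);
  balance_toy_cases (&np->right, np);
}
#endif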
5805 /* Search the parent sections of the case node tree
5806 to see if a test for the lower bound of NODE would be redundant.
5807 INDEX_TYPE is the type of the index expression.
5809 The instructions to generate the case decision tree are
5810 output in the same order as nodes are processed so it is
5811 known that if a parent node checks the range of the current
5812 node minus one that the current node is bounded at its lower
5813 span. Thus the test would be redundant. */
5816 node_has_low_bound (node, index_type)
5821 case_node_ptr pnode;
5823 /* If the lower bound of this node is the lowest value in the index type,
5824 we need not test it. */
5826 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5829 /* If this node has a left branch, the value at the left must be less
5830 than that at this node, so it cannot be bounded at the bottom and
5831 we need not bother testing any further. */
5836 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5837 node->low, integer_one_node));
5839 /* If the subtraction above overflowed, we can't verify anything.
5840 Otherwise, look for a parent that tests our value - 1. */
5842 if (! tree_int_cst_lt (low_minus_one, node->low))
5845 for (pnode = node->parent; pnode; pnode = pnode->parent)
5846 if (tree_int_cst_equal (low_minus_one, pnode->high))
5852 /* Search the parent sections of the case node tree
5853 to see if a test for the upper bound of NODE would be redundant.
5854 INDEX_TYPE is the type of the index expression.
5856 The instructions to generate the case decision tree are
5857 output in the same order as nodes are processed so it is
5858 known that if a parent node checks the range of the current
5859 node plus one that the current node is bounded at its upper
5860 span. Thus the test would be redundant. */
5863 node_has_high_bound (node, index_type)
5868 case_node_ptr pnode;
5870 /* If there is no upper bound, obviously no test is needed. */
5872 if (TYPE_MAX_VALUE (index_type) == NULL)
5875 /* If the upper bound of this node is the highest value in the type
5876 of the index expression, we need not test against it. */
5878 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5881 /* If this node has a right branch, the value at the right must be greater
5882 than that at this node, so it cannot be bounded at the top and
5883 we need not bother testing any further. */
5888 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5889 node->high, integer_one_node));
5891 /* If the addition above overflowed, we can't verify anything.
5892 Otherwise, look for a parent that tests our value + 1. */
5894 if (! tree_int_cst_lt (node->high, high_plus_one))
5897 for (pnode = node->parent; pnode; pnode = pnode->parent)
5898 if (tree_int_cst_equal (high_plus_one, pnode->low))
5904 /* Search the parent sections of the
5905 case node tree to see if both tests for the upper and lower
5906 bounds of NODE would be redundant. */
5909 node_is_bounded (node, index_type)
5913 return (node_has_low_bound (node, index_type)
5914 && node_has_high_bound (node, index_type));
5917 /* Emit an unconditional jump to LABEL unless it would be dead code. */
5920 emit_jump_if_reachable (label)
5923 if (GET_CODE (get_last_insn ()) != BARRIER)
5927 /* Emit step-by-step code to select a case for the value of INDEX.
5928 The thus generated decision tree follows the form of the
5929 case-node binary tree NODE, whose nodes represent test conditions.
5930 INDEX_TYPE is the type of the index of the switch.
5932 Care is taken to prune redundant tests from the decision tree
5933 by detecting any boundary conditions already checked by
5934 emitted rtx. (See node_has_high_bound, node_has_low_bound
5935 and node_is_bounded, above.)
5937 Where the test conditions can be shown to be redundant we emit
5938 an unconditional jump to the target code. As a further
5939 optimization, the subordinates of a tree node are examined to
5940 check for bounded nodes. In this case conditional and/or
5941 unconditional jumps as a result of the boundary check for the
5942 current node are arranged to target the subordinates' associated
5943 code for out of bound conditions on the current node.
5945 We can assume that when control reaches the code generated here,
5946 the index value has already been compared with the parents
5947 of this node, and determined to be on the same side of each parent
5948 as this node is. Thus, if this node tests for the value 51,
5949 and a parent tested for 52, we don't need to consider
5950 the possibility of a value greater than 51. If another parent
5951 tests for the value 50, then this node need not test anything. */
5954 emit_case_nodes (index, node, default_label, index_type)
5960 /* If INDEX has an unsigned type, we must make unsigned branches. */
5961 int unsignedp = TREE_UNSIGNED (index_type);
5962 enum machine_mode mode = GET_MODE (index);
5963 enum machine_mode imode = TYPE_MODE (index_type);
5965 /* See if our parents have already tested everything for us.
5966 If they have, emit an unconditional jump for this node. */
5967 if (node_is_bounded (node, index_type))
5968 emit_jump (label_rtx (node->code_label));
5970 else if (tree_int_cst_equal (node->low, node->high))
5972 /* Node is single valued. First see if the index expression matches
5973 this node and then check our children, if any. */
5975 do_jump_if_equal (index,
5976 convert_modes (mode, imode,
5977 expand_expr (node->low, NULL_RTX,
5980 label_rtx (node->code_label), unsignedp);
5982 if (node->right != 0 && node->left != 0)
5984 /* This node has children on both sides.
5985 Dispatch to one side or the other
5986 by comparing the index value with this node's value.
5987 If one subtree is bounded, check that one first,
5988 so we can avoid real branches in the tree. */
5990 if (node_is_bounded (node->right, index_type))
5992 emit_cmp_and_jump_insns (index,
5995 expand_expr (node->high, NULL_RTX,
5998 GT, NULL_RTX, mode, unsignedp,
5999 label_rtx (node->right->code_label));
6000 emit_case_nodes (index, node->left, default_label, index_type);
6003 else if (node_is_bounded (node->left, index_type))
6005 emit_cmp_and_jump_insns (index,
6008 expand_expr (node->high, NULL_RTX,
6011 LT, NULL_RTX, mode, unsignedp,
6012 label_rtx (node->left->code_label));
6013 emit_case_nodes (index, node->right, default_label, index_type);
6018 /* Neither node is bounded. First distinguish the two sides;
6019 then emit the code for one side at a time. */
6021 tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6023 /* See if the value is on the right. */
6024 emit_cmp_and_jump_insns (index,
6027 expand_expr (node->high, NULL_RTX,
6030 GT, NULL_RTX, mode, unsignedp,
6031 label_rtx (test_label));
6033 /* Value must be on the left.
6034 Handle the left-hand subtree. */
6035 emit_case_nodes (index, node->left, default_label, index_type);
6036 /* If left-hand subtree does nothing,
6038 emit_jump_if_reachable (default_label);
6040 /* Code branches here for the right-hand subtree. */
6041 expand_label (test_label);
6042 emit_case_nodes (index, node->right, default_label, index_type);
6046 else if (node->right != 0 && node->left == 0)
6048 /* Here we have a right child but no left so we issue conditional
6049 branch to default and process the right child.
6051 Omit the conditional branch to default if it would avoid only one
6052 right child; it costs too much space to save so little time. */
6054 if (node->right->right || node->right->left
6055 || !tree_int_cst_equal (node->right->low, node->right->high))
6057 if (!node_has_low_bound (node, index_type))
6059 emit_cmp_and_jump_insns (index,
6062 expand_expr (node->high, NULL_RTX,
6065 LT, NULL_RTX, mode, unsignedp,
6069 emit_case_nodes (index, node->right, default_label, index_type);
6072 /* We cannot process node->right normally
6073 since we haven't ruled out the numbers less than
6074 this node's value. So handle node->right explicitly. */
6075 do_jump_if_equal (index,
6078 expand_expr (node->right->low, NULL_RTX,
6081 label_rtx (node->right->code_label), unsignedp);
6084 else if (node->right == 0 && node->left != 0)
6086 /* Just one subtree, on the left. */
6087 if (node->left->left || node->left->right
6088 || !tree_int_cst_equal (node->left->low, node->left->high))
6090 if (!node_has_high_bound (node, index_type))
6092 emit_cmp_and_jump_insns (index,
6095 expand_expr (node->high, NULL_RTX,
6098 GT, NULL_RTX, mode, unsignedp,
6102 emit_case_nodes (index, node->left, default_label, index_type);
6105 /* We cannot process node->left normally
6106 since we haven't ruled out the numbers less than
6107 this node's value. So handle node->left explicitly. */
6108 do_jump_if_equal (index,
6111 expand_expr (node->left->low, NULL_RTX,
6114 label_rtx (node->left->code_label), unsignedp);
6119 /* Node is a range. These cases are very similar to those for a single
6120 value, except that we do not start by testing whether this node
6121 is the one to branch to. */
6123 if (node->right != 0 && node->left != 0)
6125 /* Node has subtrees on both sides.
6126 If the right-hand subtree is bounded,
6127 test for it first, since we can go straight there.
6128 Otherwise, we need to make a branch in the control structure,
6129 then handle the two subtrees. */
6130 tree test_label = 0;
6132 if (node_is_bounded (node->right, index_type))
6133 /* Right hand node is fully bounded so we can eliminate any
6134 testing and branch directly to the target code. */
6135 emit_cmp_and_jump_insns (index,
6138 expand_expr (node->high, NULL_RTX,
6141 GT, NULL_RTX, mode, unsignedp,
6142 label_rtx (node->right->code_label));
6145 /* Right hand node requires testing.
6146 Branch to a label where we will handle it later. */
6148 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6149 emit_cmp_and_jump_insns (index,
6152 expand_expr (node->high, NULL_RTX,
6155 GT, NULL_RTX, mode, unsignedp,
6156 label_rtx (test_label));
6159 /* Value belongs to this node or to the left-hand subtree. */
6161 emit_cmp_and_jump_insns (index,
6164 expand_expr (node->low, NULL_RTX,
6167 GE, NULL_RTX, mode, unsignedp,
6168 label_rtx (node->code_label));
6170 /* Handle the left-hand subtree. */
6171 emit_case_nodes (index, node->left, default_label, index_type);
6173 /* If right node had to be handled later, do that now. */
6177 /* If the left-hand subtree fell through,
6178 don't let it fall into the right-hand subtree. */
6179 emit_jump_if_reachable (default_label);
6181 expand_label (test_label);
6182 emit_case_nodes (index, node->right, default_label, index_type);
6186 else if (node->right != 0 && node->left == 0)
6188 /* Deal with values to the left of this node,
6189 if they are possible. */
6190 if (!node_has_low_bound (node, index_type))
6192 emit_cmp_and_jump_insns (index,
6195 expand_expr (node->low, NULL_RTX,
6198 LT, NULL_RTX, mode, unsignedp,
6202 /* Value belongs to this node or to the right-hand subtree. */
6204 emit_cmp_and_jump_insns (index,
6207 expand_expr (node->high, NULL_RTX,
6210 LE, NULL_RTX, mode, unsignedp,
6211 label_rtx (node->code_label));
6213 emit_case_nodes (index, node->right, default_label, index_type);
6216 else if (node->right == 0 && node->left != 0)
6218 /* Deal with values to the right of this node,
6219 if they are possible. */
6220 if (!node_has_high_bound (node, index_type))
6222 emit_cmp_and_jump_insns (index,
6225 expand_expr (node->high, NULL_RTX,
6228 GT, NULL_RTX, mode, unsignedp,
6232 /* Value belongs to this node or to the left-hand subtree. */
6234 emit_cmp_and_jump_insns (index,
6237 expand_expr (node->low, NULL_RTX,
6240 GE, NULL_RTX, mode, unsignedp,
6241 label_rtx (node->code_label));
6243 emit_case_nodes (index, node->left, default_label, index_type);
6248 /* Node has no children so we check low and high bounds to remove
6249 redundant tests. Only one of the bounds can exist,
6250 since otherwise this node is bounded--a case tested already. */
6251 int high_bound = node_has_high_bound (node, index_type);
6252 int low_bound = node_has_low_bound (node, index_type);
6254 if (!high_bound && low_bound)
6256 emit_cmp_and_jump_insns (index,
6259 expand_expr (node->high, NULL_RTX,
6262 GT, NULL_RTX, mode, unsignedp,
6266 else if (!low_bound && high_bound)
6268 emit_cmp_and_jump_insns (index,
6271 expand_expr (node->low, NULL_RTX,
6274 LT, NULL_RTX, mode, unsignedp,
6277 else if (!low_bound && !high_bound)
6279 /* Widen LOW and HIGH to the same width as INDEX. */
6280 tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
6281 tree low = build1 (CONVERT_EXPR, type, node->low);
6282 tree high = build1 (CONVERT_EXPR, type, node->high);
6283 rtx low_rtx, new_index, new_bound;
6285 /* Instead of doing two branches, emit one unsigned branch for
6286 (index-low) > (high-low). */
6287 low_rtx = expand_expr (low, NULL_RTX, mode, 0);
6288 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
6289 NULL_RTX, unsignedp,
6291 new_bound = expand_expr (fold (build (MINUS_EXPR, type,
6295 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
6296 mode, 1, default_label);
6299 emit_jump (label_rtx (node->code_label));
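/* A minimal illustrative sketch (not GCC code) of the single-branch
   range test emitted just above: subtracting LOW and comparing unsigned
   folds the two tests `index < low' and `index > high' into one, because
   values below LOW wrap around to very large unsigned numbers.  */
#if 0
static int
in_case_range (long index, long low, long high)
{
  /* Equivalent to (index >= low && index <= high) when low <= high.  */
  return ((unsigned long) index - (unsigned long) low
          <= (unsigned long) high - (unsigned long) low);
}
#endif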
6304 #include "gt-stmt.h"