/* Expands front end tree to back end RTL for GCC
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
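/* A minimal usage sketch (illustrative, not part of the original
   file): how a front end might drive the cond-expansion entry points
   named above for a simple if-then.  The wrapper function itself is
   hypothetical, so it is kept inside "#if 0".  */
#if 0
static void
example_expand_if_then (tree cond, tree then_clause)
{
  expand_start_cond (cond, 0);    /* emit the test; jump past the clause */
  expand_expr_stmt (then_clause); /* emit the then-clause */
  expand_end_cond ();             /* define the label that ends the if */
}
#endif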
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "expr.h"
#include "libfuncs.h"
#include "hard-reg-set.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
#include "langhooks.h"
#include "predict.h"
#include "optabs.h"
#include "target.h"
#include "regs.h"
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
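/* Illustration (not from the original source): a dense switch such as

       switch (x) { case 1: ... case 2: ... case 3: ... case 4: ... }

   can be emitted as a branch table indexed by `x - 1', while a sparse
   one such as `case 1: ... case 1000: ... case 1000000: ...' becomes
   a balanced binary tree of compare-and-jump insns on the case
   values.  */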
struct case_node GTY(())
{
  struct case_node *left;   /* Left son in binary tree */
  struct case_node *right;  /* Right son in binary tree; also node chain */
  struct case_node *parent; /* Parent of node in binary tree */
  tree low;                 /* Lowest index value for this label */
  tree high;                /* Highest index value for this label */
  tree code_label;          /* Label to jump to when node matches */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short cost_table_[129];
static int use_cost_table;
static int cost_table_initialized;

/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
   is unsigned.  */
#define COST_TABLE(I)  cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
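/* For instance, COST_TABLE (-1) expands to cost_table_[0] and
   COST_TABLE (127) to cost_table_[128], so case values -1 through 127
   exactly cover the 129 slots of cost_table_[].  */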
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, conditional statements have `cond_stack'.  Each object
   points to the next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
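/* Sketch of the protocol (illustrative): every expand_start_WHATEVER
   allocates one `struct nesting', links it onto both nesting_stack
   and its own per-construct stack (e.g. cond_stack), and the matching
   expand_end_WHATEVER pops it again, e.g. via the POPSTACK macro
   below.  A construct started with an exit flag gets a CODE_LABEL in
   `exit_label' and thereby becomes a visible `break' target.  */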
struct nesting GTY(())
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  enum nesting_desc {
    COND_NESTING,
    BLOCK_NESTING,
    CASE_NESTING
  } desc;
  union nesting_u
    {
      /* For conds (if-then and if-then-else statements).  */
      struct nesting_cond
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} GTY ((tag ("COND_NESTING"))) cond;
      /* For variable binding contours.  */
      struct nesting_block
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0,
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Nonzero if this is associated with an EH region.  */
	  int exception_region;
	  /* The saved target_temp_slot_level from our outer block.
	     We may reset target_temp_slot_level to be the level of
	     this block, if that is done, target_temp_slot_level
	     reverts to the saved target_temp_slot_level at the very
	     end of the block.  */
	  int block_target_temp_slot_level;
	  /* True if we are currently emitting insns in an area of
	     output code that is controlled by a conditional
	     expression.  This is used by the cleanup handling code to
	     generate conditional cleanup actions.  */
	  int conditional_code;
	  /* A place to move the start of the exception region for any
	     of the conditional cleanups, must be at the end or after
	     the start of the last unconditional cleanup, and before any
	     conditional branch points.  */
	  rtx last_unconditional_cleanup;
	} GTY ((tag ("BLOCK_NESTING"))) block;
      /* For switch (C) or case (Pascal) statements.  */
      struct nesting_case
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels; it is first built as an AVL tree.
	     During expand_end_case, this is converted to a list, and may be
	     rearranged into a nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Name of this kind of statement, for warnings.  */
	  const char *printname;
	  /* Used to save no_line_numbers till we see the first case label.
	     We set this to -1 when we see the first case label in this
	     case statement.  */
	  int line_number_status;
	} GTY ((tag ("CASE_NESTING"))) case_stmt;
    } GTY ((desc ("%1.desc"))) data;
};
/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() ggc_alloc (sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)					\
do { struct nesting *target = STACK;			\
     struct nesting *this;				\
     do { this = nesting_stack;				\
	  if (cond_stack == this)			\
	    cond_stack = cond_stack->next;		\
	  if (block_stack == this)			\
	    block_stack = block_stack->next;		\
	  if (stack_block_stack == this)		\
	    stack_block_stack = stack_block_stack->next; \
	  if (case_stack == this)			\
	    case_stack = case_stack->next;		\
	  nesting_depth = nesting_stack->depth - 1;	\
	  nesting_stack = this->all; }			\
     while (this != target); } while (0)
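/* Example (illustrative): `POPSTACK (block_stack);' pops entries off
   nesting_stack one at a time, unlinking each from whichever typed
   stack it heads, until the entry that was at the head of block_stack
   has been removed.  */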
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */
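/* User-level illustration (assumed example):

       {
	 char buf[n];	-- contour with a dynamic stack level
	 goto done;	-- label not yet defined: a fixup is recorded
	 ...
       }
      done:;

   The stack restore for the jump cannot be emitted until `done' is
   seen, so expand_goto parks the jump on this list and fixup_gotos
   patches it up once the label is defined.  */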
struct goto_fixup GTY(())
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain GTY(())
{
  /* Points to following element in this chain.  */
  struct label_chain *next;
  tree label;
};
struct stmt_status GTY(())
{
  /* Chain of all pending binding contours.  */
  struct nesting * x_block_stack;

  /* If any new stacks are added here, add them to POPSTACKS too.  */

  /* Chain of all pending binding contours that restore stack levels
     or have cleanups.  */
  struct nesting * x_stack_block_stack;

  /* Chain of all pending conditional statements.  */
  struct nesting * x_cond_stack;

  /* Chain of all pending case or switch statements.  */
  struct nesting * x_case_stack;

  /* Separate chain including all of the above,
     chained through the `all' field.  */
  struct nesting * x_nesting_stack;

  /* Number of entries on nesting_stack now.  */
  int x_nesting_depth;

  /* Number of binding contours started so far in this function.  */
  int x_block_start_count;

  /* Each time we expand an expression-statement,
     record the expr's type and its RTL value here.  */
  tree x_last_expr_type;
  rtx x_last_expr_value;
  rtx x_last_expr_alt_rtl;

  /* Nonzero if within a ({...}) grouping, in which case we must
     always compute a value for each expr-stmt in case it is the last one.  */
  int x_expr_stmts_for_value;

  /* Location of last line-number note, whether we actually
     emitted it or not.  */
  location_t x_emit_locus;

  struct goto_fixup *x_goto_fixup_chain;
};
#define block_stack (cfun->stmt->x_block_stack)
#define stack_block_stack (cfun->stmt->x_stack_block_stack)
#define cond_stack (cfun->stmt->x_cond_stack)
#define case_stack (cfun->stmt->x_case_stack)
#define nesting_stack (cfun->stmt->x_nesting_stack)
#define nesting_depth (cfun->stmt->x_nesting_depth)
#define current_block_start_count (cfun->stmt->x_block_start_count)
#define last_expr_type (cfun->stmt->x_last_expr_type)
#define last_expr_value (cfun->stmt->x_last_expr_value)
#define last_expr_alt_rtl (cfun->stmt->x_last_expr_alt_rtl)
#define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
#define emit_locus (cfun->stmt->x_emit_locus)
#define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)

/* Nonzero if we are using EH to handle cleanups.  */
int using_eh_for_cleanups_p = 0;
static int n_occurrences (int, const char *);
static bool decl_conflicts_with_clobbers_p (tree, const HARD_REG_SET);
static void expand_goto_internal (tree, rtx, rtx);
static int expand_fixup (tree, rtx, rtx);
static void expand_nl_goto_receiver (void);
static void fixup_gotos (struct nesting *, rtx, tree, rtx, int);
static bool check_operand_nalternatives (tree, tree);
static bool check_unique_operand_names (tree, tree);
static char *resolve_operand_name_1 (char *, tree, tree);
static void expand_null_return_1 (rtx);
static enum br_predictor return_prediction (rtx);
static rtx shift_return_value (rtx);
static void expand_value_return (rtx);
static void expand_cleanups (tree, int, int);
static void do_jump_if_equal (rtx, rtx, rtx, int);
static int estimate_case_costs (case_node_ptr);
static bool same_case_target_p (rtx, rtx);
static void strip_default_case_nodes (case_node_ptr *, rtx);
static bool lshift_cheap_p (void);
static int case_bit_test_cmp (const void *, const void *);
static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
static void group_case_nodes (case_node_ptr);
static void balance_case_nodes (case_node_ptr *, case_node_ptr);
static int node_has_low_bound (case_node_ptr, tree);
static int node_has_high_bound (case_node_ptr, tree);
static int node_is_bounded (case_node_ptr, tree);
static void emit_jump_if_reachable (rtx);
static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
static struct case_node *case_tree2list (case_node *, case_node *);
void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}

void
init_stmt_for_function (void)
{
  cfun->stmt = ggc_alloc_cleared (sizeof (struct stmt_status));
}

/* Record the current file and line.  Called from emit_line_note.  */

void
set_file_and_line_for_stmt (location_t location)
{
  /* If we're outputting an inline function, and we add a line note,
     there may be no CFUN->STMT information.  So, there's no need to
     update it.  */
  if (cfun->stmt)
    emit_locus = location;
}
/* Emit a no-op instruction.  */

void
emit_nop (void)
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || (GET_CODE (last_insn) == NOTE
	      && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (tree label)
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (!DECL_RTL_SET_P (label))
    {
      rtx r = gen_label_rtx ();
      SET_DECL_RTL (label, r);
      if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
	LABEL_PRESERVE_P (r) = 1;
    }

  return DECL_RTL (label);
}

/* As above, but also put it on the forced-reference list of the
   function that contains it.  */
rtx
force_label_rtx (tree label)
{
  rtx ref = label_rtx (label);
  tree function = decl_function_context (label);
  struct function *p;

  if (!function)
    abort ();

  if (function != current_function_decl)
    p = find_function_data (function);
  else
    p = cfun;

  p->expr->x_forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref,
						p->expr->x_forced_labels);
  return ref;
}
/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (rtx label)
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (tree exp)
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

  x = convert_memory_address (Pmode, x);

  emit_queue ();
  do_pending_stack_adjust ();
  emit_indirect_jump (x);
}
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */
void
expand_label (tree label)
{
  struct label_chain *p;
  rtx label_r = label_rtx (label);

  do_pending_stack_adjust ();
  emit_label (label_r);
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (DECL_NONLOCAL (label))
    {
      expand_nl_goto_receiver ();
      nonlocal_goto_handler_labels
	= gen_rtx_EXPR_LIST (VOIDmode, label_r,
			     nonlocal_goto_handler_labels);
    }

  if (FORCED_LABEL (label))
    forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);

  if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
    maybe_set_first_label_num (label_r);

  if (stack_block_stack != 0)
    {
      p = ggc_alloc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (tree label)
{
#ifdef ENABLE_CHECKING
  /* Check for a nonlocal goto to a containing function.  Should have
     gotten translated to __builtin_nonlocal_goto.  */
  tree context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    abort ();
#endif

  expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (tree body, rtx label, rtx last_insn)
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this
	     would clobber the stack pointer.  This one should be
	     deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();

	  /* Don't do this adjust if it's to the end label and this function
	     is to return with a depressed stack pointer.  */
	  if (label == return_label
	      && (((TREE_CODE (TREE_TYPE (current_function_decl))
		    == FUNCTION_TYPE)
		   && (TYPE_RETURNS_STACK_DEPRESSED
		       (TREE_TYPE (current_function_decl))))))
	    ;
	  else
	    emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */
static int
expand_fixup (tree tree_label, rtx rtl_label, rtx last_insn)
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup = ggc_alloc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it right.  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.

	 Note that optimization passes might move the *_BLOCK notes away,
	 so we use a NOTE_INSN_DELETED as a placeholder.  */

      {
	rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();
	rtx start;
	rtx end;
	tree block;

	block = make_node (BLOCK);
	TREE_USED (block) = 1;

	BLOCK_CHAIN (block)
	  = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
	BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
	  = block;

	start_sequence ();
	start = emit_note (NOTE_INSN_BLOCK_BEG);
	NOTE_BLOCK (start) = block;
	fixup->before_jump = emit_note (NOTE_INSN_DELETED);
	end = emit_note (NOTE_INSN_BLOCK_END);
	NOTE_BLOCK (end) = block;
	fixup->context = block;
	end_sequence ();
	emit_insn_after (start, original_before_jump);
      }

      fixup->block_start_count = current_block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= ((block->data.block.outer_cleanups
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (rtx first_insn)
{
  fixup_gotos (NULL, NULL_RTX, NULL_TREE, first_insn, 0);
}
/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN positive means report error if there is a jump into this
   contour from before the beginning of the contour.  This is also done if
   STACK_LEVEL is nonzero unless DONT_JUMP_IN is negative.  */
static void
fixup_gotos (struct nesting *thisblock, rtx stack_level,
	     tree cleanup_list, rtx first_insn, int dont_jump_in)
{
  struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  rtx cleanup_insns;

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.   This code used to use
	     the first non-label insn after f->target_rtl, but that's
	     wrong since such can be added, by things like put_var_into_stack
	     and have INSN_UIDs that are out of the range of the block.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in > 0 || (dont_jump_in == 0 && stack_level)
		  || cleanup_list)
	      && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_ERROR_ISSUED (f->target))
	    {
	      error ("%Jlabel '%D' used before containing binding contour",
		     f->target, f->target);
	      /* Prevent multiple errors for one label.  */
	      DECL_ERROR_ISSUED (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  lang_hooks.decls.pushlevel (0);
	  lang_hooks.decls.set_block (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), 1, 1);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level
	      && ! (f->target_rtl == return_label
		    && ((TREE_CODE (TREE_TYPE (current_function_decl))
			 == FUNCTION_TYPE)
			&& (TYPE_RETURNS_STACK_DEPRESSED
			    (TREE_TYPE (current_function_decl))))))
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point ensures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  lang_hooks.decls.poplevel (1, 0, 0);

	  end_sequence ();
	  emit_insn_after (cleanup_insns, f->before_jump);

	  f->before_jump = 0;
	}
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
	   means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	rtx cleanup_insns;

	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    {
	      start_sequence ();
	      lang_hooks.decls.pushlevel (0);
	      lang_hooks.decls.set_block (f->context);
	      expand_cleanups (TREE_VALUE (lists), 1, 1);
	      do_pending_stack_adjust ();
	      cleanup_insns = get_insns ();
	      lang_hooks.decls.poplevel (1, 0, 0);
	      end_sequence ();
	      if (cleanup_insns != 0)
		f->before_jump
		  = emit_insn_after (cleanup_insns, f->before_jump);

	      f->cleanup_list_list = TREE_CHAIN (lists);
	    }

	if (stack_level)
	  f->stack_level = stack_level;
      }
}
/* Return the number of times character C occurs in string S.  */

static int
n_occurrences (int c, const char *s)
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}
/* Generate RTL for an asm statement (explicit assembler code).
   STRING is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
   insn is volatile; don't optimize it.  */

void
expand_asm (tree string, int vol)
{
  rtx body;

  if (TREE_CODE (string) == ADDR_EXPR)
    string = TREE_OPERAND (string, 0);

  body = gen_rtx_ASM_INPUT (VOIDmode, TREE_STRING_POINTER (string));

  MEM_VOLATILE_P (body) = vol;

  emit_insn (body);
}
/* Parse the output constraint pointed to by *CONSTRAINT_P.  It is the
   OPERAND_NUMth output operand, indexed from zero.  There are NINPUTS
   inputs and NOUTPUTS outputs to this extended-asm.  Upon return,
   *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
   memory operand.  Similarly, *ALLOWS_REG will be TRUE iff the
   constraint allows the use of a register operand.  And, *IS_INOUT
   will be true if the operand is read-write, i.e., if it is used as
   an input as well as an output.  If *CONSTRAINT_P is not in
   canonical form, it will be made canonical.  (Note that `+' will be
   replaced with `=' as part of this process.)

   Returns TRUE if all went well; FALSE if an error occurred.  */
bool
parse_output_constraint (const char **constraint_p, int operand_num,
			 int ninputs, int noutputs, bool *allows_mem,
			 bool *allows_reg, bool *is_inout)
{
  const char *constraint = *constraint_p;
  const char *p;

  /* Assume the constraint doesn't allow the use of either a register
     or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Allow the `=' or `+' to not be at the beginning of the string,
     since it wasn't explicitly documented that way, and there is a
     large body of code that puts it last.  Swap the character to
     the front, so as not to uglify any place else.  */
  p = strchr (constraint, '=');
  if (!p)
    p = strchr (constraint, '+');

  /* If the string doesn't contain an `=', issue an error
     message.  */
  if (!p)
    {
      error ("output operand constraint lacks `='");
      return false;
    }

  /* If the constraint begins with `+', then the operand is both read
     from and written to.  */
  *is_inout = (*p == '+');

  /* Canonicalize the output constraint so that it begins with `='.  */
  if (p != constraint || *is_inout)
    {
      char *buf;
      size_t c_len = strlen (constraint);

      if (p != constraint)
	warning ("output constraint `%c' for operand %d is not at the beginning",
		 *p, operand_num);

      /* Make a copy of the constraint.  */
      buf = alloca (c_len + 1);
      strcpy (buf, constraint);
      /* Swap the first character and the `=' or `+'.  */
      buf[p - constraint] = buf[0];
      /* Make sure the first character is an `='.  (Until we do this,
	 it might be a `+'.)  */
      buf[0] = '=';
      /* Replace the constraint with the canonicalized string.  */
      *constraint_p = ggc_alloc_string (buf, c_len);
      constraint = *constraint_p;
    }

  /* Loop through the constraint string.  */
  for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
    switch (*p)
      {
      case '+':
      case '=':
	error ("operand constraint contains incorrectly positioned '+' or '='");
	return false;

      case '%':
	if (operand_num + 1 == ninputs + noutputs)
	  {
	    error ("`%%' constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case 'm':  case 'o':
	*allows_mem = true;
	break;

      case '?':  case '!':  case '*':  case '&':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	break;

      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
      case '[':
	error ("matching constraint not valid in output operand");
	return false;

      case '<':  case '>':
	/* ??? Before flow, auto inc/dec insns are not supposed to exist,
	   excepting those that expand_call created.  So match memory
	   and hope.  */
	*allows_mem = true;
	break;

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      case 'p':  case 'r':
	*allows_reg = true;
	break;

      default:
	if (!ISALPHA (*p))
	  break;
	if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
	else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
	  *allows_reg = true;
	else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  return true;
}
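/* Example (illustrative): for the constraint "+rm" on operand 0, the
   code above canonicalizes *CONSTRAINT_P to "=rm" and sets *IS_INOUT,
   *ALLOWS_REG and *ALLOWS_MEM all true; a plain "=r" leaves *IS_INOUT
   and *ALLOWS_MEM false.  */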
/* Similar, but for input constraints.  */

bool
parse_input_constraint (const char **constraint_p, int input_num,
			int ninputs, int noutputs, int ninout,
			const char * const * constraints,
			bool *allows_mem, bool *allows_reg)
{
  const char *constraint = *constraint_p;
  const char *orig_constraint = constraint;
  size_t c_len = strlen (constraint);
  size_t j;
  bool saw_match = false;

  /* Assume the constraint doesn't allow the use of either
     a register or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Make sure constraint has neither `=', `+', nor '&'.  */

  for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
    switch (constraint[j])
      {
      case '+':  case '=':  case '&':
	if (constraint == orig_constraint)
	  {
	    error ("input operand constraint contains `%c'", constraint[j]);
	    return false;
	  }
	break;

      case '%':
	if (constraint == orig_constraint
	    && input_num + 1 == ninputs - ninout)
	  {
	    error ("`%%' constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case 'm':  case 'o':
	*allows_mem = true;
	break;

      case '<':  case '>':
      case '?':  case '!':  case '*':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	break;

	/* Whether or not a numeric constraint allows a register is
	   decided by the matching constraint, and so there is no need
	   to do anything special with them.  We must handle them in
	   the default case, so that we don't unnecessarily force
	   operands to memory.  */
      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
	{
	  char *end;
	  unsigned long match;

	  saw_match = true;

	  match = strtoul (constraint + j, &end, 10);
	  if (match >= (unsigned long) noutputs)
	    {
	      error ("matching constraint references invalid operand number");
	      return false;
	    }

	  /* Try and find the real constraint for this dup.  Only do this
	     if the matching constraint is the only alternative.  */
	  if (*end == '\0'
	      && (j == 0 || (j == 1 && constraint[0] == '%')))
	    {
	      constraint = constraints[match];
	      *constraint_p = constraint;
	      c_len = strlen (constraint);
	      j = 0;
	      /* ??? At the end of the loop, we will skip the first part of
		 the matched constraint.  This assumes not only that the
		 other constraint is an output constraint, but also that
		 the '=' or '+' come first.  */
	      break;
	    }
	  else
	    j = end - constraint;
	  /* Anticipate increment at end of loop.  */
	  j--;
	}
	/* Fall through.  */

      case 'p':  case 'r':
	*allows_reg = true;
	break;

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      default:
	if (! ISALPHA (constraint[j]))
	  {
	    error ("invalid punctuation `%c' in constraint", constraint[j]);
	    return false;
	  }
	if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
	    != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
	else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
	  *allows_reg = true;
	else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  if (saw_match && !*allows_reg)
    warning ("matching constraint does not allow a register");

  return true;
}
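/* Example (illustrative): an input constraint "0" is resolved through
   CONSTRAINTS[0]; if operand 0 was declared "=r", the input inherits
   "r", so *ALLOWS_REG ends up true and *ALLOWS_MEM false.  */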
/* INPUT is one of the input operands from EXPR, an ASM_EXPR.  Returns true
   if it is an operand which must be passed in memory (i.e. an "m"
   constraint), false otherwise.  */

bool
asm_op_is_mem_input (tree input, tree expr)
{
  const char *constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (input)));
  tree outputs = ASM_OUTPUTS (expr);
  int noutputs = list_length (outputs);
  const char **constraints
    = (const char **) alloca ((noutputs) * sizeof (const char *));
  int i = 0;
  tree t;
  bool allows_mem, allows_reg;

  /* Collect output constraints.  */
  for (t = outputs; t ; t = TREE_CHAIN (t), i++)
    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));

  /* We pass 0 for input_num, ninputs and ninout; they are only used for
     error checking which will be done at expand time.  */
  parse_input_constraint (&constraint, 0, 0, noutputs, 0, constraints,
			  &allows_mem, &allows_reg);
  return (!allows_reg && allows_mem);
}
/* Check for overlap between registers marked in CLOBBERED_REGS and
   anything inappropriate in DECL.  Emit error and return TRUE for error,
   FALSE for ok.  */

static bool
decl_conflicts_with_clobbers_p (tree decl, const HARD_REG_SET clobbered_regs)
{
  /* Conflicts between asm-declared register variables and the clobber
     list are not allowed.  */
  if ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
      && DECL_REGISTER (decl)
      && REG_P (DECL_RTL (decl))
      && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
    {
      rtx reg = DECL_RTL (decl);
      unsigned int regno;

      for (regno = REGNO (reg);
	   regno < (REGNO (reg)
		    + hard_regno_nregs[REGNO (reg)][GET_MODE (reg)]);
	   regno++)
	if (TEST_HARD_REG_BIT (clobbered_regs, regno))
	  {
	    error ("asm-specifier for variable `%s' conflicts with asm clobber list",
		   IDENTIFIER_POINTER (DECL_NAME (decl)));

	    /* Reset registerness to stop multiple errors emitted for a
	       single variable.  */
	    DECL_REGISTER (decl) = 0;
	    return true;
	  }
    }

  return false;
}
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a tree list in TREE_PURPOSE which in turn contains a constraint
   name in TREE_VALUE (or NULL_TREE) and a constraint string
   in TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
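/* Illustration (assumed front-end usage): for GNU C source such as

       asm volatile ("add %1, %0" : "=r" (sum) : "r" (x), "0" (sum));

   this function receives STRING "add %1, %0", one output with
   constraint "=r", two inputs with constraints "r" and "0", an empty
   clobber list, and VOL nonzero.  */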
void
expand_asm_operands (tree string, tree outputs, tree inputs,
		     tree clobbers, int vol, location_t locus)
{
  rtvec argvec, constraintvec;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int ninout;
  int nclobbers;
  HARD_REG_SET clobbered_regs;
  int clobber_conflict_found = 0;
  tree tail;
  tree t;
  int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = alloca (noutputs * sizeof (rtx));
  int *inout_opnum = alloca (noutputs * sizeof (int));
  rtx *real_output_rtx = alloca (noutputs * sizeof (rtx));
  enum machine_mode *inout_mode
    = alloca (noutputs * sizeof (enum machine_mode));
  const char **constraints
    = alloca ((noutputs + ninputs) * sizeof (const char *));
  int old_generating_concat_p = generating_concat_p;

  /* An ASM with no outputs needs to be treated as volatile, for now.  */
  if (noutputs == 0)
    vol = 1;

  if (! check_operand_nalternatives (outputs, inputs))
    return;

  string = resolve_asm_operand_names (string, outputs, inputs);

  /* Collect constraints.  */
  i = 0;
  for (t = outputs; t ; t = TREE_CHAIN (t), i++)
    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
  for (t = inputs; t ; t = TREE_CHAIN (t), i++)
    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));

  /* Sometimes we wish to automatically clobber registers across an asm.
     Case in point is when the i386 backend moved from cc0 to a hard reg --
     maintaining source-level compatibility means automatically clobbering
     the flags register.  */
  clobbers = targetm.md_asm_clobbers (clobbers);

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  CLEAR_HARD_REG_SET (clobbered_regs);
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));

      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
      else if (i == -2)
	error ("unknown register name `%s' in `asm'", regname);

      /* Mark clobbered registers.  */
      if (i >= 0)
	{
	  /* Clobbering the PIC register is an error */
	  if (i == (int) PIC_OFFSET_TABLE_REGNUM)
	    {
	      error ("PIC register `%s' clobbered in `asm'", regname);
	      return;
	    }

	  SET_HARD_REG_BIT (clobbered_regs, i);
	}
    }
  /* First pass over inputs and outputs checks validity and sets
     mark_addressable if needed.  */

  ninout = 0;
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      const char *constraint;
      bool is_inout;
      bool allows_reg;
      bool allows_mem;

      /* If there's an erroneous arg, emit no insn.  */
      if (type == error_mark_node)
	return;

      /* Try to parse the output constraint.  If that fails, there's
	 no point in going further.  */
      constraint = constraints[i];
      if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
				    &allows_mem, &allows_reg, &is_inout))
	return;

      if (! allows_reg
	  && (allows_mem
	      || is_inout
	      || (DECL_P (val)
		  && REG_P (DECL_RTL (val))
		  && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
	lang_hooks.mark_addressable (val);

      if (is_inout)
	ninout++;
    }

  ninputs += ninout;
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
    {
      bool allows_reg, allows_mem;
      const char *constraint;

      /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
	 would get VOIDmode and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;

      constraint = constraints[i + noutputs];
      if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
				    constraints, &allows_mem, &allows_reg))
	return;

      if (! allows_reg && allows_mem)
	lang_hooks.mark_addressable (TREE_VALUE (tail));
    }
  /* Second pass evaluates arguments.  */

  ninout = 0;
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      bool is_inout;
      bool allows_reg;
      bool allows_mem;
      rtx op;

      if (!parse_output_constraint (&constraints[i], i, ninputs,
				    noutputs, &allows_mem, &allows_reg,
				    &is_inout))
	abort ();

      /* If an output operand is not a decl or indirect ref and our constraint
	 allows a register, make a temporary to act as an intermediate.
	 Make the asm insn write into that, then our caller will copy it to
	 the real output operand.  Likewise for promoted variables.  */

      generating_concat_p = 0;

      real_output_rtx[i] = NULL_RTX;
      if ((TREE_CODE (val) == INDIRECT_REF
	   && allows_mem)
	  || (DECL_P (val)
	      && (allows_mem || REG_P (DECL_RTL (val)))
	      && ! (REG_P (DECL_RTL (val))
		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
	  || ! allows_reg
	  || is_inout)
	{
	  op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
	  if (GET_CODE (op) == MEM)
	    op = validize_mem (op);

	  if (! allows_reg && GET_CODE (op) != MEM)
	    error ("output number %d not directly addressable", i);
	  if ((! allows_mem && GET_CODE (op) == MEM)
	      || GET_CODE (op) == CONCAT)
	    {
	      real_output_rtx[i] = protect_from_queue (op, 1);
	      op = gen_reg_rtx (GET_MODE (op));
	      if (is_inout)
		emit_move_insn (op, real_output_rtx[i]);
	    }
	}
      else
	{
	  op = assign_temp (type, 0, 0, 1);
	  op = validize_mem (op);
	  TREE_VALUE (tail) = make_tree (type, op);
	}
      output_rtx[i] = op;

      generating_concat_p = old_generating_concat_p;

      if (is_inout)
	{
	  inout_mode[ninout] = TYPE_MODE (type);
	  inout_opnum[ninout++] = i;
	}

      if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
	clobber_conflict_found = 1;
    }
  /* Make vectors for the expression-rtx, constraint strings,
     and named operands.  */

  argvec = rtvec_alloc (ninputs);
  constraintvec = rtvec_alloc (ninputs);

  body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
				: GET_MODE (output_rtx[0])),
			       TREE_STRING_POINTER (string),
			       empty_string, 0, argvec, constraintvec,
			       locus);

  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
    {
      bool allows_reg, allows_mem;
      const char *constraint;
      tree val, type;
      rtx op;

      constraint = constraints[i + noutputs];
      if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
				    constraints, &allows_mem, &allows_reg))
	abort ();

      generating_concat_p = 0;

      val = TREE_VALUE (tail);
      type = TREE_TYPE (val);
      op = expand_expr (val, NULL_RTX, VOIDmode,
			(allows_mem && !allows_reg
			 ? EXPAND_MEMORY : EXPAND_NORMAL));

      /* Never pass a CONCAT to an ASM.  */
      if (GET_CODE (op) == CONCAT)
	op = force_reg (GET_MODE (op), op);
      else if (GET_CODE (op) == MEM)
	op = validize_mem (op);

      if (asm_operand_ok (op, constraint) <= 0)
	{
	  if (allows_reg)
	    op = force_reg (TYPE_MODE (type), op);
	  else if (!allows_mem)
	    warning ("asm operand %d probably doesn't match constraints",
		     i + noutputs);
	  else if (GET_CODE (op) == MEM)
	    {
	      /* We won't recognize either volatile memory or memory
		 with a queued address as an available memory_operand
		 at this point.  Ignore it: clearly this *is* a memory.  */
	    }
	  else
	    {
	      warning ("use of memory input without lvalue in "
		       "asm operand %d is deprecated", i + noutputs);

	      if (CONSTANT_P (op))
		{
		  rtx mem = force_const_mem (TYPE_MODE (type), op);
		  if (mem)
		    op = validize_mem (mem);
		  else
		    op = force_reg (TYPE_MODE (type), op);
		}
	      if (REG_P (op)
		  || GET_CODE (op) == SUBREG
		  || GET_CODE (op) == ADDRESSOF
		  || GET_CODE (op) == CONCAT)
		{
		  tree qual_type = build_qualified_type (type,
							 (TYPE_QUALS (type)
							  | TYPE_QUAL_CONST));
		  rtx memloc = assign_temp (qual_type, 1, 1, 1);
		  memloc = validize_mem (memloc);
		  emit_move_insn (memloc, op);
		  op = memloc;
		}
	    }
	}

      generating_concat_p = old_generating_concat_p;
      ASM_OPERANDS_INPUT (body, i) = op;

      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
	= gen_rtx_ASM_INPUT (TYPE_MODE (type), constraints[i + noutputs]);

      if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
	clobber_conflict_found = 1;
    }
  /* Protect all the operands from the queue now that they have all been
     evaluated.  */

  generating_concat_p = 0;

  for (i = 0; i < ninputs - ninout; i++)
    ASM_OPERANDS_INPUT (body, i)
      = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* For in-out operands, copy output rtx to input rtx.  */
  for (i = 0; i < ninout; i++)
    {
      int j = inout_opnum[i];
      char buffer[16];

      ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
	= output_rtx[j];

      sprintf (buffer, "%d", j);
      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
	= gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
    }
  generating_concat_p = old_generating_concat_p;

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
		  ARGVEC CONSTRAINTS OPNAMES))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
      emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
    }

  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      emit_insn (body);
    }

  else
    {
      rtx obody = body;
      int num = noutputs;

      if (num == 0)
	num = 1;

      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */
      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx_SET (VOIDmode,
			   output_rtx[i],
			   gen_rtx_ASM_OPERANDS
			   (GET_MODE (output_rtx[i]),
			    TREE_STRING_POINTER (string),
			    constraints[i], i, argvec, constraintvec,
			    locus));

	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
	{
	  const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int j = decode_reg_name (regname);
	  rtx clobbered_reg;

	  if (j < 0)
	    {
	      if (j == -3)	/* `cc', which is not a register */
		continue;

	      if (j == -4)	/* `memory', don't cache memory across asm */
		{
		  XVECEXP (body, 0, i++)
		    = gen_rtx_CLOBBER (VOIDmode,
				       gen_rtx_MEM
				       (BLKmode,
					gen_rtx_SCRATCH (VOIDmode)));
		  continue;
		}

	      /* Ignore unknown register, error already signaled.  */
	      continue;
	    }

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  clobbered_reg = gen_rtx_REG (QImode, j);

	  /* Do sanity check for overlap between clobbers and respectively
	     input and outputs that hasn't been handled.  Such overlap
	     should have been detected and reported above.  */
	  if (!clobber_conflict_found)
	    {
	      int opno;

	      /* We test the old body (obody) contents to avoid tripping
		 over the under-construction body.  */
	      for (opno = 0; opno < noutputs; opno++)
		if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
		  internal_error ("asm clobber conflict with output operand");

	      for (opno = 0; opno < ninputs - ninout; opno++)
		if (reg_overlap_mentioned_p (clobbered_reg,
					     ASM_OPERANDS_INPUT (obody, opno)))
		  internal_error ("asm clobber conflict with input operand");
	    }

	  XVECEXP (body, 0, i++)
	    = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
	}

      emit_insn (body);
    }

  /* For any outputs that needed reloading into registers, spill them
     back to where they belong.  */
  for (i = 0; i < noutputs; ++i)
    if (real_output_rtx[i])
      emit_move_insn (real_output_rtx[i], output_rtx[i]);
}
void
expand_asm_expr (tree exp)
{
  int noutputs, i;
  tree outputs, tail;
  tree *o;

  if (ASM_INPUT_P (exp))
    {
      expand_asm (ASM_STRING (exp), ASM_VOLATILE_P (exp));
      return;
    }

  outputs = ASM_OUTPUTS (exp);
  noutputs = list_length (outputs);
  /* o[I] is the place that output number I should be written.  */
  o = (tree *) alloca (noutputs * sizeof (tree));

  /* Record the contents of OUTPUTS before it is modified.  */
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    o[i] = TREE_VALUE (tail);

  /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
     OUTPUTS some trees for where the values were actually stored.  */
  expand_asm_operands (ASM_STRING (exp), outputs, ASM_INPUTS (exp),
		       ASM_CLOBBERS (exp), ASM_VOLATILE_P (exp),
		       input_location);

  /* Copy all the intermediate outputs into the specified outputs.  */
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      if (o[i] != TREE_VALUE (tail))
	{
	  expand_assignment (o[i], TREE_VALUE (tail), 0);
	  free_temp_slots ();

	  /* Restore the original value so that it's correct the next
	     time we expand this function.  */
	  TREE_VALUE (tail) = o[i];
	}
    }

  /* Those MODIFY_EXPRs could do autoincrements.  */
  emit_queue ();
}
/* A subroutine of expand_asm_operands.  Check that all operands have
   the same number of alternatives.  Return true if so.  */

static bool
check_operand_nalternatives (tree outputs, tree inputs)
{
  if (outputs || inputs)
    {
      tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
      int nalternatives
	= n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
      tree next = inputs;

      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
	{
	  error ("too many alternatives in `asm'");
	  return false;
	}

      tmp = outputs;
      while (tmp)
	{
	  const char *constraint
	    = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));

	  if (n_occurrences (',', constraint) != nalternatives)
	    {
	      error ("operand constraints for `asm' differ in number of alternatives");
	      return false;
	    }

	  if (TREE_CHAIN (tmp))
	    tmp = TREE_CHAIN (tmp);
	  else
	    tmp = next, next = 0;
	}
    }

  return true;
}
/* A subroutine of expand_asm_operands.  Check that all operand names
   are unique.  Return true if so.  We rely on the fact that these names
   are identifiers, and so have been canonicalized by get_identifier,
   so all we need are pointer comparisons.  */

static bool
check_unique_operand_names (tree outputs, tree inputs)
{
  tree i, j;

  for (i = outputs; i ; i = TREE_CHAIN (i))
    {
      tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
      if (! i_name)
	continue;

      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
	  goto failure;
    }

  for (i = inputs; i ; i = TREE_CHAIN (i))
    {
      tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
      if (! i_name)
	continue;

      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
	  goto failure;
      for (j = outputs; j ; j = TREE_CHAIN (j))
	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
	  goto failure;
    }

  return true;

 failure:
  error ("duplicate asm operand name '%s'",
	 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
  return false;
}
/* A subroutine of expand_asm_operands.  Resolve the names of the operands
   in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
   STRING and in the constraints to those numbers.  */

tree
resolve_asm_operand_names (tree string, tree outputs, tree inputs)
{
  char *buffer;
  char *p;
  const char *c;
  tree t;

  check_unique_operand_names (outputs, inputs);

  /* Substitute [<name>] in input constraint strings.  There should be no
     named operands in output constraints.  */
  for (t = inputs; t ; t = TREE_CHAIN (t))
    {
      c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
      if (strchr (c, '[') != NULL)
	{
	  p = buffer = xstrdup (c);
	  while ((p = strchr (p, '[')) != NULL)
	    p = resolve_operand_name_1 (p, outputs, inputs);
	  TREE_VALUE (TREE_PURPOSE (t))
	    = build_string (strlen (buffer), buffer);
	  free (buffer);
	}
    }

  /* Now check for any needed substitutions in the template.  */
  c = TREE_STRING_POINTER (string);
  while ((c = strchr (c, '%')) != NULL)
    {
      if (c[1] == '[')
	break;
      else if (ISALPHA (c[1]) && c[2] == '[')
	break;
      else
	{
	  c += 1;
	  continue;
	}
    }

  if (c)
    {
      /* OK, we need to make a copy so we can perform the substitutions.
	 Assume that we will not need extra space--we get to remove '['
	 and ']', which means we cannot have a problem until we have more
	 than 999 operands.  */
      buffer = xstrdup (TREE_STRING_POINTER (string));
      p = buffer + (c - TREE_STRING_POINTER (string));

      while ((p = strchr (p, '%')) != NULL)
	{
	  if (p[1] == '[')
	    p += 1;
	  else if (ISALPHA (p[1]) && p[2] == '[')
	    p += 2;
	  else
	    {
	      p += 1;
	      continue;
	    }

	  p = resolve_operand_name_1 (p, outputs, inputs);
	}

      string = build_string (strlen (buffer), buffer);
      free (buffer);
    }

  return string;
}
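/* For example (illustrative): with operands declared as [dst] "=r" (x)
   and [src] "r" (y), the template "mov %[src], %[dst]" is rewritten
   here to "mov %1, %0", and a "[dst]" in an input constraint becomes
   the number "0".  */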
/* A subroutine of resolve_operand_names.  P points to the '[' for a
   potential named operand of the form [<name>].  In place, replace
   the name and brackets with a number.  Return a pointer to the
   balance of the string after substitution.  */

static char *
resolve_operand_name_1 (char *p, tree outputs, tree inputs)
{
  char *q;
  int op;
  tree t;
  size_t len;

  /* Collect the operand name.  */
  q = strchr (p, ']');
  if (!q)
    {
      error ("missing close brace for named operand");
      return strchr (p, '\0');
    }
  len = q - p - 1;

  /* Resolve the name to a number.  */
  for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
    {
      tree name = TREE_PURPOSE (TREE_PURPOSE (t));
      if (name)
	{
	  const char *c = TREE_STRING_POINTER (name);
	  if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
	    goto found;
	}
    }
  for (t = inputs; t ; t = TREE_CHAIN (t), op++)
    {
      tree name = TREE_PURPOSE (TREE_PURPOSE (t));
      if (name)
	{
	  const char *c = TREE_STRING_POINTER (name);
	  if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
	    goto found;
	}
    }

  *q = '\0';
  error ("undefined named operand '%s'", p + 1);
  op = 0;
 found:

  /* Replace the name with the number.  Unfortunately, not all libraries
     get the return value of sprintf correct, so search for the end of the
     generated string by hand.  */
  sprintf (p, "%d", op);
  p = strchr (p, '\0');

  /* Verify the no extra buffer space assumption.  */
  if (p > q)
    abort ();

  /* Shift the rest of the buffer down to fill the gap.  */
  memmove (p, q + 1, strlen (q + 1) + 1);

  return p;
}
2057 /* Generate RTL to evaluate the expression EXP
2058 and remember it in case this is the VALUE in a ({... VALUE; }) construct.
2059 Provided just for backward-compatibility. expand_expr_stmt_value()
2060 should be used for new code. */
2063 expand_expr_stmt (tree exp)
2065 expand_expr_stmt_value (exp, -1, 1);
2068 /* Generate RTL to evaluate the expression EXP. WANT_VALUE tells
2069 whether to (1) save the value of the expression, (0) discard it or
2070 (-1) use expr_stmts_for_value to tell. The use of -1 is
2071 deprecated, and retained only for backward compatibility. */
2074 expand_expr_stmt_value (tree exp, int want_value, int maybe_last)
2080 if (want_value == -1)
2081 want_value = expr_stmts_for_value != 0;
2083 /* If -Wextra, warn about statements with no side effects,
2084 except for an explicit cast to void (e.g. for assert()), and
2085 except for the last statement in ({...}), where they may be useful.
2087 && (expr_stmts_for_value == 0 || ! maybe_last)
2088 && exp != error_mark_node
2089 && warn_unused_value)
2091 if (TREE_SIDE_EFFECTS (exp))
2092 warn_if_unused_value (exp, emit_locus);
2093 else if (!VOID_TYPE_P (TREE_TYPE (exp)) && !TREE_NO_WARNING (exp))
2094 warning ("%Hstatement with no effect", &emit_locus);
2097 /* If EXP is of function type and we are expanding statements for
2098 value, convert it to pointer-to-function. */
2099 if (want_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
2100 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
2102 /* The call to `expand_expr' could cause last_expr_type and
2103 last_expr_value to get reset. Therefore, we set last_expr_value
2104 and last_expr_type *after* calling expand_expr. */
2105 value = expand_expr_real (exp, want_value ? NULL_RTX : const0_rtx,
2106 VOIDmode, 0, &alt_rtl);
2107 type = TREE_TYPE (exp);
2109 /* If all we do is reference a volatile value in memory,
2110 copy it to a register to be sure it is actually touched. */
2111 if (value && GET_CODE (value) == MEM && TREE_THIS_VOLATILE (exp))
2113 if (TYPE_MODE (type) == VOIDmode)
2115 else if (TYPE_MODE (type) != BLKmode)
2116 value = copy_to_reg (value);
2119 rtx lab = gen_label_rtx ();
2121 /* Compare the value with itself to reference it. */
2122 emit_cmp_and_jump_insns (value, value, EQ,
2123 expand_expr (TYPE_SIZE (type),
2124 NULL_RTX, VOIDmode, 0),
2130 /* If this expression is part of a ({...}) and is in memory, we may have
2131 to preserve temporaries. */
2132 preserve_temp_slots (value);
2134 /* Free any temporaries used to evaluate this expression. Any temporary
2135 used as a result of this expression will already have been preserved
2141 last_expr_value = value;
2142 last_expr_alt_rtl = alt_rtl;
2143 last_expr_type = type;
2149 /* Warn if EXP contains any computations whose results are not used.
2150 Return 1 if a warning is printed; 0 otherwise. LOCUS is the
2151 (potential) location of the expression. */
2154 warn_if_unused_value (tree exp, location_t locus)
2157 if (TREE_USED (exp))
2160 /* Don't warn about void constructs. This includes casting to void,
2161 void function calls, and statement expressions with a final cast
2163 if (VOID_TYPE_P (TREE_TYPE (exp)))
2166 if (EXPR_LOCUS (exp))
2167 locus = *EXPR_LOCUS (exp);
2169 switch (TREE_CODE (exp))
2171 case PREINCREMENT_EXPR:
2172 case POSTINCREMENT_EXPR:
2173 case PREDECREMENT_EXPR:
2174 case POSTDECREMENT_EXPR:
2180 case TRY_CATCH_EXPR:
2181 case WITH_CLEANUP_EXPR:
2186 /* For a binding, warn if no side effect within it. */
2187 exp = BIND_EXPR_BODY (exp);
2191 exp = TREE_OPERAND (exp, 0);
2194 case TRUTH_ORIF_EXPR:
2195 case TRUTH_ANDIF_EXPR:
2196 /* In && or ||, warn if 2nd operand has no side effect. */
2197 exp = TREE_OPERAND (exp, 1);
2201 if (TREE_NO_WARNING (exp))
2203 if (warn_if_unused_value (TREE_OPERAND (exp, 0), locus))
2205 /* Let people do `(foo (), 0)' without a warning. */
2206 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2208 exp = TREE_OPERAND (exp, 1);
2213 case NON_LVALUE_EXPR:
2214 /* Don't warn about conversions not explicit in the user's program. */
2215 if (TREE_NO_WARNING (exp))
2217 /* Assignment to a cast usually results in a cast of a modify.
2218 Don't complain about that. There can be an arbitrary number of
2219 casts before the modify, so we must loop until we find the first
2220 non-cast expression and then test to see if that is a modify. */
2222 tree tem = TREE_OPERAND (exp, 0);
2224 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2225 tem = TREE_OPERAND (tem, 0);
2227 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2228 || TREE_CODE (tem) == CALL_EXPR)
2234 /* Don't warn about automatic dereferencing of references, since
2235 the user cannot control it. */
2236 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2238 exp = TREE_OPERAND (exp, 0);
2244 /* Referencing a volatile value is a side effect, so don't warn. */
2246 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2247 && TREE_THIS_VOLATILE (exp))
2250 /* If this is an expression which has no operands, there is no value
2251 to be unused. There are no such language-independent codes,
2252 but front ends may define such. */
2253 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2254 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
2258 /* If this is an expression with side effects, don't warn. */
2259 if (TREE_SIDE_EFFECTS (exp))
2262 warning ("%Hvalue computed is not used", &locus);
2267 /* Clear out the memory of the last expression evaluated. */
2270 clear_last_expr (void)
2272 last_expr_type = NULL_TREE;
2273 last_expr_value = NULL_RTX;
2274 last_expr_alt_rtl = NULL_RTX;
2277 /* Begin a statement-expression, i.e., a series of statements which
2278 may return a value. Return the RTL_EXPR for this statement expr.
2279 The caller must save that value and pass it to
2280 expand_end_stmt_expr. If HAS_SCOPE is nonzero, temporaries created
2281 in the statement-expression are deallocated at the end of the
2285 expand_start_stmt_expr (int has_scope)
2289 /* Make the RTL_EXPR node temporary, not momentary,
2290 so that rtl_expr_chain doesn't become garbage. */
2291 t = make_node (RTL_EXPR);
2292 do_pending_stack_adjust ();
2294 start_sequence_for_rtl_expr (t);
2298 expr_stmts_for_value++;
2302 /* Restore the previous state at the end of a statement that returns a value.
2303 Returns a tree node representing the statement's value and the
2304 insns to compute the value.
2306 The nodes of that expression have been freed by now, so we cannot use them.
2307 But we don't want to do that anyway; the expression has already been
2308 evaluated and now we just want to use the value. So generate an RTL_EXPR
2309 with the proper type and RTL value.
2311 If the last substatement was not an expression,
2312 return something with type `void'. */
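/* For example, in the GNU statement-expression

       x = ({ int t = f (); t + 1; });

   the construct's value is that of the final expression statement
   "t + 1"; the RTL_EXPR built below carries that type and value.  */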
2315 expand_end_stmt_expr (tree t)
2319 if (! last_expr_value || ! last_expr_type)
2321 last_expr_value = const0_rtx;
2322 last_expr_alt_rtl = NULL_RTX;
2323 last_expr_type = void_type_node;
2325 else if (!REG_P (last_expr_value) && ! CONSTANT_P (last_expr_value))
2326 /* Remove any possible QUEUED. */
2327 last_expr_value = protect_from_queue (last_expr_value, 0);
2331 TREE_TYPE (t) = last_expr_type;
2332 RTL_EXPR_RTL (t) = last_expr_value;
2333 RTL_EXPR_ALT_RTL (t) = last_expr_alt_rtl;
2334 RTL_EXPR_SEQUENCE (t) = get_insns ();
2336 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2340 /* Don't consider deleting this expr or containing exprs at tree level. */
2341 TREE_SIDE_EFFECTS (t) = 1;
2342 /* Propagate volatility of the actual RTL expr. */
2343 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2346 expr_stmts_for_value--;
2351 /* Generate RTL for the start of an if-then. COND is the expression
2352 whose truth should be tested.
2354 If EXITFLAG is nonzero, this conditional is visible to
2355 `exit_something'. */
2358 expand_start_cond (tree cond, int exitflag)
2360 struct nesting *thiscond = ALLOC_NESTING ();
2362 /* Make an entry on cond_stack for the cond we are entering. */
2364 thiscond->desc = COND_NESTING;
2365 thiscond->next = cond_stack;
2366 thiscond->all = nesting_stack;
2367 thiscond->depth = ++nesting_depth;
2368 thiscond->data.cond.next_label = gen_label_rtx ();
2369 /* Before we encounter an `else', we don't need a separate exit label
2370 unless there are supposed to be exit statements
2371 to exit this conditional. */
2372 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2373 thiscond->data.cond.endif_label = thiscond->exit_label;
2374 cond_stack = thiscond;
2375 nesting_stack = thiscond;
2377 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2380 /* Generate RTL between the then-clause and the elseif-clause
2381 of an if-then-elseif-.... */
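/* A sketch of the expected call sequence: a front end expanding
   "if (c1) A; else if (c2) B; else C;" would call

       expand_start_cond (c1, 0);    ... expand A ...
       expand_start_elseif (c2);     ... expand B ...
       expand_start_else ();         ... expand C ...
       expand_end_cond ();

   with the clause expansions interleaved between the calls.  */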
2384 expand_start_elseif (tree cond)
2386 if (cond_stack->data.cond.endif_label == 0)
2387 cond_stack->data.cond.endif_label = gen_label_rtx ();
2388 emit_jump (cond_stack->data.cond.endif_label);
2389 emit_label (cond_stack->data.cond.next_label);
2390 cond_stack->data.cond.next_label = gen_label_rtx ();
2391 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2394 /* Generate RTL between the then-clause and the else-clause
2395 of an if-then-else. */
2398 expand_start_else (void)
2400 if (cond_stack->data.cond.endif_label == 0)
2401 cond_stack->data.cond.endif_label = gen_label_rtx ();
2403 emit_jump (cond_stack->data.cond.endif_label);
2404 emit_label (cond_stack->data.cond.next_label);
2405 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2408 /* After calling expand_start_else, turn this "else" into an "else if"
2409 by providing another condition. */
2412 expand_elseif (tree cond)
2414 cond_stack->data.cond.next_label = gen_label_rtx ();
2415 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2418 /* Generate RTL for the end of an if-then.
2419 Pop the record for it off of cond_stack. */
2422 expand_end_cond (void)
2424 struct nesting *thiscond = cond_stack;
2426 do_pending_stack_adjust ();
2427 if (thiscond->data.cond.next_label)
2428 emit_label (thiscond->data.cond.next_label);
2429 if (thiscond->data.cond.endif_label)
2430 emit_label (thiscond->data.cond.endif_label);
2432 POPSTACK (cond_stack);
2436 /* Return nonzero if we should preserve sub-expressions as separate
2437 pseudos. We never do so if we aren't optimizing. We always do so
2438 if -fexpensive-optimizations. */
2441 preserve_subexpressions_p (void)
2443 if (flag_expensive_optimizations)
2446 if (optimize == 0 || cfun == 0 || cfun->stmt == 0)
2453 /* Generate RTL to return from the current function, with no value.
2454 (That is, we do not do anything about returning any value.) */
2457 expand_null_return (void)
2461 last_insn = get_last_insn ();
2463 /* If this function was declared to return a value, but we
2464 didn't, clobber the return registers so that they are not
2465 propagated live to the rest of the function. */
2466 clobber_return_register ();
2468 expand_null_return_1 (last_insn);
2471 /* Generate RTL to return directly from the current function.
2472 (That is, we bypass any return value.) */
2475 expand_naked_return (void)
2477 rtx last_insn, end_label;
2479 last_insn = get_last_insn ();
2480 end_label = naked_return_label;
2482 clear_pending_stack_adjust ();
2483 do_pending_stack_adjust ();
2487 end_label = naked_return_label = gen_label_rtx ();
2488 expand_goto_internal (NULL_TREE, end_label, last_insn);
2491 /* Try to guess whether the returned value indicates an error code. */
2492 static enum br_predictor
2493 return_prediction (rtx val)
2495 /* Different heuristics for pointers and scalars. */
2496 if (POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
2498 /* NULL is usually not returned. */
2499 if (val == const0_rtx)
2500 return PRED_NULL_RETURN;
2504 /* Negative return values are often used to indicate
2506 if (GET_CODE (val) == CONST_INT
2507 && INTVAL (val) < 0)
2508 return PRED_NEGATIVE_RETURN;
2509 /* Constant return values are also usually errors,
2510 zero/one often mean booleans, so exclude them from the
2512 if (CONSTANT_P (val)
2513 && (val != const0_rtx && val != const1_rtx))
2514 return PRED_CONST_RETURN;
2516 return PRED_NO_PREDICTION;
2520 /* If the current function returns values in the most significant part
2521 of a register, shift return value VAL appropriately. The mode of
2522 the function's return type is known not to be BLKmode. */
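/* For example (assuming a 32-bit return register on a target whose
   ABI returns values in the most significant part of it), a 3-byte
   structure gives shift = 32 - 3 * 8 = 8, so the value is shifted
   left by one byte into position.  */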
2525 shift_return_value (rtx val)
2529 type = TREE_TYPE (DECL_RESULT (current_function_decl));
2530 if (targetm.calls.return_in_msb (type))
2533 HOST_WIDE_INT shift;
2535 target = DECL_RTL (DECL_RESULT (current_function_decl));
2536 shift = (GET_MODE_BITSIZE (GET_MODE (target))
2537 - BITS_PER_UNIT * int_size_in_bytes (type));
2539 val = expand_binop (GET_MODE (target), ashl_optab,
2540 gen_lowpart (GET_MODE (target), val),
2541 GEN_INT (shift), target, 1, OPTAB_WIDEN);
2547 /* Generate RTL to return from the current function, with value VAL. */
2550 expand_value_return (rtx val)
2554 enum br_predictor pred;
2556 if (flag_guess_branch_prob
2557 && (pred = return_prediction (val)) != PRED_NO_PREDICTION)
2559 /* Emit information for branch prediction. */
2562 note = emit_note (NOTE_INSN_PREDICTION);
2564 NOTE_PREDICTION (note) = NOTE_PREDICT (pred, NOT_TAKEN);
2568 last_insn = get_last_insn ();
2569 return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2571 /* Copy the value to the return location
2572 unless it's already there. */
2574 if (return_reg != val)
2576 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2577 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
2579 int unsignedp = TYPE_UNSIGNED (type);
2580 enum machine_mode old_mode
2581 = DECL_MODE (DECL_RESULT (current_function_decl));
2582 enum machine_mode mode
2583 = promote_mode (type, old_mode, &unsignedp, 1);
2585 if (mode != old_mode)
2586 val = convert_modes (mode, old_mode, val, unsignedp);
2588 if (GET_CODE (return_reg) == PARALLEL)
2589 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
2591 emit_move_insn (return_reg, val);
2594 expand_null_return_1 (last_insn);
2597 /* Output a return with no value. If LAST_INSN is nonzero,
2598 pretend that the return takes place after LAST_INSN. */
2601 expand_null_return_1 (rtx last_insn)
2603 rtx end_label = return_label;
2605 clear_pending_stack_adjust ();
2606 do_pending_stack_adjust ();
2610 end_label = return_label = gen_label_rtx ();
2611 expand_goto_internal (NULL_TREE, end_label, last_insn);
2614 /* Generate RTL to evaluate the expression RETVAL and return it
2615 from the current function. */
2618 expand_return (tree retval)
2620 /* If there are any cleanups to be performed, then they will
2621 be inserted following LAST_INSN. It is desirable
2622 that the last_insn, for such purposes, should be the
2623 last insn before computing the return value. Otherwise, cleanups
2624 which call functions can clobber the return value. */
2625 /* ??? rms: I think that is erroneous, because in C++ it would
2626 run destructors on variables that might be used in the subsequent
2627 computation of the return value. */
2633 /* If the function wants no value, give it none. */
2634 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2636 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2638 expand_null_return ();
2642 if (retval == error_mark_node)
2644 /* Treat this like a return of no value from a function that
2646 expand_null_return ();
2649 else if (TREE_CODE (retval) == RESULT_DECL)
2650 retval_rhs = retval;
2651 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2652 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2653 retval_rhs = TREE_OPERAND (retval, 1);
2655 retval_rhs = retval;
2657 last_insn = get_last_insn ();
2659 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
2661 /* If the result is an aggregate that is being returned in one (or more)
2662 registers, load the registers here. The compiler currently can't handle
2663 copying a BLKmode value into registers. We could put this code in a
2664 more general area (for use by everyone instead of just function
2665 call/return), but until this feature is generally usable it is kept here
2666 (and in expand_call). The value must go into a pseudo in case there
2667 are cleanups that will clobber the real return register. */
2670 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2671 && REG_P (result_rtl))
2674 unsigned HOST_WIDE_INT bitpos, xbitpos;
2675 unsigned HOST_WIDE_INT padding_correction = 0;
2676 unsigned HOST_WIDE_INT bytes
2677 = int_size_in_bytes (TREE_TYPE (retval_rhs));
2678 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2679 unsigned int bitsize
2680 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
2681 rtx *result_pseudos = alloca (sizeof (rtx) * n_regs);
2682 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
2683 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2684 enum machine_mode tmpmode, result_reg_mode;
2688 expand_null_return ();
2692 /* If the structure doesn't take up a whole number of words, see
2693 whether the register value should be padded on the left or on
2694 the right. Set PADDING_CORRECTION to the number of padding
2695 bits needed on the left side.
2697 In most ABIs, the structure will be returned at the least significant end of
2698 the register, which translates to right padding on little-endian
2699 targets and left padding on big-endian targets. The opposite
2700 holds if the structure is returned at the most significant
2701 end of the register. */
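/* For example, with 4-byte words, a 6-byte structure that must be
   padded on the left gets padding_correction
   = BITS_PER_WORD - (bytes % UNITS_PER_WORD) * BITS_PER_UNIT
   = 32 - 2 * 8 = 16.  */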
2702 if (bytes % UNITS_PER_WORD != 0
2703 && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
2705 : BYTES_BIG_ENDIAN))
2706 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2709 /* Copy the structure BITSIZE bits at a time. */
2710 for (bitpos = 0, xbitpos = padding_correction;
2711 bitpos < bytes * BITS_PER_UNIT;
2712 bitpos += bitsize, xbitpos += bitsize)
2714 /* We need a new destination pseudo each time xbitpos is
2715 on a word boundary and when xbitpos == padding_correction
2716 (the first time through). */
2717 if (xbitpos % BITS_PER_WORD == 0
2718 || xbitpos == padding_correction)
2720 /* Generate an appropriate register. */
2721 dst = gen_reg_rtx (word_mode);
2722 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2724 /* Clear the destination before we move anything into it. */
2725 emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
2728 /* We need a new source operand each time bitpos is on a word
2730 if (bitpos % BITS_PER_WORD == 0)
2731 src = operand_subword_force (result_val,
2732 bitpos / BITS_PER_WORD,
2735 /* Use bitpos for the source extraction (left justified) and
2736 xbitpos for the destination store (right justified). */
2737 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2738 extract_bit_field (src, bitsize,
2739 bitpos % BITS_PER_WORD, 1,
2740 NULL_RTX, word_mode, word_mode,
2745 tmpmode = GET_MODE (result_rtl);
2746 if (tmpmode == BLKmode)
2748 /* Find the smallest integer mode large enough to hold the
2749 entire structure and use that mode instead of BLKmode
2750 on the USE insn for the return register. */
2751 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2752 tmpmode != VOIDmode;
2753 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
2754 /* Have we found a large enough mode? */
2755 if (GET_MODE_SIZE (tmpmode) >= bytes)
2758 /* No suitable mode found. */
2759 if (tmpmode == VOIDmode)
2762 PUT_MODE (result_rtl, tmpmode);
2765 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
2766 result_reg_mode = word_mode;
2768 result_reg_mode = tmpmode;
2769 result_reg = gen_reg_rtx (result_reg_mode);
2772 for (i = 0; i < n_regs; i++)
2773 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
2776 if (tmpmode != result_reg_mode)
2777 result_reg = gen_lowpart (tmpmode, result_reg);
2779 expand_value_return (result_reg);
2781 else if (retval_rhs != 0
2782 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
2783 && (REG_P (result_rtl)
2784 || (GET_CODE (result_rtl) == PARALLEL)))
2786 /* Calculate the return value into a temporary (usually a pseudo
2788 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
2789 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
2791 val = assign_temp (nt, 0, 0, 1);
2792 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
2793 val = force_not_mem (val);
2795 /* Return the calculated value, doing cleanups first. */
2796 expand_value_return (shift_return_value (val));
2800 /* No cleanups or no hard reg used;
2801 calculate value into hard return reg. */
2802 expand_expr (retval, const0_rtx, VOIDmode, 0);
2804 expand_value_return (result_rtl);
2808 /* Generate the RTL code for entering a binding contour.
2809 The variables are declared one by one, by calls to `expand_decl'.
2811 FLAGS is a bitwise or of the following flags:
2813 1 - Nonzero if this construct should be visible to
2816 2 - Nonzero if this contour does not require a
2817 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
2818 language-independent code should set this flag because they
2819 will not create corresponding BLOCK nodes. (There should be
2820 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
2821 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
2822 when expand_end_bindings is called.
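   For example, expand_start_target_temps below passes FLAGS == 2: the
   contour is invisible to `exit_something' and gets no
   NOTE_INSN_BLOCK_BEG note.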
2824 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
2825 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
2829 expand_start_bindings_and_block (int flags, tree block)
2831 struct nesting *thisblock = ALLOC_NESTING ();
2833 int exit_flag = ((flags & 1) != 0);
2834 int block_flag = ((flags & 2) == 0);
2836 /* If a BLOCK is supplied, then the caller should be requesting a
2837 NOTE_INSN_BLOCK_BEG note. */
2838 if (!block_flag && block)
2841 /* Create a note to mark the beginning of the block. */
2842 note = emit_note (NOTE_INSN_DELETED);
2844 /* Make an entry on block_stack for the block we are entering. */
2846 thisblock->desc = BLOCK_NESTING;
2847 thisblock->next = block_stack;
2848 thisblock->all = nesting_stack;
2849 thisblock->depth = ++nesting_depth;
2850 thisblock->data.block.stack_level = 0;
2851 thisblock->data.block.cleanups = 0;
2852 thisblock->data.block.exception_region = 0;
2853 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
2855 thisblock->data.block.conditional_code = 0;
2856 thisblock->data.block.last_unconditional_cleanup = note;
2857 /* When we insert instructions after the last unconditional cleanup,
2858 we don't adjust last_insn. That means that a later add_insn will
2859 clobber the instructions we've just added. The easiest way to
2860 fix this is to just insert another instruction here, so that the
2861 instructions inserted after the last unconditional cleanup are
2862 never the last instruction. */
2863 emit_note (NOTE_INSN_DELETED);
2866 && !(block_stack->data.block.cleanups == NULL_TREE
2867 && block_stack->data.block.outer_cleanups == NULL_TREE))
2868 thisblock->data.block.outer_cleanups
2869 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2870 block_stack->data.block.outer_cleanups);
2872 thisblock->data.block.outer_cleanups = 0;
2873 thisblock->data.block.label_chain = 0;
2874 thisblock->data.block.innermost_stack_block = stack_block_stack;
2875 thisblock->data.block.first_insn = note;
2876 thisblock->data.block.block_start_count = ++current_block_start_count;
2877 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
2878 block_stack = thisblock;
2879 nesting_stack = thisblock;
2881 /* Make a new level for allocating stack slots. */
2885 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
2886 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
2887 expand_expr are made. After we end the region, we know that all
2888 space for all temporaries that were created by TARGET_EXPRs will be
2889 destroyed and their space freed for reuse. */
2892 expand_start_target_temps (void)
2894 /* This is so that even if the result is preserved, the space
2895 allocated will be freed, as we know that it is no longer in use. */
2898 /* Start a new binding layer that will keep track of all cleanup
2899 actions to be performed. */
2900 expand_start_bindings (2);
2902 target_temp_slot_level = temp_slot_level;
2906 expand_end_target_temps (void)
2908 expand_end_bindings (NULL_TREE, 0, 0);
2910 /* This is so that even if the result is preserved, the space
2911 allocated will be freed, as we know that it is no longer in use. */
2915 /* Given a pointer to a BLOCK node return nonzero if (and only if) the node
2916 in question represents the outermost pair of curly braces (i.e. the "body
2917 block") of a function or method.
2919 For any BLOCK node representing a "body block" of a function or method, the
2920 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
2921 represents the outermost (function) scope for the function or method (i.e.
2922 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
2923 *that* node in turn will point to the relevant FUNCTION_DECL node. */
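/* For example (a sketch), for

       int f (int arg) { int local; ... }

   the BLOCK for the outermost braces is the body block; its
   BLOCK_SUPERCONTEXT is the function-scope BLOCK that holds ARG, and
   that block's BLOCK_SUPERCONTEXT is the FUNCTION_DECL for `f'.  */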
2926 is_body_block (tree stmt)
2928 if (lang_hooks.no_body_blocks)
2931 if (TREE_CODE (stmt) == BLOCK)
2933 tree parent = BLOCK_SUPERCONTEXT (stmt);
2935 if (parent && TREE_CODE (parent) == BLOCK)
2937 tree grandparent = BLOCK_SUPERCONTEXT (parent);
2939 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
2947 /* True if we are currently emitting insns in an area of output code
2948 that is controlled by a conditional expression. This is used by
2949 the cleanup handling code to generate conditional cleanup actions. */
2952 conditional_context (void)
2954 return block_stack && block_stack->data.block.conditional_code;
2957 /* Return an opaque pointer to the current nesting level, so frontend code
2958 can check its own sanity. */
2961 current_nesting_level (void)
2963 return cfun ? block_stack : 0;
2966 /* Emit code to restore vital registers at the beginning of a nonlocal goto
2969 expand_nl_goto_receiver (void)
2971 /* Clobber the FP when we get here, so we have to make sure it's
2972 marked as used by this function. */
2973 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
2975 /* Mark the static chain as clobbered here so life information
2976 doesn't get messed up for it. */
2977 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
2979 #ifdef HAVE_nonlocal_goto
2980 if (! HAVE_nonlocal_goto)
2982 /* First adjust our frame pointer to its actual value. It was
2983 previously set to the start of the virtual area corresponding to
2984 the stacked variables when we branched here and now needs to be
2985 adjusted to the actual hardware fp value.
2987 Assignments to virtual registers are converted by
2988 instantiate_virtual_regs into the corresponding assignment
2989 to the underlying register (fp in this case) that makes
2990 the original assignment true.
2991 So the following insn will actually be
2992 decrementing fp by STARTING_FRAME_OFFSET. */
2993 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
2995 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2996 if (fixed_regs[ARG_POINTER_REGNUM])
2998 #ifdef ELIMINABLE_REGS
2999 /* If the argument pointer can be eliminated in favor of the
3000 frame pointer, we don't need to restore it. We assume here
3001 that if such an elimination is present, it can always be used.
3002 This is the case on all known machines; if we don't make this
3003 assumption, we do unnecessary saving on many machines. */
3004 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
3007 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
3008 if (elim_regs[i].from == ARG_POINTER_REGNUM
3009 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3012 if (i == ARRAY_SIZE (elim_regs))
3015 /* Now restore our arg pointer from the address at which it
3016 was saved in our stack frame. */
3017 emit_move_insn (virtual_incoming_args_rtx,
3018 copy_to_reg (get_arg_pointer_save_area (cfun)));
3023 #ifdef HAVE_nonlocal_goto_receiver
3024 if (HAVE_nonlocal_goto_receiver)
3025 emit_insn (gen_nonlocal_goto_receiver ());
3028 /* @@@ This is a kludge. Not all machine descriptions define a blockage
3029 insn, but we must not allow the code we just generated to be reordered
3030 by scheduling. Specifically, the update of the frame pointer must
3031 happen immediately, not later. So emit an ASM_INPUT to act as blockage
3033 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
3036 /* Warn about any unused VARS (which may contain nodes other than
3037 VAR_DECLs, but such nodes are ignored). The nodes are connected
3038 via the TREE_CHAIN field. */
3041 warn_about_unused_variables (tree vars)
3045 if (warn_unused_variable)
3046 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3047 if (TREE_CODE (decl) == VAR_DECL
3048 && ! TREE_USED (decl)
3049 && ! DECL_IN_SYSTEM_HEADER (decl)
3050 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3051 warning ("%Junused variable '%D'", decl, decl);
3054 /* Generate RTL code to terminate a binding contour.
3056 VARS is the chain of VAR_DECL nodes for the variables bound in this
3057 contour. There may actually be other nodes in this chain, but any
3058 nodes other than VAR_DECLS are ignored.
3060 MARK_ENDS is nonzero if we should put a note at the beginning
3061 and end of this binding contour.
3063 DONT_JUMP_IN is positive if it is not valid to jump into this contour,
3064 zero if we can jump into this contour only if it does not have a saved
3065 stack level, and negative if we are not to check for invalid use of
3066 labels (because the front end does that). */
3069 expand_end_bindings (tree vars, int mark_ends ATTRIBUTE_UNUSED,
3072 struct nesting *thisblock = block_stack;
3074 /* If any of the variables in this scope were not used, warn the
3076 warn_about_unused_variables (vars);
3078 if (thisblock->exit_label)
3080 do_pending_stack_adjust ();
3081 emit_label (thisblock->exit_label);
3084 /* Don't allow jumping into a block that has a stack level.
3085 Cleanups are allowed, though. */
3086 if (dont_jump_in > 0
3087 || (dont_jump_in == 0 && thisblock->data.block.stack_level != 0))
3089 struct label_chain *chain;
3091 /* Any labels in this block are no longer valid to go to.
3092 Mark them to cause an error message. */
3093 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3095 DECL_TOO_LATE (chain->label) = 1;
3096 /* If any goto without a fixup came to this label,
3097 that must be an error, because gotos without fixups
3098 come from outside all saved stack-levels. */
3099 if (TREE_ADDRESSABLE (chain->label))
3100 error ("%Jlabel '%D' used before containing binding contour",
3101 chain->label, chain->label);
3105 /* Restore stack level in effect before the block
3106 (only if variable-size objects allocated). */
3107 /* Perform any cleanups associated with the block. */
3109 if (thisblock->data.block.stack_level != 0
3110 || thisblock->data.block.cleanups != 0)
3115 /* Don't let cleanups affect ({...}) constructs. */
3116 int old_expr_stmts_for_value = expr_stmts_for_value;
3117 rtx old_last_expr_value = last_expr_value;
3118 rtx old_last_expr_alt_rtl = last_expr_alt_rtl;
3119 tree old_last_expr_type = last_expr_type;
3120 expr_stmts_for_value = 0;
3122 /* Only clean up here if this point can actually be reached. */
3123 insn = get_last_insn ();
3124 if (GET_CODE (insn) == NOTE)
3125 insn = prev_nonnote_insn (insn);
3126 reachable = (! insn || GET_CODE (insn) != BARRIER);
3128 /* Do the cleanups. */
3129 expand_cleanups (thisblock->data.block.cleanups, 0, reachable);
3131 do_pending_stack_adjust ();
3133 expr_stmts_for_value = old_expr_stmts_for_value;
3134 last_expr_value = old_last_expr_value;
3135 last_expr_alt_rtl = old_last_expr_alt_rtl;
3136 last_expr_type = old_last_expr_type;
3138 /* Restore the stack level. */
3140 if (reachable && thisblock->data.block.stack_level != 0)
3142 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3143 thisblock->data.block.stack_level, NULL_RTX);
3144 if (cfun->nonlocal_goto_save_area)
3145 update_nonlocal_goto_save_area ();
3148 /* Any gotos out of this block must also do these things.
3149 Also report any gotos with fixups that came to labels in this
3151 fixup_gotos (thisblock,
3152 thisblock->data.block.stack_level,
3153 thisblock->data.block.cleanups,
3154 thisblock->data.block.first_insn,
3158 /* Mark the beginning and end of the scope if requested.
3159 We do this now, after running cleanups on the variables
3160 just going out of scope, so they are in scope for their cleanups. */
3162 /* Get rid of the beginning-mark if we don't make an end-mark. */
3163 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3165 /* Restore the temporary level of TARGET_EXPRs. */
3166 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
3168 /* Restore block_stack level for containing block. */
3170 stack_block_stack = thisblock->data.block.innermost_stack_block;
3171 POPSTACK (block_stack);
3173 /* Pop the stack slot nesting and free any slots at this level. */
3177 /* Generate code to save the stack pointer at the start of the current block
3178 and set up to restore it on exit. */
3181 save_stack_pointer (void)
3183 struct nesting *thisblock = block_stack;
3185 if (thisblock->data.block.stack_level == 0)
3187 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3188 &thisblock->data.block.stack_level,
3189 thisblock->data.block.first_insn);
3190 stack_block_stack = thisblock;
3194 /* Generate RTL for the automatic variable declaration DECL.
3195 (Other kinds of declarations are simply ignored if seen here.) */
3198 expand_decl (tree decl)
3202 type = TREE_TYPE (decl);
3204 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
3205 type in case this node is used in a reference. */
3206 if (TREE_CODE (decl) == CONST_DECL)
3208 DECL_MODE (decl) = TYPE_MODE (type);
3209 DECL_ALIGN (decl) = TYPE_ALIGN (type);
3210 DECL_SIZE (decl) = TYPE_SIZE (type);
3211 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
3215 /* Otherwise, only automatic variables need any expansion done. Static and
3216 external variables, and external functions, will be handled by
3217 `assemble_variable' (called from finish_decl). TYPE_DECL requires
3218 nothing. PARM_DECLs are handled in `assign_parms'. */
3219 if (TREE_CODE (decl) != VAR_DECL)
3222 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3225 /* Create the RTL representation for the variable. */
3227 if (type == error_mark_node)
3228 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
3230 else if (DECL_SIZE (decl) == 0)
3231 /* Variable with incomplete type. */
3234 if (DECL_INITIAL (decl) == 0)
3235 /* Error message was already done; now avoid a crash. */
3236 x = gen_rtx_MEM (BLKmode, const0_rtx);
3238 /* An initializer is going to decide the size of this array.
3239 Until we know the size, represent its address with a reg. */
3240 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3242 set_mem_attributes (x, decl, 1);
3243 SET_DECL_RTL (decl, x);
3245 else if (DECL_MODE (decl) != BLKmode
3246 /* If -ffloat-store, don't put explicit float vars
3248 && !(flag_float_store
3249 && TREE_CODE (type) == REAL_TYPE)
3250 && ! TREE_THIS_VOLATILE (decl)
3251 && ! DECL_NONLOCAL (decl)
3252 && (DECL_REGISTER (decl) || DECL_ARTIFICIAL (decl) || optimize))
3254 /* Automatic variable that can go in a register. */
3255 int unsignedp = TYPE_UNSIGNED (type);
3256 enum machine_mode reg_mode
3257 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3259 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
3261 /* Note if the object is a user variable. */
3262 if (!DECL_ARTIFICIAL (decl))
3264 mark_user_reg (DECL_RTL (decl));
3266 /* Trust user variables which have a pointer type to really
3267 be pointers. Do not trust compiler generated temporaries,
3268 as our type system is totally busted as it relates to
3269 pointer arithmetic, which translates into lots of compiler
3270 generated objects with pointer types, but which are not really
3272 if (POINTER_TYPE_P (type))
3273 mark_reg_pointer (DECL_RTL (decl),
3274 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
3277 maybe_set_unchanging (DECL_RTL (decl), decl);
3279 /* If something wants our address, try to use ADDRESSOF. */
3280 if (TREE_ADDRESSABLE (decl))
3281 put_var_into_stack (decl, /*rescan=*/false);
3284 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
3285 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3286 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
3287 STACK_CHECK_MAX_VAR_SIZE)))
3289 /* Variable of fixed size that goes on the stack. */
3294 /* If we previously made RTL for this decl, it must be an array
3295 whose size was determined by the initializer.
3296 The old address was a register; set that register now
3297 to the proper address. */
3298 if (DECL_RTL_SET_P (decl))
3300 if (GET_CODE (DECL_RTL (decl)) != MEM
3301 || !REG_P (XEXP (DECL_RTL (decl), 0)))
3303 oldaddr = XEXP (DECL_RTL (decl), 0);
3306 /* Set alignment we actually gave this decl. */
3307 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3308 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3309 DECL_USER_ALIGN (decl) = 0;
3311 x = assign_temp (decl, 1, 1, 1);
3312 set_mem_attributes (x, decl, 1);
3313 SET_DECL_RTL (decl, x);
3317 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3318 if (addr != oldaddr)
3319 emit_move_insn (oldaddr, addr);
3323 /* Dynamic-size object: must push space on the stack. */
3325 rtx address, size, x;
3327 /* Record the stack pointer on entry to the block, if we have
3328 not already done so. */
3329 do_pending_stack_adjust ();
3330 save_stack_pointer ();
3332 /* Compute the variable's size, in bytes. This will expand any
3333 needed SAVE_EXPRs for the first time. */
3334 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
3337 /* Allocate space on the stack for the variable. Note that
3338 DECL_ALIGN says how the variable is to be aligned and we
3339 cannot use it to conclude anything about the alignment of
3341 address = allocate_dynamic_stack_space (size, NULL_RTX,
3342 TYPE_ALIGN (TREE_TYPE (decl)));
3344 /* Reference the variable indirect through that rtx. */
3345 x = gen_rtx_MEM (DECL_MODE (decl), address);
3346 set_mem_attributes (x, decl, 1);
3347 SET_DECL_RTL (decl, x);
3350 /* Indicate the alignment we actually gave this variable. */
3351 #ifdef STACK_BOUNDARY
3352 DECL_ALIGN (decl) = STACK_BOUNDARY;
3354 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3356 DECL_USER_ALIGN (decl) = 0;
3360 /* Emit code to allocate T_SIZE bytes of dynamic stack space for ALLOC. */
3362 expand_stack_alloc (tree alloc, tree t_size)
3364 rtx address, dest, size;
3367 if (TREE_CODE (alloc) != ADDR_EXPR)
3369 var = TREE_OPERAND (alloc, 0);
3370 if (TREE_CODE (var) != VAR_DECL)
3373 type = TREE_TYPE (var);
3375 /* Compute the variable's size, in bytes. */
3376 size = expand_expr (t_size, NULL_RTX, VOIDmode, 0);
3379 /* Allocate space on the stack for the variable. */
3380 address = XEXP (DECL_RTL (var), 0);
3381 dest = allocate_dynamic_stack_space (size, address, TYPE_ALIGN (type));
3382 if (dest != address)
3383 emit_move_insn (address, dest);
3385 /* Indicate the alignment we actually gave this variable. */
3386 #ifdef STACK_BOUNDARY
3387 DECL_ALIGN (var) = STACK_BOUNDARY;
3389 DECL_ALIGN (var) = BIGGEST_ALIGNMENT;
3391 DECL_USER_ALIGN (var) = 0;
3394 /* Emit code to save the current value of the stack pointer. */
3396 expand_stack_save (void)
3400 do_pending_stack_adjust ();
3401 emit_stack_save (SAVE_BLOCK, &ret, NULL_RTX);
3405 /* Emit code to restore the current value of the stack pointer. */
3407 expand_stack_restore (tree var)
3409 rtx sa = DECL_RTL (var);
3411 emit_stack_restore (SAVE_BLOCK, sa, NULL_RTX);
3414 /* Emit code to perform the initialization of a declaration DECL. */
3417 expand_decl_init (tree decl)
3419 int was_used = TREE_USED (decl);
3421 /* If this is a CONST_DECL, we don't have to generate any code. Likewise
3422 for static decls. */
3423 if (TREE_CODE (decl) == CONST_DECL
3424 || TREE_STATIC (decl))
3427 /* Compute and store the initial value now. */
3431 if (DECL_INITIAL (decl) == error_mark_node)
3433 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3435 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3436 || code == POINTER_TYPE || code == REFERENCE_TYPE)
3437 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3441 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3443 emit_line_note (DECL_SOURCE_LOCATION (decl));
3444 expand_assignment (decl, DECL_INITIAL (decl), 0);
3448 /* Don't let the initialization count as "using" the variable. */
3449 TREE_USED (decl) = was_used;
3451 /* Free any temporaries we made while initializing the decl. */
3452 preserve_temp_slots (NULL_RTX);
3457 /* CLEANUP is an expression to be executed at exit from this binding contour;
3458 for example, in C++, it might call the destructor for this variable.
3460 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
3461 CLEANUP multiple times, and have the correct semantics. This
3462 happens in exception handling, for gotos, returns, and breaks that
3463 leave the current scope.
3465 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3466 that is not associated with any particular variable. */
3469 expand_decl_cleanup (tree decl, tree cleanup)
3471 struct nesting *thisblock;
3473 /* Error if we are not in any block. */
3474 if (cfun == 0 || block_stack == 0)
3477 thisblock = block_stack;
3479 /* Record the cleanup if there is one. */
3485 tree *cleanups = &thisblock->data.block.cleanups;
3486 int cond_context = conditional_context ();
3490 rtx flag = gen_reg_rtx (word_mode);
3495 emit_move_insn (flag, const0_rtx);
3496 set_flag_0 = get_insns ();
3499 thisblock->data.block.last_unconditional_cleanup
3500 = emit_insn_after (set_flag_0,
3501 thisblock->data.block.last_unconditional_cleanup);
3503 emit_move_insn (flag, const1_rtx);
3505 cond = build_decl (VAR_DECL, NULL_TREE,
3506 lang_hooks.types.type_for_mode (word_mode, 1));
3507 SET_DECL_RTL (cond, flag);
3509 /* Conditionalize the cleanup. */
3510 cleanup = build (COND_EXPR, void_type_node,
3511 lang_hooks.truthvalue_conversion (cond),
3512 cleanup, integer_zero_node);
3513 cleanup = fold (cleanup);
3515 cleanups = &thisblock->data.block.cleanups;
3518 cleanup = unsave_expr (cleanup);
3520 t = *cleanups = tree_cons (decl, cleanup, *cleanups);
3523 /* If this block has a cleanup, it belongs in stack_block_stack. */
3524 stack_block_stack = thisblock;
3531 if (! using_eh_for_cleanups_p)
3532 TREE_ADDRESSABLE (t) = 1;
3534 expand_eh_region_start ();
3541 thisblock->data.block.last_unconditional_cleanup
3542 = emit_insn_after (seq,
3543 thisblock->data.block.last_unconditional_cleanup);
3547 thisblock->data.block.last_unconditional_cleanup
3549 /* When we insert instructions after the last unconditional cleanup,
3550 we don't adjust last_insn. That means that a later add_insn will
3551 clobber the instructions we've just added. The easiest way to
3552 fix this is to just insert another instruction here, so that the
3553 instructions inserted after the last unconditional cleanup are
3554 never the last instruction. */
3555 emit_note (NOTE_INSN_DELETED);
3561 /* Like expand_decl_cleanup, but maybe only run the cleanup if an exception
3565 expand_decl_cleanup_eh (tree decl, tree cleanup, int eh_only)
3567 int ret = expand_decl_cleanup (decl, cleanup);
3570 tree node = block_stack->data.block.cleanups;
3571 CLEANUP_EH_ONLY (node) = eh_only;
3576 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
3577 DECL_ELTS is the list of elements that belong to DECL's type.
3578 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
3581 expand_anon_union_decl (tree decl, tree cleanup, tree decl_elts)
3583 struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
3587 /* If any of the elements are addressable, so is the entire union. */
3588 for (t = decl_elts; t; t = TREE_CHAIN (t))
3589 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
3591 TREE_ADDRESSABLE (decl) = 1;
3596 expand_decl_cleanup (decl, cleanup);
3597 x = DECL_RTL (decl);
3599 /* Go through the elements, assigning RTL to each. */
3600 for (t = decl_elts; t; t = TREE_CHAIN (t))
3602 tree decl_elt = TREE_VALUE (t);
3603 tree cleanup_elt = TREE_PURPOSE (t);
3604 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
3606 /* If any of the elements are used, so is the entire
3608 if (TREE_USED (decl_elt))
3609 TREE_USED (decl) = 1;
3611 /* Propagate the union's alignment to the elements. */
3612 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
3613 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
3615 /* If the element has BLKmode and the union doesn't, the union is
3616 aligned such that the element doesn't need to have BLKmode, so
3617 change the element's mode to the appropriate one for its size. */
3618 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
3619 DECL_MODE (decl_elt) = mode
3620 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
3622 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
3623 instead create a new MEM rtx with the proper mode. */
3624 if (GET_CODE (x) == MEM)
3626 if (mode == GET_MODE (x))
3627 SET_DECL_RTL (decl_elt, x);
3629 SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
3633 if (mode == GET_MODE (x))
3634 SET_DECL_RTL (decl_elt, x);
3636 SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
3641 /* Record the cleanup if there is one. */
3644 thisblock->data.block.cleanups
3645 = tree_cons (decl_elt, cleanup_elt,
3646 thisblock->data.block.cleanups);
3650 /* Expand a list of cleanups LIST.
3651 Elements may be expressions or may be nested lists.
3653 If IN_FIXUP is nonzero, we are generating this cleanup for a fixup
3654 goto and handle protection regions specially in that case.
3656 If REACHABLE, we emit code, otherwise just inform the exception handling
3657 code about this finalization. */
3660 expand_cleanups (tree list, int in_fixup, int reachable)
3663 for (tail = list; tail; tail = TREE_CHAIN (tail))
3664 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3665 expand_cleanups (TREE_VALUE (tail), in_fixup, reachable);
3668 if (! in_fixup && using_eh_for_cleanups_p)
3669 expand_eh_region_end_cleanup (TREE_VALUE (tail));
3671 if (reachable && !CLEANUP_EH_ONLY (tail))
3673 /* Cleanups may be run multiple times. For example,
3674 when exiting a binding contour, we expand the
3675 cleanups associated with that contour. When a goto
3676 within that binding contour has a target outside that
3677 contour, it will expand all cleanups from its scope to
3678 the target. Though the cleanups are expanded multiple
3679 times, the control paths are non-overlapping so the
3680 cleanups will not be executed twice. */
3682 /* We may need to protect from outer cleanups. */
3683 if (in_fixup && using_eh_for_cleanups_p)
3685 expand_eh_region_start ();
3687 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
3689 expand_eh_region_end_fixup (TREE_VALUE (tail));
3692 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
3699 /* Mark the context we are emitting RTL for as a conditional
3700 context, so that any cleanup actions we register with
3701 expand_decl_cleanup will be properly conditionalized when those
3702 cleanup actions are later performed. Must be called before any
3703 expression (tree) is expanded that is within a conditional context. */
3706 start_cleanup_deferral (void)
3708 /* block_stack can be NULL if we are inside the parameter list. It is
3709 OK to do nothing, because cleanups aren't possible here. */
3711 ++block_stack->data.block.conditional_code;
3714 /* Mark the end of a conditional region of code. Because cleanup
3715 deferrals may be nested, we may still be in a conditional region
3716 after we end the currently deferred cleanups; only after we end all
3717 deferred cleanups are we back in unconditional code. */
3720 end_cleanup_deferral (void)
3722 /* block_stack can be NULL if we are inside the parameter list. It is
3723 OK to do nothing, because cleanups aren't possible here. */
3725 --block_stack->data.block.conditional_code;
3729 last_cleanup_this_contour (void)
3731 if (block_stack == 0)
3734 return block_stack->data.block.cleanups;
3738 /* Return nonzero if any containing block has a stack level or
3742 containing_blocks_have_cleanups_or_stack_level (void)
3744 struct nesting *block;
3746 for (block = block_stack; block; block = block->next)
3747 if (block->data.block.stack_level != 0
3748 || block->data.block.cleanups != 0)
3754 /* Return 1 if there are any pending cleanups at this point.
3755 Check the current contour as well as contours that enclose
3756 the current contour. */
3759 any_pending_cleanups (void)
3761 struct nesting *block;
3763 if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
3766 if (block_stack->data.block.cleanups != NULL)
3769 if (block_stack->data.block.outer_cleanups == 0)
3772 for (block = block_stack->next; block; block = block->next)
3773 if (block->data.block.cleanups != 0)
3779 /* Enter a case (Pascal) or switch (C) statement.
3780 Push a block onto case_stack and nesting_stack
3781 to accumulate the case-labels that are seen
3782 and to record the labels generated for the statement.
3784 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
3785 Otherwise, this construct is transparent for `exit_something'.
3787 EXPR is the index-expression to be dispatched on.
3788 TYPE is its nominal type. We could simply convert EXPR to this type,
3789 but instead we take short cuts. */
3792 expand_start_case (int exit_flag, tree expr, tree type,
3793 const char *printname)
3795 struct nesting *thiscase = ALLOC_NESTING ();
3797 /* Make an entry on case_stack for the case we are entering. */
3799 thiscase->desc = CASE_NESTING;
3800 thiscase->next = case_stack;
3801 thiscase->all = nesting_stack;
3802 thiscase->depth = ++nesting_depth;
3803 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
3804 thiscase->data.case_stmt.case_list = 0;
3805 thiscase->data.case_stmt.index_expr = expr;
3806 thiscase->data.case_stmt.nominal_type = type;
3807 thiscase->data.case_stmt.default_label = 0;
3808 thiscase->data.case_stmt.printname = printname;
3809 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
3810 case_stack = thiscase;
3811 nesting_stack = thiscase;
3813 do_pending_stack_adjust ();
3816 /* Make sure case_stmt.start points to something that won't
3817 need any transformation before expand_end_case. */
3818 if (GET_CODE (get_last_insn ()) != NOTE)
3819 emit_note (NOTE_INSN_DELETED);
3821 thiscase->data.case_stmt.start = get_last_insn ();
3823 start_cleanup_deferral ();
3826 /* Accumulate one case or default label inside a case or switch statement.
3827 VALUE is the value of the case (a null pointer, for a default label).
3828 The function CONVERTER, when applied to arguments T and V,
3829 converts the value V to the type T.
3831 If not currently inside a case or switch statement, return 1 and do
3832 nothing. The caller will print a language-specific error message.
3833 If VALUE is a duplicate or overlaps, return 2 and do nothing
3834 except store the (first) duplicate node in *DUPLICATE.
3835 If VALUE is out of range, return 3 and do nothing.
3836 If we are jumping into the scope of a cleanup or var-sized array, return 5.
3837 Return 0 on success.
3839 Extended to handle range statements. */
3842 pushcase (tree value, tree (*converter) (tree, tree), tree label,
3848 /* Fail if not inside a real case statement. */
3849 if (! (case_stack && case_stack->data.case_stmt.start))
3852 if (stack_block_stack
3853 && stack_block_stack->depth > case_stack->depth)
3856 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3857 nominal_type = case_stack->data.case_stmt.nominal_type;
3859 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3860 if (index_type == error_mark_node)
3863 /* Convert VALUE to the type in which the comparisons are nominally done. */
3865 value = (*converter) (nominal_type, value);
3867 /* Fail if this value is out of range for the actual type of the index
3868 (which may be narrower than NOMINAL_TYPE). */
3870 && (TREE_CONSTANT_OVERFLOW (value)
3871 || ! int_fits_type_p (value, index_type)))
3874 return add_case_node (value, value, label, duplicate, false);
3877 /* Like pushcase but this case applies to all values between VALUE1 and
3878 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
3879 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
3880 starts at VALUE1 and ends at the highest value of the index type.
3881 If both are NULL, this case applies to all values.
3883 The return value is the same as that of pushcase but there is one
3884 additional error code: 4 means the specified range was empty. */
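/* A sketch of a front end's call for the GNU C range label
   "case 1 ... 3:" (CONVERT being the language's tree conversion
   routine, LABEL the case's LABEL_DECL, and DUPLICATE a tree to
   receive any clashing node):

       pushcase_range (build_int_2 (1, 0), build_int_2 (3, 0),
                       convert, label, &duplicate);

   The call returns 0 on success or one of the codes above.  */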
3887 pushcase_range (tree value1, tree value2, tree (*converter) (tree, tree),
3888 tree label, tree *duplicate)
3893 /* Fail if not inside a real case statement. */
3894 if (! (case_stack && case_stack->data.case_stmt.start))
3897 if (stack_block_stack
3898 && stack_block_stack->depth > case_stack->depth)
3901 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3902 nominal_type = case_stack->data.case_stmt.nominal_type;
3904 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3905 if (index_type == error_mark_node)
3908 /* Convert VALUEs to the type in which the comparisons are nominally done
3909 and replace any unspecified value with the corresponding bound. */
3911 value1 = TYPE_MIN_VALUE (index_type);
3913 value2 = TYPE_MAX_VALUE (index_type);
3915 /* Fail if the range is empty. Do this before any conversion since
3916 we want to allow out-of-range empty ranges. */
3917 if (value2 != 0 && tree_int_cst_lt (value2, value1))
3920 /* If the max was unbounded, use the max of the nominal_type we are
3921 converting to. Do this after the < check above to suppress false
3924 value2 = TYPE_MAX_VALUE (nominal_type);
3926 value1 = (*converter) (nominal_type, value1);
3927 value2 = (*converter) (nominal_type, value2);
3929 /* Fail if these values are out of range. */
3930 if (TREE_CONSTANT_OVERFLOW (value1)
3931 || ! int_fits_type_p (value1, index_type))
3934 if (TREE_CONSTANT_OVERFLOW (value2)
3935 || ! int_fits_type_p (value2, index_type))
3938 return add_case_node (value1, value2, label, duplicate, false);
3941 /* Do the actual insertion of a case label for pushcase and pushcase_range
3942 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
3943 slowdown for large switch statements. */
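/* Without rebalancing, a switch whose case values arrive in ascending
   order would degenerate into a linear chain through the RIGHT links;
   the rotations below keep the tree's height logarithmic in the
   number of case labels.  */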
3946 add_case_node (tree low, tree high, tree label, tree *duplicate,
3947 bool dont_expand_label)
3949 struct case_node *p, **q, *r;
3951 /* If there's no HIGH value, then this is not a case range; it's
3952 just a simple case label. But that's just a degenerate case
3957 /* Handle default labels specially. */
3960 if (case_stack->data.case_stmt.default_label != 0)
3962 *duplicate = case_stack->data.case_stmt.default_label;
3965 case_stack->data.case_stmt.default_label = label;
3966 if (!dont_expand_label)
3967 expand_label (label);
3971 q = &case_stack->data.case_stmt.case_list;
3978 /* Keep going past elements distinctly greater than HIGH. */
3979 if (tree_int_cst_lt (high, p->low))
3982 /* or distinctly less than LOW. */
3983 else if (tree_int_cst_lt (p->high, low))
3988 /* We have an overlap; this is an error. */
3989 *duplicate = p->code_label;
3994 /* Add this label to the chain, and succeed. */
3996 r = ggc_alloc (sizeof (struct case_node));
3999 /* If the bounds are equal, turn this into the one-value case. */
4000 if (tree_int_cst_equal (low, high))
4005 r->code_label = label;
4006 if (!dont_expand_label)
4007 expand_label (label);
4017 struct case_node *s;
4023 if (! (b = p->balance))
4024 /* Growth propagation from left side. */
4031 if ((p->left = s = r->right))
4040 if ((r->parent = s))
4048 case_stack->data.case_stmt.case_list = r;
4051 /* r->balance == +1 */
4056 struct case_node *t = r->right;
4058 if ((p->left = s = t->right))
4062 if ((r->right = s = t->left))
4076 if ((t->parent = s))
4084 case_stack->data.case_stmt.case_list = t;
4091 /* p->balance == +1; growth of left side balances the node. */
4101 if (! (b = p->balance))
4102 /* Growth propagation from right side. */
4110 if ((p->right = s = r->left))
4118 if ((r->parent = s))
4127 case_stack->data.case_stmt.case_list = r;
4131 /* r->balance == -1 */
4135 struct case_node *t = r->left;
4137 if ((p->right = s = t->left))
4142 if ((r->left = s = t->right))
4156 if ((t->parent = s))
4165 case_stack->data.case_stmt.case_list = t;
4171 /* p->balance == -1; growth of right side balances the node. */
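/* The rebalancing above is standard AVL insertion, written out with
   explicit PARENT bookkeeping. As a minimal standalone sketch of the
   single rotation being performed (this helper does not exist in this
   file; it is only meant to make the pointer juggling easier to
   follow):

       static void
       rotate_left (struct case_node **root)
       {
         struct case_node *p = *root;     /* Old subtree root.  */
         struct case_node *r = p->right;  /* New subtree root.  */

         p->right = r->left;              /* R's left subtree moves under P.  */
         if (p->right)
           p->right->parent = p;
         r->left = p;                     /* P becomes R's left child.  */
         r->parent = p->parent;
         p->parent = r;
         *root = r;                       /* R replaces P.  */
       }

   The double-rotation paths above compose two such rotations and
   patch up the balance factors along the way.  */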
4184 /* Maximum number of case bit tests. */
4185 #define MAX_CASE_BIT_TESTS 3
4187 /* By default, enable case bit tests on targets with ashlsi3. */
4188 #ifndef CASE_USE_BIT_TESTS
4189 #define CASE_USE_BIT_TESTS (ashl_optab->handlers[word_mode].insn_code \
4190 != CODE_FOR_nothing)
4194 /* A case_bit_test represents a set of case nodes that may be
4195 selected from using a bit-wise comparison. HI and LO hold
4196 the integer to be tested against, LABEL contains the label
4197 to jump to upon success and BITS counts the number of case
4198 nodes handled by this test, typically the number of bits set in HI:LO. */
4201 struct case_bit_test
4209 /* Determine whether "1 << x" is relatively cheap in word_mode. */
4212 bool lshift_cheap_p (void)
4214 static bool init = false;
4215 static bool cheap = true;
4219 rtx reg = gen_rtx_REG (word_mode, 10000); /* An arbitrary pseudo register.  */
4220 int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET);
4221 cheap = cost < COSTS_N_INSNS (3);
4228 /* Comparison function for qsort to order bit tests by decreasing
4229 number of case nodes, i.e. the node with the most cases gets tested first. */
4233 case_bit_test_cmp (const void *p1, const void *p2)
4235 const struct case_bit_test *d1 = p1;
4236 const struct case_bit_test *d2 = p2;
4238 return d2->bits - d1->bits;
4241 /* Expand a switch statement by a short sequence of bit-wise
4242 comparisons. "switch(x)" is effectively converted into
4243 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
4246 INDEX_EXPR is the value being switched on, which is of
4247 type INDEX_TYPE. MINVAL is the lowest case value in
4248 the case nodes, of INDEX_TYPE type, and RANGE is the highest
4249 value minus MINVAL, also of type INDEX_TYPE. NODES is
4250 the set of case nodes, and DEFAULT_LABEL is the label to
4251 branch to should none of the cases match.
4253 There *MUST* be MAX_CASE_BIT_TESTS or fewer unique case node targets. */
4257 emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
4258 tree range, case_node_ptr nodes, rtx default_label)
4260 struct case_bit_test test[MAX_CASE_BIT_TESTS];
4261 enum machine_mode mode;
4262 rtx expr, index, label;
4263 unsigned int i, j, lo, hi;
4264 struct case_node *n;
4268 for (n = nodes; n; n = n->right)
4270 label = label_rtx (n->code_label);
4271 for (i = 0; i < count; i++)
4272 if (same_case_target_p (label, test[i].label))
4277 if (count >= MAX_CASE_BIT_TESTS)
4281 test[i].label = label;
4288 lo = tree_low_cst (fold (build (MINUS_EXPR, index_type,
4289 n->low, minval)), 1);
4290 hi = tree_low_cst (fold (build (MINUS_EXPR, index_type,
4291 n->high, minval)), 1);
4292 for (j = lo; j <= hi; j++)
4293 if (j >= HOST_BITS_PER_WIDE_INT)
4294 test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_WIDE_INT);
4296 test[i].lo |= (HOST_WIDE_INT) 1 << j;
4299 qsort (test, count, sizeof (*test), case_bit_test_cmp);
4301 index_expr = fold (build (MINUS_EXPR, index_type,
4302 convert (index_type, index_expr),
4303 convert (index_type, minval)));
4304 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4306 index = protect_from_queue (index, 0);
4307 do_pending_stack_adjust ();
4309 mode = TYPE_MODE (index_type);
4310 expr = expand_expr (range, NULL_RTX, VOIDmode, 0);
4311 emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
4314 index = convert_to_mode (word_mode, index, 0);
4315 index = expand_binop (word_mode, ashl_optab, const1_rtx,
4316 index, NULL_RTX, 1, OPTAB_WIDEN);
4318 for (i = 0; i < count; i++)
4320 expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
4321 expr = expand_binop (word_mode, and_optab, index, expr,
4322 NULL_RTX, 1, OPTAB_WIDEN);
4323 emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
4324 word_mode, 1, test[i].label);
4327 emit_jump (default_label);
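/* As a concrete illustration of the transformation, a switch such as

       switch (c)
         {
         case 'a': case 'e': case 'i': case 'o': case 'u':
           return 1;
         default:
           return 0;
         }

   has five case values but only one unique target, and the values
   span far fewer than BITS_PER_WORD values, so it is emitted roughly
   as (return_1 and dflt being pseudo-labels for the two targets)

       if ((unsigned) (c - 'a') <= (unsigned) ('u' - 'a')
           && ((1 << (c - 'a')) & CST) != 0)
         goto return_1;
       goto dflt;

   where CST has bits 0, 4, 8, 14 and 20 set, the offsets of the five
   vowels from 'a'.  */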
4331 #define HAVE_casesi 0
4334 #ifndef HAVE_tablejump
4335 #define HAVE_tablejump 0
4338 /* Terminate a case (Pascal) or switch (C) statement
4339 in which ORIG_INDEX is the expression to be tested.
4340 If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
4341 type as given in the source before any compiler conversions.
4342 Generate the code to test it and jump to the right place. */
4345 expand_end_case_type (tree orig_index, tree orig_type)
4347 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
4348 rtx default_label = 0;
4349 struct case_node *n, *m;
4350 unsigned int count, uniq;
4356 rtx before_case, end, lab;
4357 struct nesting *thiscase = case_stack;
4358 tree index_expr, index_type;
4359 bool exit_done = false;
4362 /* Don't crash due to previous errors. */
4363 if (thiscase == NULL)
4366 index_expr = thiscase->data.case_stmt.index_expr;
4367 index_type = TREE_TYPE (index_expr);
4368 unsignedp = TYPE_UNSIGNED (index_type);
4369 if (orig_type == NULL)
4370 orig_type = TREE_TYPE (orig_index);
4372 do_pending_stack_adjust ();
4374 /* An ERROR_MARK occurs for various reasons including invalid data type. */
4375 if (index_type != error_mark_node)
4377 /* If we don't have a default-label, create one here,
4378 after the body of the switch. */
4379 if (thiscase->data.case_stmt.default_label == 0)
4381 thiscase->data.case_stmt.default_label
4382 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4383 /* Share the exit label if possible. */
4384 if (thiscase->exit_label)
4386 SET_DECL_RTL (thiscase->data.case_stmt.default_label,
4387 thiscase->exit_label);
4390 expand_label (thiscase->data.case_stmt.default_label);
4392 default_label = label_rtx (thiscase->data.case_stmt.default_label);
4394 before_case = get_last_insn ();
4396 if (thiscase->data.case_stmt.case_list
4397 && thiscase->data.case_stmt.case_list->left)
4398 thiscase->data.case_stmt.case_list
4399 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
4401 /* Simplify the case-list before we count it. */
4402 group_case_nodes (thiscase->data.case_stmt.case_list);
4403 strip_default_case_nodes (&thiscase->data.case_stmt.case_list,
4406 /* Get upper and lower bounds of case values.
4407 Also convert all the case values to the index expr's data type. */
4411 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4413 /* Check that the low and high label values are integers. */
4414 if (TREE_CODE (n->low) != INTEGER_CST)
4416 if (TREE_CODE (n->high) != INTEGER_CST)
4419 n->low = convert (index_type, n->low);
4420 n->high = convert (index_type, n->high);
4422 /* Count the elements and track the largest and smallest
4423 of them (treating them as signed even if they are not). */
4431 if (INT_CST_LT (n->low, minval))
4433 if (INT_CST_LT (maxval, n->high))
4436 /* A range counts double, since it requires two compares. */
4437 if (! tree_int_cst_equal (n->low, n->high))
4440 /* Count the number of unique case node targets. */
4442 lab = label_rtx (n->code_label);
4443 for (m = thiscase->data.case_stmt.case_list; m != n; m = m->right)
4444 if (same_case_target_p (label_rtx (m->code_label), lab))
4451 /* Compute span of values. */
4453 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
4455 end_cleanup_deferral ();
4459 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
4461 emit_jump (default_label);
4464 /* Try implementing this switch statement by a short sequence of
4465 bit-wise comparisons. However, we let the binary-tree case
4466 below handle constant index expressions. */
4467 else if (CASE_USE_BIT_TESTS
4468 && ! TREE_CONSTANT (index_expr)
4469 && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
4470 && compare_tree_int (range, 0) > 0
4471 && lshift_cheap_p ()
4472 && ((uniq == 1 && count >= 3)
4473 || (uniq == 2 && count >= 5)
4474 || (uniq == 3 && count >= 6)))
4476 /* If all the case values fit in a word even without
4477 subtracting MINVAL, we can omit the subtraction
4478 entirely. */
4479 if (compare_tree_int (minval, 0) > 0
4480 && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
4482 minval = integer_zero_node;
4485 emit_case_bit_tests (index_type, index_expr, minval, range,
4486 thiscase->data.case_stmt.case_list,
4490 /* If the range of values is much bigger than the number of values,
4491 make a sequence of conditional branches instead of a dispatch.
4492 If the switch-index is a constant, do it this way
4493 because we can optimize it. */
4495 else if (count < case_values_threshold ()
4496 || compare_tree_int (range,
4497 (optimize_size ? 3 : 10) * count) > 0
4498 /* RANGE may be signed, and really large ranges will show up
4499 as negative numbers. */
4500 || compare_tree_int (range, 0) < 0
4501 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
4504 || TREE_CONSTANT (index_expr)
4505 /* If neither casesi nor tablejump is available, we can
4506 only go this way. */
4507 || (!HAVE_casesi && !HAVE_tablejump))
4509 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4511 /* If the index is a short or char for which we do not have
4512 an insn to handle comparisons directly, convert it to
4513 a full integer now, rather than letting each comparison
4514 generate the conversion. */
4516 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
4517 && ! have_insn_for (COMPARE, GET_MODE (index)))
4519 enum machine_mode wider_mode;
4520 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
4521 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
4522 if (have_insn_for (COMPARE, wider_mode))
4524 index = convert_to_mode (wider_mode, index, unsignedp);
4530 do_pending_stack_adjust ();
4532 index = protect_from_queue (index, 0);
4533 if (GET_CODE (index) == MEM)
4534 index = copy_to_reg (index);
4535 if (GET_CODE (index) == CONST_INT
4536 || TREE_CODE (index_expr) == INTEGER_CST)
4538 /* Make a tree node with the proper constant value
4539 if we don't already have one. */
4540 if (TREE_CODE (index_expr) != INTEGER_CST)
4543 = build_int_2 (INTVAL (index),
4544 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
4545 index_expr = convert (index_type, index_expr);
4548 /* For constant index expressions we need only
4549 issue an unconditional branch to the appropriate
4550 target code. The job of removing any unreachable
4551 code is left to the optimization phase if the
4552 "-O" option is specified. */
4553 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4554 if (! tree_int_cst_lt (index_expr, n->low)
4555 && ! tree_int_cst_lt (n->high, index_expr))
4559 emit_jump (label_rtx (n->code_label));
4561 emit_jump (default_label);
4565 /* If the index expression is not constant we generate
4566 a binary decision tree to select the appropriate
4567 target code. This is done as follows:
4569 The list of cases is rearranged into a binary tree,
4570 nearly optimal assuming equal probability for each case.
4572 The tree is transformed into RTL, eliminating
4573 redundant test conditions at the same time.
4575 If program flow could reach the end of the
4576 decision tree an unconditional jump to the
4577 default code is emitted. */
4580 = (TREE_CODE (orig_type) != ENUMERAL_TYPE
4581 && estimate_case_costs (thiscase->data.case_stmt.case_list));
4582 balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
4583 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
4584 default_label, index_type);
4585 emit_jump_if_reachable (default_label);
4590 table_label = gen_label_rtx ();
4591 if (! try_casesi (index_type, index_expr, minval, range,
4592 table_label, default_label))
4594 index_type = thiscase->data.case_stmt.nominal_type;
4596 /* Index jumptables from zero for suitable values of
4597 minval to avoid a subtraction. */
4599 && compare_tree_int (minval, 0) > 0
4600 && compare_tree_int (minval, 3) < 0)
4602 minval = integer_zero_node;
4606 if (! try_tablejump (index_type, index_expr, minval, range,
4607 table_label, default_label))
4611 /* Get table of labels to jump to, in order of case index. */
4613 ncases = tree_low_cst (range, 0) + 1;
4614 labelvec = alloca (ncases * sizeof (rtx));
4615 memset (labelvec, 0, ncases * sizeof (rtx));
4617 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4619 /* Compute the low and high bounds relative to the minimum
4620 value since that should fit in a HOST_WIDE_INT while the
4621 actual values may not. */
4623 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
4624 n->low, minval)), 1);
4625 HOST_WIDE_INT i_high
4626 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
4627 n->high, minval)), 1);
4630 for (i = i_low; i <= i_high; i++)
4632 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
4635 /* Fill in the gaps with the default. */
4636 for (i = 0; i < ncases; i++)
4637 if (labelvec[i] == 0)
4638 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
4640 /* Output the table. */
4641 emit_label (table_label);
4643 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
4644 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
4645 gen_rtx_LABEL_REF (Pmode, table_label),
4646 gen_rtvec_v (ncases, labelvec),
4647 const0_rtx, const0_rtx));
4649 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
4650 gen_rtvec_v (ncases, labelvec)));
4652 /* If the case insn drops through the table,
4653 after the table we must jump to the default-label.
4654 Otherwise record no drop-through after the table. */
4655 #ifdef CASE_DROPS_THROUGH
4656 emit_jump (default_label);
4662 before_case = NEXT_INSN (before_case);
4663 end = get_last_insn ();
4664 if (squeeze_notes (&before_case, &end))
4666 reorder_insns (before_case, end,
4667 thiscase->data.case_stmt.start);
4670 end_cleanup_deferral ();
4672 if (thiscase->exit_label && !exit_done)
4673 emit_label (thiscase->exit_label);
4675 POPSTACK (case_stack);
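/* To put concrete numbers on the heuristics above, assuming -O2 (so
   the density factor is 10) and a typical case_values_threshold of 4
   or 5: a switch with 5 case values spanning 0 .. 100 has range
   100 > 10 * 5, so it is expanded as a tree of compares, whereas a
   switch with 13 case values spanning 0 .. 12 has range 12 <= 10 * 13
   and a count above the threshold, so it becomes a dispatch table
   when casesi or tablejump is available.  */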
4680 /* Convert the tree NODE into a list linked by the right field, with the left
4681 field zeroed. RIGHT is used for recursion; it is a list to be placed
4682 rightmost in the resulting list. */
4684 static struct case_node *
4685 case_tree2list (struct case_node *node, struct case_node *right)
4687 struct case_node *left;
4690 right = case_tree2list (node->right, right);
4692 node->right = right;
4693 if ((left = node->left))
4696 return case_tree2list (left, node);
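/* For instance, the balanced tree

            4
           / \
          2   6

   comes out of this function as the chain 2 -> 4 -> 6, linked through
   the RIGHT fields with every LEFT field cleared.  */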
4702 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
4705 do_jump_if_equal (rtx op1, rtx op2, rtx label, int unsignedp)
4707 if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
4713 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
4714 (GET_MODE (op1) == VOIDmode
4715 ? GET_MODE (op2) : GET_MODE (op1)),
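/* For example, do_jump_if_equal (index, GEN_INT (42), label, 1)
   emits an unsigned comparison of INDEX against 42 that branches to
   LABEL on equality; when both operands are CONST_INTs the comparison
   is decided at compile time and at most an unconditional jump is
   emitted.  */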
4719 /* Not all case values are encountered equally. This function
4720 uses a heuristic to weight case labels, in cases where that
4721 looks like a reasonable thing to do.
4723 Right now, all we try to guess is text, and we establish the following weights:
4726 chars above space: 16; digits: 16; default: 12; space, punct: 8; tab: 4; newline: 2; other "\" chars: 1; remaining control chars: 0.
4735 If we find any cases in the switch that are not either -1 or in the range
4736 of valid ASCII characters, or are control characters other than those
4737 commonly used with "\", don't treat this switch as scanning text.
4739 Return 1 if these nodes are suitable for cost estimation, otherwise return 0. */
4743 estimate_case_costs (case_node_ptr node)
4745 tree min_ascii = integer_minus_one_node;
4746 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
4750 /* If we haven't already made the cost table, make it now. Note that the
4751 lower bound of the table is -1, not zero. */
4753 if (! cost_table_initialized)
4755 cost_table_initialized = 1;
4757 for (i = 0; i < 128; i++)
4760 COST_TABLE (i) = 16;
4761 else if (ISPUNCT (i))
4763 else if (ISCNTRL (i))
4764 COST_TABLE (i) = -1;
4767 COST_TABLE (' ') = 8;
4768 COST_TABLE ('\t') = 4;
4769 COST_TABLE ('\0') = 4;
4770 COST_TABLE ('\n') = 2;
4771 COST_TABLE ('\f') = 1;
4772 COST_TABLE ('\v') = 1;
4773 COST_TABLE ('\b') = 1;
4776 /* See if all the case expressions look like text. It is text if the
4777 constant is >= -1 and the highest constant is <= 127. Do all comparisons
4778 as signed arithmetic since we don't want to ever access cost_table with a
4779 value less than -1. Also check that none of the constants in a range
4780 are strange control characters. */
4782 for (n = node; n; n = n->right)
4784 if (INT_CST_LT (n->low, min_ascii) || INT_CST_LT (max_ascii, n->high))
4787 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
4788 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
4789 if (COST_TABLE (i) < 0)
4793 /* All interesting values are within the range of interesting
4794 ASCII characters. */
4798 /* Determine whether two case labels branch to the same target. */
4801 same_case_target_p (rtx l1, rtx l2)
4809 i1 = next_real_insn (l1);
4810 i2 = next_real_insn (l2);
4814 if (i1 && simplejump_p (i1))
4816 l1 = XEXP (SET_SRC (PATTERN (i1)), 0);
4819 if (i2 && simplejump_p (i2))
4821 l2 = XEXP (SET_SRC (PATTERN (i2)), 0);
4824 /* When coming from gimple, we usually won't have emitted either
4825 the labels or the body of the switch statement. The job being
4826 done here should be done via jump threading at the tree level.
4827 Cases that go to the same place should have the same label. */
4831 /* Delete nodes that branch to the default label from a list of
4832 case nodes. E.g. "case 5: default:" becomes just "default:". */
4835 strip_default_case_nodes (case_node_ptr *prev, rtx deflab)
4842 if (same_case_target_p (label_rtx (ptr->code_label), deflab))
4849 /* Scan an ordered list of case nodes
4850 combining those with consecutive values or ranges.
4852 E.g. three separate entries 1: 2: 3: become one entry 1..3: */
4855 group_case_nodes (case_node_ptr head)
4857 case_node_ptr node = head;
4862 case_node_ptr np = node;
4864 lab = label_rtx (node->code_label);
4866 /* Try to group the successors of NODE with NODE. */
4867 while (((np = np->right) != 0)
4868 /* Do they jump to the same place? */
4869 && same_case_target_p (label_rtx (np->code_label), lab)
4870 /* Are their ranges consecutive? */
4871 && tree_int_cst_equal (np->low,
4872 fold (build (PLUS_EXPR,
4873 TREE_TYPE (node->high),
4876 /* An overflow is not consecutive. */
4877 && tree_int_cst_lt (node->high,
4878 fold (build (PLUS_EXPR,
4879 TREE_TYPE (node->high),
4881 integer_one_node))))
4883 node->high = np->high;
4885 /* NP is the first node after NODE which can't be grouped with it.
4886 Delete the nodes in between, and move on to that node. */
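/* For example, three consecutive labels that branch to the same
   place,

       case 1: case 2: case 3: ...

   reach this function as the nodes (1,1), (2,2) and (3,3) and leave
   it as the single node (1,3).  */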
4892 /* Take an ordered list of case nodes
4893 and transform them into a near optimal binary tree,
4894 on the assumption that any target code selection value is as
4895 likely as any other.
4897 The transformation is performed by splitting the ordered
4898 list into two equal sections plus a pivot. The parts are
4899 then attached to the pivot as left and right branches. Each
4900 branch is then transformed recursively. */
4903 balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
4916 /* Count the number of entries on the branch. Also count the ranges. */
4920 if (!tree_int_cst_equal (np->low, np->high))
4924 cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
4928 cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
4936 /* Split this list if it is long enough for that to help. */
4941 /* Find the place in the list that bisects the list's total cost;
4942 here I gets half the total cost. */
4947 /* Skip nodes while their cost does not reach that amount. */
4948 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
4949 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
4950 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
4953 npp = &(*npp)->right;
4958 /* Leave this branch lopsided, but optimize left-hand
4959 side and fill in `parent' fields for right-hand side. */
4961 np->parent = parent;
4962 balance_case_nodes (&np->left, np);
4963 for (; np->right; np = np->right)
4964 np->right->parent = np;
4968 /* If there are just three nodes, split at the middle one. */
4970 npp = &(*npp)->right;
4973 /* Find the place in the list that bisects the list's total cost,
4974 where ranges count as 2.
4975 Here I gets half the total cost. */
4976 i = (i + ranges + 1) / 2;
4979 /* Skip nodes while their cost does not reach that amount. */
4980 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
4985 npp = &(*npp)->right;
4990 np->parent = parent;
4993 /* Optimize each of the two split parts. */
4994 balance_case_nodes (&np->left, np);
4995 balance_case_nodes (&np->right, np);
4999 /* Else leave this branch as one level,
5000 but fill in `parent' fields. */
5002 np->parent = parent;
5003 for (; np->right; np = np->right)
5004 np->right->parent = np;
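/* For example, with no cost table, seven equally likely single-value
   nodes

       1 -> 2 -> 3 -> 4 -> 5 -> 6 -> 7

   are split at the middle node 4 and each half is balanced
   recursively, yielding

              4
            /   \
           2     6
          / \   / \
         1   3 5   7

   so any index value is decided after at most three levels of
   tests.  */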
5009 /* Search the parent sections of the case node tree
5010 to see if a test for the lower bound of NODE would be redundant.
5011 INDEX_TYPE is the type of the index expression.
5013 The instructions to generate the case decision tree are
5014 output in the same order as nodes are processed so it is
5015 known that if a parent node checks the range of the current
5016 node minus one, then the current node is bounded at its lower
5017 span. Thus the test would be redundant. */
5020 node_has_low_bound (case_node_ptr node, tree index_type)
5023 case_node_ptr pnode;
5025 /* If the lower bound of this node is the lowest value in the index type,
5026 we need not test it. */
5028 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5031 /* If this node has a left branch, the value at the left must be less
5032 than that at this node, so it cannot be bounded at the bottom and
5033 we need not bother testing any further. */
5038 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5039 node->low, integer_one_node));
5041 /* If the subtraction above overflowed, we can't verify anything.
5042 Otherwise, look for a parent that tests our value - 1. */
5044 if (! tree_int_cst_lt (low_minus_one, node->low))
5047 for (pnode = node->parent; pnode; pnode = pnode->parent)
5048 if (tree_int_cst_equal (low_minus_one, pnode->high))
5054 /* Search the parent sections of the case node tree
5055 to see if a test for the upper bound of NODE would be redundant.
5056 INDEX_TYPE is the type of the index expression.
5058 The instructions to generate the case decision tree are
5059 output in the same order as nodes are processed so it is
5060 known that if a parent node checks the range of the current
5061 node plus one, then the current node is bounded at its upper
5062 span. Thus the test would be redundant. */
5065 node_has_high_bound (case_node_ptr node, tree index_type)
5068 case_node_ptr pnode;
5070 /* If there is no upper bound, obviously no test is needed. */
5072 if (TYPE_MAX_VALUE (index_type) == NULL)
5075 /* If the upper bound of this node is the highest value in the type
5076 of the index expression, we need not test against it. */
5078 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5081 /* If this node has a right branch, the value at the right must be greater
5082 than that at this node, so it cannot be bounded at the top and
5083 we need not bother testing any further. */
5088 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5089 node->high, integer_one_node));
5091 /* If the addition above overflowed, we can't verify anything.
5092 Otherwise, look for a parent that tests our value + 1. */
5094 if (! tree_int_cst_lt (node->high, high_plus_one))
5097 for (pnode = node->parent; pnode; pnode = pnode->parent)
5098 if (tree_int_cst_equal (high_plus_one, pnode->low))
5104 /* Search the parent sections of the
5105 case node tree to see if both tests for the upper and lower
5106 bounds of NODE would be redundant. */
5109 node_is_bounded (case_node_ptr node, tree index_type)
5111 return (node_has_low_bound (node, index_type)
5112 && node_has_high_bound (node, index_type));
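/* Worked example: suppose we are emitting the node for value 51 and
   some parent already tested for 50, i.e. that parent's HIGH equals
   51 - 1. Control can then reach the 51 node only with index > 50,
   so node_has_low_bound detects that the lower-bound test is
   redundant. If another ancestor's LOW is 52, the upper bound is
   covered as well and node_is_bounded lets emit_case_nodes jump to
   the target unconditionally.  */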
5115 /* Emit an unconditional jump to LABEL unless it would be dead code. */
5118 emit_jump_if_reachable (rtx label)
5120 if (GET_CODE (get_last_insn ()) != BARRIER)
5124 /* Emit step-by-step code to select a case for the value of INDEX.
5125 The thus generated decision tree follows the form of the
5126 case-node binary tree NODE, whose nodes represent test conditions.
5127 INDEX_TYPE is the type of the index of the switch.
5129 Care is taken to prune redundant tests from the decision tree
5130 by detecting any boundary conditions already checked by
5131 emitted rtx. (See node_has_high_bound, node_has_low_bound
5132 and node_is_bounded, above.)
5134 Where the test conditions can be shown to be redundant we emit
5135 an unconditional jump to the target code. As a further
5136 optimization, the subordinates of a tree node are examined to
5137 check for bounded nodes. In this case conditional and/or
5138 unconditional jumps as a result of the boundary check for the
5139 current node are arranged to target the subordinates' associated
5140 code for out-of-bound conditions on the current node.
5142 We can assume that when control reaches the code generated here,
5143 the index value has already been compared with the parents
5144 of this node, and determined to be on the same side of each parent
5145 as this node is. Thus, if this node tests for the value 51,
5146 and a parent tested for 52, we don't need to consider
5147 the possibility of a value greater than 51. If another parent
5148 tests for the value 50, then this node need not test anything. */
5151 emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
5154 /* If INDEX has an unsigned type, we must make unsigned branches. */
5155 int unsignedp = TYPE_UNSIGNED (index_type);
5156 enum machine_mode mode = GET_MODE (index);
5157 enum machine_mode imode = TYPE_MODE (index_type);
5159 /* See if our parents have already tested everything for us.
5160 If they have, emit an unconditional jump for this node. */
5161 if (node_is_bounded (node, index_type))
5162 emit_jump (label_rtx (node->code_label));
5164 else if (tree_int_cst_equal (node->low, node->high))
5166 /* Node is single valued. First see if the index expression matches
5167 this node and then check our children, if any. */
5169 do_jump_if_equal (index,
5170 convert_modes (mode, imode,
5171 expand_expr (node->low, NULL_RTX,
5174 label_rtx (node->code_label), unsignedp);
5176 if (node->right != 0 && node->left != 0)
5178 /* This node has children on both sides.
5179 Dispatch to one side or the other
5180 by comparing the index value with this node's value.
5181 If one subtree is bounded, check that one first,
5182 so we can avoid real branches in the tree. */
5184 if (node_is_bounded (node->right, index_type))
5186 emit_cmp_and_jump_insns (index,
5189 expand_expr (node->high, NULL_RTX,
5192 GT, NULL_RTX, mode, unsignedp,
5193 label_rtx (node->right->code_label));
5194 emit_case_nodes (index, node->left, default_label, index_type);
5197 else if (node_is_bounded (node->left, index_type))
5199 emit_cmp_and_jump_insns (index,
5202 expand_expr (node->high, NULL_RTX,
5205 LT, NULL_RTX, mode, unsignedp,
5206 label_rtx (node->left->code_label));
5207 emit_case_nodes (index, node->right, default_label, index_type);
5210 /* If both children are single-valued cases with no
5211 children, finish up all the work. This way, we can save
5212 one ordered comparison. */
5213 else if (tree_int_cst_equal (node->right->low, node->right->high)
5214 && node->right->left == 0
5215 && node->right->right == 0
5216 && tree_int_cst_equal (node->left->low, node->left->high)
5217 && node->left->left == 0
5218 && node->left->right == 0)
5220 /* Neither node is bounded. First distinguish the two sides;
5221 then emit the code for one side at a time. */
5223 /* See if the value matches what the right hand side wants. */
5225 do_jump_if_equal (index,
5226 convert_modes (mode, imode,
5227 expand_expr (node->right->low,
5231 label_rtx (node->right->code_label),
5234 /* See if the value matches what the left hand side wants. */
5236 do_jump_if_equal (index,
5237 convert_modes (mode, imode,
5238 expand_expr (node->left->low,
5242 label_rtx (node->left->code_label),
5248 /* Neither node is bounded. First distinguish the two sides;
5249 then emit the code for one side at a time. */
5251 tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5253 /* See if the value is on the right. */
5254 emit_cmp_and_jump_insns (index,
5257 expand_expr (node->high, NULL_RTX,
5260 GT, NULL_RTX, mode, unsignedp,
5261 label_rtx (test_label));
5263 /* Value must be on the left.
5264 Handle the left-hand subtree. */
5265 emit_case_nodes (index, node->left, default_label, index_type);
5266 /* If left-hand subtree does nothing, go to default. */
5268 emit_jump_if_reachable (default_label);
5270 /* Code branches here for the right-hand subtree. */
5271 expand_label (test_label);
5272 emit_case_nodes (index, node->right, default_label, index_type);
5276 else if (node->right != 0 && node->left == 0)
5278 /* Here we have a right child but no left so we issue conditional
5279 branch to default and process the right child.
5281 Omit the conditional branch to default if it would avoid only one
5282 right child; it costs too much space to save so little time. */
5284 if (node->right->right || node->right->left
5285 || !tree_int_cst_equal (node->right->low, node->right->high))
5287 if (!node_has_low_bound (node, index_type))
5289 emit_cmp_and_jump_insns (index,
5292 expand_expr (node->high, NULL_RTX,
5295 LT, NULL_RTX, mode, unsignedp,
5299 emit_case_nodes (index, node->right, default_label, index_type);
5302 /* We cannot process node->right normally
5303 since we haven't ruled out the numbers less than
5304 this node's value. So handle node->right explicitly. */
5305 do_jump_if_equal (index,
5308 expand_expr (node->right->low, NULL_RTX,
5311 label_rtx (node->right->code_label), unsignedp);
5314 else if (node->right == 0 && node->left != 0)
5316 /* Just one subtree, on the left. */
5317 if (node->left->left || node->left->right
5318 || !tree_int_cst_equal (node->left->low, node->left->high))
5320 if (!node_has_high_bound (node, index_type))
5322 emit_cmp_and_jump_insns (index,
5325 expand_expr (node->high, NULL_RTX,
5328 GT, NULL_RTX, mode, unsignedp,
5332 emit_case_nodes (index, node->left, default_label, index_type);
5335 /* We cannot process node->left normally
5336 since we haven't ruled out the numbers greater than
5337 this node's value. So handle node->left explicitly. */
5338 do_jump_if_equal (index,
5341 expand_expr (node->left->low, NULL_RTX,
5344 label_rtx (node->left->code_label), unsignedp);
5349 /* Node is a range. These cases are very similar to those for a single
5350 value, except that we do not start by testing whether this node
5351 is the one to branch to. */
5353 if (node->right != 0 && node->left != 0)
5355 /* Node has subtrees on both sides.
5356 If the right-hand subtree is bounded,
5357 test for it first, since we can go straight there.
5358 Otherwise, we need to make a branch in the control structure,
5359 then handle the two subtrees. */
5360 tree test_label = 0;
5362 if (node_is_bounded (node->right, index_type))
5363 /* Right hand node is fully bounded so we can eliminate any
5364 testing and branch directly to the target code. */
5365 emit_cmp_and_jump_insns (index,
5368 expand_expr (node->high, NULL_RTX,
5371 GT, NULL_RTX, mode, unsignedp,
5372 label_rtx (node->right->code_label));
5375 /* Right hand node requires testing.
5376 Branch to a label where we will handle it later. */
5378 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5379 emit_cmp_and_jump_insns (index,
5382 expand_expr (node->high, NULL_RTX,
5385 GT, NULL_RTX, mode, unsignedp,
5386 label_rtx (test_label));
5389 /* Value belongs to this node or to the left-hand subtree. */
5391 emit_cmp_and_jump_insns (index,
5394 expand_expr (node->low, NULL_RTX,
5397 GE, NULL_RTX, mode, unsignedp,
5398 label_rtx (node->code_label));
5400 /* Handle the left-hand subtree. */
5401 emit_case_nodes (index, node->left, default_label, index_type);
5403 /* If right node had to be handled later, do that now. */
5407 /* If the left-hand subtree fell through,
5408 don't let it fall into the right-hand subtree. */
5409 emit_jump_if_reachable (default_label);
5411 expand_label (test_label);
5412 emit_case_nodes (index, node->right, default_label, index_type);
5416 else if (node->right != 0 && node->left == 0)
5418 /* Deal with values to the left of this node,
5419 if they are possible. */
5420 if (!node_has_low_bound (node, index_type))
5422 emit_cmp_and_jump_insns (index,
5425 expand_expr (node->low, NULL_RTX,
5428 LT, NULL_RTX, mode, unsignedp,
5432 /* Value belongs to this node or to the right-hand subtree. */
5434 emit_cmp_and_jump_insns (index,
5437 expand_expr (node->high, NULL_RTX,
5440 LE, NULL_RTX, mode, unsignedp,
5441 label_rtx (node->code_label));
5443 emit_case_nodes (index, node->right, default_label, index_type);
5446 else if (node->right == 0 && node->left != 0)
5448 /* Deal with values to the right of this node,
5449 if they are possible. */
5450 if (!node_has_high_bound (node, index_type))
5452 emit_cmp_and_jump_insns (index,
5455 expand_expr (node->high, NULL_RTX,
5458 GT, NULL_RTX, mode, unsignedp,
5462 /* Value belongs to this node or to the left-hand subtree. */
5464 emit_cmp_and_jump_insns (index,
5467 expand_expr (node->low, NULL_RTX,
5470 GE, NULL_RTX, mode, unsignedp,
5471 label_rtx (node->code_label));
5473 emit_case_nodes (index, node->left, default_label, index_type);
5478 /* Node has no children so we check low and high bounds to remove
5479 redundant tests. Only one of the bounds can exist,
5480 since otherwise this node is bounded--a case tested already. */
5481 int high_bound = node_has_high_bound (node, index_type);
5482 int low_bound = node_has_low_bound (node, index_type);
5484 if (!high_bound && low_bound)
5486 emit_cmp_and_jump_insns (index,
5489 expand_expr (node->high, NULL_RTX,
5492 GT, NULL_RTX, mode, unsignedp,
5496 else if (!low_bound && high_bound)
5498 emit_cmp_and_jump_insns (index,
5501 expand_expr (node->low, NULL_RTX,
5504 LT, NULL_RTX, mode, unsignedp,
5507 else if (!low_bound && !high_bound)
5509 /* Widen LOW and HIGH to the same width as INDEX. */
5510 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
5511 tree low = build1 (CONVERT_EXPR, type, node->low);
5512 tree high = build1 (CONVERT_EXPR, type, node->high);
5513 rtx low_rtx, new_index, new_bound;
5515 /* Instead of doing two branches, emit one unsigned branch for
5516 (index-low) > (high-low). */
5517 low_rtx = expand_expr (low, NULL_RTX, mode, 0);
5518 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
5519 NULL_RTX, unsignedp,
5521 new_bound = expand_expr (fold (build (MINUS_EXPR, type,
5525 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
5526 mode, 1, default_label);
5529 emit_jump (label_rtx (node->code_label));
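/* As a worked instance of the unsigned-compare trick just above: for
   a leaf range 10 .. 13 with neither bound established, instead of
   emitting (dflt and L being pseudo-labels)

       if (index < 10) goto dflt;
       if (index > 13) goto dflt;
       goto L;

   we emit the single unsigned test

       if ((unsigned) (index - 10) > 3) goto dflt;
       goto L;

   since the subtraction wraps every value below 10 around to a large
   unsigned number.  */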
5534 #include "gt-stmt.h"