/* Expands front end tree to back end RTL for GCC
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "expr.h"
#include "libfuncs.h"
#include "hard-reg-set.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
#include "langhooks.h"
#include "predict.h"
#include "optabs.h"
#include "target.h"
#include "regs.h"
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
struct case_node GTY(())
{
  struct case_node *left;   /* Left son in binary tree */
  struct case_node *right;  /* Right son in binary tree; also node chain */
  struct case_node *parent; /* Parent of node in binary tree */
  tree             low;     /* Lowest index value for this label */
  tree             high;    /* Highest index value for this label */
  tree             code_label; /* Label to jump to when node matches */
  int              balance; /* AVL balance factor */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
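
#if 0
/* Illustrative sketch (editorial addition, not part of the original file;
   the function name is hypothetical): after expand_end_case has flattened
   the AVL tree into a chain linked through the RIGHT fields, the labels
   can be visited in increasing case-value order like this.  A GNU C
   `case 5 ... 9:' range label becomes one node with low = 5, high = 9;
   a plain `case 1:' has low == high == 1.  */
static void
example_dump_case_chain (case_node_ptr n)
{
  for (; n; n = n->right)	/* RIGHT doubles as the chain link.  */
    fprintf (stderr, "[" HOST_WIDE_INT_PRINT_DEC ", "
	     HOST_WIDE_INT_PRINT_DEC "]\n",
	     tree_low_cst (n->low, 0), tree_low_cst (n->high, 0));
}
#endif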
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short cost_table_[129];
static int use_cost_table;
static int cost_table_initialized;

/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
   is unsigned.  */
#define COST_TABLE(I)  cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
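
#if 0
/* Illustrative sketch (editorial addition, not part of the original file):
   the +1 bias maps the legal index range -1 .. 127 onto
   cost_table_[0 .. 128], so the sentinel index -1 is safe even though the
   index is converted through an unsigned type:  */
  COST_TABLE (-1) = 2;		/* really cost_table_[0] */
  COST_TABLE (127) = 1;		/* really cost_table_[128], the last slot */
#endif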
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
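
#if 0
/* Illustrative sketch (editorial addition, not part of the original file;
   the function name is hypothetical): how a front end might drive this
   nesting machinery for `while (cond) body'.  The entry points are the
   expand_* functions defined later in this file; treat the exact calling
   sequence as an assumption-laden example, not canonical front-end code.  */
static void
example_expand_while (tree cond, void (*expand_body) (void))
{
  struct nesting *thisloop = expand_start_loop (1); /* visible to `break' */
  expand_exit_loop_if_false (thisloop, cond);	/* test at the loop top */
  expand_body ();				/* emit RTL for the body */
  expand_end_loop ();				/* loop back and pop nesting */
}
#endif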
struct nesting GTY(())
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  enum nesting_desc {
    COND_NESTING,
    LOOP_NESTING,
    BLOCK_NESTING,
    CASE_NESTING
  } desc;
  union nesting_u
    {
      /* For conds (if-then and if-then-else statements).  */
      struct nesting_cond
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} GTY ((tag ("COND_NESTING"))) cond;
      /* For loops.  */
      struct nesting_loop
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} GTY ((tag ("LOOP_NESTING"))) loop;
      /* For variable binding contours.  */
      struct nesting_block
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0,
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Nonzero if this is associated with an EH region.  */
	  int exception_region;
	  /* The saved target_temp_slot_level from our outer block.
	     We may reset target_temp_slot_level to be the level of
	     this block, if that is done, target_temp_slot_level
	     reverts to the saved target_temp_slot_level at the very
	     end of the block.  */
	  int block_target_temp_slot_level;
	  /* True if we are currently emitting insns in an area of
	     output code that is controlled by a conditional
	     expression.  This is used by the cleanup handling code to
	     generate conditional cleanup actions.  */
	  int conditional_code;
	  /* A place to move the start of the exception region for any
	     of the conditional cleanups, must be at the end or after
	     the start of the last unconditional cleanup, and before any
	     conditional branch points.  */
	  rtx last_unconditional_cleanup;
	} GTY ((tag ("BLOCK_NESTING"))) block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct nesting_case
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels; it is first built as an AVL tree.
	     During expand_end_case, this is converted to a list, and may be
	     rearranged into a nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Name of this kind of statement, for warnings.  */
	  const char *printname;
	  /* Used to save no_line_numbers till we see the first case label.
	     We set this to -1 when we see the first case label in this
	     case statement.  */
	  int line_number_status;
	} GTY ((tag ("CASE_NESTING"))) case_stmt;
    } GTY ((desc ("%1.desc"))) data;
};
/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() ggc_alloc (sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)					\
do { struct nesting *target = STACK;			\
     struct nesting *this;				\
     do { this = nesting_stack;				\
	  if (loop_stack == this)			\
	    loop_stack = loop_stack->next;		\
	  if (cond_stack == this)			\
	    cond_stack = cond_stack->next;		\
	  if (block_stack == this)			\
	    block_stack = block_stack->next;		\
	  if (stack_block_stack == this)		\
	    stack_block_stack = stack_block_stack->next; \
	  if (case_stack == this)			\
	    case_stack = case_stack->next;		\
	  nesting_depth = nesting_stack->depth - 1;	\
	  nesting_stack = this->all; }			\
     while (this != target); } while (0)
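
#if 0
/* Illustrative sketch (editorial addition, not part of the original file;
   the function name is hypothetical): a typical use of POPSTACK from an
   expand_end_* function.  Popping up to and including the construct's own
   entry keeps every per-type stack (loop, cond, block, case) consistent
   in one step.  */
static void
example_end_loop_nesting (void)
{
  struct nesting *thisloop = loop_stack;	/* pushed at loop start */
  /* ... emit the construct's closing RTL here ...  */
  POPSTACK (loop_stack);
  (void) thisloop;
}
#endif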
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */
struct goto_fixup GTY(())
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};
/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain GTY(())
{
  /* Points to following label_chain.  */
  struct label_chain *next;
  tree label;
};
struct stmt_status GTY(())
{
  /* Chain of all pending binding contours.  */
  struct nesting * x_block_stack;

  /* If any new stacks are added here, add them to POPSTACKS too.  */

  /* Chain of all pending binding contours that restore stack levels
     or have cleanups.  */
  struct nesting * x_stack_block_stack;

  /* Chain of all pending conditional statements.  */
  struct nesting * x_cond_stack;

  /* Chain of all pending loops.  */
  struct nesting * x_loop_stack;

  /* Chain of all pending case or switch statements.  */
  struct nesting * x_case_stack;

  /* Separate chain including all of the above,
     chained through the `all' field.  */
  struct nesting * x_nesting_stack;

  /* Number of entries on nesting_stack now.  */
  int x_nesting_depth;

  /* Number of binding contours started so far in this function.  */
  int x_block_start_count;

  /* Each time we expand an expression-statement,
     record the expr's type and its RTL value here.  */
  tree x_last_expr_type;
  rtx x_last_expr_value;
  rtx x_last_expr_alt_rtl;

  /* Nonzero if within a ({...}) grouping, in which case we must
     always compute a value for each expr-stmt in case it is the last one.  */
  int x_expr_stmts_for_value;

  /* Location of last line-number note, whether we actually
     emitted it or not.  */
  location_t x_emit_locus;

  struct goto_fixup *x_goto_fixup_chain;
};
#define block_stack (cfun->stmt->x_block_stack)
#define stack_block_stack (cfun->stmt->x_stack_block_stack)
#define cond_stack (cfun->stmt->x_cond_stack)
#define loop_stack (cfun->stmt->x_loop_stack)
#define case_stack (cfun->stmt->x_case_stack)
#define nesting_stack (cfun->stmt->x_nesting_stack)
#define nesting_depth (cfun->stmt->x_nesting_depth)
#define current_block_start_count (cfun->stmt->x_block_start_count)
#define last_expr_type (cfun->stmt->x_last_expr_type)
#define last_expr_value (cfun->stmt->x_last_expr_value)
#define last_expr_alt_rtl (cfun->stmt->x_last_expr_alt_rtl)
#define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
#define emit_locus (cfun->stmt->x_emit_locus)
#define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)
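
#if 0
/* Illustrative sketch (editorial addition, not part of the original file):
   the accessors above make all of the nesting state per-function, so the
   two statements below are equivalent, and switching CFUN switches the
   entire stack set at once.  */
  block_stack = block_stack->next;
  cfun->stmt->x_block_stack = cfun->stmt->x_block_stack->next;
#endif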
/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

static int n_occurrences (int, const char *);
static bool decl_conflicts_with_clobbers_p (tree, const HARD_REG_SET);
static void expand_goto_internal (tree, rtx, rtx);
static int expand_fixup (tree, rtx, rtx);
static rtx expand_nl_handler_label (rtx, rtx);
static void expand_nl_goto_receiver (void);
static void expand_nl_goto_receivers (struct nesting *);
static void fixup_gotos (struct nesting *, rtx, tree, rtx, int);
static bool check_operand_nalternatives (tree, tree);
static bool check_unique_operand_names (tree, tree);
static char *resolve_operand_name_1 (char *, tree, tree);
static void expand_null_return_1 (rtx);
static enum br_predictor return_prediction (rtx);
static rtx shift_return_value (rtx);
static void expand_value_return (rtx);
static int tail_recursion_args (tree, tree);
static void expand_cleanups (tree, int, int);
static void check_seenlabel (void);
static void do_jump_if_equal (rtx, rtx, rtx, int);
static int estimate_case_costs (case_node_ptr);
static bool same_case_target_p (rtx, rtx);
static void strip_default_case_nodes (case_node_ptr *, rtx);
static bool lshift_cheap_p (void);
static int case_bit_test_cmp (const void *, const void *);
static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
static void group_case_nodes (case_node_ptr);
static void balance_case_nodes (case_node_ptr *, case_node_ptr);
static int node_has_low_bound (case_node_ptr, tree);
static int node_has_high_bound (case_node_ptr, tree);
static int node_is_bounded (case_node_ptr, tree);
static void emit_jump_if_reachable (rtx);
static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
static struct case_node *case_tree2list (case_node *, case_node *);
void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}

void
init_stmt_for_function (void)
{
  cfun->stmt = ggc_alloc_cleared (sizeof (struct stmt_status));
}
/* Record the current file and line.  Called from emit_line_note.  */

void
set_file_and_line_for_stmt (location_t location)
{
  /* If we're outputting an inline function, and we add a line note,
     there may be no CFUN->STMT information.  So, there's no need to
     update it.  */
  if (cfun->stmt)
    emit_locus = location;
}
/* Emit a no-op instruction.  */

void
emit_nop (void)
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || (GET_CODE (last_insn) == NOTE
	      && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (tree label)
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (!DECL_RTL_SET_P (label))
    SET_DECL_RTL (label, gen_label_rtx ());

  return DECL_RTL (label);
}
/* As above, but also put it on the forced-reference list of the
   function that contains it.  */
rtx
force_label_rtx (tree label)
{
  rtx ref = label_rtx (label);
  tree function = decl_function_context (label);
  struct function *p;

  if (!function)
    abort ();

  if (function != current_function_decl
      && function != inline_function_decl)
    p = find_function_data (function);
  else
    p = cfun;

  p->expr->x_forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref,
						p->expr->x_forced_labels);
  return ref;
}
/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (rtx label)
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}
/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (tree exp)
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

  x = convert_memory_address (Pmode, x);

  emit_queue ();

  if (! cfun->computed_goto_common_label)
    {
      cfun->computed_goto_common_reg = copy_to_mode_reg (Pmode, x);
      cfun->computed_goto_common_label = gen_label_rtx ();
      emit_label (cfun->computed_goto_common_label);

      do_pending_stack_adjust ();
      emit_indirect_jump (cfun->computed_goto_common_reg);

      current_function_has_computed_jump = 1;
    }
  else
    {
      emit_move_insn (cfun->computed_goto_common_reg, x);
      emit_jump (cfun->computed_goto_common_label);
    }
}
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (tree label)
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = ggc_alloc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}
/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (tree label)
{
  rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slots == 0)
    {
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
  nonlocal_goto_handler_slots
    = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
}
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (tree label)
{
  tree context;

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
      rtx handler_slot, static_chain, save_area, insn;
      tree link;

      /* Find the corresponding handler slot for this label.  */
      handler_slot = p->x_nonlocal_goto_handler_slots;
      for (link = p->x_nonlocal_labels; TREE_VALUE (link) != label;
	   link = TREE_CHAIN (link))
	handler_slot = XEXP (handler_slot, 1);
      handler_slot = XEXP (handler_slot, 0);

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

      static_chain = copy_to_reg (lookup_static_chain (label));

      /* Get addr of containing function's current nonlocal goto handler,
	 which will do any cleanups and then jump to the label.  */
      handler_slot = copy_to_reg (replace_rtx (copy_rtx (handler_slot),
					       virtual_stack_vars_rtx,
					       static_chain));

      /* Get addr of containing function's nonlocal save area.  */
      save_area = p->x_nonlocal_goto_stack_level;
      if (save_area)
	save_area = replace_rtx (copy_rtx (save_area),
				 virtual_stack_vars_rtx, static_chain);

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (static_chain, handler_slot,
				      save_area, label_ref));
      else
#endif
	{
	  emit_insn (gen_rtx_CLOBBER (VOIDmode,
				      gen_rtx_MEM (BLKmode,
						   gen_rtx_SCRATCH (VOIDmode))));
	  emit_insn (gen_rtx_CLOBBER (VOIDmode,
				      gen_rtx_MEM (BLKmode,
						   hard_frame_pointer_rtx)));

	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (hard_frame_pointer_rtx, static_chain);
	  emit_stack_restore (SAVE_NONLOCAL, save_area, NULL_RTX);

	  /* USE of hard_frame_pointer_rtx added for consistency;
	     not clear if really needed.  */
	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_indirect_jump (handler_slot);
	}

      /* Search backwards to the jump insn and mark it as a
	 non-local goto.  */
      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
	{
	  if (GET_CODE (insn) == JUMP_INSN)
	    {
	      REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
						  const0_rtx, REG_NOTES (insn));
	      break;
	    }
	  else if (GET_CODE (insn) == CALL_INSN)
	    break;
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (tree body, rtx label, rtx last_insn)
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this
	     would clobber the stack pointer.  This one should be
	     deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();

	  /* Don't do this adjust if it's to the end label and this function
	     is to return with a depressed stack pointer.  */
	  if (label == return_label
	      && (((TREE_CODE (TREE_TYPE (current_function_decl))
		    == FUNCTION_TYPE)
		   && (TYPE_RETURNS_STACK_DEPRESSED
		       (TREE_TYPE (current_function_decl))))))
	    ;
	  else
	    emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree tree_label, rtx rtl_label, rtx last_insn)
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup = ggc_alloc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.

	 Note that optimization passes (including expand_end_loop)
	 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
	 as a placeholder.  */

      {
	rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();
	rtx start;
	rtx end;
	tree block;

	block = make_node (BLOCK);
	TREE_USED (block) = 1;

	if (!cfun->x_whole_function_mode_p)
	  (*lang_hooks.decls.insert_block) (block);
	else
	  {
	    BLOCK_CHAIN (block)
	      = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
	    BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
	      = block;
	  }

	start_sequence ();
	start = emit_note (NOTE_INSN_BLOCK_BEG);
	if (cfun->x_whole_function_mode_p)
	  NOTE_BLOCK (start) = block;
	fixup->before_jump = emit_note (NOTE_INSN_DELETED);
	end = emit_note (NOTE_INSN_BLOCK_END);
	if (cfun->x_whole_function_mode_p)
	  NOTE_BLOCK (end) = block;
	fixup->context = block;
	end_sequence ();
	emit_insn_after (start, original_before_jump);
      }

      fixup->block_start_count = current_block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= ((block->data.block.outer_cleanups
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != end_block;
}
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (rtx first_insn)
{
  fixup_gotos (NULL, NULL_RTX, NULL_TREE, first_insn, 0);
}
/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN positive means report error if there is a jump into this
   contour from before the beginning of the contour.  This is also done if
   STACK_LEVEL is nonzero unless DONT_JUMP_IN is negative.  */

static void
fixup_gotos (struct nesting *thisblock, rtx stack_level,
	     tree cleanup_list, rtx first_insn, int dont_jump_in)
{
  struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  rtx cleanup_insns;

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.   This code used to use
	     the first non-label insn after f->target_rtl, but that's
	     wrong since such can be added, by things like put_var_into_stack
	     and have INSN_UIDs that are out of the range of the block.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in > 0 || (dont_jump_in == 0 && stack_level)
		  || cleanup_list)
	      && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_ERROR_ISSUED (f->target))
	    {
	      error ("%Jlabel '%D' used before containing binding contour",
		     f->target, f->target);
	      /* Prevent multiple errors for one label.  */
	      DECL_ERROR_ISSUED (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  (*lang_hooks.decls.pushlevel) (0);
	  (*lang_hooks.decls.set_block) (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), 1, 1);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level
	      && ! (f->target_rtl == return_label
		    && ((TREE_CODE (TREE_TYPE (current_function_decl))
			 == FUNCTION_TYPE)
			&& (TYPE_RETURNS_STACK_DEPRESSED
			    (TREE_TYPE (current_function_decl))))))
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point ensures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  (*lang_hooks.decls.poplevel) (1, 0, 0);

	  end_sequence ();
	  emit_insn_after (cleanup_insns, f->before_jump);

	  f->before_jump = 0;
	}
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
	   means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	rtx cleanup_insns;

	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    {
	      start_sequence ();
	      (*lang_hooks.decls.pushlevel) (0);
	      (*lang_hooks.decls.set_block) (f->context);
	      expand_cleanups (TREE_VALUE (lists), 1, 1);
	      do_pending_stack_adjust ();
	      cleanup_insns = get_insns ();
	      (*lang_hooks.decls.poplevel) (1, 0, 0);
	      end_sequence ();
	      if (cleanup_insns != 0)
		f->before_jump
		  = emit_insn_after (cleanup_insns, f->before_jump);

	      f->cleanup_list_list = TREE_CHAIN (lists);
	    }

	if (stack_level)
	  f->stack_level = stack_level;
      }
}
/* Return the number of times character C occurs in string S.  */

static int
n_occurrences (int c, const char *s)
{
  int n = 0;

  while (*s)
    n += (*s++ == c);

  return n;
}
/* Generate RTL for an asm statement (explicit assembler code).
   STRING is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
   insn is volatile; don't optimize it.  */

void
expand_asm (tree string, int vol)
{
  rtx body;

  if (TREE_CODE (string) == ADDR_EXPR)
    string = TREE_OPERAND (string, 0);

  body = gen_rtx_ASM_INPUT (VOIDmode,
			    ggc_strdup (TREE_STRING_POINTER (string)));

  MEM_VOLATILE_P (body) = vol;

  emit_insn (body);
  clear_last_expr ();
}
/* Parse the output constraint pointed to by *CONSTRAINT_P.  It is the
   OPERAND_NUMth output operand, indexed from zero.  There are NINPUTS
   inputs and NOUTPUTS outputs to this extended-asm.  Upon return,
   *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
   memory operand.  Similarly, *ALLOWS_REG will be TRUE iff the
   constraint allows the use of a register operand.  And, *IS_INOUT
   will be true if the operand is read-write, i.e., if it is used as
   an input as well as an output.  If *CONSTRAINT_P is not in
   canonical form, it will be made canonical.  (Note that `+' will be
   replaced with `=' as part of this process.)

   Returns TRUE if all went well; FALSE if an error occurred.  */
bool
parse_output_constraint (const char **constraint_p, int operand_num,
			 int ninputs, int noutputs, bool *allows_mem,
			 bool *allows_reg, bool *is_inout)
{
  const char *constraint = *constraint_p;
  const char *p;

  /* Assume the constraint doesn't allow the use of either a register
     or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Allow the `=' or `+' to not be at the beginning of the string,
     since it wasn't explicitly documented that way, and there is a
     large body of code that puts it last.  Swap the character to
     the front, so as not to uglify any place else.  */
  p = strchr (constraint, '=');
  if (!p)
    p = strchr (constraint, '+');

  /* If the string doesn't contain an `=', issue an error
     message.  */
  if (!p)
    {
      error ("output operand constraint lacks `='");
      return false;
    }

  /* If the constraint begins with `+', then the operand is both read
     from and written to.  */
  *is_inout = (*p == '+');

  /* Canonicalize the output constraint so that it begins with `='.  */
  if (p != constraint || *is_inout)
    {
      char *buf;
      size_t c_len = strlen (constraint);

      if (p != constraint)
	warning ("output constraint `%c' for operand %d is not at the beginning",
		 *p, operand_num);

      /* Make a copy of the constraint.  */
      buf = alloca (c_len + 1);
      strcpy (buf, constraint);
      /* Swap the first character and the `=' or `+'.  */
      buf[p - constraint] = buf[0];
      /* Make sure the first character is an `='.  (Until we do this,
	 it might be a `+'.)  */
      buf[0] = '=';
      /* Replace the constraint with the canonicalized string.  */
      *constraint_p = ggc_alloc_string (buf, c_len);
      constraint = *constraint_p;
    }

  /* Loop through the constraint string.  */
  for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
    switch (*p)
      {
      case '+':  case '=':
	error ("operand constraint contains incorrectly positioned '+' or '='");
	return false;

      case '%':
	if (operand_num + 1 == ninputs + noutputs)
	  {
	    error ("`%%' constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case 'm':  case 'o':
	*allows_mem = true;
	break;

      case '?':  case '!':  case '*':  case '&':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	break;

      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
      case '[':
	error ("matching constraint not valid in output operand");
	return false;

      case '<':  case '>':
	/* ??? Before flow, auto inc/dec insns are not supposed to exist,
	   excepting those that expand_call created.  So match memory
	   and hope.  */
	*allows_mem = true;
	break;

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      case 'p':  case 'r':
	*allows_reg = true;
	break;

      default:
	if (!ISALPHA (*p))
	  break;
	if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
	else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
	  *allows_reg = true;
	else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  if (*is_inout && !*allows_reg)
    warning ("read-write constraint does not allow a register");

  return true;
}
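
#if 0
/* Illustrative sketch (editorial addition, not part of the original file;
   the function name is hypothetical): what the canonicalization above
   does.  The misplaced `+' in "r+" is swapped to the front and rewritten
   as `=', with *IS_INOUT recording the read-write nature.  */
static void
example_parse_output (void)
{
  const char *c = "r+";		/* `+' not at the beginning */
  bool mem, reg, inout;

  if (parse_output_constraint (&c, 0, 0, 1, &mem, &reg, &inout))
    {
      /* Now c == "=r", reg == true, mem == false, inout == true
	 (a warning about the misplaced character was emitted).  */
    }
}
#endif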
/* Similar, but for input constraints.  */

bool
parse_input_constraint (const char **constraint_p, int input_num,
			int ninputs, int noutputs, int ninout,
			const char * const * constraints,
			bool *allows_mem, bool *allows_reg)
{
  const char *constraint = *constraint_p;
  const char *orig_constraint = constraint;
  size_t c_len = strlen (constraint);
  size_t j;
  bool saw_match = false;

  /* Assume the constraint doesn't allow the use of either
     a register or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Make sure constraint has neither `=', `+', nor '&'.  */

  for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
    switch (constraint[j])
      {
      case '+':  case '=':  case '&':
	if (constraint == orig_constraint)
	  {
	    error ("input operand constraint contains `%c'", constraint[j]);
	    return false;
	  }
	break;

      case '%':
	if (constraint == orig_constraint
	    && input_num + 1 == ninputs - ninout)
	  {
	    error ("`%%' constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case 'm':  case 'o':
	*allows_mem = true;
	break;

      case '<':  case '>':
      case '?':  case '!':  case '*':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	break;

	/* Whether or not a numeric constraint allows a register is
	   decided by the matching constraint, and so there is no need
	   to do anything special with them.  We must handle them in
	   the default case, so that we don't unnecessarily force
	   operands to memory.  */
      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
	{
	  char *end;
	  unsigned long match;

	  saw_match = true;

	  match = strtoul (constraint + j, &end, 10);
	  if (match >= (unsigned long) noutputs)
	    {
	      error ("matching constraint references invalid operand number");
	      return false;
	    }

	  /* Try and find the real constraint for this dup.  Only do this
	     if the matching constraint is the only alternative.  */
	  if (*end == '\0'
	      && (j == 0 || (j == 1 && constraint[0] == '%')))
	    {
	      constraint = constraints[match];
	      *constraint_p = constraint;
	      c_len = strlen (constraint);
	      j = 0;
	      /* ??? At the end of the loop, we will skip the first part of
		 the matched constraint.  This assumes not only that the
		 other constraint is an output constraint, but also that
		 the '=' or '+' come first.  */
	      break;
	    }
	  else
	    j = end - constraint;
	  /* Anticipate increment at end of loop.  */
	  j--;
	}
	/* Fall through.  */

      case 'p':  case 'r':
	*allows_reg = true;
	break;

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      default:
	if (! ISALPHA (constraint[j]))
	  {
	    error ("invalid punctuation `%c' in constraint", constraint[j]);
	    return false;
	  }
	if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
	    != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
	else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
	  *allows_reg = true;
	else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  if (saw_match && !*allows_reg)
    warning ("matching constraint does not allow a register");

  return true;
}
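
#if 0
/* Illustrative sketch (editorial addition, not part of the original file;
   the function name is hypothetical): a pure matching digit chases the
   dup to the matched output's constraint.  Here input 0 ("0") borrows
   output 0's "=r", so it is treated as allowing a register.  */
static void
example_parse_input (void)
{
  const char *out0 = "=r";
  const char *in0 = "0";
  const char *constraints[2] = { out0, in0 };
  bool mem, reg;

  if (parse_input_constraint (&in0, 0, 1, 1, 0, constraints, &mem, &reg))
    {
      /* Now in0 points at "=r" and reg == true.  */
    }
}
#endif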
/* Check for overlap between registers marked in CLOBBERED_REGS and
   anything inappropriate in DECL.  Emit error and return TRUE for error,
   FALSE for ok.  */

static bool
decl_conflicts_with_clobbers_p (tree decl, const HARD_REG_SET clobbered_regs)
{
  /* Conflicts between asm-declared register variables and the clobber
     list are not allowed.  */
  if ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
      && DECL_REGISTER (decl)
      && REG_P (DECL_RTL (decl))
      && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
    {
      rtx reg = DECL_RTL (decl);
      unsigned int regno;

      for (regno = REGNO (reg);
	   regno < (REGNO (reg)
		    + HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg)));
	   regno++)
	if (TEST_HARD_REG_BIT (clobbered_regs, regno))
	  {
	    error ("asm-specifier for variable `%s' conflicts with asm clobber list",
		   IDENTIFIER_POINTER (DECL_NAME (decl)));

	    /* Reset registerness to stop multiple errors emitted for a
	       single variable.  */
	    DECL_REGISTER (decl) = 0;
	    return true;
	  }
    }

  return false;
}
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a tree list in TREE_PURPOSE which in turn contains a constraint
   name in TREE_VALUE (or NULL_TREE) and a constraint string
   in TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
void
expand_asm_operands (tree string, tree outputs, tree inputs,
		     tree clobbers, int vol, location_t locus)
{
  rtvec argvec, constraintvec;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int ninout;
  int nclobbers;
  HARD_REG_SET clobbered_regs;
  int clobber_conflict_found = 0;
  tree tail;
  tree t;
  int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = alloca (noutputs * sizeof (rtx));
  int *inout_opnum = alloca (noutputs * sizeof (int));
  rtx *real_output_rtx = alloca (noutputs * sizeof (rtx));
  enum machine_mode *inout_mode
    = alloca (noutputs * sizeof (enum machine_mode));
  const char **constraints
    = alloca ((noutputs + ninputs) * sizeof (const char *));
  int old_generating_concat_p = generating_concat_p;

  /* An ASM with no outputs needs to be treated as volatile, for now.  */
  if (noutputs == 0)
    vol = 1;

  if (! check_operand_nalternatives (outputs, inputs))
    return;

  string = resolve_asm_operand_names (string, outputs, inputs);

  /* Collect constraints.  */
  i = 0;
  for (t = outputs; t ; t = TREE_CHAIN (t), i++)
    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
  for (t = inputs; t ; t = TREE_CHAIN (t), i++)
    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));

#ifdef MD_ASM_CLOBBERS
  /* Sometimes we wish to automatically clobber registers across an asm.
     Case in point is when the i386 backend moved from cc0 to a hard reg --
     maintaining source-level compatibility means automatically clobbering
     the flags register.  */
  MD_ASM_CLOBBERS (clobbers);
#endif

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  CLEAR_HARD_REG_SET (clobbered_regs);
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));

      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
      else if (i == -2)
	error ("unknown register name `%s' in `asm'", regname);

      /* Mark clobbered registers.  */
      if (i >= 0)
	{
	  /* Clobbering the PIC register is an error */
	  if (i == (int) PIC_OFFSET_TABLE_REGNUM)
	    {
	      error ("PIC register `%s' clobbered in `asm'", regname);
	      return;
	    }

	  SET_HARD_REG_BIT (clobbered_regs, i);
	}
    }

  clear_last_expr ();

  /* First pass over inputs and outputs checks validity and sets
     mark_addressable if needed.  */

  ninout = 0;
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      const char *constraint;
      bool is_inout;
      bool allows_reg;
      bool allows_mem;

      /* If there's an erroneous arg, emit no insn.  */
      if (type == error_mark_node)
	return;

      /* Try to parse the output constraint.  If that fails, there's
	 no point in going further.  */
      constraint = constraints[i];
      if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
				    &allows_mem, &allows_reg, &is_inout))
	return;

      if (! allows_reg
	  && (allows_mem
	      || is_inout
	      || (DECL_P (val)
		  && GET_CODE (DECL_RTL (val)) == REG
		  && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
	(*lang_hooks.mark_addressable) (val);

      if (is_inout)
	ninout++;
    }

  ninputs += ninout;
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
    {
      bool allows_reg, allows_mem;
      const char *constraint;

      /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
	 would get VOIDmode and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;

      constraint = constraints[i + noutputs];
      if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
				    constraints, &allows_mem, &allows_reg))
	return;

      if (! allows_reg && allows_mem)
	(*lang_hooks.mark_addressable) (TREE_VALUE (tail));
    }
  /* Second pass evaluates arguments.  */

  ninout = 0;
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      bool is_inout;
      bool allows_reg;
      bool allows_mem;
      rtx op;

      if (!parse_output_constraint (&constraints[i], i, ninputs,
				    noutputs, &allows_mem, &allows_reg,
				    &is_inout))
	abort ();

      /* If an output operand is not a decl or indirect ref and our constraint
	 allows a register, make a temporary to act as an intermediate.
	 Make the asm insn write into that, then our caller will copy it to
	 the real output operand.  Likewise for promoted variables.  */

      generating_concat_p = 0;

      real_output_rtx[i] = NULL_RTX;
      if ((TREE_CODE (val) == INDIRECT_REF
	   && allows_mem)
	  || (DECL_P (val)
	      && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
	      && ! (GET_CODE (DECL_RTL (val)) == REG
		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
	  || ! allows_reg
	  || is_inout)
	{
	  op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
	  if (GET_CODE (op) == MEM)
	    op = validize_mem (op);

	  if (! allows_reg && GET_CODE (op) != MEM)
	    error ("output number %d not directly addressable", i);
	  if ((! allows_mem && GET_CODE (op) == MEM)
	      || GET_CODE (op) == CONCAT)
	    {
	      real_output_rtx[i] = protect_from_queue (op, 1);
	      op = gen_reg_rtx (GET_MODE (op));
	      if (is_inout)
		emit_move_insn (op, real_output_rtx[i]);
	    }
	}
      else
	{
	  op = assign_temp (type, 0, 0, 1);
	  op = validize_mem (op);
	  TREE_VALUE (tail) = make_tree (type, op);
	}
      output_rtx[i] = op;

      generating_concat_p = old_generating_concat_p;

      if (is_inout)
	{
	  inout_mode[ninout] = TYPE_MODE (type);
	  inout_opnum[ninout++] = i;
	}

      if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
	clobber_conflict_found = 1;
    }
  /* Make vectors for the expression-rtx, constraint strings,
     and named operands.  */

  argvec = rtvec_alloc (ninputs);
  constraintvec = rtvec_alloc (ninputs);

  body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
				: GET_MODE (output_rtx[0])),
			       ggc_strdup (TREE_STRING_POINTER (string)),
			       empty_string, 0, argvec, constraintvec,
			       locus.file, locus.line);

  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
    {
      bool allows_reg, allows_mem;
      const char *constraint;
      tree val, type;
      rtx op;

      constraint = constraints[i + noutputs];
      if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
				    constraints, &allows_mem, &allows_reg))
	abort ();

      generating_concat_p = 0;

      val = TREE_VALUE (tail);
      type = TREE_TYPE (val);
      op = expand_expr (val, NULL_RTX, VOIDmode,
			(allows_mem && !allows_reg
			 ? EXPAND_MEMORY : EXPAND_NORMAL));

      /* Never pass a CONCAT to an ASM.  */
      if (GET_CODE (op) == CONCAT)
	op = force_reg (GET_MODE (op), op);
      else if (GET_CODE (op) == MEM)
	op = validize_mem (op);

      if (asm_operand_ok (op, constraint) <= 0)
	{
	  if (allows_reg)
	    op = force_reg (TYPE_MODE (type), op);
	  else if (!allows_mem)
	    warning ("asm operand %d probably doesn't match constraints",
		     i + noutputs);
	  else if (GET_CODE (op) == MEM)
	    {
	      /* We won't recognize either volatile memory or memory
		 with a queued address as a memory_operand
		 at this point.  Ignore it: clearly this *is* a memory.  */
	    }
	  else
	    {
	      warning ("use of memory input without lvalue in "
		       "asm operand %d is deprecated", i + noutputs);

	      if (CONSTANT_P (op))
		{
		  rtx mem = force_const_mem (TYPE_MODE (type), op);
		  if (mem)
		    op = validize_mem (mem);
		  else
		    op = force_reg (TYPE_MODE (type), op);
		}
	      if (GET_CODE (op) == REG
		  || GET_CODE (op) == SUBREG
		  || GET_CODE (op) == ADDRESSOF
		  || GET_CODE (op) == CONCAT)
		{
		  tree qual_type = build_qualified_type (type,
							 (TYPE_QUALS (type)
							  | TYPE_QUAL_CONST));
		  rtx memloc = assign_temp (qual_type, 1, 1, 1);
		  memloc = validize_mem (memloc);
		  emit_move_insn (memloc, op);
		  op = memloc;
		}
	    }
	}

      generating_concat_p = old_generating_concat_p;
      ASM_OPERANDS_INPUT (body, i) = op;

      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
	= gen_rtx_ASM_INPUT (TYPE_MODE (type),
			     ggc_strdup (constraints[i + noutputs]));

      if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
	clobber_conflict_found = 1;
    }
  /* Protect all the operands from the queue now that they have all been
     evaluated.  */

  generating_concat_p = 0;

  for (i = 0; i < ninputs - ninout; i++)
    ASM_OPERANDS_INPUT (body, i)
      = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* For in-out operands, copy output rtx to input rtx.  */
  for (i = 0; i < ninout; i++)
    {
      int j = inout_opnum[i];
      char buffer[16];

      ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
	= output_rtx[j];

      sprintf (buffer, "%d", j);
      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
	= gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
    }

  generating_concat_p = old_generating_concat_p;
  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
		  ARGVEC CONSTRAINTS OPNAMES))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
      emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
    }

  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      emit_insn (body);
    }

  else
    {
      rtx obody = body;
      int num = noutputs;

      if (num == 0)
	num = 1;

      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */
      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx_SET (VOIDmode,
			   output_rtx[i],
			   gen_rtx_ASM_OPERANDS
			   (GET_MODE (output_rtx[i]),
			    ggc_strdup (TREE_STRING_POINTER (string)),
			    ggc_strdup (constraints[i]),
			    i, argvec, constraintvec,
			    locus.file, locus.line));

	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
	{
	  const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int j = decode_reg_name (regname);
	  rtx clobbered_reg;

	  if (j < 0)
	    {
	      if (j == -3)	/* `cc', which is not a register */
		continue;

	      if (j == -4)	/* `memory', don't cache memory across asm */
		{
		  XVECEXP (body, 0, i++)
		    = gen_rtx_CLOBBER (VOIDmode,
				       gen_rtx_MEM
				       (BLKmode,
					gen_rtx_SCRATCH (VOIDmode)));
		  continue;
		}

	      /* Ignore unknown register, error already signaled.  */
	      continue;
	    }

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  clobbered_reg = gen_rtx_REG (QImode, j);

	  /* Do sanity check for overlap between clobbers and respectively
	     input and outputs that hasn't been handled.  Such overlap
	     should have been detected and reported above.  */
	  if (!clobber_conflict_found)
	    {
	      int opno;

	      /* We test the old body (obody) contents to avoid tripping
		 over the under-construction body.  */
	      for (opno = 0; opno < noutputs; opno++)
		if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
		  internal_error ("asm clobber conflict with output operand");

	      for (opno = 0; opno < ninputs - ninout; opno++)
		if (reg_overlap_mentioned_p (clobbered_reg,
					     ASM_OPERANDS_INPUT (obody, opno)))
		  internal_error ("asm clobber conflict with input operand");
	    }

	  XVECEXP (body, 0, i++)
	    = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
	}

      emit_insn (body);
    }

  /* For any outputs that needed reloading into registers, spill them
     back to where they belong.  */
  for (i = 0; i < noutputs; ++i)
    if (real_output_rtx[i])
      emit_move_insn (real_output_rtx[i], output_rtx[i]);

  free_temp_slots ();
}
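
#if 0
/* Illustrative sketch (editorial addition, not part of the original file;
   the function name is hypothetical): the user-level construct this
   function expands.  The front end hands expand_asm_operands an OUTPUTS
   list holding ("=r", sum), an INPUTS list holding ("0", a) and ("r", b),
   and a CLOBBERS list naming "cc"; operands are numbered outputs first,
   so %0 is sum and %2 is b.  (i386 AT&T syntax.)  */
static int
example_user_level_asm (int a, int b)
{
  int sum;
  __asm__ volatile ("add %2, %0" : "=r" (sum) : "0" (a), "r" (b) : "cc");
  return sum;
}
#endif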
/* A subroutine of expand_asm_operands.  Check that all operands have
   the same number of alternatives.  Return true if so.  */

static bool
check_operand_nalternatives (tree outputs, tree inputs)
{
  if (outputs || inputs)
    {
      tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
      int nalternatives
	= n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
      tree next = inputs;

      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
	{
	  error ("too many alternatives in `asm'");
	  return false;
	}

      tmp = outputs;
      while (tmp)
	{
	  const char *constraint
	    = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));

	  if (n_occurrences (',', constraint) != nalternatives)
	    {
	      error ("operand constraints for `asm' differ in number of alternatives");
	      return false;
	    }

	  if (TREE_CHAIN (tmp))
	    tmp = TREE_CHAIN (tmp);
	  else
	    tmp = next, next = 0;
	}
    }

  return true;
}
/* A subroutine of expand_asm_operands.  Check that all operand names
   are unique.  Return true if so.  We rely on the fact that these names
   are identifiers, and so have been canonicalized by get_identifier,
   so all we need are pointer comparisons.  */

static bool
check_unique_operand_names (tree outputs, tree inputs)
{
  tree i, j;

  for (i = outputs; i ; i = TREE_CHAIN (i))
    {
      tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
      if (! i_name)
	continue;

      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
	  goto failure;
    }

  for (i = inputs; i ; i = TREE_CHAIN (i))
    {
      tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
      if (! i_name)
	continue;

      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
	  goto failure;
      for (j = outputs; j ; j = TREE_CHAIN (j))
	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
	  goto failure;
    }

  return true;

 failure:
  error ("duplicate asm operand name '%s'",
	 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
  return false;
}
1981 /* A subroutine of expand_asm_operands. Resolve the names of the operands
1982 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1983 STRING and in the constraints to those numbers. */
1986 resolve_asm_operand_names (tree string, tree outputs, tree inputs)
1993 check_unique_operand_names (outputs, inputs);
1995 /* Substitute [<name>] in input constraint strings. There should be no
1996 named operands in output constraints. */
1997 for (t = inputs; t ; t = TREE_CHAIN (t))
1999 c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2000 if (strchr (c, '[') != NULL)
2002 p = buffer = xstrdup (c);
2003 while ((p = strchr (p, '[')) != NULL)
2004 p = resolve_operand_name_1 (p, outputs, inputs);
2005 TREE_VALUE (TREE_PURPOSE (t))
2006 = build_string (strlen (buffer), buffer);
2011 /* Now check for any needed substitutions in the template. */
2012 c = TREE_STRING_POINTER (string);
2013 while ((c = strchr (c, '%')) != NULL)
2017 else if (ISALPHA (c[1]) && c[2] == '[')
2028 /* OK, we need to make a copy so we can perform the substitutions.
2029 Assume that we will not need extra space--we get to remove '['
2030 and ']', which means we cannot have a problem until we have more
2031 than 999 operands. */
2032 buffer = xstrdup (TREE_STRING_POINTER (string));
2033 p = buffer + (c - TREE_STRING_POINTER (string));
2035 while ((p = strchr (p, '%')) != NULL)
2039 else if (ISALPHA (p[1]) && p[2] == '[')
2047 p = resolve_operand_name_1 (p, outputs, inputs);
2050 string = build_string (strlen (buffer), buffer);
2057 /* A subroutine of resolve_operand_names. P points to the '[' for a
2058 potential named operand of the form [<name>]. In place, replace
2059 the name and brackets with a number. Return a pointer to the
2060 balance of the string after substitution. */
2063 resolve_operand_name_1 (char *p, tree outputs, tree inputs)
2070 /* Collect the operand name. */
2071 q = strchr (p, ']');
2074 error ("missing close brace for named operand");
2075 return strchr (p, '\0');
2079 /* Resolve the name to a number. */
2080 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
2082 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2085 const char *c = TREE_STRING_POINTER (name);
2086 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2090 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
2092 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2095 const char *c = TREE_STRING_POINTER (name);
2096 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2102 error ("undefined named operand '%s'", p + 1);
2106 /* Replace the name with the number. Unfortunately, not all libraries
2107 get the return value of sprintf correct, so search for the end of the
2108 generated string by hand. */
2109 sprintf (p, "%d", op);
2110 p = strchr (p, '\0');
2112 /* Verify the no extra buffer space assumption. */
2116 /* Shift the rest of the buffer down to fill the gap. */
2117 memmove (p, q + 1, strlen (q + 1) + 1);
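/* Illustrative example (hypothetical operand names): for

       asm ("add %[sum], %[a], %[b]"
            : [sum] "=r" (s)
            : [a] "r" (x), [b] "r" (y));

   the loops above number outputs before inputs, so the template is
   rewritten in place to "add %0, %1, %2"; a matching constraint written
   as "[sum]" in an input would likewise be rewritten to "0".  */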
2122 /* Generate RTL to evaluate the expression EXP
2123 and remember it in case this is the VALUE in a ({... VALUE; }) construct.
2124 Provided just for backward-compatibility. expand_expr_stmt_value()
2125 should be used for new code. */
2128 expand_expr_stmt (tree exp)
2130 expand_expr_stmt_value (exp, -1, 1);
2133 /* Generate RTL to evaluate the expression EXP. WANT_VALUE tells
2134 whether to (1) save the value of the expression, (0) discard it or
2135 (-1) use expr_stmts_for_value to tell. The use of -1 is
2136 deprecated, and retained only for backward compatibility. */
2139 expand_expr_stmt_value (tree exp, int want_value, int maybe_last)
2145 if (want_value == -1)
2146 want_value = expr_stmts_for_value != 0;
2148 /* If -Wextra, warn about statements with no side effects,
2149 except for an explicit cast to void (e.g. for assert()), and
2150 except for last statement in ({...}) where they may be useful. */
2152 && (expr_stmts_for_value == 0 || ! maybe_last)
2153 && exp != error_mark_node
2154 && warn_unused_value)
2156 if (TREE_SIDE_EFFECTS (exp))
2157 warn_if_unused_value (exp);
2158 else if (!VOID_TYPE_P (TREE_TYPE (exp)))
2159 warning ("%Hstatement with no effect", &emit_locus);
2162 /* If EXP is of function type and we are expanding statements for
2163 value, convert it to pointer-to-function. */
2164 if (want_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
2165 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
2167 /* The call to `expand_expr' could cause last_expr_type and
2168 last_expr_value to get reset. Therefore, we set last_expr_value
2169 and last_expr_type *after* calling expand_expr. */
2170 value = expand_expr_real (exp, want_value ? NULL_RTX : const0_rtx,
2171 VOIDmode, 0, &alt_rtl);
2172 type = TREE_TYPE (exp);
2174 /* If all we do is reference a volatile value in memory,
2175 copy it to a register to be sure it is actually touched. */
2176 if (value && GET_CODE (value) == MEM && TREE_THIS_VOLATILE (exp))
2178 if (TYPE_MODE (type) == VOIDmode)
2180 else if (TYPE_MODE (type) != BLKmode)
2181 value = copy_to_reg (value);
2184 rtx lab = gen_label_rtx ();
2186 /* Compare the value with itself to reference it. */
2187 emit_cmp_and_jump_insns (value, value, EQ,
2188 expand_expr (TYPE_SIZE (type),
2189 NULL_RTX, VOIDmode, 0),
2195 /* If this expression is part of a ({...}) and is in memory, we may have
2196 to preserve temporaries. */
2197 preserve_temp_slots (value);
2199 /* Free any temporaries used to evaluate this expression. Any temporary
2200 used as a result of this expression will already have been preserved above. */
2206 last_expr_value = value;
2207 last_expr_alt_rtl = alt_rtl;
2208 last_expr_type = type;
2214 /* Warn if EXP contains any computations whose results are not used.
2215 Return 1 if a warning is printed; 0 otherwise. */
2218 warn_if_unused_value (tree exp)
2220 if (TREE_USED (exp))
2223 /* Don't warn about void constructs. This includes casting to void,
2224 void function calls, and statement expressions with a final cast to void. */
2226 if (VOID_TYPE_P (TREE_TYPE (exp)))
2229 switch (TREE_CODE (exp))
2231 case PREINCREMENT_EXPR:
2232 case POSTINCREMENT_EXPR:
2233 case PREDECREMENT_EXPR:
2234 case POSTDECREMENT_EXPR:
2240 case TRY_CATCH_EXPR:
2241 case WITH_CLEANUP_EXPR:
2246 /* For a binding, warn if no side effect within it. */
2247 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2250 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2252 case TRUTH_ORIF_EXPR:
2253 case TRUTH_ANDIF_EXPR:
2254 /* In && or ||, warn if 2nd operand has no side effect. */
2255 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2258 if (TREE_NO_UNUSED_WARNING (exp))
2260 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
2262 /* Let people do `(foo (), 0)' without a warning. */
2263 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2265 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2269 case NON_LVALUE_EXPR:
2270 /* Don't warn about conversions not explicit in the user's program. */
2271 if (TREE_NO_UNUSED_WARNING (exp))
2273 /* Assignment to a cast usually results in a cast of a modify.
2274 Don't complain about that. There can be an arbitrary number of
2275 casts before the modify, so we must loop until we find the first
2276 non-cast expression and then test to see if that is a modify. */
2278 tree tem = TREE_OPERAND (exp, 0);
2280 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2281 tem = TREE_OPERAND (tem, 0);
2283 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2284 || TREE_CODE (tem) == CALL_EXPR)
2290 /* Don't warn about automatic dereferencing of references, since
2291 the user cannot control it. */
2292 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2293 return warn_if_unused_value (TREE_OPERAND (exp, 0));
2297 /* Referencing a volatile value is a side effect, so don't warn. */
2299 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2300 && TREE_THIS_VOLATILE (exp))
2303 /* If this is an expression which has no operands, there is no value
2304 to be unused. There are no such language-independent codes,
2305 but front ends may define such. */
2306 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2307 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
2311 /* If this is an expression with side effects, don't warn. */
2312 if (TREE_SIDE_EFFECTS (exp))
2315 warning ("%Hvalue computed is not used", &emit_locus);
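/* Illustrative examples (hypothetical) of how the cases above play out:

       x + 1;           warns: value computed is not used
       (void) f ();     no warning: explicit cast to void
       (f (), 0);       no warning: constant 2nd operand of the comma
       *volatile_p;     no warning: volatile reference is a side effect
*/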
2320 /* Clear out the memory of the last expression evaluated. */
2323 clear_last_expr (void)
2325 last_expr_type = NULL_TREE;
2326 last_expr_value = NULL_RTX;
2327 last_expr_alt_rtl = NULL_RTX;
2330 /* Begin a statement-expression, i.e., a series of statements which
2331 may return a value. Return the RTL_EXPR for this statement expr.
2332 The caller must save that value and pass it to
2333 expand_end_stmt_expr. If HAS_SCOPE is nonzero, temporaries created
2334 in the statement-expression are deallocated at the end of the expression. */
2338 expand_start_stmt_expr (int has_scope)
2342 /* Make the RTL_EXPR node temporary, not momentary,
2343 so that rtl_expr_chain doesn't become garbage. */
2344 t = make_node (RTL_EXPR);
2345 do_pending_stack_adjust ();
2347 start_sequence_for_rtl_expr (t);
2351 expr_stmts_for_value++;
2355 /* Restore the previous state at the end of a statement that returns a value.
2356 Returns a tree node representing the statement's value and the
2357 insns to compute the value.
2359 The nodes of that expression have been freed by now, so we cannot use them.
2360 But we don't want to do that anyway; the expression has already been
2361 evaluated and now we just want to use the value. So generate an RTL_EXPR
2362 with the proper type and RTL value.
2364 If the last substatement was not an expression,
2365 return something with type `void'. */
2368 expand_end_stmt_expr (tree t)
2372 if (! last_expr_value || ! last_expr_type)
2374 last_expr_value = const0_rtx;
2375 last_expr_alt_rtl = NULL_RTX;
2376 last_expr_type = void_type_node;
2378 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2379 /* Remove any possible QUEUED. */
2380 last_expr_value = protect_from_queue (last_expr_value, 0);
2384 TREE_TYPE (t) = last_expr_type;
2385 RTL_EXPR_RTL (t) = last_expr_value;
2386 RTL_EXPR_ALT_RTL (t) = last_expr_alt_rtl;
2387 RTL_EXPR_SEQUENCE (t) = get_insns ();
2389 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2393 /* Don't consider deleting this expr or containing exprs at tree level. */
2394 TREE_SIDE_EFFECTS (t) = 1;
2395 /* Propagate volatility of the actual RTL expr. */
2396 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2399 expr_stmts_for_value--;
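/* Illustrative example (not original code): for the GNU
   statement-expression

       int y = ({ int t = f (); t + 1; });

   the front end calls expand_start_stmt_expr before the statements and
   expand_end_stmt_expr after them; `t + 1' is the last expression, so its
   value and type become last_expr_value/last_expr_type and are carried by
   the RTL_EXPR returned above.  */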
2404 /* Generate RTL for the start of an if-then. COND is the expression
2405 whose truth should be tested.
2407 If EXITFLAG is nonzero, this conditional is visible to
2408 `exit_something'. */
2411 expand_start_cond (tree cond, int exitflag)
2413 struct nesting *thiscond = ALLOC_NESTING ();
2415 /* Make an entry on cond_stack for the cond we are entering. */
2417 thiscond->desc = COND_NESTING;
2418 thiscond->next = cond_stack;
2419 thiscond->all = nesting_stack;
2420 thiscond->depth = ++nesting_depth;
2421 thiscond->data.cond.next_label = gen_label_rtx ();
2422 /* Before we encounter an `else', we don't need a separate exit label
2423 unless there are supposed to be exit statements
2424 to exit this conditional. */
2425 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2426 thiscond->data.cond.endif_label = thiscond->exit_label;
2427 cond_stack = thiscond;
2428 nesting_stack = thiscond;
2430 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2433 /* Generate RTL between the then-clause and the elseif-clause
2434 of an if-then-elseif-.... */
2437 expand_start_elseif (tree cond)
2439 if (cond_stack->data.cond.endif_label == 0)
2440 cond_stack->data.cond.endif_label = gen_label_rtx ();
2441 emit_jump (cond_stack->data.cond.endif_label);
2442 emit_label (cond_stack->data.cond.next_label);
2443 cond_stack->data.cond.next_label = gen_label_rtx ();
2444 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2447 /* Generate RTL between the then-clause and the else-clause
2448 of an if-then-else. */
2451 expand_start_else (void)
2453 if (cond_stack->data.cond.endif_label == 0)
2454 cond_stack->data.cond.endif_label = gen_label_rtx ();
2456 emit_jump (cond_stack->data.cond.endif_label);
2457 emit_label (cond_stack->data.cond.next_label);
2458 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2461 /* After calling expand_start_else, turn this "else" into an "else if"
2462 by providing another condition. */
2465 expand_elseif (tree cond)
2467 cond_stack->data.cond.next_label = gen_label_rtx ();
2468 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2471 /* Generate RTL for the end of an if-then.
2472 Pop the record for it off of cond_stack. */
2475 expand_end_cond (void)
2477 struct nesting *thiscond = cond_stack;
2479 do_pending_stack_adjust ();
2480 if (thiscond->data.cond.next_label)
2481 emit_label (thiscond->data.cond.next_label);
2482 if (thiscond->data.cond.endif_label)
2483 emit_label (thiscond->data.cond.endif_label);
2485 POPSTACK (cond_stack);
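/* A sketch (illustration only, not part of this file) of how a front end
   might drive the routines above for `if (a) S1; else if (b) S2; else S3;'.  */
#if 0
  expand_start_cond (a, 0);     /* test A; jump to next_label if false */
  /* ... expand S1 ... */
  expand_start_elseif (b);      /* jump to endif; emit next_label; test B */
  /* ... expand S2 ... */
  expand_start_else ();         /* jump to endif; emit next_label */
  /* ... expand S3 ... */
  expand_end_cond ();           /* emit endif_label; pop cond_stack */
#endif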
2489 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2490 loop should be exited by `exit_something'. This is a loop for which
2491 `expand_continue' will jump to the top of the loop.
2493 Make an entry on loop_stack to record the labels associated with this loop. */
2497 expand_start_loop (int exit_flag)
2499 struct nesting *thisloop = ALLOC_NESTING ();
2501 /* Make an entry on loop_stack for the loop we are entering. */
2503 thisloop->desc = LOOP_NESTING;
2504 thisloop->next = loop_stack;
2505 thisloop->all = nesting_stack;
2506 thisloop->depth = ++nesting_depth;
2507 thisloop->data.loop.start_label = gen_label_rtx ();
2508 thisloop->data.loop.end_label = gen_label_rtx ();
2509 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2510 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2511 loop_stack = thisloop;
2512 nesting_stack = thisloop;
2514 do_pending_stack_adjust ();
2516 emit_note (NOTE_INSN_LOOP_BEG);
2517 emit_label (thisloop->data.loop.start_label);
2522 /* Like expand_start_loop but for a loop where the continuation point
2523 (for expand_continue_loop) will be specified explicitly. */
2526 expand_start_loop_continue_elsewhere (int exit_flag)
2528 struct nesting *thisloop = expand_start_loop (exit_flag);
2529 loop_stack->data.loop.continue_label = gen_label_rtx ();
2533 /* Begin a null, aka do { } while (0) "loop". But since the contents
2534 of said loop can still contain a break, we must frob the loop nest. */
2537 expand_start_null_loop (void)
2539 struct nesting *thisloop = ALLOC_NESTING ();
2541 /* Make an entry on loop_stack for the loop we are entering. */
2543 thisloop->desc = LOOP_NESTING;
2544 thisloop->next = loop_stack;
2545 thisloop->all = nesting_stack;
2546 thisloop->depth = ++nesting_depth;
2547 thisloop->data.loop.start_label = emit_note (NOTE_INSN_DELETED);
2548 thisloop->data.loop.end_label = gen_label_rtx ();
2549 thisloop->data.loop.continue_label = thisloop->data.loop.end_label;
2550 thisloop->exit_label = thisloop->data.loop.end_label;
2551 loop_stack = thisloop;
2552 nesting_stack = thisloop;
2557 /* Specify the continuation point for a loop started with
2558 expand_start_loop_continue_elsewhere.
2559 Use this at the point in the code to which a continue statement should jump. */
2563 expand_loop_continue_here (void)
2565 do_pending_stack_adjust ();
2566 emit_note (NOTE_INSN_LOOP_CONT);
2567 emit_label (loop_stack->data.loop.continue_label);
2570 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2571 Pop the block off of loop_stack. */
2574 expand_end_loop (void)
2576 rtx start_label = loop_stack->data.loop.start_label;
2578 int eh_regions, debug_blocks;
2581 /* Mark the continue-point at the top of the loop if none elsewhere. */
2582 if (start_label == loop_stack->data.loop.continue_label)
2583 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2585 do_pending_stack_adjust ();
2587 /* If the loop starts with a loop exit, roll that to the end where
2588 it will optimize together with the jump back.
2590 If the loop presently looks like this (in pseudo-C):

        start_label:
2594      if (test) goto end_label;
          (NOTE_INSN_LOOP_END_TOP_COND is emitted here)
          body;
          goto start_label;
        end_label:

2600 transform it to look like:

          goto start_label;
        top_label:
          body;
        start_label:
2607      if (test) goto end_label;
          goto top_label;
        end_label:
2611 We rely on the presence of NOTE_INSN_LOOP_END_TOP_COND to mark
2612 the end of the entry conditional. Without this, our lexical scan
2613 can't tell the difference between an entry conditional and a
2614 body conditional that exits the loop. Mistaking the two means
2615 that we can misplace the NOTE_INSN_LOOP_CONT note, which can
2616 screw up loop unrolling.
2618 Things will be oh so much better when loop optimization is done
2619 off of a proper control flow graph... */
2621 /* Scan insns from the top of the loop looking for the END_TOP_COND note. */
2624 eh_regions = debug_blocks = 0;
2625 for (etc_note = start_label; etc_note ; etc_note = NEXT_INSN (etc_note))
2626 if (GET_CODE (etc_note) == NOTE)
2628 if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_END_TOP_COND)
2631 /* We must not walk into a nested loop. */
2632 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_BEG)
2634 etc_note = NULL_RTX;
2638 /* At the same time, scan for EH region notes, as we don't want
2639 to scrog region nesting. This shouldn't happen, but... */
2640 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_BEG)
2642 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_END)
2644 if (--eh_regions < 0)
2645 /* We've come to the end of an EH region, but never saw the
2646 beginning of that region. That means that an EH region
2647 begins before the top of the loop, and ends in the middle
2648 of it. The existence of such a situation violates a basic
2649 assumption in this code, since that would imply that even
2650 when EH_REGIONS is zero, we might move code out of an
2651 exception region. */
2655 /* Likewise for debug scopes. In this case we'll either (1) move
2656 all of the notes if they are properly nested or (2) leave the
2657 notes alone and only rotate the loop at high optimization
2658 levels when we expect to scrog debug info. */
2659 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_BEG)
2661 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_END)
2664 else if (INSN_P (etc_note))
2671 && (debug_blocks == 0 || optimize >= 2)
2672 && NEXT_INSN (etc_note) != NULL_RTX
2673 && ! any_condjump_p (get_last_insn ()))
2675 /* We found one. Move everything from START to ETC to the end
2676 of the loop, and add a jump from the top of the loop. */
2677 rtx top_label = gen_label_rtx ();
2678 rtx start_move = start_label;
2680 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2681 then we want to move this note also. */
2682 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2683 && NOTE_LINE_NUMBER (PREV_INSN (start_move)) == NOTE_INSN_LOOP_CONT)
2684 start_move = PREV_INSN (start_move);
2686 emit_label_before (top_label, start_move);
2688 /* Actually move the insns. If the debug scopes are nested, we
2689 can move everything at once. Otherwise we have to move them
2690 one by one and squeeze out the block notes. */
2691 if (debug_blocks == 0)
2692 reorder_insns (start_move, etc_note, get_last_insn ());
2695 rtx insn, next_insn;
2696 for (insn = start_move; insn; insn = next_insn)
2698 /* Figure out which insn comes after this one. We have
2699 to do this before we move INSN. */
2700 next_insn = (insn == etc_note ? NULL : NEXT_INSN (insn));
2702 if (GET_CODE (insn) == NOTE
2703 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2704 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2707 reorder_insns (insn, insn, get_last_insn ());
2711 /* Add the jump from the top of the loop. */
2712 emit_jump_insn_before (gen_jump (start_label), top_label);
2713 emit_barrier_before (top_label);
2714 start_label = top_label;
2717 emit_jump (start_label);
2718 emit_note (NOTE_INSN_LOOP_END);
2719 emit_label (loop_stack->data.loop.end_label);
2721 POPSTACK (loop_stack);
2726 /* Finish a null loop, aka do { } while (0). */
2729 expand_end_null_loop (void)
2731 do_pending_stack_adjust ();
2732 emit_label (loop_stack->data.loop.end_label);
2734 POPSTACK (loop_stack);
2739 /* Generate a jump to the current loop's continue-point.
2740 This is usually the top of the loop, but may be specified
2741 explicitly elsewhere. If not currently inside a loop,
2742 return 0 and do nothing; caller will print an error message. */
2745 expand_continue_loop (struct nesting *whichloop)
2747 /* Emit information for branch prediction. */
2750 if (flag_guess_branch_prob)
2752 note = emit_note (NOTE_INSN_PREDICTION);
2753 NOTE_PREDICTION (note) = NOTE_PREDICT (PRED_CONTINUE, IS_TAKEN);
2757 whichloop = loop_stack;
2760 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2765 /* Generate a jump to exit the current loop. If not currently inside a loop,
2766 return 0 and do nothing; caller will print an error message. */
2769 expand_exit_loop (struct nesting *whichloop)
2773 whichloop = loop_stack;
2776 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2780 /* Generate a conditional jump to exit the current loop if COND
2781 evaluates to zero. If not currently inside a loop,
2782 return 0 and do nothing; caller will print an error message. */
2785 expand_exit_loop_if_false (struct nesting *whichloop, tree cond)
2791 whichloop = loop_stack;
2795 if (integer_nonzerop (cond))
2797 if (integer_zerop (cond))
2798 return expand_exit_loop (whichloop);
2800 /* Check if we definitely won't need a fixup. */
2801 if (whichloop == nesting_stack)
2803 jumpifnot (cond, whichloop->data.loop.end_label);
2807 /* In order to handle fixups, we actually create a conditional jump
2808 around an unconditional branch to exit the loop. If fixups are
2809 necessary, they go before the unconditional branch. */
2811 label = gen_label_rtx ();
2812 jumpif (cond, label);
2813 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2820 /* Like expand_exit_loop_if_false except also emit a note marking
2821 the end of the conditional. Should only be used immediately
2822 after expand_start_loop. */
2825 expand_exit_loop_top_cond (struct nesting *whichloop, tree cond)
2827 if (! expand_exit_loop_if_false (whichloop, cond))
2830 emit_note (NOTE_INSN_LOOP_END_TOP_COND);
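/* A sketch (illustration only, not part of this file): one plausible
   expansion of `while (cond) body;' using the loop routines above.  */
#if 0
  expand_start_loop (1);                  /* LOOP_BEG note; start_label */
  expand_exit_loop_top_cond (0, cond);    /* exit if COND false; mark note */
  /* ... expand BODY ... */
  expand_end_loop ();                     /* jump back; LOOP_END; end_label */
#endif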
2834 /* Return nonzero if we should preserve sub-expressions as separate
2835 pseudos. We never do so if we aren't optimizing. We always do so
2836 if -fexpensive-optimizations.
2838 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2839 the loop may still be a small one. */
2842 preserve_subexpressions_p (void)
2846 if (flag_expensive_optimizations)
2849 if (optimize == 0 || cfun == 0 || cfun->stmt == 0 || loop_stack == 0)
2852 insn = get_last_insn_anywhere ();
2855 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2856 < n_non_fixed_regs * 3));
2860 /* Generate a jump to exit the current loop, conditional, binding contour
2861 or case statement. Not all such constructs are visible to this function,
2862 only those started with EXIT_FLAG nonzero. Individual languages use
2863 the EXIT_FLAG parameter to control which kinds of constructs you can exit this way.
2866 If not currently inside anything that can be exited,
2867 return 0 and do nothing; caller will print an error message. */
2870 expand_exit_something (void)
2874 for (n = nesting_stack; n; n = n->all)
2875 if (n->exit_label != 0)
2877 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2884 /* Generate RTL to return from the current function, with no value.
2885 (That is, we do not do anything about returning any value.) */
2888 expand_null_return (void)
2892 last_insn = get_last_insn ();
2894 /* If this function was declared to return a value, but we
2895 didn't, clobber the return registers so that they are not
2896 propagated live to the rest of the function. */
2897 clobber_return_register ();
2899 expand_null_return_1 (last_insn);
2902 /* Generate RTL to return directly from the current function.
2903 (That is, we bypass any return value.) */
2906 expand_naked_return (void)
2908 rtx last_insn, end_label;
2910 last_insn = get_last_insn ();
2911 end_label = naked_return_label;
2913 clear_pending_stack_adjust ();
2914 do_pending_stack_adjust ();
2918 end_label = naked_return_label = gen_label_rtx ();
2919 expand_goto_internal (NULL_TREE, end_label, last_insn);
2922 /* Try to guess whether the returned value VAL indicates an error code. */
2923 static enum br_predictor
2924 return_prediction (rtx val)
2926 /* Different heuristics for pointers and scalars. */
2927 if (POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
2929 /* NULL is usually not returned. */
2930 if (val == const0_rtx)
2931 return PRED_NULL_RETURN;
2935 /* Negative return values are often used to indicate errors. */
2937 if (GET_CODE (val) == CONST_INT
2938 && INTVAL (val) < 0)
2939 return PRED_NEGATIVE_RETURN;
2940 /* Constant return values are also usually errors;
2941 zero/one often mean booleans, so exclude them from the heuristics. */
2943 if (CONSTANT_P (val)
2944 && (val != const0_rtx && val != const1_rtx))
2945 return PRED_CONST_RETURN;
2947 return PRED_NO_PREDICTION;
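/* Illustrative examples (hypothetical) of the heuristics above:

       return NULL;   pointer case, val == const0_rtx => PRED_NULL_RETURN
       return -1;     scalar, negative constant       => PRED_NEGATIVE_RETURN
       return 42;     scalar constant, not 0 or 1     => PRED_CONST_RETURN
       return 0;      scalar 0/1 look boolean         => PRED_NO_PREDICTION
*/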
2951 /* If the current function returns values in the most significant part
2952 of a register, shift return value VAL appropriately. The mode of
2953 the function's return type is known not to be BLKmode. */
2956 shift_return_value (rtx val)
2960 type = TREE_TYPE (DECL_RESULT (current_function_decl));
2961 if (targetm.calls.return_in_msb (type))
2964 HOST_WIDE_INT shift;
2966 target = DECL_RTL (DECL_RESULT (current_function_decl));
2967 shift = (GET_MODE_BITSIZE (GET_MODE (target))
2968 - BITS_PER_UNIT * int_size_in_bytes (type));
2970 val = expand_binop (GET_MODE (target), ashl_optab,
2971 gen_lowpart (GET_MODE (target), val),
2972 GEN_INT (shift), target, 1, OPTAB_WIDEN);
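/* Worked example (assuming a 32-bit return register): a 3-byte struct
   returned at the most significant end gives shift = 32 - 8 * 3 = 8, so
   VAL is shifted left by 8 bits to occupy the top 24 bits.  */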
2978 /* Generate RTL to return from the current function, with value VAL. */
2981 expand_value_return (rtx val)
2985 enum br_predictor pred;
2987 if (flag_guess_branch_prob
2988 && (pred = return_prediction (val)) != PRED_NO_PREDICTION)
2990 /* Emit information for branch prediction. */
2993 note = emit_note (NOTE_INSN_PREDICTION);
2995 NOTE_PREDICTION (note) = NOTE_PREDICT (pred, NOT_TAKEN);
2999 last_insn = get_last_insn ();
3000 return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
3002 /* Copy the value to the return location
3003 unless it's already there. */
3005 if (return_reg != val)
3007 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
3008 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
3010 int unsignedp = TREE_UNSIGNED (type);
3011 enum machine_mode old_mode
3012 = DECL_MODE (DECL_RESULT (current_function_decl));
3013 enum machine_mode mode
3014 = promote_mode (type, old_mode, &unsignedp, 1);
3016 if (mode != old_mode)
3017 val = convert_modes (mode, old_mode, val, unsignedp);
3019 if (GET_CODE (return_reg) == PARALLEL)
3020 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3022 emit_move_insn (return_reg, val);
3025 expand_null_return_1 (last_insn);
3028 /* Output a return with no value. If LAST_INSN is nonzero,
3029 pretend that the return takes place after LAST_INSN. */
3032 expand_null_return_1 (rtx last_insn)
3034 rtx end_label = cleanup_label ? cleanup_label : return_label;
3036 clear_pending_stack_adjust ();
3037 do_pending_stack_adjust ();
3041 end_label = return_label = gen_label_rtx ();
3042 expand_goto_internal (NULL_TREE, end_label, last_insn);
3045 /* Generate RTL to evaluate the expression RETVAL and return it
3046 from the current function. */
3049 expand_return (tree retval)
3051 /* If there are any cleanups to be performed, then they will
3052 be inserted following LAST_INSN. It is desirable
3053 that the last_insn, for such purposes, should be the
3054 last insn before computing the return value. Otherwise, cleanups
3055 which call functions can clobber the return value. */
3056 /* ??? rms: I think that is erroneous, because in C++ it would
3057 run destructors on variables that might be used in the subsequent
3058 computation of the return value. */
3064 /* If function wants no value, give it none. */
3065 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3067 expand_expr (retval, NULL_RTX, VOIDmode, 0);
3069 expand_null_return ();
3073 if (retval == error_mark_node)
3075 /* Treat this like a return of no value from a function that returned a value. */
3077 expand_null_return ();
3080 else if (TREE_CODE (retval) == RESULT_DECL)
3081 retval_rhs = retval;
3082 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
3083 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3084 retval_rhs = TREE_OPERAND (retval, 1);
3085 else if (VOID_TYPE_P (TREE_TYPE (retval)))
3086 /* Recognize tail-recursive call to void function. */
3087 retval_rhs = retval;
3089 retval_rhs = NULL_TREE;
3091 last_insn = get_last_insn ();
3093 /* Distribute return down conditional expr if either of the sides
3094 may involve tail recursion (see test below). This enhances the number
3095 of tail recursions we see. Don't do this always since it can produce
3096 sub-optimal code in some cases and we distribute assignments into
3097 conditional expressions when it would help. */
3099 if (optimize && retval_rhs != 0
3100 && frame_offset == 0
3101 && TREE_CODE (retval_rhs) == COND_EXPR
3102 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
3103 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
3105 rtx label = gen_label_rtx ();
3108 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
3109 start_cleanup_deferral ();
3110 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3111 DECL_RESULT (current_function_decl),
3112 TREE_OPERAND (retval_rhs, 1));
3113 TREE_SIDE_EFFECTS (expr) = 1;
3114 expand_return (expr);
3117 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3118 DECL_RESULT (current_function_decl),
3119 TREE_OPERAND (retval_rhs, 2));
3120 TREE_SIDE_EFFECTS (expr) = 1;
3121 expand_return (expr);
3122 end_cleanup_deferral ();
3126 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3128 /* If the result is an aggregate that is being returned in one (or more)
3129 registers, load the registers here. The compiler currently can't handle
3130 copying a BLKmode value into registers. We could put this code in a
3131 more general area (for use by everyone instead of just function
3132 call/return), but until this feature is generally usable it is kept here
3133 (and in expand_call). The value must go into a pseudo in case there
3134 are cleanups that will clobber the real return register. */
3137 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3138 && GET_CODE (result_rtl) == REG)
3141 unsigned HOST_WIDE_INT bitpos, xbitpos;
3142 unsigned HOST_WIDE_INT padding_correction = 0;
3143 unsigned HOST_WIDE_INT bytes
3144 = int_size_in_bytes (TREE_TYPE (retval_rhs));
3145 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3146 unsigned int bitsize
3147 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
3148 rtx *result_pseudos = alloca (sizeof (rtx) * n_regs);
3149 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
3150 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
3151 enum machine_mode tmpmode, result_reg_mode;
3155 expand_null_return ();
3159 /* If the structure doesn't take up a whole number of words, see
3160 whether the register value should be padded on the left or on
3161 the right. Set PADDING_CORRECTION to the number of padding
3162 bits needed on the left side.
3164 In most ABIs, the structure will be returned at the least significant
3165 end of the register, which translates to right padding on little-endian
3166 targets and left padding on big-endian targets. The opposite
3167 holds if the structure is returned at the most significant
3168 end of the register. */
3169 if (bytes % UNITS_PER_WORD != 0
3170 && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
3171 ? !BYTES_BIG_ENDIAN
3172 : BYTES_BIG_ENDIAN))
3173 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
3174 * BITS_PER_UNIT));
3176 /* Copy the structure BITSIZE bits at a time. */
3177 for (bitpos = 0, xbitpos = padding_correction;
3178 bitpos < bytes * BITS_PER_UNIT;
3179 bitpos += bitsize, xbitpos += bitsize)
3181 /* We need a new destination pseudo each time xbitpos is
3182 on a word boundary and when xbitpos == padding_correction
3183 (the first time through). */
3184 if (xbitpos % BITS_PER_WORD == 0
3185 || xbitpos == padding_correction)
3187 /* Generate an appropriate register. */
3188 dst = gen_reg_rtx (word_mode);
3189 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
3191 /* Clear the destination before we move anything into it. */
3192 emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
3195 /* We need a new source operand each time bitpos is on a word boundary. */
3197 if (bitpos % BITS_PER_WORD == 0)
3198 src = operand_subword_force (result_val,
3199 bitpos / BITS_PER_WORD,
3202 /* Use bitpos for the source extraction (left justified) and
3203 xbitpos for the destination store (right justified). */
3204 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
3205 extract_bit_field (src, bitsize,
3206 bitpos % BITS_PER_WORD, 1,
3207 NULL_RTX, word_mode, word_mode,
3212 tmpmode = GET_MODE (result_rtl);
3213 if (tmpmode == BLKmode)
3215 /* Find the smallest integer mode large enough to hold the
3216 entire structure and use that mode instead of BLKmode
3217 on the USE insn for the return register. */
3218 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3219 tmpmode != VOIDmode;
3220 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
3221 /* Have we found a large enough mode? */
3222 if (GET_MODE_SIZE (tmpmode) >= bytes)
3225 /* No suitable mode found. */
3226 if (tmpmode == VOIDmode)
3229 PUT_MODE (result_rtl, tmpmode);
3232 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
3233 result_reg_mode = word_mode;
3235 result_reg_mode = tmpmode;
3236 result_reg = gen_reg_rtx (result_reg_mode);
3239 for (i = 0; i < n_regs; i++)
3240 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
3243 if (tmpmode != result_reg_mode)
3244 result_reg = gen_lowpart (tmpmode, result_reg);
3246 expand_value_return (result_reg);
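/* Worked example (assuming 32-bit words): for a 6-byte BLKmode value,
   bytes = 6 and n_regs = (6 + 3) / 4 = 2.  On a big-endian target that
   returns the value at the least significant end,
   padding_correction = 32 - (6 % 4) * 8 = 16, so the loop above fills
   the first destination pseudo starting at bit offset 16 (right
   justified) before moving on to the second word.  */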
3248 else if (retval_rhs != 0
3249 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3250 && (GET_CODE (result_rtl) == REG
3251 || (GET_CODE (result_rtl) == PARALLEL)))
3253 /* Calculate the return value into a temporary (usually a pseudo reg). */
3255 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
3256 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
3258 val = assign_temp (nt, 0, 0, 1);
3259 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
3260 val = force_not_mem (val);
3262 /* Return the calculated value, doing cleanups first. */
3263 expand_value_return (shift_return_value (val));
3267 /* No cleanups or no hard reg used;
3268 calculate value into hard return reg. */
3269 expand_expr (retval, const0_rtx, VOIDmode, 0);
3271 expand_value_return (result_rtl);
3275 /* Attempt to optimize a potential tail recursion call into a goto.
3276 ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
3277 where to place the jump to the tail recursion label.
3279 Return TRUE if the call was optimized into a goto. */
3282 optimize_tail_recursion (tree arguments, rtx last_insn)
3284 /* Finish checking validity, and if valid emit code to set the
3285 argument variables for the new call. */
3286 if (tail_recursion_args (arguments, DECL_ARGUMENTS (current_function_decl)))
3288 if (tail_recursion_label == 0)
3290 tail_recursion_label = gen_label_rtx ();
3291 emit_label_after (tail_recursion_label,
3292 tail_recursion_reentry);
3295 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
3302 /* Emit code to alter this function's formal parms for a tail-recursive call.
3303 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3304 FORMALS is the chain of decls of formals.
3305 Return 1 if this can be done;
3306 otherwise return 0 and do not emit any code. */
3309 tail_recursion_args (tree actuals, tree formals)
3311 tree a = actuals, f = formals;
3315 /* Check that number and types of actuals are compatible
3316 with the formals. This is not always true in valid C code.
3317 Also check that no formal needs to be addressable
3318 and that all formals are scalars. */
3320 /* Also count the args. */
3322 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3324 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3325 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
3327 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3330 if (a != 0 || f != 0)
3333 /* Compute all the actuals. */
3335 argvec = alloca (i * sizeof (rtx));
3337 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3338 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
3340 /* Find which actual values refer to current values of previous formals.
3341 Copy each of them now, before any formal is changed. */
3343 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3347 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3348 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3354 argvec[i] = copy_to_reg (argvec[i]);
3357 /* Store the values of the actuals into the formals. */
3359 for (f = formals, a = actuals, i = 0; f;
3360 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3362 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3363 emit_move_insn (DECL_RTL (f), argvec[i]);
3366 rtx tmp = argvec[i];
3367 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a)));
3368 promote_mode (TREE_TYPE (TREE_VALUE (a)), GET_MODE (tmp), &unsignedp, 0);
3370 if (DECL_MODE (f) != GET_MODE (DECL_RTL (f)))
3372 tmp = gen_reg_rtx (DECL_MODE (f));
3373 convert_move (tmp, argvec[i], unsignedp);
3375 convert_move (DECL_RTL (f), tmp, unsignedp);
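/* Illustrative sketch (hypothetical source): for a self-call in tail
   position,

       int fact (int n, int acc)
       { return n ? fact (n - 1, n * acc) : acc; }

   the code above first computes every actual (n - 1 and n * acc) into
   ARGVEC, copies any actual that still mentions a formal about to change,
   then stores them into the formals and jumps to tail_recursion_label --
   roughly `acc = n * acc; n = n - 1; goto top;' with no clobbering.  */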
3383 /* Generate the RTL code for entering a binding contour.
3384 The variables are declared one by one, by calls to `expand_decl'.
3386 FLAGS is a bitwise or of the following flags:
3388 1 - Nonzero if this construct should be visible to `exit_something'.
3391 2 - Nonzero if this contour does not require a
3392 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
3393 language-independent code should set this flag because they
3394 will not create corresponding BLOCK nodes. (There should be
3395 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
3396 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
3397 when expand_end_bindings is called.
3399 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
3400 optionally be supplied. If so, it becomes the NOTE_BLOCK for the note. */
3404 expand_start_bindings_and_block (int flags, tree block)
3406 struct nesting *thisblock = ALLOC_NESTING ();
3408 int exit_flag = ((flags & 1) != 0);
3409 int block_flag = ((flags & 2) == 0);
3411 /* If a BLOCK is supplied, then the caller should be requesting a
3412 NOTE_INSN_BLOCK_BEG note. */
3413 if (!block_flag && block)
3416 /* Create a note to mark the beginning of the block. */
3419 note = emit_note (NOTE_INSN_BLOCK_BEG);
3420 NOTE_BLOCK (note) = block;
3423 note = emit_note (NOTE_INSN_DELETED);
3425 /* Make an entry on block_stack for the block we are entering. */
3427 thisblock->desc = BLOCK_NESTING;
3428 thisblock->next = block_stack;
3429 thisblock->all = nesting_stack;
3430 thisblock->depth = ++nesting_depth;
3431 thisblock->data.block.stack_level = 0;
3432 thisblock->data.block.cleanups = 0;
3433 thisblock->data.block.exception_region = 0;
3434 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
3436 thisblock->data.block.conditional_code = 0;
3437 thisblock->data.block.last_unconditional_cleanup = note;
3438 /* When we insert instructions after the last unconditional cleanup,
3439 we don't adjust last_insn. That means that a later add_insn will
3440 clobber the instructions we've just added. The easiest way to
3441 fix this is to just insert another instruction here, so that the
3442 instructions inserted after the last unconditional cleanup are
3443 never the last instruction. */
3444 emit_note (NOTE_INSN_DELETED);
3447 && !(block_stack->data.block.cleanups == NULL_TREE
3448 && block_stack->data.block.outer_cleanups == NULL_TREE))
3449 thisblock->data.block.outer_cleanups
3450 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3451 block_stack->data.block.outer_cleanups);
3453 thisblock->data.block.outer_cleanups = 0;
3454 thisblock->data.block.label_chain = 0;
3455 thisblock->data.block.innermost_stack_block = stack_block_stack;
3456 thisblock->data.block.first_insn = note;
3457 thisblock->data.block.block_start_count = ++current_block_start_count;
3458 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3459 block_stack = thisblock;
3460 nesting_stack = thisblock;
3462 /* Make a new level for allocating stack slots. */
3466 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3467 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3468 expand_expr are made. After we end the region, we know that all
3469 space for all temporaries that were created by TARGET_EXPRs will be
3470 destroyed and their space freed for reuse. */
3473 expand_start_target_temps (void)
3475 /* This is so that even if the result is preserved, the space
3476 allocated will be freed, as we know that it is no longer in use. */
3479 /* Start a new binding layer that will keep track of all cleanup
3480 actions to be performed. */
3481 expand_start_bindings (2);
3483 target_temp_slot_level = temp_slot_level;
3487 expand_end_target_temps (void)
3489 expand_end_bindings (NULL_TREE, 0, 0);
3491 /* This is so that even if the result is preserved, the space
3492 allocated will be freed, as we know that it is no longer in use. */
3496 /* Given a pointer to a BLOCK node return nonzero if (and only if) the node
3497 in question represents the outermost pair of curly braces (i.e. the "body
3498 block") of a function or method.
3500 For any BLOCK node representing a "body block" of a function or method, the
3501 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
3502 represents the outermost (function) scope for the function or method (i.e.
3503 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
3504 *that* node in turn will point to the relevant FUNCTION_DECL node. */
3507 is_body_block (tree stmt)
3509 if (lang_hooks.no_body_blocks)
3512 if (TREE_CODE (stmt) == BLOCK)
3514 tree parent = BLOCK_SUPERCONTEXT (stmt);
3516 if (parent && TREE_CODE (parent) == BLOCK)
3518 tree grandparent = BLOCK_SUPERCONTEXT (parent);
3520 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
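/* Illustration (not original code): for `int f (int x) { int y; ... }'
   the BLOCK for the braces is the body block; its BLOCK_SUPERCONTEXT is
   the outermost function-scope BLOCK (which binds X), and that node's
   BLOCK_SUPERCONTEXT is the FUNCTION_DECL for F -- exactly the shape
   tested above.  */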
3528 /* True if we are currently emitting insns in an area of output code
3529 that is controlled by a conditional expression. This is used by
3530 the cleanup handling code to generate conditional cleanup actions. */
3533 conditional_context (void)
3535 return block_stack && block_stack->data.block.conditional_code;
3538 /* Return an opaque pointer to the current nesting level, so frontend code
3539 can check its own sanity. */
3542 current_nesting_level (void)
3544 return cfun ? block_stack : 0;
3547 /* Emit a handler label for a nonlocal goto handler.
3548 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3551 expand_nl_handler_label (rtx slot, rtx before_insn)
3554 rtx handler_label = gen_label_rtx ();
3556 /* Don't let cleanup_cfg delete the handler. */
3557 LABEL_PRESERVE_P (handler_label) = 1;
3560 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3561 insns = get_insns ();
3563 emit_insn_before (insns, before_insn);
3565 emit_label (handler_label);
3567 return handler_label;
3570 /* Emit code to restore vital registers at the beginning of a nonlocal goto handler. */
3573 expand_nl_goto_receiver (void)
3575 /* Clobber the FP when we get here, so we have to make sure it's
3576 marked as used by this function. */
3577 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
3579 /* Mark the static chain as clobbered here so life information
3580 doesn't get messed up for it. */
3581 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
3583 #ifdef HAVE_nonlocal_goto
3584 if (! HAVE_nonlocal_goto)
3586 /* First adjust our frame pointer to its actual value. It was
3587 previously set to the start of the virtual area corresponding to
3588 the stacked variables when we branched here and now needs to be
3589 adjusted to the actual hardware fp value.
3591 Assignments to virtual registers are converted by
3592 instantiate_virtual_regs into the corresponding assignment
3593 to the underlying register (fp in this case) that makes
3594 the original assignment true.
3595 So the following insn will actually be
3596 decrementing fp by STARTING_FRAME_OFFSET. */
3597 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3599 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3600 if (fixed_regs[ARG_POINTER_REGNUM])
3602 #ifdef ELIMINABLE_REGS
3603 /* If the argument pointer can be eliminated in favor of the
3604 frame pointer, we don't need to restore it. We assume here
3605 that if such an elimination is present, it can always be used.
3606 This is the case on all known machines; if we don't make this
3607 assumption, we do unnecessary saving on many machines. */
3608 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
3611 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
3612 if (elim_regs[i].from == ARG_POINTER_REGNUM
3613 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3616 if (i == ARRAY_SIZE (elim_regs))
3619 /* Now restore our arg pointer from the address at which it
3620 was saved in our stack frame. */
3621 emit_move_insn (virtual_incoming_args_rtx,
3622 copy_to_reg (get_arg_pointer_save_area (cfun)));
3627 #ifdef HAVE_nonlocal_goto_receiver
3628 if (HAVE_nonlocal_goto_receiver)
3629 emit_insn (gen_nonlocal_goto_receiver ());
3632 /* @@@ This is a kludge. Not all machine descriptions define a blockage
3633 insn, but we must not allow the code we just generated to be reordered
3634 by scheduling. Specifically, the update of the frame pointer must
3635 happen immediately, not later. So emit an ASM_INPUT to act as a blockage insn. */
3637 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
3640 /* Make handlers for nonlocal gotos taking place in the function calls in block THISBLOCK. */
3644 expand_nl_goto_receivers (struct nesting *thisblock)
3647 rtx afterward = gen_label_rtx ();
3652 /* Record the handler address in the stack slot for that purpose,
3653 during this block, saving and restoring the outer value. */
3654 if (thisblock->next != 0)
3655 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3657 rtx save_receiver = gen_reg_rtx (Pmode);
3658 emit_move_insn (XEXP (slot, 0), save_receiver);
3661 emit_move_insn (save_receiver, XEXP (slot, 0));
3662 insns = get_insns ();
3664 emit_insn_before (insns, thisblock->data.block.first_insn);
3667 /* Jump around the handlers; they run only when specially invoked. */
3668 emit_jump (afterward);
3670 /* Make a separate handler for each label. */
3671 link = nonlocal_labels;
3672 slot = nonlocal_goto_handler_slots;
3673 label_list = NULL_RTX;
3674 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3675 /* Skip any labels we shouldn't be able to jump to from here,
3676 we generate one special handler for all of them below which just calls abort. */
3678 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3681 lab = expand_nl_handler_label (XEXP (slot, 0),
3682 thisblock->data.block.first_insn);
3683 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3685 expand_nl_goto_receiver ();
3687 /* Jump to the "real" nonlocal label. */
3688 expand_goto (TREE_VALUE (link));
3691 /* A second pass over all nonlocal labels; this time we handle those
3692 we should not be able to jump to at this point. */
3693 link = nonlocal_labels;
3694 slot = nonlocal_goto_handler_slots;
3696 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3697 if (DECL_TOO_LATE (TREE_VALUE (link)))
3700 lab = expand_nl_handler_label (XEXP (slot, 0),
3701 thisblock->data.block.first_insn);
3702 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3708 expand_nl_goto_receiver ();
3709 expand_builtin_trap ();
3712 nonlocal_goto_handler_labels = label_list;
3713 emit_label (afterward);
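/* Illustrative example (hypothetical source) of the GNU extension these
   receivers implement: a nested function jumping to a label in its
   containing function, the label having been declared with __label__ as
   the local-labels extension requires.  */
#if 0
  void
  outer (void)
  {
    __label__ failure;
    void inner (int bad) { if (bad) goto failure; }
    /* ... code that calls inner, possibly through a pointer ... */
  failure:
    /* The nonlocal goto lands here via the handler emitted above.  */;
  }
#endif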
3716 /* Warn about any unused VARS (which may contain nodes other than
3717 VAR_DECLs, but such nodes are ignored). The nodes are connected
3718 via the TREE_CHAIN field. */
3721 warn_about_unused_variables (tree vars)
3725 if (warn_unused_variable)
3726 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3727 if (TREE_CODE (decl) == VAR_DECL
3728 && ! TREE_USED (decl)
3729 && ! DECL_IN_SYSTEM_HEADER (decl)
3730 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3731 warning ("%Junused variable '%D'", decl, decl);
3734 /* Generate RTL code to terminate a binding contour.
3736 VARS is the chain of VAR_DECL nodes for the variables bound in this
3737 contour. There may actually be other nodes in this chain, but any
3738 nodes other than VAR_DECLS are ignored.
3740 MARK_ENDS is nonzero if we should put a note at the beginning
3741 and end of this binding contour.
3743 DONT_JUMP_IN is positive if it is not valid to jump into this contour,
3744 zero if we can jump into this contour only if it does not have a saved
3745 stack level, and negative if we are not to check for invalid use of
3746 labels (because the front end does that). */
3749 expand_end_bindings (tree vars, int mark_ends, int dont_jump_in)
3751 struct nesting *thisblock = block_stack;
3753 /* If any of the variables in this scope were not used, warn the user. */
3755 warn_about_unused_variables (vars);
3757 if (thisblock->exit_label)
3759 do_pending_stack_adjust ();
3760 emit_label (thisblock->exit_label);
3763 /* If necessary, make handlers for nonlocal gotos taking
3764 place in the function calls in this block. */
3765 if (function_call_count != 0 && nonlocal_labels
3766 /* Make handler for outermost block
3767 if there were any nonlocal gotos to this function. */
3768 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3769 /* Make handler for inner block if it has something
3770 special to do when you jump out of it. */
3771 : (thisblock->data.block.cleanups != 0
3772 || thisblock->data.block.stack_level != 0)))
3773 expand_nl_goto_receivers (thisblock);
3775 /* Don't allow jumping into a block that has a stack level.
3776 Cleanups are allowed, though. */
3777 if (dont_jump_in > 0
3778 || (dont_jump_in == 0 && thisblock->data.block.stack_level != 0))
3780 struct label_chain *chain;
3782 /* Any labels in this block are no longer valid to go to.
3783 Mark them to cause an error message. */
3784 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3786 DECL_TOO_LATE (chain->label) = 1;
3787 /* If any goto without a fixup came to this label,
3788 that must be an error, because gotos without fixups
3789 come from outside all saved stack-levels. */
3790 if (TREE_ADDRESSABLE (chain->label))
3791 error ("%Jlabel '%D' used before containing binding contour",
3792 chain->label, chain->label);
3796 /* Restore stack level in effect before the block
3797 (only if variable-size objects allocated). */
3798 /* Perform any cleanups associated with the block. */
3800 if (thisblock->data.block.stack_level != 0
3801 || thisblock->data.block.cleanups != 0)
3806 /* Don't let cleanups affect ({...}) constructs. */
3807 int old_expr_stmts_for_value = expr_stmts_for_value;
3808 rtx old_last_expr_value = last_expr_value;
3809 rtx old_last_expr_alt_rtl = last_expr_alt_rtl;
3810 tree old_last_expr_type = last_expr_type;
3811 expr_stmts_for_value = 0;
3813 /* Only clean up here if this point can actually be reached. */
3814 insn = get_last_insn ();
3815 if (GET_CODE (insn) == NOTE)
3816 insn = prev_nonnote_insn (insn);
3817 reachable = (! insn || GET_CODE (insn) != BARRIER);
3819 /* Do the cleanups. */
3820 expand_cleanups (thisblock->data.block.cleanups, 0, reachable);
3822 do_pending_stack_adjust ();
3824 expr_stmts_for_value = old_expr_stmts_for_value;
3825 last_expr_value = old_last_expr_value;
3826 last_expr_alt_rtl = old_last_expr_alt_rtl;
3827 last_expr_type = old_last_expr_type;
3829 /* Restore the stack level. */
3831 if (reachable && thisblock->data.block.stack_level != 0)
3833 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3834 thisblock->data.block.stack_level, NULL_RTX);
3835 if (nonlocal_goto_handler_slots != 0)
3836 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3840 /* Any gotos out of this block must also do these things.
3841 Also report any gotos with fixups that came to labels in this block. */
3843 fixup_gotos (thisblock,
3844 thisblock->data.block.stack_level,
3845 thisblock->data.block.cleanups,
3846 thisblock->data.block.first_insn,
3850 /* Mark the beginning and end of the scope if requested.
3851 We do this now, after running cleanups on the variables
3852 just going out of scope, so they are in scope for their cleanups. */
3856 rtx note = emit_note (NOTE_INSN_BLOCK_END);
3857 NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
3860 /* Get rid of the beginning-mark if we don't make an end-mark. */
3861 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3863 /* Restore the temporary level of TARGET_EXPRs. */
3864 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
3866 /* Restore block_stack level for containing block. */
3868 stack_block_stack = thisblock->data.block.innermost_stack_block;
3869 POPSTACK (block_stack);
3871 /* Pop the stack slot nesting and free any slots at this level. */
3875 /* Generate code to save the stack pointer at the start of the current block
3876 and set up to restore it on exit. */
3879 save_stack_pointer (void)
3881 struct nesting *thisblock = block_stack;
3883 if (thisblock->data.block.stack_level == 0)
3885 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3886 &thisblock->data.block.stack_level,
3887 thisblock->data.block.first_insn);
3888 stack_block_stack = thisblock;
3892 /* Generate RTL for the automatic variable declaration DECL.
3893 (Other kinds of declarations are simply ignored if seen here.) */
3896 expand_decl (tree decl)
3900 type = TREE_TYPE (decl);
3902 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
3903 type in case this node is used in a reference. */
3904 if (TREE_CODE (decl) == CONST_DECL)
3906 DECL_MODE (decl) = TYPE_MODE (type);
3907 DECL_ALIGN (decl) = TYPE_ALIGN (type);
3908 DECL_SIZE (decl) = TYPE_SIZE (type);
3909 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
3913 /* Otherwise, only automatic variables need any expansion done. Static and
3914 external variables, and external functions, will be handled by
3915 `assemble_variable' (called from finish_decl). TYPE_DECL requires
3916 nothing. PARM_DECLs are handled in `assign_parms'. */
3917 if (TREE_CODE (decl) != VAR_DECL)
3920 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3923 /* Create the RTL representation for the variable. */
3925 if (type == error_mark_node)
3926 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
3928 else if (DECL_SIZE (decl) == 0)
3929 /* Variable with incomplete type. */
3932 if (DECL_INITIAL (decl) == 0)
3933 /* Error message was already done; now avoid a crash. */
3934 x = gen_rtx_MEM (BLKmode, const0_rtx);
3936 /* An initializer is going to decide the size of this array.
3937 Until we know the size, represent its address with a reg. */
3938 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3940 set_mem_attributes (x, decl, 1);
3941 SET_DECL_RTL (decl, x);
3943 else if (DECL_MODE (decl) != BLKmode
3944 /* If -ffloat-store, don't put explicit float vars into registers. */
3946 && !(flag_float_store
3947 && TREE_CODE (type) == REAL_TYPE)
3948 && ! TREE_THIS_VOLATILE (decl)
3949 && ! DECL_NONLOCAL (decl)
3950 && (DECL_REGISTER (decl) || DECL_ARTIFICIAL (decl) || optimize))
3952 /* Automatic variable that can go in a register. */
3953 int unsignedp = TREE_UNSIGNED (type);
3954 enum machine_mode reg_mode
3955 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3957 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
3959 if (!DECL_ARTIFICIAL (decl))
3960 mark_user_reg (DECL_RTL (decl));
3962 if (POINTER_TYPE_P (type))
3963 mark_reg_pointer (DECL_RTL (decl),
3964 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
3966 maybe_set_unchanging (DECL_RTL (decl), decl);
3968 /* If something wants our address, try to use ADDRESSOF. */
3969 if (TREE_ADDRESSABLE (decl))
3970 put_var_into_stack (decl, /*rescan=*/false);
3973 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
3974 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3975 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
3976 STACK_CHECK_MAX_VAR_SIZE)))
3978 /* Variable of fixed size that goes on the stack. */
3983 /* If we previously made RTL for this decl, it must be an array
3984 whose size was determined by the initializer.
3985 The old address was a register; set that register now
3986 to the proper address. */
3987 if (DECL_RTL_SET_P (decl))
3989 if (GET_CODE (DECL_RTL (decl)) != MEM
3990 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3991 abort ();
3992 oldaddr = XEXP (DECL_RTL (decl), 0);
3995 /* Set alignment we actually gave this decl. */
3996 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3997 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3998 DECL_USER_ALIGN (decl) = 0;
4000 x = assign_temp (decl, 1, 1, 1);
4001 set_mem_attributes (x, decl, 1);
4002 SET_DECL_RTL (decl, x);
4006 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
4007 if (addr != oldaddr)
4008 emit_move_insn (oldaddr, addr);
4012 /* Dynamic-size object: must push space on the stack. */
4014 rtx address, size, x;
4016 /* Record the stack pointer on entry to block, if we have
4017 not already done so. */
4018 do_pending_stack_adjust ();
4019 save_stack_pointer ();
4021 /* In function-at-a-time mode, variable_size doesn't expand this,
4022 so do it now. */
4023 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
4024 expand_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
4025 const0_rtx, VOIDmode, 0);
4027 /* Compute the variable's size, in bytes. */
4028 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
4031 /* Allocate space on the stack for the variable. Note that
4032 DECL_ALIGN says how the variable is to be aligned and we
4033 cannot use it to conclude anything about the alignment of
4034 the size. */
4035 address = allocate_dynamic_stack_space (size, NULL_RTX,
4036 TYPE_ALIGN (TREE_TYPE (decl)));
4038 /* Reference the variable indirect through that rtx. */
4039 x = gen_rtx_MEM (DECL_MODE (decl), address);
4040 set_mem_attributes (x, decl, 1);
4041 SET_DECL_RTL (decl, x);
4044 /* Indicate the alignment we actually gave this variable. */
4045 #ifdef STACK_BOUNDARY
4046 DECL_ALIGN (decl) = STACK_BOUNDARY;
4047 #else
4048 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
4049 #endif
4050 DECL_USER_ALIGN (decl) = 0;
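/* Editor's sketch (illustrative only): the three classes of automatic
   variables that expand_decl distinguishes above, in source terms.  */
#if 0
int
classify (int n)
{
  int i = 42;		/* scalar, not volatile: promoted pseudo register */
  char fixed[16];	/* fixed size: stack slot from assign_temp */
  char dynamic[n];	/* variable size: allocate_dynamic_stack_space */

  fixed[0] = (char) i;
  dynamic[0] = fixed[0];
  return dynamic[0];
}
#endif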
4054 /* Emit code to perform the initialization of a declaration DECL. */
4057 expand_decl_init (tree decl)
4059 int was_used = TREE_USED (decl);
4061 /* If this is a CONST_DECL, we don't have to generate any code. Likewise
4062 for static decls. */
4063 if (TREE_CODE (decl) == CONST_DECL
4064 || TREE_STATIC (decl))
4067 /* Compute and store the initial value now. */
4071 if (DECL_INITIAL (decl) == error_mark_node)
4073 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
4075 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
4076 || code == POINTER_TYPE || code == REFERENCE_TYPE)
4077 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
4078 0);
4081 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
4083 emit_line_note (DECL_SOURCE_LOCATION (decl));
4084 expand_assignment (decl, DECL_INITIAL (decl), 0);
4088 /* Don't let the initialization count as "using" the variable. */
4089 TREE_USED (decl) = was_used;
4091 /* Free any temporaries we made while initializing the decl. */
4092 preserve_temp_slots (NULL_RTX);
4097 /* CLEANUP is an expression to be executed at exit from this binding contour;
4098 for example, in C++, it might call the destructor for this variable.
4100 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
4101 CLEANUP multiple times, and have the correct semantics. This
4102 happens in exception handling, and for gotos, returns, and breaks
4103 that leave the current scope.
4105 If CLEANUP is nonzero and DECL is zero, we record a cleanup
4106 that is not associated with any particular variable. */
4109 expand_decl_cleanup (tree decl, tree cleanup)
4111 struct nesting *thisblock;
4113 /* Error if we are not in any block. */
4114 if (cfun == 0 || block_stack == 0)
4117 thisblock = block_stack;
4119 /* Record the cleanup if there is one. */
4125 tree *cleanups = &thisblock->data.block.cleanups;
4126 int cond_context = conditional_context ();
4130 rtx flag = gen_reg_rtx (word_mode);
4135 emit_move_insn (flag, const0_rtx);
4136 set_flag_0 = get_insns ();
4139 thisblock->data.block.last_unconditional_cleanup
4140 = emit_insn_after (set_flag_0,
4141 thisblock->data.block.last_unconditional_cleanup);
4143 emit_move_insn (flag, const1_rtx);
4145 cond = build_decl (VAR_DECL, NULL_TREE,
4146 (*lang_hooks.types.type_for_mode) (word_mode, 1));
4147 SET_DECL_RTL (cond, flag);
4149 /* Conditionalize the cleanup. */
4150 cleanup = build (COND_EXPR, void_type_node,
4151 (*lang_hooks.truthvalue_conversion) (cond),
4152 cleanup, integer_zero_node);
4153 cleanup = fold (cleanup);
4155 cleanups = &thisblock->data.block.cleanups;
4158 cleanup = unsave_expr (cleanup);
4160 t = *cleanups = tree_cons (decl, cleanup, *cleanups);
4163 /* If this block has a cleanup, it belongs in stack_block_stack. */
4164 stack_block_stack = thisblock;
4171 if (! using_eh_for_cleanups_p)
4172 TREE_ADDRESSABLE (t) = 1;
4173 else
4174 expand_eh_region_start ();
4181 thisblock->data.block.last_unconditional_cleanup
4182 = emit_insn_after (seq,
4183 thisblock->data.block.last_unconditional_cleanup);
4187 thisblock->data.block.last_unconditional_cleanup
4188 = get_last_insn ();
4189 /* When we insert instructions after the last unconditional cleanup,
4190 we don't adjust last_insn. That means that a later add_insn will
4191 clobber the instructions we've just added. The easiest way to
4192 fix this is to just insert another instruction here, so that the
4193 instructions inserted after the last unconditional cleanup are
4194 never the last instruction. */
4195 emit_note (NOTE_INSN_DELETED);
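/* Editor's note: one way to observe the machinery above from GNU C is
   the "cleanup" variable attribute; when the declaration sits in a
   conditional context, the flag/COND_EXPR pair built above makes the
   cleanup run only if the declaration was actually reached.  */
#if 0
static void
drop_lock (int **p)
{
  /* Runs whenever LK goes out of scope, however the scope is left.  */
}

void
critical (int *lock, int want)
{
  if (want)
    {
      int *lk __attribute__ ((cleanup (drop_lock))) = lock;
      /* goto, return and exception paths out of this block
	 still run drop_lock on LK.  */
    }
}
#endif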
4201 /* Like expand_decl_cleanup, but maybe only run the cleanup if an exception
4202 is thrown. */
4205 expand_decl_cleanup_eh (tree decl, tree cleanup, int eh_only)
4207 int ret = expand_decl_cleanup (decl, cleanup);
4208 if (cleanup && ret)
4209 {
4210 tree node = block_stack->data.block.cleanups;
4211 CLEANUP_EH_ONLY (node) = eh_only;
4212 }
4214 return ret;
4215 }
4216 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
4217 DECL_ELTS is the list of elements that belong to DECL's type.
4218 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
4221 expand_anon_union_decl (tree decl, tree cleanup, tree decl_elts)
4223 struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
4227 /* If any of the elements are addressable, so is the entire union. */
4228 for (t = decl_elts; t; t = TREE_CHAIN (t))
4229 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
4230 {
4231 TREE_ADDRESSABLE (decl) = 1;
4232 break;
4233 }
4236 expand_decl_cleanup (decl, cleanup);
4237 x = DECL_RTL (decl);
4239 /* Go through the elements, assigning RTL to each. */
4240 for (t = decl_elts; t; t = TREE_CHAIN (t))
4242 tree decl_elt = TREE_VALUE (t);
4243 tree cleanup_elt = TREE_PURPOSE (t);
4244 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4246 /* If any of the elements are used, so is the entire
4247 union. */
4249 TREE_USED (decl) = 1;
4251 /* Propagate the union's alignment to the elements. */
4252 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
4253 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
4255 /* If the element has BLKmode and the union doesn't, the union is
4256 aligned such that the element doesn't need to have BLKmode, so
4257 change the element's mode to the appropriate one for its size. */
4258 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4259 DECL_MODE (decl_elt) = mode
4260 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
4262 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4263 instead create a new MEM rtx with the proper mode. */
4264 if (GET_CODE (x) == MEM)
4266 if (mode == GET_MODE (x))
4267 SET_DECL_RTL (decl_elt, x);
4269 SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
4271 else if (GET_CODE (x) == REG)
4273 if (mode == GET_MODE (x))
4274 SET_DECL_RTL (decl_elt, x);
4276 SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
4281 /* Record the cleanup if there is one. */
4283 if (thisblock && cleanup_elt)
4284 thisblock->data.block.cleanups
4285 = tree_cons (decl_elt, cleanup_elt,
4286 thisblock->data.block.cleanups);
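/* Editor's example: an anonymous union as handled above; every element
   is given the union's own RTL, so all of them name one register or
   one stack slot.  */
#if 0
void
overlay (void)
{
  union { int i; float f; };	/* anonymous union (GNU C / C++) */
  i = 1;			/* i and f share the same storage */
  f = 2.0f;
}
#endif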
4290 /* Expand a list of cleanups LIST.
4291 Elements may be expressions or may be nested lists.
4293 If IN_FIXUP is nonzero, we are generating this cleanup for a fixup
4294 goto and handle protection regions specially in that case.
4296 If REACHABLE, we emit code, otherwise just inform the exception handling
4297 code about this finalization. */
4300 expand_cleanups (tree list, int in_fixup, int reachable)
4303 for (tail = list; tail; tail = TREE_CHAIN (tail))
4304 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4305 expand_cleanups (TREE_VALUE (tail), in_fixup, reachable);
4306 else
4307 {
4308 if (! in_fixup && using_eh_for_cleanups_p)
4309 expand_eh_region_end_cleanup (TREE_VALUE (tail));
4311 if (reachable && !CLEANUP_EH_ONLY (tail))
4313 /* Cleanups may be run multiple times. For example,
4314 when exiting a binding contour, we expand the
4315 cleanups associated with that contour. When a goto
4316 within that binding contour has a target outside that
4317 contour, it will expand all cleanups from its scope to
4318 the target. Though the cleanups are expanded multiple
4319 times, the control paths are non-overlapping so the
4320 cleanups will not be executed twice. */
4322 /* We may need to protect from outer cleanups. */
4323 if (in_fixup && using_eh_for_cleanups_p)
4325 expand_eh_region_start ();
4327 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4329 expand_eh_region_end_fixup (TREE_VALUE (tail));
4332 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4339 /* Mark the context we are emitting RTL for as a conditional
4340 context, so that any cleanup actions we register with
4341 expand_decl_cleanup will be properly conditionalized when those
4342 cleanup actions are later performed. Must be called before any
4343 expression (tree) is expanded that is within a conditional context. */
4346 start_cleanup_deferral (void)
4348 /* block_stack can be NULL if we are inside the parameter list. It is
4349 OK to do nothing, because cleanups aren't possible here. */
4350 if (block_stack)
4351 ++block_stack->data.block.conditional_code;
4354 /* Mark the end of a conditional region of code. Because cleanup
4355 deferrals may be nested, we may still be in a conditional region
4356 after we end the currently deferred cleanups; only after we end all
4357 deferred cleanups are we back in unconditional code. */
4360 end_cleanup_deferral (void)
4362 /* block_stack can be NULL if we are inside the parameter list. It is
4363 OK to do nothing, because cleanups aren't possible here. */
4364 if (block_stack)
4365 --block_stack->data.block.conditional_code;
4369 last_cleanup_this_contour (void)
4371 if (block_stack == 0)
4372 return 0;
4374 return block_stack->data.block.cleanups;
4377 /* Return 1 if there are any pending cleanups at this point.
4378 Check the current contour as well as contours that enclose
4379 the current contour. */
4382 any_pending_cleanups (void)
4384 struct nesting *block;
4386 if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
4387 return 0;
4389 if (block_stack->data.block.cleanups != NULL)
4390 return 1;
4392 if (block_stack->data.block.outer_cleanups == 0)
4393 return 0;
4395 for (block = block_stack->next; block; block = block->next)
4396 if (block->data.block.cleanups != 0)
4397 return 1;
4399 return 0;
4400 }
4402 /* Enter a case (Pascal) or switch (C) statement.
4403 Push a block onto case_stack and nesting_stack
4404 to accumulate the case-labels that are seen
4405 and to record the labels generated for the statement.
4407 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4408 Otherwise, this construct is transparent for `exit_something'.
4410 EXPR is the index-expression to be dispatched on.
4411 TYPE is its nominal type. We could simply convert EXPR to this type,
4412 but instead we take shortcuts. */
4415 expand_start_case (int exit_flag, tree expr, tree type,
4416 const char *printname)
4418 struct nesting *thiscase = ALLOC_NESTING ();
4420 /* Make an entry on case_stack for the case we are entering. */
4422 thiscase->desc = CASE_NESTING;
4423 thiscase->next = case_stack;
4424 thiscase->all = nesting_stack;
4425 thiscase->depth = ++nesting_depth;
4426 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4427 thiscase->data.case_stmt.case_list = 0;
4428 thiscase->data.case_stmt.index_expr = expr;
4429 thiscase->data.case_stmt.nominal_type = type;
4430 thiscase->data.case_stmt.default_label = 0;
4431 thiscase->data.case_stmt.printname = printname;
4432 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4433 case_stack = thiscase;
4434 nesting_stack = thiscase;
4436 do_pending_stack_adjust ();
4439 /* Make sure case_stmt.start points to something that won't
4440 need any transformation before expand_end_case. */
4441 if (GET_CODE (get_last_insn ()) != NOTE)
4442 emit_note (NOTE_INSN_DELETED);
4444 thiscase->data.case_stmt.start = get_last_insn ();
4446 start_cleanup_deferral ();
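/* Editor's sketch of the calling protocol a front end follows for
   "switch (x) { case 1: ...; default: ...; }".  X_TREE, CASE_1_LABEL
   and DEFAULT_LABEL are placeholder trees, not names used elsewhere
   in this file.  */
#if 0
  tree duplicate = NULL_TREE;

  expand_start_case (1, x_tree, TREE_TYPE (x_tree), "switch statement");
  /* ... while expanding the body, at each "case 1:" label: */
  pushcase (build_int_2 (1, 0), convert, case_1_label, &duplicate);
  /* ... at "default:", VALUE is a null pointer: */
  pushcase (NULL_TREE, convert, default_label, &duplicate);
  /* ... at the closing brace: */
  expand_end_case_type (x_tree, NULL_TREE);
#endif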
4449 /* Start a "dummy case statement" within which case labels are invalid
4450 and are not connected to any larger real case statement.
4451 This can be used if you don't want to let a case statement jump
4452 into the middle of certain kinds of constructs. */
4455 expand_start_case_dummy (void)
4457 struct nesting *thiscase = ALLOC_NESTING ();
4459 /* Make an entry on case_stack for the dummy. */
4461 thiscase->desc = CASE_NESTING;
4462 thiscase->next = case_stack;
4463 thiscase->all = nesting_stack;
4464 thiscase->depth = ++nesting_depth;
4465 thiscase->exit_label = 0;
4466 thiscase->data.case_stmt.case_list = 0;
4467 thiscase->data.case_stmt.start = 0;
4468 thiscase->data.case_stmt.nominal_type = 0;
4469 thiscase->data.case_stmt.default_label = 0;
4470 case_stack = thiscase;
4471 nesting_stack = thiscase;
4472 start_cleanup_deferral ();
4476 check_seenlabel (void)
4478 /* If this is the first label, warn if any insns have been emitted. */
4479 if (case_stack->data.case_stmt.line_number_status >= 0)
4483 restore_line_number_status
4484 (case_stack->data.case_stmt.line_number_status);
4485 case_stack->data.case_stmt.line_number_status = -1;
4487 for (insn = case_stack->data.case_stmt.start;
4488 insn;
4489 insn = NEXT_INSN (insn))
4491 if (GET_CODE (insn) == CODE_LABEL)
4492 break;
4493 if (GET_CODE (insn) != NOTE
4494 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4496 do
4497 insn = PREV_INSN (insn);
4498 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4500 /* If insn is zero, then there must have been a syntax error. */
4501 if (insn)
4502 {
4503 location_t locus;
4504 locus.file = NOTE_SOURCE_FILE (insn);
4505 locus.line = NOTE_LINE_NUMBER (insn);
4506 warning ("%Hunreachable code at beginning of %s", &locus,
4507 case_stack->data.case_stmt.printname);
4515 /* Accumulate one case or default label inside a case or switch statement.
4516 VALUE is the value of the case (a null pointer, for a default label).
4517 The function CONVERTER, when applied to arguments T and V,
4518 converts the value V to the type T.
4520 If not currently inside a case or switch statement, return 1 and do
4521 nothing. The caller will print a language-specific error message.
4522 If VALUE is a duplicate or overlaps, return 2 and do nothing
4523 except store the (first) duplicate node in *DUPLICATE.
4524 If VALUE is out of range, return 3 and do nothing.
4525 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4526 Return 0 on success.
4528 Extended to handle range statements. */
4531 pushcase (tree value, tree (*converter) (tree, tree), tree label,
4532 tree *duplicate)
4537 /* Fail if not inside a real case statement. */
4538 if (! (case_stack && case_stack->data.case_stmt.start))
4539 return 1;
4541 if (stack_block_stack
4542 && stack_block_stack->depth > case_stack->depth)
4543 return 5;
4545 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4546 nominal_type = case_stack->data.case_stmt.nominal_type;
4548 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4549 if (index_type == error_mark_node)
4550 return 0;
4552 /* Convert VALUE to the type in which the comparisons are nominally done. */
4553 if (value != 0)
4554 value = (*converter) (nominal_type, value);
4558 /* Fail if this value is out of range for the actual type of the index
4559 (which may be narrower than NOMINAL_TYPE). */
4560 if (value != 0
4561 && (TREE_CONSTANT_OVERFLOW (value)
4562 || ! int_fits_type_p (value, index_type)))
4563 return 3;
4565 return add_case_node (value, value, label, duplicate);
4568 /* Like pushcase but this case applies to all values between VALUE1 and
4569 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4570 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4571 starts at VALUE1 and ends at the highest value of the index type.
4572 If both are NULL, this case applies to all values.
4574 The return value is the same as that of pushcase but there is one
4575 additional error code: 4 means the specified range was empty. */
4578 pushcase_range (tree value1, tree value2, tree (*converter) (tree, tree),
4579 tree label, tree *duplicate)
4584 /* Fail if not inside a real case statement. */
4585 if (! (case_stack && case_stack->data.case_stmt.start))
4586 return 1;
4588 if (stack_block_stack
4589 && stack_block_stack->depth > case_stack->depth)
4590 return 5;
4592 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4593 nominal_type = case_stack->data.case_stmt.nominal_type;
4595 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4596 if (index_type == error_mark_node)
4597 return 0;
4601 /* Convert VALUEs to type in which the comparisons are nominally done
4602 and replace any unspecified value with the corresponding bound. */
4603 if (value1 == 0)
4604 value1 = TYPE_MIN_VALUE (index_type);
4605 if (value2 == 0)
4606 value2 = TYPE_MAX_VALUE (index_type);
4608 /* Fail if the range is empty. Do this before any conversion since
4609 we want to allow out-of-range empty ranges. */
4610 if (value2 != 0 && tree_int_cst_lt (value2, value1))
4611 return 4;
4613 /* If the max was unbounded, use the max of the nominal_type we are
4614 converting to. Do this after the < check above to suppress false
4615 positives. */
4616 if (value2 == 0)
4617 value2 = TYPE_MAX_VALUE (nominal_type);
4619 value1 = (*converter) (nominal_type, value1);
4620 value2 = (*converter) (nominal_type, value2);
4622 /* Fail if these values are out of range. */
4623 if (TREE_CONSTANT_OVERFLOW (value1)
4624 || ! int_fits_type_p (value1, index_type))
4625 return 3;
4627 if (TREE_CONSTANT_OVERFLOW (value2)
4628 || ! int_fits_type_p (value2, index_type))
4629 return 3;
4631 return add_case_node (value1, value2, label, duplicate);
4634 /* Do the actual insertion of a case label for pushcase and pushcase_range
4635 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4636 slowdown for large switch statements. */
4639 add_case_node (tree low, tree high, tree label, tree *duplicate)
4641 struct case_node *p, **q, *r;
4643 /* If there's no HIGH value, then this is not a case range; it's
4644 just a simple case label. But that's just a degenerate case
4645 range. */
4647 if (!high)
4648 high = low;
4649 /* Handle default labels specially. */
4650 if (!low)
4651 {
4652 if (case_stack->data.case_stmt.default_label != 0)
4653 {
4654 *duplicate = case_stack->data.case_stmt.default_label;
4655 return 2;
4656 }
4657 case_stack->data.case_stmt.default_label = label;
4658 expand_label (label);
4659 return 0;
4660 }
4662 q = &case_stack->data.case_stmt.case_list;
4669 /* Keep going past elements distinctly greater than HIGH. */
4670 if (tree_int_cst_lt (high, p->low))
4671 q = &p->left;
4673 /* or distinctly less than LOW. */
4674 else if (tree_int_cst_lt (p->high, low))
4675 q = &p->right;
4677 else
4678 {
4679 /* We have an overlap; this is an error. */
4680 *duplicate = p->code_label;
4681 return 2;
4682 }
4685 /* Add this label to the chain, and succeed. */
4687 r = ggc_alloc (sizeof (struct case_node));
4690 /* If the bounds are equal, turn this into the one-value case. */
4691 if (tree_int_cst_equal (low, high))
4696 r->code_label = label;
4697 expand_label (label);
4707 struct case_node *s;
4713 if (! (b = p->balance))
4714 /* Growth propagation from left side. */
4721 if ((p->left = s = r->right))
4730 if ((r->parent = s))
4738 case_stack->data.case_stmt.case_list = r;
4741 /* r->balance == +1 */
4746 struct case_node *t = r->right;
4748 if ((p->left = s = t->right))
4752 if ((r->right = s = t->left))
4766 if ((t->parent = s))
4774 case_stack->data.case_stmt.case_list = t;
4781 /* p->balance == +1; growth of left side balances the node. */
4791 if (! (b = p->balance))
4792 /* Growth propagation from right side. */
4800 if ((p->right = s = r->left))
4808 if ((r->parent = s))
4817 case_stack->data.case_stmt.case_list = r;
4821 /* r->balance == -1 */
4825 struct case_node *t = r->left;
4827 if ((p->right = s = t->left))
4832 if ((r->left = s = t->right))
4846 if ((t->parent = s))
4855 case_stack->data.case_stmt.case_list = t;
4861 /* p->balance == -1; growth of right side balances the node. */
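/* Editor's note: the rebalancing above is ordinary AVL insertion.  A
   minimal sketch of the single left rotation (the "growth from right
   side" case) on the same field names as struct case_node; the caller
   must still re-link R into P's old parent:  */
#if 0
static struct case_node *
rotate_left (struct case_node *p)
{
  struct case_node *r = p->right;	/* R becomes the subtree root.  */

  if ((p->right = r->left))
    r->left->parent = p;
  r->left = p;
  r->parent = p->parent;
  p->parent = r;
  return r;
}
#endif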
4874 /* Returns the number of possible values of TYPE.
4875 Returns -1 if the number is unknown, variable, or if the number does not
4876 fit in a HOST_WIDE_INT.
4877 Sets *SPARSENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4878 do not increase monotonically (there may be duplicates);
4879 to 1 if the values increase monotonically, but not always by 1;
4880 otherwise sets it to 0. */
4883 all_cases_count (tree type, int *sparseness)
4886 HOST_WIDE_INT count, minval, lastval;
4890 switch (TREE_CODE (type))
4897 count = 1 << BITS_PER_UNIT;
4902 if (TYPE_MAX_VALUE (type) != 0
4903 && 0 != (t = fold (build (MINUS_EXPR, type, TYPE_MAX_VALUE (type),
4904 TYPE_MIN_VALUE (type))))
4905 && 0 != (t = fold (build (PLUS_EXPR, type, t,
4906 convert (type, integer_zero_node))))
4907 && host_integerp (t, 1))
4908 count = tree_low_cst (t, 1);
4914 /* Don't waste time with enumeral types with huge values. */
4915 if (! host_integerp (TYPE_MIN_VALUE (type), 0)
4916 || TYPE_MAX_VALUE (type) == 0
4917 || ! host_integerp (TYPE_MAX_VALUE (type), 0))
4920 lastval = minval = tree_low_cst (TYPE_MIN_VALUE (type), 0);
4923 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4925 HOST_WIDE_INT thisval = tree_low_cst (TREE_VALUE (t), 0);
4927 if (*sparseness == 2 || thisval <= lastval)
4928 *sparseness = 2;
4929 else if (thisval != minval + count)
4930 *sparseness = 1;
4940 #define BITARRAY_TEST(ARRAY, INDEX) \
4941 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4942 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
4943 #define BITARRAY_SET(ARRAY, INDEX) \
4944 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4945 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
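/* Editor's usage sketch for the two macros above: a simple bit set
   packed into a char array.  */
#if 0
  unsigned char seen[2];	/* room for 2 * HOST_BITS_PER_CHAR offsets */

  memset (seen, 0, sizeof seen);
  BITARRAY_SET (seen, 9);	/* record that case offset 9 was handled */
  if (! BITARRAY_TEST (seen, 9))
    warning ("enumeration value not handled in switch");
#endif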
4947 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
4948 with the case values we have seen, assuming the case expression
4949 has the given TYPE.
4950 SPARSENESS is as determined by all_cases_count.
4952 The time needed is proportional to COUNT, unless
4953 SPARSENESS is 2, in which case quadratic time is needed. */
4956 mark_seen_cases (tree type, unsigned char *cases_seen, HOST_WIDE_INT count,
4959 tree next_node_to_try = NULL_TREE;
4960 HOST_WIDE_INT next_node_offset = 0;
4962 struct case_node *n, *root = case_stack->data.case_stmt.case_list;
4963 tree val = make_node (INTEGER_CST);
4965 TREE_TYPE (val) = type;
4969 else if (sparseness == 2)
4972 unsigned HOST_WIDE_INT xlo;
4974 /* This less efficient loop is only needed to handle
4975 duplicate case values (multiple enum constants
4976 with the same value). */
4977 TREE_TYPE (val) = TREE_TYPE (root->low);
4978 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
4979 t = TREE_CHAIN (t), xlo++)
4981 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
4982 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
4986 /* Keep going past elements distinctly greater than VAL. */
4987 if (tree_int_cst_lt (val, n->low))
4990 /* or distinctly less than VAL. */
4991 else if (tree_int_cst_lt (n->high, val))
4996 /* We have found a matching range. */
4997 BITARRAY_SET (cases_seen, xlo);
5007 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
5009 for (n = root; n; n = n->right)
5011 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
5012 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
5013 while (! tree_int_cst_lt (n->high, val))
5015 /* Calculate (into xlo) the "offset" of the integer (val).
5016 The element with lowest value has offset 0, the next smallest
5017 element has offset 1, etc. */
5019 unsigned HOST_WIDE_INT xlo;
5023 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
5025 /* The TYPE_VALUES will be in increasing order, so
5026 starting searching where we last ended. */
5027 t = next_node_to_try;
5028 xlo = next_node_offset;
5034 t = TYPE_VALUES (type);
5037 if (tree_int_cst_equal (val, TREE_VALUE (t)))
5039 next_node_to_try = TREE_CHAIN (t);
5040 next_node_offset = xlo + 1;
5045 if (t == next_node_to_try)
5054 t = TYPE_MIN_VALUE (type);
5056 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
5060 add_double (xlo, xhi,
5061 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5065 if (xhi == 0 && xlo < (unsigned HOST_WIDE_INT) count)
5066 BITARRAY_SET (cases_seen, xlo);
5068 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5070 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
5076 /* Given a switch statement with an expression that is an enumeration
5077 type, warn if any of the enumeration type's literals are not
5078 covered by the case expressions of the switch. Also, warn if there
5079 are any extra switch cases that are *not* elements of the
5084 At one stage this function would: ``If all enumeration literals
5085 were covered by the case expressions, turn one of the expressions
5086 into the default expression since it should not be possible to fall
5087 through such a switch.''
5089 That code has since been removed as: ``This optimization is
5090 disabled because it causes valid programs to fail. ANSI C does not
5091 guarantee that an expression with enum type will have a value that
5092 is the same as one of the enumeration literals.'' */
5095 check_for_full_enumeration_handling (tree type)
5097 struct case_node *n;
5100 /* True iff the selector type is a numbered set mode. */
5103 /* The number of possible selector values. */
5106 /* For each possible selector value, a one iff it has been matched
5107 by a case value alternative. */
5108 unsigned char *cases_seen;
5110 /* The allocated size of cases_seen, in chars. */
5111 HOST_WIDE_INT bytes_needed;
5113 size = all_cases_count (type, &sparseness);
5114 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
5116 if (size > 0 && size < 600000
5117 /* We deliberately use calloc here, not xcalloc, so that we can suppress
5118 this optimization if we don't have enough memory rather than
5119 aborting, as xmalloc would do. */
5120 && (cases_seen = really_call_calloc (bytes_needed, 1)) != NULL)
5123 tree v = TYPE_VALUES (type);
5125 /* The time complexity of this code is normally O(N), where
5126 N is the number of members in the enumerated type.
5127 However, if type is an ENUMERAL_TYPE whose values do not
5128 increase monotonically, O(N*log(N)) time may be needed. */
5130 mark_seen_cases (type, cases_seen, size, sparseness);
5132 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
5133 if (BITARRAY_TEST (cases_seen, i) == 0)
5134 warning ("enumeration value `%s' not handled in switch",
5135 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
5140 /* Now we go the other way around; we warn if there are case
5141 expressions that don't correspond to enumerators. This can
5142 occur since C and C++ don't enforce type-checking of
5143 assignments to enumeration variables. */
5145 if (case_stack->data.case_stmt.case_list
5146 && case_stack->data.case_stmt.case_list->left)
5147 case_stack->data.case_stmt.case_list
5148 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
5149 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
5151 for (chain = TYPE_VALUES (type);
5152 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
5153 chain = TREE_CHAIN (chain))
5154 ;
5156 if (!chain)
5158 if (TYPE_NAME (type) == 0)
5159 warning ("case value `%ld' not in enumerated type",
5160 (long) TREE_INT_CST_LOW (n->low));
5162 warning ("case value `%ld' not in enumerated type `%s'",
5163 (long) TREE_INT_CST_LOW (n->low),
5164 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5165 == IDENTIFIER_NODE)
5166 ? TYPE_NAME (type)
5167 : DECL_NAME (TYPE_NAME (type))));
5169 if (!tree_int_cst_equal (n->low, n->high))
5171 for (chain = TYPE_VALUES (type);
5172 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
5173 chain = TREE_CHAIN (chain))
5174 ;
5176 if (!chain)
5178 if (TYPE_NAME (type) == 0)
5179 warning ("case value `%ld' not in enumerated type",
5180 (long) TREE_INT_CST_LOW (n->high));
5182 warning ("case value `%ld' not in enumerated type `%s'",
5183 (long) TREE_INT_CST_LOW (n->high),
5184 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5185 == IDENTIFIER_NODE)
5186 ? TYPE_NAME (type)
5187 : DECL_NAME (TYPE_NAME (type))));
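/* Editor's example of the two warnings implemented above:  */
#if 0
enum e { A, B, C };

int
f (enum e x)
{
  switch (x)		/* -Wswitch: "enumeration value `C' not handled" */
    {
    case A: return 0;
    case B: return 1;
    case 7: return 2;	/* "case value `7' not in enumerated type `e'" */
    }
  return 3;
}
#endif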
5194 /* Maximum number of case bit tests. */
5195 #define MAX_CASE_BIT_TESTS 3
5197 /* By default, enable case bit tests on targets with ashlsi3. */
5198 #ifndef CASE_USE_BIT_TESTS
5199 #define CASE_USE_BIT_TESTS (ashl_optab->handlers[word_mode].insn_code \
5200 != CODE_FOR_nothing)
5201 #endif
5204 /* A case_bit_test represents a set of case nodes that may be
5205 selected from using a bit-wise comparison. HI and LO hold
5206 the integer to be tested against, LABEL contains the label
5207 to jump to upon success and BITS counts the number of case
5208 nodes handled by this test, typically the number of bits
5209 set in HI:LO. */
5211 struct case_bit_test
5212 {
5213 HOST_WIDE_INT hi;
5214 HOST_WIDE_INT lo;
5215 rtx label;
5216 int bits;
5217 };
5219 /* Determine whether "1 << x" is relatively cheap in word_mode. */
5222 bool lshift_cheap_p (void)
5224 static bool init = false;
5225 static bool cheap = true;
5227 if (!init)
5228 {
5229 rtx reg = gen_rtx_REG (word_mode, 10000);
5230 int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET);
5231 cheap = cost < COSTS_N_INSNS (3);
5232 init = true;
5233 }
5235 return cheap;
5236 }
5238 /* Comparison function for qsort to order bit tests by decreasing
5239 number of case nodes, i.e. the node with the most cases gets
5240 tested first. */
5243 int case_bit_test_cmp (const void *p1, const void *p2)
5245 const struct case_bit_test *d1 = p1;
5246 const struct case_bit_test *d2 = p2;
5248 return d2->bits - d1->bits;
5251 /* Expand a switch statement by a short sequence of bit-wise
5252 comparisons. "switch(x)" is effectively converted into
5253 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
5256 INDEX_EXPR is the value being switched on, which is of
5257 type INDEX_TYPE. MINVAL is the lowest case value of in
5258 the case nodes, of INDEX_TYPE type, and RANGE is highest
5259 value minus MINVAL, also of type INDEX_TYPE. NODES is
5260 the set of case nodes, and DEFAULT_LABEL is the label to
5261 branch to should none of the cases match.
5263 There *MUST* be MAX_CASE_BIT_TESTS or fewer unique case
5264 node targets. */
5267 emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
5268 tree range, case_node_ptr nodes, rtx default_label)
5270 struct case_bit_test test[MAX_CASE_BIT_TESTS];
5271 enum machine_mode mode;
5272 rtx expr, index, label;
5273 unsigned int i, j, lo, hi;
5274 struct case_node *n;
5278 for (n = nodes; n; n = n->right)
5280 label = label_rtx (n->code_label);
5281 for (i = 0; i < count; i++)
5282 if (same_case_target_p (label, test[i].label))
5283 break;
5287 if (count >= MAX_CASE_BIT_TESTS)
5288 abort ();
5291 test[i].label = label;
5298 lo = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5299 n->low, minval)), 1);
5300 hi = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5301 n->high, minval)), 1);
5302 for (j = lo; j <= hi; j++)
5303 if (j >= HOST_BITS_PER_WIDE_INT)
5304 test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_WIDE_INT);
5306 test[i].lo |= (HOST_WIDE_INT) 1 << j;
5309 qsort (test, count, sizeof (*test), case_bit_test_cmp);
5311 index_expr = fold (build (MINUS_EXPR, index_type,
5312 convert (index_type, index_expr),
5313 convert (index_type, minval)));
5314 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5316 index = protect_from_queue (index, 0);
5317 do_pending_stack_adjust ();
5319 mode = TYPE_MODE (index_type);
5320 expr = expand_expr (range, NULL_RTX, VOIDmode, 0);
5321 emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
5322 default_label);
5324 index = convert_to_mode (word_mode, index, 0);
5325 index = expand_binop (word_mode, ashl_optab, const1_rtx,
5326 index, NULL_RTX, 1, OPTAB_WIDEN);
5328 for (i = 0; i < count; i++)
5330 expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
5331 expr = expand_binop (word_mode, and_optab, index, expr,
5332 NULL_RTX, 1, OPTAB_WIDEN);
5333 emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
5334 word_mode, 1, test[i].label);
5337 emit_jump (default_label);
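/* Editor's worked example of the transformation above, written back in
   source form.  For
	switch (x) { case 1: case 4: case 9: f (); break;
		     case 2: case 3: g (); break;
		     default: d (); }
   MINVAL is 1 and RANGE is 8, so the emitted code behaves like:  */
#if 0
extern void f (void), g (void), d (void);

void
dispatch (int x)
{
  unsigned int w = (unsigned int) x - 1;	/* subtract MINVAL */

  if (w > 8)					/* RANGE check */
    d ();
  else if ((1u << w) & ((1u << 0) | (1u << 3) | (1u << 8)))
    f ();					/* cases {1,4,9} - MINVAL */
  else if ((1u << w) & ((1u << 1) | (1u << 2)))
    g ();					/* cases {2,3} - MINVAL */
  else
    d ();					/* unmatched bits: default */
}
#endif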
5340 #ifndef HAVE_casesi
5341 #define HAVE_casesi 0
5342 #endif
5344 #ifndef HAVE_tablejump
5345 #define HAVE_tablejump 0
5346 #endif
5348 /* Terminate a case (Pascal) or switch (C) statement
5349 in which ORIG_INDEX is the expression to be tested.
5350 If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
5351 type as given in the source before any compiler conversions.
5352 Generate the code to test it and jump to the right place. */
5355 expand_end_case_type (tree orig_index, tree orig_type)
5357 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
5358 rtx default_label = 0;
5359 struct case_node *n, *m;
5360 unsigned int count, uniq;
5366 rtx before_case, end, lab;
5367 struct nesting *thiscase = case_stack;
5368 tree index_expr, index_type;
5369 bool exit_done = false;
5372 /* Don't crash due to previous errors. */
5373 if (thiscase == NULL)
5374 return;
5376 index_expr = thiscase->data.case_stmt.index_expr;
5377 index_type = TREE_TYPE (index_expr);
5378 unsignedp = TREE_UNSIGNED (index_type);
5379 if (orig_type == NULL)
5380 orig_type = TREE_TYPE (orig_index);
5382 do_pending_stack_adjust ();
5384 /* This might get a spurious warning in the presence of a syntax error;
5385 it could be fixed by moving the call to check_seenlabel after the
5386 check for error_mark_node, and copying the code of check_seenlabel that
5387 deals with case_stack->data.case_stmt.line_number_status /
5388 restore_line_number_status in front of the call to end_cleanup_deferral;
5389 however, this might miss some useful warnings in the presence of
5390 non-syntax errors. */
5393 /* An ERROR_MARK occurs for various reasons including invalid data type. */
5394 if (index_type != error_mark_node)
5396 /* If the switch expression was an enumerated type, check that
5397 exactly all enumeration literals are covered by the cases.
5398 The check is made when -Wswitch was specified and there is no
5399 default case, or when -Wswitch-enum was specified. */
5400 if (((warn_switch && !thiscase->data.case_stmt.default_label)
5401 || warn_switch_enum)
5402 && TREE_CODE (orig_type) == ENUMERAL_TYPE
5403 && TREE_CODE (index_expr) != INTEGER_CST)
5404 check_for_full_enumeration_handling (orig_type);
5406 if (warn_switch_default && !thiscase->data.case_stmt.default_label)
5407 warning ("switch missing default case");
5409 /* If we don't have a default-label, create one here,
5410 after the body of the switch. */
5411 if (thiscase->data.case_stmt.default_label == 0)
5413 thiscase->data.case_stmt.default_label
5414 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5415 /* Share the exit label if possible. */
5416 if (thiscase->exit_label)
5418 SET_DECL_RTL (thiscase->data.case_stmt.default_label,
5419 thiscase->exit_label);
5420 exit_done = true;
5421 }
5422 expand_label (thiscase->data.case_stmt.default_label);
5424 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5426 before_case = get_last_insn ();
5428 if (thiscase->data.case_stmt.case_list
5429 && thiscase->data.case_stmt.case_list->left)
5430 thiscase->data.case_stmt.case_list
5431 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
5433 /* Simplify the case-list before we count it. */
5434 group_case_nodes (thiscase->data.case_stmt.case_list);
5435 strip_default_case_nodes (&thiscase->data.case_stmt.case_list,
5436 default_label);
5438 /* Get upper and lower bounds of case values.
5439 Also convert all the case values to the index expr's data type. */
5441 uniq = 0;
5442 count = 0;
5443 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5445 /* Check low and high label values are integers. */
5446 if (TREE_CODE (n->low) != INTEGER_CST)
5447 abort ();
5448 if (TREE_CODE (n->high) != INTEGER_CST)
5449 abort ();
5451 n->low = convert (index_type, n->low);
5452 n->high = convert (index_type, n->high);
5454 /* Count the elements and track the largest and smallest
5455 of them (treating them as signed even if they are not). */
5463 if (INT_CST_LT (n->low, minval))
5464 minval = n->low;
5465 if (INT_CST_LT (maxval, n->high))
5466 maxval = n->high;
5468 /* A range counts double, since it requires two compares. */
5469 if (! tree_int_cst_equal (n->low, n->high))
5470 count++;
5472 /* Count the number of unique case node targets. */
5473 uniq++;
5474 lab = label_rtx (n->code_label);
5475 for (m = thiscase->data.case_stmt.case_list; m != n; m = m->right)
5476 if (same_case_target_p (label_rtx (m->code_label), lab))
5477 {
5478 uniq--;
5479 break;
5480 }
5483 /* Compute span of values. */
5484 if (count != 0)
5485 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5487 end_cleanup_deferral ();
5489 if (count == 0)
5490 {
5491 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5492 emit_queue ();
5493 emit_jump (default_label);
5494 }
5496 /* Try implementing this switch statement by a short sequence of
5497 bit-wise comparisons. However, we let the binary-tree case
5498 below handle constant index expressions. */
5499 else if (CASE_USE_BIT_TESTS
5500 && ! TREE_CONSTANT (index_expr)
5501 && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
5502 && compare_tree_int (range, 0) > 0
5503 && lshift_cheap_p ()
5504 && ((uniq == 1 && count >= 3)
5505 || (uniq == 2 && count >= 5)
5506 || (uniq == 3 && count >= 6)))
5508 /* If all the case values fit in a word even without
5509 subtracting MINVAL, we can omit the subtraction. */
5511 if (compare_tree_int (minval, 0) > 0
5512 && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
5514 minval = integer_zero_node;
5515 range = maxval;
5517 emit_case_bit_tests (index_type, index_expr, minval, range,
5518 thiscase->data.case_stmt.case_list,
5519 default_label);
5522 /* If range of values is much bigger than number of values,
5523 make a sequence of conditional branches instead of a dispatch.
5524 If the switch-index is a constant, do it this way
5525 because we can optimize it. */
5527 else if (count < case_values_threshold ()
5528 || compare_tree_int (range,
5529 (optimize_size ? 3 : 10) * count) > 0
5530 /* RANGE may be signed, and really large ranges will show up
5531 as negative numbers. */
5532 || compare_tree_int (range, 0) < 0
5533 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5534 || flag_pic
5535 #endif
5536 || TREE_CONSTANT (index_expr)
5537 /* If neither casesi nor tablejump is available, we can
5538 only go this way. */
5539 || (!HAVE_casesi && !HAVE_tablejump))
5541 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5543 /* If the index is a short or char for which we do not have
5544 an insn to handle comparisons directly, convert it to
5545 a full integer now, rather than letting each comparison
5546 generate the conversion. */
5548 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5549 && ! have_insn_for (COMPARE, GET_MODE (index)))
5551 enum machine_mode wider_mode;
5552 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5553 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5554 if (have_insn_for (COMPARE, wider_mode))
5556 index = convert_to_mode (wider_mode, index, unsignedp);
5562 do_pending_stack_adjust ();
5564 index = protect_from_queue (index, 0);
5565 if (GET_CODE (index) == MEM)
5566 index = copy_to_reg (index);
5567 if (GET_CODE (index) == CONST_INT
5568 || TREE_CODE (index_expr) == INTEGER_CST)
5570 /* Make a tree node with the proper constant value
5571 if we don't already have one. */
5572 if (TREE_CODE (index_expr) != INTEGER_CST)
5573 {
5574 index_expr
5575 = build_int_2 (INTVAL (index),
5576 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5577 index_expr = convert (index_type, index_expr);
5580 /* For constant index expressions we need only
5581 issue an unconditional branch to the appropriate
5582 target code. The job of removing any unreachable
5583 code is left to the optimization phase if the
5584 "-O" option is specified. */
5585 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5586 if (! tree_int_cst_lt (index_expr, n->low)
5587 && ! tree_int_cst_lt (n->high, index_expr))
5588 break;
5590 if (n)
5591 emit_jump (label_rtx (n->code_label));
5592 else
5593 emit_jump (default_label);
5597 /* If the index expression is not constant we generate
5598 a binary decision tree to select the appropriate
5599 target code. This is done as follows:
5601 The list of cases is rearranged into a binary tree,
5602 nearly optimal assuming equal probability for each case.
5604 The tree is transformed into RTL, eliminating
5605 redundant test conditions at the same time.
5607 If program flow could reach the end of the
5608 decision tree an unconditional jump to the
5609 default code is emitted. */
5611 use_cost_table
5612 = (TREE_CODE (orig_type) != ENUMERAL_TYPE
5613 && estimate_case_costs (thiscase->data.case_stmt.case_list));
5614 balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
5615 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5616 default_label, index_type);
5617 emit_jump_if_reachable (default_label);
5622 table_label = gen_label_rtx ();
5623 if (! try_casesi (index_type, index_expr, minval, range,
5624 table_label, default_label))
5626 index_type = thiscase->data.case_stmt.nominal_type;
5628 /* Index jumptables from zero for suitable values of
5629 minval to avoid a subtraction. */
5630 if (optimize_size
5631 && compare_tree_int (minval, 0) > 0
5632 && compare_tree_int (minval, 3) < 0)
5633 {
5634 minval = integer_zero_node;
5635 range = maxval;
5636 }
5638 if (! try_tablejump (index_type, index_expr, minval, range,
5639 table_label, default_label))
5640 abort ();
5643 /* Get table of labels to jump to, in order of case index. */
5645 ncases = tree_low_cst (range, 0) + 1;
5646 labelvec = alloca (ncases * sizeof (rtx));
5647 memset (labelvec, 0, ncases * sizeof (rtx));
5649 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5651 /* Compute the low and high bounds relative to the minimum
5652 value since that should fit in a HOST_WIDE_INT while the
5653 actual values may not. */
5654 HOST_WIDE_INT i_low
5655 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5656 n->low, minval)), 1);
5657 HOST_WIDE_INT i_high
5658 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5659 n->high, minval)), 1);
5662 for (i = i_low; i <= i_high; i++)
5663 labelvec[i]
5664 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
5667 /* Fill in the gaps with the default. */
5668 for (i = 0; i < ncases; i++)
5669 if (labelvec[i] == 0)
5670 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
5672 /* Output the table. */
5673 emit_label (table_label);
5675 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
5676 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5677 gen_rtx_LABEL_REF (Pmode, table_label),
5678 gen_rtvec_v (ncases, labelvec),
5679 const0_rtx, const0_rtx));
5681 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5682 gen_rtvec_v (ncases, labelvec)));
5684 /* If the case insn drops through the table,
5685 after the table we must jump to the default-label.
5686 Otherwise record no drop-through after the table. */
5687 #ifdef CASE_DROPS_THROUGH
5688 emit_jump (default_label);
5689 #else
5690 emit_barrier ();
5691 #endif
5694 before_case = NEXT_INSN (before_case);
5695 end = get_last_insn ();
5696 if (squeeze_notes (&before_case, &end))
5697 abort ();
5698 reorder_insns (before_case, end,
5699 thiscase->data.case_stmt.start);
5702 end_cleanup_deferral ();
5704 if (thiscase->exit_label && !exit_done)
5705 emit_label (thiscase->exit_label);
5707 POPSTACK (case_stack);
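/* Editor's distillation of the strategy choice made above.  The enum
   and function are illustrative only; the thresholds mirror the
   conditions tested in expand_end_case_type (the bit-test arm also
   requires lshift_cheap_p () and a non-constant index).  */
#if 0
enum switch_strategy { SS_EMPTY, SS_BIT_TESTS, SS_TREE, SS_TABLE };

static enum switch_strategy
choose_switch_strategy (unsigned int count, unsigned int uniq,
			HOST_WIDE_INT range, int have_table)
{
  if (count == 0)
    return SS_EMPTY;		/* evaluate INDEX_EXPR, jump to default */
  if (range > 0
      && range < GET_MODE_BITSIZE (word_mode)
      && ((uniq == 1 && count >= 3)
	  || (uniq == 2 && count >= 5)
	  || (uniq == 3 && count >= 6)))
    return SS_BIT_TESTS;	/* emit_case_bit_tests */
  if (count < case_values_threshold ()
      || range > (optimize_size ? 3 : 10) * (HOST_WIDE_INT) count
      || !have_table)
    return SS_TREE;		/* balanced compare-and-branch tree */
  return SS_TABLE;		/* casesi/tablejump dispatch table */
}
#endif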
5712 /* Convert the tree NODE into a list linked by the right field, with the left
5713 field zeroed. RIGHT is used for recursion; it is a list to be placed
5714 rightmost in the resulting list. */
5716 static struct case_node *
5717 case_tree2list (struct case_node *node, struct case_node *right)
5719 struct case_node *left;
5721 if (node->right)
5722 right = case_tree2list (node->right, right);
5724 node->right = right;
5725 if ((left = node->left))
5726 {
5727 node->left = 0;
5728 return case_tree2list (left, node);
5729 }
5731 return right;
5732 }
5734 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5737 do_jump_if_equal (rtx op1, rtx op2, rtx label, int unsignedp)
5739 if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
5740 {
5741 if (INTVAL (op1) == INTVAL (op2))
5742 emit_jump (label);
5743 }
5744 else
5745 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
5746 (GET_MODE (op1) == VOIDmode
5747 ? GET_MODE (op2) : GET_MODE (op1)),
5748 unsignedp, label);
5749 }
5751 /* Not all case values are encountered equally. This function
5752 uses a heuristic to weight case labels, in cases where that
5753 looks like a reasonable thing to do.
5755 Right now, all we try to guess is text, and we establish the
5758 chars above space: 16
5759 digits: 16
5760 default: 12
5761 space, punct: 8
5762 tab: 4
5763 newline: 2
5764 other "\" chars: 1
5765 remaining chars: 0
5767 If we find any cases in the switch that are not either -1 or in the range
5768 of valid ASCII characters, or are control characters other than those
5769 commonly used with "\", don't treat this switch scanning text.
5771 Return 1 if these nodes are suitable for cost estimation, otherwise
5772 return 0. */
5775 estimate_case_costs (case_node_ptr node)
5777 tree min_ascii = integer_minus_one_node;
5778 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5782 /* If we haven't already made the cost table, make it now. Note that the
5783 lower bound of the table is -1, not zero. */
5785 if (! cost_table_initialized)
5787 cost_table_initialized = 1;
5789 for (i = 0; i < 128; i++)
5792 COST_TABLE (i) = 16;
5793 else if (ISPUNCT (i))
5794 COST_TABLE (i) = 8;
5795 else if (ISCNTRL (i))
5796 COST_TABLE (i) = -1;
5799 COST_TABLE (' ') = 8;
5800 COST_TABLE ('\t') = 4;
5801 COST_TABLE ('\0') = 4;
5802 COST_TABLE ('\n') = 2;
5803 COST_TABLE ('\f') = 1;
5804 COST_TABLE ('\v') = 1;
5805 COST_TABLE ('\b') = 1;
5808 /* See if all the case expressions look like text. It is text if the
5809 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5810 as signed arithmetic since we don't want to ever access cost_table with a
5811 value less than -1. Also check that none of the constants in a range
5812 are strange control characters. */
5814 for (n = node; n; n = n->right)
5816 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5817 return 0;
5819 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
5820 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
5821 if (COST_TABLE (i) < 0)
5822 return 0;
5825 /* All interesting values are within the range of interesting
5826 ASCII characters. */
5828 return 1;
5829 }
5830 /* Determine whether two case labels branch to the same target. */
5833 same_case_target_p (rtx l1, rtx l2)
5840 i1 = next_real_insn (l1);
5841 i2 = next_real_insn (l2);
5845 if (i1 && simplejump_p (i1))
5847 l1 = XEXP (SET_SRC (PATTERN (i1)), 0);
5850 if (i2 && simplejump_p (i2))
5852 l2 = XEXP (SET_SRC (PATTERN (i2)), 0);
5853 }
5855 return l1 == l2;
5856 }
5857 /* Delete nodes that branch to the default label from a list of
5858 case nodes. E.g. case 5: default: becomes just default: */
5861 strip_default_case_nodes (case_node_ptr *prev, rtx deflab)
5868 if (same_case_target_p (label_rtx (ptr->code_label), deflab))
5875 /* Scan an ordered list of case nodes
5876 combining those with consecutive values or ranges.
5878 E.g. three separate entries 1: 2: 3: become one entry 1..3: */
5881 group_case_nodes (case_node_ptr head)
5883 case_node_ptr node = head;
5887 rtx lab = label_rtx (node->code_label);
5888 case_node_ptr np = node;
5890 /* Try to group the successors of NODE with NODE. */
5891 while (((np = np->right) != 0)
5892 /* Do they jump to the same place? */
5893 && same_case_target_p (label_rtx (np->code_label), lab)
5894 /* Are their ranges consecutive? */
5895 && tree_int_cst_equal (np->low,
5896 fold (build (PLUS_EXPR,
5897 TREE_TYPE (node->high),
5898 node->high,
5899 integer_one_node)))
5900 /* An overflow is not consecutive. */
5901 && tree_int_cst_lt (node->high,
5902 fold (build (PLUS_EXPR,
5903 TREE_TYPE (node->high),
5905 integer_one_node))))
5907 node->high = np->high;
5909 /* NP is the first node after NODE which can't be grouped with it.
5910 Delete the nodes in between, and move on to that node. */
5911 node->right = np;
5912 node = np;
5916 /* Take an ordered list of case nodes
5917 and transform them into a near optimal binary tree,
5918 on the assumption that any target code selection value is as
5919 likely as any other.
5921 The transformation is performed by splitting the ordered
5922 list into two equal sections plus a pivot. The parts are
5923 then attached to the pivot as left and right branches. Each
5924 branch is then transformed recursively. */
5927 balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
5940 /* Count the number of entries on branch. Also count the ranges. */
5944 if (!tree_int_cst_equal (np->low, np->high))
5948 cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
5952 cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
5960 /* Split this list if it is long enough for that to help. */
5965 /* Find the place in the list that bisects the list's total cost;
5966 here I gets half the total cost. */
5971 /* Skip nodes while their cost does not reach that amount. */
5972 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5973 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
5974 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
5975 if (i <= 0)
5976 break;
5977 npp = &(*npp)->right;
5982 /* Leave this branch lopsided, but optimize left-hand
5983 side and fill in `parent' fields for right-hand side. */
5985 np->parent = parent;
5986 balance_case_nodes (&np->left, np);
5987 for (; np->right; np = np->right)
5988 np->right->parent = np;
5992 /* If there are just three nodes, split at the middle one. */
5994 npp = &(*npp)->right;
5997 /* Find the place in the list that bisects the list's total cost,
5998 where ranges count as 2.
5999 Here I gets half the total cost. */
6000 i = (i + ranges + 1) / 2;
6003 /* Skip nodes while their cost does not reach that amount. */
6004 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
6005 i--;
6006 i--;
6007 if (i <= 0)
6008 break;
6009 npp = &(*npp)->right;
6014 np->parent = parent;
6017 /* Optimize each of the two split parts. */
6018 balance_case_nodes (&np->left, np);
6019 balance_case_nodes (&np->right, np);
6023 /* Else leave this branch as one level,
6024 but fill in `parent' fields. */
6026 np->parent = parent;
6027 for (; np->right; np = np->right)
6028 np->right->parent = np;
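/* Editor's illustration: an ordered chain of seven equally likely
   one-value cases is split at the pivot and each half is balanced
   recursively:

	1-2-3-4-5-6-7   becomes         4
				      /   \
				     2     6
				    / \   / \
				   1   3 5   7			*/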
6033 /* Search the parent sections of the case node tree
6034 to see if a test for the lower bound of NODE would be redundant.
6035 INDEX_TYPE is the type of the index expression.
6037 The instructions to generate the case decision tree are
6038 output in the same order as nodes are processed, so it is
6039 known that if a parent node checks the range of the current
6040 node minus one, the current node is bounded at its lower
6041 span. Thus the test would be redundant. */
6044 node_has_low_bound (case_node_ptr node, tree index_type)
6047 case_node_ptr pnode;
6049 /* If the lower bound of this node is the lowest value in the index type,
6050 we need not test it. */
6052 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
6053 return 1;
6055 /* If this node has a left branch, the value at the left must be less
6056 than that at this node, so it cannot be bounded at the bottom and
6057 we need not bother testing any further. */
6059 if (node->left)
6060 return 0;
6062 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
6063 node->low, integer_one_node));
6065 /* If the subtraction above overflowed, we can't verify anything.
6066 Otherwise, look for a parent that tests our value - 1. */
6068 if (! tree_int_cst_lt (low_minus_one, node->low))
6069 return 0;
6071 for (pnode = node->parent; pnode; pnode = pnode->parent)
6072 if (tree_int_cst_equal (low_minus_one, pnode->high))
6073 return 1;
6075 return 0;
6076 }
6078 /* Search the parent sections of the case node tree
6079 to see if a test for the upper bound of NODE would be redundant.
6080 INDEX_TYPE is the type of the index expression.
6082 The instructions to generate the case decision tree are
6083 output in the same order as nodes are processed, so it is
6084 known that if a parent node checks the range of the current
6085 node plus one, the current node is bounded at its upper
6086 span. Thus the test would be redundant. */
6089 node_has_high_bound (case_node_ptr node, tree index_type)
6092 case_node_ptr pnode;
6094 /* If there is no upper bound, obviously no test is needed. */
6096 if (TYPE_MAX_VALUE (index_type) == NULL)
6097 return 1;
6099 /* If the upper bound of this node is the highest value in the type
6100 of the index expression, we need not test against it. */
6102 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
6103 return 1;
6105 /* If this node has a right branch, the value at the right must be greater
6106 than that at this node, so it cannot be bounded at the top and
6107 we need not bother testing any further. */
6109 if (node->right)
6110 return 0;
6112 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
6113 node->high, integer_one_node));
6115 /* If the addition above overflowed, we can't verify anything.
6116 Otherwise, look for a parent that tests our value + 1. */
6118 if (! tree_int_cst_lt (node->high, high_plus_one))
6119 return 0;
6121 for (pnode = node->parent; pnode; pnode = pnode->parent)
6122 if (tree_int_cst_equal (high_plus_one, pnode->low))
6123 return 1;
6125 return 0;
6126 }
6128 /* Search the parent sections of the
6129 case node tree to see if both tests for the upper and lower
6130 bounds of NODE would be redundant. */
6133 node_is_bounded (case_node_ptr node, tree index_type)
6135 return (node_has_low_bound (node, index_type)
6136 && node_has_high_bound (node, index_type));
6139 /* Emit an unconditional jump to LABEL unless it would be dead code. */
6142 emit_jump_if_reachable (rtx label)
6144 if (GET_CODE (get_last_insn ()) != BARRIER)
6145 emit_jump (label);
6146 }
6148 /* Emit step-by-step code to select a case for the value of INDEX.
6149 The thus generated decision tree follows the form of the
6150 case-node binary tree NODE, whose nodes represent test conditions.
6151 INDEX_TYPE is the type of the index of the switch.
6153 Care is taken to prune redundant tests from the decision tree
6154 by detecting any boundary conditions already checked by
6155 emitted rtx. (See node_has_high_bound, node_has_low_bound
6156 and node_is_bounded, above.)
6158 Where the test conditions can be shown to be redundant we emit
6159 an unconditional jump to the target code. As a further
6160 optimization, the subordinates of a tree node are examined to
6161 check for bounded nodes. In this case conditional and/or
6162 unconditional jumps as a result of the boundary check for the
6163 current node are arranged to target the subordinates' associated
6164 code for out-of-bound conditions on the current node.
6166 We can assume that when control reaches the code generated here,
6167 the index value has already been compared with the parents
6168 of this node, and determined to be on the same side of each parent
6169 as this node is. Thus, if this node tests for the value 51,
6170 and a parent tested for 52, we don't need to consider
6171 the possibility of a value greater than 51. If another parent
6172 tests for the value 50, then this node need not test anything. */
6175 emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
6178 /* If INDEX has an unsigned type, we must make unsigned branches. */
6179 int unsignedp = TREE_UNSIGNED (index_type);
6180 enum machine_mode mode = GET_MODE (index);
6181 enum machine_mode imode = TYPE_MODE (index_type);
6183 /* See if our parents have already tested everything for us.
6184 If they have, emit an unconditional jump for this node. */
6185 if (node_is_bounded (node, index_type))
6186 emit_jump (label_rtx (node->code_label));
6188 else if (tree_int_cst_equal (node->low, node->high))
6190 /* Node is single valued. First see if the index expression matches
6191 this node and then check our children, if any. */
6193 do_jump_if_equal (index,
6194 convert_modes (mode, imode,
6195 expand_expr (node->low, NULL_RTX,
6198 label_rtx (node->code_label), unsignedp);
6200 if (node->right != 0 && node->left != 0)
6202 /* This node has children on both sides.
6203 Dispatch to one side or the other
6204 by comparing the index value with this node's value.
6205 If one subtree is bounded, check that one first,
6206 so we can avoid real branches in the tree. */
6208 if (node_is_bounded (node->right, index_type))
6210 emit_cmp_and_jump_insns (index,
6213 expand_expr (node->high, NULL_RTX,
6216 GT, NULL_RTX, mode, unsignedp,
6217 label_rtx (node->right->code_label));
6218 emit_case_nodes (index, node->left, default_label, index_type);
6221 else if (node_is_bounded (node->left, index_type))
6223 emit_cmp_and_jump_insns (index,
6226 expand_expr (node->high, NULL_RTX,
6229 LT, NULL_RTX, mode, unsignedp,
6230 label_rtx (node->left->code_label));
6231 emit_case_nodes (index, node->right, default_label, index_type);
	  else
	    {
	      /* Neither node is bounded.  First distinguish the two sides;
		 then emit the code for one side at a time.  */

	      tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

	      /* See if the value is on the right.  */
	      emit_cmp_and_jump_insns (index,
				       convert_modes (mode, imode,
						      expand_expr (node->high, NULL_RTX,
								   VOIDmode, 0),
						      unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       label_rtx (test_label));

	      /* Value must be on the left.
		 Handle the left-hand subtree.  */
	      emit_case_nodes (index, node->left, default_label, index_type);

	      /* If the left-hand subtree does nothing, go to default.  */
	      emit_jump_if_reachable (default_label);

	      /* Code branches here for the right-hand subtree.  */
	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}
      else if (node->right != 0 && node->left == 0)
	{
	  /* Here we have a right child but no left, so we issue a
	     conditional branch to default and process the right child.

	     Omit the conditional branch to default if it would avoid
	     only one right child; it costs too much space to save so
	     little time.  */
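
	  /* E.g. (hypothetical): if this node is `case 5' and its right
	     child is the lone leaf `case 7', we skip the `i < 5' branch
	     to default and just emit `if (i == 7) goto L7;' below.  */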
	  if (node->right->right || node->right->left
	      || !tree_int_cst_equal (node->right->low, node->right->high))
	    {
	      if (!node_has_low_bound (node, index_type))
		emit_cmp_and_jump_insns (index,
					 convert_modes (mode, imode,
							expand_expr (node->high, NULL_RTX,
								     VOIDmode, 0),
							unsignedp),
					 LT, NULL_RTX, mode, unsignedp,
					 default_label);

	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	  else
	    /* We cannot process node->right normally
	       since we haven't ruled out the numbers less than
	       this node's value.  So handle node->right explicitly.  */
	    do_jump_if_equal (index,
			      convert_modes (mode, imode,
					     expand_expr (node->right->low, NULL_RTX,
							  VOIDmode, 0),
					     unsignedp),
			      label_rtx (node->right->code_label), unsignedp);
	}
      else if (node->right == 0 && node->left != 0)
	{
	  /* Just one subtree, on the left.  */
	  if (node->left->left || node->left->right
	      || !tree_int_cst_equal (node->left->low, node->left->high))
	    {
	      if (!node_has_high_bound (node, index_type))
		emit_cmp_and_jump_insns (index,
					 convert_modes (mode, imode,
							expand_expr (node->high, NULL_RTX,
								     VOIDmode, 0),
							unsignedp),
					 GT, NULL_RTX, mode, unsignedp,
					 default_label);

	      emit_case_nodes (index, node->left, default_label, index_type);
	    }
	  else
	    /* We cannot process node->left normally
	       since we haven't ruled out the numbers greater than
	       this node's value.  So handle node->left explicitly.  */
	    do_jump_if_equal (index,
			      convert_modes (mode, imode,
					     expand_expr (node->left->low, NULL_RTX,
							  VOIDmode, 0),
					     unsignedp),
			      label_rtx (node->left->code_label), unsignedp);
	}
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
	 value, except that we do not start by testing whether this node
	 is the one to branch to.  */
      if (node->right != 0 && node->left != 0)
	{
	  /* Node has subtrees on both sides.
	     If the right-hand subtree is bounded,
	     test for it first, since we can go straight there.
	     Otherwise, we need to make a branch in the control structure,
	     then handle the two subtrees.  */
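
	  /* For a hypothetical root `case 10 ... 20' whose right-hand
	     subtree is not bounded, the code emitted below is roughly

		if (i > 20) goto Ltest;
		if (i >= 10) goto Lnode;
		... left subtree tests ...
		goto Ldefault;
	     Ltest:
		... right subtree tests ...

	     (label names are illustrative only).  */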
	  tree test_label = 0;

	  if (node_is_bounded (node->right, index_type))
	    /* Right hand node is fully bounded so we can eliminate any
	       testing and branch directly to the target code.  */
	    emit_cmp_and_jump_insns (index,
				     convert_modes (mode, imode,
						    expand_expr (node->high, NULL_RTX,
								 VOIDmode, 0),
						    unsignedp),
				     GT, NULL_RTX, mode, unsignedp,
				     label_rtx (node->right->code_label));
	  else
	    {
	      /* Right hand node requires testing.
		 Branch to a label where we will handle it later.  */

	      test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
	      emit_cmp_and_jump_insns (index,
				       convert_modes (mode, imode,
						      expand_expr (node->high, NULL_RTX,
								   VOIDmode, 0),
						      unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       label_rtx (test_label));
	    }
	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_and_jump_insns (index,
				   convert_modes (mode, imode,
						  expand_expr (node->low, NULL_RTX,
							       VOIDmode, 0),
						  unsignedp),
				   GE, NULL_RTX, mode, unsignedp,
				   label_rtx (node->code_label));

	  /* Handle the left-hand subtree.  */
	  emit_case_nodes (index, node->left, default_label, index_type);

	  /* If the right-hand node had to be handled later, do that now.  */
	  if (test_label)
	    {
	      /* If the left-hand subtree fell through,
		 don't let it fall into the right-hand subtree.  */
	      emit_jump_if_reachable (default_label);

	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}
      else if (node->right != 0 && node->left == 0)
	{
	  /* Deal with values to the left of this node,
	     if they are possible.  */
	  if (!node_has_low_bound (node, index_type))
	    emit_cmp_and_jump_insns (index,
				     convert_modes (mode, imode,
						    expand_expr (node->low, NULL_RTX,
								 VOIDmode, 0),
						    unsignedp),
				     LT, NULL_RTX, mode, unsignedp,
				     default_label);

	  /* Value belongs to this node or to the right-hand subtree.  */

	  emit_cmp_and_jump_insns (index,
				   convert_modes (mode, imode,
						  expand_expr (node->high, NULL_RTX,
							       VOIDmode, 0),
						  unsignedp),
				   LE, NULL_RTX, mode, unsignedp,
				   label_rtx (node->code_label));

	  emit_case_nodes (index, node->right, default_label, index_type);
	}
      else if (node->right == 0 && node->left != 0)
	{
	  /* Deal with values to the right of this node,
	     if they are possible.  */
	  if (!node_has_high_bound (node, index_type))
	    emit_cmp_and_jump_insns (index,
				     convert_modes (mode, imode,
						    expand_expr (node->high, NULL_RTX,
								 VOIDmode, 0),
						    unsignedp),
				     GT, NULL_RTX, mode, unsignedp,
				     default_label);

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_and_jump_insns (index,
				   convert_modes (mode, imode,
						  expand_expr (node->low, NULL_RTX,
							       VOIDmode, 0),
						  unsignedp),
				   GE, NULL_RTX, mode, unsignedp,
				   label_rtx (node->code_label));

	  emit_case_nodes (index, node->left, default_label, index_type);
	}
      else
	{
	  /* Node has no children so we check low and high bounds to remove
	     redundant tests.  Only one of the bounds can exist,
	     since otherwise this node is bounded--a case tested already.  */
	  int high_bound = node_has_high_bound (node, index_type);
	  int low_bound = node_has_low_bound (node, index_type);
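
	  /* E.g. (hypothetical): for a leaf `case 10 ... 20' whose
	     parents already guarantee i >= 10, only the test
	     `if (i > 20) goto Ldefault;' remains to be emitted.  */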
	  if (!high_bound && low_bound)
	    emit_cmp_and_jump_insns (index,
				     convert_modes (mode, imode,
						    expand_expr (node->high, NULL_RTX,
								 VOIDmode, 0),
						    unsignedp),
				     GT, NULL_RTX, mode, unsignedp,
				     default_label);

	  else if (!low_bound && high_bound)
	    emit_cmp_and_jump_insns (index,
				     convert_modes (mode, imode,
						    expand_expr (node->low, NULL_RTX,
								 VOIDmode, 0),
						    unsignedp),
				     LT, NULL_RTX, mode, unsignedp,
				     default_label);
	  else if (!low_bound && !high_bound)
	    {
	      /* Widen LOW and HIGH to the same width as INDEX.  */
	      tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
	      tree low = build1 (CONVERT_EXPR, type, node->low);
	      tree high = build1 (CONVERT_EXPR, type, node->high);
	      rtx low_rtx, new_index, new_bound;

	      /* Instead of doing two branches, emit one unsigned branch for
		 (index-low) > (high-low).  */
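
	      /* Concretely (a hypothetical range, not from this code):
		 for `case 3 ... 7' the two signed tests i >= 3 and
		 i <= 7 collapse into the single unsigned test

			(unsigned) (i - 3) > 4  -->  goto default

		 since any i below 3 wraps around to a huge unsigned
		 value and fails the comparison as well.  */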
	      low_rtx = expand_expr (low, NULL_RTX, mode, 0);
	      new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
					       NULL_RTX, unsignedp, OPTAB_WIDEN);
	      new_bound = expand_expr (fold (build (MINUS_EXPR, type, high, low)),
				       NULL_RTX, mode, 0);
	      emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
				       mode, 1, default_label);
	    }

	  emit_jump (label_rtx (node->code_label));
	}
    }
}

#include "gt-stmt.h"