/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 92, 93, 1994 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */
/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
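
/* As an illustrative sketch (not code from this file), a front end
   expanding `if (COND) THEN-STMT; else ELSE-STMT;' would make calls
   in roughly this order:

	expand_start_cond (cond, 0);
	... expand THEN-STMT ...
	expand_start_else ();
	... expand ELSE-STMT ...
	expand_end_cond ();
*/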
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "hard-reg-set.h"
#include "bc-typecd.h"
#include "bc-opcode.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;
/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Each time we expand the end of a binding contour (in `expand_end_bindings')
   and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
   This is used by the `remember_end_note' function to record the endpoint
   of each generated block in its associated BLOCK node.  */

static rtx last_block_end_note;

/* Number of binding contours started so far in this function.  */

int block_start_count;
/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
extern rtx save_expr_regs;
/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
extern int frame_offset;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
extern rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
extern rtx tail_recursion_reentry;
/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

extern rtx arg_pointer_save_area;
/* Chain of all RTL_EXPRs that have insns in them.  */
extern tree rtl_expr_chain;

#if 0  /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   A chain of case nodes is initially maintained via the RIGHT fields
   in the nodes.  Nodes with higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
  struct case_node *left;   /* Left son in binary tree */
  struct case_node *right;  /* Right son in binary tree; also node chain */
  struct case_node *parent; /* Parent of node in binary tree */
  tree low;                 /* Lowest index value for this label */
  tree high;                /* Highest index value for this label */
  tree code_label;          /* Label to jump to when node matches */

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
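
/* As an illustration (not code from this file): the labels of
   `case 1: ... case 5: ... case 9: ...' are first chained 1 -> 5 -> 9
   through the RIGHT fields; if compare and jump insns are generated
   rather than a branch table, balance_case_nodes (below) rearranges
   them into a tree with 5 as the PARENT and 1 and 9 as its LEFT and
   RIGHT sons.  */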
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
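
/* For instance (an illustration only), while expanding

	while (a) { if (b) break; ... }

   nesting_stack holds the entry for the `if' (innermost), chained
   through `all' to the entry for the `while'; the same two objects
   are also on cond_stack and loop_stack respectively.  */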
  struct nesting *next;

  /* For conds (if-then and if-then-else statements).  */

      /* Label for the end of the if construct.
         There is none if EXITFLAG was not set
         and no `else' has been seen yet.  */
      /* Label for the end of this alternative.
         This may be the end of the if or the next else/elseif.  */

      /* Label at the top of the loop; place to loop back to.  */
      /* Label at the end of the whole construct.  */
      /* Label for `continue' statement to jump to;
         this is in front of the stepper of the loop.  */

  /* For variable binding contours.  */

      /* Sequence number of this binding contour within the function,
         in order of entry.  */
      int block_start_count;
      /* Nonzero => value to restore stack to on exit.  Complemented by
         bc_stack_level (see below) when generating bytecodes.  */
      /* The NOTE that starts this contour.
         Used by expand_goto to check whether the destination
         is within each contour or not.  */
      /* Innermost containing binding contour that has a stack level.  */
      struct nesting *innermost_stack_block;
      /* List of cleanups to be run on exit from this contour.
         This is a list of expressions to be evaluated.
         The TREE_PURPOSE of each link is the ..._DECL node
         which the cleanup pertains to.  */
      /* List of cleanup-lists of blocks containing this block,
         as they were at the locus where this block appears.
         There is an element for each containing block,
         ordered innermost containing block first.
         The tail of this list can be 0 (was empty_cleanup_list),
         if all remaining elements would be empty lists.
         The element's TREE_VALUE is the cleanup-list of that block,
         which may be null.  */
      /* Chain of labels defined inside this binding contour.
         For contours that have stack levels or cleanups.  */
      struct label_chain *label_chain;
      /* Number of function calls seen, as of start of this block.  */
      int function_call_count;
      /* Bytecode specific: stack level to restore stack to on exit.  */

  /* For switch (C) or case (Pascal) statements,
     and also for dummies (see `expand_start_case_dummy').  */

      /* The insn after which the case dispatch should finally
         be emitted.  Zero for a dummy.  */
      /* For bytecodes, the case table is in-lined right in the code.
         A label is needed for skipping over this block.  It is only
         used when generating bytecodes.  */
      /* A list of case labels, kept in ascending order by value
         as the list is built.
         During expand_end_case, this list may be rearranged into a
         nearly balanced binary tree.  */
      struct case_node *case_list;
      /* Label to jump to if no case matches.  */
      /* The expression to be dispatched on.  */
      /* Type that INDEX_EXPR should be converted to.  */
      /* Number of range exprs in case statement.  */
      /* Name of this kind of statement, for warnings.  */
      /* Nonzero if a case label has been seen in this case stmt.  */
/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* If any new stacks are added here, add them to POPSTACKS too.  */
/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;
/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */
#define POPSTACK(STACK)                                   \
do { struct nesting *target = STACK;                      \
     struct nesting *this;                                \
     do { this = nesting_stack;                           \
          if (loop_stack == this)                         \
            loop_stack = loop_stack->next;                \
          if (cond_stack == this)                         \
            cond_stack = cond_stack->next;                \
          if (block_stack == this)                        \
            block_stack = block_stack->next;              \
          if (stack_block_stack == this)                  \
            stack_block_stack = stack_block_stack->next;  \
          if (case_stack == this)                         \
            case_stack = case_stack->next;                \
          nesting_depth = nesting_stack->depth - 1;       \
          nesting_stack = this->all;                      \
          obstack_free (&stmt_obstack, this); }           \
   while (this != target); } while (0)
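
/* For example, expand_end_cond below finishes with POPSTACK (cond_stack):
   every entry above and including the innermost conditional is removed
   from nesting_stack and from its own per-construct stack, and its
   obstack storage is freed.  */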
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */
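
/* For example (an illustration), in

	{ { int v[n]; ... goto lab; }  lab: ; }

   the goto is seen while `lab' is still undefined, and the goto must
   exit a contour whose stack level has to be restored (the block
   declaring the variable-sized v), so expand_fixup records an entry
   here; fixup_gotos finalizes it once the label has been defined.  */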
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  /* The BLOCK for the place where this goto was found.  */
  /* The CODE_LABEL rtx that this is jumping to.  */
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0 (was empty_cleanup_list),
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
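
  /* Illustration (not from the original sources): for a goto inside two
     nested blocks whose cleanup lists are C1 (inner) and C2 (outer),
     cleanup_list_list would have two elements, the one for the innermost
     block first, with TREE_VALUEs C1 and C2.  */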
  /* Bytecode specific members follow */

  /* The label that this jump is jumping to, or 0 for break, continue
     or return.  */
  struct bc_label *bc_target;

  /* The label we use for the fixup patch */
  struct bc_label *label;

  /* True (non-0) if fixup has been handled */

  /* Like stack_level above, except refers to the interpreter stack */

static struct goto_fixup *goto_fixup_chain;
/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

  /* Points to following fixup.  */
  struct label_chain *next;
static void expand_goto_internal PROTO((tree, rtx, rtx));
static void bc_expand_goto_internal PROTO((enum bytecode_opcode,
					   struct bc_label *, tree));
static int expand_fixup PROTO((tree, rtx, rtx));
static void bc_expand_fixup PROTO((enum bytecode_opcode,
				   struct bc_label *, int));
static void fixup_gotos PROTO((struct nesting *, rtx, tree,
			       rtx, int));
static void bc_fixup_gotos PROTO((struct nesting *, int, tree,
				  rtx, int));
static int warn_if_unused_value PROTO((tree));
static void bc_expand_start_cond PROTO((tree, int));
static void bc_expand_end_cond PROTO((void));
static void bc_expand_start_else PROTO((void));
static void bc_expand_end_loop PROTO((void));
static void bc_expand_end_bindings PROTO((tree, int, int));
static void bc_expand_decl PROTO((tree, tree));
static void bc_expand_variable_local_init PROTO((tree));
static void bc_expand_decl_init PROTO((tree));
static void expand_null_return_1 PROTO((rtx, int));
static int tail_recursion_args PROTO((tree, tree));
static void expand_cleanups PROTO((tree, tree));
static void bc_expand_start_case PROTO((struct nesting *, tree,
static int bc_pushcase PROTO((tree, tree));
static void bc_check_for_full_enumeration_handling PROTO((tree));
static void bc_expand_end_case PROTO((tree));
static void do_jump_if_equal PROTO((rtx, rtx, rtx, int));
static int estimate_case_costs PROTO((case_node_ptr));
static void group_case_nodes PROTO((case_node_ptr));
static void balance_case_nodes PROTO((case_node_ptr *,
static int node_has_low_bound PROTO((case_node_ptr, tree));
static int node_has_high_bound PROTO((case_node_ptr, tree));
static int node_is_bounded PROTO((case_node_ptr, tree));
static void emit_jump_if_reachable PROTO((rtx));
static void emit_case_nodes PROTO((rtx, case_node_ptr, rtx, tree));
int bc_expand_exit_loop_if_false ();
void bc_expand_start_cond ();
void bc_expand_end_cond ();
void bc_expand_start_else ();
void bc_expand_end_bindings ();
void bc_expand_start_case ();
void bc_check_for_full_enumeration_handling ();
void bc_expand_end_case ();
void bc_expand_decl ();

extern rtx bc_allocate_local ();
extern rtx bc_allocate_variable_array ();
  gcc_obstack_init (&stmt_obstack);
  empty_cleanup_list = build_tree_list (NULL_TREE, NULL_TREE);

init_stmt_for_function ()
  /* We are not currently within any block, conditional, loop or case.  */
  stack_block_stack = 0;
  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
restore_stmt_status (p)
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
/* Emit a no-op instruction.  */

  if (!output_bytecode)
      last_insn = get_last_insn ();
          && (GET_CODE (last_insn) == CODE_LABEL
              || prev_real_insn (last_insn) == 0))
        emit_insn (gen_nop ());
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
/* Add an unconditional jump to LABEL as the next sequential instruction.  */

  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

expand_computed_goto (exp)
      bc_expand_expr (exp);
      bc_emit_instruction (jumpP);

      rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
      emit_indirect_jump (x);
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */
  struct label_chain *p;

      if (! DECL_RTL (label))
        DECL_RTL (label) = bc_gen_rtx ((char *) 0, 0, bc_get_bytecode_label ());
      if (! bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (DECL_RTL (label))))
        error ("multiply defined label");

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

declare_nonlocal_label (label)
  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slot == 0)
      nonlocal_goto_handler_slot
        = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_stack_save (SAVE_NONLOCAL,
                       &nonlocal_goto_stack_level,
                       PREV_INSN (tail_recursion_reentry));
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

      expand_goto_internal (label, label_rtx (label), NULL_RTX);

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx (LABEL_REF, Pmode, label_rtx (label));

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
         case the virtual stack vars register gets instantiated differently
         in the parent than in the child.  */
#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
        emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
                                      copy_rtx (p->nonlocal_goto_handler_slot),
                                      copy_rtx (p->nonlocal_goto_stack_level),
      /* Restore frame pointer for containing function.
         This sets the actual hard register used for the frame pointer
         to the location of the function's incoming static chain info.
         The non-local goto handler will then adjust it to contain the
         proper value and reload the argument pointer, if needed.  */
      emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));

      /* We have now loaded the frame pointer hardware register with
         the address that corresponds to the start of the virtual
         stack vars.  So replace virtual_stack_vars_rtx in all
         addresses we use with hard_frame_pointer_rtx.  */

      /* Get addr of containing function's current nonlocal goto handler,
         which will do any cleanups and then jump to the label.  */
      addr = copy_rtx (p->nonlocal_goto_handler_slot);
      temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
                                       hard_frame_pointer_rtx));

      /* Restore the stack pointer.  Note this uses fp just restored.  */
      addr = p->nonlocal_goto_stack_level;
        addr = replace_rtx (copy_rtx (addr),
                            virtual_stack_vars_rtx,
                            hard_frame_pointer_rtx);

      emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

      /* Put in the static chain register the nonlocal label address.  */
      emit_move_insn (static_chain_rtx, label_ref);
      /* USE of hard_frame_pointer_rtx added for consistency; not clear if
         really needed.  */
      emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
      emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
      emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
      emit_indirect_jump (temp);
  expand_goto_internal (label, label_rtx (label), NULL_RTX);

/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */
expand_goto_internal (body, label, last_insn)
  struct nesting *block;

  /* NOTICE!  If a bytecode instruction other than `jump' is needed,
     then the caller has to call bc_expand_goto_internal()
     directly.  This is rather an exceptional case, and there aren't
     that many places where this is necessary.  */
      expand_goto_internal (body, label, last_insn);
  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
      /* Find the innermost pending block that contains the label.
         (Check containment by comparing insn-uids.)
         Then restore the outermost stack level within that block,
         and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
          if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
          if (block->data.block.stack_level != 0)
            stack_level = block->data.block.stack_level;
          /* Execute the cleanups for blocks we are exiting.  */
          if (block->data.block.cleanups != 0)
              expand_cleanups (block->data.block.cleanups, NULL_TREE);
              do_pending_stack_adjust ();

      /* Ensure stack adjust isn't done by emit_jump, as this would clobber
         the stack pointer.  This one should be deleted as dead by flow.  */
      clear_pending_stack_adjust ();
      do_pending_stack_adjust ();
      emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);

      if (body != 0 && DECL_TOO_LATE (body))
        error ("jump to `%s' invalidly jumps into binding contour",
               IDENTIFIER_POINTER (DECL_NAME (body)));
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
      /* No fixup needed.  Record that the label is the target
         of at least one goto that has no fixup.  */
      TREE_ADDRESSABLE (body) = 1;
/* Generate a jump with OPCODE to the given bytecode LABEL which is
   found within BODY.  */

bc_expand_goto_internal (opcode, label, body)
     enum bytecode_opcode opcode;
     struct bc_label *label;
  struct nesting *block;
  int stack_level = -1;

  /* If the label is defined, adjust the stack as necessary.
     If it's not defined, we have to push the reference on the
     fixup list.  */

      /* Find the innermost pending block that contains the label.
         (Check containment by comparing bytecode uids.)  Then restore the
         outermost stack level within that block.  */

      for (block = block_stack; block; block = block->next)
          if (BYTECODE_BC_LABEL (block->data.block.first_insn)->uid < label->uid)
          if (block->data.block.bc_stack_level)
            stack_level = block->data.block.bc_stack_level;

          /* Execute the cleanups for blocks we are exiting.  */
          if (block->data.block.cleanups != 0)
              expand_cleanups (block->data.block.cleanups, NULL_TREE);
              do_pending_stack_adjust ();

      /* Restore the stack level.  If we need to adjust the stack, we
         must do so after the jump, since the jump may depend on
         what's on the stack.  Thus, any stack-modifying conditional
         jumps (these are the only ones that rely on what's on the
         stack) go into the fixup list.  */

          && stack_depth != stack_level
          bc_expand_fixup (opcode, label, stack_level);
      if (stack_level >= 0)
        bc_adjust_stack (stack_depth - stack_level);

  if (body && DECL_BIT_FIELD (body))
    error ("jump to `%s' invalidly jumps into binding contour",
           IDENTIFIER_POINTER (DECL_NAME (body)));

  /* Emit immediate jump */
  bc_emit_bytecode (opcode);
  bc_emit_bytecode_labelref (label);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);

      /* Put goto in the fixup list */
      bc_expand_fixup (opcode, label, stack_level);
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */
expand_fixup (tree_label, rtl_label, last_insn)
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

      && (rtl_label == cond_stack->data.cond.endif_label
          || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
           && (rtl_label == loop_stack->data.loop.start_label
               || rtl_label == loop_stack->data.loop.end_label
               || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;

  /* Now set END_BLOCK to the binding level to which we will return.  */

      struct nesting *next_block = end_block->all;

      /* First see if the END_BLOCK is inside the innermost binding level.
         If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
        next_block = next_block->all;

      /* Otherwise, set END_BLOCK to the innermost binding level
         which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
        if (block == next_block)
          next_block = next_block->next;
      end_block = next_block;

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
        || block->data.block.cleanups != 0)
      break;

  if (block != end_block)
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
        = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
         after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
         doing the stack adjust here isn't useful.  Doing it with our
         settings at that location isn't useful either.  Let's hope
         someone does it right.  */
      do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
         NOTE_INSN_BEGIN_BLOCK and NOTE_INSN_END_BLOCK notes at
         this point.  The notes will encapsulate any and all fixup
         code which we might later insert at this point in the insn
         stream.  Also, the BLOCK node will be the parent (i.e. the
         `SUPERBLOCK') of any other BLOCK nodes which we might create
         later on when we are expanding the fixup code.  */

        register rtx original_before_jump
          = last_insn ? last_insn : get_last_insn ();

        fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
        last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
        fixup->context = poplevel (1, 0, 0);  /* Create the BLOCK node now!  */
        emit_insns_after (fixup->before_jump, original_before_jump);

      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
        = (((block->data.block.outer_cleanups
             && block->data.block.outer_cleanups != empty_cleanup_list
             || block->data.block.cleanups)
            ? tree_cons (NULL_TREE, block->data.block.cleanups,
                         block->data.block.outer_cleanups)
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
/* Generate bytecode jump with OPCODE to a fixup routine that links to LABEL.
   Make the fixup restore the stack level to STACK_LEVEL.  */

bc_expand_fixup (opcode, label, stack_level)
     enum bytecode_opcode opcode;
     struct bc_label *label;
  struct goto_fixup *fixup
    = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));

  fixup->label = bc_get_bytecode_label ();
  fixup->bc_target = label;
  fixup->bc_stack_level = stack_level;
  fixup->bc_handled = FALSE;

  fixup->next = goto_fixup_chain;
  goto_fixup_chain = fixup;

  /* Insert a jump to the fixup code */
  bc_emit_bytecode (opcode);
  bc_emit_bytecode_labelref (fixup->label);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

expand_fixups (first_insn)
  fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
  register struct goto_fixup *f, *prev;

  if (output_bytecode)
      /* ??? The second arg is the bc stack level, which is not the same
         as STACK_LEVEL.  I have no idea what should go here, so I'll
         just pass 0.  */
      bc_fixup_gotos (thisblock, 0, cleanup_list, first_insn, dont_jump_in);
  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
          /* Delete inactive fixup from the chain, if that is easy to do.  */
            prev->next = f->next;
      /* Has this fixup's target label been defined?
         If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
          register rtx cleanup_insns;

          /* Get the first non-label after the label
             this goto jumps to.  If that's before this scope begins,
             we don't have a jump into the scope.  */
          rtx after_label = f->target_rtl;
          while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
            after_label = NEXT_INSN (after_label);

          /* If this fixup jumped into this contour from before the beginning
             of this contour, report an error.  */
          /* ??? Bug: this does not detect jumping in through intermediate
             blocks that have stack levels or cleanups.
             It detects only a problem with the innermost block
             around the label.  */
              && (dont_jump_in || stack_level || cleanup_list)
              /* If AFTER_LABEL is 0, it means the jump goes to the end
                 of the rtl, which means it jumps into this scope.  */
              && (after_label == 0
                  || INSN_UID (first_insn) < INSN_UID (after_label))
              && INSN_UID (first_insn) > INSN_UID (f->before_jump)
              && ! DECL_REGISTER (f->target))
              error_with_decl (f->target,
                               "label `%s' used before containing binding contour");
              /* Prevent multiple errors for one label.  */
              DECL_REGISTER (f->target) = 1;
          /* We will expand the cleanups into a sequence of their own and
             then later on we will attach this new sequence to the insn
             stream just ahead of the actual jump insn.  */

          /* Temporarily restore the lexical context where we will
             logically be inserting the fixup code.  We do this for the
             sake of getting the debugging information right.  */

          set_block (f->context);

          /* Expand the cleanups for blocks this jump exits.  */
          if (f->cleanup_list_list)
              for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
                /* Marked elements correspond to blocks that have been closed.
                   Do their cleanups.  */
                if (TREE_ADDRESSABLE (lists)
                    && TREE_VALUE (lists) != 0)
                    expand_cleanups (TREE_VALUE (lists), 0);
                    /* Pop any pushes done in the cleanups,
                       in case function is about to return.  */
                    do_pending_stack_adjust ();

          /* Restore stack level for the biggest contour that this
             jump jumps out of.  */
            emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

          /* Finish up the sequence containing the insns which implement the
             necessary cleanups, and then attach that whole sequence to the
             insn stream just ahead of the actual jump insn.  Attaching it
             at that point insures that any cleanups which are in fact
             implicit C++ object destructions (which must be executed upon
             leaving the block) appear (to the debugger) to be taking place
             in an area of the generated code where the object(s) being
             destructed are still "in scope".  */

          cleanup_insns = get_insns ();

          emit_insns_after (cleanup_insns, f->before_jump);
  /* Mark the cleanups of exited blocks so that they are executed
     by the code above.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
        && PREV_INSN (f->target_rtl) == 0
        /* Label has still not appeared.  If we are exiting a block with
           a stack level to restore, that started before the fixup,
           mark this stack level as needing restoration
           when the fixup is later finalized.
           Also mark the cleanup_list_list element for F
           that corresponds to this block, so that ultimately
           this block's cleanups will be executed by the code above.  */

        /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared,
           it means the label is undefined.  That's erroneous, but possible.  */
        && (thisblock->data.block.block_start_count
            <= f->block_start_count))
        tree lists = f->cleanup_list_list;
        for (; lists; lists = TREE_CHAIN (lists))
          /* If the following elt. corresponds to our containing block
             then the elt. must be for this block.  */
          if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
            TREE_ADDRESSABLE (lists) = 1;

          f->stack_level = stack_level;
/* When exiting a binding contour, process all pending gotos requiring fixups.
   Note: STACK_DEPTH is not altered.

   The arguments are currently not used in the bytecode compiler, but we may
   need them one day for languages other than C.

   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */
bc_fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
  register struct goto_fixup *f, *prev;
  int saved_stack_depth;

  /* F is the fixup we are considering; PREV is the previous one.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
          /* Delete inactive fixup from the chain, if that is easy to do.  */
            prev->next = f->next;

      /* Emit code to restore the stack and continue */
      bc_emit_bytecode_labeldef (f->label);

      /* Save stack_depth across call, since bc_adjust_stack () will alter
         the perceived stack depth via the instructions generated.  */

      if (f->bc_stack_level >= 0)
          saved_stack_depth = stack_depth;
          bc_adjust_stack (stack_depth - f->bc_stack_level);
          stack_depth = saved_stack_depth;

      bc_emit_bytecode (jump);
      bc_emit_bytecode_labelref (f->bc_target);

#ifdef DEBUG_PRINT_CODE
      fputc ('\n', stderr);

  goto_fixup_chain = NULL;
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */
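
/* For example (illustration only), the GNU C statement `asm ("nop");'
   arrives here with BODY the STRING_CST "nop", possibly wrapped in an
   ADDR_EXPR.  */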
  if (output_bytecode)
      error ("`asm' is illegal when generating bytecode");

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
                      TREE_STRING_POINTER (body)));
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
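
/* As an illustration (the operands are invented for the example), the
   GNU C statement

	asm volatile ("foo %1,%0" : "=r" (x) : "g" (y) : "cc");

   arrives here with STRING "foo %1,%0", OUTPUTS listing x with
   constraint "=r", INPUTS listing y with constraint "g", CLOBBERS
   naming "cc", and VOL nonzero.  */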
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
  rtvec argvec, constraints;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);

  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  /* The insn we have emitted.  */

  if (output_bytecode)
      error ("`asm' is illegal when generating bytecode");

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
      tree val = TREE_VALUE (tail);

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)

      /* Make sure constraint has `=' and does not have `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
          if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
              error ("output operand constraint contains `+'");
          if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '=')
          error ("output operand constraint lacks `='");

      /* If an output operand is not a variable or indirect ref,
         create a SAVE_EXPR which is a pseudo-reg
         to act as an intermediate temporary.
         Make the asm insn write into that, then copy it to
         the real output operand.  */

      while (TREE_CODE (val) == COMPONENT_REF
             || TREE_CODE (val) == ARRAY_REF)
        val = TREE_OPERAND (val, 0);

      if (TREE_CODE (val) != VAR_DECL
          && TREE_CODE (val) != PARM_DECL
          && TREE_CODE (val) != INDIRECT_REF)
          TREE_VALUE (tail) = save_expr (TREE_VALUE (tail));
          /* If it's a constant, print an error now so we don't crash later.  */
          if (TREE_CODE (TREE_VALUE (tail)) != SAVE_EXPR)
              error ("invalid output in `asm'");

      output_rtx[i] = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);

  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx (ASM_OPERANDS, VOIDmode,
                  TREE_STRING_POINTER (string), "", 0, argvec, constraints,
  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
      /* If there's an erroneous arg, emit no insn,
         because the ASM_INPUT would get VOIDmode
         and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
      if (TREE_PURPOSE (tail) == NULL_TREE)
          error ("hard register `%s' listed as input operand to `asm'",
                 TREE_STRING_POINTER (TREE_VALUE (tail)) );

      /* Make sure constraint has neither `=' nor `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
        if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '='
            || TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
            error ("input operand constraint contains `%c'",
                   TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);

      XVECEXP (body, 3, i)      /* argvec */
        = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
      XVECEXP (body, 4, i)      /* constraints */
        = gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
                   TREE_STRING_POINTER (TREE_PURPOSE (tail)));
  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
                               ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));

  else if (noutputs == 0 && nclobbers == 0)
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);

      if (num == 0) num = 1;
      body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
          XVECEXP (body, 0, i)
            = gen_rtx (SET, VOIDmode,
                       gen_rtx (ASM_OPERANDS, VOIDmode,
                                TREE_STRING_POINTER (string),
                                TREE_STRING_POINTER (TREE_PURPOSE (tail)),
                                i, argvec, constraints,
          MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;

      /* If there are no outputs (but there are some clobbers)
         store the bare ASM_OPERANDS into the PARALLEL.  */

        XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
          char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
          int j = decode_reg_name (regname);

          if (j == -3)          /* `cc', which is not a register */
          if (j == -4)          /* `memory', don't cache memory across asm */
              XVECEXP (body, 0, i++)
                = gen_rtx (CLOBBER, VOIDmode,
                           gen_rtx (MEM, QImode,
                                    gen_rtx (SCRATCH, VOIDmode, 0)));
            error ("unknown register name `%s' in `asm'", regname);

          /* Use QImode since that's guaranteed to clobber just one reg.  */
          XVECEXP (body, 0, i++)
            = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));

      insn = emit_insn (body);
/* Generate RTL to evaluate the expression EXP
   and remember it in case this is the VALUE in a ({... VALUE; }) construct.  */
expand_expr_stmt (exp)
  if (output_bytecode)
      int org_stack_depth = stack_depth;

      bc_expand_expr (exp);

      /* Restore stack depth */
      if (stack_depth < org_stack_depth)
        bc_emit_instruction (drop);

      last_expr_type = TREE_TYPE (exp);

  /* If -W, warn about statements with no side effects,
     except for an explicit cast to void (e.g. for assert()), and
     except inside a ({...}) where they may be useful.  */
  if (expr_stmts_for_value == 0 && exp != error_mark_node)
      if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
          && !(TREE_CODE (exp) == CONVERT_EXPR
               && TREE_TYPE (exp) == void_type_node))
        warning_with_file_and_line (emit_filename, emit_lineno,
                                    "statement with no effect");
      else if (warn_unused)
        warn_if_unused_value (exp);

  last_expr_type = TREE_TYPE (exp);
  if (! flag_syntax_only)
    last_expr_value = expand_expr (exp,
                                   (expr_stmts_for_value
                                    ? NULL_RTX : const0_rtx),

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
      && TREE_THIS_VOLATILE (exp))
      if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
      else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
        copy_to_reg (last_expr_value);
          rtx lab = gen_label_rtx ();

          /* Compare the value with itself to reference it.  */
          emit_cmp_insn (last_expr_value, last_expr_value, EQ,
                         expand_expr (TYPE_SIZE (last_expr_type),
                                      NULL_RTX, VOIDmode, 0),
                         TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
          emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));

  /* If this expression is part of a ({...}) and is in memory, we may have
     to preserve temporaries.  */
  preserve_temp_slots (last_expr_value);

  /* Free any temporaries used to evaluate this expression.  Any temporary
     used as a result of this expression will already have been preserved
     above.  */
/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  */

warn_if_unused_value (exp)
  if (TREE_USED (exp))

  switch (TREE_CODE (exp))
    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case METHOD_CALL_EXPR:
    case WITH_CLEANUP_EXPR:
      /* We don't warn about COND_EXPR because it may be a useful
         construct if either arm contains a side effect.  */

      /* For a binding, warn if no side effect within it.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case TRUTH_ORIF_EXPR:
    case TRUTH_ANDIF_EXPR:
      /* In && or ||, warn if 2nd operand has no side effect.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

      if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
      /* Let people do `(foo (), 0)' without a warning.  */
      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case NON_LVALUE_EXPR:
      /* Don't warn about values cast to void.  */
      if (TREE_TYPE (exp) == void_type_node)
      /* Don't warn about conversions not explicit in the user's program.  */
      if (TREE_NO_UNUSED_WARNING (exp))
      /* Assignment to a cast usually results in a cast of a modify.
         Don't complain about that.  There can be an arbitrary number of
         casts before the modify, so we must loop until we find the first
         non-cast expression and then test to see if that is a modify.  */
        tree tem = TREE_OPERAND (exp, 0);

        while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
          tem = TREE_OPERAND (tem, 0);

        if (TREE_CODE (tem) == MODIFY_EXPR)

      /* ... fall through ...  */

      /* Referencing a volatile value is a side effect, so don't warn.  */
      if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
           || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
          && TREE_THIS_VOLATILE (exp))
      warning_with_file_and_line (emit_filename, emit_lineno,
                                  "value computed is not used");
/* Clear out the memory of the last expression evaluated.  */

/* Begin a statement which will return a value.
   Return the RTL_EXPR for this statement expr.
   The caller must save that value and pass it to expand_end_stmt_expr.  */
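
/* For example (illustration only), for the GNU C statement expression

	z = ({ int t = f (); t + 1; });

   the parser calls expand_start_stmt_expr at the `({', expands each
   statement inside, and passes the tree returned here to
   expand_end_stmt_expr at the `})'; the value of `t + 1' becomes the
   value of the whole construct.  */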
expand_start_stmt_expr ()
  /* When generating bytecode just note down the stack depth */
  if (output_bytecode)
    return (build_int_2 (stack_depth, 0));

  /* Make the RTL_EXPR node temporary, not momentary,
     so that rtl_expr_chain doesn't become garbage.  */
  momentary = suspend_momentary ();
  t = make_node (RTL_EXPR);
  resume_momentary (momentary);
  start_sequence_for_rtl_expr (t);

  expr_stmts_for_value++;
/* Restore the previous state at the end of a statement that returns a value.
   Returns a tree node representing the statement's value and the
   insns to compute the value.

   The nodes of that expression have been freed by now, so we cannot use them.
   But we don't want to do that anyway; the expression has already been
   evaluated and now we just want to use the value.  So generate a RTL_EXPR
   with the proper type and RTL value.

   If the last substatement was not an expression,
   return something with type `void'.  */
expand_end_stmt_expr (t)
  if (output_bytecode)
      /* At this point, all expressions have been evaluated in order.
         However, all expression values have been popped when evaluated,
         which means we have to recover the last expression value.  This is
         the last value removed by means of a `drop' instruction.  Instead
         of adding code to inhibit dropping the last expression value, it
         is here recovered by undoing the `drop'.  Since `drop' is
         equivalent to `adjstackSI [1]', it can be undone with `adjstackSI
         [-1]'.  */

      bc_adjust_stack (-1);

      if (!last_expr_type)
        last_expr_type = void_type_node;

      t = make_node (RTL_EXPR);
      TREE_TYPE (t) = last_expr_type;
      RTL_EXPR_RTL (t) = NULL;
      RTL_EXPR_SEQUENCE (t) = NULL;

      /* Don't consider deleting this expr or containing exprs at tree level.  */
      TREE_THIS_VOLATILE (t) = 1;
  if (last_expr_type == 0)
      last_expr_type = void_type_node;
      last_expr_value = const0_rtx;
  else if (last_expr_value == 0)
    /* There are some cases where this can happen, such as when the
       statement is void type.  */
    last_expr_value = const0_rtx;
  else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
    /* Remove any possible QUEUED.  */
    last_expr_value = protect_from_queue (last_expr_value, 0);

  TREE_TYPE (t) = last_expr_type;
  RTL_EXPR_RTL (t) = last_expr_value;
  RTL_EXPR_SEQUENCE (t) = get_insns ();

  rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);

  /* Don't consider deleting this expr or containing exprs at tree level.  */
  TREE_SIDE_EFFECTS (t) = 1;
  /* Propagate volatility of the actual RTL expr.  */
  TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);

  expr_stmts_for_value--;
/* Generate RTL for the start of an if-then.  COND is the expression
   whose truth should be tested.

   If EXITFLAG is nonzero, this conditional is visible to
   `exit_something'.  */
expand_start_cond (cond, exitflag)
  struct nesting *thiscond = ALLOC_NESTING ();

  /* Make an entry on cond_stack for the cond we are entering.  */

  thiscond->next = cond_stack;
  thiscond->all = nesting_stack;
  thiscond->depth = ++nesting_depth;
  thiscond->data.cond.next_label = gen_label_rtx ();
  /* Before we encounter an `else', we don't need a separate exit label
     unless there are supposed to be exit statements
     to exit this conditional.  */
  thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
  thiscond->data.cond.endif_label = thiscond->exit_label;
  cond_stack = thiscond;
  nesting_stack = thiscond;

  if (output_bytecode)
    bc_expand_start_cond (cond, exitflag);
    do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
/* Generate RTL between the then-clause and the elseif-clause
   of an if-then-elseif-....  */

expand_start_elseif (cond)
  if (cond_stack->data.cond.endif_label == 0)
    cond_stack->data.cond.endif_label = gen_label_rtx ();
  emit_jump (cond_stack->data.cond.endif_label);
  emit_label (cond_stack->data.cond.next_label);
  cond_stack->data.cond.next_label = gen_label_rtx ();
  do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
/* Generate RTL between the then-clause and the else-clause
   of an if-then-else.  */

expand_start_else ()
  if (cond_stack->data.cond.endif_label == 0)
    cond_stack->data.cond.endif_label = gen_label_rtx ();

  if (output_bytecode)
      bc_expand_start_else ();

  emit_jump (cond_stack->data.cond.endif_label);
  emit_label (cond_stack->data.cond.next_label);
  cond_stack->data.cond.next_label = 0;  /* No more _else or _elseif calls.  */
1928 /* Generate RTL for the end of an if-then.
1929 Pop the record for it off of cond_stack. */
1934 struct nesting *thiscond = cond_stack;
1936 if (output_bytecode)
1937 bc_expand_end_cond ();
1940 do_pending_stack_adjust ();
1941 if (thiscond->data.cond.next_label)
1942 emit_label (thiscond->data.cond.next_label);
1943 if (thiscond->data.cond.endif_label)
1944 emit_label (thiscond->data.cond.endif_label);
1947 POPSTACK (cond_stack);
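/* A rough sketch of the calling sequence a front end might use for
   `if (a) s1; else if (b) s2; else s3;', using the functions above:

	expand_start_cond (a, 0);
	...expand s1...
	expand_start_elseif (b);
	...expand s2...
	expand_start_else ();
	...expand s3...
	expand_end_cond ();
*/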
1952 /* Generate code for the start of an if-then. COND is the expression
1953 whose truth is to be tested; if EXITFLAG is nonzero this conditional
1954 is to be visible to exit_something. It is assumed that the caller
1955 has pushed the previous context on the cond stack. */
1958 bc_expand_start_cond (cond, exitflag)
1962 struct nesting *thiscond = cond_stack;
1964 thiscond->data.case_stmt.nominal_type = cond;
1966 thiscond->exit_label = gen_label_rtx ();
1967 bc_expand_expr (cond);
1968 bc_emit_bytecode (xjumpifnot);
1969 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));
1971 #ifdef DEBUG_PRINT_CODE
1972 fputc ('\n', stderr);
1976 /* Generate the label for the end of an if with no else clause. */
1980 bc_expand_end_cond ()
1982 struct nesting *thiscond = cond_stack;
1984 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->exit_label));
1987 /* Generate code for the start of the else clause of an if-then-else. */
1991 bc_expand_start_else ()
1993 struct nesting *thiscond = cond_stack;
1995 thiscond->data.cond.endif_label = thiscond->exit_label;
1996 thiscond->exit_label = gen_label_rtx ();
1997 bc_emit_bytecode (jump);
1998 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscond->exit_label));
2000 #ifdef DEBUG_PRINT_CODE
2001 fputc ('\n', stderr);
2004 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscond->data.cond.endif_label));
2007 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2008 loop should be exited by `exit_something'. This is a loop for which
2009 `expand_continue' will jump to the top of the loop.
2011 Make an entry on loop_stack to record the labels associated with this loop. */
2015 expand_start_loop (exit_flag)
2018 register struct nesting *thisloop = ALLOC_NESTING ();
2020 /* Make an entry on loop_stack for the loop we are entering. */
2022 thisloop->next = loop_stack;
2023 thisloop->all = nesting_stack;
2024 thisloop->depth = ++nesting_depth;
2025 thisloop->data.loop.start_label = gen_label_rtx ();
2026 thisloop->data.loop.end_label = gen_label_rtx ();
2027 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2028 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2029 loop_stack = thisloop;
2030 nesting_stack = thisloop;
2032 if (output_bytecode)
2034 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));
2038 do_pending_stack_adjust ();
2040 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
2041 emit_label (thisloop->data.loop.start_label);
2046 /* Like expand_start_loop but for a loop where the continuation point
2047 (for expand_continue_loop) will be specified explicitly. */
2050 expand_start_loop_continue_elsewhere (exit_flag)
2053 struct nesting *thisloop = expand_start_loop (exit_flag);
2054 loop_stack->data.loop.continue_label = gen_label_rtx ();
2058 /* Specify the continuation point for a loop started with
2059 expand_start_loop_continue_elsewhere.
2060 Use this at the point in the code to which a continue statement should jump. */
2064 expand_loop_continue_here ()
2066 if (output_bytecode)
2068 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (loop_stack->data.loop.continue_label));
2071 do_pending_stack_adjust ();
2072 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
2073 emit_label (loop_stack->data.loop.continue_label);
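/* A rough sketch of how a front end might drive the loop functions for
   `for (init; cond; incr) body', where the continue point is not the
   top of the loop:

	...expand init...
	expand_start_loop_continue_elsewhere (1);
	expand_exit_loop_if_false (0, cond);
	...expand body...
	expand_loop_continue_here ();
	...expand incr...
	expand_end_loop ();
*/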
2079 bc_expand_end_loop ()
2081 struct nesting *thisloop = loop_stack;
2083 bc_emit_bytecode (jump);
2084 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thisloop->data.loop.start_label));
2086 #ifdef DEBUG_PRINT_CODE
2087 fputc ('\n', stderr);
2090 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisloop->exit_label));
2091 POPSTACK (loop_stack);
2096 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2097 Pop the block off of loop_stack. */
2103 register rtx start_label;
2104 rtx last_test_insn = 0;
2107 if (output_bytecode)
2109 bc_expand_end_loop ();
2113 insn = get_last_insn ();
2114 start_label = loop_stack->data.loop.start_label;
2116 /* Mark the continue-point at the top of the loop if none elsewhere. */
2117 if (start_label == loop_stack->data.loop.continue_label)
2118 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2120 do_pending_stack_adjust ();
2122 /* If optimizing, perhaps reorder the loop. If the loop
2123 starts with a conditional exit, roll that to the end
2124 where it will optimize together with the jump back.
2126 We look for the last conditional branch to the exit that we encounter
2127 before hitting 30 insns or a CALL_INSN. If we see an unconditional
2128 branch to the exit first, use it.
2130 We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
2131 because moving them is not valid. */
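/* Schematically, assuming a loop that enters through its exit test,

	start:	if (! cond) goto end;
		body
		goto start;
	end:

   the reordering below produces

		goto start;
	newstart:
		body
	start:	if (! cond) goto end;
		goto newstart;
	end:

   and jump optimization can then fold the last two jumps into a single
   `if (cond) goto newstart'.  */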
2135 ! (GET_CODE (insn) == JUMP_INSN
2136 && GET_CODE (PATTERN (insn)) == SET
2137 && SET_DEST (PATTERN (insn)) == pc_rtx
2138 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2140 /* Scan insns from the top of the loop looking for a qualified
2141 conditional exit. */
2142 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2143 insn = NEXT_INSN (insn))
2145 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
2148 if (GET_CODE (insn) == NOTE
2149 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2150 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2153 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2156 if (last_test_insn && num_insns > 30)
2159 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
2160 && SET_DEST (PATTERN (insn)) == pc_rtx
2161 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
2162 && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
2163 && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
2164 == loop_stack->data.loop.end_label))
2165 || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
2166 && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
2167 == loop_stack->data.loop.end_label))))
2168 last_test_insn = insn;
2170 if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
2171 && GET_CODE (PATTERN (insn)) == SET
2172 && SET_DEST (PATTERN (insn)) == pc_rtx
2173 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
2174 && (XEXP (SET_SRC (PATTERN (insn)), 0)
2175 == loop_stack->data.loop.end_label))
2176 /* Include BARRIER. */
2177 last_test_insn = NEXT_INSN (insn);
2180 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2182 /* We found one. Move everything from there up
2183 to the end of the loop, and add a jump into the loop
2184 to jump to there. */
2185 register rtx newstart_label = gen_label_rtx ();
2186 register rtx start_move = start_label;
2188 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2189 then we want to move this note also. */
2190 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2191 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2192 == NOTE_INSN_LOOP_CONT))
2193 start_move = PREV_INSN (start_move);
2195 emit_label_after (newstart_label, PREV_INSN (start_move));
2196 reorder_insns (start_move, last_test_insn, get_last_insn ());
2197 emit_jump_insn_after (gen_jump (start_label),
2198 PREV_INSN (newstart_label));
2199 emit_barrier_after (PREV_INSN (newstart_label));
2200 start_label = newstart_label;
2204 emit_jump (start_label);
2205 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2206 emit_label (loop_stack->data.loop.end_label);
2208 POPSTACK (loop_stack);
2213 /* Generate a jump to the current loop's continue-point.
2214 This is usually the top of the loop, but may be specified
2215 explicitly elsewhere. If not currently inside a loop,
2216 return 0 and do nothing; caller will print an error message. */
2219 expand_continue_loop (whichloop)
2220 struct nesting *whichloop;
2224 whichloop = loop_stack;
2227 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2232 /* Generate a jump to exit the current loop. If not currently inside a loop,
2233 return 0 and do nothing; caller will print an error message. */
2236 expand_exit_loop (whichloop)
2237 struct nesting *whichloop;
2241 whichloop = loop_stack;
2244 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2248 /* Generate a conditional jump to exit the current loop if COND
2249 evaluates to zero. If not currently inside a loop,
2250 return 0 and do nothing; caller will print an error message. */
2253 expand_exit_loop_if_false (whichloop, cond)
2254 struct nesting *whichloop;
2259 whichloop = loop_stack;
2262 if (output_bytecode)
2264 bc_expand_expr (cond);
2265 bc_expand_goto_internal (xjumpifnot,
2266 BYTECODE_BC_LABEL (whichloop->exit_label),
2270 do_jump (cond, whichloop->data.loop.end_label, NULL_RTX);
2275 /* Return non-zero if we should preserve sub-expressions as separate
2276 pseudos. We never do so if we aren't optimizing. We always do so
2277 if -fexpensive-optimizations.
2279 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2280 the loop may still be a small one. */
2283 preserve_subexpressions_p ()
2287 if (flag_expensive_optimizations)
2290 if (optimize == 0 || loop_stack == 0)
2293 insn = get_last_insn_anywhere ();
2296 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2297 < n_non_fixed_regs * 3));
2301 /* Generate a jump to exit the current loop, conditional, binding contour
2302 or case statement. Not all such constructs are visible to this function,
2303 only those started with EXIT_FLAG nonzero. Individual languages use
2304 the EXIT_FLAG parameter to control which kinds of constructs you can exit this way.
2307 If not currently inside anything that can be exited,
2308 return 0 and do nothing; caller will print an error message. */
2311 expand_exit_something ()
2315 for (n = nesting_stack; n; n = n->all)
2316 if (n->exit_label != 0)
2318 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2325 /* Generate RTL to return from the current function, with no value.
2326 (That is, we do not do anything about returning any value.) */
2329 expand_null_return ()
2331 struct nesting *block = block_stack;
2334 if (output_bytecode)
2336 bc_emit_instruction (ret);
2340 /* Does any pending block have cleanups? */
2342 while (block && block->data.block.cleanups == 0)
2343 block = block->next;
2345 /* If yes, use a goto to return, since that runs cleanups. */
2347 expand_null_return_1 (last_insn, block != 0);
2350 /* Generate RTL to return from the current function, with value VAL. */
2353 expand_value_return (val)
2356 struct nesting *block = block_stack;
2357 rtx last_insn = get_last_insn ();
2358 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2360 /* Copy the value to the return location
2361 unless it's already there. */
2363 if (return_reg != val)
2365 #ifdef PROMOTE_FUNCTION_RETURN
2366 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2367 int unsignedp = TREE_UNSIGNED (type);
2368 enum machine_mode mode
2369 = promote_mode (type, DECL_MODE (DECL_RESULT (current_function_decl)),
2372 if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
2373 convert_move (return_reg, val, unsignedp);
2376 emit_move_insn (return_reg, val);
2378 if (GET_CODE (return_reg) == REG
2379 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2380 emit_insn (gen_rtx (USE, VOIDmode, return_reg));
2382 /* Does any pending block have cleanups? */
2384 while (block && block->data.block.cleanups == 0)
2385 block = block->next;
2387 /* If yes, use a goto to return, since that runs cleanups.
2388 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2390 expand_null_return_1 (last_insn, block != 0);
2393 /* Output a return with no value. If LAST_INSN is nonzero,
2394 pretend that the return takes place after LAST_INSN.
2395 If USE_GOTO is nonzero then don't use a return instruction;
2396 go to the return label instead. This causes any cleanups
2397 of pending blocks to be executed normally. */
2400 expand_null_return_1 (last_insn, use_goto)
2404 rtx end_label = cleanup_label ? cleanup_label : return_label;
2406 clear_pending_stack_adjust ();
2407 do_pending_stack_adjust ();
2410 /* PCC-struct return always uses an epilogue. */
2411 if (current_function_returns_pcc_struct || use_goto)
2414 end_label = return_label = gen_label_rtx ();
2415 expand_goto_internal (NULL_TREE, end_label, last_insn);
2419 /* Otherwise output a simple return-insn if one is available,
2420 unless it won't do the job. */
2422 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2424 emit_jump_insn (gen_return ());
2430 /* Otherwise jump to the epilogue. */
2431 expand_goto_internal (NULL_TREE, end_label, last_insn);
2434 /* Generate RTL to evaluate the expression RETVAL and return it
2435 from the current function. */
2438 expand_return (retval)
2441 /* If there are any cleanups to be performed, then they will
2442 be inserted following LAST_INSN. It is desirable
2443 that the last_insn, for such purposes, should be the
2444 last insn before computing the return value. Otherwise, cleanups
2445 which call functions can clobber the return value. */
2446 /* ??? rms: I think that is erroneous, because in C++ it would
2447 run destructors on variables that might be used in the subsequent
2448 computation of the return value. */
2450 register rtx val = 0;
2454 struct nesting *block;
2456 /* Bytecode returns are quite simple: just leave the result on the
2457 arithmetic stack. */
2458 if (output_bytecode)
2460 bc_expand_expr (retval);
2461 bc_emit_instruction (ret);
2465 /* If function wants no value, give it none. */
2466 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2468 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2470 expand_null_return ();
2474 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2475 cleanups = any_pending_cleanups (1);
2477 if (TREE_CODE (retval) == RESULT_DECL)
2478 retval_rhs = retval;
2479 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2480 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2481 retval_rhs = TREE_OPERAND (retval, 1);
2482 else if (TREE_TYPE (retval) == void_type_node)
2483 /* Recognize tail-recursive call to void function. */
2484 retval_rhs = retval;
2486 retval_rhs = NULL_TREE;
2488 /* Only use `last_insn' if there are cleanups which must be run. */
2489 if (cleanups || cleanup_label != 0)
2490 last_insn = get_last_insn ();
2492 /* Distribute return down conditional expr if either of the sides
2493 may involve tail recursion (see test below). This enhances the number
2494 of tail recursions we see. Don't do this always since it can produce
2495 sub-optimal code in some cases and we distribute assignments into
2496 conditional expressions when it would help. */
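/* For instance, `return cond ? f (x) : g (x);' is expanded here as if
   it had been written

	if (cond) return f (x);
	return g (x);

   so that each arm can be recognized as a tail call on its own.
   (Hypothetical example; `cond', `f', `g' and `x' are made-up.)  */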
2498 if (optimize && retval_rhs != 0
2499 && frame_offset == 0
2500 && TREE_CODE (retval_rhs) == COND_EXPR
2501 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2502 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2504 rtx label = gen_label_rtx ();
2507 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2508 expr = build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2509 DECL_RESULT (current_function_decl),
2510 TREE_OPERAND (retval_rhs, 1));
2511 TREE_SIDE_EFFECTS (expr) = 1;
2512 expand_return (expr);
2515 expr = build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
2516 DECL_RESULT (current_function_decl),
2517 TREE_OPERAND (retval_rhs, 2));
2518 TREE_SIDE_EFFECTS (expr) = 1;
2519 expand_return (expr);
2523 /* For tail-recursive call to current function,
2524 just jump back to the beginning.
2525 It's unsafe if any auto variable in this function
2526 has its address taken; for simplicity,
2527 require stack frame to be empty. */
2528 if (optimize && retval_rhs != 0
2529 && frame_offset == 0
2530 && TREE_CODE (retval_rhs) == CALL_EXPR
2531 && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
2532 && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
2533 /* Finish checking validity, and if valid emit code
2534 to set the argument variables for the new call. */
2535 && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
2536 DECL_ARGUMENTS (current_function_decl)))
2538 if (tail_recursion_label == 0)
2540 tail_recursion_label = gen_label_rtx ();
2541 emit_label_after (tail_recursion_label,
2542 tail_recursion_reentry);
2545 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
2550 /* This optimization is safe if there are local cleanups
2551 because expand_null_return takes care of them.
2552 ??? I think it should also be safe when there is a cleanup label,
2553 because expand_null_return takes care of them, too.
2554 Any reason why not? */
2555 if (HAVE_return && cleanup_label == 0
2556 && ! current_function_returns_pcc_struct
2557 && BRANCH_COST <= 1)
2559 /* If this is return x == y; then generate
2560 if (x == y) return 1; else return 0;
2561 if we can do it with explicit return insns and
2562 branches are cheap. */
2564 switch (TREE_CODE (retval_rhs))
2572 case TRUTH_ANDIF_EXPR:
2573 case TRUTH_ORIF_EXPR:
2574 case TRUTH_AND_EXPR:
2576 case TRUTH_NOT_EXPR:
2577 case TRUTH_XOR_EXPR:
2578 op0 = gen_label_rtx ();
2579 jumpifnot (retval_rhs, op0);
2580 expand_value_return (const1_rtx);
2582 expand_value_return (const0_rtx);
2586 #endif /* HAVE_return */
2590 && TREE_TYPE (retval_rhs) != void_type_node
2591 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2593 /* Calculate the return value into a pseudo reg. */
2594 val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2596 /* All temporaries have now been used. */
2598 /* Return the calculated value, doing cleanups first. */
2599 expand_value_return (val);
2603 /* No cleanups or no hard reg used;
2604 calculate value into hard return reg. */
2605 expand_expr (retval, const0_rtx, VOIDmode, 0);
2608 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2612 /* Return 1 if the end of the generated RTX is not a barrier.
2613 This means code already compiled can drop through. */
2616 drop_through_at_end_p ()
2618 rtx insn = get_last_insn ();
2619 while (insn && GET_CODE (insn) == NOTE)
2620 insn = PREV_INSN (insn);
2621 return insn && GET_CODE (insn) != BARRIER;
2624 /* Emit code to alter this function's formal parms for a tail-recursive call.
2625 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2626 FORMALS is the chain of decls of formals.
2627 Return 1 if this can be done;
2628 otherwise return 0 and do not emit any code. */
2631 tail_recursion_args (actuals, formals)
2632 tree actuals, formals;
2634 register tree a = actuals, f = formals;
2636 register rtx *argvec;
2638 /* Check that number and types of actuals are compatible
2639 with the formals. This is not always true in valid C code.
2640 Also check that no formal needs to be addressable
2641 and that all formals are scalars. */
2643 /* Also count the args. */
2645 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2647 if (TREE_TYPE (TREE_VALUE (a)) != TREE_TYPE (f))
2649 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2652 if (a != 0 || f != 0)
2655 /* Compute all the actuals. */
2657 argvec = (rtx *) alloca (i * sizeof (rtx));
2659 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2660 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
2662 /* Find which actual values refer to current values of previous formals.
2663 Copy each of them now, before any formal is changed. */
2665 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2669 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2670 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2671 { copy = 1; break; }
2673 argvec[i] = copy_to_reg (argvec[i]);
2676 /* Store the values of the actuals into the formals. */
2678 for (f = formals, a = actuals, i = 0; f;
2679 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2681 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
2682 emit_move_insn (DECL_RTL (f), argvec[i]);
2684 convert_move (DECL_RTL (f), argvec[i],
2685 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
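/* The copying pass above matters when one actual reads a formal that an
   earlier store would clobber.  For a hypothetical tail call such as

	int f (int a, int b) { ... return f (b, a); ... }

   the actual for formal `b' still mentions formal `a', so its value is
   copied to a fresh pseudo before `a' is overwritten with `b'.  */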
2692 /* Generate the RTL code for entering a binding contour.
2693 The variables are declared one by one, by calls to `expand_decl'.
2695 EXIT_FLAG is nonzero if this construct should be visible to
2696 `exit_something'. */
2699 expand_start_bindings (exit_flag)
2702 struct nesting *thisblock = ALLOC_NESTING ();
2703 rtx note = output_bytecode ? 0 : emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
2705 /* Make an entry on block_stack for the block we are entering. */
2707 thisblock->next = block_stack;
2708 thisblock->all = nesting_stack;
2709 thisblock->depth = ++nesting_depth;
2710 thisblock->data.block.stack_level = 0;
2711 thisblock->data.block.cleanups = 0;
2712 thisblock->data.block.function_call_count = 0;
2716 if (block_stack->data.block.cleanups == NULL_TREE
2717 && (block_stack->data.block.outer_cleanups == NULL_TREE
2718 || block_stack->data.block.outer_cleanups == empty_cleanup_list))
2719 thisblock->data.block.outer_cleanups = empty_cleanup_list;
2721 thisblock->data.block.outer_cleanups
2722 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2723 block_stack->data.block.outer_cleanups);
2726 thisblock->data.block.outer_cleanups = 0;
2730 && !(block_stack->data.block.cleanups == NULL_TREE
2731 && block_stack->data.block.outer_cleanups == NULL_TREE))
2732 thisblock->data.block.outer_cleanups
2733 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2734 block_stack->data.block.outer_cleanups);
2736 thisblock->data.block.outer_cleanups = 0;
2738 thisblock->data.block.label_chain = 0;
2739 thisblock->data.block.innermost_stack_block = stack_block_stack;
2740 thisblock->data.block.first_insn = note;
2741 thisblock->data.block.block_start_count = ++block_start_count;
2742 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
2743 block_stack = thisblock;
2744 nesting_stack = thisblock;
2746 if (!output_bytecode)
2748 /* Make a new level for allocating stack slots. */
2753 /* Given a pointer to a BLOCK node, save a pointer to the most recently
2754 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given BLOCK. */
2758 remember_end_note (block)
2759 register tree block;
2761 BLOCK_END_NOTE (block) = last_block_end_note;
2762 last_block_end_note = NULL_RTX;
2765 /* Generate RTL code to terminate a binding contour.
2766 VARS is the chain of VAR_DECL nodes
2767 for the variables bound in this contour.
2768 MARK_ENDS is nonzero if we should put a note at the beginning
2769 and end of this binding contour.
2771 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
2772 (That is true automatically if the contour has a saved stack level.) */
2775 expand_end_bindings (vars, mark_ends, dont_jump_in)
2780 register struct nesting *thisblock = block_stack;
2783 if (output_bytecode)
2785 bc_expand_end_bindings (vars, mark_ends, dont_jump_in);
2790 for (decl = vars; decl; decl = TREE_CHAIN (decl))
2791 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL
2792 && ! DECL_IN_SYSTEM_HEADER (decl))
2793 warning_with_decl (decl, "unused variable `%s'");
2795 if (thisblock->exit_label)
2797 do_pending_stack_adjust ();
2798 emit_label (thisblock->exit_label);
2801 /* If necessary, make a handler for nonlocal gotos taking
2802 place in the function calls in this block. */
2803 if (function_call_count != thisblock->data.block.function_call_count
2805 /* Make handler for outermost block
2806 if there were any nonlocal gotos to this function. */
2807 && (thisblock->next == 0 ? current_function_has_nonlocal_label
2808 /* Make handler for inner block if it has something
2809 special to do when you jump out of it. */
2810 : (thisblock->data.block.cleanups != 0
2811 || thisblock->data.block.stack_level != 0)))
2814 rtx afterward = gen_label_rtx ();
2815 rtx handler_label = gen_label_rtx ();
2816 rtx save_receiver = gen_reg_rtx (Pmode);
2819 /* Don't let jump_optimize delete the handler. */
2820 LABEL_PRESERVE_P (handler_label) = 1;
2822 /* Record the handler address in the stack slot for that purpose,
2823 during this block, saving and restoring the outer value. */
2824 if (thisblock->next != 0)
2826 emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
2829 emit_move_insn (save_receiver, nonlocal_goto_handler_slot);
2830 insns = get_insns ();
2832 emit_insns_before (insns, thisblock->data.block.first_insn);
2836 emit_move_insn (nonlocal_goto_handler_slot,
2837 gen_rtx (LABEL_REF, Pmode, handler_label));
2838 insns = get_insns ();
2840 emit_insns_before (insns, thisblock->data.block.first_insn);
2842 /* Jump around the handler; it runs only when specially invoked. */
2843 emit_jump (afterward);
2844 emit_label (handler_label);
2846 #ifdef HAVE_nonlocal_goto
2847 if (! HAVE_nonlocal_goto)
2849 /* First adjust our frame pointer to its actual value. It was
2850 previously set to the start of the virtual area corresponding to
2851 the stacked variables when we branched here and now needs to be
2852 adjusted to the actual hardware fp value.
2854 Assignments to virtual registers are converted by
2855 instantiate_virtual_regs into the corresponding assignment
2856 to the underlying register (fp in this case) that makes
2857 the original assignment true.
2858 So the following insn will actually be
2859 decrementing fp by STARTING_FRAME_OFFSET. */
2860 emit_move_insn (virtual_stack_vars_rtx, frame_pointer_rtx);
2862 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2863 if (fixed_regs[ARG_POINTER_REGNUM])
2865 #ifdef ELIMINABLE_REGS
2866 /* If the argument pointer can be eliminated in favor of the
2867 frame pointer, we don't need to restore it. We assume here
2868 that if such an elimination is present, it can always be used.
2869 This is the case on all known machines; if we don't make this
2870 assumption, we do unnecessary saving on many machines. */
2871 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
2874 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
2875 if (elim_regs[i].from == ARG_POINTER_REGNUM
2876 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
2879 if (i == sizeof elim_regs / sizeof elim_regs [0])
2882 /* Now restore our arg pointer from the address at which it
2883 was saved in our stack frame.
2884 If there hasn't been space allocated for it yet, make some now. */
2886 if (arg_pointer_save_area == 0)
2887 arg_pointer_save_area
2888 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
2889 emit_move_insn (virtual_incoming_args_rtx,
2890 /* We need a pseudo here, or else
2891 instantiate_virtual_regs_1 complains. */
2892 copy_to_reg (arg_pointer_save_area));
2897 /* The handler expects the desired label address in the static chain
2898 register. It tests the address and does an appropriate jump
2899 to whatever label is desired. */
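/* The dispatch emitted by the loop below behaves roughly like

	handler:
	  if (static_chain == &&lab1) goto lab1;
	  if (static_chain == &&lab2) goto lab2;
	  abort ();

   with one comparison for each label that may be reached nonlocally
   (sketch only; `lab1' and `lab2' stand for the user's labels).  */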
2900 for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
2901 /* Skip any labels we shouldn't be able to jump to from here. */
2902 if (! DECL_TOO_LATE (TREE_VALUE (link)))
2904 rtx not_this = gen_label_rtx ();
2905 rtx this = gen_label_rtx ();
2906 do_jump_if_equal (static_chain_rtx,
2907 gen_rtx (LABEL_REF, Pmode, DECL_RTL (TREE_VALUE (link))),
2909 emit_jump (not_this);
2911 expand_goto (TREE_VALUE (link));
2912 emit_label (not_this);
2914 /* If label is not recognized, abort. */
2915 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0,
2917 emit_label (afterward);
2920 /* Don't allow jumping into a block that has cleanups or a stack level. */
2922 || thisblock->data.block.stack_level != 0
2923 || thisblock->data.block.cleanups != 0)
2925 struct label_chain *chain;
2927 /* Any labels in this block are no longer valid to go to.
2928 Mark them to cause an error message. */
2929 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
2931 DECL_TOO_LATE (chain->label) = 1;
2932 /* If any goto without a fixup came to this label,
2933 that must be an error, because gotos without fixups
2934 come from outside all saved stack-levels and all cleanups. */
2935 if (TREE_ADDRESSABLE (chain->label))
2936 error_with_decl (chain->label,
2937 "label `%s' used before containing binding contour");
2941 /* Restore stack level in effect before the block
2942 (only if variable-size objects allocated). */
2943 /* Perform any cleanups associated with the block. */
2945 if (thisblock->data.block.stack_level != 0
2946 || thisblock->data.block.cleanups != 0)
2948 /* Don't let cleanups affect ({...}) constructs. */
2949 int old_expr_stmts_for_value = expr_stmts_for_value;
2950 rtx old_last_expr_value = last_expr_value;
2951 tree old_last_expr_type = last_expr_type;
2952 expr_stmts_for_value = 0;
2954 /* Do the cleanups. */
2955 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE);
2956 do_pending_stack_adjust ();
2958 expr_stmts_for_value = old_expr_stmts_for_value;
2959 last_expr_value = old_last_expr_value;
2960 last_expr_type = old_last_expr_type;
2962 /* Restore the stack level. */
2964 if (thisblock->data.block.stack_level != 0)
2966 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
2967 thisblock->data.block.stack_level, NULL_RTX);
2968 if (nonlocal_goto_handler_slot != 0)
2969 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
2973 /* Any gotos out of this block must also do these things.
2974 Also report any gotos with fixups that came to labels in this block. */
2976 fixup_gotos (thisblock,
2977 thisblock->data.block.stack_level,
2978 thisblock->data.block.cleanups,
2979 thisblock->data.block.first_insn,
2983 /* Mark the beginning and end of the scope if requested.
2984 We do this now, after running cleanups on the variables
2985 just going out of scope, so they are in scope for their cleanups. */
2988 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
2990 /* Get rid of the beginning-mark if we don't make an end-mark. */
2991 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
2993 /* If doing stupid register allocation, make sure lives of all
2994 register variables declared here extend thru end of scope. */
2997 for (decl = vars; decl; decl = TREE_CHAIN (decl))
2999 rtx rtl = DECL_RTL (decl);
3000 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
3004 /* Restore block_stack level for containing block. */
3006 stack_block_stack = thisblock->data.block.innermost_stack_block;
3007 POPSTACK (block_stack);
3009 /* Pop the stack slot nesting and free any slots at this level. */
3014 /* End a binding contour.
3015 VARS is the chain of VAR_DECL nodes for the variables bound
3016 in this contour. MARK_ENDS is nonzero if we should put a note
3017 at the beginning and end of this binding contour.
3018 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour. */
3022 bc_expand_end_bindings (vars, mark_ends, dont_jump_in)
3027 struct nesting *thisbind = nesting_stack;
3031 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3032 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL)
3033 warning_with_decl (decl, "unused variable `%s'");
3035 if (thisbind->exit_label)
3036 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thisbind->exit_label));
3038 /* Pop block/bindings off stack */
3039 POPSTACK (block_stack);
3042 /* Generate RTL for the automatic variable declaration DECL.
3043 (Other kinds of declarations are simply ignored if seen here.)
3044 CLEANUP is an expression to be executed at exit from this binding contour;
3045 for example, in C++, it might call the destructor for this variable.
3047 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
3048 either before or after calling `expand_decl' but before compiling
3049 any subsequent expressions. This is because CLEANUP may be expanded
3050 more than once, on different branches of execution.
3051 For the same reason, CLEANUP may not contain a CALL_EXPR
3052 except as its topmost node--else `preexpand_calls' would get confused.
3054 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3055 that is not associated with any particular variable.
3057 There is no special support here for C++ constructors.
3058 They should be handled by the proper code in DECL_INITIAL. */
3064 struct nesting *thisblock = block_stack;
3067 if (output_bytecode)
3069 bc_expand_decl (decl, 0);
3073 type = TREE_TYPE (decl);
3075 /* Only automatic variables need any expansion done.
3076 Static and external variables, and external functions,
3077 will be handled by `assemble_variable' (called from finish_decl).
3078 TYPE_DECL and CONST_DECL require nothing.
3079 PARM_DECLs are handled in `assign_parms'. */
3081 if (TREE_CODE (decl) != VAR_DECL)
3083 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3086 /* Create the RTL representation for the variable. */
3088 if (type == error_mark_node)
3089 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
3090 else if (DECL_SIZE (decl) == 0)
3091 /* Variable with incomplete type. */
3093 if (DECL_INITIAL (decl) == 0)
3094 /* Error message was already done; now avoid a crash. */
3095 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
3097 /* An initializer is going to decide the size of this array.
3098 Until we know the size, represent its address with a reg. */
3099 DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
3101 else if (DECL_MODE (decl) != BLKmode
3102 /* If -ffloat-store, don't put explicit float vars into registers. */
3104 && !(flag_float_store
3105 && TREE_CODE (type) == REAL_TYPE)
3106 && ! TREE_THIS_VOLATILE (decl)
3107 && ! TREE_ADDRESSABLE (decl)
3108 && (DECL_REGISTER (decl) || ! obey_regdecls))
3110 /* Automatic variable that can go in a register. */
3111 int unsignedp = TREE_UNSIGNED (type);
3112 enum machine_mode reg_mode
3113 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3115 if (TREE_CODE (type) == COMPLEX_TYPE)
3117 rtx realpart, imagpart;
3118 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (type));
3120 /* For a complex type variable, make a CONCAT of two pseudos
3121 so that the real and imaginary parts
3122 can be allocated separately. */
3123 realpart = gen_reg_rtx (partmode);
3124 REG_USERVAR_P (realpart) = 1;
3125 imagpart = gen_reg_rtx (partmode);
3126 REG_USERVAR_P (imagpart) = 1;
3127 DECL_RTL (decl) = gen_rtx (CONCAT, reg_mode, realpart, imagpart);
3131 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3132 if (TREE_CODE (type) == POINTER_TYPE)
3133 mark_reg_pointer (DECL_RTL (decl));
3134 REG_USERVAR_P (DECL_RTL (decl)) = 1;
3137 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST)
3139 /* Variable of fixed size that goes on the stack. */
3143 /* If we previously made RTL for this decl, it must be an array
3144 whose size was determined by the initializer.
3145 The old address was a register; set that register now
3146 to the proper address. */
3147 if (DECL_RTL (decl) != 0)
3149 if (GET_CODE (DECL_RTL (decl)) != MEM
3150 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3152 oldaddr = XEXP (DECL_RTL (decl), 0);
3156 = assign_stack_temp (DECL_MODE (decl),
3157 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
3158 + BITS_PER_UNIT - 1)
3162 /* Set alignment we actually gave this decl. */
3163 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3164 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3168 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3169 if (addr != oldaddr)
3170 emit_move_insn (oldaddr, addr);
3173 /* If this is a memory ref that contains aggregate components,
3174 mark it as such for cse and loop optimize. */
3175 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3177 /* If this is in memory because of -ffloat-store,
3178 set the volatile bit, to prevent optimizations from
3179 undoing the effects. */
3180 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
3181 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3185 /* Dynamic-size object: must push space on the stack. */
3189 /* Record the stack pointer on entry to block, if we have
3190 not already done so. */
3191 if (thisblock->data.block.stack_level == 0)
3193 do_pending_stack_adjust ();
3194 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3195 &thisblock->data.block.stack_level,
3196 thisblock->data.block.first_insn);
3197 stack_block_stack = thisblock;
3200 /* Compute the variable's size, in bytes. */
3201 size = expand_expr (size_binop (CEIL_DIV_EXPR,
3203 size_int (BITS_PER_UNIT)),
3204 NULL_RTX, VOIDmode, 0);
3207 /* Allocate space on the stack for the variable. */
3208 address = allocate_dynamic_stack_space (size, NULL_RTX,
3211 /* Reference the variable indirectly through that rtx. */
3212 DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);
3214 /* If this is a memory ref that contains aggregate components,
3215 mark it as such for cse and loop optimize. */
3216 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3218 /* Indicate the alignment we actually gave this variable. */
3219 #ifdef STACK_BOUNDARY
3220 DECL_ALIGN (decl) = STACK_BOUNDARY;
3222 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3226 if (TREE_THIS_VOLATILE (decl))
3227 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3228 #if 0 /* A variable is not necessarily unchanging
3229 just because it is const. RTX_UNCHANGING_P
3230 means no change in the function,
3231 not merely no change in the variable's scope.
3232 It is correct to set RTX_UNCHANGING_P if the variable's scope
3233 is the whole function. There's no convenient way to test that. */
3234 if (TREE_READONLY (decl))
3235 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
3238 /* If doing stupid register allocation, make sure life of any
3239 register variable starts here, at the start of its scope. */
3242 use_variable (DECL_RTL (decl));
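/* A rough sketch of the calling sequence a front end might use for a
   block such as `{ int x = 1; ... }' (`x_decl' is a made-up name for
   x's VAR_DECL; the two-argument form of expand_decl follows the call
   convention used elsewhere in this file):

	expand_start_bindings (0);
	expand_decl (x_decl, NULL_TREE);
	expand_decl_init (x_decl);
	...expand the statements...
	expand_end_bindings (decls, 1, 0);
*/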
3246 /* Generate code for the automatic variable declaration DECL. For
3247 most variables this just means we give it a stack offset. The
3248 compiler sometimes emits cleanups without variables and we will
3249 have to deal with those too. */
3252 bc_expand_decl (decl, cleanup)
3260 /* A cleanup with no variable. */
3267 /* Only auto variables need any work. */
3268 if (TREE_CODE (decl) != VAR_DECL || TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3271 type = TREE_TYPE (decl);
3273 if (type == error_mark_node)
3274 DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
3276 else if (DECL_SIZE (decl) == 0)
3278 /* Variable with incomplete type. The stack offset herein will be
3279 fixed later in expand_decl_init (). */
3280 DECL_RTL (decl) = bc_gen_rtx ((char *) 0, 0, (struct bc_label *) 0);
3282 else if (TREE_CONSTANT (DECL_SIZE (decl)))
3284 DECL_RTL (decl) = bc_allocate_local (TREE_INT_CST_LOW (DECL_SIZE (decl)) / BITS_PER_UNIT,
3288 DECL_RTL (decl) = bc_allocate_variable_array (DECL_SIZE (decl));
3291 /* Emit code to perform the initialization of a declaration DECL. */
3294 expand_decl_init (decl)
3297 int was_used = TREE_USED (decl);
3299 if (output_bytecode)
3301 bc_expand_decl_init (decl);
3305 /* If this is a CONST_DECL, we don't have to generate any code, but
3306 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3307 to be set while in the obstack containing the constant. If we don't
3308 do this, we can lose if we have functions nested three deep and the middle
3309 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3310 the innermost function is the first to expand that STRING_CST. */
3311 if (TREE_CODE (decl) == CONST_DECL)
3313 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3314 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3315 EXPAND_INITIALIZER);
3319 if (TREE_STATIC (decl))
3322 /* Compute and store the initial value now. */
3324 if (DECL_INITIAL (decl) == error_mark_node)
3326 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3327 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3328 || code == POINTER_TYPE)
3329 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3333 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3335 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3336 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3340 /* Don't let the initialization count as "using" the variable. */
3341 TREE_USED (decl) = was_used;
3343 /* Free any temporaries we made while initializing the decl. */
3347 /* Expand initialization for variable-sized types. Allocate array
3348 using newlocalSI and set local variable, which is a pointer to the newly allocated space. */
3352 bc_expand_variable_local_init (decl)
3355 /* Evaluate size expression and coerce to SI */
3356 bc_expand_expr (DECL_SIZE (decl));
3358 /* Type sizes are always (?) of TREE_CODE INTEGER_CST, so
3359 no coercion is necessary (?) */
3361 /* emit_typecode_conversion (preferred_typecode (TYPE_MODE (DECL_SIZE (decl)),
3362 TREE_UNSIGNED (DECL_SIZE (decl))), SIcode); */
3364 /* Emit code to allocate array */
3365 bc_emit_instruction (newlocalSI);
3367 /* Store array pointer in local variable. This is the only instance
3368 where we actually want the address of the pointer to the
3369 variable-size block, rather than the pointer itself. We avoid
3370 using expand_address() since that would cause the pointer to be
3371 pushed rather than its address. Hence the hard-coded reference;
3372 notice also that the variable is always local (no global
3373 variable-size type variables). */
3375 bc_load_localaddr (DECL_RTL (decl));
3376 bc_emit_instruction (storeP);
3380 /* Emit code to initialize a declaration. */
3383 bc_expand_decl_init (decl)
3386 int org_stack_depth;
3388 /* Static initializers are handled elsewhere. */
3390 if (TREE_STATIC (decl))
3393 /* Remember the original stack depth. */
3394 org_stack_depth = stack_depth;
3396 /* If the type is variable-size, we first create its space (we ASSUME
3397 it CAN'T be static). We do this regardless of whether there's an
3398 initializer assignment or not. */
3400 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
3401 bc_expand_variable_local_init (decl);
3403 /* Expand initializer assignment */
3404 if (DECL_INITIAL (decl) == error_mark_node)
3406 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3408 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3409 || code == POINTER_TYPE)
3411 expand_assignment (TREE_TYPE (decl), decl, 0, 0);
3413 else if (DECL_INITIAL (decl))
3414 expand_assignment (TREE_TYPE (decl), decl, 0, 0);
3416 /* Restore stack depth */
3417 if (org_stack_depth > stack_depth)
3420 bc_adjust_stack (stack_depth - org_stack_depth);
3424 /* CLEANUP is an expression to be executed at exit from this binding contour;
3425 for example, in C++, it might call the destructor for this variable.
3427 If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
3428 either before or after calling `expand_decl' but before compiling
3429 any subsequent expressions. This is because CLEANUP may be expanded
3430 more than once, on different branches of execution.
3431 For the same reason, CLEANUP may not contain a CALL_EXPR
3432 except as its topmost node--else `preexpand_calls' would get confused.
3434 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3435 that is not associated with any particular variable. */
3438 expand_decl_cleanup (decl, cleanup)
3441 struct nesting *thisblock = block_stack;
3443 /* Error if we are not in any block. */
3447 /* Record the cleanup if there is one. */
3451 thisblock->data.block.cleanups
3452 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3453 /* If this block has a cleanup, it belongs in stack_block_stack. */
3454 stack_block_stack = thisblock;
3459 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
3460 DECL_ELTS is the list of elements that belong to DECL's type.
3461 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
3464 expand_anon_union_decl (decl, cleanup, decl_elts)
3465 tree decl, cleanup, decl_elts;
3467 struct nesting *thisblock = block_stack;
3470 expand_decl (decl, cleanup);
3471 x = DECL_RTL (decl);
3475 tree decl_elt = TREE_VALUE (decl_elts);
3476 tree cleanup_elt = TREE_PURPOSE (decl_elts);
3477 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
3479 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
3480 instead create a new MEM rtx with the proper mode. */
3481 if (GET_CODE (x) == MEM)
3483 if (mode == GET_MODE (x))
3484 DECL_RTL (decl_elt) = x;
3487 DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0)));
3488 MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
3489 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
3492 else if (GET_CODE (x) == REG)
3494 if (mode == GET_MODE (x))
3495 DECL_RTL (decl_elt) = x;
3497 DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0);
3502 /* Record the cleanup if there is one. */
3505 thisblock->data.block.cleanups
3506 = temp_tree_cons (decl_elt, cleanup_elt,
3507 thisblock->data.block.cleanups);
3509 decl_elts = TREE_CHAIN (decl_elts);
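/* For example, if `union { int i; float f; } u;' lives in memory, both
   members alias the union's slot directly: `i' gets (mem:SI addr) and
   `f' gets (mem:SF addr) with the same address, rather than a SUBREG
   wrapped around the union's MEM.  */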
3513 /* Expand a list of cleanups LIST.
3514 Elements may be expressions or may be nested lists.
3516 If DONT_DO is nonnull, then any list-element
3517 whose TREE_PURPOSE matches DONT_DO is omitted.
3518 This is sometimes used to avoid a cleanup associated with
3519 a value that is being returned out of the scope. */
3522 expand_cleanups (list, dont_do)
3527 for (tail = list; tail; tail = TREE_CHAIN (tail))
3528 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
3530 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3531 expand_cleanups (TREE_VALUE (tail), dont_do);
3534 /* Cleanups may be run multiple times. For example,
3535 when exiting a binding contour, we expand the
3536 cleanups associated with that contour. When a goto
3537 within that binding contour has a target outside that
3538 contour, it will expand all cleanups from its scope to
3539 the target. Though the cleanups are expanded multiple
3540 times, the control paths are non-overlapping so the
3541 cleanups will not be executed twice. */
3542 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
3548 /* Move all cleanups from the current block_stack
3549 to the containing block_stack, where they are assumed to
3550 have been created. If anything can cause a temporary to
3551 be created, but not expanded for more than one level of
3552 block_stacks, then this code will have to change. */
3557 struct nesting *block = block_stack;
3558 struct nesting *outer = block->next;
3560 outer->data.block.cleanups
3561 = chainon (block->data.block.cleanups,
3562 outer->data.block.cleanups);
3563 block->data.block.cleanups = 0;
3567 last_cleanup_this_contour ()
3569 if (block_stack == 0)
3572 return block_stack->data.block.cleanups;
3575 /* Return 1 if there are any pending cleanups at this point.
3576 If THIS_CONTOUR is nonzero, check the current contour as well.
3577 Otherwise, look only at the contours that enclose this one. */
3580 any_pending_cleanups (this_contour)
3583 struct nesting *block;
3585 if (block_stack == 0)
3588 if (this_contour && block_stack->data.block.cleanups != NULL)
3590 if (block_stack->data.block.cleanups == 0
3591 && (block_stack->data.block.outer_cleanups == 0
3593 || block_stack->data.block.outer_cleanups == empty_cleanup_list
3598 for (block = block_stack->next; block; block = block->next)
3599 if (block->data.block.cleanups != 0)
3605 /* Enter a case (Pascal) or switch (C) statement.
3606 Push a block onto case_stack and nesting_stack
3607 to accumulate the case-labels that are seen
3608 and to record the labels generated for the statement.
3610 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
3611 Otherwise, this construct is transparent for `exit_something'.
3613 EXPR is the index-expression to be dispatched on.
3614 TYPE is its nominal type. We could simply convert EXPR to this type,
3615 but instead we take short cuts. */
3618 expand_start_case (exit_flag, expr, type, printname)
3624 register struct nesting *thiscase = ALLOC_NESTING ();
3626 /* Make an entry on case_stack for the case we are entering. */
3628 thiscase->next = case_stack;
3629 thiscase->all = nesting_stack;
3630 thiscase->depth = ++nesting_depth;
3631 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
3632 thiscase->data.case_stmt.case_list = 0;
3633 thiscase->data.case_stmt.index_expr = expr;
3634 thiscase->data.case_stmt.nominal_type = type;
3635 thiscase->data.case_stmt.default_label = 0;
3636 thiscase->data.case_stmt.num_ranges = 0;
3637 thiscase->data.case_stmt.printname = printname;
3638 thiscase->data.case_stmt.seenlabel = 0;
3639 case_stack = thiscase;
3640 nesting_stack = thiscase;
3642 if (output_bytecode)
3644 bc_expand_start_case (thiscase, expr, type, printname);
3648 do_pending_stack_adjust ();
3650 /* Make sure case_stmt.start points to something that won't
3651 need any transformation before expand_end_case. */
3652 if (GET_CODE (get_last_insn ()) != NOTE)
3653 emit_note (NULL_PTR, NOTE_INSN_DELETED);
3655 thiscase->data.case_stmt.start = get_last_insn ();
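/* A rough sketch of the calling sequence for a switch statement,
   assuming the matching expand_end_case defined later in this file:

	expand_start_case (1, index_expr, type, "switch statement");
	pushcase (value, converter, label, &duplicate);
	...expand the code for each arm...
	expand_end_case (index_expr);
*/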
3659 /* Enter a case statement. It is assumed that the caller has pushed
3660 the current context onto the case stack. */
3663 bc_expand_start_case (thiscase, expr, type, printname)
3664 struct nesting *thiscase;
3669 bc_expand_expr (expr);
3670 bc_expand_conversion (TREE_TYPE (expr), type);
3672 /* For cases, the skip is a place we jump to that's emitted after
3673 the size of the jump table is known. */
3675 thiscase->data.case_stmt.skip_label = gen_label_rtx ();
3676 bc_emit_bytecode (jump);
3677 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));
3679 #ifdef DEBUG_PRINT_CODE
3680 fputc ('\n', stderr);
3685 /* Start a "dummy case statement" within which case labels are invalid
3686 and are not connected to any larger real case statement.
3687 This can be used if you don't want to let a case statement jump
3688 into the middle of certain kinds of constructs. */
3691 expand_start_case_dummy ()
3693 register struct nesting *thiscase = ALLOC_NESTING ();
3695 /* Make an entry on case_stack for the dummy. */
3697 thiscase->next = case_stack;
3698 thiscase->all = nesting_stack;
3699 thiscase->depth = ++nesting_depth;
3700 thiscase->exit_label = 0;
3701 thiscase->data.case_stmt.case_list = 0;
3702 thiscase->data.case_stmt.start = 0;
3703 thiscase->data.case_stmt.nominal_type = 0;
3704 thiscase->data.case_stmt.default_label = 0;
3705 thiscase->data.case_stmt.num_ranges = 0;
3706 case_stack = thiscase;
3707 nesting_stack = thiscase;
3710 /* End a dummy case statement. */
3713 expand_end_case_dummy ()
3715 POPSTACK (case_stack);
3718 /* Return the data type of the index-expression
3719 of the innermost case statement, or null if none. */
3722 case_index_expr_type ()
3725 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
3729 /* Accumulate one case or default label inside a case or switch statement.
3730 VALUE is the value of the case (a null pointer, for a default label).
3731 The function CONVERTER, when applied to arguments T and V,
3732 converts the value V to the type T.
3734 If not currently inside a case or switch statement, return 1 and do
3735 nothing. The caller will print a language-specific error message.
3736 If VALUE is a duplicate or overlaps, return 2 and do nothing
3737 except store the (first) duplicate node in *DUPLICATE.
3738 If VALUE is out of range, return 3 and do nothing.
3739 If we are jumping into the scope of a cleanup or var-sized array, return 5.
3740 Return 0 on success.
3742 Extended to handle range statements. */
3745 pushcase (value, converter, label, duplicate)
3746 register tree value;
3747 tree (*converter) PROTO((tree, tree));
3748 register tree label;
3751 register struct case_node **l;
3752 register struct case_node *n;
3756 if (output_bytecode)
3757 return bc_pushcase (value, label);
3759 /* Fail if not inside a real case statement. */
3760 if (! (case_stack && case_stack->data.case_stmt.start))
3763 if (stack_block_stack
3764 && stack_block_stack->depth > case_stack->depth)
3767 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3768 nominal_type = case_stack->data.case_stmt.nominal_type;
3770 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3771 if (index_type == error_mark_node)
3774 /* Convert VALUE to the type in which the comparisons are nominally done. */
3776 value = (*converter) (nominal_type, value);
3778 /* If this is the first label, warn if any insns have been emitted. */
3779 if (case_stack->data.case_stmt.seenlabel == 0)
3782 for (insn = case_stack->data.case_stmt.start;
3784 insn = NEXT_INSN (insn))
3786 if (GET_CODE (insn) == CODE_LABEL)
3788 if (GET_CODE (insn) != NOTE
3789 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3791 warning ("unreachable code at beginning of %s",
3792 case_stack->data.case_stmt.printname);
3797 case_stack->data.case_stmt.seenlabel = 1;
3799 /* Fail if this value is out of range for the actual type of the index
3800 (which may be narrower than NOMINAL_TYPE). */
3801 if (value != 0 && ! int_fits_type_p (value, index_type))
3804 /* Fail if this is a duplicate or overlaps another entry. */
3807 if (case_stack->data.case_stmt.default_label != 0)
3809 *duplicate = case_stack->data.case_stmt.default_label;
3812 case_stack->data.case_stmt.default_label = label;
3816 /* Find the elt in the chain before which to insert the new value,
3817 to keep the chain sorted in increasing order.
3818 But report an error if this element is a duplicate. */
3819 for (l = &case_stack->data.case_stmt.case_list;
3820 /* Keep going past elements distinctly less than VALUE. */
3821 *l != 0 && tree_int_cst_lt ((*l)->high, value);
3826 /* Element we will insert before must be distinctly greater;
3827 overlap means error. */
3828 if (! tree_int_cst_lt (value, (*l)->low))
3830 *duplicate = (*l)->code_label;
3835 /* Add this label to the chain, and succeed.
3836 Copy VALUE so it is on temporary rather than momentary
3837 obstack and will thus survive till the end of the case statement. */
3838 n = (struct case_node *) oballoc (sizeof (struct case_node));
3841 n->high = n->low = copy_node (value);
3842 n->code_label = label;
3846 expand_label (label);
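/* For example, after `case 1:', `case 5 ... 7:' and `case 3:' the chain
   above holds {1,1} -> {3,3} -> {5,7}, sorted by value.  A later
   `case 6:' stops at {5,7}, fails the `distinctly greater' test, and is
   reported as a duplicate.  */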
3850 /* Like pushcase but this case applies to all values
3851 between VALUE1 and VALUE2 (inclusive).
3852 The return value is the same as that of pushcase
3853 but there is one additional error code:
3854 4 means the specified range was empty. */
3857 pushcase_range (value1, value2, converter, label, duplicate)
3858 register tree value1, value2;
3859 tree (*converter) PROTO((tree, tree));
3860 register tree label;
3863 register struct case_node **l;
3864 register struct case_node *n;
3868 /* Fail if not inside a real case statement. */
3869 if (! (case_stack && case_stack->data.case_stmt.start))
3872 if (stack_block_stack
3873 && stack_block_stack->depth > case_stack->depth)
3876 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3877 nominal_type = case_stack->data.case_stmt.nominal_type;
3879 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3880 if (index_type == error_mark_node)
3883 /* If this is the first label, warn if any insns have been emitted. */
3884 if (case_stack->data.case_stmt.seenlabel == 0)
3887 for (insn = case_stack->data.case_stmt.start;
3889 insn = NEXT_INSN (insn))
3891 if (GET_CODE (insn) == CODE_LABEL)
3893 if (GET_CODE (insn) != NOTE
3894 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3896 warning ("unreachable code at beginning of %s",
3897 case_stack->data.case_stmt.printname);
3902 case_stack->data.case_stmt.seenlabel = 1;
3904 /* Convert VALUEs to type in which the comparisons are nominally done. */
3905 if (value1 == 0) /* Negative infinity. */
3906 value1 = TYPE_MIN_VALUE(index_type);
3907 value1 = (*converter) (nominal_type, value1);
3909 if (value2 == 0) /* Positive infinity. */
3910 value2 = TYPE_MAX_VALUE(index_type);
3911 value2 = (*converter) (nominal_type, value2);
3913 /* Fail if these values are out of range. */
3914 if (! int_fits_type_p (value1, index_type))
3917 if (! int_fits_type_p (value2, index_type))
3920 /* Fail if the range is empty. */
3921 if (tree_int_cst_lt (value2, value1))
3924 /* If the bounds are equal, turn this into the one-value case. */
3925 if (tree_int_cst_equal (value1, value2))
3926 return pushcase (value1, converter, label, duplicate);
3928 /* Find the elt in the chain before which to insert the new value,
3929 to keep the chain sorted in increasing order.
3930 But report an error if this element is a duplicate. */
3931 for (l = &case_stack->data.case_stmt.case_list;
3932 /* Keep going past elements distinctly less than this range. */
3933 *l != 0 && tree_int_cst_lt ((*l)->high, value1);
3938 /* Element we will insert before must be distinctly greater;
3939 overlap means error. */
3940 if (! tree_int_cst_lt (value2, (*l)->low))
3942 *duplicate = (*l)->code_label;
3947 /* Add this label to the chain, and succeed.
3948 Copy VALUE1, VALUE2 so they are on temporary rather than momentary
3949 obstack and will thus survive till the end of the case statement. */
3951 n = (struct case_node *) oballoc (sizeof (struct case_node));
3954 n->low = copy_node (value1);
3955 n->high = copy_node (value2);
3956 n->code_label = label;
3959 expand_label (label);
3961 case_stack->data.case_stmt.num_ranges++;
3967 /* Accumulate one case or default label; VALUE is the value of the
3968 case, or nil for a default label. If not currently inside a case,
3969 return 1 and do nothing. If VALUE is a duplicate or overlaps, return
3970 2 and do nothing. If VALUE is out of range, return 3 and do nothing.
3971 Return 0 on success. This function is a leftover from the earlier
3972 bytecode compiler, which was based on gcc 1.37. It should be
3973 merged into pushcase. */
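/* A caller might map these return codes to diagnostics roughly like
   this (illustrative sketch, not code from this file):

	switch (bc_pushcase (value, label))
	  {
	  case 1: error ("case label not within a switch statement"); break;
	  case 2: error ("duplicate case value"); break;
	  case 3: error ("case value out of range"); break;
	  }  */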
3976 bc_pushcase (value, label)
3980 struct nesting *thiscase = case_stack;
3981 struct case_node *case_label, *new_label;
3986 /* Fail if duplicate, overlap, or out of type range. */
3989 value = convert (thiscase->data.case_stmt.nominal_type, value);
3990 if (! int_fits_type_p (value, thiscase->data.case_stmt.nominal_type))
3993 for (case_label = thiscase->data.case_stmt.case_list;
3994 case_label->left; case_label = case_label->left)
3995 if (! tree_int_cst_lt (case_label->left->high, value))
3998 if ((case_label != thiscase->data.case_stmt.case_list
3999 && ! tree_int_cst_lt (case_label->high, value))
4000 || (case_label->left && ! tree_int_cst_lt (value, case_label->left->low)))
4003 new_label = (struct case_node *) oballoc (sizeof (struct case_node));
4004 new_label->low = new_label->high = copy_node (value);
4005 new_label->code_label = label;
4006 new_label->left = case_label->left;
4008 case_label->left = new_label;
4009 thiscase->data.case_stmt.num_ranges++;
4013 if (thiscase->data.case_stmt.default_label)
4015 thiscase->data.case_stmt.default_label = label;
4018 expand_label (label);
4022 /* Called when the index of a switch statement is an enumerated type
4023 and there is no default label.
4025 Checks that all enumeration literals are covered by the case
4026 expressions of a switch. Also, warn if there are any extra
4027 switch cases that are *not* elements of the enumerated type.
4029 If all enumeration literals were covered by the case expressions,
4030 turn one of the expressions into the default expression since it should
4031 not be possible to fall through such a switch. */
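/* For example (source-level view, not code in this file):

	enum color { RED, GREEN, BLUE } c;
	switch (c) { case RED: ...; break; case GREEN: ...; break; }

   has no default label and no case for BLUE, so we warn that `BLUE'
   is not handled; a `case 7:' in the same switch would instead draw
   the warning that 7 is not in the enumerated type.  */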
4034 check_for_full_enumeration_handling (type)
4037 register struct case_node *n;
4038 register struct case_node **l;
4039 register tree chain;
4042 if (output_bytecode)
4044 bc_check_for_full_enumeration_handling (type);
4048 /* The time complexity of this loop is currently O(N * M), with
4049 N being the number of members in the enumerated type, and
4050 M being the number of case expressions in the switch. */
4052 for (chain = TYPE_VALUES (type);
4054 chain = TREE_CHAIN (chain))
4056 /* Find a match between enumeral and case expression, if possible.
4057 Quit looking when we've gone too far (since case expressions
4058 are kept sorted in ascending order). Warn about enumerators not
4059 handled in the switch statement case expression list. */
4061 for (n = case_stack->data.case_stmt.case_list;
4062 n && tree_int_cst_lt (n->high, TREE_VALUE (chain));
4066 if (!n || tree_int_cst_lt (TREE_VALUE (chain), n->low))
4069 warning ("enumeration value `%s' not handled in switch",
4070 IDENTIFIER_POINTER (TREE_PURPOSE (chain)));
4075 /* Now we go the other way around; we warn if there are case
4076 expressions that don't correspond to enumerators. This can
4077 occur since C and C++ don't enforce type-checking of
4078 assignments to enumeration variables. */
4081 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
4083 for (chain = TYPE_VALUES (type);
4084 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
4085 chain = TREE_CHAIN (chain))
4090 if (TYPE_NAME (type) == 0)
4091 warning ("case value `%d' not in enumerated type",
4092 TREE_INT_CST_LOW (n->low));
4094 warning ("case value `%d' not in enumerated type `%s'",
4095 TREE_INT_CST_LOW (n->low),
4096 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4099 : DECL_NAME (TYPE_NAME (type))));
4101 if (!tree_int_cst_equal (n->low, n->high))
4103 for (chain = TYPE_VALUES (type);
4104 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
4105 chain = TREE_CHAIN (chain))
4110 if (TYPE_NAME (type) == 0)
4111 warning ("case value `%d' not in enumerated type",
4112 TREE_INT_CST_LOW (n->high));
4114 warning ("case value `%d' not in enumerated type `%s'",
4115 TREE_INT_CST_LOW (n->high),
4116 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4119 : DECL_NAME (TYPE_NAME (type))));
4125 /* ??? This optimization is disabled because it causes valid programs to
4126 fail. ANSI C does not guarantee that an expression with enum type
4127 will have a value that is the same as one of the enumeration literals. */
4129 /* If all values were found as case labels, make one of them the default
4130 label. Thus, this switch will never fall through. We arbitrarily pick
4131 the last one to make the default since this is likely the most
4132 efficient choice. */
4136 for (l = &case_stack->data.case_stmt.case_list;
4141 case_stack->data.case_stmt.default_label = (*l)->code_label;
4148 /* Check that all enumeration literals are covered by the case
4149 expressions of a switch. Also warn if there are any cases
4150 that are not elements of the enumerated type. */
4153 bc_check_for_full_enumeration_handling (type)
4156 struct nesting *thiscase = case_stack;
4157 struct case_node *c;
4160 /* Check for enums not handled. */
4161 for (e = TYPE_VALUES (type); e; e = TREE_CHAIN (e))
4163 for (c = thiscase->data.case_stmt.case_list->left;
4164 c && tree_int_cst_lt (c->high, TREE_VALUE (e));
4167 if (! (c && tree_int_cst_equal (c->low, TREE_VALUE (e))))
4168 warning ("enumerated value `%s' not handled in switch",
4169 IDENTIFIER_POINTER (TREE_PURPOSE (e)));
4172 /* Check for cases not in the enumeration. */
4173 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
4175 for (e = TYPE_VALUES (type);
4176 e && !tree_int_cst_equal (c->low, TREE_VALUE (e));
4180 warning ("case value `%d' not in enumerated type `%s'",
4181 TREE_INT_CST_LOW (c->low),
4182 IDENTIFIER_POINTER (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
4184 : DECL_NAME (TYPE_NAME (type))));
4188 /* Terminate a case (Pascal) or switch (C) statement
4189 in which ORIG_INDEX is the expression to be tested.
4190 Generate the code to test it and jump to the right place. */
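/* In rough outline (a sketch of the strategy below, not a contract):
   a constant index reduces to one unconditional jump; a short or
   sparse case list becomes a balanced tree of compares and branches;
   a dense list becomes a casesi insn or a tablejump through an
   ADDR_VEC/ADDR_DIFF_VEC dispatch table.  */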
4193 expand_end_case (orig_index)
4196 tree minval, maxval, range, orig_minval;
4197 rtx default_label = 0;
4198 register struct case_node *n;
4206 register struct nesting *thiscase = case_stack;
4210 if (output_bytecode)
4212 bc_expand_end_case (orig_index);
4216 table_label = gen_label_rtx ();
4217 index_expr = thiscase->data.case_stmt.index_expr;
4218 unsignedp = TREE_UNSIGNED (TREE_TYPE (index_expr));
4220 do_pending_stack_adjust ();
4222 /* An ERROR_MARK occurs for various reasons including invalid data type. */
4223 if (TREE_TYPE (index_expr) != error_mark_node)
4225 /* If switch expression was an enumerated type, check that all
4226 enumeration literals are covered by the cases.
4227 No sense trying this if there's a default case, however. */
4229 if (!thiscase->data.case_stmt.default_label
4230 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
4231 && TREE_CODE (index_expr) != INTEGER_CST)
4232 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
4234 /* If this is the first label, warn if any insns have been emitted. */
4235 if (thiscase->data.case_stmt.seenlabel == 0)
4238 for (insn = get_last_insn ();
4239 insn != case_stack->data.case_stmt.start;
4240 insn = PREV_INSN (insn))
4241 if (GET_CODE (insn) != NOTE
4242 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4244 warning ("unreachable code at beginning of %s",
4245 case_stack->data.case_stmt.printname);
4250 /* If we don't have a default-label, create one here,
4251 after the body of the switch. */
4252 if (thiscase->data.case_stmt.default_label == 0)
4254 thiscase->data.case_stmt.default_label
4255 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4256 expand_label (thiscase->data.case_stmt.default_label);
4258 default_label = label_rtx (thiscase->data.case_stmt.default_label);
4260 before_case = get_last_insn ();
4262 /* Simplify the case-list before we count it. */
4263 group_case_nodes (thiscase->data.case_stmt.case_list);
4265 /* Get upper and lower bounds of case values.
4266 Also convert all the case values to the index expr's data type. */
4269 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4271 /* Check low and high label values are integers. */
4272 if (TREE_CODE (n->low) != INTEGER_CST)
4274 if (TREE_CODE (n->high) != INTEGER_CST)
4277 n->low = convert (TREE_TYPE (index_expr), n->low);
4278 n->high = convert (TREE_TYPE (index_expr), n->high);
4280 /* Count the elements and track the largest and smallest
4281 of them (treating them as signed even if they are not). */
4289 if (INT_CST_LT (n->low, minval))
4291 if (INT_CST_LT (maxval, n->high))
4294 /* A range counts double, since it requires two compares. */
4295 if (! tree_int_cst_equal (n->low, n->high))
4299 orig_minval = minval;
4301 /* Compute span of values. */
4303 range = fold (build (MINUS_EXPR, TREE_TYPE (index_expr),
4306 if (count == 0 || TREE_CODE (TREE_TYPE (index_expr)) == ERROR_MARK)
4308 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
4310 emit_jump (default_label);
4313 /* If range of values is much bigger than number of values,
4314 make a sequence of conditional branches instead of a dispatch.
4315 If the switch-index is a constant, do it this way
4316 because we can optimize it. */
4318 #ifndef CASE_VALUES_THRESHOLD
4320 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
4322 /* If machine does not have a case insn that compares the
4323 bounds, this means extra overhead for dispatch tables
4324 which raises the threshold for using them. */
4325 #define CASE_VALUES_THRESHOLD 5
4326 #endif /* HAVE_casesi */
4327 #endif /* CASE_VALUES_THRESHOLD */
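/* Example of the tradeoff (illustrative numbers): five case labels
   spread over the values 1..1000 give count == 5 but a range of 999,
   so a dispatch table would be nearly all default entries; the test
   below therefore falls back to conditional branches.  */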
4329 else if (TREE_INT_CST_HIGH (range) != 0
4330 || count < CASE_VALUES_THRESHOLD
4331 || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
4333 || TREE_CODE (index_expr) == INTEGER_CST
4334 /* These will reduce to a constant. */
4335 || (TREE_CODE (index_expr) == CALL_EXPR
4336 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
4337 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
4338 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
4339 || (TREE_CODE (index_expr) == COMPOUND_EXPR
4340 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
4342 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4344 /* If the index is a short or char for which we do not have
4345 an insn to handle comparisons directly, convert it to
4346 a full integer now, rather than letting each comparison
4347 generate the conversion. */
4349 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
4350 && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
4351 == CODE_FOR_nothing))
4353 enum machine_mode wider_mode;
4354 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
4355 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
4356 if (cmp_optab->handlers[(int) wider_mode].insn_code
4357 != CODE_FOR_nothing)
4359 index = convert_to_mode (wider_mode, index, unsignedp);
4365 do_pending_stack_adjust ();
4367 index = protect_from_queue (index, 0);
4368 if (GET_CODE (index) == MEM)
4369 index = copy_to_reg (index);
4370 if (GET_CODE (index) == CONST_INT
4371 || TREE_CODE (index_expr) == INTEGER_CST)
4373 /* Make a tree node with the proper constant value
4374 if we don't already have one. */
4375 if (TREE_CODE (index_expr) != INTEGER_CST)
4378 = build_int_2 (INTVAL (index),
4379 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
4380 index_expr = convert (TREE_TYPE (index_expr), index_expr);
4383 /* For constant index expressions we need only
4384 issue an unconditional branch to the appropriate
4385 target code. The job of removing any unreachable
4386 code is left to the optimization phase if the
4387 "-O" option is specified. */
4388 for (n = thiscase->data.case_stmt.case_list;
4392 if (! tree_int_cst_lt (index_expr, n->low)
4393 && ! tree_int_cst_lt (n->high, index_expr))
4397 emit_jump (label_rtx (n->code_label));
4399 emit_jump (default_label);
4403 /* If the index expression is not constant we generate
4404 a binary decision tree to select the appropriate
4405 target code. This is done as follows:
4407 The list of cases is rearranged into a binary tree,
4408 nearly optimal assuming equal probability for each case.
4410 The tree is transformed into RTL, eliminating
4411 redundant test conditions at the same time.
4413 If program flow could reach the end of the
4414 decision tree an unconditional jump to the
4415 default code is emitted. */
4418 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
4419 && estimate_case_costs (thiscase->data.case_stmt.case_list));
4420 balance_case_nodes (&thiscase->data.case_stmt.case_list,
4422 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
4423 default_label, TREE_TYPE (index_expr));
4424 emit_jump_if_reachable (default_label);
4433 enum machine_mode index_mode = SImode;
4434 int index_bits = GET_MODE_BITSIZE (index_mode);
4436 enum machine_mode op_mode;
4438 /* Convert the index to SImode. */
4439 if (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (index_expr)))
4440 > GET_MODE_BITSIZE (index_mode))
4442 enum machine_mode omode = TYPE_MODE (TREE_TYPE (index_expr));
4443 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
4445 /* We must handle the endpoints in the original mode. */
4446 index_expr = build (MINUS_EXPR, TREE_TYPE (index_expr),
4447 index_expr, minval);
4448 minval = integer_zero_node;
4449 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4450 emit_cmp_insn (rangertx, index, LTU, NULL_RTX, omode, 1, 0);
4451 emit_jump_insn (gen_bltu (default_label));
4452 /* Now we can safely truncate. */
4453 index = convert_to_mode (index_mode, index, 0);
4457 if (TYPE_MODE (TREE_TYPE (index_expr)) != index_mode)
4458 index_expr = convert (type_for_size (index_bits, 0),
4460 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4463 index = protect_from_queue (index, 0);
4464 do_pending_stack_adjust ();
4466 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][0];
4467 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][0])
4469 index = copy_to_mode_reg (op_mode, index);
4471 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
4473 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][1];
4474 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][1])
4476 op1 = copy_to_mode_reg (op_mode, op1);
4478 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
4480 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][2];
4481 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][2])
4483 op2 = copy_to_mode_reg (op_mode, op2);
4485 emit_jump_insn (gen_casesi (index, op1, op2,
4486 table_label, default_label));
4490 #ifdef HAVE_tablejump
4491 if (! win && HAVE_tablejump)
4493 index_expr = convert (thiscase->data.case_stmt.nominal_type,
4494 fold (build (MINUS_EXPR,
4495 TREE_TYPE (index_expr),
4496 index_expr, minval)));
4497 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4499 index = protect_from_queue (index, 0);
4500 do_pending_stack_adjust ();
4502 do_tablejump (index, TYPE_MODE (TREE_TYPE (index_expr)),
4503 expand_expr (range, NULL_RTX, VOIDmode, 0),
4504 table_label, default_label);
4511 /* Get table of labels to jump to, in order of case index. */
4513 ncases = TREE_INT_CST_LOW (range) + 1;
4514 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
4515 bzero (labelvec, ncases * sizeof (rtx));
4517 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4519 register HOST_WIDE_INT i
4520 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);
4525 = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
4526 if (i + TREE_INT_CST_LOW (orig_minval)
4527 == TREE_INT_CST_LOW (n->high))
4533 /* Fill in the gaps with the default. */
4534 for (i = 0; i < ncases; i++)
4535 if (labelvec[i] == 0)
4536 labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);
4538 /* Output the table.  */
4539 emit_label (table_label);
4541 /* This would be a lot nicer if CASE_VECTOR_PC_RELATIVE
4542 were an expression, instead of an #ifdef/#ifndef. */
4544 #ifdef CASE_VECTOR_PC_RELATIVE
4548 emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
4549 gen_rtx (LABEL_REF, Pmode, table_label),
4550 gen_rtvec_v (ncases, labelvec)));
4552 emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
4553 gen_rtvec_v (ncases, labelvec)));
4555 /* If the case insn drops through the table,
4556 after the table we must jump to the default-label.
4557 Otherwise record no drop-through after the table. */
4558 #ifdef CASE_DROPS_THROUGH
4559 emit_jump (default_label);
4565 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
4566 reorder_insns (before_case, get_last_insn (),
4567 thiscase->data.case_stmt.start);
4569 if (thiscase->exit_label)
4570 emit_label (thiscase->exit_label);
4572 POPSTACK (case_stack);
4578 /* Terminate a case statement. EXPR is the original index expression. */
4582 bc_expand_end_case (expr)
4585 struct nesting *thiscase = case_stack;
4586 enum bytecode_opcode opcode;
4587 struct bc_label *jump_label;
4588 struct case_node *c;
4590 bc_emit_bytecode (jump);
4591 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));
4593 #ifdef DEBUG_PRINT_CODE
4594 fputc ('\n', stderr);
4597 /* Now that the size of the jump table is known, emit the actual
4598 indexed jump instruction. */
4599 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->data.case_stmt.skip_label));
4601 opcode = TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode
4602 ? TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseSU : caseSI
4603 : TREE_UNSIGNED (thiscase->data.case_stmt.nominal_type) ? caseDU : caseDI;
4605 bc_emit_bytecode (opcode);
4607 /* Now emit the case instruction's literal arguments, in order.
4608 In addition to the value on the stack, it uses:
4609 1. The address of the jump table.
4610 2. The size of the jump table.
4611 3. The default label. */
4613 jump_label = bc_get_bytecode_label ();
4614 bc_emit_bytecode_labelref (jump_label);
4615 bc_emit_bytecode_const ((char *) &thiscase->data.case_stmt.num_ranges,
4616 sizeof thiscase->data.case_stmt.num_ranges);
4618 if (thiscase->data.case_stmt.default_label)
4619 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (thiscase->data.case_stmt.default_label)));
4621 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (thiscase->exit_label));
4623 /* Output the jump table. */
4625 bc_align_bytecode (3 /* PTR_ALIGN */);
4626 bc_emit_bytecode_labeldef (jump_label);
4628 if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == SImode)
4629 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
4631 opcode = TREE_INT_CST_LOW (c->low);
4632 bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);
4634 opcode = TREE_INT_CST_LOW (c->high);
4635 bc_emit_bytecode_const ((char *) &opcode, sizeof opcode);
4637 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
4640 if (TYPE_MODE (thiscase->data.case_stmt.nominal_type) == DImode)
4641 for (c = thiscase->data.case_stmt.case_list->left; c; c = c->left)
4643 bc_emit_bytecode_DI_const (c->low);
4644 bc_emit_bytecode_DI_const (c->high);
4646 bc_emit_bytecode_labelref (BYTECODE_BC_LABEL (DECL_RTL (c->code_label)));
4653 bc_emit_bytecode_labeldef (BYTECODE_BC_LABEL (thiscase->exit_label));
4655 /* Possibly issue enumeration warnings. */
4657 if (!thiscase->data.case_stmt.default_label
4658 && TREE_CODE (TREE_TYPE (expr)) == ENUMERAL_TYPE
4659 && TREE_CODE (expr) != INTEGER_CST
4661 check_for_full_enumeration_handling (TREE_TYPE (expr));
4664 #ifdef DEBUG_PRINT_CODE
4665 fputc ('\n', stderr);
4668 POPSTACK (case_stack);
4672 /* Return unique bytecode ID. */
4677 static int bc_uid = 0;
4682 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
4685 do_jump_if_equal (op1, op2, label, unsignedp)
4686 rtx op1, op2, label;
4689 if (GET_CODE (op1) == CONST_INT
4690 && GET_CODE (op2) == CONST_INT)
4692 if (INTVAL (op1) == INTVAL (op2))
4697 enum machine_mode mode = GET_MODE (op1);
4698 if (mode == VOIDmode)
4699 mode = GET_MODE (op2);
4700 emit_cmp_insn (op1, op2, EQ, NULL_RTX, mode, unsignedp, 0);
4701 emit_jump_insn (gen_beq (label));
4705 /* Not all case values are encountered equally. This function
4706 uses a heuristic to weight case labels, in cases where that
4707 looks like a reasonable thing to do.
4709 Right now, all we try to guess is text, and we establish the following weights:
4712 chars above space: 16
4721 If we find any cases in the switch that are not either -1 or in the range
4722 of valid ASCII characters, or are control characters other than those
4723 commonly used with "\", don't treat this switch as scanning text.
4725 Return 1 if these nodes are suitable for cost estimation, otherwise return 0. */
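/* Illustrative effect of the weights (assuming the table built below):
   in a switch over 'a', ' ' and '\n', the letter weighs 16, the space
   8 and the newline 2, so balance_case_nodes places `case 'a':' nearer
   the root of the decision tree.  */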
4729 estimate_case_costs (node)
4732 tree min_ascii = build_int_2 (-1, -1);
4733 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
4737 /* If we haven't already made the cost table, make it now. Note that the
4738 lower bound of the table is -1, not zero. */
4740 if (cost_table == NULL)
4742 cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
4743 bzero (cost_table - 1, 129 * sizeof (short));
4745 for (i = 0; i < 128; i++)
4749 else if (ispunct (i))
4751 else if (iscntrl (i))
4755 cost_table[' '] = 8;
4756 cost_table['\t'] = 4;
4757 cost_table['\0'] = 4;
4758 cost_table['\n'] = 2;
4759 cost_table['\f'] = 1;
4760 cost_table['\v'] = 1;
4761 cost_table['\b'] = 1;
4764 /* See if all the case expressions look like text. It is text if the
4765 constant is >= -1 and the highest constant is <= 127. Do all comparisons
4766 as signed arithmetic since we don't want to ever access cost_table with a
4767 value less than -1. Also check that none of the constants in a range
4768 are strange control characters. */
4770 for (n = node; n; n = n->right)
4772 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
4775 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
4776 if (cost_table[i] < 0)
4780 /* All interesting values are within the range of interesting
4781 ASCII characters. */
4785 /* Scan an ordered list of case nodes
4786 combining those with consecutive values or ranges.
4788 E.g. three separate entries 1: 2: 3: become one entry 1..3: */
4791 group_case_nodes (head)
4794 case_node_ptr node = head;
4798 rtx lb = next_real_insn (label_rtx (node->code_label));
4799 case_node_ptr np = node;
4801 /* Try to group the successors of NODE with NODE. */
4802 while (((np = np->right) != 0)
4803 /* Do they jump to the same place? */
4804 && next_real_insn (label_rtx (np->code_label)) == lb
4805 /* Are their ranges consecutive? */
4806 && tree_int_cst_equal (np->low,
4807 fold (build (PLUS_EXPR,
4808 TREE_TYPE (node->high),
4811 /* An overflow is not consecutive. */
4812 && tree_int_cst_lt (node->high,
4813 fold (build (PLUS_EXPR,
4814 TREE_TYPE (node->high),
4816 integer_one_node))))
4818 node->high = np->high;
4820 /* NP is the first node after NODE which can't be grouped with it.
4821 Delete the nodes in between, and move on to that node. */
4827 /* Take an ordered list of case nodes
4828 and transform them into a near optimal binary tree,
4829 on the assumption that any target code selection value is as
4830 likely as any other.
4832 The transformation is performed by splitting the ordered
4833 list into two equal sections plus a pivot. The parts are
4834 then attached to the pivot as left and right branches. Each
4835 branch is then transformed recursively. */
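/* For instance (equal weights assumed), the ordered single-valued list
   1,2,3,4,5 splits at 3: nodes 1,2 become the left subtree and 4,5 the
   right subtree, and each half is balanced recursively in turn.  */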
4838 balance_case_nodes (head, parent)
4839 case_node_ptr *head;
4840 case_node_ptr parent;
4842 register case_node_ptr np;
4850 register case_node_ptr *npp;
4853 /* Count the number of entries on branch. Also count the ranges. */
4857 if (!tree_int_cst_equal (np->low, np->high))
4861 cost += cost_table[TREE_INT_CST_LOW (np->high)];
4865 cost += cost_table[TREE_INT_CST_LOW (np->low)];
4873 /* Split this list if it is long enough for that to help. */
4878 /* Find the place in the list that bisects the list's total cost;
4879 here I gets half the total cost. */
4884 /* Skip nodes while their cost does not reach that amount. */
4885 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
4886 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
4887 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
4890 npp = &(*npp)->right;
4895 /* Leave this branch lopsided, but optimize left-hand
4896 side and fill in `parent' fields for right-hand side. */
4898 np->parent = parent;
4899 balance_case_nodes (&np->left, np);
4900 for (; np->right; np = np->right)
4901 np->right->parent = np;
4905 /* If there are just three nodes, split at the middle one. */
4907 npp = &(*npp)->right;
4910 /* Find the place in the list that bisects the list's total cost,
4911 where ranges count as 2.
4912 Here I gets half the total cost. */
4913 i = (i + ranges + 1) / 2;
4916 /* Skip nodes while their cost does not reach that amount. */
4917 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
4922 npp = &(*npp)->right;
4927 np->parent = parent;
4930 /* Optimize each of the two split parts. */
4931 balance_case_nodes (&np->left, np);
4932 balance_case_nodes (&np->right, np);
4936 /* Else leave this branch as one level,
4937 but fill in `parent' fields. */
4939 np->parent = parent;
4940 for (; np->right; np = np->right)
4941 np->right->parent = np;
4946 /* Search the parent sections of the case node tree
4947 to see if a test for the lower bound of NODE would be redundant.
4948 INDEX_TYPE is the type of the index expression.
4950 The instructions to generate the case decision tree are
4951 output in the same order as nodes are processed so it is
4952 known that if a parent node checks the range of the current
4953 node minus one, then the current node is bounded at its lower
4954 span. Thus the test would be redundant. */
4957 node_has_low_bound (node, index_type)
4962 case_node_ptr pnode;
4964 /* If the lower bound of this node is the lowest value in the index type,
4965 we need not test it. */
4967 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
4970 /* If this node has a left branch, the value at the left must be less
4971 than that at this node, so it cannot be bounded at the bottom and
4972 we need not bother testing any further. */
4977 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
4978 node->low, integer_one_node));
4980 /* If the subtraction above overflowed, we can't verify anything.
4981 Otherwise, look for a parent that tests our value - 1. */
4983 if (! tree_int_cst_lt (low_minus_one, node->low))
4986 for (pnode = node->parent; pnode; pnode = pnode->parent)
4987 if (tree_int_cst_equal (low_minus_one, pnode->high))
4993 /* Search the parent sections of the case node tree
4994 to see if a test for the upper bound of NODE would be redundant.
4995 INDEX_TYPE is the type of the index expression.
4997 The instructions to generate the case decision tree are
4998 output in the same order as nodes are processed so it is
4999 known that if a parent node checks the range of the current
5000 node plus one, then the current node is bounded at its upper
5001 span. Thus the test would be redundant. */
5004 node_has_high_bound (node, index_type)
5009 case_node_ptr pnode;
5011 /* If the upper bound of this node is the highest value in the type
5012 of the index expression, we need not test against it. */
5014 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5017 /* If this node has a right branch, the value at the right must be greater
5018 than that at this node, so it cannot be bounded at the top and
5019 we need not bother testing any further. */
5024 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5025 node->high, integer_one_node));
5027 /* If the addition above overflowed, we can't verify anything.
5028 Otherwise, look for a parent that tests our value + 1. */
5030 if (! tree_int_cst_lt (node->high, high_plus_one))
5033 for (pnode = node->parent; pnode; pnode = pnode->parent)
5034 if (tree_int_cst_equal (high_plus_one, pnode->low))
5040 /* Search the parent sections of the
5041 case node tree to see if both tests for the upper and lower
5042 bounds of NODE would be redundant. */
5045 node_is_bounded (node, index_type)
5049 return (node_has_low_bound (node, index_type)
5050 && node_has_high_bound (node, index_type));
5053 /* Emit an unconditional jump to LABEL unless it would be dead code. */
5056 emit_jump_if_reachable (label)
5059 if (GET_CODE (get_last_insn ()) != BARRIER)
5063 /* Emit step-by-step code to select a case for the value of INDEX.
5064 The thus generated decision tree follows the form of the
5065 case-node binary tree NODE, whose nodes represent test conditions.
5066 INDEX_TYPE is the type of the index of the switch.
5068 Care is taken to prune redundant tests from the decision tree
5069 by detecting any boundary conditions already checked by
5070 emitted rtx. (See node_has_high_bound, node_has_low_bound
5071 and node_is_bounded, above.)
5073 Where the test conditions can be shown to be redundant we emit
5074 an unconditional jump to the target code. As a further
5075 optimization, the subordinates of a tree node are examined to
5076 check for bounded nodes. In this case conditional and/or
5077 unconditional jumps as a result of the boundary check for the
5078 current node are arranged to target the subordinates' associated
5079 code for out-of-bound conditions on the current node.
5081 We can assume that when control reaches the code generated here,
5082 the index value has already been compared with the parents
5083 of this node, and determined to be on the same side of each parent
5084 as this node is. Thus, if this node tests for the value 51,
5085 and a parent tested for 52, we don't need to consider
5086 the possibility of a value greater than 51. If another parent
5087 tests for the value 50, then this node need not test anything. */
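/* As a sketch: a single-valued node for 51 whose parents have already
   tested 50 and 52 is fully bounded and emits just an unconditional
   jump to its label, while an unbounded leaf must first emit a compare
   and conditional branch to DEFAULT_LABEL for each untested bound.  */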
5090 emit_case_nodes (index, node, default_label, index_type)
5096 /* If INDEX has an unsigned type, we must make unsigned branches. */
5097 int unsignedp = TREE_UNSIGNED (index_type);
5098 typedef rtx rtx_function ();
5099 rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
5100 rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
5101 rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
5102 rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
5103 enum machine_mode mode = GET_MODE (index);
5105 /* See if our parents have already tested everything for us.
5106 If they have, emit an unconditional jump for this node. */
5107 if (node_is_bounded (node, index_type))
5108 emit_jump (label_rtx (node->code_label));
5110 else if (tree_int_cst_equal (node->low, node->high))
5112 /* Node is single valued. First see if the index expression matches
5113 this node and then check our children, if any. */
5115 do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
5116 label_rtx (node->code_label), unsignedp);
5118 if (node->right != 0 && node->left != 0)
5120 /* This node has children on both sides.
5121 Dispatch to one side or the other
5122 by comparing the index value with this node's value.
5123 If one subtree is bounded, check that one first,
5124 so we can avoid real branches in the tree. */
5126 if (node_is_bounded (node->right, index_type))
5128 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5130 GT, NULL_RTX, mode, unsignedp, 0);
5132 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
5133 emit_case_nodes (index, node->left, default_label, index_type);
5136 else if (node_is_bounded (node->left, index_type))
5138 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5140 LT, NULL_RTX, mode, unsignedp, 0);
5141 emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
5142 emit_case_nodes (index, node->right, default_label, index_type);
5147 /* Neither node is bounded. First distinguish the two sides;
5148 then emit the code for one side at a time. */
5151 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5153 /* See if the value is on the right. */
5154 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5156 GT, NULL_RTX, mode, unsignedp, 0);
5157 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
5159 /* Value must be on the left.
5160 Handle the left-hand subtree. */
5161 emit_case_nodes (index, node->left, default_label, index_type);
5162 /* If left-hand subtree does nothing, go to default. */
5164 emit_jump_if_reachable (default_label);
5166 /* Code branches here for the right-hand subtree. */
5167 expand_label (test_label);
5168 emit_case_nodes (index, node->right, default_label, index_type);
5172 else if (node->right != 0 && node->left == 0)
5174 /* Here we have a right child but no left, so we issue a conditional
5175 branch to default and process the right child.
5177 Omit the conditional branch to default if it would avoid only one
5178 right child; it costs too much space to save so little time. */
5180 if (node->right->right || node->right->left
5181 || !tree_int_cst_equal (node->right->low, node->right->high))
5183 if (!node_has_low_bound (node, index_type))
5185 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5187 LT, NULL_RTX, mode, unsignedp, 0);
5188 emit_jump_insn ((*gen_blt_pat) (default_label));
5191 emit_case_nodes (index, node->right, default_label, index_type);
5194 /* We cannot process node->right normally
5195 since we haven't ruled out the numbers less than
5196 this node's value. So handle node->right explicitly. */
5197 do_jump_if_equal (index,
5198 expand_expr (node->right->low, NULL_RTX,
5200 label_rtx (node->right->code_label), unsignedp);
5203 else if (node->right == 0 && node->left != 0)
5205 /* Just one subtree, on the left. */
5207 #if 0 /* The following code and comment were formerly part
5208 of the condition here, but they didn't work
5209 and I don't understand what the idea was. -- rms. */
5210 /* If our "most probable entry" is less probable
5211 than the default label, emit a jump to
5212 the default label using condition codes
5213 already lying around. With no right branch,
5214 a branch-greater-than will get us to the default
5217 && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
5220 if (node->left->left || node->left->right
5221 || !tree_int_cst_equal (node->left->low, node->left->high))
5223 if (!node_has_high_bound (node, index_type))
5225 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5227 GT, NULL_RTX, mode, unsignedp, 0);
5228 emit_jump_insn ((*gen_bgt_pat) (default_label));
5231 emit_case_nodes (index, node->left, default_label, index_type);
5234 /* We cannot process node->left normally
5235 since we haven't ruled out the numbers greater than
5236 this node's value. So handle node->left explicitly. */
5237 do_jump_if_equal (index,
5238 expand_expr (node->left->low, NULL_RTX,
5240 label_rtx (node->left->code_label), unsignedp);
5245 /* Node is a range. These cases are very similar to those for a single
5246 value, except that we do not start by testing whether this node
5247 is the one to branch to. */
5249 if (node->right != 0 && node->left != 0)
5251 /* Node has subtrees on both sides.
5252 If the right-hand subtree is bounded,
5253 test for it first, since we can go straight there.
5254 Otherwise, we need to make a branch in the control structure,
5255 then handle the two subtrees. */
5256 tree test_label = 0;
5258 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5260 GT, NULL_RTX, mode, unsignedp, 0);
5262 if (node_is_bounded (node->right, index_type))
5263 /* Right hand node is fully bounded so we can eliminate any
5264 testing and branch directly to the target code. */
5265 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
5268 /* Right hand node requires testing.
5269 Branch to a label where we will handle it later. */
5271 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5272 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
5275 /* Value belongs to this node or to the left-hand subtree. */
5277 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
5278 GE, NULL_RTX, mode, unsignedp, 0);
5279 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
5281 /* Handle the left-hand subtree. */
5282 emit_case_nodes (index, node->left, default_label, index_type);
5284 /* If right node had to be handled later, do that now. */
5288 /* If the left-hand subtree fell through,
5289 don't let it fall into the right-hand subtree. */
5290 emit_jump_if_reachable (default_label);
5292 expand_label (test_label);
5293 emit_case_nodes (index, node->right, default_label, index_type);
5297 else if (node->right != 0 && node->left == 0)
5299 /* Deal with values to the left of this node,
5300 if they are possible. */
5301 if (!node_has_low_bound (node, index_type))
5303 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
5305 LT, NULL_RTX, mode, unsignedp, 0);
5306 emit_jump_insn ((*gen_blt_pat) (default_label));
5309 /* Value belongs to this node or to the right-hand subtree. */
5311 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5313 LE, NULL_RTX, mode, unsignedp, 0);
5314 emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));
5316 emit_case_nodes (index, node->right, default_label, index_type);
5319 else if (node->right == 0 && node->left != 0)
5321 /* Deal with values to the right of this node,
5322 if they are possible. */
5323 if (!node_has_high_bound (node, index_type))
5325 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5327 GT, NULL_RTX, mode, unsignedp, 0);
5328 emit_jump_insn ((*gen_bgt_pat) (default_label));
5331 /* Value belongs to this node or to the left-hand subtree. */
5333 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
5334 GE, NULL_RTX, mode, unsignedp, 0);
5335 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
5337 emit_case_nodes (index, node->left, default_label, index_type);
5342 /* Node has no children so we check low and high bounds to remove
5343 redundant tests. At most one of the bounds can exist,
5344 since otherwise this node is bounded--a case tested already. */
5346 if (!node_has_high_bound (node, index_type))
5348 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5350 GT, NULL_RTX, mode, unsignedp, 0);
5351 emit_jump_insn ((*gen_bgt_pat) (default_label));
5354 if (!node_has_low_bound (node, index_type))
5356 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
5358 LT, NULL_RTX, mode, unsignedp, 0);
5359 emit_jump_insn ((*gen_blt_pat) (default_label));
5362 emit_jump (label_rtx (node->code_label));
5367 /* These routines are used by the loop unrolling code. They copy BLOCK trees
5368 so that the debugging info will be correct for the unrolled loop. */
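/* A minimal sketch of the intended calling sequence (the caller is
   assumed to be the loop optimizer): invoke find_loop_tree_blocks ()
   before any insns are copied, then unroll_block_trees () afterward
   to rebuild the BLOCK tree from the remaining block notes.  */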
5370 /* Indexed by block number, contains a pointer to the N'th block node. */
5372 static tree *block_vector;
5375 find_loop_tree_blocks ()
5377 tree block = DECL_INITIAL (current_function_decl);
5379 /* The first block is for the function body, and does not have
5380 corresponding block notes. Don't include it in the block vector. */
5381 block = BLOCK_SUBBLOCKS (block);
5383 block_vector = identify_blocks (block, get_insns ());
5387 unroll_block_trees ()
5389 tree block = DECL_INITIAL (current_function_decl);
5391 reorder_blocks (block_vector, block, get_insns ());