1 /* Expands front end tree to back end RTL for GCC
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 It also creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
27 The functions whose names start with `expand_' are called by the
28 parser to generate RTL instructions for various kinds of constructs.
30 Some control and binding constructs require calling several such
31 functions at different times. For example, a simple if-then
32 is expanded by calling `expand_start_cond' (with the condition-expression
33 as argument) before parsing the then-clause and calling `expand_end_cond'
34 after parsing the then-clause. */
38 #include "coretypes.h"
47 #include "insn-config.h"
50 #include "hard-reg-set.h"
57 #include "langhooks.h"
63 /* Functions and data structures for expanding case statements. */
65 /* Case label structure, used to hold info on labels within case
66 statements. We handle "range" labels; for a single-value label
67 as in C, the high and low limits are the same.
69 An AVL tree of case nodes is initially created, and later transformed
70 to a list linked via the RIGHT fields in the nodes. Nodes with
71 higher case values are later in the list.
73 Switch statements can be output in one of two forms. A branch table
74 is used if there are more than a few labels and the labels are dense
75 within the range between the smallest and largest case value. If a
76 branch table is used, no further manipulations are done with the case
79 The alternative to the use of a branch table is to generate a series
80 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
81 and PARENT fields to hold a binary tree. Initially the tree is
82 totally unbalanced, with everything on the right. We balance the tree
83 with nodes on the left having lower case values than the parent
84 and nodes on the right having higher values. We then output the tree
87 struct case_node GTY(())
/* One case label (or a range of labels) in a switch statement.  Per the
   overview comment above, nodes first form an AVL tree and are later
   relinked through RIGHT into a sorted list, or rebalanced into a binary
   search tree using LEFT/RIGHT/PARENT.  */
89 struct case_node *left; /* Left son in binary tree */
90 struct case_node *right; /* Right son in binary tree; also node chain */
91 struct case_node *parent; /* Parent of node in binary tree */
92 tree low; /* Lowest index value for this label */
93 tree high; /* Highest index value for this label */
94 tree code_label; /* Label to jump to when node matches */
/* Convenience typedefs used throughout the case-expansion code.  */
98 typedef struct case_node case_node;
99 typedef struct case_node *case_node_ptr;
101 /* These are used by estimate_case_costs and balance_case_nodes. */
103 /* This must be a signed type, and non-ANSI compilers lack signed char. */
104 static short cost_table_[129];
105 static int use_cost_table;
106 static int cost_table_initialized;
108 /* Special care is needed because we allow -1, but TREE_INT_CST_LOW
/* COST_TABLE(I) biases the index by 1 so that I == -1 maps to slot 0;
   the 129-entry table thus covers indices -1 through 127.  The cast to
   unsigned HOST_WIDE_INT matches the type produced by TREE_INT_CST_LOW.  */
110 #define COST_TABLE(I) cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
112 /* Stack of control and binding constructs we are currently inside.
114 These constructs begin when you call `expand_start_WHATEVER'
115 and end when you call `expand_end_WHATEVER'. This stack records
116 info about how the construct began that tells the end-function
117 what to do. It also may provide information about the construct
118 to alter the behavior of other constructs within the body.
119 For example, they may affect the behavior of C `break' and `continue'.
121 Each construct gets one `struct nesting' object.
122 All of these objects are chained through the `all' field.
123 `nesting_stack' points to the first object (innermost construct).
124 The position of an entry on `nesting_stack' is in its `depth' field.
126 Each type of construct has its own individual stack.
127 For example, loops have `cond_stack'. Each object points to the
128 next object of the same type through the `next' field.
130 Some constructs are visible to `break' exit-statements and others
131 are not. Which constructs are visible depends on the language.
132 Therefore, the data structure allows each construct to be visible
133 or not, according to the args given when the construct is started.
134 The construct is visible if the `exit_label' field is non-null.
135 In that case, the value should be a CODE_LABEL rtx. */
137 struct nesting GTY(())
/* One entry on the nesting stack described in the comment above: a
   control/binding construct currently being expanded.  The variant
   members below are discriminated by the GTY "%1.desc" tag.  */
140 struct nesting *next;
150 /* For conds (if-then and if-then-else statements). */
153 /* Label for the end of the if construct.
154 There is none if EXITFLAG was not set
155 and no `else' has been seen yet. */
157 /* Label for the end of this alternative.
158 This may be the end of the if or the next else/elseif. */
160 } GTY ((tag ("COND_NESTING"))) cond;
161 /* For variable binding contours. */
164 /* Sequence number of this binding contour within the function,
165 in order of entry. */
166 int block_start_count;
167 /* The NOTE that starts this contour.
168 Used by expand_goto to check whether the destination
169 is within each contour or not. */
171 /* The saved target_temp_slot_level from our outer block.
172 We may reset target_temp_slot_level to be the level of
173 this block, if that is done, target_temp_slot_level
174 reverts to the saved target_temp_slot_level at the very
176 int block_target_temp_slot_level;
177 /* True if we are currently emitting insns in an area of
178 output code that is controlled by a conditional
179 expression. This is used by the cleanup handling code to
180 generate conditional cleanup actions. */
181 int conditional_code;
182 /* A place to move the start of the exception region for any
183 of the conditional cleanups, must be at the end or after
184 the start of the last unconditional cleanup, and before any
185 conditional branch points. */
186 rtx last_unconditional_cleanup;
187 } GTY ((tag ("BLOCK_NESTING"))) block;
188 /* For switch (C) or case (Pascal) statements. */
191 /* The insn after which the case dispatch should finally
192 be emitted. Zero for a dummy. */
194 /* A list of case labels; it is first built as an AVL tree.
195 During expand_end_case, this is converted to a list, and may be
196 rearranged into a nearly balanced binary tree. */
197 struct case_node *case_list;
198 /* Label to jump to if no case matches. */
200 /* The expression to be dispatched on. */
202 /* Type that INDEX_EXPR should be converted to. */
204 /* Name of this kind of statement, for warnings. */
205 const char *printname;
206 /* Used to save no_line_numbers till we see the first case label.
207 We set this to -1 when we see the first case label in this
209 int line_number_status;
210 } GTY ((tag ("CASE_NESTING"))) case_stmt;
211 } GTY ((desc ("%1.desc"))) data;
214 /* Allocate and return a new `struct nesting'. */
/* Uses the GC allocator so nesting objects are tracked by ggc; the
   result is not zero-initialized.  */
216 #define ALLOC_NESTING() ggc_alloc (sizeof (struct nesting))
218 /* Pop the nesting stack element by element until we pop off
219 the element which is at the top of STACK.
220 Update all the other stacks, popping off elements from them
221 as we pop them from nesting_stack. */
/* NOTE: declares a local named `this', so this macro is not safe to
   compile as C++.  Each per-type stack (cond/block/case) is advanced
   only when its top coincides with the nesting_stack top being popped.  */
223 #define POPSTACK(STACK) \
224 do { struct nesting *target = STACK; \
225 struct nesting *this; \
226 do { this = nesting_stack; \
227 if (cond_stack == this) \
228 cond_stack = cond_stack->next; \
229 if (block_stack == this) \
230 block_stack = block_stack->next; \
231 if (case_stack == this) \
232 case_stack = case_stack->next; \
233 nesting_depth = nesting_stack->depth - 1; \
234 nesting_stack = this->all; } \
235 while (this != target); } while (0)
237 /* In some cases it is impossible to generate code for a forward goto
238 until the label definition is seen. This happens when it may be necessary
239 for the goto to reset the stack pointer: we don't yet know how to do that.
240 So expand_goto puts an entry on this fixup list.
241 Each time a binding contour that resets the stack is exited,
243 If the target label has now been defined, we can insert the proper code. */
245 struct goto_fixup GTY(())
/* One pending forward-goto fixup, chained through NEXT.  Records enough
   context (insn position, target decl, block, label count) to finish
   expanding the goto once the label's definition is seen.  */
247 /* Points to following fixup. */
248 struct goto_fixup *next;
249 /* Points to the insn before the jump insn.
250 If more code must be inserted, it goes after this insn. */
252 /* The LABEL_DECL that this jump is jumping to, or 0
253 for break, continue or return. */
255 /* The BLOCK for the place where this goto was found. */
257 /* The CODE_LABEL rtx that this is jumping to. */
259 /* Number of binding contours started in current function
260 before the label reference. */
261 int block_start_count;
264 struct stmt_status GTY(())
/* Per-function statement-expansion state, reached via cfun->stmt and
   accessed through the block_stack/cond_stack/... macros below.  */
266 /* Chain of all pending binding contours. */
267 struct nesting * x_block_stack;
269 /* If any new stacks are added here, add them to POPSTACKS too. */
271 /* Chain of all pending conditional statements. */
272 struct nesting * x_cond_stack;
274 /* Chain of all pending case or switch statements. */
275 struct nesting * x_case_stack;
277 /* Separate chain including all of the above,
278 chained through the `all' field. */
279 struct nesting * x_nesting_stack;
281 /* Number of entries on nesting_stack now. */
284 /* Number of binding contours started so far in this function. */
285 int x_block_start_count;
287 /* Location of last line-number note, whether we actually
288 emitted it or not. */
289 location_t x_emit_locus;
/* Chain of pending forward-goto fixups; see struct goto_fixup above.  */
291 struct goto_fixup *x_goto_fixup_chain;
/* Convenience accessors for the current function's statement state
   (struct stmt_status above); all indirect through cfun->stmt.  */
294 #define block_stack (cfun->stmt->x_block_stack)
295 #define cond_stack (cfun->stmt->x_cond_stack)
296 #define case_stack (cfun->stmt->x_case_stack)
297 #define nesting_stack (cfun->stmt->x_nesting_stack)
298 #define nesting_depth (cfun->stmt->x_nesting_depth)
299 #define current_block_start_count (cfun->stmt->x_block_start_count)
300 #define emit_locus (cfun->stmt->x_emit_locus)
301 #define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)
303 /* Nonzero if we are using EH to handle cleanups. */
/* Set (to 1) by using_eh_for_cleanups () below; never cleared here.  */
304 int using_eh_for_cleanups_p = 0;
/* Forward declarations of file-local helpers, mostly for asm-operand
   validation and switch/case expansion.  */
306 static int n_occurrences (int, const char *);
307 static bool decl_conflicts_with_clobbers_p (tree, const HARD_REG_SET);
308 static void expand_nl_goto_receiver (void);
309 static bool check_operand_nalternatives (tree, tree);
310 static bool check_unique_operand_names (tree, tree);
311 static char *resolve_operand_name_1 (char *, tree, tree);
312 static void expand_null_return_1 (void);
313 static enum br_predictor return_prediction (rtx);
314 static rtx shift_return_value (rtx);
315 static void expand_value_return (rtx);
316 static void do_jump_if_equal (rtx, rtx, rtx, int);
317 static int estimate_case_costs (case_node_ptr);
318 static bool same_case_target_p (rtx, rtx);
319 static void strip_default_case_nodes (case_node_ptr *, rtx);
320 static bool lshift_cheap_p (void);
321 static int case_bit_test_cmp (const void *, const void *);
322 static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
323 static void group_case_nodes (case_node_ptr);
324 static void balance_case_nodes (case_node_ptr *, case_node_ptr);
325 static int node_has_low_bound (case_node_ptr, tree);
326 static int node_has_high_bound (case_node_ptr, tree);
327 static int node_is_bounded (case_node_ptr, tree);
328 static void emit_jump_if_reachable (rtx);
329 static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
330 static struct case_node *case_tree2list (case_node *, case_node *);
/* Request that cleanups be implemented via the EH machinery: simply
   latches the using_eh_for_cleanups_p flag defined above.  */
333 using_eh_for_cleanups (void)
335 using_eh_for_cleanups_p = 1;
/* Allocate (zeroed, GC-tracked) the per-function statement state so the
   block_stack/cond_stack/... accessor macros become usable.  */
339 init_stmt_for_function (void)
341 cfun->stmt = ggc_alloc_cleared (sizeof (struct stmt_status));
344 /* Record the current file and line. Called from emit_line_note. */
347 set_file_and_line_for_stmt (location_t location)
349 /* If we're outputting an inline function, and we add a line note,
350 there may be no CFUN->STMT information. So, there's no need to
/* Remember the location in the per-function state (emit_locus macro).  */
353 emit_locus = location;
356 /* Emit a no-op instruction. */
363 last_insn = get_last_insn ();
/* Only emit the nop when the last insn is a bare label, or a note with
   no real insn before it — presumably so such positions are not left
   empty; TODO(review): confirm intent against callers.  */
365 && (GET_CODE (last_insn) == CODE_LABEL
366 || (GET_CODE (last_insn) == NOTE
367 && prev_real_insn (last_insn) == 0)))
368 emit_insn (gen_nop ());
371 /* Return the rtx-label that corresponds to a LABEL_DECL,
372 creating it if necessary. */
375 label_rtx (tree label)
377 if (TREE_CODE (label) != LABEL_DECL)
/* Lazily create and cache the CODE_LABEL in the decl's DECL_RTL.  */
380 if (!DECL_RTL_SET_P (label))
382 rtx r = gen_label_rtx ();
383 SET_DECL_RTL (label, r);
/* Forced and nonlocal labels are marked preserved — presumably so
   later passes do not delete them; TODO(review): confirm.  */
384 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
385 LABEL_PRESERVE_P (r) = 1;
388 return DECL_RTL (label);
391 /* As above, but also put it on the forced-reference list of the
392 function that contains it. */
394 force_label_rtx (tree label)
396 rtx ref = label_rtx (label);
397 tree function = decl_function_context (label);
/* When the label belongs to a different (containing) function, fetch
   that function's data so the reference is recorded there.  */
403 if (function != current_function_decl)
404 p = find_function_data (function);
/* Prepend REF to the owning function's forced-labels list.  */
408 p->expr->x_forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref,
409 p->expr->x_forced_labels);
413 /* Add an unconditional jump to LABEL as the next sequential instruction. */
416 emit_jump (rtx label)
/* Flush any pending stack adjustment first so the jump does not skip it.  */
418 do_pending_stack_adjust ();
419 emit_jump_insn (gen_jump (label));
423 /* Emit code to jump to the address
424 specified by the pointer expression EXP. */
427 expand_computed_goto (tree exp)
429 rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
/* Ensure the target address has the pointer mode before jumping.  */
431 x = convert_memory_address (Pmode, x);
434 do_pending_stack_adjust ();
435 emit_indirect_jump (x);
438 /* Handle goto statements and the labels that they can go to. */
440 /* Specify the location in the RTL code of a label LABEL,
441 which is a LABEL_DECL tree node.
443 This is used for the kind of label that the user can jump to with a
444 goto statement, and for alternatives of a switch or case statement.
445 RTL labels generated for loops and conditionals don't go through here;
446 they are generated directly at the RTL level, by other functions below.
448 Note that this has nothing to do with defining label *names*.
449 Languages vary in how they do that and what that even means. */
452 expand_label (tree label)
454 rtx label_r = label_rtx (label);
456 do_pending_stack_adjust ();
457 emit_label (label_r);
/* Propagate the source-level name onto the RTL label, if any.  */
458 if (DECL_NAME (label))
459 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
/* A nonlocal label also needs receiver code and registration on the
   nonlocal-goto handler list.  */
461 if (DECL_NONLOCAL (label))
463 expand_nl_goto_receiver ();
464 nonlocal_goto_handler_labels
465 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
466 nonlocal_goto_handler_labels);
469 if (FORCED_LABEL (label))
470 forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);
472 if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
473 maybe_set_first_label_num (label_r);
476 /* Generate RTL code for a `goto' statement with target label LABEL.
477 LABEL should be a LABEL_DECL tree node that was or will later be
478 defined with `expand_label'. */
481 expand_goto (tree label)
483 #ifdef ENABLE_CHECKING
484 /* Check for a nonlocal goto to a containing function. Should have
485 gotten translated to __builtin_nonlocal_goto. */
486 tree context = decl_function_context (label);
487 if (context != 0 && context != current_function_decl)
/* Ordinary local goto: just emit the unconditional jump.  */
491 emit_jump (label_rtx (label));
494 /* Return the number of times character C occurs in string S. */
/* C is passed as int (character value); S must be NUL-terminated.  */
496 n_occurrences (int c, const char *s)
504 /* Generate RTL for an asm statement (explicit assembler code).
505 STRING is a STRING_CST node containing the assembler code text,
506 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
507 insn is volatile; don't optimize it. */
510 expand_asm (tree string, int vol)
/* Unwrap an ADDR_EXPR to get at the underlying STRING_CST.  */
514 if (TREE_CODE (string) == ADDR_EXPR)
515 string = TREE_OPERAND (string, 0);
517 body = gen_rtx_ASM_INPUT (VOIDmode, TREE_STRING_POINTER (string));
/* Volatility is carried on the ASM_INPUT rtx itself.  */
519 MEM_VOLATILE_P (body) = vol;
524 /* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
525 OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
526 inputs and NOUTPUTS outputs to this extended-asm. Upon return,
527 *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
528 memory operand. Similarly, *ALLOWS_REG will be TRUE iff the
529 constraint allows the use of a register operand. And, *IS_INOUT
530 will be true if the operand is read-write, i.e., if it is used as
531 an input as well as an output. If *CONSTRAINT_P is not in
532 canonical form, it will be made canonical. (Note that `+' will be
533 replaced with `=' as part of this process.)
535 Returns TRUE if all went well; FALSE if an error occurred. */
538 parse_output_constraint (const char **constraint_p, int operand_num,
539 int ninputs, int noutputs, bool *allows_mem,
540 bool *allows_reg, bool *is_inout)
542 const char *constraint = *constraint_p;
545 /* Assume the constraint doesn't allow the use of either a register
550 /* Allow the `=' or `+' to not be at the beginning of the string,
551 since it wasn't explicitly documented that way, and there is a
552 large body of code that puts it last. Swap the character to
553 the front, so as not to uglify any place else. */
554 p = strchr (constraint, '=');
556 p = strchr (constraint, '+');
558 /* If the string doesn't contain an `=', issue an error
562 error ("output operand constraint lacks `='");
566 /* If the constraint begins with `+', then the operand is both read
567 from and written to. */
568 *is_inout = (*p == '+');
570 /* Canonicalize the output constraint so that it begins with `='. */
/* NOTE(review): `is_inout' here is a pointer and therefore always
   non-null, so this condition is always true; the intended test is
   probably `*is_inout'.  Verify before changing — canonicalizing
   unconditionally may be harmless but wastes a GC string copy.  */
571 if (p != constraint || is_inout)
574 size_t c_len = strlen (constraint);
577 warning ("output constraint `%c' for operand %d is not at the beginning",
580 /* Make a copy of the constraint. */
581 buf = alloca (c_len + 1);
582 strcpy (buf, constraint);
583 /* Swap the first character and the `=' or `+'. */
584 buf[p - constraint] = buf[0];
585 /* Make sure the first character is an `='. (Until we do this,
586 it might be a `+'.) */
588 /* Replace the constraint with the canonicalized string. */
589 *constraint_p = ggc_alloc_string (buf, c_len);
590 constraint = *constraint_p;
593 /* Loop through the constraint string. */
/* CONSTRAINT_LEN handles multi-character constraint names.  */
594 for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
599 error ("operand constraint contains incorrectly positioned '+' or '='");
603 if (operand_num + 1 == ninputs + noutputs)
605 error ("`%%' constraint used with last operand");
610 case 'V': case 'm': case 'o':
614 case '?': case '!': case '*': case '&': case '#':
615 case 'E': case 'F': case 'G': case 'H':
616 case 's': case 'i': case 'n':
617 case 'I': case 'J': case 'K': case 'L': case 'M':
618 case 'N': case 'O': case 'P': case ',':
/* Digit constraints (matching operands) are not valid in outputs.  */
621 case '0': case '1': case '2': case '3': case '4':
622 case '5': case '6': case '7': case '8': case '9':
624 error ("matching constraint not valid in output operand")
628 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
629 excepting those that expand_call created. So match memory
/* Letter constraints: classify via the target's constraint macros.  */
646 if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
648 #ifdef EXTRA_CONSTRAINT_STR
649 else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
651 else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
655 /* Otherwise we can't assume anything about the nature of
656 the constraint except that it isn't purely registers.
657 Treat it like "g" and hope for the best. */
668 /* Similar, but for input constraints. */
/* Parses the INPUT_NUMth input constraint; on return *ALLOWS_MEM and
   *ALLOWS_REG say whether a memory/register operand is acceptable.
   For a matching (digit) constraint the referenced output constraint
   in CONSTRAINTS may be substituted in.  Returns false on error.  */
671 parse_input_constraint (const char **constraint_p, int input_num,
672 int ninputs, int noutputs, int ninout,
673 const char * const * constraints,
674 bool *allows_mem, bool *allows_reg)
676 const char *constraint = *constraint_p;
677 const char *orig_constraint = constraint;
678 size_t c_len = strlen (constraint);
680 bool saw_match = false;
682 /* Assume the constraint doesn't allow the use of either
683 a register or memory. */
687 /* Make sure constraint has neither `=', `+', nor '&'. */
689 for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
690 switch (constraint[j])
/* Output-only characters: errors unless we are scanning a substituted
   matching constraint (constraint != orig_constraint).  */
692 case '+': case '=': case '&':
693 if (constraint == orig_constraint)
695 error ("input operand constraint contains `%c'", constraint[j]);
701 if (constraint == orig_constraint
702 && input_num + 1 == ninputs - ninout)
704 error ("`%%' constraint used with last operand");
709 case 'V': case 'm': case 'o':
714 case '?': case '!': case '*': case '#':
715 case 'E': case 'F': case 'G': case 'H':
716 case 's': case 'i': case 'n':
717 case 'I': case 'J': case 'K': case 'L': case 'M':
718 case 'N': case 'O': case 'P': case ',':
721 /* Whether or not a numeric constraint allows a register is
722 decided by the matching constraint, and so there is no need
723 to do anything special with them. We must handle them in
724 the default case, so that we don't unnecessarily force
725 operands to memory. */
726 case '0': case '1': case '2': case '3': case '4':
727 case '5': case '6': case '7': case '8': case '9':
/* Decode the referenced output operand number.  */
734 match = strtoul (constraint + j, &end, 10);
735 if (match >= (unsigned long) noutputs)
737 error ("matching constraint references invalid operand number");
741 /* Try and find the real constraint for this dup. Only do this
742 if the matching constraint is the only alternative. */
744 && (j == 0 || (j == 1 && constraint[0] == '%')))
746 constraint = constraints[match];
747 *constraint_p = constraint;
748 c_len = strlen (constraint);
750 /* ??? At the end of the loop, we will skip the first part of
751 the matched constraint. This assumes not only that the
752 other constraint is an output constraint, but also that
753 the '=' or '+' come first. */
757 j = end - constraint;
758 /* Anticipate increment at end of loop. */
773 if (! ISALPHA (constraint[j]))
775 error ("invalid punctuation `%c' in constraint", constraint[j]);
778 if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
781 #ifdef EXTRA_CONSTRAINT_STR
782 else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
784 else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
788 /* Otherwise we can't assume anything about the nature of
789 the constraint except that it isn't purely registers.
790 Treat it like "g" and hope for the best. */
/* A matching constraint that ends up register-less is suspicious.  */
798 if (saw_match && !*allows_reg)
799 warning ("matching constraint does not allow a register");
804 /* INPUT is one of the input operands from EXPR, an ASM_EXPR. Returns true
805 if it is an operand which must be passed in memory (i.e. an "m"
806 constraint), false otherwise. */
809 asm_op_is_mem_input (tree input, tree expr)
811 const char *constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (input)));
812 tree outputs = ASM_OUTPUTS (expr);
813 int noutputs = list_length (outputs);
/* Output constraints are needed so matching ("0", "1", ...) input
   constraints can be resolved by parse_input_constraint.  */
814 const char **constraints
815 = (const char **) alloca ((noutputs) * sizeof (const char *));
817 bool allows_mem, allows_reg;
820 /* Collect output constraints. */
821 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
822 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
824 /* We pass 0 for input_num, ninputs and ninout; they are only used for
825 error checking which will be done at expand time. */
826 parse_input_constraint (&constraint, 0, 0, noutputs, 0, constraints,
827 &allows_mem, &allows_reg);
/* Memory-only means: memory allowed, register not.  */
828 return (!allows_reg && allows_mem);
831 /* Check for overlap between registers marked in CLOBBERED_REGS and
832 anything inappropriate in DECL. Emit error and return TRUE for error,
836 decl_conflicts_with_clobbers_p (tree decl, const HARD_REG_SET clobbered_regs)
838 /* Conflicts between asm-declared register variables and the clobber
839 list are not allowed. */
/* Only hard-register variables (register-qualified VAR/PARM decls whose
   DECL_RTL is a hard reg) can conflict with the clobber list.  */
840 if ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
841 && DECL_REGISTER (decl)
842 && REG_P (DECL_RTL (decl))
843 && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
845 rtx reg = DECL_RTL (decl);
/* Check every hard register the variable occupies, since a wide mode
   may span several (hard_regno_nregs).  */
848 for (regno = REGNO (reg);
850 + hard_regno_nregs[REGNO (reg)][GET_MODE (reg)]);
852 if (TEST_HARD_REG_BIT (clobbered_regs, regno))
854 error ("asm-specifier for variable `%s' conflicts with asm clobber list",
855 IDENTIFIER_POINTER (DECL_NAME (decl)));
857 /* Reset registerness to stop multiple errors emitted for a
859 DECL_REGISTER (decl) = 0;
866 /* Generate RTL for an asm statement with arguments.
867 STRING is the instruction template.
868 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
869 Each output or input has an expression in the TREE_VALUE and
870 and a tree list in TREE_PURPOSE which in turn contains a constraint
871 name in TREE_VALUE (or NULL_TREE) and a constraint string
873 CLOBBERS is a list of STRING_CST nodes each naming a hard register
874 that is clobbered by this insn.
876 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
877 Some elements of OUTPUTS may be replaced with trees representing temporary
878 values. The caller should copy those temporary values to the originally
881 VOL nonzero means the insn is volatile; don't optimize it. */
884 expand_asm_operands (tree string, tree outputs, tree inputs,
885 tree clobbers, int vol, location_t locus)
887 rtvec argvec, constraintvec;
889 int ninputs = list_length (inputs);
890 int noutputs = list_length (outputs);
893 HARD_REG_SET clobbered_regs;
894 int clobber_conflict_found = 0;
898 /* Vector of RTX's of evaluated output operands. */
899 rtx *output_rtx = alloca (noutputs * sizeof (rtx));
900 int *inout_opnum = alloca (noutputs * sizeof (int));
901 rtx *real_output_rtx = alloca (noutputs * sizeof (rtx));
902 enum machine_mode *inout_mode
903 = alloca (noutputs * sizeof (enum machine_mode));
904 const char **constraints
905 = alloca ((noutputs + ninputs) * sizeof (const char *));
906 int old_generating_concat_p = generating_concat_p;
908 /* An ASM with no outputs needs to be treated as volatile, for now. */
912 if (! check_operand_nalternatives (outputs, inputs))
915 string = resolve_asm_operand_names (string, outputs, inputs);
917 /* Collect constraints. */
919 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
920 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
921 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
922 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
924 /* Sometimes we wish to automatically clobber registers across an asm.
925 Case in point is when the i386 backend moved from cc0 to a hard reg --
926 maintaining source-level compatibility means automatically clobbering
927 the flags register. */
928 clobbers = targetm.md_asm_clobbers (clobbers);
930 /* Count the number of meaningful clobbered registers, ignoring what
931 we would ignore later. */
933 CLEAR_HARD_REG_SET (clobbered_regs);
934 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
936 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
938 i = decode_reg_name (regname);
939 if (i >= 0 || i == -4)
942 error ("unknown register name `%s' in `asm'", regname);
944 /* Mark clobbered registers. */
947 /* Clobbering the PIC register is an error */
948 if (i == (int) PIC_OFFSET_TABLE_REGNUM)
950 error ("PIC register `%s' clobbered in `asm'", regname);
954 SET_HARD_REG_BIT (clobbered_regs, i);
958 /* First pass over inputs and outputs checks validity and sets
959 mark_addressable if needed. */
962 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
964 tree val = TREE_VALUE (tail);
965 tree type = TREE_TYPE (val);
966 const char *constraint;
971 /* If there's an erroneous arg, emit no insn. */
972 if (type == error_mark_node)
975 /* Try to parse the output constraint. If that fails, there's
976 no point in going further. */
977 constraint = constraints[i];
978 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
979 &allows_mem, &allows_reg, &is_inout))
986 && REG_P (DECL_RTL (val))
987 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
988 lang_hooks.mark_addressable (val);
995 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
997 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1001 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
1003 bool allows_reg, allows_mem;
1004 const char *constraint;
1006 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
1007 would get VOIDmode and that could cause a crash in reload. */
1008 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1011 constraint = constraints[i + noutputs];
1012 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1013 constraints, &allows_mem, &allows_reg))
1016 if (! allows_reg && allows_mem)
1017 lang_hooks.mark_addressable (TREE_VALUE (tail));
1020 /* Second pass evaluates arguments. */
1023 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1025 tree val = TREE_VALUE (tail);
1026 tree type = TREE_TYPE (val);
1032 if (!parse_output_constraint (&constraints[i], i, ninputs,
1033 noutputs, &allows_mem, &allows_reg,
1037 /* If an output operand is not a decl or indirect ref and our constraint
1038 allows a register, make a temporary to act as an intermediate.
1039 Make the asm insn write into that, then our caller will copy it to
1040 the real output operand. Likewise for promoted variables. */
1042 generating_concat_p = 0;
1044 real_output_rtx[i] = NULL_RTX;
1045 if ((TREE_CODE (val) == INDIRECT_REF
1048 && (allows_mem || REG_P (DECL_RTL (val)))
1049 && ! (REG_P (DECL_RTL (val))
1050 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
1054 op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
1056 op = validize_mem (op);
1058 if (! allows_reg && !MEM_P (op))
1059 error ("output number %d not directly addressable", i);
1060 if ((! allows_mem && MEM_P (op))
1061 || GET_CODE (op) == CONCAT)
1063 real_output_rtx[i] = protect_from_queue (op, 1);
1064 op = gen_reg_rtx (GET_MODE (op));
1066 emit_move_insn (op, real_output_rtx[i]);
1071 op = assign_temp (type, 0, 0, 1);
1072 op = validize_mem (op);
1073 TREE_VALUE (tail) = make_tree (type, op);
1077 generating_concat_p = old_generating_concat_p;
1081 inout_mode[ninout] = TYPE_MODE (type);
1082 inout_opnum[ninout++] = i;
1085 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1086 clobber_conflict_found = 1;
1089 /* Make vectors for the expression-rtx, constraint strings,
1090 and named operands. */
1092 argvec = rtvec_alloc (ninputs);
1093 constraintvec = rtvec_alloc (ninputs);
1095 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
1096 : GET_MODE (output_rtx[0])),
1097 TREE_STRING_POINTER (string),
1098 empty_string, 0, argvec, constraintvec,
1101 MEM_VOLATILE_P (body) = vol;
1103 /* Eval the inputs and put them into ARGVEC.
1104 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1106 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
1108 bool allows_reg, allows_mem;
1109 const char *constraint;
1113 constraint = constraints[i + noutputs];
1114 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1115 constraints, &allows_mem, &allows_reg))
1118 generating_concat_p = 0;
1120 val = TREE_VALUE (tail);
1121 type = TREE_TYPE (val);
1122 op = expand_expr (val, NULL_RTX, VOIDmode,
1123 (allows_mem && !allows_reg
1124 ? EXPAND_MEMORY : EXPAND_NORMAL));
1126 /* Never pass a CONCAT to an ASM. */
1127 if (GET_CODE (op) == CONCAT)
1128 op = force_reg (GET_MODE (op), op);
1129 else if (MEM_P (op))
1130 op = validize_mem (op);
1132 if (asm_operand_ok (op, constraint) <= 0)
1135 op = force_reg (TYPE_MODE (type), op);
1136 else if (!allows_mem)
1137 warning ("asm operand %d probably doesn't match constraints",
1139 else if (MEM_P (op))
1141 /* We won't recognize either volatile memory or memory
1142 with a queued address as available a memory_operand
1143 at this point. Ignore it: clearly this *is* a memory. */
1147 warning ("use of memory input without lvalue in "
1148 "asm operand %d is deprecated", i + noutputs);
1150 if (CONSTANT_P (op))
1152 rtx mem = force_const_mem (TYPE_MODE (type), op);
1154 op = validize_mem (mem);
1156 op = force_reg (TYPE_MODE (type), op);
1159 || GET_CODE (op) == SUBREG
1160 || GET_CODE (op) == CONCAT)
1162 tree qual_type = build_qualified_type (type,
1164 | TYPE_QUAL_CONST));
1165 rtx memloc = assign_temp (qual_type, 1, 1, 1);
1166 memloc = validize_mem (memloc);
1167 emit_move_insn (memloc, op);
1173 generating_concat_p = old_generating_concat_p;
1174 ASM_OPERANDS_INPUT (body, i) = op;
1176 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
1177 = gen_rtx_ASM_INPUT (TYPE_MODE (type), constraints[i + noutputs]);
1179 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1180 clobber_conflict_found = 1;
1183 /* Protect all the operands from the queue now that they have all been
1186 generating_concat_p = 0;
1188 for (i = 0; i < ninputs - ninout; i++)
1189 ASM_OPERANDS_INPUT (body, i)
1190 = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
1192 for (i = 0; i < noutputs; i++)
1193 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1195 /* For in-out operands, copy output rtx to input rtx. */
1196 for (i = 0; i < ninout; i++)
1198 int j = inout_opnum[i];
1201 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
1204 sprintf (buffer, "%d", j);
1205 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
1206 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
1209 generating_concat_p = old_generating_concat_p;
1211 /* Now, for each output, construct an rtx
1212 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
1213 ARGVEC CONSTRAINTS OPNAMES))
1214 If there is more than one, put them inside a PARALLEL. */
1216 if (noutputs == 1 && nclobbers == 0)
1218 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
1219 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1222 else if (noutputs == 0 && nclobbers == 0)
1224 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1236 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1238 /* For each output operand, store a SET. */
1239 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1241 XVECEXP (body, 0, i)
1242 = gen_rtx_SET (VOIDmode,
1244 gen_rtx_ASM_OPERANDS
1245 (GET_MODE (output_rtx[i]),
1246 TREE_STRING_POINTER (string),
1247 constraints[i], i, argvec, constraintvec,
1250 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1253 /* If there are no outputs (but there are some clobbers)
1254 store the bare ASM_OPERANDS into the PARALLEL. */
1257 XVECEXP (body, 0, i++) = obody;
1259 /* Store (clobber REG) for each clobbered register specified. */
1261 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1263 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1264 int j = decode_reg_name (regname);
1269 if (j == -3) /* `cc', which is not a register */
1272 if (j == -4) /* `memory', don't cache memory across asm */
1274 XVECEXP (body, 0, i++)
1275 = gen_rtx_CLOBBER (VOIDmode,
1278 gen_rtx_SCRATCH (VOIDmode)));
1282 /* Ignore unknown register, error already signaled. */
1286 /* Use QImode since that's guaranteed to clobber just one reg. */
1287 clobbered_reg = gen_rtx_REG (QImode, j);
1289 /* Do sanity check for overlap between clobbers and respectively
1290 input and outputs that hasn't been handled. Such overlap
1291 should have been detected and reported above. */
1292 if (!clobber_conflict_found)
1296 /* We test the old body (obody) contents to avoid tripping
1297 over the under-construction body. */
1298 for (opno = 0; opno < noutputs; opno++)
1299 if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1300 internal_error ("asm clobber conflict with output operand");
1302 for (opno = 0; opno < ninputs - ninout; opno++)
1303 if (reg_overlap_mentioned_p (clobbered_reg,
1304 ASM_OPERANDS_INPUT (obody, opno)))
1305 internal_error ("asm clobber conflict with input operand");
1308 XVECEXP (body, 0, i++)
1309 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1315 /* For any outputs that needed reloading into registers, spill them
1316 back to where they belong. */
1317 for (i = 0; i < noutputs; ++i)
1318 if (real_output_rtx[i])
1319 emit_move_insn (real_output_rtx[i], output_rtx[i]);
/* Generate RTL for an ASM_EXPR node EXP.  A plain asm (ASM_INPUT_P)
   just emits its template; an extended asm is expanded via
   expand_asm_operands and its intermediate outputs are then copied
   back into the lvalues the user named.  */
1325 expand_asm_expr (tree exp)
1331 if (ASM_INPUT_P (exp))
1333 expand_asm (ASM_STRING (exp), ASM_VOLATILE_P (exp));
1337 outputs = ASM_OUTPUTS (exp);
1338 noutputs = list_length (outputs);
1339 /* o[I] is the place that output number I should be written. */
1340 o = (tree *) alloca (noutputs * sizeof (tree));
1342 /* Record the contents of OUTPUTS before it is modified. */
1343 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1344 o[i] = TREE_VALUE (tail);
1346 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
1347 OUTPUTS some trees for where the values were actually stored. */
1348 expand_asm_operands (ASM_STRING (exp), outputs, ASM_INPUTS (exp),
1349 ASM_CLOBBERS (exp), ASM_VOLATILE_P (exp),
1352 /* Copy all the intermediate outputs into the specified outputs. */
1353 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1355 if (o[i] != TREE_VALUE (tail))
1357 expand_assignment (o[i], TREE_VALUE (tail), 0);
1360 /* Restore the original value so that it's correct the next
1361 time we expand this function. */
1362 TREE_VALUE (tail) = o[i];
1366 /* Those MODIFY_EXPRs could do autoincrements. */
1370 /* A subroutine of expand_asm_operands. Check that all operands have
1371 the same number of alternatives. Return true if so. */
1374 check_operand_nalternatives (tree outputs, tree inputs)
1376 if (outputs || inputs)
1378 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
/* Commas in a constraint string separate alternatives, so the number
   of alternatives is one more than the comma count of the first
   operand's constraint.  */
1380 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1383 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1385 error ("too many alternatives in `asm'");
/* Every remaining operand must have exactly the same number of
   alternatives as the first one.  */
1392 const char *constraint
1393 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1395 if (n_occurrences (',', constraint) != nalternatives)
1397 error ("operand constraints for `asm' differ in number of alternatives");
/* Advance through the outputs list, then continue with the inputs.  */
1401 if (TREE_CHAIN (tmp))
1402 tmp = TREE_CHAIN (tmp);
1404 tmp = next, next = 0;
1411 /* A subroutine of expand_asm_operands. Check that all operand names
1412 are unique. Return true if so. We rely on the fact that these names
1413 are identifiers, and so have been canonicalized by get_identifier,
1414 so all we need are pointer comparisons. */
1417 check_unique_operand_names (tree outputs, tree inputs)
/* Compare every output name against all later output names.  */
1421 for (i = outputs; i ; i = TREE_CHAIN (i))
1423 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1427 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1428 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
/* Compare every input name against all later inputs and against every
   output.  */
1432 for (i = inputs; i ; i = TREE_CHAIN (i))
1434 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1438 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1439 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1441 for (j = outputs; j ; j = TREE_CHAIN (j))
1442 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
/* A duplicate was found; diagnose it.  */
1449 error ("duplicate asm operand name '%s'",
1450 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
1454 /* A subroutine of expand_asm_operands. Resolve the names of the operands
1455 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1456 STRING and in the constraints to those numbers. */
1459 resolve_asm_operand_names (tree string, tree outputs, tree inputs)
/* Duplicate names would make resolution ambiguous, so reject them
   up front.  */
1466 check_unique_operand_names (outputs, inputs);
1468 /* Substitute [<name>] in input constraint strings. There should be no
1469 named operands in output constraints. */
1470 for (t = inputs; t ; t = TREE_CHAIN (t))
1472 c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1473 if (strchr (c, '[') != NULL)
/* Rewrite on a private copy, then install the rewritten text as the
   new constraint string.  */
1475 p = buffer = xstrdup (c);
1476 while ((p = strchr (p, '[')) != NULL)
1477 p = resolve_operand_name_1 (p, outputs, inputs);
1478 TREE_VALUE (TREE_PURPOSE (t))
1479 = build_string (strlen (buffer), buffer);
1484 /* Now check for any needed substitutions in the template. */
1485 c = TREE_STRING_POINTER (string);
1486 while ((c = strchr (c, '%')) != NULL)
/* Only a '%<letter>[' sequence names an operand in the template.  */
1490 else if (ISALPHA (c[1]) && c[2] == '[')
1501 /* OK, we need to make a copy so we can perform the substitutions.
1502 Assume that we will not need extra space--we get to remove '['
1503 and ']', which means we cannot have a problem until we have more
1504 than 999 operands. */
1505 buffer = xstrdup (TREE_STRING_POINTER (string));
1506 p = buffer + (c - TREE_STRING_POINTER (string));
1508 while ((p = strchr (p, '%')) != NULL)
1512 else if (ISALPHA (p[1]) && p[2] == '[')
1520 p = resolve_operand_name_1 (p, outputs, inputs);
/* Build a fresh STRING node holding the rewritten template.  */
1523 string = build_string (strlen (buffer), buffer);
1530 /* A subroutine of resolve_operand_names. P points to the '[' for a
1531 potential named operand of the form [<name>]. In place, replace
1532 the name and brackets with a number. Return a pointer to the
1533 balance of the string after substitution. */
1536 resolve_operand_name_1 (char *p, tree outputs, tree inputs)
1543 /* Collect the operand name. */
1544 q = strchr (p, ']');
1547 error ("missing close brace for named operand")
1548 return strchr (p, '\0');
1552 /* Resolve the name to a number. */
1553 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
1555 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1558 const char *c = TREE_STRING_POINTER (name);
1559 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
/* Inputs are numbered after all outputs, so OP keeps counting across
   the second loop.  */
1563 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
1565 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1568 const char *c = TREE_STRING_POINTER (name);
1569 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
1575 error ("undefined named operand '%s'", p + 1);
1579 /* Replace the name with the number. Unfortunately, not all libraries
1580 get the return value of sprintf correct, so search for the end of the
1581 generated string by hand. */
1582 sprintf (p, "%d", op);
1583 p = strchr (p, '\0');
1585 /* Verify the no extra buffer space assumption. */
1589 /* Shift the rest of the buffer down to fill the gap. */
1590 memmove (p, q + 1, strlen (q + 1) + 1);
1595 /* Generate RTL to evaluate the expression EXP. */
1598 expand_expr_stmt (tree exp)
/* Evaluate EXP for its side effects; the resulting value itself is
   discarded.  */
1603 value = expand_expr (exp, const0_rtx, VOIDmode, 0);
1604 type = TREE_TYPE (exp);
1606 /* If all we do is reference a volatile value in memory,
1607 copy it to a register to be sure it is actually touched. */
1608 if (value && MEM_P (value) && TREE_THIS_VOLATILE (exp))
1610 if (TYPE_MODE (type) == VOIDmode)
1612 else if (TYPE_MODE (type) != BLKmode)
1613 value = copy_to_reg (value);
/* BLKmode values cannot be copied to a register; force a reference
   by comparing the value against itself instead.  */
1616 rtx lab = gen_label_rtx ();
1618 /* Compare the value with itself to reference it. */
1619 emit_cmp_and_jump_insns (value, value, EQ,
1620 expand_expr (TYPE_SIZE (type),
1621 NULL_RTX, VOIDmode, 0),
1627 /* Free any temporaries used to evaluate this expression. */
1633 /* Warn if EXP contains any computations whose results are not used.
1634 Return 1 if a warning is printed; 0 otherwise. LOCUS is the
1635 (potential) location of the expression. */
1638 warn_if_unused_value (tree exp, location_t locus)
/* An expression already marked TREE_USED never warns.  */
1641 if (TREE_USED (exp))
1644 /* Don't warn about void constructs. This includes casting to void,
1645 void function calls, and statement expressions with a final cast
1647 if (VOID_TYPE_P (TREE_TYPE (exp)))
/* Prefer the expression's own location when it carries one.  */
1650 if (EXPR_LOCUS (exp))
1651 locus = *EXPR_LOCUS (exp);
1653 switch (TREE_CODE (exp))
1655 case PREINCREMENT_EXPR:
1656 case POSTINCREMENT_EXPR:
1657 case PREDECREMENT_EXPR:
1658 case POSTDECREMENT_EXPR:
1663 case TRY_CATCH_EXPR:
1664 case WITH_CLEANUP_EXPR:
1669 /* For a binding, warn if no side effect within it. */
1670 exp = BIND_EXPR_BODY (exp);
1674 exp = TREE_OPERAND (exp, 0);
1677 case TRUTH_ORIF_EXPR:
1678 case TRUTH_ANDIF_EXPR:
1679 /* In && or ||, warn if 2nd operand has no side effect. */
1680 exp = TREE_OPERAND (exp, 1);
1684 if (TREE_NO_WARNING (exp))
1686 if (warn_if_unused_value (TREE_OPERAND (exp, 0), locus))
1688 /* Let people do `(foo (), 0)' without a warning. */
1689 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1691 exp = TREE_OPERAND (exp, 1);
1696 case NON_LVALUE_EXPR:
1697 /* Don't warn about conversions not explicit in the user's program. */
1698 if (TREE_NO_WARNING (exp))
1700 /* Assignment to a cast usually results in a cast of a modify.
1701 Don't complain about that. There can be an arbitrary number of
1702 casts before the modify, so we must loop until we find the first
1703 non-cast expression and then test to see if that is a modify. */
1705 tree tem = TREE_OPERAND (exp, 0);
1707 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
1708 tem = TREE_OPERAND (tem, 0);
1710 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
1711 || TREE_CODE (tem) == CALL_EXPR)
1717 /* Don't warn about automatic dereferencing of references, since
1718 the user cannot control it. */
1719 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1721 exp = TREE_OPERAND (exp, 0);
1727 /* Referencing a volatile value is a side effect, so don't warn. */
1729 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1730 && TREE_THIS_VOLATILE (exp))
1733 /* If this is an expression which has no operands, there is no value
1734 to be unused. There are no such language-independent codes,
1735 but front ends may define such. */
1736 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
1737 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
1741 /* If this is an expression with side effects, don't warn. */
1742 if (TREE_SIDE_EFFECTS (exp))
1745 warning ("%Hvalue computed is not used", &locus);
1750 /* Generate RTL for the start of an if-then. COND is the expression
1751 whose truth should be tested.
1753 If EXITFLAG is nonzero, this conditional is visible to
1754 `exit_something'. */
1757 expand_start_cond (tree cond, int exitflag)
1759 struct nesting *thiscond = ALLOC_NESTING ();
1761 /* Make an entry on cond_stack for the cond we are entering. */
1763 thiscond->desc = COND_NESTING;
1764 thiscond->next = cond_stack;
1765 thiscond->all = nesting_stack;
1766 thiscond->depth = ++nesting_depth;
/* next_label is where control transfers when COND is false.  */
1767 thiscond->data.cond.next_label = gen_label_rtx ();
1768 /* Before we encounter an `else', we don't need a separate exit label
1769 unless there are supposed to be exit statements
1770 to exit this conditional. */
1771 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
1772 thiscond->data.cond.endif_label = thiscond->exit_label;
1773 cond_stack = thiscond;
1774 nesting_stack = thiscond;
/* Emit the test; execution falls through into the then-clause when
   COND is true.  */
1776 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
1779 /* Generate RTL between then-clause and the elseif-clause
1780 of an if-then-elseif-.... */
1783 expand_start_elseif (tree cond)
/* Lazily create the endif label the first time it is needed.  */
1785 if (cond_stack->data.cond.endif_label == 0)
1786 cond_stack->data.cond.endif_label = gen_label_rtx ();
/* Close the previous clause: jump past the remaining clauses, then
   land the false branch of the previous test here.  */
1787 emit_jump (cond_stack->data.cond.endif_label);
1788 emit_label (cond_stack->data.cond.next_label);
/* Arm a fresh false-branch label and emit the new test.  */
1789 cond_stack->data.cond.next_label = gen_label_rtx ();
1790 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1793 /* Generate RTL between the then-clause and the else-clause
1794 of an if-then-else. */
1797 expand_start_else (void)
/* Lazily create the endif label the first time it is needed.  */
1799 if (cond_stack->data.cond.endif_label == 0)
1800 cond_stack->data.cond.endif_label = gen_label_rtx ();
/* Jump over the else-clause from the end of the then-clause, and
   resolve the pending false-branch label here.  */
1802 emit_jump (cond_stack->data.cond.endif_label);
1803 emit_label (cond_stack->data.cond.next_label);
1804 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
1807 /* After calling expand_start_else, turn this "else" into an "else if"
1808 by providing another condition. */
1811 expand_elseif (tree cond)
/* Re-arm next_label so a false COND skips this clause, then emit the
   test.  */
1813 cond_stack->data.cond.next_label = gen_label_rtx ();
1814 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1817 /* Generate RTL for the end of an if-then.
1818 Pop the record for it off of cond_stack. */
1821 expand_end_cond (void)
1823 struct nesting *thiscond = cond_stack;
1825 do_pending_stack_adjust ();
/* Resolve whichever of the two labels are still outstanding.  */
1826 if (thiscond->data.cond.next_label)
1827 emit_label (thiscond->data.cond.next_label);
1828 if (thiscond->data.cond.endif_label)
1829 emit_label (thiscond->data.cond.endif_label);
/* Discard this conditional's nesting record.  */
1831 POPSTACK (cond_stack);
1834 /* Return nonzero if we should preserve sub-expressions as separate
1835 pseudos. We never do so if we aren't optimizing. We always do so
1836 if -fexpensive-optimizations. */
1839 preserve_subexpressions_p (void)
/* -fexpensive-optimizations always preserves subexpressions.  */
1841 if (flag_expensive_optimizations)
/* Never preserve without optimization or without function state.  */
1844 if (optimize == 0 || cfun == 0 || cfun->stmt == 0)
1851 /* Generate RTL to return from the current function, with no value.
1852 (That is, we do not do anything about returning any value.) */
1855 expand_null_return (void)
1857 /* If this function was declared to return a value, but we
1858 didn't, clobber the return registers so that they are not
1859 propagated live to the rest of the function. */
1860 clobber_return_register ();
/* Shared tail with expand_value_return: jump to the return label.  */
1862 expand_null_return_1 ();
1865 /* Generate RTL to return directly from the current function.
1866 (That is, we bypass any return value.) */
1869 expand_naked_return (void)
1873 clear_pending_stack_adjust ();
1874 do_pending_stack_adjust ();
/* Reuse the function-wide naked-return label, creating it on first
   use, then jump to it.  */
1876 end_label = naked_return_label;
1878 end_label = naked_return_label = gen_label_rtx ();
1880 emit_jump (end_label);
1883 /* Try to guess whether the value of return means error code. */
1884 static enum br_predictor
1885 return_prediction (rtx val)
1887 /* Different heuristics for pointers and scalars. */
1888 if (POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
1890 /* NULL is usually not returned. */
1891 if (val == const0_rtx)
1892 return PRED_NULL_RETURN;
1896 /* Negative return values are often used to indicate
1898 if (GET_CODE (val) == CONST_INT
1899 && INTVAL (val) < 0)
1900 return PRED_NEGATIVE_RETURN;
1901 /* Constant return values are also usually errors,
1902 zero/one often mean booleans so exclude them from the
1904 if (CONSTANT_P (val)
1905 && (val != const0_rtx && val != const1_rtx))
1906 return PRED_CONST_RETURN;
/* No heuristic matched.  */
1908 return PRED_NO_PREDICTION;
1912 /* If the current function returns values in the most significant part
1913 of a register, shift return value VAL appropriately. The mode of
1914 the function's return type is known not to be BLKmode. */
1917 shift_return_value (rtx val)
1921 type = TREE_TYPE (DECL_RESULT (current_function_decl));
/* Only targets that return values in the most significant end of a
   register need any shifting.  */
1922 if (targetm.calls.return_in_msb (type))
1925 HOST_WIDE_INT shift;
1927 target = DECL_RTL (DECL_RESULT (current_function_decl));
/* Shift amount: the unused low-order bits of the return register.  */
1928 shift = (GET_MODE_BITSIZE (GET_MODE (target))
1929 - BITS_PER_UNIT * int_size_in_bytes (type));
1931 val = expand_shift (LSHIFT_EXPR, GET_MODE (target),
1932 gen_lowpart (GET_MODE (target), val),
1933 build_int_2 (shift, 0), target, 1);
1939 /* Generate RTL to return from the current function, with value VAL. */
1942 expand_value_return (rtx val)
1945 enum br_predictor pred;
/* When branch probabilities are being guessed and a heuristic fires,
   record a prediction note for this return value.  */
1947 if (flag_guess_branch_prob
1948 && (pred = return_prediction (val)) != PRED_NO_PREDICTION)
1950 /* Emit information for branch prediction. */
1953 note = emit_note (NOTE_INSN_PREDICTION);
1955 NOTE_PREDICTION (note) = NOTE_PREDICT (pred, NOT_TAKEN);
1959 return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
1961 /* Copy the value to the return location
1962 unless it's already there. */
1964 if (return_reg != val)
1966 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
1967 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
1969 int unsignedp = TYPE_UNSIGNED (type);
1970 enum machine_mode old_mode
1971 = DECL_MODE (DECL_RESULT (current_function_decl));
1972 enum machine_mode mode
1973 = promote_mode (type, old_mode, &unsignedp, 1);
/* Widen VAL if the target promotes the return mode.  */
1975 if (mode != old_mode)
1976 val = convert_modes (mode, old_mode, val, unsignedp);
/* A PARALLEL return location needs a group load rather than a plain
   move.  */
1978 if (GET_CODE (return_reg) == PARALLEL)
1979 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
1981 emit_move_insn (return_reg, val);
/* Finish by jumping to the function's return label.  */
1984 expand_null_return_1 ();
1987 /* Output a return with no value. */
1990 expand_null_return_1 (void)
1994 clear_pending_stack_adjust ();
1995 do_pending_stack_adjust ();
/* Jump to the function-wide return label, creating it on first use.  */
1997 end_label = return_label;
1999 end_label = return_label = gen_label_rtx ();
2000 emit_jump (end_label);
2003 /* Generate RTL to evaluate the expression RETVAL and return it
2004 from the current function. */
2007 expand_return (tree retval)
2013 /* If function wants no value, give it none. */
2014 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2016 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2018 expand_null_return ();
2022 if (retval == error_mark_node)
2024 /* Treat this like a return of no value from a function that
2026 expand_null_return ();
/* Pick out the expression whose value is actually being returned
   (RETVAL_RHS), stripping a store into the RESULT_DECL if present.  */
2029 else if (TREE_CODE (retval) == RESULT_DECL)
2030 retval_rhs = retval;
2031 else if ((TREE_CODE (retval) == MODIFY_EXPR
2032 || TREE_CODE (retval) == INIT_EXPR)
2033 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2034 retval_rhs = TREE_OPERAND (retval, 1);
2036 retval_rhs = retval;
2038 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
2040 /* If the result is an aggregate that is being returned in one (or more)
2041 registers, load the registers here. The compiler currently can't handle
2042 copying a BLKmode value into registers. We could put this code in a
2043 more general area (for use by everyone instead of just function
2044 call/return), but until this feature is generally usable it is kept here
2045 (and in expand_call). */
2048 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2049 && REG_P (result_rtl))
2052 unsigned HOST_WIDE_INT bitpos, xbitpos;
2053 unsigned HOST_WIDE_INT padding_correction = 0;
2054 unsigned HOST_WIDE_INT bytes
2055 = int_size_in_bytes (TREE_TYPE (retval_rhs));
2056 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2057 unsigned int bitsize
2058 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
2059 rtx *result_pseudos = alloca (sizeof (rtx) * n_regs);
2060 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
2061 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2062 enum machine_mode tmpmode, result_reg_mode;
2066 expand_null_return ();
2070 /* If the structure doesn't take up a whole number of words, see
2071 whether the register value should be padded on the left or on
2072 the right. Set PADDING_CORRECTION to the number of padding
2073 bits needed on the left side.
2075 In most ABIs, the structure will be returned at the least end of
2076 the register, which translates to right padding on little-endian
2077 targets and left padding on big-endian targets. The opposite
2078 holds if the structure is returned at the most significant
2079 end of the register. */
2080 if (bytes % UNITS_PER_WORD != 0
2081 && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
2083 : BYTES_BIG_ENDIAN))
2084 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2087 /* Copy the structure BITSIZE bits at a time. */
2088 for (bitpos = 0, xbitpos = padding_correction;
2089 bitpos < bytes * BITS_PER_UNIT;
2090 bitpos += bitsize, xbitpos += bitsize)
2092 /* We need a new destination pseudo each time xbitpos is
2093 on a word boundary and when xbitpos == padding_correction
2094 (the first time through). */
2095 if (xbitpos % BITS_PER_WORD == 0
2096 || xbitpos == padding_correction)
2098 /* Generate an appropriate register. */
2099 dst = gen_reg_rtx (word_mode);
2100 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2102 /* Clear the destination before we move anything into it. */
2103 emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
2106 /* We need a new source operand each time bitpos is on a word
2108 if (bitpos % BITS_PER_WORD == 0)
2109 src = operand_subword_force (result_val,
2110 bitpos / BITS_PER_WORD,
2113 /* Use bitpos for the source extraction (left justified) and
2114 xbitpos for the destination store (right justified). */
2115 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2116 extract_bit_field (src, bitsize,
2117 bitpos % BITS_PER_WORD, 1,
2118 NULL_RTX, word_mode, word_mode,
/* All bits are now in word-sized pseudos; pick a mode for the whole
   return value.  */
2123 tmpmode = GET_MODE (result_rtl);
2124 if (tmpmode == BLKmode)
2126 /* Find the smallest integer mode large enough to hold the
2127 entire structure and use that mode instead of BLKmode
2128 on the USE insn for the return register. */
2129 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2130 tmpmode != VOIDmode;
2131 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
2132 /* Have we found a large enough mode? */
2133 if (GET_MODE_SIZE (tmpmode) >= bytes)
2136 /* No suitable mode found. */
2137 if (tmpmode == VOIDmode)
2140 PUT_MODE (result_rtl, tmpmode);
2143 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
2144 result_reg_mode = word_mode;
2146 result_reg_mode = tmpmode;
2147 result_reg = gen_reg_rtx (result_reg_mode);
/* Assemble the word-sized pseudos into RESULT_REG.  */
2150 for (i = 0; i < n_regs; i++)
2151 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
2154 if (tmpmode != result_reg_mode)
2155 result_reg = gen_lowpart (tmpmode, result_reg);
2157 expand_value_return (result_reg);
2159 else if (retval_rhs != 0
2160 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
2161 && (REG_P (result_rtl)
2162 || (GET_CODE (result_rtl) == PARALLEL)))
2164 /* Calculate the return value into a temporary (usually a pseudo
2166 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
2167 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
2169 val = assign_temp (nt, 0, 0, 1);
2170 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
2171 val = force_not_mem (val);
2173 /* Return the calculated value. */
2174 expand_value_return (shift_return_value (val));
2178 /* No hard reg used; calculate value into hard return reg. */
2179 expand_expr (retval, const0_rtx, VOIDmode, 0);
2181 expand_value_return (result_rtl);
2185 /* Generate the RTL code for entering a binding contour.
2186 The variables are declared one by one, by calls to `expand_decl'.
2188 FLAGS is a bitwise or of the following flags:
2190 1 - Nonzero if this construct should be visible to
2193 2 - Nonzero if this contour does not require a
2194 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
2195 language-independent code should set this flag because they
2196 will not create corresponding BLOCK nodes. (There should be
2197 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
2198 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
2199 when expand_end_bindings is called.
2201 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
2202 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
2206 expand_start_bindings_and_block (int flags, tree block)
2208 struct nesting *thisblock = ALLOC_NESTING ();
/* Decode FLAGS: bit 0 makes the contour visible to exit_something,
   bit 1 suppresses the NOTE_INSN_BLOCK_BEG request.  */
2210 int exit_flag = ((flags & 1) != 0);
2211 int block_flag = ((flags & 2) == 0);
2213 /* If a BLOCK is supplied, then the caller should be requesting a
2214 NOTE_INSN_BLOCK_BEG note. */
2215 if (!block_flag && block)
2218 /* Create a note to mark the beginning of the block. */
2219 note = emit_note (NOTE_INSN_DELETED);
2221 /* Make an entry on block_stack for the block we are entering. */
2223 thisblock->desc = BLOCK_NESTING;
2224 thisblock->next = block_stack;
2225 thisblock->all = nesting_stack;
2226 thisblock->depth = ++nesting_depth;
2227 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
2229 thisblock->data.block.conditional_code = 0;
2230 thisblock->data.block.last_unconditional_cleanup = note;
2231 /* When we insert instructions after the last unconditional cleanup,
2232 we don't adjust last_insn. That means that a later add_insn will
2233 clobber the instructions we've just added. The easiest way to
2234 fix this is to just insert another instruction here, so that the
2235 instructions inserted after the last unconditional cleanup are
2236 never the last instruction. */
2237 emit_note (NOTE_INSN_DELETED);
2239 thisblock->data.block.first_insn = note;
2240 thisblock->data.block.block_start_count = ++current_block_start_count;
2241 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
2242 block_stack = thisblock;
2243 nesting_stack = thisblock;
2245 /* Make a new level for allocating stack slots. */
2249 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
2250 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
2251 expand_expr are made. After we end the region, we know that all
2252 space for all temporaries that were created by TARGET_EXPRs will be
2253 destroyed and their space freed for reuse. */
2256 expand_start_target_temps (void)
2258 /* This is so that even if the result is preserved, the space
2259 allocated will be freed, as we know that it is no longer in use. */
2262 /* Start a new binding layer that will keep track of all cleanup
2263 actions to be performed. */
2264 expand_start_bindings (2);
/* Temporaries created by TARGET_EXPRs from here on belong to this
   new level.  */
2266 target_temp_slot_level = temp_slot_level;
/* End the temporary-slot region opened by expand_start_target_temps,
   closing the binding contour and freeing its temporaries.  */
2270 expand_end_target_temps (void)
2272 expand_end_bindings (NULL_TREE, 0, 0);
2274 /* This is so that even if the result is preserved, the space
2275 allocated will be freed, as we know that it is no longer in use. */
2279 /* Given a pointer to a BLOCK node return nonzero if (and only if) the node
2280 in question represents the outermost pair of curly braces (i.e. the "body
2281 block") of a function or method.
2283 For any BLOCK node representing a "body block" of a function or method, the
2284 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
2285 represents the outermost (function) scope for the function or method (i.e.
2286 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
2287 *that* node in turn will point to the relevant FUNCTION_DECL node. */
2290 is_body_block (tree stmt)
/* Some languages have no body blocks at all; let the frontend say so.  */
2292 if (lang_hooks.no_body_blocks)
2295 if (TREE_CODE (stmt) == BLOCK)
2297 tree parent = BLOCK_SUPERCONTEXT (stmt);
2299 if (parent && TREE_CODE (parent) == BLOCK)
2301 tree grandparent = BLOCK_SUPERCONTEXT (parent);
/* The chain BLOCK -> BLOCK -> FUNCTION_DECL identifies the body
   block of a function or method.  */
2303 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
2311 /* True if we are currently emitting insns in an area of output code
2312 that is controlled by a conditional expression. This is used by
2313 the cleanup handling code to generate conditional cleanup actions. */
/* Return nonzero if insns are currently being emitted under the
   control of a conditional expression.  */
2316 conditional_context (void)
2318 return block_stack && block_stack->data.block.conditional_code;
2321 /* Return an opaque pointer to the current nesting level, so frontend code
2322 can check its own sanity. */
2325 current_nesting_level (void)
/* Outside any function being compiled there is no nesting to report.  */
2327 return cfun ? block_stack : 0;
2330 /* Emit code to restore vital registers at the beginning of a nonlocal goto
/* Emit the code that a nonlocal goto lands on: restore the frame
   pointer (and, where it cannot be eliminated, the arg pointer),
   then run any target-specific receiver pattern.  */
2333 expand_nl_goto_receiver (void)
2335 /* Clobber the FP when we get here, so we have to make sure it's
2336 marked as used by this function. */
2337 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
2339 /* Mark the static chain as clobbered here so life information
2340 doesn't get messed up for it. */
2341 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
2343 #ifdef HAVE_nonlocal_goto
2344 if (! HAVE_nonlocal_goto)
2346 /* First adjust our frame pointer to its actual value. It was
2347 previously set to the start of the virtual area corresponding to
2348 the stacked variables when we branched here and now needs to be
2349 adjusted to the actual hardware fp value.
2351 Assignments to virtual registers are converted by
2352 instantiate_virtual_regs into the corresponding assignment
2353 to the underlying register (fp in this case) that makes
2354 the original assignment true.
2355 So the following insn will actually be
2356 decrementing fp by STARTING_FRAME_OFFSET. */
2357 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
2359 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2360 if (fixed_regs[ARG_POINTER_REGNUM])
2362 #ifdef ELIMINABLE_REGS
2363 /* If the argument pointer can be eliminated in favor of the
2364 frame pointer, we don't need to restore it. We assume here
2365 that if such an elimination is present, it can always be used.
2366 This is the case on all known machines; if we don't make this
2367 assumption, we do unnecessary saving on many machines. */
2368 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
2371 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
2372 if (elim_regs[i].from == ARG_POINTER_REGNUM
2373 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
2376 if (i == ARRAY_SIZE (elim_regs))
2379 /* Now restore our arg pointer from the address at which it
2380 was saved in our stack frame. */
2381 emit_move_insn (virtual_incoming_args_rtx,
2382 copy_to_reg (get_arg_pointer_save_area (cfun)));
2387 #ifdef HAVE_nonlocal_goto_receiver
2388 if (HAVE_nonlocal_goto_receiver)
2389 emit_insn (gen_nonlocal_goto_receiver ());
2392 /* @@@ This is a kludge. Not all machine descriptions define a blockage
2393 insn, but we must not allow the code we just generated to be reordered
2394 by scheduling. Specifically, the update of the frame pointer must
2395 happen immediately, not later. So emit an ASM_INPUT to act as blockage
2397 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
2400 /* Warn about any unused VARS (which may contain nodes other than
2401 VAR_DECLs, but such nodes are ignored). The nodes are connected
2402 via the TREE_CHAIN field. */
/* NOTE(review): lines 2403-2408 (return type, braces, `tree decl;`) are
   missing from this listing.  Emits -Wunused-variable diagnostics for
   named, user-written, non-system-header VAR_DECLs never marked used.  */
2405 warn_about_unused_variables (tree vars)
2409 if (warn_unused_variable)
2410 for (decl = vars; decl; decl = TREE_CHAIN (decl))
2411 if (TREE_CODE (decl) == VAR_DECL
2412 && ! TREE_USED (decl)
2413 && ! DECL_IN_SYSTEM_HEADER (decl)
2414 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
2415 warning ("%Junused variable '%D'", decl, decl);
2418 /* Generate RTL code to terminate a binding contour.
2420 VARS is the chain of VAR_DECL nodes for the variables bound in this
2421 contour. There may actually be other nodes in this chain, but any
2422 nodes other than VAR_DECLS are ignored.
2424 MARK_ENDS is nonzero if we should put a note at the beginning
2425 and end of this binding contour.
2427 DONT_JUMP_IN is positive if it is not valid to jump into this contour,
2428 zero if we can jump into this contour only if it does not have a saved
2429 stack level, and negative if we are not to check for invalid use of
2430 labels (because the front end does that). */
/* NOTE(review): several lines are missing from this listing (braces and
   the conditional around the NOTE_INSN_DELETED assignment); verify against
   the full source before editing.  */
2433 expand_end_bindings (tree vars, int mark_ends ATTRIBUTE_UNUSED,
2434 int dont_jump_in ATTRIBUTE_UNUSED)
2436 struct nesting *thisblock = block_stack;
2438 /* If any of the variables in this scope were not used, warn the
2440 warn_about_unused_variables (vars);
/* An exit label exists only if `exit_something' can target this block.  */
2442 if (thisblock->exit_label)
2444 do_pending_stack_adjust ();
2445 emit_label (thisblock->exit_label);
2448 /* Mark the beginning and end of the scope if requested. */
2450 /* Get rid of the beginning-mark if we don't make an end-mark. */
2451 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
2453 /* Restore the temporary level of TARGET_EXPRs. */
2454 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
2456 /* Restore block_stack level for containing block. */
2458 POPSTACK (block_stack);
2460 /* Pop the stack slot nesting and free any slots at this level. */
2464 /* Generate RTL for the automatic variable declaration DECL.
2465 (Other kinds of declarations are simply ignored if seen here.) */
/* NOTE(review): the embedded numbering has many gaps (braces, local
   declarations such as `tree type;`, `rtx oldaddr;`, `else` lines and
   `#else`/`#endif` are missing from this listing).  The visible structure
   is a four-way dispatch on how DECL gets its RTL:
     1. error type            -> dummy (MEM BLK const0)
     2. incomplete type       -> MEM whose address is a pseudo, patched later
     3. register candidate    -> fresh pseudo, promoted mode
     4. fixed-size stack var  -> assign_temp slot
     5. dynamic-size var      -> allocate_dynamic_stack_space
   Do not edit without the complete original source.  */
2468 expand_decl (tree decl)
2472 type = TREE_TYPE (decl);
2474 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
2475 type in case this node is used in a reference. */
2476 if (TREE_CODE (decl) == CONST_DECL)
2478 DECL_MODE (decl) = TYPE_MODE (type);
2479 DECL_ALIGN (decl) = TYPE_ALIGN (type);
2480 DECL_SIZE (decl) = TYPE_SIZE (type);
2481 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
2485 /* Otherwise, only automatic variables need any expansion done. Static and
2486 external variables, and external functions, will be handled by
2487 `assemble_variable' (called from finish_decl). TYPE_DECL requires
2488 nothing. PARM_DECLs are handled in `assign_parms'. */
2489 if (TREE_CODE (decl) != VAR_DECL)
2492 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
2495 /* Create the RTL representation for the variable. */
2497 if (type == error_mark_node)
2498 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
2500 else if (DECL_SIZE (decl) == 0)
2501 /* Variable with incomplete type. */
2504 if (DECL_INITIAL (decl) == 0)
2505 /* Error message was already done; now avoid a crash. */
2506 x = gen_rtx_MEM (BLKmode, const0_rtx);
2508 /* An initializer is going to decide the size of this array.
2509 Until we know the size, represent its address with a reg. */
2510 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
2512 set_mem_attributes (x, decl, 1);
2513 SET_DECL_RTL (decl, x);
2515 else if (use_register_for_decl (decl))
2517 /* Automatic variable that can go in a register. */
2518 int unsignedp = TYPE_UNSIGNED (type);
2519 enum machine_mode reg_mode
2520 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
2522 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
2524 /* Note if the object is a user variable. */
2525 if (!DECL_ARTIFICIAL (decl))
2527 mark_user_reg (DECL_RTL (decl));
2529 /* Trust user variables which have a pointer type to really
2530 be pointers. Do not trust compiler generated temporaries
2531 as our type system is totally busted as it relates to
2532 pointer arithmetic which translates into lots of compiler
2533 generated objects with pointer types, but which are not really
2535 if (POINTER_TYPE_P (type))
2536 mark_reg_pointer (DECL_RTL (decl),
2537 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
2540 maybe_set_unchanging (DECL_RTL (decl), decl);
/* Fixed-size stack variable, unless -fstack-check limits would be
   exceeded (in which case it falls through to the dynamic path).  */
2543 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
2544 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
2545 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
2546 STACK_CHECK_MAX_VAR_SIZE)))
2548 /* Variable of fixed size that goes on the stack. */
2553 /* If we previously made RTL for this decl, it must be an array
2554 whose size was determined by the initializer.
2555 The old address was a register; set that register now
2556 to the proper address. */
2557 if (DECL_RTL_SET_P (decl))
2559 if (!MEM_P (DECL_RTL (decl))
2560 || !REG_P (XEXP (DECL_RTL (decl), 0)))
2562 oldaddr = XEXP (DECL_RTL (decl), 0);
2565 /* Set alignment we actually gave this decl. */
2566 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
2567 : GET_MODE_BITSIZE (DECL_MODE (decl)));
2568 DECL_USER_ALIGN (decl) = 0;
2570 x = assign_temp (decl, 1, 1, 1);
2571 set_mem_attributes (x, decl, 1);
2572 SET_DECL_RTL (decl, x);
/* Patch the placeholder register (case 2 above) to the real address.  */
2576 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
2577 if (addr != oldaddr)
2578 emit_move_insn (oldaddr, addr);
2582 /* Dynamic-size object: must push space on the stack. */
2584 rtx address, size, x;
2586 /* Record the stack pointer on entry to block, if have
2587 not already done so. */
2588 do_pending_stack_adjust ();
2590 /* Compute the variable's size, in bytes. This will expand any
2591 needed SAVE_EXPRs for the first time. */
2592 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
2595 /* Allocate space on the stack for the variable. Note that
2596 DECL_ALIGN says how the variable is to be aligned and we
2597 cannot use it to conclude anything about the alignment of
2599 address = allocate_dynamic_stack_space (size, NULL_RTX,
2600 TYPE_ALIGN (TREE_TYPE (decl)));
2602 /* Reference the variable indirect through that rtx. */
2603 x = gen_rtx_MEM (DECL_MODE (decl), address);
2604 set_mem_attributes (x, decl, 1);
2605 SET_DECL_RTL (decl, x);
2608 /* Indicate the alignment we actually gave this variable. */
2609 #ifdef STACK_BOUNDARY
2610 DECL_ALIGN (decl) = STACK_BOUNDARY;
2612 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
2614 DECL_USER_ALIGN (decl) = 0;
2618 /* Emit code to allocate T_SIZE bytes of dynamic stack space for ALLOC. */
/* NOTE(review): braces, early-return/abort lines and local declarations
   (`tree var, type;`) are missing from this listing (numbering gaps
   2622->2625, 2625->2627, ...).  ALLOC must be an ADDR_EXPR of a VAR_DECL
   whose DECL_RTL is already a MEM; the allocated address is stored back
   into that MEM's address register.  */
2620 expand_stack_alloc (tree alloc, tree t_size)
2622 rtx address, dest, size;
2625 if (TREE_CODE (alloc) != ADDR_EXPR)
2627 var = TREE_OPERAND (alloc, 0);
2628 if (TREE_CODE (var) != VAR_DECL)
2631 type = TREE_TYPE (var);
2633 /* Compute the variable's size, in bytes. */
2634 size = expand_expr (t_size, NULL_RTX, VOIDmode, 0);
2637 /* Allocate space on the stack for the variable. */
2638 address = XEXP (DECL_RTL (var), 0);
2639 dest = allocate_dynamic_stack_space (size, address, TYPE_ALIGN (type));
2640 if (dest != address)
2641 emit_move_insn (address, dest);
2643 /* Indicate the alignment we actually gave this variable. */
2644 #ifdef STACK_BOUNDARY
2645 DECL_ALIGN (var) = STACK_BOUNDARY;
2647 DECL_ALIGN (var) = BIGGEST_ALIGNMENT;
2649 DECL_USER_ALIGN (var) = 0;
2652 /* Emit code to save the current value of stack. */
/* NOTE(review): the return type, the declaration of `ret' and the final
   `return ret;' are among the lines missing from this listing.  Presumably
   returns the save-area rtx produced by emit_stack_save -- TODO confirm
   against the full source.  */
2654 expand_stack_save (void)
2658 do_pending_stack_adjust ();
2659 emit_stack_save (SAVE_BLOCK, &ret, NULL_RTX);
2663 /* Emit code to restore the current value of stack. */
/* Restores the stack pointer from the save area recorded in VAR's
   DECL_RTL (the counterpart of expand_stack_save above).  Braces and
   return type are missing from this listing.  */
2665 expand_stack_restore (tree var)
2667 rtx sa = DECL_RTL (var);
2669 emit_stack_restore (SAVE_BLOCK, sa, NULL_RTX);
2672 /* Emit code to perform the initialization of a declaration DECL. */
/* NOTE(review): braces, `return;' statements and the trailing arguments of
   the expand_assignment calls are missing from this listing (numbering
   gaps 2682->2685, 2695->2699).  Verify against the full source.  */
2675 expand_decl_init (tree decl)
2677 int was_used = TREE_USED (decl);
2679 /* If this is a CONST_DECL, we don't have to generate any code. Likewise
2680 for static decls. */
2681 if (TREE_CODE (decl) == CONST_DECL
2682 || TREE_STATIC (decl))
2685 /* Compute and store the initial value now. */
/* error_mark_node initializer: store a zero of a scalar type so later
   uses do not crash; aggregate types are left alone.  */
2689 if (DECL_INITIAL (decl) == error_mark_node)
2691 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
2693 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
2694 || code == POINTER_TYPE || code == REFERENCE_TYPE)
2695 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
2699 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
2701 emit_line_note (DECL_SOURCE_LOCATION (decl));
2702 expand_assignment (decl, DECL_INITIAL (decl), 0);
2706 /* Don't let the initialization count as "using" the variable. */
2707 TREE_USED (decl) = was_used;
2709 /* Free any temporaries we made while initializing the decl. */
2710 preserve_temp_slots (NULL_RTX);
2716 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
2717 DECL_ELTS is the list of elements that belong to DECL's type.
2718 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
/* NOTE(review): the parameter list continuation (`tree decl_elts'), local
   declarations, braces and the MEM/REG branch structure around lines
   2762-2778 are missing from this listing.  The two `if (mode ==
   GET_MODE (x))' sequences below are presumably the MEM and REG arms of a
   conditional on the form of DECL's RTL -- confirm before editing.  */
2721 expand_anon_union_decl (tree decl, tree cleanup ATTRIBUTE_UNUSED,
2727 /* If any of the elements are addressable, so is the entire union. */
2728 for (t = decl_elts; t; t = TREE_CHAIN (t))
2729 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
2731 TREE_ADDRESSABLE (decl) = 1;
2736 x = DECL_RTL (decl);
2738 /* Go through the elements, assigning RTL to each. */
2739 for (t = decl_elts; t; t = TREE_CHAIN (t))
2741 tree decl_elt = TREE_VALUE (t);
2742 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
2744 /* If any of the elements are addressable, so is the entire
2746 if (TREE_USED (decl_elt))
2747 TREE_USED (decl) = 1;
2749 /* Propagate the union's alignment to the elements. */
2750 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
2751 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
2753 /* If the element has BLKmode and the union doesn't, the union is
2754 aligned such that the element doesn't need to have BLKmode, so
2755 change the element's mode to the appropriate one for its size. */
2756 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
2757 DECL_MODE (decl_elt) = mode
2758 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
2760 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
2761 instead create a new MEM rtx with the proper mode. */
2764 if (mode == GET_MODE (x))
2765 SET_DECL_RTL (decl_elt, x);
2767 SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
2771 if (mode == GET_MODE (x))
2772 SET_DECL_RTL (decl_elt, x);
2774 SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
2781 /* Enter a case (Pascal) or switch (C) statement.
2782 Push a block onto case_stack and nesting_stack
2783 to accumulate the case-labels that are seen
2784 and to record the labels generated for the statement.
2786 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
2787 Otherwise, this construct is transparent for `exit_something'.
2789 EXPR is the index-expression to be dispatched on.
2790 TYPE is its nominal type. We could simply convert EXPR to this type,
2791 but instead we take short cuts. */
/* NOTE(review): return type, braces and a few statements are missing from
   this listing (numbering gaps).  PRINTNAME is stored for diagnostics.  */
2794 expand_start_case (int exit_flag, tree expr, tree type,
2795 const char *printname)
2797 struct nesting *thiscase = ALLOC_NESTING ();
2799 /* Make an entry on case_stack for the case we are entering. */
2801 thiscase->desc = CASE_NESTING;
2802 thiscase->next = case_stack;
2803 thiscase->all = nesting_stack;
2804 thiscase->depth = ++nesting_depth;
2805 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
2806 thiscase->data.case_stmt.case_list = 0;
2807 thiscase->data.case_stmt.index_expr = expr;
2808 thiscase->data.case_stmt.nominal_type = type;
2809 thiscase->data.case_stmt.default_label = 0;
2810 thiscase->data.case_stmt.printname = printname;
2811 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
2812 case_stack = thiscase;
2813 nesting_stack = thiscase;
2815 do_pending_stack_adjust ();
2818 /* Make sure case_stmt.start points to something that won't
2819 need any transformation before expand_end_case. */
2820 if (GET_CODE (get_last_insn ()) != NOTE)
2821 emit_note (NOTE_INSN_DELETED);
2823 thiscase->data.case_stmt.start = get_last_insn ();
2826 /* Accumulate one case or default label inside a case or switch statement.
2827 VALUE is the value of the case (a null pointer, for a default label).
2828 The function CONVERTER, when applied to arguments T and V,
2829 converts the value V to the type T.
2831 If not currently inside a case or switch statement, return 1 and do
2832 nothing. The caller will print a language-specific error message.
2833 If VALUE is a duplicate or overlaps, return 2 and do nothing
2834 except store the (first) duplicate node in *DUPLICATE.
2835 If VALUE is out of range, return 3 and do nothing.
2836 Return 0 on success.
2838 Extended to handle range statements. */
/* NOTE(review): the parameter list continuation (`tree *duplicate'), local
   declarations, braces and the numeric `return' statements are among the
   lines missing from this listing.  */
2841 pushcase (tree value, tree (*converter) (tree, tree), tree label,
2847 /* Fail if not inside a real case statement. */
2848 if (! (case_stack && case_stack->data.case_stmt.start))
2851 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
2852 nominal_type = case_stack->data.case_stmt.nominal_type;
2854 /* If the index is erroneous, avoid more problems: pretend to succeed. */
2855 if (index_type == error_mark_node)
2858 /* Convert VALUE to the type in which the comparisons are nominally done. */
/* A null VALUE means a default label; the conversion is presumably guarded
   by a `value != 0' test on a missing line -- confirm.  */
2860 value = (*converter) (nominal_type, value);
2862 /* Fail if this value is out of range for the actual type of the index
2863 (which may be narrower than NOMINAL_TYPE). */
2865 && (TREE_CONSTANT_OVERFLOW (value)
2866 || ! int_fits_type_p (value, index_type)))
2869 return add_case_node (value, value, label, duplicate, false);
2872 /* Like pushcase but this case applies to all values between VALUE1 and
2873 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
2874 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
2875 starts at VALUE1 and ends at the highest value of the index type.
2876 If both are NULL, this case applies to all values.
2878 The return value is the same as that of pushcase but there is one
2879 additional error code: 4 means the specified range was empty. */
/* NOTE(review): braces, local declarations, the null-checks guarding the
   TYPE_MIN_VALUE/TYPE_MAX_VALUE substitutions, and the numeric `return'
   statements are missing from this listing.  */
2882 pushcase_range (tree value1, tree value2, tree (*converter) (tree, tree),
2883 tree label, tree *duplicate)
2888 /* Fail if not inside a real case statement. */
2889 if (! (case_stack && case_stack->data.case_stmt.start))
2892 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
2893 nominal_type = case_stack->data.case_stmt.nominal_type;
2895 /* If the index is erroneous, avoid more problems: pretend to succeed. */
2896 if (index_type == error_mark_node)
2899 /* Convert VALUEs to type in which the comparisons are nominally done
2900 and replace any unspecified value with the corresponding bound. */
2902 value1 = TYPE_MIN_VALUE (index_type);
2904 value2 = TYPE_MAX_VALUE (index_type);
2906 /* Fail if the range is empty. Do this before any conversion since
2907 we want to allow out-of-range empty ranges. */
2908 if (value2 != 0 && tree_int_cst_lt (value2, value1))
2911 /* If the max was unbounded, use the max of the nominal_type we are
2912 converting to. Do this after the < check above to suppress false
2915 value2 = TYPE_MAX_VALUE (nominal_type);
2917 value1 = (*converter) (nominal_type, value1);
2918 value2 = (*converter) (nominal_type, value2);
2920 /* Fail if these values are out of range. */
2921 if (TREE_CONSTANT_OVERFLOW (value1)
2922 || ! int_fits_type_p (value1, index_type))
2925 if (TREE_CONSTANT_OVERFLOW (value2)
2926 || ! int_fits_type_p (value2, index_type))
2929 return add_case_node (value1, value2, label, duplicate, false);
2932 /* Do the actual insertion of a case label for pushcase and pushcase_range
2933 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
2934 slowdown for large switch statements. */
/* NOTE(review): this listing is severely truncated -- the binary-search
   insertion loop, the node-field initialization, the parent/balance
   bookkeeping and most of the AVL rotation bodies (single and double
   rotations for both the left-growth and right-growth cases) are missing.
   Only isolated statements of the rebalancing code are visible below.
   Do NOT attempt to modify this function without the complete source.  */
2937 add_case_node (tree low, tree high, tree label, tree *duplicate,
2938 bool dont_expand_label)
2940 struct case_node *p, **q, *r;
2942 /* If there's no HIGH value, then this is not a case range; it's
2943 just a simple case label. But that's just a degenerate case
2948 /* Handle default labels specially. */
2951 if (case_stack->data.case_stmt.default_label != 0)
2953 *duplicate = case_stack->data.case_stmt.default_label;
2956 case_stack->data.case_stmt.default_label = label;
2957 if (!dont_expand_label)
2958 expand_label (label);
/* Search the AVL tree for the insertion point, failing on overlap.  */
2962 q = &case_stack->data.case_stmt.case_list;
2969 /* Keep going past elements distinctly greater than HIGH. */
2970 if (tree_int_cst_lt (high, p->low))
2973 /* or distinctly less than LOW. */
2974 else if (tree_int_cst_lt (p->high, low))
2979 /* We have an overlap; this is an error. */
2980 *duplicate = p->code_label;
2985 /* Add this label to the chain, and succeed. */
2987 r = ggc_alloc (sizeof (struct case_node));
2990 /* If the bounds are equal, turn this into the one-value case. */
2991 if (tree_int_cst_equal (low, high))
2996 r->code_label = label;
2997 if (!dont_expand_label)
2998 expand_label (label);
/* AVL rebalancing: walk back up adjusting balance factors, performing a
   single or double rotation where a subtree has grown too tall.  */
3008 struct case_node *s;
3014 if (! (b = p->balance))
3015 /* Growth propagation from left side. */
3022 if ((p->left = s = r->right))
3031 if ((r->parent = s))
3039 case_stack->data.case_stmt.case_list = r;
3042 /* r->balance == +1 */
3047 struct case_node *t = r->right;
3049 if ((p->left = s = t->right))
3053 if ((r->right = s = t->left))
3067 if ((t->parent = s))
3075 case_stack->data.case_stmt.case_list = t;
3082 /* p->balance == +1; growth of left side balances the node. */
3092 if (! (b = p->balance))
3093 /* Growth propagation from right side. */
3101 if ((p->right = s = r->left))
3109 if ((r->parent = s))
3118 case_stack->data.case_stmt.case_list = r;
3122 /* r->balance == -1 */
3126 struct case_node *t = r->left;
3128 if ((p->right = s = t->left))
3133 if ((r->left = s = t->right))
3147 if ((t->parent = s))
3156 case_stack->data.case_stmt.case_list = t;
3162 /* p->balance == -1; growth of right side balances the node. */
3175 /* Maximum number of case bit tests. */
3176 #define MAX_CASE_BIT_TESTS 3
3178 /* By default, enable case bit tests on targets with ashlsi3. */
3179 #ifndef CASE_USE_BIT_TESTS
3180 #define CASE_USE_BIT_TESTS (ashl_optab->handlers[word_mode].insn_code \
3181 != CODE_FOR_nothing)
3185 /* A case_bit_test represents a set of case nodes that may be
3186 selected from using a bit-wise comparison. HI and LO hold
3187 the integer to be tested against, LABEL contains the label
3188 to jump to upon success and BITS counts the number of case
3189 nodes handled by this test, typically the number of bits
3192 struct case_bit_test
3200 /* Determine whether "1 << x" is relatively cheap in word_mode. */
/* NOTE(review): the `static' qualifier/braces, the `if (!init)' guard and
   the `init = true;' / `return cheap;' lines are missing from this listing.
   Result is computed once and cached: a shift-by-register is "cheap" when
   its rtx cost is below three insns.  Register 10000 is just a dummy
   pseudo number for costing purposes.  */
3203 bool lshift_cheap_p (void)
3205 static bool init = false;
3206 static bool cheap = true;
3210 rtx reg = gen_rtx_REG (word_mode, 10000);
3211 int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET);
3212 cheap = cost < COSTS_N_INSNS (3);
3219 /* Comparison function for qsort to order bit tests by decreasing
3220 number of case nodes, i.e. the node with the most cases gets
3224 case_bit_test_cmp (const void *p1, const void *p2)
3226 const struct case_bit_test *d1 = p1;
3227 const struct case_bit_test *d2 = p2;
3229 return d2->bits - d1->bits;
3232 /* Expand a switch statement by a short sequence of bit-wise
3233 comparisons. "switch(x)" is effectively converted into
3234 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
3237 INDEX_EXPR is the value being switched on, which is of
3238 type INDEX_TYPE. MINVAL is the lowest case value of in
3239 the case nodes, of INDEX_TYPE type, and RANGE is highest
3240 value minus MINVAL, also of type INDEX_TYPE. NODES is
3241 the set of case nodes, and DEFAULT_LABEL is the label to
3242 branch to should none of the cases match.
3244 There *MUST* be MAX_CASE_BIT_TESTS or less unique case
3248 emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
3249 tree range, case_node_ptr nodes, rtx default_label)
3251 struct case_bit_test test[MAX_CASE_BIT_TESTS];
3252 enum machine_mode mode;
3253 rtx expr, index, label;
3254 unsigned int i,j,lo,hi;
3255 struct case_node *n;
3259 for (n = nodes; n; n = n->right)
3261 label = label_rtx (n->code_label);
3262 for (i = 0; i < count; i++)
3263 if (same_case_target_p (label, test[i].label))
3268 if (count >= MAX_CASE_BIT_TESTS)
3272 test[i].label = label;
3279 lo = tree_low_cst (fold (build (MINUS_EXPR, index_type,
3280 n->low, minval)), 1);
3281 hi = tree_low_cst (fold (build (MINUS_EXPR, index_type,
3282 n->high, minval)), 1);
3283 for (j = lo; j <= hi; j++)
3284 if (j >= HOST_BITS_PER_WIDE_INT)
3285 test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_INT);
3287 test[i].lo |= (HOST_WIDE_INT) 1 << j;
3290 qsort (test, count, sizeof(*test), case_bit_test_cmp);
3292 index_expr = fold (build (MINUS_EXPR, index_type,
3293 convert (index_type, index_expr),
3294 convert (index_type, minval)));
3295 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
3297 index = protect_from_queue (index, 0);
3298 do_pending_stack_adjust ();
3300 mode = TYPE_MODE (index_type);
3301 expr = expand_expr (range, NULL_RTX, VOIDmode, 0);
3302 emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
3305 index = convert_to_mode (word_mode, index, 0);
3306 index = expand_binop (word_mode, ashl_optab, const1_rtx,
3307 index, NULL_RTX, 1, OPTAB_WIDEN);
3309 for (i = 0; i < count; i++)
3311 expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
3312 expr = expand_binop (word_mode, and_optab, index, expr,
3313 NULL_RTX, 1, OPTAB_WIDEN);
3314 emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
3315 word_mode, 1, test[i].label);
3318 emit_jump (default_label);
3322 #define HAVE_casesi 0
3325 #ifndef HAVE_tablejump
3326 #define HAVE_tablejump 0
3329 /* Terminate a case (Pascal) or switch (C) statement
3330 in which ORIG_INDEX is the expression to be tested.
3331 If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
3332 type as given in the source before any compiler conversions.
3333 Generate the code to test it and jump to the right place. */
/* NOTE(review): this listing is missing many physical lines (braces,
   `else' lines, local declarations such as `rtx index', `rtx table_label',
   `int ncases', `rtx *labelvec', several `break;'/assignment statements
   and the `#else'/`#endif' directives).  The visible strategy selection:
     - no cases            -> evaluate index for side effects, jump default
     - few dense targets   -> bit-test sequence (emit_case_bit_tests)
     - few or sparse cases -> balanced binary decision tree
     - otherwise           -> casesi / tablejump / explicit dispatch table
   Do not modify without the complete original source.  */
3336 expand_end_case_type (tree orig_index, tree orig_type)
3338 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
3339 rtx default_label = 0;
3340 struct case_node *n, *m;
3341 unsigned int count, uniq;
3347 rtx before_case, end, lab;
3348 struct nesting *thiscase = case_stack;
3349 tree index_expr, index_type;
3350 bool exit_done = false;
3353 /* Don't crash due to previous errors. */
3354 if (thiscase == NULL)
3357 index_expr = thiscase->data.case_stmt.index_expr;
3358 index_type = TREE_TYPE (index_expr);
3359 unsignedp = TYPE_UNSIGNED (index_type);
3360 if (orig_type == NULL)
3361 orig_type = TREE_TYPE (orig_index);
3363 do_pending_stack_adjust ();
3365 /* An ERROR_MARK occurs for various reasons including invalid data type. */
3366 if (index_type != error_mark_node)
3368 /* If we don't have a default-label, create one here,
3369 after the body of the switch. */
3370 if (thiscase->data.case_stmt.default_label == 0)
3372 thiscase->data.case_stmt.default_label
3373 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE)
3374 /* Share the exit label if possible. */
3375 if (thiscase->exit_label)
3377 SET_DECL_RTL (thiscase->data.case_stmt.default_label,
3378 thiscase->exit_label);
3381 expand_label (thiscase->data.case_stmt.default_label);
3383 default_label = label_rtx (thiscase->data.case_stmt.default_label);
3385 before_case = get_last_insn ();
/* Flatten the AVL tree built by add_case_node into a right-linked list.  */
3387 if (thiscase->data.case_stmt.case_list
3388 && thiscase->data.case_stmt.case_list->left)
3389 thiscase->data.case_stmt.case_list
3390 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
3392 /* Simplify the case-list before we count it. */
3393 group_case_nodes (thiscase->data.case_stmt.case_list);
3394 strip_default_case_nodes (&thiscase->data.case_stmt.case_list,
3397 /* Get upper and lower bounds of case values.
3398 Also convert all the case values to the index expr's data type. */
3402 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
3404 /* Check low and high label values are integers. */
3405 if (TREE_CODE (n->low) != INTEGER_CST)
3407 if (TREE_CODE (n->high) != INTEGER_CST)
3410 n->low = convert (index_type, n->low);
3411 n->high = convert (index_type, n->high);
3413 /* Count the elements and track the largest and smallest
3414 of them (treating them as signed even if they are not). */
3422 if (INT_CST_LT (n->low, minval))
3424 if (INT_CST_LT (maxval, n->high))
3427 /* A range counts double, since it requires two compares. */
3428 if (! tree_int_cst_equal (n->low, n->high))
3431 /* Count the number of unique case node targets. */
3433 lab = label_rtx (n->code_label);
3434 for (m = thiscase->data.case_stmt.case_list; m != n; m = m->right)
3435 if (same_case_target_p (label_rtx (m->code_label), lab))
3442 /* Compute span of values. */
3444 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
/* No case labels at all: evaluate the index for side effects only.  */
3448 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
3450 emit_jump (default_label);
3453 /* Try implementing this switch statement by a short sequence of
3454 bit-wise comparisons. However, we let the binary-tree case
3455 below handle constant index expressions. */
3456 else if (CASE_USE_BIT_TESTS
3457 && ! TREE_CONSTANT (index_expr)
3458 && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
3459 && compare_tree_int (range, 0) > 0
3460 && lshift_cheap_p ()
3461 && ((uniq == 1 && count >= 3)
3462 || (uniq == 2 && count >= 5)
3463 || (uniq == 3 && count >= 6)))
3465 /* Optimize the case where all the case values fit in a
3466 word without having to subtract MINVAL. In this case,
3467 we can optimize away the subtraction. */
3468 if (compare_tree_int (minval, 0) > 0
3469 && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
3471 minval = integer_zero_node;
3474 emit_case_bit_tests (index_type, index_expr, minval, range,
3475 thiscase->data.case_stmt.case_list,
3479 /* If range of values is much bigger than number of values,
3480 make a sequence of conditional branches instead of a dispatch.
3481 If the switch-index is a constant, do it this way
3482 because we can optimize it. */
3484 else if (count < case_values_threshold ()
3485 || compare_tree_int (range,
3486 (optimize_size ? 3 : 10) * count) > 0
3487 /* RANGE may be signed, and really large ranges will show up
3488 as negative numbers. */
3489 || compare_tree_int (range, 0) < 0
3490 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
3493 || TREE_CONSTANT (index_expr)
3494 /* If neither casesi or tablejump is available, we can
3495 only go this way. */
3496 || (!HAVE_casesi && !HAVE_tablejump))
3498 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
3500 /* If the index is a short or char that we do not have
3501 an insn to handle comparisons directly, convert it to
3502 a full integer now, rather than letting each comparison
3503 generate the conversion. */
3505 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
3506 && ! have_insn_for (COMPARE, GET_MODE (index)))
3508 enum machine_mode wider_mode;
3509 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
3510 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
3511 if (have_insn_for (COMPARE, wider_mode))
3513 index = convert_to_mode (wider_mode, index, unsignedp);
3519 do_pending_stack_adjust ();
3521 index = protect_from_queue (index, 0);
3523 index = copy_to_reg (index);
3524 if (GET_CODE (index) == CONST_INT
3525 || TREE_CODE (index_expr) == INTEGER_CST)
3527 /* Make a tree node with the proper constant value
3528 if we don't already have one. */
3529 if (TREE_CODE (index_expr) != INTEGER_CST)
3532 = build_int_2 (INTVAL (index),
3533 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
3534 index_expr = convert (index_type, index_expr);
3537 /* For constant index expressions we need only
3538 issue an unconditional branch to the appropriate
3539 target code. The job of removing any unreachable
3540 code is left to the optimization phase if the
3541 "-O" option is specified. */
3542 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
3543 if (! tree_int_cst_lt (index_expr, n->low)
3544 && ! tree_int_cst_lt (n->high, index_expr))
3548 emit_jump (label_rtx (n->code_label));
3550 emit_jump (default_label);
3554 /* If the index expression is not constant we generate
3555 a binary decision tree to select the appropriate
3556 target code. This is done as follows:
3558 The list of cases is rearranged into a binary tree,
3559 nearly optimal assuming equal probability for each case.
3561 The tree is transformed into RTL, eliminating
3562 redundant test conditions at the same time.
3564 If program flow could reach the end of the
3565 decision tree an unconditional jump to the
3566 default code is emitted. */
3569 = (TREE_CODE (orig_type) != ENUMERAL_TYPE
3570 && estimate_case_costs (thiscase->data.case_stmt.case_list));
3571 balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
3572 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
3573 default_label, index_type);
3574 emit_jump_if_reachable (default_label);
/* Dispatch-table strategy: casesi first, then tablejump, then an
   explicitly emitted ADDR_VEC/ADDR_DIFF_VEC jump table.  */
3579 table_label = gen_label_rtx ();
3580 if (! try_casesi (index_type, index_expr, minval, range,
3581 table_label, default_label))
3583 index_type = thiscase->data.case_stmt.nominal_type;
3585 /* Index jumptables from zero for suitable values of
3586 minval to avoid a subtraction. */
3588 && compare_tree_int (minval, 0) > 0
3589 && compare_tree_int (minval, 3) < 0)
3591 minval = integer_zero_node;
3595 if (! try_tablejump (index_type, index_expr, minval, range,
3596 table_label, default_label))
3600 /* Get table of labels to jump to, in order of case index. */
3602 ncases = tree_low_cst (range, 0) + 1;
3603 labelvec = alloca (ncases * sizeof (rtx));
3604 memset (labelvec, 0, ncases * sizeof (rtx));
3606 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
3608 /* Compute the low and high bounds relative to the minimum
3609 value since that should fit in a HOST_WIDE_INT while the
3610 actual values may not. */
3612 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
3613 n->low, minval)), 1);
3614 HOST_WIDE_INT i_high
3615 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
3616 n->high, minval)), 1);
3619 for (i = i_low; i <= i_high; i ++)
3621 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
3624 /* Fill in the gaps with the default. */
3625 for (i = 0; i < ncases; i++)
3626 if (labelvec[i] == 0)
3627 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
3629 /* Output the table. */
3630 emit_label (table_label);
3632 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
3633 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
3634 gen_rtx_LABEL_REF (Pmode, table_label),
3635 gen_rtvec_v (ncases, labelvec),
3636 const0_rtx, const0_rtx));
3638 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
3639 gen_rtvec_v (ncases, labelvec)));
3641 /* If the case insn drops through the table,
3642 after the table we must jump to the default-label.
3643 Otherwise record no drop-through after the table. */
3644 #ifdef CASE_DROPS_THROUGH
3645 emit_jump (default_label);
/* Move all insns emitted for the dispatch back before the case body.  */
3651 before_case = NEXT_INSN (before_case);
3652 end = get_last_insn ();
3653 if (squeeze_notes (&before_case, &end))
3655 reorder_insns (before_case, end,
3656 thiscase->data.case_stmt.start);
3659 if (thiscase->exit_label && !exit_done)
3660 emit_label (thiscase->exit_label);
3662 POPSTACK (case_stack);
3667 /* Convert the tree NODE into a list linked by the right field, with the left
3668 field zeroed. RIGHT is used for recursion; it is a list to be placed
3669 rightmost in the resulting list. */
/* NOTE(review): this excerpt is elided (the embedded original line numbers
   are non-contiguous) -- the braces, the guard on node->right, the
   node->left clearing, and the final return are not visible here.  */
3671 static struct case_node *
3672 case_tree2list (struct case_node *node, struct case_node *right)
3674 struct case_node *left;
/* Flatten the right subtree first; its nodes end up rightmost in the
   resulting list.  */
3677 right = case_tree2list (node->right, right);
3679 node->right = right;
/* If there is a left subtree, recurse on it with NODE (and the list
   already linked behind it) as the accumulated tail.  */
3680 if ((left = node->left))
3683 return case_tree2list (left, node);
3689 /* Generate code to jump to LABEL if OP1 and OP2 are equal.
   UNSIGNEDP says whether an emitted comparison should be unsigned.
   (Return type line and parts of the body are elided from this excerpt.)  */
3692 do_jump_if_equal (rtx op1, rtx op2, rtx label, int unsignedp)
/* Both operands are compile-time constants: the equality is decidable
   now.  The branch body is elided here -- presumably it emits an
   unconditional jump only when the constants compare equal; confirm
   against the full source.  */
3694 if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
/* Otherwise emit a real compare-and-branch.  CONST_INTs carry VOIDmode,
   so take the comparison mode from whichever operand has a real
   machine mode.  */
3700 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
3701 (GET_MODE (op1) == VOIDmode
3702 ? GET_MODE (op2) : GET_MODE (op1)),
3706 /* Not all case values are encountered equally. This function
3707 uses a heuristic to weight case labels, in cases where that
3708 looks like a reasonable thing to do.
3710 Right now, all we try to guess is text, and we establish the
3713 chars above space: 16
3722 If we find any cases in the switch that are not either -1 or in the range
3723 of valid ASCII characters, or are control characters other than those
3724 commonly used with "\", don't treat this switch scanning text.
3726 Return 1 if these nodes are suitable for cost estimation, otherwise
/* (Return type line elided from this excerpt.)  */
3730 estimate_case_costs (case_node_ptr node)
/* Valid "text" case values lie in [-1, 127]; anything outside that
   range disqualifies the whole switch below.  */
3732 tree min_ascii = integer_minus_one_node;
3733 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
3737 /* If we haven't already made the cost table, make it now. Note that the
3738 lower bound of the table is -1, not zero. */
3740 if (! cost_table_initialized)
3742 cost_table_initialized = 1;
3744 for (i = 0; i < 128; i++)
3747 COST_TABLE (i) = 16;
3748 else if (ISPUNCT (i))
/* -1 is a sentinel: a control character that disqualifies the switch
   from being treated as text (checked by the COST_TABLE (i) < 0 scan
   below).  The common "backslash" control characters get real weights
   immediately afterwards.  */
3750 else if (ISCNTRL (i))
3751 COST_TABLE (i) = -1;
3754 COST_TABLE (' ') = 8;
3755 COST_TABLE ('\t') = 4;
3756 COST_TABLE ('\0') = 4;
3757 COST_TABLE ('\n') = 2;
3758 COST_TABLE ('\f') = 1;
3759 COST_TABLE ('\v') = 1;
3760 COST_TABLE ('\b') = 1;
3763 /* See if all the case expressions look like text. It is text if the
3764 constant is >= -1 and the highest constant is <= 127. Do all comparisons
3765 as signed arithmetic since we don't want to ever access cost_table with a
3766 value less than -1. Also check that none of the constants in a range
3767 are strange control characters. */
3769 for (n = node; n; n = n->right)
3771 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
/* Walk every value in the node's range; a negative table entry marks
   a disqualifying control character.  */
3774 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
3775 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
3776 if (COST_TABLE (i) < 0)
3780 /* All interesting values are within the range of interesting
3781 ASCII characters. */
3785 /* Determine whether two case labels branch to the same target.
   (Return type, declarations and the final comparison are elided from
   this excerpt.)  */
3788 same_case_target_p (rtx l1, rtx l2)
/* Find the first real (non-note) insn after each label...  */
3796 i1 = next_real_insn (l1);
3797 i2 = next_real_insn (l2);
/* ...and when that insn is a simple unconditional jump, follow it one
   step so each label is represented by its ultimate target.  */
3801 if (i1 && simplejump_p (i1))
3803 l1 = XEXP (SET_SRC (PATTERN (i1)), 0);
3806 if (i2 && simplejump_p (i2))
3808 l2 = XEXP (SET_SRC (PATTERN (i2)), 0);
3811 /* When coming from gimple, we usually won't have emitted either
3812 the labels or the body of the switch statement. The job being
3813 done here should be done via jump threading at the tree level.
3814 Cases that go the same place should have the same label. */
3818 /* Delete nodes that branch to the default label from a list of
3819 case nodes. Eg. case 5: default: becomes just default: */
3822 strip_default_case_nodes (case_node_ptr *prev, rtx deflab)
/* Unlink every case node whose label ultimately branches to DEFLAB.
   The list-walking and unlink code surrounding this test is elided
   from the excerpt; PREV presumably points at the link being
   rewritten -- confirm against the full source.  */
3829 if (same_case_target_p (label_rtx (ptr->code_label), deflab))
3836 /* Scan an ordered list of case nodes
3837 combining those with consecutive values or ranges.
3839 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
3842 group_case_nodes (case_node_ptr head)
3844 case_node_ptr node = head;
3849 case_node_ptr np = node;
3851 lab = label_rtx (node->code_label);
3853 /* Try to group the successors of NODE with NODE. */
/* NOTE(review): the second operands of the two PLUS_EXPRs below
   (presumably node->high and integer_one_node for the first, and
   np->high for the second) are elided from this excerpt.  */
3854 while (((np = np->right) != 0)
3855 /* Do they jump to the same place? */
3856 && same_case_target_p (label_rtx (np->code_label), lab)
3857 /* Are their ranges consecutive? */
3858 && tree_int_cst_equal (np->low,
3859 fold (build (PLUS_EXPR,
3860 TREE_TYPE (node->high),
3863 /* An overflow is not consecutive. */
3864 && tree_int_cst_lt (node->high,
3865 fold (build (PLUS_EXPR,
3866 TREE_TYPE (node->high),
3868 integer_one_node))))
/* Absorb NP's range into NODE by extending NODE's upper bound.  */
3870 node->high = np->high;
3872 /* NP is the first node after NODE which can't be grouped with it.
3873 Delete the nodes in between, and move on to that node. */
3879 /* Take an ordered list of case nodes
3880 and transform them into a near optimal binary tree,
3881 on the assumption that any target code selection value is as
3882 likely as any other.
3884 The transformation is performed by splitting the ordered
3885 list into two equal sections plus a pivot. The parts are
3886 then attached to the pivot as left and right branches. Each
3887 branch is then transformed recursively. */
/* NOTE(review): large parts of this function (declarations, the counting
   loop header, the split thresholds and the detach/reattach code) are
   elided from this excerpt.  */
3890 balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
3903 /* Count the number of entries on branch. Also count the ranges. */
/* Ranges (low != high) contribute both endpoints to the cost; single
   values contribute one entry.  */
3907 if (!tree_int_cst_equal (np->low, np->high))
3911 cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
3915 cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
3923 /* Split this list if it is long enough for that to help. */
/* First splitting strategy: use the character-frequency cost table
   (see estimate_case_costs) to pick the pivot.  */
3928 /* Find the place in the list that bisects the list's total cost;
3929 here I gets half the total cost. */
3934 /* Skip nodes while their cost does not reach that amount. */
3935 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
3936 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
3937 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
3940 npp = &(*npp)->right;
3945 /* Leave this branch lopsided, but optimize left-hand
3946 side and fill in `parent' fields for right-hand side. */
3948 np->parent = parent;
3949 balance_case_nodes (&np->left, np);
3950 for (; np->right; np = np->right)
3951 np->right->parent = np;
3955 /* If there are just three nodes, split at the middle one. */
3957 npp = &(*npp)->right;
/* Second splitting strategy (no usable cost table): bisect by entry
   count, with ranges counting double.  */
3960 /* Find the place in the list that bisects the list's total cost,
3961 where ranges count as 2.
3962 Here I gets half the total cost. */
3963 i = (i + ranges + 1) / 2;
3966 /* Skip nodes while their cost does not reach that amount. */
3967 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
3972 npp = &(*npp)->right;
3977 np->parent = parent;
3980 /* Optimize each of the two split parts. */
3981 balance_case_nodes (&np->left, np);
3982 balance_case_nodes (&np->right, np);
3986 /* Else leave this branch as one level,
3987 but fill in `parent' fields. */
3989 np->parent = parent;
3990 for (; np->right; np = np->right)
3991 np->right->parent = np;
3996 /* Search the parent sections of the case node tree
3997 to see if a test for the lower bound of NODE would be redundant.
3998 INDEX_TYPE is the type of the index expression.
4000 The instructions to generate the case decision tree are
4001 output in the same order as nodes are processed so it is
4002 known that if a parent node checks the range of the current
4003 node minus one that the current node is bounded at its lower
4004 span. Thus the test would be redundant. */
4007 node_has_low_bound (case_node_ptr node, tree index_type)
4010 case_node_ptr pnode;
4012 /* If the lower bound of this node is the lowest value in the index type,
4013 we need not test it. */
4015 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
4018 /* If this node has a left branch, the value at the left must be less
4019 than that at this node, so it cannot be bounded at the bottom and
4020 we need not bother testing any further. */
/* Compute node->low - 1 so we can look for a parent whose high bound
   is exactly adjacent below us.  */
4025 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
4026 node->low, integer_one_node));
4028 /* If the subtraction above overflowed, we can't verify anything.
4029 Otherwise, look for a parent that tests our value - 1. */
/* Overflow detection: node->low - 1 must compare strictly below
   node->low; if it doesn't, the subtraction wrapped.  */
4031 if (! tree_int_cst_lt (low_minus_one, node->low))
4034 for (pnode = node->parent; pnode; pnode = pnode->parent)
4035 if (tree_int_cst_equal (low_minus_one, pnode->high))
4041 /* Search the parent sections of the case node tree
4042 to see if a test for the upper bound of NODE would be redundant.
4043 INDEX_TYPE is the type of the index expression.
4045 The instructions to generate the case decision tree are
4046 output in the same order as nodes are processed so it is
4047 known that if a parent node checks the range of the current
4048 node plus one that the current node is bounded at its upper
4049 span. Thus the test would be redundant. */
4052 node_has_high_bound (case_node_ptr node, tree index_type)
4055 case_node_ptr pnode;
4057 /* If there is no upper bound, obviously no test is needed. */
4059 if (TYPE_MAX_VALUE (index_type) == NULL)
4062 /* If the upper bound of this node is the highest value in the type
4063 of the index expression, we need not test against it. */
4065 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
4068 /* If this node has a right branch, the value at the right must be greater
4069 than that at this node, so it cannot be bounded at the top and
4070 we need not bother testing any further. */
/* Compute node->high + 1 so we can look for a parent whose low bound
   is exactly adjacent above us.  */
4075 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
4076 node->high, integer_one_node));
4078 /* If the addition above overflowed, we can't verify anything.
4079 Otherwise, look for a parent that tests our value + 1. */
/* Overflow detection: node->high must compare strictly below
   node->high + 1; if it doesn't, the addition wrapped.  */
4081 if (! tree_int_cst_lt (node->high, high_plus_one))
4084 for (pnode = node->parent; pnode; pnode = pnode->parent)
4085 if (tree_int_cst_equal (high_plus_one, pnode->low))
4091 /* Search the parent sections of the
4092 case node tree to see if both tests for the upper and lower
4093 bounds of NODE would be redundant.
   Returns nonzero when parents already constrain the index to NODE's
   range on both sides, so no comparison need be emitted at all.  */
4096 node_is_bounded (case_node_ptr node, tree index_type)
4098 return (node_has_low_bound (node, index_type)
4099 && node_has_high_bound (node, index_type));
4102 /* Emit an unconditional jump to LABEL unless it would be dead code. */
4105 emit_jump_if_reachable (rtx label)
/* A BARRIER as the last emitted insn means control cannot fall through
   to this point, so the jump would be unreachable.  (The guarded
   emit_jump call is elided from this excerpt.)  */
4107 if (GET_CODE (get_last_insn ()) != BARRIER)
4111 /* Emit step-by-step code to select a case for the value of INDEX.
4112 The thus generated decision tree follows the form of the
4113 case-node binary tree NODE, whose nodes represent test conditions.
4114 INDEX_TYPE is the type of the index of the switch.
4116 Care is taken to prune redundant tests from the decision tree
4117 by detecting any boundary conditions already checked by
4118 emitted rtx. (See node_has_high_bound, node_has_low_bound
4119 and node_is_bounded, above.)
4121 Where the test conditions can be shown to be redundant we emit
4122 an unconditional jump to the target code. As a further
4123 optimization, the subordinates of a tree node are examined to
4124 check for bounded nodes. In this case conditional and/or
4125 unconditional jumps as a result of the boundary check for the
4126 current node are arranged to target the subordinates associated
4127 code for out of bound conditions on the current node.
4129 We can assume that when control reaches the code generated here,
4130 the index value has already been compared with the parents
4131 of this node, and determined to be on the same side of each parent
4132 as this node is. Thus, if this node tests for the value 51,
4133 and a parent tested for 52, we don't need to consider
4134 the possibility of a value greater than 51. If another parent
4135 tests for the value 50, then this node need not test anything. */
/* NOTE(review): this excerpt is elided -- the embedded original line
   numbers are non-contiguous, so many argument continuations, braces
   and else-clauses are missing between the fragments below.  */
4138 emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
4141 /* If INDEX has an unsigned type, we must make unsigned branches. */
4142 int unsignedp = TYPE_UNSIGNED (index_type);
4143 enum machine_mode mode = GET_MODE (index);
4144 enum machine_mode imode = TYPE_MODE (index_type);
4146 /* See if our parents have already tested everything for us.
4147 If they have, emit an unconditional jump for this node. */
4148 if (node_is_bounded (node, index_type))
4149 emit_jump (label_rtx (node->code_label));
4151 else if (tree_int_cst_equal (node->low, node->high))
4153 /* Node is single valued. First see if the index expression matches
4154 this node and then check our children, if any. */
4156 do_jump_if_equal (index,
4157 convert_modes (mode, imode,
4158 expand_expr (node->low, NULL_RTX,
4161 label_rtx (node->code_label), unsignedp);
4163 if (node->right != 0 && node->left != 0)
4165 /* This node has children on both sides.
4166 Dispatch to one side or the other
4167 by comparing the index value with this node's value.
4168 If one subtree is bounded, check that one first,
4169 so we can avoid real branches in the tree. */
4171 if (node_is_bounded (node->right, index_type))
4173 emit_cmp_and_jump_insns (index,
4176 expand_expr (node->high, NULL_RTX,
4179 GT, NULL_RTX, mode, unsignedp,
4180 label_rtx (node->right->code_label));
4181 emit_case_nodes (index, node->left, default_label, index_type);
4184 else if (node_is_bounded (node->left, index_type))
4186 emit_cmp_and_jump_insns (index,
4189 expand_expr (node->high, NULL_RTX,
4192 LT, NULL_RTX, mode, unsignedp,
4193 label_rtx (node->left->code_label));
4194 emit_case_nodes (index, node->right, default_label, index_type);
4197 /* If both children are single-valued cases with no
4198 children, finish up all the work. This way, we can save
4199 one ordered comparison. */
4200 else if (tree_int_cst_equal (node->right->low, node->right->high)
4201 && node->right->left == 0
4202 && node->right->right == 0
4203 && tree_int_cst_equal (node->left->low, node->left->high)
4204 && node->left->left == 0
4205 && node->left->right == 0)
4207 /* Neither node is bounded. First distinguish the two sides;
4208 then emit the code for one side at a time. */
4210 /* See if the value matches what the right hand side
4212 do_jump_if_equal (index,
4213 convert_modes (mode, imode,
4214 expand_expr (node->right->low,
4218 label_rtx (node->right->code_label),
4221 /* See if the value matches what the left hand side
4223 do_jump_if_equal (index,
4224 convert_modes (mode, imode,
4225 expand_expr (node->left->low,
4229 label_rtx (node->left->code_label),
4235 /* Neither node is bounded. First distinguish the two sides;
4236 then emit the code for one side at a time. */
4238 tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4240 /* See if the value is on the right. */
4241 emit_cmp_and_jump_insns (index,
4244 expand_expr (node->high, NULL_RTX,
4247 GT, NULL_RTX, mode, unsignedp,
4248 label_rtx (test_label));
4250 /* Value must be on the left.
4251 Handle the left-hand subtree. */
4252 emit_case_nodes (index, node->left, default_label, index_type);
4253 /* If left-hand subtree does nothing,
4255 emit_jump_if_reachable (default_label);
4257 /* Code branches here for the right-hand subtree. */
4258 expand_label (test_label);
4259 emit_case_nodes (index, node->right, default_label, index_type);
4263 else if (node->right != 0 && node->left == 0)
4265 /* Here we have a right child but no left so we issue conditional
4266 branch to default and process the right child.
4268 Omit the conditional branch to default if it would avoid only one
4269 right child; it costs too much space to save so little time. */
4271 if (node->right->right || node->right->left
4272 || !tree_int_cst_equal (node->right->low, node->right->high))
4274 if (!node_has_low_bound (node, index_type))
4276 emit_cmp_and_jump_insns (index,
4279 expand_expr (node->high, NULL_RTX,
4282 LT, NULL_RTX, mode, unsignedp,
4286 emit_case_nodes (index, node->right, default_label, index_type);
4289 /* We cannot process node->right normally
4290 since we haven't ruled out the numbers less than
4291 this node's value. So handle node->right explicitly. */
4292 do_jump_if_equal (index,
4295 expand_expr (node->right->low, NULL_RTX,
4298 label_rtx (node->right->code_label), unsignedp);
4301 else if (node->right == 0 && node->left != 0)
4303 /* Just one subtree, on the left. */
4304 if (node->left->left || node->left->right
4305 || !tree_int_cst_equal (node->left->low, node->left->high))
4307 if (!node_has_high_bound (node, index_type))
4309 emit_cmp_and_jump_insns (index,
4312 expand_expr (node->high, NULL_RTX,
4315 GT, NULL_RTX, mode, unsignedp,
4319 emit_case_nodes (index, node->left, default_label, index_type);
4322 /* We cannot process node->left normally
4323 since we haven't ruled out the numbers greater than
4324 this node's value. So handle node->left explicitly. */
4325 do_jump_if_equal (index,
4328 expand_expr (node->left->low, NULL_RTX,
4331 label_rtx (node->left->code_label), unsignedp);
4336 /* Node is a range. These cases are very similar to those for a single
4337 value, except that we do not start by testing whether this node
4338 is the one to branch to. */
4340 if (node->right != 0 && node->left != 0)
4342 /* Node has subtrees on both sides.
4343 If the right-hand subtree is bounded,
4344 test for it first, since we can go straight there.
4345 Otherwise, we need to make a branch in the control structure,
4346 then handle the two subtrees. */
4347 tree test_label = 0;
4349 if (node_is_bounded (node->right, index_type))
4350 /* Right hand node is fully bounded so we can eliminate any
4351 testing and branch directly to the target code. */
4352 emit_cmp_and_jump_insns (index,
4355 expand_expr (node->high, NULL_RTX,
4358 GT, NULL_RTX, mode, unsignedp,
4359 label_rtx (node->right->code_label));
4362 /* Right hand node requires testing.
4363 Branch to a label where we will handle it later. */
4365 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4366 emit_cmp_and_jump_insns (index,
4369 expand_expr (node->high, NULL_RTX,
4372 GT, NULL_RTX, mode, unsignedp,
4373 label_rtx (test_label));
4376 /* Value belongs to this node or to the left-hand subtree. */
4378 emit_cmp_and_jump_insns (index,
4381 expand_expr (node->low, NULL_RTX,
4384 GE, NULL_RTX, mode, unsignedp,
4385 label_rtx (node->code_label));
4387 /* Handle the left-hand subtree. */
4388 emit_case_nodes (index, node->left, default_label, index_type);
4390 /* If right node had to be handled later, do that now. */
4394 /* If the left-hand subtree fell through,
4395 don't let it fall into the right-hand subtree. */
4396 emit_jump_if_reachable (default_label);
4398 expand_label (test_label);
4399 emit_case_nodes (index, node->right, default_label, index_type);
4403 else if (node->right != 0 && node->left == 0)
4405 /* Deal with values to the left of this node,
4406 if they are possible. */
4407 if (!node_has_low_bound (node, index_type))
4409 emit_cmp_and_jump_insns (index,
4412 expand_expr (node->low, NULL_RTX,
4415 LT, NULL_RTX, mode, unsignedp,
4419 /* Value belongs to this node or to the right-hand subtree. */
4421 emit_cmp_and_jump_insns (index,
4424 expand_expr (node->high, NULL_RTX,
4427 LE, NULL_RTX, mode, unsignedp,
4428 label_rtx (node->code_label));
4430 emit_case_nodes (index, node->right, default_label, index_type);
4433 else if (node->right == 0 && node->left != 0)
4435 /* Deal with values to the right of this node,
4436 if they are possible. */
4437 if (!node_has_high_bound (node, index_type))
4439 emit_cmp_and_jump_insns (index,
4442 expand_expr (node->high, NULL_RTX,
4445 GT, NULL_RTX, mode, unsignedp,
4449 /* Value belongs to this node or to the left-hand subtree. */
4451 emit_cmp_and_jump_insns (index,
4454 expand_expr (node->low, NULL_RTX,
4457 GE, NULL_RTX, mode, unsignedp,
4458 label_rtx (node->code_label));
4460 emit_case_nodes (index, node->left, default_label, index_type);
4465 /* Node has no children so we check low and high bounds to remove
4466 redundant tests. Only one of the bounds can exist,
4467 since otherwise this node is bounded--a case tested already. */
4468 int high_bound = node_has_high_bound (node, index_type);
4469 int low_bound = node_has_low_bound (node, index_type);
4471 if (!high_bound && low_bound)
4473 emit_cmp_and_jump_insns (index,
4476 expand_expr (node->high, NULL_RTX,
4479 GT, NULL_RTX, mode, unsignedp,
4483 else if (!low_bound && high_bound)
4485 emit_cmp_and_jump_insns (index,
4488 expand_expr (node->low, NULL_RTX,
4491 LT, NULL_RTX, mode, unsignedp,
4494 else if (!low_bound && !high_bound)
4496 /* Widen LOW and HIGH to the same width as INDEX. */
4497 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
4498 tree low = build1 (CONVERT_EXPR, type, node->low);
4499 tree high = build1 (CONVERT_EXPR, type, node->high);
4500 rtx low_rtx, new_index, new_bound;
4502 /* Instead of doing two branches, emit one unsigned branch for
4503 (index-low) > (high-low). */
4504 low_rtx = expand_expr (low, NULL_RTX, mode, 0);
4505 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
4506 NULL_RTX, unsignedp,
4508 new_bound = expand_expr (fold (build (MINUS_EXPR, type,
4512 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
4513 mode, 1, default_label);
/* Index is now known to lie within [low, high]: branch to the case body.  */
4516 emit_jump (label_rtx (node->code_label));
4521 #include "gt-stmt.h"