1 /* Expands front end tree to back end RTL for GCC
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 It also creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
27 The functions whose names start with `expand_' are called by the
28 parser to generate RTL instructions for various kinds of constructs.
30 Some control and binding constructs require calling several such
31 functions at different times. For example, a simple if-then
32 is expanded by calling `expand_start_cond' (with the condition-expression
33 as argument) before parsing the then-clause and calling `expand_end_cond'
34 after parsing the then-clause. */
38 #include "coretypes.h"
47 #include "insn-config.h"
50 #include "hard-reg-set.h"
57 #include "langhooks.h"
63 /* Functions and data structures for expanding case statements. */
65 /* Case label structure, used to hold info on labels within case
66 statements. We handle "range" labels; for a single-value label
67 as in C, the high and low limits are the same.
69 An AVL tree of case nodes is initially created, and later transformed
70 to a list linked via the RIGHT fields in the nodes. Nodes with
71 higher case values are later in the list.
73 Switch statements can be output in one of two forms. A branch table
74 is used if there are more than a few labels and the labels are dense
75 within the range between the smallest and largest case value. If a
76 branch table is used, no further manipulations are done with the case
79 The alternative to the use of a branch table is to generate a series
80 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
81 and PARENT fields to hold a binary tree. Initially the tree is
82 totally unbalanced, with everything on the right. We balance the tree
83 with nodes on the left having lower case values than the parent
84 and nodes on the right having higher values. We then output the tree
87 struct case_node GTY(())
89 struct case_node *left; /* Left son in binary tree */
90 struct case_node *right; /* Right son in binary tree; also node chain */
91 struct case_node *parent; /* Parent of node in binary tree */
92 tree low; /* Lowest index value for this label */
93 tree high; /* Highest index value for this label */
94 tree code_label; /* Label to jump to when node matches */
/* Convenience typedefs for the case-label tree nodes described above.  */
typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* Per-value branch-cost table.  This must be a signed type, and
   non-ANSI compilers lack signed char, hence the use of short.
   129 slots cover values -1 through 127 -- see COST_TABLE, which
   indexes at (I) + 1.  */
static short cost_table_[129];
/* Nonzero when cost_table_ should be consulted -- presumably decided
   by estimate_case_costs; confirm against its definition.  */
static int use_cost_table;
/* Nonzero once cost_table_ has been filled in (NOTE(review): inferred
   from the name -- verify where it is set).  */
static int cost_table_initialized;
/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
   is unsigned; the (I) + 1 below maps -1 to slot 0 of cost_table_.  */
#define COST_TABLE(I) cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
112 /* Stack of control and binding constructs we are currently inside.
114 These constructs begin when you call `expand_start_WHATEVER'
115 and end when you call `expand_end_WHATEVER'. This stack records
116 info about how the construct began that tells the end-function
117 what to do. It also may provide information about the construct
118 to alter the behavior of other constructs within the body.
119 For example, they may affect the behavior of C `break' and `continue'.
121 Each construct gets one `struct nesting' object.
122 All of these objects are chained through the `all' field.
123 `nesting_stack' points to the first object (innermost construct).
124 The position of an entry on `nesting_stack' is in its `depth' field.
126 Each type of construct has its own individual stack.
127 For example, loops have `cond_stack'. Each object points to the
128 next object of the same type through the `next' field.
130 Some constructs are visible to `break' exit-statements and others
131 are not. Which constructs are visible depends on the language.
132 Therefore, the data structure allows each construct to be visible
133 or not, according to the args given when the construct is started.
134 The construct is visible if the `exit_label' field is non-null.
135 In that case, the value should be a CODE_LABEL rtx. */
137 struct nesting GTY(())
140 struct nesting *next;
150 /* For conds (if-then and if-then-else statements). */
153 /* Label for the end of the if construct.
154 There is none if EXITFLAG was not set
155 and no `else' has been seen yet. */
157 /* Label for the end of this alternative.
158 This may be the end of the if or the next else/elseif. */
160 } GTY ((tag ("COND_NESTING"))) cond;
161 /* For variable binding contours. */
164 /* Sequence number of this binding contour within the function,
165 in order of entry. */
166 int block_start_count;
167 /* The NOTE that starts this contour.
168 Used by expand_goto to check whether the destination
169 is within each contour or not. */
171 /* The saved target_temp_slot_level from our outer block.
172 We may reset target_temp_slot_level to be the level of
173 this block, if that is done, target_temp_slot_level
174 reverts to the saved target_temp_slot_level at the very
176 int block_target_temp_slot_level;
177 /* True if we are currently emitting insns in an area of
178 output code that is controlled by a conditional
179 expression. This is used by the cleanup handling code to
180 generate conditional cleanup actions. */
181 int conditional_code;
182 /* A place to move the start of the exception region for any
183 of the conditional cleanups, must be at the end or after
184 the start of the last unconditional cleanup, and before any
185 conditional branch points. */
186 rtx last_unconditional_cleanup;
187 } GTY ((tag ("BLOCK_NESTING"))) block;
188 /* For switch (C) or case (Pascal) statements. */
191 /* The insn after which the case dispatch should finally
192 be emitted. Zero for a dummy. */
194 /* A list of case labels; it is first built as an AVL tree.
195 During expand_end_case, this is converted to a list, and may be
196 rearranged into a nearly balanced binary tree. */
197 struct case_node *case_list;
198 /* Label to jump to if no case matches. */
200 /* The expression to be dispatched on. */
202 /* Type that INDEX_EXPR should be converted to. */
204 /* Name of this kind of statement, for warnings. */
205 const char *printname;
206 /* Used to save no_line_numbers till we see the first case label.
207 We set this to -1 when we see the first case label in this
209 int line_number_status;
210 } GTY ((tag ("CASE_NESTING"))) case_stmt;
211 } GTY ((desc ("%1.desc"))) data;
/* Allocate and return a new `struct nesting'.  The storage comes from
   the garbage-collected (ggc) heap, so it is never explicitly freed.  */
#define ALLOC_NESTING() ggc_alloc (sizeof (struct nesting))
/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.

   The loop variable is named `popped' rather than `this' so that
   this file also compiles with a C++ compiler, where `this' is a
   reserved keyword; the rename is purely local to the macro body.  */

#define POPSTACK(STACK)					\
do { struct nesting *target = STACK;			\
     struct nesting *popped;				\
     do { popped = nesting_stack;			\
	  if (cond_stack == popped)			\
	    cond_stack = cond_stack->next;		\
	  if (block_stack == popped)			\
	    block_stack = block_stack->next;		\
	  if (case_stack == popped)			\
	    case_stack = case_stack->next;		\
	  nesting_depth = nesting_stack->depth - 1;	\
	  nesting_stack = popped->all; }		\
     while (popped != target); } while (0)
237 /* In some cases it is impossible to generate code for a forward goto
238 until the label definition is seen. This happens when it may be necessary
239 for the goto to reset the stack pointer: we don't yet know how to do that.
240 So expand_goto puts an entry on this fixup list.
241 Each time a binding contour that resets the stack is exited,
243 If the target label has now been defined, we can insert the proper code. */
245 struct goto_fixup GTY(())
247 /* Points to following fixup. */
248 struct goto_fixup *next;
249 /* Points to the insn before the jump insn.
250 If more code must be inserted, it goes after this insn. */
252 /* The LABEL_DECL that this jump is jumping to, or 0
253 for break, continue or return. */
255 /* The BLOCK for the place where this goto was found. */
257 /* The CODE_LABEL rtx that this is jumping to. */
259 /* Number of binding contours started in current function
260 before the label reference. */
261 int block_start_count;
264 struct stmt_status GTY(())
266 /* Chain of all pending binding contours. */
267 struct nesting * x_block_stack;
269 /* If any new stacks are added here, add them to POPSTACKS too. */
271 /* Chain of all pending conditional statements. */
272 struct nesting * x_cond_stack;
274 /* Chain of all pending case or switch statements. */
275 struct nesting * x_case_stack;
277 /* Separate chain including all of the above,
278 chained through the `all' field. */
279 struct nesting * x_nesting_stack;
281 /* Number of entries on nesting_stack now. */
284 /* Number of binding contours started so far in this function. */
285 int x_block_start_count;
287 /* Location of last line-number note, whether we actually
288 emitted it or not. */
289 location_t x_emit_locus;
291 struct goto_fixup *x_goto_fixup_chain;
/* Shorthand accessors for the per-function statement state hanging
   off CFUN->STMT (struct stmt_status, defined above).  */
#define block_stack (cfun->stmt->x_block_stack)
#define cond_stack (cfun->stmt->x_cond_stack)
#define case_stack (cfun->stmt->x_case_stack)
#define nesting_stack (cfun->stmt->x_nesting_stack)
#define nesting_depth (cfun->stmt->x_nesting_depth)
#define current_block_start_count (cfun->stmt->x_block_start_count)
#define emit_locus (cfun->stmt->x_emit_locus)
#define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)
/* Nonzero if we are using EH to handle cleanups.  Set to 1 by
   using_eh_for_cleanups (), defined below.  */
int using_eh_for_cleanups_p = 0;
/* Forward declarations for the static helpers defined later in this
   file (asm-constraint parsing, return expansion, and the case/switch
   dispatch machinery).  */
static int n_occurrences (int, const char *);
static bool decl_conflicts_with_clobbers_p (tree, const HARD_REG_SET);
static void expand_nl_goto_receiver (void);
static bool check_operand_nalternatives (tree, tree);
static bool check_unique_operand_names (tree, tree);
static char *resolve_operand_name_1 (char *, tree, tree);
static void expand_null_return_1 (void);
static enum br_predictor return_prediction (rtx);
static rtx shift_return_value (rtx);
static void expand_value_return (rtx);
static void do_jump_if_equal (rtx, rtx, rtx, int);
static int estimate_case_costs (case_node_ptr);
static bool same_case_target_p (rtx, rtx);
static void strip_default_case_nodes (case_node_ptr *, rtx);
static bool lshift_cheap_p (void);
static int case_bit_test_cmp (const void *, const void *);
static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
static void group_case_nodes (case_node_ptr);
static void balance_case_nodes (case_node_ptr *, case_node_ptr);
static int node_has_low_bound (case_node_ptr, tree);
static int node_has_high_bound (case_node_ptr, tree);
static int node_is_bounded (case_node_ptr, tree);
static void emit_jump_if_reachable (rtx);
static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
static struct case_node *case_tree2list (case_node *, case_node *);
333 using_eh_for_cleanups (void)
335 using_eh_for_cleanups_p = 1;
339 init_stmt_for_function (void)
341 cfun->stmt = ggc_alloc_cleared (sizeof (struct stmt_status));
344 /* Record the current file and line. Called from emit_line_note. */
347 set_file_and_line_for_stmt (location_t location)
349 /* If we're outputting an inline function, and we add a line note,
350 there may be no CFUN->STMT information. So, there's no need to
353 emit_locus = location;
356 /* Emit a no-op instruction. */
363 last_insn = get_last_insn ();
365 && (LABEL_P (last_insn)
366 || (NOTE_P (last_insn)
367 && prev_real_insn (last_insn) == 0)))
368 emit_insn (gen_nop ());
371 /* Return the rtx-label that corresponds to a LABEL_DECL,
372 creating it if necessary. */
375 label_rtx (tree label)
377 if (TREE_CODE (label) != LABEL_DECL)
380 if (!DECL_RTL_SET_P (label))
382 rtx r = gen_label_rtx ();
383 SET_DECL_RTL (label, r);
384 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
385 LABEL_PRESERVE_P (r) = 1;
388 return DECL_RTL (label);
391 /* As above, but also put it on the forced-reference list of the
392 function that contains it. */
394 force_label_rtx (tree label)
396 rtx ref = label_rtx (label);
397 tree function = decl_function_context (label);
403 if (function != current_function_decl)
404 p = find_function_data (function);
408 p->expr->x_forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref,
409 p->expr->x_forced_labels);
413 /* Add an unconditional jump to LABEL as the next sequential instruction. */
416 emit_jump (rtx label)
418 do_pending_stack_adjust ();
419 emit_jump_insn (gen_jump (label));
423 /* Emit code to jump to the address
424 specified by the pointer expression EXP. */
427 expand_computed_goto (tree exp)
429 rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
431 x = convert_memory_address (Pmode, x);
434 do_pending_stack_adjust ();
435 emit_indirect_jump (x);
438 /* Handle goto statements and the labels that they can go to. */
440 /* Specify the location in the RTL code of a label LABEL,
441 which is a LABEL_DECL tree node.
443 This is used for the kind of label that the user can jump to with a
444 goto statement, and for alternatives of a switch or case statement.
445 RTL labels generated for loops and conditionals don't go through here;
446 they are generated directly at the RTL level, by other functions below.
448 Note that this has nothing to do with defining label *names*.
449 Languages vary in how they do that and what that even means. */
452 expand_label (tree label)
454 rtx label_r = label_rtx (label);
456 do_pending_stack_adjust ();
457 emit_label (label_r);
458 if (DECL_NAME (label))
459 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
461 if (DECL_NONLOCAL (label))
463 expand_nl_goto_receiver ();
464 nonlocal_goto_handler_labels
465 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
466 nonlocal_goto_handler_labels);
469 if (FORCED_LABEL (label))
470 forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);
472 if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
473 maybe_set_first_label_num (label_r);
476 /* Generate RTL code for a `goto' statement with target label LABEL.
477 LABEL should be a LABEL_DECL tree node that was or will later be
478 defined with `expand_label'. */
481 expand_goto (tree label)
483 #ifdef ENABLE_CHECKING
484 /* Check for a nonlocal goto to a containing function. Should have
485 gotten translated to __builtin_nonlocal_goto. */
486 tree context = decl_function_context (label);
487 if (context != 0 && context != current_function_decl)
491 emit_jump (label_rtx (label));
494 /* Return the number of times character C occurs in string S. */
496 n_occurrences (int c, const char *s)
504 /* Generate RTL for an asm statement (explicit assembler code).
505 STRING is a STRING_CST node containing the assembler code text,
506 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
507 insn is volatile; don't optimize it. */
510 expand_asm (tree string, int vol)
514 if (TREE_CODE (string) == ADDR_EXPR)
515 string = TREE_OPERAND (string, 0);
517 body = gen_rtx_ASM_INPUT (VOIDmode, TREE_STRING_POINTER (string));
519 MEM_VOLATILE_P (body) = vol;
524 /* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
525 OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
526 inputs and NOUTPUTS outputs to this extended-asm. Upon return,
527 *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
528 memory operand. Similarly, *ALLOWS_REG will be TRUE iff the
529 constraint allows the use of a register operand. And, *IS_INOUT
530 will be true if the operand is read-write, i.e., if it is used as
531 an input as well as an output. If *CONSTRAINT_P is not in
532 canonical form, it will be made canonical. (Note that `+' will be
533 replaced with `=' as part of this process.)
535 Returns TRUE if all went well; FALSE if an error occurred. */
538 parse_output_constraint (const char **constraint_p, int operand_num,
539 int ninputs, int noutputs, bool *allows_mem,
540 bool *allows_reg, bool *is_inout)
542 const char *constraint = *constraint_p;
545 /* Assume the constraint doesn't allow the use of either a register
550 /* Allow the `=' or `+' to not be at the beginning of the string,
551 since it wasn't explicitly documented that way, and there is a
552 large body of code that puts it last. Swap the character to
553 the front, so as not to uglify any place else. */
554 p = strchr (constraint, '=');
556 p = strchr (constraint, '+');
558 /* If the string doesn't contain an `=', issue an error
562 error ("output operand constraint lacks `='");
566 /* If the constraint begins with `+', then the operand is both read
567 from and written to. */
568 *is_inout = (*p == '+');
570 /* Canonicalize the output constraint so that it begins with `='. */
571 if (p != constraint || is_inout)
574 size_t c_len = strlen (constraint);
577 warning ("output constraint `%c' for operand %d is not at the beginning",
580 /* Make a copy of the constraint. */
581 buf = alloca (c_len + 1);
582 strcpy (buf, constraint);
583 /* Swap the first character and the `=' or `+'. */
584 buf[p - constraint] = buf[0];
585 /* Make sure the first character is an `='. (Until we do this,
586 it might be a `+'.) */
588 /* Replace the constraint with the canonicalized string. */
589 *constraint_p = ggc_alloc_string (buf, c_len);
590 constraint = *constraint_p;
593 /* Loop through the constraint string. */
594 for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
599 error ("operand constraint contains incorrectly positioned '+' or '='");
603 if (operand_num + 1 == ninputs + noutputs)
605 error ("`%%' constraint used with last operand");
610 case 'V': case 'm': case 'o':
614 case '?': case '!': case '*': case '&': case '#':
615 case 'E': case 'F': case 'G': case 'H':
616 case 's': case 'i': case 'n':
617 case 'I': case 'J': case 'K': case 'L': case 'M':
618 case 'N': case 'O': case 'P': case ',':
621 case '0': case '1': case '2': case '3': case '4':
622 case '5': case '6': case '7': case '8': case '9':
624 error ("matching constraint not valid in output operand");
628 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
629 excepting those that expand_call created. So match memory
646 if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
648 #ifdef EXTRA_CONSTRAINT_STR
649 else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
651 else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
655 /* Otherwise we can't assume anything about the nature of
656 the constraint except that it isn't purely registers.
657 Treat it like "g" and hope for the best. */
668 /* Similar, but for input constraints. */
671 parse_input_constraint (const char **constraint_p, int input_num,
672 int ninputs, int noutputs, int ninout,
673 const char * const * constraints,
674 bool *allows_mem, bool *allows_reg)
676 const char *constraint = *constraint_p;
677 const char *orig_constraint = constraint;
678 size_t c_len = strlen (constraint);
680 bool saw_match = false;
682 /* Assume the constraint doesn't allow the use of either
683 a register or memory. */
687 /* Make sure constraint has neither `=', `+', nor '&'. */
689 for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
690 switch (constraint[j])
692 case '+': case '=': case '&':
693 if (constraint == orig_constraint)
695 error ("input operand constraint contains `%c'", constraint[j]);
701 if (constraint == orig_constraint
702 && input_num + 1 == ninputs - ninout)
704 error ("`%%' constraint used with last operand");
709 case 'V': case 'm': case 'o':
714 case '?': case '!': case '*': case '#':
715 case 'E': case 'F': case 'G': case 'H':
716 case 's': case 'i': case 'n':
717 case 'I': case 'J': case 'K': case 'L': case 'M':
718 case 'N': case 'O': case 'P': case ',':
721 /* Whether or not a numeric constraint allows a register is
722 decided by the matching constraint, and so there is no need
723 to do anything special with them. We must handle them in
724 the default case, so that we don't unnecessarily force
725 operands to memory. */
726 case '0': case '1': case '2': case '3': case '4':
727 case '5': case '6': case '7': case '8': case '9':
734 match = strtoul (constraint + j, &end, 10);
735 if (match >= (unsigned long) noutputs)
737 error ("matching constraint references invalid operand number");
741 /* Try and find the real constraint for this dup. Only do this
742 if the matching constraint is the only alternative. */
744 && (j == 0 || (j == 1 && constraint[0] == '%')))
746 constraint = constraints[match];
747 *constraint_p = constraint;
748 c_len = strlen (constraint);
750 /* ??? At the end of the loop, we will skip the first part of
751 the matched constraint. This assumes not only that the
752 other constraint is an output constraint, but also that
753 the '=' or '+' come first. */
757 j = end - constraint;
758 /* Anticipate increment at end of loop. */
773 if (! ISALPHA (constraint[j]))
775 error ("invalid punctuation `%c' in constraint", constraint[j]);
778 if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
781 #ifdef EXTRA_CONSTRAINT_STR
782 else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
784 else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
788 /* Otherwise we can't assume anything about the nature of
789 the constraint except that it isn't purely registers.
790 Treat it like "g" and hope for the best. */
798 if (saw_match && !*allows_reg)
799 warning ("matching constraint does not allow a register");
804 /* INPUT is one of the input operands from EXPR, an ASM_EXPR. Returns true
805 if it is an operand which must be passed in memory (i.e. an "m"
806 constraint), false otherwise. */
809 asm_op_is_mem_input (tree input, tree expr)
811 const char *constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (input)));
812 tree outputs = ASM_OUTPUTS (expr);
813 int noutputs = list_length (outputs);
814 const char **constraints
815 = (const char **) alloca ((noutputs) * sizeof (const char *));
817 bool allows_mem, allows_reg;
820 /* Collect output constraints. */
821 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
822 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
824 /* We pass 0 for input_num, ninputs and ninout; they are only used for
825 error checking which will be done at expand time. */
826 parse_input_constraint (&constraint, 0, 0, noutputs, 0, constraints,
827 &allows_mem, &allows_reg);
828 return (!allows_reg && allows_mem);
831 /* Check for overlap between registers marked in CLOBBERED_REGS and
832 anything inappropriate in DECL. Emit error and return TRUE for error,
836 decl_conflicts_with_clobbers_p (tree decl, const HARD_REG_SET clobbered_regs)
838 /* Conflicts between asm-declared register variables and the clobber
839 list are not allowed. */
840 if ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
841 && DECL_REGISTER (decl)
842 && REG_P (DECL_RTL (decl))
843 && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
845 rtx reg = DECL_RTL (decl);
848 for (regno = REGNO (reg);
850 + hard_regno_nregs[REGNO (reg)][GET_MODE (reg)]);
852 if (TEST_HARD_REG_BIT (clobbered_regs, regno))
854 error ("asm-specifier for variable `%s' conflicts with asm clobber list",
855 IDENTIFIER_POINTER (DECL_NAME (decl)));
857 /* Reset registerness to stop multiple errors emitted for a
859 DECL_REGISTER (decl) = 0;
866 /* Generate RTL for an asm statement with arguments.
867 STRING is the instruction template.
868 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
869 Each output or input has an expression in the TREE_VALUE and
870 and a tree list in TREE_PURPOSE which in turn contains a constraint
871 name in TREE_VALUE (or NULL_TREE) and a constraint string
873 CLOBBERS is a list of STRING_CST nodes each naming a hard register
874 that is clobbered by this insn.
876 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
877 Some elements of OUTPUTS may be replaced with trees representing temporary
878 values. The caller should copy those temporary values to the originally
881 VOL nonzero means the insn is volatile; don't optimize it. */
884 expand_asm_operands (tree string, tree outputs, tree inputs,
885 tree clobbers, int vol, location_t locus)
887 rtvec argvec, constraintvec;
889 int ninputs = list_length (inputs);
890 int noutputs = list_length (outputs);
893 HARD_REG_SET clobbered_regs;
894 int clobber_conflict_found = 0;
898 /* Vector of RTX's of evaluated output operands. */
899 rtx *output_rtx = alloca (noutputs * sizeof (rtx));
900 int *inout_opnum = alloca (noutputs * sizeof (int));
901 rtx *real_output_rtx = alloca (noutputs * sizeof (rtx));
902 enum machine_mode *inout_mode
903 = alloca (noutputs * sizeof (enum machine_mode));
904 const char **constraints
905 = alloca ((noutputs + ninputs) * sizeof (const char *));
906 int old_generating_concat_p = generating_concat_p;
908 /* An ASM with no outputs needs to be treated as volatile, for now. */
912 if (! check_operand_nalternatives (outputs, inputs))
915 string = resolve_asm_operand_names (string, outputs, inputs);
917 /* Collect constraints. */
919 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
920 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
921 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
922 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
924 /* Sometimes we wish to automatically clobber registers across an asm.
925 Case in point is when the i386 backend moved from cc0 to a hard reg --
926 maintaining source-level compatibility means automatically clobbering
927 the flags register. */
928 clobbers = targetm.md_asm_clobbers (clobbers);
930 /* Count the number of meaningful clobbered registers, ignoring what
931 we would ignore later. */
933 CLEAR_HARD_REG_SET (clobbered_regs);
934 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
936 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
938 i = decode_reg_name (regname);
939 if (i >= 0 || i == -4)
942 error ("unknown register name `%s' in `asm'", regname);
944 /* Mark clobbered registers. */
947 /* Clobbering the PIC register is an error */
948 if (i == (int) PIC_OFFSET_TABLE_REGNUM)
950 error ("PIC register `%s' clobbered in `asm'", regname);
954 SET_HARD_REG_BIT (clobbered_regs, i);
958 /* First pass over inputs and outputs checks validity and sets
959 mark_addressable if needed. */
962 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
964 tree val = TREE_VALUE (tail);
965 tree type = TREE_TYPE (val);
966 const char *constraint;
971 /* If there's an erroneous arg, emit no insn. */
972 if (type == error_mark_node)
975 /* Try to parse the output constraint. If that fails, there's
976 no point in going further. */
977 constraint = constraints[i];
978 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
979 &allows_mem, &allows_reg, &is_inout))
986 && REG_P (DECL_RTL (val))
987 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
988 lang_hooks.mark_addressable (val);
995 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
997 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1001 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
1003 bool allows_reg, allows_mem;
1004 const char *constraint;
1006 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
1007 would get VOIDmode and that could cause a crash in reload. */
1008 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1011 constraint = constraints[i + noutputs];
1012 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1013 constraints, &allows_mem, &allows_reg))
1016 if (! allows_reg && allows_mem)
1017 lang_hooks.mark_addressable (TREE_VALUE (tail));
1020 /* Second pass evaluates arguments. */
1023 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1025 tree val = TREE_VALUE (tail);
1026 tree type = TREE_TYPE (val);
1032 if (!parse_output_constraint (&constraints[i], i, ninputs,
1033 noutputs, &allows_mem, &allows_reg,
1037 /* If an output operand is not a decl or indirect ref and our constraint
1038 allows a register, make a temporary to act as an intermediate.
1039 Make the asm insn write into that, then our caller will copy it to
1040 the real output operand. Likewise for promoted variables. */
1042 generating_concat_p = 0;
1044 real_output_rtx[i] = NULL_RTX;
1045 if ((TREE_CODE (val) == INDIRECT_REF
1048 && (allows_mem || REG_P (DECL_RTL (val)))
1049 && ! (REG_P (DECL_RTL (val))
1050 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
1054 op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
1056 op = validize_mem (op);
1058 if (! allows_reg && !MEM_P (op))
1059 error ("output number %d not directly addressable", i);
1060 if ((! allows_mem && MEM_P (op))
1061 || GET_CODE (op) == CONCAT)
1063 real_output_rtx[i] = protect_from_queue (op, 1);
1064 op = gen_reg_rtx (GET_MODE (op));
1066 emit_move_insn (op, real_output_rtx[i]);
1071 op = assign_temp (type, 0, 0, 1);
1072 op = validize_mem (op);
1073 TREE_VALUE (tail) = make_tree (type, op);
1077 generating_concat_p = old_generating_concat_p;
1081 inout_mode[ninout] = TYPE_MODE (type);
1082 inout_opnum[ninout++] = i;
1085 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1086 clobber_conflict_found = 1;
1089 /* Make vectors for the expression-rtx, constraint strings,
1090 and named operands. */
1092 argvec = rtvec_alloc (ninputs);
1093 constraintvec = rtvec_alloc (ninputs);
1095 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
1096 : GET_MODE (output_rtx[0])),
1097 TREE_STRING_POINTER (string),
1098 empty_string, 0, argvec, constraintvec,
1101 MEM_VOLATILE_P (body) = vol;
1103 /* Eval the inputs and put them into ARGVEC.
1104 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1106 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
1108 bool allows_reg, allows_mem;
1109 const char *constraint;
1113 constraint = constraints[i + noutputs];
1114 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1115 constraints, &allows_mem, &allows_reg))
1118 generating_concat_p = 0;
1120 val = TREE_VALUE (tail);
1121 type = TREE_TYPE (val);
1122 op = expand_expr (val, NULL_RTX, VOIDmode,
1123 (allows_mem && !allows_reg
1124 ? EXPAND_MEMORY : EXPAND_NORMAL));
1126 /* Never pass a CONCAT to an ASM. */
1127 if (GET_CODE (op) == CONCAT)
1128 op = force_reg (GET_MODE (op), op);
1129 else if (MEM_P (op))
1130 op = validize_mem (op);
1132 if (asm_operand_ok (op, constraint) <= 0)
1135 op = force_reg (TYPE_MODE (type), op);
1136 else if (!allows_mem)
1137 warning ("asm operand %d probably doesn't match constraints",
1139 else if (MEM_P (op))
1141 /* We won't recognize either volatile memory or memory
1142 with a queued address as available a memory_operand
1143 at this point. Ignore it: clearly this *is* a memory. */
1147 warning ("use of memory input without lvalue in "
1148 "asm operand %d is deprecated", i + noutputs);
1150 if (CONSTANT_P (op))
1152 rtx mem = force_const_mem (TYPE_MODE (type), op);
1154 op = validize_mem (mem);
1156 op = force_reg (TYPE_MODE (type), op);
1159 || GET_CODE (op) == SUBREG
1160 || GET_CODE (op) == CONCAT)
1162 tree qual_type = build_qualified_type (type,
1164 | TYPE_QUAL_CONST));
1165 rtx memloc = assign_temp (qual_type, 1, 1, 1);
1166 memloc = validize_mem (memloc);
1167 emit_move_insn (memloc, op);
1173 generating_concat_p = old_generating_concat_p;
1174 ASM_OPERANDS_INPUT (body, i) = op;
1176 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
1177 = gen_rtx_ASM_INPUT (TYPE_MODE (type), constraints[i + noutputs]);
1179 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1180 clobber_conflict_found = 1;
1183 /* Protect all the operands from the queue now that they have all been
1186 generating_concat_p = 0;
1188 for (i = 0; i < ninputs - ninout; i++)
1189 ASM_OPERANDS_INPUT (body, i)
1190 = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
1192 for (i = 0; i < noutputs; i++)
1193 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1195 /* For in-out operands, copy output rtx to input rtx. */
1196 for (i = 0; i < ninout; i++)
1198 int j = inout_opnum[i];
1201 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
1204 sprintf (buffer, "%d", j);
1205 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
1206 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
1209 generating_concat_p = old_generating_concat_p;
1211 /* Now, for each output, construct an rtx
1212 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
1213 ARGVEC CONSTRAINTS OPNAMES))
1214 If there is more than one, put them inside a PARALLEL. */
1216 if (noutputs == 1 && nclobbers == 0)
1218 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
1219 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1222 else if (noutputs == 0 && nclobbers == 0)
1224 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1236 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1238 /* For each output operand, store a SET. */
1239 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1241 XVECEXP (body, 0, i)
1242 = gen_rtx_SET (VOIDmode,
1244 gen_rtx_ASM_OPERANDS
1245 (GET_MODE (output_rtx[i]),
1246 TREE_STRING_POINTER (string),
1247 constraints[i], i, argvec, constraintvec,
1250 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1253 /* If there are no outputs (but there are some clobbers)
1254 store the bare ASM_OPERANDS into the PARALLEL. */
1257 XVECEXP (body, 0, i++) = obody;
1259 /* Store (clobber REG) for each clobbered register specified. */
1261 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1263 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1264 int j = decode_reg_name (regname);
1269 if (j == -3) /* `cc', which is not a register */
1272 if (j == -4) /* `memory', don't cache memory across asm */
1274 XVECEXP (body, 0, i++)
1275 = gen_rtx_CLOBBER (VOIDmode,
1278 gen_rtx_SCRATCH (VOIDmode)));
1282 /* Ignore unknown register, error already signaled. */
1286 /* Use QImode since that's guaranteed to clobber just one reg. */
1287 clobbered_reg = gen_rtx_REG (QImode, j);
1289 /* Do sanity check for overlap between clobbers and respectively
1290 input and outputs that hasn't been handled. Such overlap
1291 should have been detected and reported above. */
1292 if (!clobber_conflict_found)
1296 /* We test the old body (obody) contents to avoid tripping
1297 over the under-construction body. */
1298 for (opno = 0; opno < noutputs; opno++)
1299 if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1300 internal_error ("asm clobber conflict with output operand");
1302 for (opno = 0; opno < ninputs - ninout; opno++)
1303 if (reg_overlap_mentioned_p (clobbered_reg,
1304 ASM_OPERANDS_INPUT (obody, opno)))
1305 internal_error ("asm clobber conflict with input operand");
1308 XVECEXP (body, 0, i++)
1309 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1315 /* For any outputs that needed reloading into registers, spill them
1316 back to where they belong. */
1317 for (i = 0; i < noutputs; ++i)
1318 if (real_output_rtx[i])
1319 emit_move_insn (real_output_rtx[i], output_rtx[i]);
/* Expand an ASM_EXPR tree EXP into RTL.  A plain (non-extended) asm is
   handed straight to expand_asm; an extended asm records its output
   lvalues, expands the ASM_OPERANDS, then copies the results back.
   NOTE(review): this dump elides lines; declarations and returns are
   not visible here.  */
1325 expand_asm_expr (tree exp)
1331 if (ASM_INPUT_P (exp))
/* Simple asm ("..."): no operands to process.  */
1333 expand_asm (ASM_STRING (exp), ASM_VOLATILE_P (exp));
1337 outputs = ASM_OUTPUTS (exp);
1338 noutputs = list_length (outputs);
1339 /* o[I] is the place that output number I should be written. */
1340 o = (tree *) alloca (noutputs * sizeof (tree));
1342 /* Record the contents of OUTPUTS before it is modified. */
1343 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1344 o[i] = TREE_VALUE (tail);
1346 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
1347 OUTPUTS some trees for where the values were actually stored. */
1348 expand_asm_operands (ASM_STRING (exp), outputs, ASM_INPUTS (exp),
1349 ASM_CLOBBERS (exp), ASM_VOLATILE_P (exp),
1352 /* Copy all the intermediate outputs into the specified outputs. */
1353 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1355 if (o[i] != TREE_VALUE (tail))
/* expand_asm_operands substituted a temporary; assign it to the
   real destination the user named.  */
1357 expand_assignment (o[i], TREE_VALUE (tail), 0);
1360 /* Restore the original value so that it's correct the next
1361 time we expand this function. */
1362 TREE_VALUE (tail) = o[i];
1366 /* Those MODIFY_EXPRs could do autoincrements. */
1370 /* A subroutine of expand_asm_operands. Check that all operands have
1371 the same number of alternatives. Return true if so. */
1374 check_operand_nalternatives (tree outputs, tree inputs)
1376 if (outputs || inputs)
/* Use the first operand's constraint to fix the expected number of
   comma-separated alternatives.  */
1378 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1380 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1383 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1385 error ("too many alternatives in `asm'");
1392 const char *constraint
1393 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
/* Every other operand must list exactly the same count.  */
1395 if (n_occurrences (',', constraint) != nalternatives)
1397 error ("operand constraints for `asm' differ in number of alternatives");
/* Walk outputs first, then chain on to the inputs list.  */
1401 if (TREE_CHAIN (tmp))
1402 tmp = TREE_CHAIN (tmp);
1404 tmp = next, next = 0;
1411 /* A subroutine of expand_asm_operands. Check that all operand names
1412 are unique. Return true if so. We rely on the fact that these names
1413 are identifiers, and so have been canonicalized by get_identifier,
1414 so all we need are pointer comparisons. */
/* NOTE(review): despite the comment above, the code below compares with
   simple_cst_equal, not raw pointer equality — confirm which is intended.  */
1417 check_unique_operand_names (tree outputs, tree inputs)
/* Each output name must differ from every later output name.  */
1421 for (i = outputs; i ; i = TREE_CHAIN (i))
1423 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1427 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1428 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
/* Each input name must differ from every later input name and from
   every output name.  */
1432 for (i = inputs; i ; i = TREE_CHAIN (i))
1434 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1438 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1439 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1441 for (j = outputs; j ; j = TREE_CHAIN (j))
1442 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1449 error ("duplicate asm operand name '%s'",
1450 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
1454 /* A subroutine of expand_asm_operands. Resolve the names of the operands
1455 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1456 STRING and in the constraints to those numbers. */
1459 resolve_asm_operand_names (tree string, tree outputs, tree inputs)
1466 check_unique_operand_names (outputs, inputs);
1468 /* Substitute [<name>] in input constraint strings. There should be no
1469 named operands in output constraints. */
1470 for (t = inputs; t ; t = TREE_CHAIN (t))
1472 c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1473 if (strchr (c, '[') != NULL)
/* Work on a mutable copy; resolve_operand_name_1 rewrites in place.  */
1475 p = buffer = xstrdup (c);
1476 while ((p = strchr (p, '[')) != NULL)
1477 p = resolve_operand_name_1 (p, outputs, inputs);
1478 TREE_VALUE (TREE_PURPOSE (t))
1479 = build_string (strlen (buffer), buffer);
1484 /* Now check for any needed substitutions in the template. */
1485 c = TREE_STRING_POINTER (string);
/* First pass: scan for %[name] (or %c[name]) to see if a copy is needed.  */
1486 while ((c = strchr (c, '%')) != NULL)
1490 else if (ISALPHA (c[1]) && c[2] == '[')
1501 /* OK, we need to make a copy so we can perform the substitutions.
1502 Assume that we will not need extra space--we get to remove '['
1503 and ']', which means we cannot have a problem until we have more
1504 than 999 operands. */
1505 buffer = xstrdup (TREE_STRING_POINTER (string));
1506 p = buffer + (c - TREE_STRING_POINTER (string));
/* Second pass over the copy: actually substitute operand numbers.  */
1508 while ((p = strchr (p, '%')) != NULL)
1512 else if (ISALPHA (p[1]) && p[2] == '[')
1520 p = resolve_operand_name_1 (p, outputs, inputs);
1523 string = build_string (strlen (buffer), buffer);
1530 /* A subroutine of resolve_operand_names. P points to the '[' for a
1531 potential named operand of the form [<name>]. In place, replace
1532 the name and brackets with a number. Return a pointer to the
1533 balance of the string after substitution. */
1536 resolve_operand_name_1 (char *p, tree outputs, tree inputs)
1543 /* Collect the operand name. */
1544 q = strchr (p, ']');
1547 error ("missing close brace for named operand");
/* No closing ']': skip to end of string so the caller's scan stops.  */
1548 return strchr (p, '\0');
1552 /* Resolve the name to a number. */
/* Outputs are numbered before inputs, so OP keeps counting across
   both lists.  */
1553 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
1555 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1558 const char *c = TREE_STRING_POINTER (name);
1559 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
1563 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
1565 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1568 const char *c = TREE_STRING_POINTER (name);
1569 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
1575 error ("undefined named operand '%s'", p + 1);
1579 /* Replace the name with the number. Unfortunately, not all libraries
1580 get the return value of sprintf correct, so search for the end of the
1581 generated string by hand. */
1582 sprintf (p, "%d", op);
1583 p = strchr (p, '\0');
1585 /* Verify the no extra buffer space assumption. */
1589 /* Shift the rest of the buffer down to fill the gap. */
1590 memmove (p, q + 1, strlen (q + 1) + 1);
1595 /* Generate RTL to evaluate the expression EXP. */
1598 expand_expr_stmt (tree exp)
/* Evaluate for side effects; const0_rtx as target signals the value
   itself may be discarded.  */
1603 value = expand_expr (exp, const0_rtx, VOIDmode, 0);
1604 type = TREE_TYPE (exp);
1606 /* If all we do is reference a volatile value in memory,
1607 copy it to a register to be sure it is actually touched. */
1608 if (value && MEM_P (value) && TREE_THIS_VOLATILE (exp))
1610 if (TYPE_MODE (type) == VOIDmode)
1612 else if (TYPE_MODE (type) != BLKmode)
1613 value = copy_to_reg (value);
/* BLKmode volatile: can't copy to a register, so force a reference
   with a self-comparison instead.  */
1616 rtx lab = gen_label_rtx ();
1618 /* Compare the value with itself to reference it. */
1619 emit_cmp_and_jump_insns (value, value, EQ,
1620 expand_expr (TYPE_SIZE (type),
1621 NULL_RTX, VOIDmode, 0),
1627 /* Free any temporaries used to evaluate this expression. */
1633 /* Warn if EXP contains any computations whose results are not used.
1634 Return 1 if a warning is printed; 0 otherwise. LOCUS is the
1635 (potential) location of the expression. */
1638 warn_if_unused_value (tree exp, location_t locus)
/* An expression marked as used needs no warning.  */
1641 if (TREE_USED (exp))
1644 /* Don't warn about void constructs. This includes casting to void,
1645 void function calls, and statement expressions with a final cast
1647 if (VOID_TYPE_P (TREE_TYPE (exp)))
/* Prefer the expression's own location when it has one.  */
1650 if (EXPR_LOCUS (exp))
1651 locus = *EXPR_LOCUS (exp);
1653 switch (TREE_CODE (exp))
/* Increment/decrement have side effects — never warn.  */
1655 case PREINCREMENT_EXPR:
1656 case POSTINCREMENT_EXPR:
1657 case PREDECREMENT_EXPR:
1658 case POSTDECREMENT_EXPR:
1663 case TRY_CATCH_EXPR:
1664 case WITH_CLEANUP_EXPR:
1669 /* For a binding, warn if no side effect within it. */
1670 exp = BIND_EXPR_BODY (exp);
1674 exp = TREE_OPERAND (exp, 0);
1677 case TRUTH_ORIF_EXPR:
1678 case TRUTH_ANDIF_EXPR:
1679 /* In && or ||, warn if 2nd operand has no side effect. */
1680 exp = TREE_OPERAND (exp, 1);
/* COMPOUND_EXPR (comma): check the first operand, then recurse into
   the second unless it is constant.  */
1684 if (TREE_NO_WARNING (exp))
1686 if (warn_if_unused_value (TREE_OPERAND (exp, 0), locus))
1688 /* Let people do `(foo (), 0)' without a warning. */
1689 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1691 exp = TREE_OPERAND (exp, 1);
1696 case NON_LVALUE_EXPR:
1697 /* Don't warn about conversions not explicit in the user's program. */
1698 if (TREE_NO_WARNING (exp))
1700 /* Assignment to a cast usually results in a cast of a modify.
1701 Don't complain about that. There can be an arbitrary number of
1702 casts before the modify, so we must loop until we find the first
1703 non-cast expression and then test to see if that is a modify. */
1705 tree tem = TREE_OPERAND (exp, 0);
1707 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
1708 tem = TREE_OPERAND (tem, 0);
1710 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
1711 || TREE_CODE (tem) == CALL_EXPR)
1717 /* Don't warn about automatic dereferencing of references, since
1718 the user cannot control it. */
1719 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1721 exp = TREE_OPERAND (exp, 0);
1727 /* Referencing a volatile value is a side effect, so don't warn. */
1729 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1730 && TREE_THIS_VOLATILE (exp))
1733 /* If this is an expression which has no operands, there is no value
1734 to be unused. There are no such language-independent codes,
1735 but front ends may define such. */
1736 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
1737 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
1741 /* If this is an expression with side effects, don't warn. */
1742 if (TREE_SIDE_EFFECTS (exp))
1745 warning ("%Hvalue computed is not used", &locus);
1750 /* Generate RTL for the start of an if-then. COND is the expression
1751 whose truth should be tested.
1753 If EXITFLAG is nonzero, this conditional is visible to
1754 `exit_something'. */
1757 expand_start_cond (tree cond, int exitflag)
1759 struct nesting *thiscond = ALLOC_NESTING ();
1761 /* Make an entry on cond_stack for the cond we are entering. */
1763 thiscond->desc = COND_NESTING;
1764 thiscond->next = cond_stack;
1765 thiscond->all = nesting_stack;
1766 thiscond->depth = ++nesting_depth;
/* next_label is where control goes when COND is false.  */
1767 thiscond->data.cond.next_label = gen_label_rtx ();
1768 /* Before we encounter an `else', we don't need a separate exit label
1769 unless there are supposed to be exit statements
1770 to exit this conditional. */
1771 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
1772 thiscond->data.cond.endif_label = thiscond->exit_label;
1773 cond_stack = thiscond;
1774 nesting_stack = thiscond;
/* Emit the test: jump to next_label if COND is false.  */
1776 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
1779 /* Generate RTL between then-clause and the elseif-clause
1780 of an if-then-elseif-.... */
1783 expand_start_elseif (tree cond)
/* Lazily create the endif label the first time it is needed.  */
1785 if (cond_stack->data.cond.endif_label == 0)
1786 cond_stack->data.cond.endif_label = gen_label_rtx ();
/* End of the then-clause: jump past the rest of the conditional.  */
1787 emit_jump (cond_stack->data.cond.endif_label);
1788 emit_label (cond_stack->data.cond.next_label);
/* Fresh false-label for the new condition, then test it.  */
1789 cond_stack->data.cond.next_label = gen_label_rtx ();
1790 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1793 /* Generate RTL between the then-clause and the else-clause
1794 of an if-then-else. */
1797 expand_start_else (void)
1799 if (cond_stack->data.cond.endif_label == 0)
1800 cond_stack->data.cond.endif_label = gen_label_rtx ();
/* Close the then-clause and land the false edge here.  */
1802 emit_jump (cond_stack->data.cond.endif_label);
1803 emit_label (cond_stack->data.cond.next_label);
1804 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
1807 /* After calling expand_start_else, turn this "else" into an "else if"
1808 by providing another condition. */
1811 expand_elseif (tree cond)
/* Re-arm the false-label (cleared by expand_start_else) and test COND.  */
1813 cond_stack->data.cond.next_label = gen_label_rtx ();
1814 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1817 /* Generate RTL for the end of an if-then.
1818 Pop the record for it off of cond_stack. */
1821 expand_end_cond (void)
1823 struct nesting *thiscond = cond_stack;
1825 do_pending_stack_adjust ();
/* Emit whichever of the two labels are still outstanding.  */
1826 if (thiscond->data.cond.next_label)
1827 emit_label (thiscond->data.cond.next_label);
1828 if (thiscond->data.cond.endif_label)
1829 emit_label (thiscond->data.cond.endif_label);
1831 POPSTACK (cond_stack);
1834 /* Return nonzero if we should preserve sub-expressions as separate
1835 pseudos. We never do so if we aren't optimizing. We always do so
1836 if -fexpensive-optimizations. */
1839 preserve_subexpressions_p (void)
1841 if (flag_expensive_optimizations)
/* Not optimizing, or no function/statement context available.  */
1844 if (optimize == 0 || cfun == 0 || cfun->stmt == 0)
1851 /* Generate RTL to return from the current function, with no value.
1852 (That is, we do not do anything about returning any value.) */
1855 expand_null_return (void)
1857 /* If this function was declared to return a value, but we
1858 didn't, clobber the return registers so that they are not
1859 propagated live to the rest of the function. */
1860 clobber_return_register ();
1862 expand_null_return_1 ();
1865 /* Generate RTL to return directly from the current function.
1866 (That is, we bypass any return value.) */
1869 expand_naked_return (void)
1873 clear_pending_stack_adjust ();
1874 do_pending_stack_adjust ();
/* Reuse the shared naked-return label, creating it on first use.  */
1876 end_label = naked_return_label;
1878 end_label = naked_return_label = gen_label_rtx ();
1880 emit_jump (end_label);
1883 /* Try to guess whether the value of return means error code. */
1884 static enum br_predictor
1885 return_prediction (rtx val)
1887 /* Different heuristics for pointers and scalars. */
1888 if (POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
1890 /* NULL is usually not returned. */
1891 if (val == const0_rtx)
1892 return PRED_NULL_RETURN;
1896 /* Negative return values are often used to indicate
1898 if (GET_CODE (val) == CONST_INT
1899 && INTVAL (val) < 0)
1900 return PRED_NEGATIVE_RETURN;
1901 /* Constant return values are also usually errors,
1902 zero/one often mean booleans so exclude them from the
1904 if (CONSTANT_P (val)
1905 && (val != const0_rtx && val != const1_rtx))
1906 return PRED_CONST_RETURN;
1908 return PRED_NO_PREDICTION;
1912 /* If the current function returns values in the most significant part
1913 of a register, shift return value VAL appropriately. The mode of
1914 the function's return type is known not to be BLKmode. */
1917 shift_return_value (rtx val)
1921 type = TREE_TYPE (DECL_RESULT (current_function_decl));
1922 if (targetm.calls.return_in_msb (type))
1925 HOST_WIDE_INT shift;
1927 target = DECL_RTL (DECL_RESULT (current_function_decl));
/* Shift amount = register width minus the value's width in bits.  */
1928 shift = (GET_MODE_BITSIZE (GET_MODE (target))
1929 - BITS_PER_UNIT * int_size_in_bytes (type));
1931 val = expand_shift (LSHIFT_EXPR, GET_MODE (target),
1932 gen_lowpart (GET_MODE (target), val),
1933 build_int_2 (shift, 0), target, 1);
1939 /* Generate RTL to return from the current function, with value VAL. */
1942 expand_value_return (rtx val)
1945 enum br_predictor pred;
1947 if (flag_guess_branch_prob
1948 && (pred = return_prediction (val)) != PRED_NO_PREDICTION)
1950 /* Emit information for branch prediction. */
1953 note = emit_note (NOTE_INSN_PREDICTION);
1955 NOTE_PREDICTION (note) = NOTE_PREDICT (pred, NOT_TAKEN);
1959 return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
1961 /* Copy the value to the return location
1962 unless it's already there. */
1964 if (return_reg != val)
1966 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
/* Some ABIs promote small return values to a wider mode; convert
   VAL to that promoted mode before the copy.  */
1967 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
1969 int unsignedp = TYPE_UNSIGNED (type);
1970 enum machine_mode old_mode
1971 = DECL_MODE (DECL_RESULT (current_function_decl));
1972 enum machine_mode mode
1973 = promote_mode (type, old_mode, &unsignedp, 1);
1975 if (mode != old_mode)
1976 val = convert_modes (mode, old_mode, val, unsignedp);
/* A PARALLEL return location means the value is split across
   multiple registers.  */
1978 if (GET_CODE (return_reg) == PARALLEL)
1979 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
1981 emit_move_insn (return_reg, val);
1984 expand_null_return_1 ();
1987 /* Output a return with no value. */
1990 expand_null_return_1 (void)
1994 clear_pending_stack_adjust ();
1995 do_pending_stack_adjust ();
/* Jump to the function's shared return label, creating it on
   first use.  */
1997 end_label = return_label;
1999 end_label = return_label = gen_label_rtx ();
2000 emit_jump (end_label);
2003 /* Generate RTL to evaluate the expression RETVAL and return it
2004 from the current function. */
2007 expand_return (tree retval)
2013 /* If function wants no value, give it none. */
2014 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2016 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2018 expand_null_return ();
2022 if (retval == error_mark_node)
2024 /* Treat this like a return of no value from a function that
2026 expand_null_return ();
/* Dig the right-hand side out of `return <lhs> = <rhs>' forms.  */
2029 else if (TREE_CODE (retval) == RESULT_DECL)
2030 retval_rhs = retval;
2031 else if ((TREE_CODE (retval) == MODIFY_EXPR
2032 || TREE_CODE (retval) == INIT_EXPR)
2033 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2034 retval_rhs = TREE_OPERAND (retval, 1);
2036 retval_rhs = retval;
2038 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
2040 /* If the result is an aggregate that is being returned in one (or more)
2041 registers, load the registers here. The compiler currently can't handle
2042 copying a BLKmode value into registers. We could put this code in a
2043 more general area (for use by everyone instead of just function
2044 call/return), but until this feature is generally usable it is kept here
2045 (and in expand_call). */
2048 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2049 && REG_P (result_rtl))
2052 unsigned HOST_WIDE_INT bitpos, xbitpos;
2053 unsigned HOST_WIDE_INT padding_correction = 0;
2054 unsigned HOST_WIDE_INT bytes
2055 = int_size_in_bytes (TREE_TYPE (retval_rhs));
2056 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2057 unsigned int bitsize
2058 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
2059 rtx *result_pseudos = alloca (sizeof (rtx) * n_regs);
2060 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
2061 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2062 enum machine_mode tmpmode, result_reg_mode;
2066 expand_null_return ();
2070 /* If the structure doesn't take up a whole number of words, see
2071 whether the register value should be padded on the left or on
2072 the right. Set PADDING_CORRECTION to the number of padding
2073 bits needed on the left side.
2075 In most ABIs, the structure will be returned at the least end of
2076 the register, which translates to right padding on little-endian
2077 targets and left padding on big-endian targets. The opposite
2078 holds if the structure is returned at the most significant
2079 end of the register. */
2080 if (bytes % UNITS_PER_WORD != 0
2081 && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
2083 : BYTES_BIG_ENDIAN))
2084 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2087 /* Copy the structure BITSIZE bits at a time. */
2088 for (bitpos = 0, xbitpos = padding_correction;
2089 bitpos < bytes * BITS_PER_UNIT;
2090 bitpos += bitsize, xbitpos += bitsize)
2092 /* We need a new destination pseudo each time xbitpos is
2093 on a word boundary and when xbitpos == padding_correction
2094 (the first time through). */
2095 if (xbitpos % BITS_PER_WORD == 0
2096 || xbitpos == padding_correction)
2098 /* Generate an appropriate register. */
2099 dst = gen_reg_rtx (word_mode);
2100 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2102 /* Clear the destination before we move anything into it. */
2103 emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
2106 /* We need a new source operand each time bitpos is on a word
2108 if (bitpos % BITS_PER_WORD == 0)
2109 src = operand_subword_force (result_val,
2110 bitpos / BITS_PER_WORD,
2113 /* Use bitpos for the source extraction (left justified) and
2114 xbitpos for the destination store (right justified). */
2115 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2116 extract_bit_field (src, bitsize,
2117 bitpos % BITS_PER_WORD, 1,
2118 NULL_RTX, word_mode, word_mode,
2123 tmpmode = GET_MODE (result_rtl);
2124 if (tmpmode == BLKmode)
2126 /* Find the smallest integer mode large enough to hold the
2127 entire structure and use that mode instead of BLKmode
2128 on the USE insn for the return register. */
2129 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2130 tmpmode != VOIDmode;
2131 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
2132 /* Have we found a large enough mode? */
2133 if (GET_MODE_SIZE (tmpmode) >= bytes)
2136 /* No suitable mode found. */
2137 if (tmpmode == VOIDmode)
2140 PUT_MODE (result_rtl, tmpmode);
/* Assemble the per-word pseudos into one wide result register.  */
2143 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
2144 result_reg_mode = word_mode;
2146 result_reg_mode = tmpmode;
2147 result_reg = gen_reg_rtx (result_reg_mode);
2150 for (i = 0; i < n_regs; i++)
2151 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
2154 if (tmpmode != result_reg_mode)
2155 result_reg = gen_lowpart (tmpmode, result_reg);
2157 expand_value_return (result_reg);
/* Non-BLKmode value returned in a register (or register group):
   compute it into a temporary, then move it to the return location.  */
2159 else if (retval_rhs != 0
2160 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
2161 && (REG_P (result_rtl)
2162 || (GET_CODE (result_rtl) == PARALLEL)))
2164 /* Calculate the return value into a temporary (usually a pseudo
2166 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
2167 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
2169 val = assign_temp (nt, 0, 0, 1);
2170 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
2171 val = force_not_mem (val);
2173 /* Return the calculated value. */
2174 expand_value_return (shift_return_value (val));
2178 /* No hard reg used; calculate value into hard return reg. */
2179 expand_expr (retval, const0_rtx, VOIDmode, 0);
2181 expand_value_return (result_rtl);
2185 /* Generate the RTL code for entering a binding contour.
2186 The variables are declared one by one, by calls to `expand_decl'.
2188 FLAGS is a bitwise or of the following flags:
2190 1 - Nonzero if this construct should be visible to
2193 2 - Nonzero if this contour does not require a
2194 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
2195 language-independent code should set this flag because they
2196 will not create corresponding BLOCK nodes. (There should be
2197 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
2198 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
2199 when expand_end_bindings is called.
2201 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
2202 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
2206 expand_start_bindings_and_block (int flags, tree block)
2208 struct nesting *thisblock = ALLOC_NESTING ();
2210 int exit_flag = ((flags & 1) != 0);
2211 int block_flag = ((flags & 2) == 0);
2213 /* If a BLOCK is supplied, then the caller should be requesting a
2214 NOTE_INSN_BLOCK_BEG note. */
2215 if (!block_flag && block)
2218 /* Create a note to mark the beginning of the block. */
2219 note = emit_note (NOTE_INSN_DELETED);
2221 /* Make an entry on block_stack for the block we are entering. */
2223 thisblock->desc = BLOCK_NESTING;
2224 thisblock->next = block_stack;
2225 thisblock->all = nesting_stack;
2226 thisblock->depth = ++nesting_depth;
2227 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
2229 thisblock->data.block.conditional_code = 0;
2230 thisblock->data.block.last_unconditional_cleanup = note;
2231 /* When we insert instructions after the last unconditional cleanup,
2232 we don't adjust last_insn. That means that a later add_insn will
2233 clobber the instructions we've just added. The easiest way to
2234 fix this is to just insert another instruction here, so that the
2235 instructions inserted after the last unconditional cleanup are
2236 never the last instruction. */
2237 emit_note (NOTE_INSN_DELETED);
2239 thisblock->data.block.first_insn = note;
2240 thisblock->data.block.block_start_count = ++current_block_start_count;
2241 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
/* Push the new contour onto both nesting stacks.  */
2242 block_stack = thisblock;
2243 nesting_stack = thisblock;
2245 /* Make a new level for allocating stack slots. */
2249 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
2250 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
2251 expand_expr are made. After we end the region, we know that all
2252 space for all temporaries that were created by TARGET_EXPRs will be
2253 destroyed and their space freed for reuse. */
2256 expand_start_target_temps (void)
2258 /* This is so that even if the result is preserved, the space
2259 allocated will be freed, as we know that it is no longer in use. */
2262 /* Start a new binding layer that will keep track of all cleanup
2263 actions to be performed. */
2264 expand_start_bindings (2);
2266 target_temp_slot_level = temp_slot_level;
/* Close the region opened by expand_start_target_temps, ending the
   binding contour started there.  */
2270 expand_end_target_temps (void)
2272 expand_end_bindings (NULL_TREE, 0, 0);
2274 /* This is so that even if the result is preserved, the space
2275 allocated will be freed, as we know that it is no longer in use. */
2279 /* Given a pointer to a BLOCK node return nonzero if (and only if) the node
2280 in question represents the outermost pair of curly braces (i.e. the "body
2281 block") of a function or method.
2283 For any BLOCK node representing a "body block" of a function or method, the
2284 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
2285 represents the outermost (function) scope for the function or method (i.e.
2286 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
2287 *that* node in turn will point to the relevant FUNCTION_DECL node. */
2290 is_body_block (tree stmt)
/* Front ends without body blocks never match.  */
2292 if (lang_hooks.no_body_blocks)
2295 if (TREE_CODE (stmt) == BLOCK)
2297 tree parent = BLOCK_SUPERCONTEXT (stmt);
2299 if (parent && TREE_CODE (parent) == BLOCK)
2301 tree grandparent = BLOCK_SUPERCONTEXT (parent);
/* BLOCK -> BLOCK -> FUNCTION_DECL is the body-block shape.  */
2303 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
2311 /* Return an opaque pointer to the current nesting level, so frontend code
2312 can check its own sanity. */
2315 current_nesting_level (void)
/* Outside any function (cfun == 0) there is no nesting.  */
2317 return cfun ? block_stack : 0;
2320 /* Emit code to restore vital registers at the beginning of a nonlocal goto
2323 expand_nl_goto_receiver (void)
2325 /* Clobber the FP when we get here, so we have to make sure it's
2326 marked as used by this function. */
2327 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
2329 /* Mark the static chain as clobbered here so life information
2330 doesn't get messed up for it. */
2331 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
2333 #ifdef HAVE_nonlocal_goto
2334 if (! HAVE_nonlocal_goto)
2336 /* First adjust our frame pointer to its actual value. It was
2337 previously set to the start of the virtual area corresponding to
2338 the stacked variables when we branched here and now needs to be
2339 adjusted to the actual hardware fp value.
2341 Assignments are to virtual registers are converted by
2342 instantiate_virtual_regs into the corresponding assignment
2343 to the underlying register (fp in this case) that makes
2344 the original assignment true.
2345 So the following insn will actually be
2346 decrementing fp by STARTING_FRAME_OFFSET. */
2347 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
2349 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2350 if (fixed_regs[ARG_POINTER_REGNUM])
2352 #ifdef ELIMINABLE_REGS
2353 /* If the argument pointer can be eliminated in favor of the
2354 frame pointer, we don't need to restore it. We assume here
2355 that if such an elimination is present, it can always be used.
2356 This is the case on all known machines; if we don't make this
2357 assumption, we do unnecessary saving on many machines. */
2358 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
2361 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
2362 if (elim_regs[i].from == ARG_POINTER_REGNUM
2363 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
2366 if (i == ARRAY_SIZE (elim_regs))
2369 /* Now restore our arg pointer from the address at which it
2370 was saved in our stack frame. */
2371 emit_move_insn (virtual_incoming_args_rtx,
2372 copy_to_reg (get_arg_pointer_save_area (cfun)));
/* Targets with a dedicated receiver pattern get to emit it here.  */
2377 #ifdef HAVE_nonlocal_goto_receiver
2378 if (HAVE_nonlocal_goto_receiver)
2379 emit_insn (gen_nonlocal_goto_receiver ())&#59;
2382 /* @@@ This is a kludge. Not all machine descriptions define a blockage
2383 insn, but we must not allow the code we just generated to be reordered
2384 by scheduling. Specifically, the update of the frame pointer must
2385 happen immediately, not later. So emit an ASM_INPUT to act as blockage
2387 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
2390 /* Warn about any unused VARS (which may contain nodes other than
2391 VAR_DECLs, but such nodes are ignored). The nodes are connected
2392 via the TREE_CHAIN field. */
2395 warn_about_unused_variables (tree vars)
/* Only diagnose when -Wunused-variable is in effect. */
2399 if (warn_unused_variable)
2400 for (decl = vars; decl; decl = TREE_CHAIN (decl))
/* Skip non-variables, used variables, decls from system headers,
anonymous decls, and compiler-generated temporaries. */
2401 if (TREE_CODE (decl) == VAR_DECL
2402 && ! TREE_USED (decl)
2403 && ! DECL_IN_SYSTEM_HEADER (decl)
2404 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
2405 warning ("%Junused variable '%D'", decl, decl);
2408 /* Generate RTL code to terminate a binding contour.
2410 VARS is the chain of VAR_DECL nodes for the variables bound in this
2411 contour. There may actually be other nodes in this chain, but any
2412 nodes other than VAR_DECLS are ignored.
2414 MARK_ENDS is nonzero if we should put a note at the beginning
2415 and end of this binding contour.
2417 DONT_JUMP_IN is positive if it is not valid to jump into this contour,
2418 zero if we can jump into this contour only if it does not have a saved
2419 stack level, and negative if we are not to check for invalid use of
2420 labels (because the front end does that). */
2423 expand_end_bindings (tree vars, int mark_ends ATTRIBUTE_UNUSED,
2424 int dont_jump_in ATTRIBUTE_UNUSED)
2426 struct nesting *thisblock = block_stack;
2428 /* If any of the variables in this scope were not used, warn the
2430 warn_about_unused_variables (vars);
/* If anything jumped out of this contour, land here so any pending
stack adjustments are flushed before the exit label. */
2432 if (thisblock->exit_label)
2434 do_pending_stack_adjust ();
2435 emit_label (thisblock->exit_label);
2438 /* Mark the beginning and end of the scope if requested. */
2440 /* Get rid of the beginning-mark if we don't make an end-mark. */
2441 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
2443 /* Restore the temporary level of TARGET_EXPRs. */
2444 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
2446 /* Restore block_stack level for containing block. */
2448 POPSTACK (block_stack);
2450 /* Pop the stack slot nesting and free any slots at this level. */
2454 /* Generate RTL for the automatic variable declaration DECL.
2455 (Other kinds of declarations are simply ignored if seen here.) */
2458 expand_decl (tree decl)
2462 type = TREE_TYPE (decl);
2464 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
2465 type in case this node is used in a reference. */
2466 if (TREE_CODE (decl) == CONST_DECL)
2468 DECL_MODE (decl) = TYPE_MODE (type);
2469 DECL_ALIGN (decl) = TYPE_ALIGN (type);
2470 DECL_SIZE (decl) = TYPE_SIZE (type);
2471 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
2475 /* Otherwise, only automatic variables need any expansion done. Static and
2476 external variables, and external functions, will be handled by
2477 `assemble_variable' (called from finish_decl). TYPE_DECL requires
2478 nothing. PARM_DECLs are handled in `assign_parms'. */
2479 if (TREE_CODE (decl) != VAR_DECL)
2482 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
2485 /* Create the RTL representation for the variable. */
/* Erroneous type: give the decl a dummy (mem:BLK 0) so later code
has something to chew on without crashing. */
2487 if (type == error_mark_node)
2488 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
2490 else if (DECL_SIZE (decl) == 0)
2491 /* Variable with incomplete type. */
2494 if (DECL_INITIAL (decl) == 0)
2495 /* Error message was already done; now avoid a crash. */
2496 x = gen_rtx_MEM (BLKmode, const0_rtx);
2498 /* An initializer is going to decide the size of this array.
2499 Until we know the size, represent its address with a reg. */
2500 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
2502 set_mem_attributes (x, decl, 1);
2503 SET_DECL_RTL (decl, x);
2505 else if (use_register_for_decl (decl))
2507 /* Automatic variable that can go in a register. */
2508 int unsignedp = TYPE_UNSIGNED (type);
2509 enum machine_mode reg_mode
2510 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
2512 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
2514 /* Note if the object is a user variable. */
2515 if (!DECL_ARTIFICIAL (decl))
2517 mark_user_reg (DECL_RTL (decl));
2519 /* Trust user variables which have a pointer type to really
2520 be pointers. Do not trust compiler generated temporaries
2521 as our type system is totally busted as it relates to
2522 pointer arithmetic which translates into lots of compiler
2523 generated objects with pointer types, but which are not really
2525 if (POINTER_TYPE_P (type))
2526 mark_reg_pointer (DECL_RTL (decl),
2527 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
2530 maybe_set_unchanging (DECL_RTL (decl), decl);
/* Fixed-size stack variable, unless -fstack-check would require the
dynamic path for objects bigger than STACK_CHECK_MAX_VAR_SIZE. */
2533 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
2534 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
2535 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
2536 STACK_CHECK_MAX_VAR_SIZE)))
2538 /* Variable of fixed size that goes on the stack. */
2543 /* If we previously made RTL for this decl, it must be an array
2544 whose size was determined by the initializer.
2545 The old address was a register; set that register now
2546 to the proper address. */
2547 if (DECL_RTL_SET_P (decl))
2549 if (!MEM_P (DECL_RTL (decl))
2550 || !REG_P (XEXP (DECL_RTL (decl), 0)))
2552 oldaddr = XEXP (DECL_RTL (decl), 0);
2555 /* Set alignment we actually gave this decl. */
2556 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
2557 : GET_MODE_BITSIZE (DECL_MODE (decl)));
2558 DECL_USER_ALIGN (decl) = 0;
2560 x = assign_temp (decl, 1, 1, 1);
2561 set_mem_attributes (x, decl, 1);
2562 SET_DECL_RTL (decl, x);
/* Point the placeholder address register at the slot we just made. */
2566 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
2567 if (addr != oldaddr)
2568 emit_move_insn (oldaddr, addr);
2572 /* Dynamic-size object: must push space on the stack. */
2574 rtx address, size, x;
2576 /* Record the stack pointer on entry to block, if have
2577 not already done so. */
2578 do_pending_stack_adjust ();
2580 /* Compute the variable's size, in bytes. This will expand any
2581 needed SAVE_EXPRs for the first time. */
2582 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
2585 /* Allocate space on the stack for the variable. Note that
2586 DECL_ALIGN says how the variable is to be aligned and we
2587 cannot use it to conclude anything about the alignment of
2589 address = allocate_dynamic_stack_space (size, NULL_RTX,
2590 TYPE_ALIGN (TREE_TYPE (decl)));
2592 /* Reference the variable indirect through that rtx. */
2593 x = gen_rtx_MEM (DECL_MODE (decl), address);
2594 set_mem_attributes (x, decl, 1);
2595 SET_DECL_RTL (decl, x);
2598 /* Indicate the alignment we actually gave this variable. */
2599 #ifdef STACK_BOUNDARY
2600 DECL_ALIGN (decl) = STACK_BOUNDARY;
2602 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
2604 DECL_USER_ALIGN (decl) = 0;
2608 /* Emit code to allocate T_SIZE bytes of dynamic stack space for ALLOC.
ALLOC must be an ADDR_EXPR whose operand is a VAR_DECL; the VAR_DECL's
existing DECL_RTL address register is updated in place to point at the
newly allocated space. */
2610 expand_stack_alloc (tree alloc, tree t_size)
2612 rtx address, dest, size;
2615 if (TREE_CODE (alloc) != ADDR_EXPR)
2617 var = TREE_OPERAND (alloc, 0);
2618 if (TREE_CODE (var) != VAR_DECL)
2621 type = TREE_TYPE (var);
2623 /* Compute the variable's size, in bytes. */
2624 size = expand_expr (t_size, NULL_RTX, VOIDmode, 0);
2627 /* Allocate space on the stack for the variable. */
2628 address = XEXP (DECL_RTL (var), 0);
2629 dest = allocate_dynamic_stack_space (size, address, TYPE_ALIGN (type));
2630 if (dest != address)
2631 emit_move_insn (address, dest);
2633 /* Indicate the alignment we actually gave this variable. */
2634 #ifdef STACK_BOUNDARY
2635 DECL_ALIGN (var) = STACK_BOUNDARY;
2637 DECL_ALIGN (var) = BIGGEST_ALIGNMENT;
2639 DECL_USER_ALIGN (var) = 0;
2642 /* Emit code to save the current value of stack.
Flushes any pending stack adjustment first so the saved value is
accurate; uses the SAVE_BLOCK save level. */
2644 expand_stack_save (void)
2648 do_pending_stack_adjust ();
2649 emit_stack_save (SAVE_BLOCK, &ret, NULL_RTX);
2653 /* Emit code to restore the current value of stack.
VAR is the variable whose DECL_RTL holds the previously saved
stack pointer value (see expand_stack_save). */
2655 expand_stack_restore (tree var)
2657 rtx sa = DECL_RTL (var);
2659 emit_stack_restore (SAVE_BLOCK, sa, NULL_RTX);
2662 /* Emit code to perform the initialization of a declaration DECL. */
2665 expand_decl_init (tree decl)
2667 int was_used = TREE_USED (decl);
2669 /* If this is a CONST_DECL, we don't have to generate any code. Likewise
2670 for static decls. */
2671 if (TREE_CODE (decl) == CONST_DECL
2672 || TREE_STATIC (decl))
2675 /* Compute and store the initial value now. */
/* An erroneous initializer: store zero for scalar-like types so later
uses of the variable see a defined value. */
2679 if (DECL_INITIAL (decl) == error_mark_node)
2681 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
2683 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
2684 || code == POINTER_TYPE || code == REFERENCE_TYPE)
2685 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
2689 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
2691 emit_line_note (DECL_SOURCE_LOCATION (decl));
2692 expand_assignment (decl, DECL_INITIAL (decl), 0);
2696 /* Don't let the initialization count as "using" the variable. */
2697 TREE_USED (decl) = was_used;
2699 /* Free any temporaries we made while initializing the decl. */
2700 preserve_temp_slots (NULL_RTX);
2706 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
2707 DECL_ELTS is the list of elements that belong to DECL's type.
2708 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
2711 expand_anon_union_decl (tree decl, tree cleanup ATTRIBUTE_UNUSED,
2717 /* If any of the elements are addressable, so is the entire union. */
2718 for (t = decl_elts; t; t = TREE_CHAIN (t))
2719 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
2721 TREE_ADDRESSABLE (decl) = 1;
2726 x = DECL_RTL (decl);
2728 /* Go through the elements, assigning RTL to each. */
2729 for (t = decl_elts; t; t = TREE_CHAIN (t))
2731 tree decl_elt = TREE_VALUE (t);
2732 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
2734 /* If any of the elements are addressable, so is the entire
2736 if (TREE_USED (decl_elt))
2737 TREE_USED (decl) = 1;
2739 /* Propagate the union's alignment to the elements. */
2740 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
2741 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
2743 /* If the element has BLKmode and the union doesn't, the union is
2744 aligned such that the element doesn't need to have BLKmode, so
2745 change the element's mode to the appropriate one for its size. */
2746 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
2747 DECL_MODE (decl_elt) = mode
2748 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1)
2750 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
2751 instead create a new MEM rtx with the proper mode. */
/* Union lives in memory: share the MEM, re-moded if necessary. */
2754 if (mode == GET_MODE (x))
2755 SET_DECL_RTL (decl_elt, x);
2757 SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
/* Union lives in a register: share the reg, or take a lowpart SUBREG. */
2761 if (mode == GET_MODE (x))
2762 SET_DECL_RTL (decl_elt, x);
2764 SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
2771 /* Enter a case (Pascal) or switch (C) statement.
2772 Push a block onto case_stack and nesting_stack
2773 to accumulate the case-labels that are seen
2774 and to record the labels generated for the statement.
2776 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
2777 Otherwise, this construct is transparent for `exit_something'.
2779 EXPR is the index-expression to be dispatched on.
2780 TYPE is its nominal type. We could simply convert EXPR to this type,
2781 but instead we take short cuts. */
2784 expand_start_case (int exit_flag, tree expr, tree type,
2785 const char *printname)
2787 struct nesting *thiscase = ALLOC_NESTING ();
2789 /* Make an entry on case_stack for the case we are entering. */
2791 thiscase->desc = CASE_NESTING;
2792 thiscase->next = case_stack;
2793 thiscase->all = nesting_stack;
2794 thiscase->depth = ++nesting_depth;
2795 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
2796 thiscase->data.case_stmt.case_list = 0;
2797 thiscase->data.case_stmt.index_expr = expr;
2798 thiscase->data.case_stmt.nominal_type = type;
2799 thiscase->data.case_stmt.default_label = 0;
2800 thiscase->data.case_stmt.printname = printname;
2801 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
2802 case_stack = thiscase;
2803 nesting_stack = thiscase;
2805 do_pending_stack_adjust ();
2808 /* Make sure case_stmt.start points to something that won't
2809 need any transformation before expand_end_case. */
2810 if (!NOTE_P (get_last_insn ()))
2811 emit_note (NOTE_INSN_DELETED);
2813 thiscase->data.case_stmt.start = get_last_insn ();
2816 /* Accumulate one case or default label inside a case or switch statement.
2817 VALUE is the value of the case (a null pointer, for a default label).
2818 The function CONVERTER, when applied to arguments T and V,
2819 converts the value V to the type T.
2821 If not currently inside a case or switch statement, return 1 and do
2822 nothing. The caller will print a language-specific error message.
2823 If VALUE is a duplicate or overlaps, return 2 and do nothing
2824 except store the (first) duplicate node in *DUPLICATE.
2825 If VALUE is out of range, return 3 and do nothing.
2826 Return 0 on success.
2828 Extended to handle range statements. */
2831 pushcase (tree value, tree (*converter) (tree, tree), tree label,
2837 /* Fail if not inside a real case statement. */
2838 if (! (case_stack && case_stack->data.case_stmt.start))
2841 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
2842 nominal_type = case_stack->data.case_stmt.nominal_type;
2844 /* If the index is erroneous, avoid more problems: pretend to succeed. */
2845 if (index_type == error_mark_node)
2848 /* Convert VALUE to the type in which the comparisons are nominally done. */
2850 value = (*converter) (nominal_type, value);
2852 /* Fail if this value is out of range for the actual type of the index
2853 (which may be narrower than NOMINAL_TYPE). */
2855 && (TREE_CONSTANT_OVERFLOW (value)
2856 || ! int_fits_type_p (value, index_type)))
/* A single-value case is a degenerate range [VALUE, VALUE]. */
2859 return add_case_node (value, value, label, duplicate, false);
2862 /* Like pushcase but this case applies to all values between VALUE1 and
2863 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
2864 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
2865 starts at VALUE1 and ends at the highest value of the index type.
2866 If both are NULL, this case applies to all values.
2868 The return value is the same as that of pushcase but there is one
2869 additional error code: 4 means the specified range was empty. */
2872 pushcase_range (tree value1, tree value2, tree (*converter) (tree, tree),
2873 tree label, tree *duplicate)
2878 /* Fail if not inside a real case statement. */
2879 if (! (case_stack && case_stack->data.case_stmt.start))
2882 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
2883 nominal_type = case_stack->data.case_stmt.nominal_type;
2885 /* If the index is erroneous, avoid more problems: pretend to succeed. */
2886 if (index_type == error_mark_node)
2889 /* Convert VALUEs to type in which the comparisons are nominally done
2890 and replace any unspecified value with the corresponding bound. */
2892 value1 = TYPE_MIN_VALUE (index_type);
2894 value2 = TYPE_MAX_VALUE (index_type);
2896 /* Fail if the range is empty. Do this before any conversion since
2897 we want to allow out-of-range empty ranges. */
2898 if (value2 != 0 && tree_int_cst_lt (value2, value1))
2901 /* If the max was unbounded, use the max of the nominal_type we are
2902 converting to. Do this after the < check above to suppress false
2905 value2 = TYPE_MAX_VALUE (nominal_type);
2907 value1 = (*converter) (nominal_type, value1);
2908 value2 = (*converter) (nominal_type, value2);
2910 /* Fail if these values are out of range. */
2911 if (TREE_CONSTANT_OVERFLOW (value1)
2912 || ! int_fits_type_p (value1, index_type))
2915 if (TREE_CONSTANT_OVERFLOW (value2)
2916 || ! int_fits_type_p (value2, index_type))
2919 return add_case_node (value1, value2, label, duplicate, false);
2922 /* Do the actual insertion of a case label for pushcase and pushcase_range
2923 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
2924 slowdown for large switch statements. */
2927 add_case_node (tree low, tree high, tree label, tree *duplicate,
2928 bool dont_expand_label)
2930 struct case_node *p, **q, *r;
2932 /* If there's no HIGH value, then this is not a case range; it's
2933 just a simple case label. But that's just a degenerate case
2938 /* Handle default labels specially. */
/* At most one default label is allowed; report the first as duplicate. */
2941 if (case_stack->data.case_stmt.default_label != 0)
2943 *duplicate = case_stack->data.case_stmt.default_label;
2946 case_stack->data.case_stmt.default_label = label;
2947 if (!dont_expand_label)
2948 expand_label (label);
/* Descend the tree to find the insertion point, checking for overlap
with existing ranges on the way down. */
2952 q = &case_stack->data.case_stmt.case_list;
2959 /* Keep going past elements distinctly greater than HIGH. */
2960 if (tree_int_cst_lt (high, p->low))
2963 /* or distinctly less than LOW. */
2964 else if (tree_int_cst_lt (p->high, low))
2969 /* We have an overlap; this is an error. */
2970 *duplicate = p->code_label;
2975 /* Add this label to the chain, and succeed. */
2977 r = ggc_alloc (sizeof (struct case_node))
2980 /* If the bounds are equal, turn this into the one-value case. */
2981 if (tree_int_cst_equal (low, high))
2986 r->code_label = label;
2987 if (!dont_expand_label)
2988 expand_label (label);
/* AVL rebalancing: walk back up from the new node, updating balance
factors and performing single/double rotations as needed. */
2998 struct case_node *s;
3004 if (! (b = p->balance))
3005 /* Growth propagation from left side. */
3012 if ((p->left = s = r->right))
3021 if ((r->parent = s))
3029 case_stack->data.case_stmt.case_list = r;
3032 /* r->balance == +1 */
/* Left-right (double) rotation case. */
3037 struct case_node *t = r->right;
3039 if ((p->left = s = t->right))
3043 if ((r->right = s = t->left))
3057 if ((t->parent = s))
3065 case_stack->data.case_stmt.case_list = t;
3072 /* p->balance == +1; growth of left side balances the node. */
3082 if (! (b = p->balance))
3083 /* Growth propagation from right side. */
3091 if ((p->right = s = r->left))
3099 if ((r->parent = s))
3108 case_stack->data.case_stmt.case_list = r;
3112 /* r->balance == -1 */
/* Right-left (double) rotation case. */
3116 struct case_node *t = r->left;
3118 if ((p->right = s = t->left))
3123 if ((r->left = s = t->right))
3137 if ((t->parent = s))
3146 case_stack->data.case_stmt.case_list = t;
3152 /* p->balance == -1; growth of right side balances the node. */
3165 /* Maximum number of case bit tests. */
3166 #define MAX_CASE_BIT_TESTS 3
3168 /* By default, enable case bit tests on targets with ashlsi3. */
3169 #ifndef CASE_USE_BIT_TESTS
3170 #define CASE_USE_BIT_TESTS (ashl_optab->handlers[word_mode].insn_code \
3171 != CODE_FOR_nothing)
3175 /* A case_bit_test represents a set of case nodes that may be
3176 selected from using a bit-wise comparison. HI and LO hold
3177 the integer to be tested against, LABEL contains the label
3178 to jump to upon success and BITS counts the number of case
3179 nodes handled by this test, typically the number of bits
3182 struct case_bit_test
3190 /* Determine whether "1 << x" is relatively cheap in word_mode.
The answer is computed once via rtx_cost and cached in statics;
"cheap" means the shift costs less than three insns. */
3193 bool lshift_cheap_p (void)
3195 static bool init = false;
3196 static bool cheap = true;
/* Register number 10000 is just an arbitrary pseudo for costing. */
3200 rtx reg = gen_rtx_REG (word_mode, 10000);
3201 int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET);
3202 cheap = cost < COSTS_N_INSNS (3);
3209 /* Comparison function for qsort to order bit tests by decreasing
3210 number of case nodes, i.e. the node with the most cases gets
3214 case_bit_test_cmp (const void *p1, const void *p2)
3216 const struct case_bit_test *d1 = p1;
3217 const struct case_bit_test *d2 = p2;
/* Descending order: larger bit counts sort first. */
3219 return d2->bits - d1->bits;
3222 /* Expand a switch statement by a short sequence of bit-wise
3223 comparisons. "switch(x)" is effectively converted into
3224 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
3227 INDEX_EXPR is the value being switched on, which is of
3228 type INDEX_TYPE. MINVAL is the lowest case value of in
3229 the case nodes, of INDEX_TYPE type, and RANGE is highest
3230 value minus MINVAL, also of type INDEX_TYPE. NODES is
3231 the set of case nodes, and DEFAULT_LABEL is the label to
3232 branch to should none of the cases match.
3234 There *MUST* be MAX_CASE_BIT_TESTS or less unique case
3238 emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
3239 tree range, case_node_ptr nodes, rtx default_label)
3241 struct case_bit_test test[MAX_CASE_BIT_TESTS];
3242 enum machine_mode mode;
3243 rtx expr, index, label;
3244 unsigned int i,j,lo,hi;
3245 struct case_node *n;
/* Group the case nodes by target label, building one bit-test entry
per unique label (at most MAX_CASE_BIT_TESTS of them). */
3249 for (n = nodes; n; n = n->right)
3251 label = label_rtx (n->code_label);
3252 for (i = 0; i < count; i++)
3253 if (same_case_target_p (label, test[i].label))
3258 if (count >= MAX_CASE_BIT_TESTS)
3262 test[i].label = label;
/* Set the bit(s) covered by this node, relative to MINVAL. */
3269 lo = tree_low_cst (fold (build (MINUS_EXPR, index_type,
3270 n->low, minval)), 1);
3271 hi = tree_low_cst (fold (build (MINUS_EXPR, index_type,
3272 n->high, minval)), 1);
3273 for (j = lo; j <= hi; j++)
3274 if (j >= HOST_BITS_PER_WIDE_INT)
/* NOTE(review): J was compared against HOST_BITS_PER_WIDE_INT on the
line above, but the shift amount here subtracts HOST_BITS_PER_INT.
On hosts where these differ this sets the wrong bit in test[i].hi;
this looks like it should be (j - HOST_BITS_PER_WIDE_INT) -- confirm. */
3275 test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_INT);
3277 test[i].lo |= (HOST_WIDE_INT) 1 << j;
/* Most-populated tests first, so common cases are checked earliest. */
3280 qsort (test, count, sizeof(*test), case_bit_test_cmp);
3282 index_expr = fold (build (MINUS_EXPR, index_type,
3283 convert (index_type, index_expr),
3284 convert (index_type, minval)));
3285 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
3287 index = protect_from_queue (index, 0);
3288 do_pending_stack_adjust ();
/* Values above RANGE cannot match any case: branch to the default. */
3290 mode = TYPE_MODE (index_type);
3291 expr = expand_expr (range, NULL_RTX, VOIDmode, 0);
3292 emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
/* Compute 1 << index once, then AND it against each test's mask. */
3295 index = convert_to_mode (word_mode, index, 0);
3296 index = expand_binop (word_mode, ashl_optab, const1_rtx,
3297 index, NULL_RTX, 1, OPTAB_WIDEN);
3299 for (i = 0; i < count; i++)
3301 expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
3302 expr = expand_binop (word_mode, and_optab, index, expr,
3303 NULL_RTX, 1, OPTAB_WIDEN);
3304 emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
3305 word_mode, 1, test[i].label);
3308 emit_jump (default_label);
/* Fallback definitions so HAVE_casesi / HAVE_tablejump can be tested
uniformly below even on targets whose machine description does not
provide the corresponding patterns. */
3312 #define HAVE_casesi 0
3315 #ifndef HAVE_tablejump
3316 #define HAVE_tablejump 0
3319 /* Terminate a case (Pascal) or switch (C) statement
3320 in which ORIG_INDEX is the expression to be tested.
3321 If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
3322 type as given in the source before any compiler conversions.
3323 Generate the code to test it and jump to the right place.
Dispatch strategy: a bit-test sequence when the value range fits a
word and there are few unique targets, a binary decision tree for
small/constant/huge-range cases, otherwise a casesi or tablejump
dispatch table. */
3326 expand_end_case_type (tree orig_index, tree orig_type)
3328 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
3329 rtx default_label = 0;
3330 struct case_node *n, *m;
3331 unsigned int count, uniq;
3337 rtx before_case, end, lab;
3338 struct nesting *thiscase = case_stack;
3339 tree index_expr, index_type;
3340 bool exit_done = false;
3343 /* Don't crash due to previous errors. */
3344 if (thiscase == NULL)
3347 index_expr = thiscase->data.case_stmt.index_expr;
3348 index_type = TREE_TYPE (index_expr);
3349 unsignedp = TYPE_UNSIGNED (index_type);
3350 if (orig_type == NULL)
3351 orig_type = TREE_TYPE (orig_index);
3353 do_pending_stack_adjust ();
3355 /* An ERROR_MARK occurs for various reasons including invalid data type. */
3356 if (index_type != error_mark_node)
3358 /* If we don't have a default-label, create one here,
3359 after the body of the switch. */
3360 if (thiscase->data.case_stmt.default_label == 0)
3362 thiscase->data.case_stmt.default_label
3363 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
3364 /* Share the exit label if possible. */
3365 if (thiscase->exit_label)
3367 SET_DECL_RTL (thiscase->data.case_stmt.default_label,
3368 thiscase->exit_label);
3371 expand_label (thiscase->data.case_stmt.default_label);
3373 default_label = label_rtx (thiscase->data.case_stmt.default_label);
3375 before_case = get_last_insn ();
/* Flatten the AVL tree of case nodes into a sorted right-linked list. */
3377 if (thiscase->data.case_stmt.case_list
3378 && thiscase->data.case_stmt.case_list->left)
3379 thiscase->data.case_stmt.case_list
3380 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
3382 /* Simplify the case-list before we count it. */
3383 group_case_nodes (thiscase->data.case_stmt.case_list);
3384 strip_default_case_nodes (&thiscase->data.case_stmt.case_list,
3387 /* Get upper and lower bounds of case values.
3388 Also convert all the case values to the index expr's data type. */
3392 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
3394 /* Check low and high label values are integers. */
3395 if (TREE_CODE (n->low) != INTEGER_CST)
3397 if (TREE_CODE (n->high) != INTEGER_CST)
3400 n->low = convert (index_type, n->low);
3401 n->high = convert (index_type, n->high);
3403 /* Count the elements and track the largest and smallest
3404 of them (treating them as signed even if they are not). */
3412 if (INT_CST_LT (n->low, minval))
3414 if (INT_CST_LT (maxval, n->high))
3417 /* A range counts double, since it requires two compares. */
3418 if (! tree_int_cst_equal (n->low, n->high))
3421 /* Count the number of unique case node targets. */
3423 lab = label_rtx (n->code_label);
3424 for (m = thiscase->data.case_stmt.case_list; m != n; m = m->right)
3425 if (same_case_target_p (label_rtx (m->code_label), lab))
3432 /* Compute span of values. */
3434 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
/* No case labels at all: evaluate the index for side effects, then
jump straight to the default. */
3438 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
3440 emit_jump (default_label);
3443 /* Try implementing this switch statement by a short sequence of
3444 bit-wise comparisons. However, we let the binary-tree case
3445 below handle constant index expressions. */
3446 else if (CASE_USE_BIT_TESTS
3447 && ! TREE_CONSTANT (index_expr)
3448 && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
3449 && compare_tree_int (range, 0) > 0
3450 && lshift_cheap_p ()
3451 && ((uniq == 1 && count >= 3)
3452 || (uniq == 2 && count >= 5)
3453 || (uniq == 3 && count >= 6)))
3455 /* Optimize the case where all the case values fit in a
3456 word without having to subtract MINVAL. In this case,
3457 we can optimize away the subtraction. */
3458 if (compare_tree_int (minval, 0) > 0
3459 && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
3461 minval = integer_zero_node;
3464 emit_case_bit_tests (index_type, index_expr, minval, range,
3465 thiscase->data.case_stmt.case_list,
3469 /* If range of values is much bigger than number of values,
3470 make a sequence of conditional branches instead of a dispatch.
3471 If the switch-index is a constant, do it this way
3472 because we can optimize it. */
3474 else if (count < case_values_threshold ()
3475 || compare_tree_int (range,
3476 (optimize_size ? 3 : 10) * count) > 0
3477 /* RANGE may be signed, and really large ranges will show up
3478 as negative numbers. */
3479 || compare_tree_int (range, 0) < 0
3480 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
3483 || TREE_CONSTANT (index_expr)
3484 /* If neither casesi or tablejump is available, we can
3485 only go this way. */
3486 || (!HAVE_casesi && !HAVE_tablejump))
3488 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
3490 /* If the index is a short or char that we do not have
3491 an insn to handle comparisons directly, convert it to
3492 a full integer now, rather than letting each comparison
3493 generate the conversion. */
3495 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
3496 && ! have_insn_for (COMPARE, GET_MODE (index)))
3498 enum machine_mode wider_mode;
3499 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
3500 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
3501 if (have_insn_for (COMPARE, wider_mode))
3503 index = convert_to_mode (wider_mode, index, unsignedp);
3509 do_pending_stack_adjust ();
3511 index = protect_from_queue (index, 0);
3513 index = copy_to_reg (index);
3514 if (GET_CODE (index) == CONST_INT
3515 || TREE_CODE (index_expr) == INTEGER_CST)
3517 /* Make a tree node with the proper constant value
3518 if we don't already have one. */
3519 if (TREE_CODE (index_expr) != INTEGER_CST)
3522 = build_int_2 (INTVAL (index),
3523 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
3524 index_expr = convert (index_type, index_expr);
3527 /* For constant index expressions we need only
3528 issue an unconditional branch to the appropriate
3529 target code. The job of removing any unreachable
3530 code is left to the optimization phase if the
3531 "-O" option is specified. */
3532 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
3533 if (! tree_int_cst_lt (index_expr, n->low)
3534 && ! tree_int_cst_lt (n->high, index_expr))
3538 emit_jump (label_rtx (n->code_label));
3540 emit_jump (default_label);
3544 /* If the index expression is not constant we generate
3545 a binary decision tree to select the appropriate
3546 target code. This is done as follows:
3548 The list of cases is rearranged into a binary tree,
3549 nearly optimal assuming equal probability for each case.
3551 The tree is transformed into RTL, eliminating
3552 redundant test conditions at the same time.
3554 If program flow could reach the end of the
3555 decision tree an unconditional jump to the
3556 default code is emitted. */
3559 = (TREE_CODE (orig_type) != ENUMERAL_TYPE
3560 && estimate_case_costs (thiscase->data.case_stmt.case_list));
3561 balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
3562 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
3563 default_label, index_type);
3564 emit_jump_if_reachable (default_label);
/* Dispatch-table path: try a casesi insn first, then tablejump. */
3569 table_label = gen_label_rtx ();
3570 if (! try_casesi (index_type, index_expr, minval, range,
3571 table_label, default_label))
3573 index_type = thiscase->data.case_stmt.nominal_type;
3575 /* Index jumptables from zero for suitable values of
3576 minval to avoid a subtraction. */
3578 && compare_tree_int (minval, 0) > 0
3579 && compare_tree_int (minval, 3) < 0)
3581 minval = integer_zero_node;
3585 if (! try_tablejump (index_type, index_expr, minval, range,
3586 table_label, default_label))
3590 /* Get table of labels to jump to, in order of case index. */
3592 ncases = tree_low_cst (range, 0) + 1;
3593 labelvec = alloca (ncases * sizeof (rtx));
3594 memset (labelvec, 0, ncases * sizeof (rtx));
3596 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
3598 /* Compute the low and high bounds relative to the minimum
3599 value since that should fit in a HOST_WIDE_INT while the
3600 actual values may not. */
3602 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
3603 n->low, minval)), 1);
3604 HOST_WIDE_INT i_high
3605 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
3606 n->high, minval)), 1);
3609 for (i = i_low; i <= i_high; i ++)
3611 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
3614 /* Fill in the gaps with the default. */
3615 for (i = 0; i < ncases; i++)
3616 if (labelvec[i] == 0)
3617 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
3619 /* Output the table. */
3620 emit_label (table_label);
/* PC-relative table for PIC or targets that prefer it; otherwise an
absolute-address vector. */
3622 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
3623 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
3624 gen_rtx_LABEL_REF (Pmode, table_label),
3625 gen_rtvec_v (ncases, labelvec),
3626 const0_rtx, const0_rtx));
3628 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
3629 gen_rtvec_v (ncases, labelvec)));
3631 /* If the case insn drops through the table,
3632 after the table we must jump to the default-label.
3633 Otherwise record no drop-through after the table. */
3634 #ifdef CASE_DROPS_THROUGH
3635 emit_jump (default_label);
/* Move the dispatch code we just emitted to just after the switch
body's start note, so it precedes the case bodies. */
3641 before_case = NEXT_INSN (before_case);
3642 end = get_last_insn ();
3643 if (squeeze_notes (&before_case, &end))
3645 reorder_insns (before_case, end,
3646 thiscase->data.case_stmt.start);
3649 if (thiscase->exit_label && !exit_done)
3650 emit_label (thiscase->exit_label);
3652 POPSTACK (case_stack);
3657 /* Convert the tree NODE into a list linked by the right field, with the left
3658 field zeroed. RIGHT is used for recursion; it is a list to be placed
3659 rightmost in the resulting list. */
3661 static struct case_node *
3662 case_tree2list (struct case_node *node, struct case_node *right)
3664 struct case_node *left;
/* Flatten the right subtree first; it becomes the tail of the list. */
3667 right = case_tree2list (node->right, right);
3669 node->right = right;
/* Then flatten the left subtree with NODE (and its tail) as its tail. */
3670 if ((left = node->left))
3673 return case_tree2list (left, node);
3679 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
3682 do_jump_if_equal (rtx op1, rtx op2, rtx label, int unsignedp)
3684 if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
3690 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
3691 (GET_MODE (op1) == VOIDmode
3692 ? GET_MODE (op2) : GET_MODE (op1)),
3696 /* Not all case values are encountered equally. This function
3697 uses a heuristic to weight case labels, in cases where that
3698 looks like a reasonable thing to do.
3700 Right now, all we try to guess is text, and we establish the
3701 following weights:
3703 chars above space: 16
3712 If we find any cases in the switch that are not either -1 or in the range
3713 of valid ASCII characters, or are control characters other than those
3714 commonly used with "\", don't treat this switch scanning text.
3716 Return 1 if these nodes are suitable for cost estimation, otherwise return 0.
3720 estimate_case_costs (case_node_ptr node)
3722 tree min_ascii = integer_minus_one_node;
3723 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
3727 /* If we haven't already made the cost table, make it now. Note that the
3728 lower bound of the table is -1, not zero. */
3730 if (! cost_table_initialized)
3732 cost_table_initialized = 1;
3734 for (i = 0; i < 128; i++)
3737 COST_TABLE (i) = 16;
3738 else if (ISPUNCT (i))
3740 else if (ISCNTRL (i))
3741 COST_TABLE (i) = -1;
3744 COST_TABLE (' ') = 8;
3745 COST_TABLE ('\t') = 4;
3746 COST_TABLE ('\0') = 4;
3747 COST_TABLE ('\n') = 2;
3748 COST_TABLE ('\f') = 1;
3749 COST_TABLE ('\v') = 1;
3750 COST_TABLE ('\b') = 1;
3753 /* See if all the case expressions look like text. It is text if the
3754 constant is >= -1 and the highest constant is <= 127. Do all comparisons
3755 as signed arithmetic since we don't want to ever access cost_table with a
3756 value less than -1. Also check that none of the constants in a range
3757 are strange control characters. */
3759 for (n = node; n; n = n->right)
3761 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
3764 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
3765 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
3766 if (COST_TABLE (i) < 0)
3770 /* All interesting values are within the range of interesting
3771 ASCII characters. */
3775 /* Determine whether two case labels branch to the same target. */
3778 same_case_target_p (rtx l1, rtx l2)
3786 i1 = next_real_insn (l1);
3787 i2 = next_real_insn (l2);
3791 if (i1 && simplejump_p (i1))
3793 l1 = XEXP (SET_SRC (PATTERN (i1)), 0);
3796 if (i2 && simplejump_p (i2))
3798 l2 = XEXP (SET_SRC (PATTERN (i2)), 0);
3801 /* When coming from gimple, we usually won't have emitted either
3802 the labels or the body of the switch statement. The job being
3803 done here should be done via jump threading at the tree level.
3804 Cases that go the same place should have the same label. */
3808 /* Delete nodes that branch to the default label from a list of
3809 case nodes. Eg. case 5: default: becomes just default: */
3812 strip_default_case_nodes (case_node_ptr *prev, rtx deflab)
3819 if (same_case_target_p (label_rtx (ptr->code_label), deflab))
3826 /* Scan an ordered list of case nodes
3827 combining those with consecutive values or ranges.
3829 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
3832 group_case_nodes (case_node_ptr head)
3834 case_node_ptr node = head;
3839 case_node_ptr np = node;
3841 lab = label_rtx (node->code_label);
3843 /* Try to group the successors of NODE with NODE. */
3844 while (((np = np->right) != 0)
3845 /* Do they jump to the same place? */
3846 && same_case_target_p (label_rtx (np->code_label), lab)
3847 /* Are their ranges consecutive? */
3848 && tree_int_cst_equal (np->low,
3849 fold (build (PLUS_EXPR,
3850 TREE_TYPE (node->high),
3853 /* An overflow is not consecutive. */
3854 && tree_int_cst_lt (node->high,
3855 fold (build (PLUS_EXPR,
3856 TREE_TYPE (node->high),
3858 integer_one_node))))
3860 node->high = np->high;
3862 /* NP is the first node after NODE which can't be grouped with it.
3863 Delete the nodes in between, and move on to that node. */
3869 /* Take an ordered list of case nodes
3870 and transform them into a near optimal binary tree,
3871 on the assumption that any target code selection value is as
3872 likely as any other.
3874 The transformation is performed by splitting the ordered
3875 list into two equal sections plus a pivot. The parts are
3876 then attached to the pivot as left and right branches. Each
3877 branch is then transformed recursively. */
3880 balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
3893 /* Count the number of entries on branch. Also count the ranges. */
3897 if (!tree_int_cst_equal (np->low, np->high))
3901 cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
3905 cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
3913 /* Split this list if it is long enough for that to help. */
3918 /* Find the place in the list that bisects the list's total cost,
3919 Here I gets half the total cost. */
3924 /* Skip nodes while their cost does not reach that amount. */
3925 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
3926 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
3927 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
3930 npp = &(*npp)->right;
3935 /* Leave this branch lopsided, but optimize left-hand
3936 side and fill in `parent' fields for right-hand side. */
3938 np->parent = parent;
3939 balance_case_nodes (&np->left, np);
3940 for (; np->right; np = np->right)
3941 np->right->parent = np;
3945 /* If there are just three nodes, split at the middle one. */
3947 npp = &(*npp)->right;
3950 /* Find the place in the list that bisects the list's total cost,
3951 where ranges count as 2.
3952 Here I gets half the total cost. */
3953 i = (i + ranges + 1) / 2;
3956 /* Skip nodes while their cost does not reach that amount. */
3957 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
3962 npp = &(*npp)->right;
3967 np->parent = parent;
3970 /* Optimize each of the two split parts. */
3971 balance_case_nodes (&np->left, np);
3972 balance_case_nodes (&np->right, np);
3976 /* Else leave this branch as one level,
3977 but fill in `parent' fields. */
3979 np->parent = parent;
3980 for (; np->right; np = np->right)
3981 np->right->parent = np;
3986 /* Search the parent sections of the case node tree
3987 to see if a test for the lower bound of NODE would be redundant.
3988 INDEX_TYPE is the type of the index expression.
3990 The instructions to generate the case decision tree are
3991 output in the same order as nodes are processed so it is
3992 known that if a parent node checks the range of the current
3993 node minus one that the current node is bounded at its lower
3994 span. Thus the test would be redundant. */
3997 node_has_low_bound (case_node_ptr node, tree index_type)
4000 case_node_ptr pnode;
4002 /* If the lower bound of this node is the lowest value in the index type,
4003 we need not test it. */
4005 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
4008 /* If this node has a left branch, the value at the left must be less
4009 than that at this node, so it cannot be bounded at the bottom and
4010 we need not bother testing any further. */
4015 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
4016 node->low, integer_one_node));
4018 /* If the subtraction above overflowed, we can't verify anything.
4019 Otherwise, look for a parent that tests our value - 1. */
4021 if (! tree_int_cst_lt (low_minus_one, node->low))
4024 for (pnode = node->parent; pnode; pnode = pnode->parent)
4025 if (tree_int_cst_equal (low_minus_one, pnode->high))
4031 /* Search the parent sections of the case node tree
4032 to see if a test for the upper bound of NODE would be redundant.
4033 INDEX_TYPE is the type of the index expression.
4035 The instructions to generate the case decision tree are
4036 output in the same order as nodes are processed so it is
4037 known that if a parent node checks the range of the current
4038 node plus one that the current node is bounded at its upper
4039 span. Thus the test would be redundant. */
4042 node_has_high_bound (case_node_ptr node, tree index_type)
4045 case_node_ptr pnode;
4047 /* If there is no upper bound, obviously no test is needed. */
4049 if (TYPE_MAX_VALUE (index_type) == NULL)
4052 /* If the upper bound of this node is the highest value in the type
4053 of the index expression, we need not test against it. */
4055 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
4058 /* If this node has a right branch, the value at the right must be greater
4059 than that at this node, so it cannot be bounded at the top and
4060 we need not bother testing any further. */
4065 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
4066 node->high, integer_one_node));
4068 /* If the addition above overflowed, we can't verify anything.
4069 Otherwise, look for a parent that tests our value + 1. */
4071 if (! tree_int_cst_lt (node->high, high_plus_one))
4074 for (pnode = node->parent; pnode; pnode = pnode->parent)
4075 if (tree_int_cst_equal (high_plus_one, pnode->low))
4081 /* Search the parent sections of the
4082 case node tree to see if both tests for the upper and lower
4083 bounds of NODE would be redundant. */
4086 node_is_bounded (case_node_ptr node, tree index_type)
4088 return (node_has_low_bound (node, index_type)
4089 && node_has_high_bound (node, index_type));
4092 /* Emit an unconditional jump to LABEL unless it would be dead code. */
4095 emit_jump_if_reachable (rtx label)
4097 if (!BARRIER_P (get_last_insn ()))
4101 /* Emit step-by-step code to select a case for the value of INDEX.
4102 The thus generated decision tree follows the form of the
4103 case-node binary tree NODE, whose nodes represent test conditions.
4104 INDEX_TYPE is the type of the index of the switch.
4106 Care is taken to prune redundant tests from the decision tree
4107 by detecting any boundary conditions already checked by
4108 emitted rtx. (See node_has_high_bound, node_has_low_bound
4109 and node_is_bounded, above.)
4111 Where the test conditions can be shown to be redundant we emit
4112 an unconditional jump to the target code. As a further
4113 optimization, the subordinates of a tree node are examined to
4114 check for bounded nodes. In this case conditional and/or
4115 unconditional jumps as a result of the boundary check for the
4116 current node are arranged to target the subordinates associated
4117 code for out of bound conditions on the current node.
4119 We can assume that when control reaches the code generated here,
4120 the index value has already been compared with the parents
4121 of this node, and determined to be on the same side of each parent
4122 as this node is. Thus, if this node tests for the value 51,
4123 and a parent tested for 52, we don't need to consider
4124 the possibility of a value greater than 51. If another parent
4125 tests for the value 50, then this node need not test anything. */
4128 emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
4131 /* If INDEX has an unsigned type, we must make unsigned branches. */
4132 int unsignedp = TYPE_UNSIGNED (index_type);
4133 enum machine_mode mode = GET_MODE (index);
4134 enum machine_mode imode = TYPE_MODE (index_type);
4136 /* See if our parents have already tested everything for us.
4137 If they have, emit an unconditional jump for this node. */
4138 if (node_is_bounded (node, index_type))
4139 emit_jump (label_rtx (node->code_label));
4141 else if (tree_int_cst_equal (node->low, node->high))
4143 /* Node is single valued. First see if the index expression matches
4144 this node and then check our children, if any. */
4146 do_jump_if_equal (index,
4147 convert_modes (mode, imode,
4148 expand_expr (node->low, NULL_RTX,
4151 label_rtx (node->code_label), unsignedp);
4153 if (node->right != 0 && node->left != 0)
4155 /* This node has children on both sides.
4156 Dispatch to one side or the other
4157 by comparing the index value with this node's value.
4158 If one subtree is bounded, check that one first,
4159 so we can avoid real branches in the tree. */
4161 if (node_is_bounded (node->right, index_type))
4163 emit_cmp_and_jump_insns (index,
4166 expand_expr (node->high, NULL_RTX,
4169 GT, NULL_RTX, mode, unsignedp,
4170 label_rtx (node->right->code_label));
4171 emit_case_nodes (index, node->left, default_label, index_type);
4174 else if (node_is_bounded (node->left, index_type))
4176 emit_cmp_and_jump_insns (index,
4179 expand_expr (node->high, NULL_RTX,
4182 LT, NULL_RTX, mode, unsignedp,
4183 label_rtx (node->left->code_label));
4184 emit_case_nodes (index, node->right, default_label, index_type);
4187 /* If both children are single-valued cases with no
4188 children, finish up all the work. This way, we can save
4189 one ordered comparison. */
4190 else if (tree_int_cst_equal (node->right->low, node->right->high)
4191 && node->right->left == 0
4192 && node->right->right == 0
4193 && tree_int_cst_equal (node->left->low, node->left->high)
4194 && node->left->left == 0
4195 && node->left->right == 0)
4197 /* Neither node is bounded. First distinguish the two sides;
4198 then emit the code for one side at a time. */
4200 /* See if the value matches what the right hand side
4202 do_jump_if_equal (index,
4203 convert_modes (mode, imode,
4204 expand_expr (node->right->low,
4208 label_rtx (node->right->code_label),
4211 /* See if the value matches what the left hand side
4213 do_jump_if_equal (index,
4214 convert_modes (mode, imode,
4215 expand_expr (node->left->low,
4219 label_rtx (node->left->code_label),
4225 /* Neither node is bounded. First distinguish the two sides;
4226 then emit the code for one side at a time. */
4228 tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4230 /* See if the value is on the right. */
4231 emit_cmp_and_jump_insns (index,
4234 expand_expr (node->high, NULL_RTX,
4237 GT, NULL_RTX, mode, unsignedp,
4238 label_rtx (test_label));
4240 /* Value must be on the left.
4241 Handle the left-hand subtree. */
4242 emit_case_nodes (index, node->left, default_label, index_type);
4243 /* If left-hand subtree does nothing,
4245 emit_jump_if_reachable (default_label);
4247 /* Code branches here for the right-hand subtree. */
4248 expand_label (test_label);
4249 emit_case_nodes (index, node->right, default_label, index_type);
4253 else if (node->right != 0 && node->left == 0)
4255 /* Here we have a right child but no left so we issue conditional
4256 branch to default and process the right child.
4258 Omit the conditional branch to default if we it avoid only one
4259 right child; it costs too much space to save so little time. */
4261 if (node->right->right || node->right->left
4262 || !tree_int_cst_equal (node->right->low, node->right->high))
4264 if (!node_has_low_bound (node, index_type))
4266 emit_cmp_and_jump_insns (index,
4269 expand_expr (node->high, NULL_RTX,
4272 LT, NULL_RTX, mode, unsignedp,
4276 emit_case_nodes (index, node->right, default_label, index_type);
4279 /* We cannot process node->right normally
4280 since we haven't ruled out the numbers less than
4281 this node's value. So handle node->right explicitly. */
4282 do_jump_if_equal (index,
4285 expand_expr (node->right->low, NULL_RTX,
4288 label_rtx (node->right->code_label), unsignedp);
4291 else if (node->right == 0 && node->left != 0)
4293 /* Just one subtree, on the left. */
4294 if (node->left->left || node->left->right
4295 || !tree_int_cst_equal (node->left->low, node->left->high))
4297 if (!node_has_high_bound (node, index_type))
4299 emit_cmp_and_jump_insns (index,
4302 expand_expr (node->high, NULL_RTX,
4305 GT, NULL_RTX, mode, unsignedp,
4309 emit_case_nodes (index, node->left, default_label, index_type);
4312 /* We cannot process node->left normally
4313 since we haven't ruled out the numbers less than
4314 this node's value. So handle node->left explicitly. */
4315 do_jump_if_equal (index,
4318 expand_expr (node->left->low, NULL_RTX,
4321 label_rtx (node->left->code_label), unsignedp);
4326 /* Node is a range. These cases are very similar to those for a single
4327 value, except that we do not start by testing whether this node
4328 is the one to branch to. */
4330 if (node->right != 0 && node->left != 0)
4332 /* Node has subtrees on both sides.
4333 If the right-hand subtree is bounded,
4334 test for it first, since we can go straight there.
4335 Otherwise, we need to make a branch in the control structure,
4336 then handle the two subtrees. */
4337 tree test_label = 0;
4339 if (node_is_bounded (node->right, index_type))
4340 /* Right hand node is fully bounded so we can eliminate any
4341 testing and branch directly to the target code. */
4342 emit_cmp_and_jump_insns (index,
4345 expand_expr (node->high, NULL_RTX,
4348 GT, NULL_RTX, mode, unsignedp,
4349 label_rtx (node->right->code_label));
4352 /* Right hand node requires testing.
4353 Branch to a label where we will handle it later. */
4355 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4356 emit_cmp_and_jump_insns (index,
4359 expand_expr (node->high, NULL_RTX,
4362 GT, NULL_RTX, mode, unsignedp,
4363 label_rtx (test_label));
4366 /* Value belongs to this node or to the left-hand subtree. */
4368 emit_cmp_and_jump_insns (index,
4371 expand_expr (node->low, NULL_RTX,
4374 GE, NULL_RTX, mode, unsignedp,
4375 label_rtx (node->code_label));
4377 /* Handle the left-hand subtree. */
4378 emit_case_nodes (index, node->left, default_label, index_type);
4380 /* If right node had to be handled later, do that now. */
4384 /* If the left-hand subtree fell through,
4385 don't let it fall into the right-hand subtree. */
4386 emit_jump_if_reachable (default_label);
4388 expand_label (test_label);
4389 emit_case_nodes (index, node->right, default_label, index_type);
4393 else if (node->right != 0 && node->left == 0)
4395 /* Deal with values to the left of this node,
4396 if they are possible. */
4397 if (!node_has_low_bound (node, index_type))
4399 emit_cmp_and_jump_insns (index,
4402 expand_expr (node->low, NULL_RTX,
4405 LT, NULL_RTX, mode, unsignedp,
4409 /* Value belongs to this node or to the right-hand subtree. */
4411 emit_cmp_and_jump_insns (index,
4414 expand_expr (node->high, NULL_RTX,
4417 LE, NULL_RTX, mode, unsignedp,
4418 label_rtx (node->code_label));
4420 emit_case_nodes (index, node->right, default_label, index_type);
4423 else if (node->right == 0 && node->left != 0)
4425 /* Deal with values to the right of this node,
4426 if they are possible. */
4427 if (!node_has_high_bound (node, index_type))
4429 emit_cmp_and_jump_insns (index,
4432 expand_expr (node->high, NULL_RTX,
4435 GT, NULL_RTX, mode, unsignedp,
4439 /* Value belongs to this node or to the left-hand subtree. */
4441 emit_cmp_and_jump_insns (index,
4444 expand_expr (node->low, NULL_RTX,
4447 GE, NULL_RTX, mode, unsignedp,
4448 label_rtx (node->code_label));
4450 emit_case_nodes (index, node->left, default_label, index_type);
4455 /* Node has no children so we check low and high bounds to remove
4456 redundant tests. Only one of the bounds can exist,
4457 since otherwise this node is bounded--a case tested already. */
4458 int high_bound = node_has_high_bound (node, index_type);
4459 int low_bound = node_has_low_bound (node, index_type);
4461 if (!high_bound && low_bound)
4463 emit_cmp_and_jump_insns (index,
4466 expand_expr (node->high, NULL_RTX,
4469 GT, NULL_RTX, mode, unsignedp,
4473 else if (!low_bound && high_bound)
4475 emit_cmp_and_jump_insns (index,
4478 expand_expr (node->low, NULL_RTX,
4481 LT, NULL_RTX, mode, unsignedp,
4484 else if (!low_bound && !high_bound)
4486 /* Widen LOW and HIGH to the same width as INDEX. */
4487 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
4488 tree low = build1 (CONVERT_EXPR, type, node->low);
4489 tree high = build1 (CONVERT_EXPR, type, node->high);
4490 rtx low_rtx, new_index, new_bound;
4492 /* Instead of doing two branches, emit one unsigned branch for
4493 (index-low) > (high-low). */
4494 low_rtx = expand_expr (low, NULL_RTX, mode, 0);
4495 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
4496 NULL_RTX, unsignedp,
4498 new_bound = expand_expr (fold (build (MINUS_EXPR, type,
4502 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
4503 mode, 1, default_label);
4506 emit_jump (label_rtx (node->code_label));
4511 #include "gt-stmt.h"