1 /* Expands front end tree to back end RTL for GCC
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 It also creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
27 The functions whose names start with `expand_' are called by the
28 parser to generate RTL instructions for various kinds of constructs.
30 Some control and binding constructs require calling several such
31 functions at different times. For example, a simple if-then
32 is expanded by calling `expand_start_cond' (with the condition-expression
33 as argument) before parsing the then-clause and calling `expand_end_cond'
34 after parsing the then-clause. */
38 #include "coretypes.h"
47 #include "insn-config.h"
50 #include "hard-reg-set.h"
57 #include "langhooks.h"
63 /* Functions and data structures for expanding case statements. */
65 /* Case label structure, used to hold info on labels within case
66 statements. We handle "range" labels; for a single-value label
67 as in C, the high and low limits are the same.
69 An AVL tree of case nodes is initially created, and later transformed
70 to a list linked via the RIGHT fields in the nodes. Nodes with
71 higher case values are later in the list.
73 Switch statements can be output in one of two forms. A branch table
74 is used if there are more than a few labels and the labels are dense
75 within the range between the smallest and largest case value. If a
76 branch table is used, no further manipulations are done with the case
79 The alternative to the use of a branch table is to generate a series
80 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
81 and PARENT fields to hold a binary tree. Initially the tree is
82 totally unbalanced, with everything on the right. We balance the tree
83 with nodes on the left having lower case values than the parent
84 and nodes on the right having higher values. We then output the tree
87 struct case_node GTY(())
89 struct case_node *left; /* Left son in binary tree */
90 struct case_node *right; /* Right son in binary tree; also node chain */
91 struct case_node *parent; /* Parent of node in binary tree */
92 tree low; /* Lowest index value for this label */
93 tree high; /* Highest index value for this label */
94 tree code_label; /* Label to jump to when node matches */
98 typedef struct case_node case_node;
99 typedef struct case_node *case_node_ptr;
101 /* These are used by estimate_case_costs and balance_case_nodes. */
103 /* This must be a signed type, and non-ANSI compilers lack signed char. */
104 static short cost_table_[129];
105 static int use_cost_table;
106 static int cost_table_initialized;
108 /* Special care is needed because we allow -1, but TREE_INT_CST_LOW
110 #define COST_TABLE(I) cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
112 /* Stack of control and binding constructs we are currently inside.
114 These constructs begin when you call `expand_start_WHATEVER'
115 and end when you call `expand_end_WHATEVER'. This stack records
116 info about how the construct began that tells the end-function
117 what to do. It also may provide information about the construct
118 to alter the behavior of other constructs within the body.
119 For example, they may affect the behavior of C `break' and `continue'.
121 Each construct gets one `struct nesting' object.
122 All of these objects are chained through the `all' field.
123 `nesting_stack' points to the first object (innermost construct).
124 The position of an entry on `nesting_stack' is in its `depth' field.
126 Each type of construct has its own individual stack.
127 For example, loops have `cond_stack'. Each object points to the
128 next object of the same type through the `next' field.
130 Some constructs are visible to `break' exit-statements and others
131 are not. Which constructs are visible depends on the language.
132 Therefore, the data structure allows each construct to be visible
133 or not, according to the args given when the construct is started.
134 The construct is visible if the `exit_label' field is non-null.
135 In that case, the value should be a CODE_LABEL rtx. */
137 struct nesting GTY(())
140 struct nesting *next;
150 /* For conds (if-then and if-then-else statements). */
153 /* Label for the end of the if construct.
154 There is none if EXITFLAG was not set
155 and no `else' has been seen yet. */
157 /* Label for the end of this alternative.
158 This may be the end of the if or the next else/elseif. */
160 } GTY ((tag ("COND_NESTING"))) cond;
161 /* For variable binding contours. */
164 /* Sequence number of this binding contour within the function,
165 in order of entry. */
166 int block_start_count;
167 /* The NOTE that starts this contour.
168 Used by expand_goto to check whether the destination
169 is within each contour or not. */
171 /* The saved target_temp_slot_level from our outer block.
172 We may reset target_temp_slot_level to be the level of
173 this block, if that is done, target_temp_slot_level
174 reverts to the saved target_temp_slot_level at the very
176 int block_target_temp_slot_level;
177 /* True if we are currently emitting insns in an area of
178 output code that is controlled by a conditional
179 expression. This is used by the cleanup handling code to
180 generate conditional cleanup actions. */
181 int conditional_code;
182 /* A place to move the start of the exception region for any
183 of the conditional cleanups, must be at the end or after
184 the start of the last unconditional cleanup, and before any
185 conditional branch points. */
186 rtx last_unconditional_cleanup;
187 } GTY ((tag ("BLOCK_NESTING"))) block;
188 /* For switch (C) or case (Pascal) statements. */
191 /* The insn after which the case dispatch should finally
192 be emitted. Zero for a dummy. */
194 /* A list of case labels; it is first built as an AVL tree.
195 During expand_end_case, this is converted to a list, and may be
196 rearranged into a nearly balanced binary tree. */
197 struct case_node *case_list;
198 /* Label to jump to if no case matches. */
200 /* The expression to be dispatched on. */
202 /* Type that INDEX_EXPR should be converted to. */
204 /* Name of this kind of statement, for warnings. */
205 const char *printname;
206 /* Used to save no_line_numbers till we see the first case label.
207 We set this to -1 when we see the first case label in this
209 int line_number_status;
210 } GTY ((tag ("CASE_NESTING"))) case_stmt;
211 } GTY ((desc ("%1.desc"))) data;
214 /* Allocate and return a new `struct nesting'. */
216 #define ALLOC_NESTING() ggc_alloc (sizeof (struct nesting))
218 /* Pop the nesting stack element by element until we pop off
219 the element which is at the top of STACK.
220 Update all the other stacks, popping off elements from them
221 as we pop them from nesting_stack. */
223 #define POPSTACK(STACK) \
224 do { struct nesting *target = STACK; \
225 struct nesting *this; \
226 do { this = nesting_stack; \
227 if (cond_stack == this) \
228 cond_stack = cond_stack->next; \
229 if (block_stack == this) \
230 block_stack = block_stack->next; \
231 if (case_stack == this) \
232 case_stack = case_stack->next; \
233 nesting_depth = nesting_stack->depth - 1; \
234 nesting_stack = this->all; } \
235 while (this != target); } while (0)
237 /* In some cases it is impossible to generate code for a forward goto
238 until the label definition is seen. This happens when it may be necessary
239 for the goto to reset the stack pointer: we don't yet know how to do that.
240 So expand_goto puts an entry on this fixup list.
241 Each time a binding contour that resets the stack is exited,
243 If the target label has now been defined, we can insert the proper code. */
245 struct goto_fixup GTY(())
247 /* Points to following fixup. */
248 struct goto_fixup *next;
249 /* Points to the insn before the jump insn.
250 If more code must be inserted, it goes after this insn. */
252 /* The LABEL_DECL that this jump is jumping to, or 0
253 for break, continue or return. */
255 /* The BLOCK for the place where this goto was found. */
257 /* The CODE_LABEL rtx that this is jumping to. */
259 /* Number of binding contours started in current function
260 before the label reference. */
261 int block_start_count;
264 struct stmt_status GTY(())
266 /* Chain of all pending binding contours. */
267 struct nesting * x_block_stack;
269 /* If any new stacks are added here, add them to POPSTACKS too. */
271 /* Chain of all pending conditional statements. */
272 struct nesting * x_cond_stack;
274 /* Chain of all pending case or switch statements. */
275 struct nesting * x_case_stack;
277 /* Separate chain including all of the above,
278 chained through the `all' field. */
279 struct nesting * x_nesting_stack;
281 /* Number of entries on nesting_stack now. */
284 /* Number of binding contours started so far in this function. */
285 int x_block_start_count;
287 /* Location of last line-number note, whether we actually
288 emitted it or not. */
289 location_t x_emit_locus;
291 struct goto_fixup *x_goto_fixup_chain;
294 #define block_stack (cfun->stmt->x_block_stack)
295 #define cond_stack (cfun->stmt->x_cond_stack)
296 #define case_stack (cfun->stmt->x_case_stack)
297 #define nesting_stack (cfun->stmt->x_nesting_stack)
298 #define nesting_depth (cfun->stmt->x_nesting_depth)
299 #define current_block_start_count (cfun->stmt->x_block_start_count)
300 #define emit_locus (cfun->stmt->x_emit_locus)
301 #define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)
303 /* Nonzero if we are using EH to handle cleanups. */
304 int using_eh_for_cleanups_p = 0;
306 static int n_occurrences (int, const char *);
307 static bool decl_conflicts_with_clobbers_p (tree, const HARD_REG_SET);
308 static void expand_nl_goto_receiver (void);
309 static bool check_operand_nalternatives (tree, tree);
310 static bool check_unique_operand_names (tree, tree);
311 static char *resolve_operand_name_1 (char *, tree, tree);
312 static void expand_null_return_1 (void);
313 static enum br_predictor return_prediction (rtx);
314 static rtx shift_return_value (rtx);
315 static void expand_value_return (rtx);
316 static void do_jump_if_equal (rtx, rtx, rtx, int);
317 static int estimate_case_costs (case_node_ptr);
318 static bool same_case_target_p (rtx, rtx);
319 static void strip_default_case_nodes (case_node_ptr *, rtx);
320 static bool lshift_cheap_p (void);
321 static int case_bit_test_cmp (const void *, const void *);
322 static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
323 static void group_case_nodes (case_node_ptr);
324 static void balance_case_nodes (case_node_ptr *, case_node_ptr);
325 static int node_has_low_bound (case_node_ptr, tree);
326 static int node_has_high_bound (case_node_ptr, tree);
327 static int node_is_bounded (case_node_ptr, tree);
328 static void emit_jump_if_reachable (rtx);
329 static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
330 static struct case_node *case_tree2list (case_node *, case_node *);
333 using_eh_for_cleanups (void)
335 using_eh_for_cleanups_p = 1;
339 init_stmt_for_function (void)
341 cfun->stmt = ggc_alloc_cleared (sizeof (struct stmt_status));
344 /* Record the current file and line. Called from emit_line_note. */
347 set_file_and_line_for_stmt (location_t location)
349 /* If we're outputting an inline function, and we add a line note,
350 there may be no CFUN->STMT information. So, there's no need to
353 emit_locus = location;
356 /* Emit a no-op instruction. */
363 last_insn = get_last_insn ();
365 && (LABEL_P (last_insn)
366 || (NOTE_P (last_insn)
367 && prev_real_insn (last_insn) == 0)))
368 emit_insn (gen_nop ());
371 /* Return the rtx-label that corresponds to a LABEL_DECL,
372 creating it if necessary. */
375 label_rtx (tree label)
377 if (TREE_CODE (label) != LABEL_DECL)
380 if (!DECL_RTL_SET_P (label))
382 rtx r = gen_label_rtx ();
383 SET_DECL_RTL (label, r);
384 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
385 LABEL_PRESERVE_P (r) = 1;
388 return DECL_RTL (label);
391 /* As above, but also put it on the forced-reference list of the
392 function that contains it. */
394 force_label_rtx (tree label)
396 rtx ref = label_rtx (label);
397 tree function = decl_function_context (label);
403 if (function != current_function_decl)
404 p = find_function_data (function);
408 p->expr->x_forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref,
409 p->expr->x_forced_labels);
413 /* Add an unconditional jump to LABEL as the next sequential instruction. */
416 emit_jump (rtx label)
418 do_pending_stack_adjust ();
419 emit_jump_insn (gen_jump (label));
423 /* Emit code to jump to the address
424 specified by the pointer expression EXP. */
427 expand_computed_goto (tree exp)
429 rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
431 x = convert_memory_address (Pmode, x);
433 do_pending_stack_adjust ();
434 emit_indirect_jump (x);
437 /* Handle goto statements and the labels that they can go to. */
439 /* Specify the location in the RTL code of a label LABEL,
440 which is a LABEL_DECL tree node.
442 This is used for the kind of label that the user can jump to with a
443 goto statement, and for alternatives of a switch or case statement.
444 RTL labels generated for loops and conditionals don't go through here;
445 they are generated directly at the RTL level, by other functions below.
447 Note that this has nothing to do with defining label *names*.
448 Languages vary in how they do that and what that even means. */
451 expand_label (tree label)
453 rtx label_r = label_rtx (label);
455 do_pending_stack_adjust ();
456 emit_label (label_r);
457 if (DECL_NAME (label))
458 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
460 if (DECL_NONLOCAL (label))
462 expand_nl_goto_receiver ();
463 nonlocal_goto_handler_labels
464 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
465 nonlocal_goto_handler_labels);
468 if (FORCED_LABEL (label))
469 forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);
471 if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
472 maybe_set_first_label_num (label_r);
475 /* Generate RTL code for a `goto' statement with target label LABEL.
476 LABEL should be a LABEL_DECL tree node that was or will later be
477 defined with `expand_label'. */
480 expand_goto (tree label)
482 #ifdef ENABLE_CHECKING
483 /* Check for a nonlocal goto to a containing function. Should have
484 gotten translated to __builtin_nonlocal_goto. */
485 tree context = decl_function_context (label);
486 if (context != 0 && context != current_function_decl)
490 emit_jump (label_rtx (label));
493 /* Return the number of times character C occurs in string S. */
495 n_occurrences (int c, const char *s)
503 /* Generate RTL for an asm statement (explicit assembler code).
504 STRING is a STRING_CST node containing the assembler code text,
505 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
506 insn is volatile; don't optimize it. */
509 expand_asm (tree string, int vol)
513 if (TREE_CODE (string) == ADDR_EXPR)
514 string = TREE_OPERAND (string, 0);
516 body = gen_rtx_ASM_INPUT (VOIDmode, TREE_STRING_POINTER (string));
518 MEM_VOLATILE_P (body) = vol;
523 /* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
524 OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
525 inputs and NOUTPUTS outputs to this extended-asm. Upon return,
526 *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
527 memory operand. Similarly, *ALLOWS_REG will be TRUE iff the
528 constraint allows the use of a register operand. And, *IS_INOUT
529 will be true if the operand is read-write, i.e., if it is used as
530 an input as well as an output. If *CONSTRAINT_P is not in
531 canonical form, it will be made canonical. (Note that `+' will be
532 replaced with `=' as part of this process.)
534 Returns TRUE if all went well; FALSE if an error occurred. */
537 parse_output_constraint (const char **constraint_p, int operand_num,
538 int ninputs, int noutputs, bool *allows_mem,
539 bool *allows_reg, bool *is_inout)
541 const char *constraint = *constraint_p;
544 /* Assume the constraint doesn't allow the use of either a register
549 /* Allow the `=' or `+' to not be at the beginning of the string,
550 since it wasn't explicitly documented that way, and there is a
551 large body of code that puts it last. Swap the character to
552 the front, so as not to uglify any place else. */
553 p = strchr (constraint, '=');
555 p = strchr (constraint, '+');
557 /* If the string doesn't contain an `=', issue an error
561 error ("output operand constraint lacks `='");
565 /* If the constraint begins with `+', then the operand is both read
566 from and written to. */
567 *is_inout = (*p == '+');
569 /* Canonicalize the output constraint so that it begins with `='. */
570 if (p != constraint || is_inout)
573 size_t c_len = strlen (constraint);
576 warning ("output constraint `%c' for operand %d is not at the beginning",
579 /* Make a copy of the constraint. */
580 buf = alloca (c_len + 1);
581 strcpy (buf, constraint);
582 /* Swap the first character and the `=' or `+'. */
583 buf[p - constraint] = buf[0];
584 /* Make sure the first character is an `='. (Until we do this,
585 it might be a `+'.) */
587 /* Replace the constraint with the canonicalized string. */
588 *constraint_p = ggc_alloc_string (buf, c_len);
589 constraint = *constraint_p;
592 /* Loop through the constraint string. */
593 for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
598 error ("operand constraint contains incorrectly positioned '+' or '='");
602 if (operand_num + 1 == ninputs + noutputs)
604 error ("`%%' constraint used with last operand");
609 case 'V': case 'm': case 'o':
613 case '?': case '!': case '*': case '&': case '#':
614 case 'E': case 'F': case 'G': case 'H':
615 case 's': case 'i': case 'n':
616 case 'I': case 'J': case 'K': case 'L': case 'M':
617 case 'N': case 'O': case 'P': case ',':
620 case '0': case '1': case '2': case '3': case '4':
621 case '5': case '6': case '7': case '8': case '9':
623 error ("matching constraint not valid in output operand");
627 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
628 excepting those that expand_call created. So match memory
645 if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
647 #ifdef EXTRA_CONSTRAINT_STR
648 else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
650 else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
654 /* Otherwise we can't assume anything about the nature of
655 the constraint except that it isn't purely registers.
656 Treat it like "g" and hope for the best. */
667 /* Similar, but for input constraints. */
670 parse_input_constraint (const char **constraint_p, int input_num,
671 int ninputs, int noutputs, int ninout,
672 const char * const * constraints,
673 bool *allows_mem, bool *allows_reg)
675 const char *constraint = *constraint_p;
676 const char *orig_constraint = constraint;
677 size_t c_len = strlen (constraint);
679 bool saw_match = false;
681 /* Assume the constraint doesn't allow the use of either
682 a register or memory. */
686 /* Make sure constraint has neither `=', `+', nor '&'. */
688 for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
689 switch (constraint[j])
691 case '+': case '=': case '&':
692 if (constraint == orig_constraint)
694 error ("input operand constraint contains `%c'", constraint[j]);
700 if (constraint == orig_constraint
701 && input_num + 1 == ninputs - ninout)
703 error ("`%%' constraint used with last operand");
708 case 'V': case 'm': case 'o':
713 case '?': case '!': case '*': case '#':
714 case 'E': case 'F': case 'G': case 'H':
715 case 's': case 'i': case 'n':
716 case 'I': case 'J': case 'K': case 'L': case 'M':
717 case 'N': case 'O': case 'P': case ',':
720 /* Whether or not a numeric constraint allows a register is
721 decided by the matching constraint, and so there is no need
722 to do anything special with them. We must handle them in
723 the default case, so that we don't unnecessarily force
724 operands to memory. */
725 case '0': case '1': case '2': case '3': case '4':
726 case '5': case '6': case '7': case '8': case '9':
733 match = strtoul (constraint + j, &end, 10);
734 if (match >= (unsigned long) noutputs)
736 error ("matching constraint references invalid operand number");
740 /* Try and find the real constraint for this dup. Only do this
741 if the matching constraint is the only alternative. */
743 && (j == 0 || (j == 1 && constraint[0] == '%')))
745 constraint = constraints[match];
746 *constraint_p = constraint;
747 c_len = strlen (constraint);
749 /* ??? At the end of the loop, we will skip the first part of
750 the matched constraint. This assumes not only that the
751 other constraint is an output constraint, but also that
752 the '=' or '+' come first. */
756 j = end - constraint;
757 /* Anticipate increment at end of loop. */
772 if (! ISALPHA (constraint[j]))
774 error ("invalid punctuation `%c' in constraint", constraint[j]);
777 if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
780 #ifdef EXTRA_CONSTRAINT_STR
781 else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
783 else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
787 /* Otherwise we can't assume anything about the nature of
788 the constraint except that it isn't purely registers.
789 Treat it like "g" and hope for the best. */
797 if (saw_match && !*allows_reg)
798 warning ("matching constraint does not allow a register");
803 /* INPUT is one of the input operands from EXPR, an ASM_EXPR. Returns true
804 if it is an operand which must be passed in memory (i.e. an "m"
805 constraint), false otherwise. */
808 asm_op_is_mem_input (tree input, tree expr)
810 const char *constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (input)));
811 tree outputs = ASM_OUTPUTS (expr);
812 int noutputs = list_length (outputs);
813 const char **constraints
814 = (const char **) alloca ((noutputs) * sizeof (const char *));
816 bool allows_mem, allows_reg;
819 /* Collect output constraints. */
820 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
821 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
823 /* We pass 0 for input_num, ninputs and ninout; they are only used for
824 error checking which will be done at expand time. */
825 parse_input_constraint (&constraint, 0, 0, noutputs, 0, constraints,
826 &allows_mem, &allows_reg);
827 return (!allows_reg && allows_mem);
830 /* Check for overlap between registers marked in CLOBBERED_REGS and
831 anything inappropriate in DECL. Emit error and return TRUE for error,
835 decl_conflicts_with_clobbers_p (tree decl, const HARD_REG_SET clobbered_regs)
837 /* Conflicts between asm-declared register variables and the clobber
838 list are not allowed. */
839 if ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
840 && DECL_REGISTER (decl)
841 && REG_P (DECL_RTL (decl))
842 && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
844 rtx reg = DECL_RTL (decl);
847 for (regno = REGNO (reg);
849 + hard_regno_nregs[REGNO (reg)][GET_MODE (reg)]);
851 if (TEST_HARD_REG_BIT (clobbered_regs, regno))
853 error ("asm-specifier for variable `%s' conflicts with asm clobber list",
854 IDENTIFIER_POINTER (DECL_NAME (decl)));
856 /* Reset registerness to stop multiple errors emitted for a
858 DECL_REGISTER (decl) = 0;
865 /* Generate RTL for an asm statement with arguments.
866 STRING is the instruction template.
867 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
868 Each output or input has an expression in the TREE_VALUE and
869 and a tree list in TREE_PURPOSE which in turn contains a constraint
870 name in TREE_VALUE (or NULL_TREE) and a constraint string
872 CLOBBERS is a list of STRING_CST nodes each naming a hard register
873 that is clobbered by this insn.
875 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
876 Some elements of OUTPUTS may be replaced with trees representing temporary
877 values. The caller should copy those temporary values to the originally
880 VOL nonzero means the insn is volatile; don't optimize it. */
883 expand_asm_operands (tree string, tree outputs, tree inputs,
884 tree clobbers, int vol, location_t locus)
886 rtvec argvec, constraintvec;
888 int ninputs = list_length (inputs);
889 int noutputs = list_length (outputs);
892 HARD_REG_SET clobbered_regs;
893 int clobber_conflict_found = 0;
897 /* Vector of RTX's of evaluated output operands. */
898 rtx *output_rtx = alloca (noutputs * sizeof (rtx));
899 int *inout_opnum = alloca (noutputs * sizeof (int));
900 rtx *real_output_rtx = alloca (noutputs * sizeof (rtx));
901 enum machine_mode *inout_mode
902 = alloca (noutputs * sizeof (enum machine_mode));
903 const char **constraints
904 = alloca ((noutputs + ninputs) * sizeof (const char *));
905 int old_generating_concat_p = generating_concat_p;
907 /* An ASM with no outputs needs to be treated as volatile, for now. */
911 if (! check_operand_nalternatives (outputs, inputs))
914 string = resolve_asm_operand_names (string, outputs, inputs);
916 /* Collect constraints. */
918 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
919 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
920 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
921 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
923 /* Sometimes we wish to automatically clobber registers across an asm.
924 Case in point is when the i386 backend moved from cc0 to a hard reg --
925 maintaining source-level compatibility means automatically clobbering
926 the flags register. */
927 clobbers = targetm.md_asm_clobbers (clobbers);
929 /* Count the number of meaningful clobbered registers, ignoring what
930 we would ignore later. */
932 CLEAR_HARD_REG_SET (clobbered_regs);
933 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
935 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
937 i = decode_reg_name (regname);
938 if (i >= 0 || i == -4)
941 error ("unknown register name `%s' in `asm'", regname);
943 /* Mark clobbered registers. */
946 /* Clobbering the PIC register is an error */
947 if (i == (int) PIC_OFFSET_TABLE_REGNUM)
949 error ("PIC register `%s' clobbered in `asm'", regname);
953 SET_HARD_REG_BIT (clobbered_regs, i);
957 /* First pass over inputs and outputs checks validity and sets
958 mark_addressable if needed. */
961 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
963 tree val = TREE_VALUE (tail);
964 tree type = TREE_TYPE (val);
965 const char *constraint;
970 /* If there's an erroneous arg, emit no insn. */
971 if (type == error_mark_node)
974 /* Try to parse the output constraint. If that fails, there's
975 no point in going further. */
976 constraint = constraints[i];
977 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
978 &allows_mem, &allows_reg, &is_inout))
985 && REG_P (DECL_RTL (val))
986 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
987 lang_hooks.mark_addressable (val);
994 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
996 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1000 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
1002 bool allows_reg, allows_mem;
1003 const char *constraint;
1005 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
1006 would get VOIDmode and that could cause a crash in reload. */
1007 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1010 constraint = constraints[i + noutputs];
1011 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1012 constraints, &allows_mem, &allows_reg))
1015 if (! allows_reg && allows_mem)
1016 lang_hooks.mark_addressable (TREE_VALUE (tail));
1019 /* Second pass evaluates arguments. */
1022 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1024 tree val = TREE_VALUE (tail);
1025 tree type = TREE_TYPE (val);
1031 if (!parse_output_constraint (&constraints[i], i, ninputs,
1032 noutputs, &allows_mem, &allows_reg,
1036 /* If an output operand is not a decl or indirect ref and our constraint
1037 allows a register, make a temporary to act as an intermediate.
1038 Make the asm insn write into that, then our caller will copy it to
1039 the real output operand. Likewise for promoted variables. */
1041 generating_concat_p = 0;
1043 real_output_rtx[i] = NULL_RTX;
1044 if ((TREE_CODE (val) == INDIRECT_REF
1047 && (allows_mem || REG_P (DECL_RTL (val)))
1048 && ! (REG_P (DECL_RTL (val))
1049 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
1053 op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
1055 op = validize_mem (op);
1057 if (! allows_reg && !MEM_P (op))
1058 error ("output number %d not directly addressable", i);
1059 if ((! allows_mem && MEM_P (op))
1060 || GET_CODE (op) == CONCAT)
1062 real_output_rtx[i] = op;
1063 op = gen_reg_rtx (GET_MODE (op));
1065 emit_move_insn (op, real_output_rtx[i]);
1070 op = assign_temp (type, 0, 0, 1);
1071 op = validize_mem (op);
1072 TREE_VALUE (tail) = make_tree (type, op);
1076 generating_concat_p = old_generating_concat_p;
1080 inout_mode[ninout] = TYPE_MODE (type);
1081 inout_opnum[ninout++] = i;
1084 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1085 clobber_conflict_found = 1;
1088 /* Make vectors for the expression-rtx, constraint strings,
1089 and named operands. */
1091 argvec = rtvec_alloc (ninputs);
1092 constraintvec = rtvec_alloc (ninputs);
1094 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
1095 : GET_MODE (output_rtx[0])),
1096 TREE_STRING_POINTER (string),
1097 empty_string, 0, argvec, constraintvec,
1100 MEM_VOLATILE_P (body) = vol;
1102 /* Eval the inputs and put them into ARGVEC.
1103 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1105 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
1107 bool allows_reg, allows_mem;
1108 const char *constraint;
1112 constraint = constraints[i + noutputs];
1113 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1114 constraints, &allows_mem, &allows_reg))
1117 generating_concat_p = 0;
1119 val = TREE_VALUE (tail);
1120 type = TREE_TYPE (val);
1121 op = expand_expr (val, NULL_RTX, VOIDmode,
1122 (allows_mem && !allows_reg
1123 ? EXPAND_MEMORY : EXPAND_NORMAL));
1125 /* Never pass a CONCAT to an ASM. */
1126 if (GET_CODE (op) == CONCAT)
1127 op = force_reg (GET_MODE (op), op);
1128 else if (MEM_P (op))
1129 op = validize_mem (op);
1131 if (asm_operand_ok (op, constraint) <= 0)
1134 op = force_reg (TYPE_MODE (type), op);
1135 else if (!allows_mem)
1136 warning ("asm operand %d probably doesn't match constraints",
1138 else if (MEM_P (op))
1140 /* We won't recognize either volatile memory or memory
1141 with a queued address as available a memory_operand
1142 at this point. Ignore it: clearly this *is* a memory. */
1146 warning ("use of memory input without lvalue in "
1147 "asm operand %d is deprecated", i + noutputs);
1149 if (CONSTANT_P (op))
1151 rtx mem = force_const_mem (TYPE_MODE (type), op);
1153 op = validize_mem (mem);
1155 op = force_reg (TYPE_MODE (type), op);
1158 || GET_CODE (op) == SUBREG
1159 || GET_CODE (op) == CONCAT)
1161 tree qual_type = build_qualified_type (type,
1163 | TYPE_QUAL_CONST));
1164 rtx memloc = assign_temp (qual_type, 1, 1, 1);
1165 memloc = validize_mem (memloc);
1166 emit_move_insn (memloc, op);
1172 generating_concat_p = old_generating_concat_p;
1173 ASM_OPERANDS_INPUT (body, i) = op;
1175 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
1176 = gen_rtx_ASM_INPUT (TYPE_MODE (type), constraints[i + noutputs]);
1178 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1179 clobber_conflict_found = 1;
1182 /* Protect all the operands from the queue now that they have all been
1185 generating_concat_p = 0;
1187 /* For in-out operands, copy output rtx to input rtx. */
1188 for (i = 0; i < ninout; i++)
1190 int j = inout_opnum[i];
1193 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
1196 sprintf (buffer, "%d", j);
1197 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
1198 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
1201 generating_concat_p = old_generating_concat_p;
1203 /* Now, for each output, construct an rtx
1204 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
1205 ARGVEC CONSTRAINTS OPNAMES))
1206 If there is more than one, put them inside a PARALLEL. */
1208 if (noutputs == 1 && nclobbers == 0)
1210 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
1211 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1214 else if (noutputs == 0 && nclobbers == 0)
1216 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1228 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1230 /* For each output operand, store a SET. */
1231 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1233 XVECEXP (body, 0, i)
1234 = gen_rtx_SET (VOIDmode,
1236 gen_rtx_ASM_OPERANDS
1237 (GET_MODE (output_rtx[i]),
1238 TREE_STRING_POINTER (string),
1239 constraints[i], i, argvec, constraintvec,
1242 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1245 /* If there are no outputs (but there are some clobbers)
1246 store the bare ASM_OPERANDS into the PARALLEL. */
1249 XVECEXP (body, 0, i++) = obody;
1251 /* Store (clobber REG) for each clobbered register specified. */
1253 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1255 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1256 int j = decode_reg_name (regname);
1261 if (j == -3) /* `cc', which is not a register */
1264 if (j == -4) /* `memory', don't cache memory across asm */
1266 XVECEXP (body, 0, i++)
1267 = gen_rtx_CLOBBER (VOIDmode,
1270 gen_rtx_SCRATCH (VOIDmode)));
1274 /* Ignore unknown register, error already signaled. */
1278 /* Use QImode since that's guaranteed to clobber just one reg. */
1279 clobbered_reg = gen_rtx_REG (QImode, j);
1281 /* Do sanity check for overlap between clobbers and respectively
1282 input and outputs that hasn't been handled. Such overlap
1283 should have been detected and reported above. */
1284 if (!clobber_conflict_found)
1288 /* We test the old body (obody) contents to avoid tripping
1289 over the under-construction body. */
1290 for (opno = 0; opno < noutputs; opno++)
1291 if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1292 internal_error ("asm clobber conflict with output operand");
1294 for (opno = 0; opno < ninputs - ninout; opno++)
1295 if (reg_overlap_mentioned_p (clobbered_reg,
1296 ASM_OPERANDS_INPUT (obody, opno)))
1297 internal_error ("asm clobber conflict with input operand");
1300 XVECEXP (body, 0, i++)
1301 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1307 /* For any outputs that needed reloading into registers, spill them
1308 back to where they belong. */
1309 for (i = 0; i < noutputs; ++i)
1310 if (real_output_rtx[i])
1311 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1317 expand_asm_expr (tree exp)
1323 if (ASM_INPUT_P (exp))
1325 expand_asm (ASM_STRING (exp), ASM_VOLATILE_P (exp));
1329 outputs = ASM_OUTPUTS (exp);
1330 noutputs = list_length (outputs);
1331 /* o[I] is the place that output number I should be written. */
1332 o = (tree *) alloca (noutputs * sizeof (tree));
1334 /* Record the contents of OUTPUTS before it is modified. */
1335 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1336 o[i] = TREE_VALUE (tail);
1338 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
1339 OUTPUTS some trees for where the values were actually stored. */
1340 expand_asm_operands (ASM_STRING (exp), outputs, ASM_INPUTS (exp),
1341 ASM_CLOBBERS (exp), ASM_VOLATILE_P (exp),
1344 /* Copy all the intermediate outputs into the specified outputs. */
1345 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1347 if (o[i] != TREE_VALUE (tail))
1349 expand_assignment (o[i], TREE_VALUE (tail), 0);
1352 /* Restore the original value so that it's correct the next
1353 time we expand this function. */
1354 TREE_VALUE (tail) = o[i];
1359 /* A subroutine of expand_asm_operands. Check that all operands have
1360 the same number of alternatives. Return true if so. */
1363 check_operand_nalternatives (tree outputs, tree inputs)
1365 if (outputs || inputs)
1367 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1369 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1372 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1374 error ("too many alternatives in `asm'");
1381 const char *constraint
1382 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1384 if (n_occurrences (',', constraint) != nalternatives)
1386 error ("operand constraints for `asm' differ in number of alternatives");
1390 if (TREE_CHAIN (tmp))
1391 tmp = TREE_CHAIN (tmp);
1393 tmp = next, next = 0;
1400 /* A subroutine of expand_asm_operands. Check that all operand names
1401 are unique. Return true if so. We rely on the fact that these names
1402 are identifiers, and so have been canonicalized by get_identifier,
1403 so all we need are pointer comparisons. */
1406 check_unique_operand_names (tree outputs, tree inputs)
1410 for (i = outputs; i ; i = TREE_CHAIN (i))
1412 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1416 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1417 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1421 for (i = inputs; i ; i = TREE_CHAIN (i))
1423 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1427 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1428 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1430 for (j = outputs; j ; j = TREE_CHAIN (j))
1431 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1438 error ("duplicate asm operand name '%s'",
1439 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
1443 /* A subroutine of expand_asm_operands. Resolve the names of the operands
1444 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1445 STRING and in the constraints to those numbers. */
1448 resolve_asm_operand_names (tree string, tree outputs, tree inputs)
1455 check_unique_operand_names (outputs, inputs);
1457 /* Substitute [<name>] in input constraint strings. There should be no
1458 named operands in output constraints. */
1459 for (t = inputs; t ; t = TREE_CHAIN (t))
1461 c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1462 if (strchr (c, '[') != NULL)
1464 p = buffer = xstrdup (c);
1465 while ((p = strchr (p, '[')) != NULL)
1466 p = resolve_operand_name_1 (p, outputs, inputs);
1467 TREE_VALUE (TREE_PURPOSE (t))
1468 = build_string (strlen (buffer), buffer);
1473 /* Now check for any needed substitutions in the template. */
1474 c = TREE_STRING_POINTER (string);
1475 while ((c = strchr (c, '%')) != NULL)
1479 else if (ISALPHA (c[1]) && c[2] == '[')
1490 /* OK, we need to make a copy so we can perform the substitutions.
1491 Assume that we will not need extra space--we get to remove '['
1492 and ']', which means we cannot have a problem until we have more
1493 than 999 operands. */
1494 buffer = xstrdup (TREE_STRING_POINTER (string));
1495 p = buffer + (c - TREE_STRING_POINTER (string));
1497 while ((p = strchr (p, '%')) != NULL)
1501 else if (ISALPHA (p[1]) && p[2] == '[')
1509 p = resolve_operand_name_1 (p, outputs, inputs);
1512 string = build_string (strlen (buffer), buffer);
1519 /* A subroutine of resolve_operand_names. P points to the '[' for a
1520 potential named operand of the form [<name>]. In place, replace
1521 the name and brackets with a number. Return a pointer to the
1522 balance of the string after substitution. */
1525 resolve_operand_name_1 (char *p, tree outputs, tree inputs)
1532 /* Collect the operand name. */
1533 q = strchr (p, ']');
1536 error ("missing close brace for named operand");
1537 return strchr (p, '\0');
1541 /* Resolve the name to a number. */
1542 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
1544 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1547 const char *c = TREE_STRING_POINTER (name);
1548 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
1552 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
1554 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1557 const char *c = TREE_STRING_POINTER (name);
1558 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
1564 error ("undefined named operand '%s'", p + 1);
1568 /* Replace the name with the number. Unfortunately, not all libraries
1569 get the return value of sprintf correct, so search for the end of the
1570 generated string by hand. */
1571 sprintf (p, "%d", op);
1572 p = strchr (p, '\0');
1574 /* Verify the no extra buffer space assumption. */
1578 /* Shift the rest of the buffer down to fill the gap. */
1579 memmove (p, q + 1, strlen (q + 1) + 1);
1584 /* Generate RTL to evaluate the expression EXP. */
1587 expand_expr_stmt (tree exp)
1592 value = expand_expr (exp, const0_rtx, VOIDmode, 0);
1593 type = TREE_TYPE (exp);
1595 /* If all we do is reference a volatile value in memory,
1596 copy it to a register to be sure it is actually touched. */
1597 if (value && MEM_P (value) && TREE_THIS_VOLATILE (exp))
1599 if (TYPE_MODE (type) == VOIDmode)
1601 else if (TYPE_MODE (type) != BLKmode)
1602 value = copy_to_reg (value);
1605 rtx lab = gen_label_rtx ();
1607 /* Compare the value with itself to reference it. */
1608 emit_cmp_and_jump_insns (value, value, EQ,
1609 expand_expr (TYPE_SIZE (type),
1610 NULL_RTX, VOIDmode, 0),
1616 /* Free any temporaries used to evaluate this expression. */
1620 /* Warn if EXP contains any computations whose results are not used.
1621 Return 1 if a warning is printed; 0 otherwise. LOCUS is the
1622 (potential) location of the expression. */
1625 warn_if_unused_value (tree exp, location_t locus)
1628 if (TREE_USED (exp))
1631 /* Don't warn about void constructs. This includes casting to void,
1632 void function calls, and statement expressions with a final cast
1634 if (VOID_TYPE_P (TREE_TYPE (exp)))
1637 if (EXPR_LOCUS (exp))
1638 locus = *EXPR_LOCUS (exp);
1640 switch (TREE_CODE (exp))
1642 case PREINCREMENT_EXPR:
1643 case POSTINCREMENT_EXPR:
1644 case PREDECREMENT_EXPR:
1645 case POSTDECREMENT_EXPR:
1650 case TRY_CATCH_EXPR:
1651 case WITH_CLEANUP_EXPR:
1656 /* For a binding, warn if no side effect within it. */
1657 exp = BIND_EXPR_BODY (exp);
1661 exp = TREE_OPERAND (exp, 0);
1664 case TRUTH_ORIF_EXPR:
1665 case TRUTH_ANDIF_EXPR:
1666 /* In && or ||, warn if 2nd operand has no side effect. */
1667 exp = TREE_OPERAND (exp, 1);
1671 if (TREE_NO_WARNING (exp))
1673 if (warn_if_unused_value (TREE_OPERAND (exp, 0), locus))
1675 /* Let people do `(foo (), 0)' without a warning. */
1676 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1678 exp = TREE_OPERAND (exp, 1);
1683 case NON_LVALUE_EXPR:
1684 /* Don't warn about conversions not explicit in the user's program. */
1685 if (TREE_NO_WARNING (exp))
1687 /* Assignment to a cast usually results in a cast of a modify.
1688 Don't complain about that. There can be an arbitrary number of
1689 casts before the modify, so we must loop until we find the first
1690 non-cast expression and then test to see if that is a modify. */
1692 tree tem = TREE_OPERAND (exp, 0);
1694 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
1695 tem = TREE_OPERAND (tem, 0);
1697 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
1698 || TREE_CODE (tem) == CALL_EXPR)
1704 /* Don't warn about automatic dereferencing of references, since
1705 the user cannot control it. */
1706 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1708 exp = TREE_OPERAND (exp, 0);
1714 /* Referencing a volatile value is a side effect, so don't warn. */
1716 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1717 && TREE_THIS_VOLATILE (exp))
1720 /* If this is an expression which has no operands, there is no value
1721 to be unused. There are no such language-independent codes,
1722 but front ends may define such. */
1723 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
1724 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
1728 /* If this is an expression with side effects, don't warn. */
1729 if (TREE_SIDE_EFFECTS (exp))
1732 warning ("%Hvalue computed is not used", &locus);
1737 /* Generate RTL for the start of an if-then. COND is the expression
1738 whose truth should be tested.
1740 If EXITFLAG is nonzero, this conditional is visible to
1741 `exit_something'. */
1744 expand_start_cond (tree cond, int exitflag)
1746 struct nesting *thiscond = ALLOC_NESTING ();
1748 /* Make an entry on cond_stack for the cond we are entering. */
1750 thiscond->desc = COND_NESTING;
1751 thiscond->next = cond_stack;
1752 thiscond->all = nesting_stack;
1753 thiscond->depth = ++nesting_depth;
1754 thiscond->data.cond.next_label = gen_label_rtx ();
1755 /* Before we encounter an `else', we don't need a separate exit label
1756 unless there are supposed to be exit statements
1757 to exit this conditional. */
1758 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
1759 thiscond->data.cond.endif_label = thiscond->exit_label;
1760 cond_stack = thiscond;
1761 nesting_stack = thiscond;
1763 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
1766 /* Generate RTL between then-clause and the elseif-clause
1767 of an if-then-elseif-.... */
1770 expand_start_elseif (tree cond)
1772 if (cond_stack->data.cond.endif_label == 0)
1773 cond_stack->data.cond.endif_label = gen_label_rtx ();
1774 emit_jump (cond_stack->data.cond.endif_label);
1775 emit_label (cond_stack->data.cond.next_label);
1776 cond_stack->data.cond.next_label = gen_label_rtx ();
1777 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1780 /* Generate RTL between the then-clause and the else-clause
1781 of an if-then-else. */
1784 expand_start_else (void)
1786 if (cond_stack->data.cond.endif_label == 0)
1787 cond_stack->data.cond.endif_label = gen_label_rtx ();
1789 emit_jump (cond_stack->data.cond.endif_label);
1790 emit_label (cond_stack->data.cond.next_label);
1791 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
1794 /* After calling expand_start_else, turn this "else" into an "else if"
1795 by providing another condition. */
1798 expand_elseif (tree cond)
1800 cond_stack->data.cond.next_label = gen_label_rtx ();
1801 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1804 /* Generate RTL for the end of an if-then.
1805 Pop the record for it off of cond_stack. */
1808 expand_end_cond (void)
1810 struct nesting *thiscond = cond_stack;
1812 do_pending_stack_adjust ();
1813 if (thiscond->data.cond.next_label)
1814 emit_label (thiscond->data.cond.next_label);
1815 if (thiscond->data.cond.endif_label)
1816 emit_label (thiscond->data.cond.endif_label);
1818 POPSTACK (cond_stack);
1821 /* Return nonzero if we should preserve sub-expressions as separate
1822 pseudos. We never do so if we aren't optimizing. We always do so
1823 if -fexpensive-optimizations. */
1826 preserve_subexpressions_p (void)
1828 if (flag_expensive_optimizations)
1831 if (optimize == 0 || cfun == 0 || cfun->stmt == 0)
1838 /* Generate RTL to return from the current function, with no value.
1839 (That is, we do not do anything about returning any value.) */
1842 expand_null_return (void)
1844 /* If this function was declared to return a value, but we
1845 didn't, clobber the return registers so that they are not
1846 propagated live to the rest of the function. */
1847 clobber_return_register ();
1849 expand_null_return_1 ();
1852 /* Generate RTL to return directly from the current function.
1853 (That is, we bypass any return value.) */
1856 expand_naked_return (void)
1860 clear_pending_stack_adjust ();
1861 do_pending_stack_adjust ();
1863 end_label = naked_return_label;
1865 end_label = naked_return_label = gen_label_rtx ();
1867 emit_jump (end_label);
1870 /* Try to guess whether the value of return means error code. */
1871 static enum br_predictor
1872 return_prediction (rtx val)
1874 /* Different heuristics for pointers and scalars. */
1875 if (POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
1877 /* NULL is usually not returned. */
1878 if (val == const0_rtx)
1879 return PRED_NULL_RETURN;
1883 /* Negative return values are often used to indicate
1885 if (GET_CODE (val) == CONST_INT
1886 && INTVAL (val) < 0)
1887 return PRED_NEGATIVE_RETURN;
1888 /* Constant return values are also usually erors,
1889 zero/one often mean booleans so exclude them from the
1891 if (CONSTANT_P (val)
1892 && (val != const0_rtx && val != const1_rtx))
1893 return PRED_CONST_RETURN;
1895 return PRED_NO_PREDICTION;
1899 /* If the current function returns values in the most significant part
1900 of a register, shift return value VAL appropriately. The mode of
1901 the function's return type is known not to be BLKmode. */
1904 shift_return_value (rtx val)
1908 type = TREE_TYPE (DECL_RESULT (current_function_decl));
1909 if (targetm.calls.return_in_msb (type))
1912 HOST_WIDE_INT shift;
1914 target = DECL_RTL (DECL_RESULT (current_function_decl));
1915 shift = (GET_MODE_BITSIZE (GET_MODE (target))
1916 - BITS_PER_UNIT * int_size_in_bytes (type));
1918 val = expand_shift (LSHIFT_EXPR, GET_MODE (target),
1919 gen_lowpart (GET_MODE (target), val),
1920 build_int_2 (shift, 0), target, 1);
1926 /* Generate RTL to return from the current function, with value VAL. */
1929 expand_value_return (rtx val)
1932 enum br_predictor pred;
1934 if (flag_guess_branch_prob
1935 && (pred = return_prediction (val)) != PRED_NO_PREDICTION)
1937 /* Emit information for branch prediction. */
1940 note = emit_note (NOTE_INSN_PREDICTION);
1942 NOTE_PREDICTION (note) = NOTE_PREDICT (pred, NOT_TAKEN);
1946 return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
1948 /* Copy the value to the return location
1949 unless it's already there. */
1951 if (return_reg != val)
1953 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
1954 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
1956 int unsignedp = TYPE_UNSIGNED (type);
1957 enum machine_mode old_mode
1958 = DECL_MODE (DECL_RESULT (current_function_decl));
1959 enum machine_mode mode
1960 = promote_mode (type, old_mode, &unsignedp, 1);
1962 if (mode != old_mode)
1963 val = convert_modes (mode, old_mode, val, unsignedp);
1965 if (GET_CODE (return_reg) == PARALLEL)
1966 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
1968 emit_move_insn (return_reg, val);
1971 expand_null_return_1 ();
1974 /* Output a return with no value. */
1977 expand_null_return_1 (void)
1981 clear_pending_stack_adjust ();
1982 do_pending_stack_adjust ();
1984 end_label = return_label;
1986 end_label = return_label = gen_label_rtx ();
1987 emit_jump (end_label);
1990 /* Generate RTL to evaluate the expression RETVAL and return it
1991 from the current function. */
1994 expand_return (tree retval)
2000 /* If function wants no value, give it none. */
2001 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2003 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2004 expand_null_return ();
2008 if (retval == error_mark_node)
2010 /* Treat this like a return of no value from a function that
2012 expand_null_return ();
2015 else if (TREE_CODE (retval) == RESULT_DECL)
2016 retval_rhs = retval;
2017 else if ((TREE_CODE (retval) == MODIFY_EXPR
2018 || TREE_CODE (retval) == INIT_EXPR)
2019 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2020 retval_rhs = TREE_OPERAND (retval, 1);
2022 retval_rhs = retval;
2024 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
2026 /* If the result is an aggregate that is being returned in one (or more)
2027 registers, load the registers here. The compiler currently can't handle
2028 copying a BLKmode value into registers. We could put this code in a
2029 more general area (for use by everyone instead of just function
2030 call/return), but until this feature is generally usable it is kept here
2031 (and in expand_call). */
2034 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2035 && REG_P (result_rtl))
2038 unsigned HOST_WIDE_INT bitpos, xbitpos;
2039 unsigned HOST_WIDE_INT padding_correction = 0;
2040 unsigned HOST_WIDE_INT bytes
2041 = int_size_in_bytes (TREE_TYPE (retval_rhs));
2042 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2043 unsigned int bitsize
2044 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
2045 rtx *result_pseudos = alloca (sizeof (rtx) * n_regs);
2046 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
2047 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2048 enum machine_mode tmpmode, result_reg_mode;
2052 expand_null_return ();
2056 /* If the structure doesn't take up a whole number of words, see
2057 whether the register value should be padded on the left or on
2058 the right. Set PADDING_CORRECTION to the number of padding
2059 bits needed on the left side.
2061 In most ABIs, the structure will be returned at the least end of
2062 the register, which translates to right padding on little-endian
2063 targets and left padding on big-endian targets. The opposite
2064 holds if the structure is returned at the most significant
2065 end of the register. */
2066 if (bytes % UNITS_PER_WORD != 0
2067 && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
2069 : BYTES_BIG_ENDIAN))
2070 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2073 /* Copy the structure BITSIZE bits at a time. */
2074 for (bitpos = 0, xbitpos = padding_correction;
2075 bitpos < bytes * BITS_PER_UNIT;
2076 bitpos += bitsize, xbitpos += bitsize)
2078 /* We need a new destination pseudo each time xbitpos is
2079 on a word boundary and when xbitpos == padding_correction
2080 (the first time through). */
2081 if (xbitpos % BITS_PER_WORD == 0
2082 || xbitpos == padding_correction)
2084 /* Generate an appropriate register. */
2085 dst = gen_reg_rtx (word_mode);
2086 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2088 /* Clear the destination before we move anything into it. */
2089 emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
2092 /* We need a new source operand each time bitpos is on a word
2094 if (bitpos % BITS_PER_WORD == 0)
2095 src = operand_subword_force (result_val,
2096 bitpos / BITS_PER_WORD,
2099 /* Use bitpos for the source extraction (left justified) and
2100 xbitpos for the destination store (right justified). */
2101 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2102 extract_bit_field (src, bitsize,
2103 bitpos % BITS_PER_WORD, 1,
2104 NULL_RTX, word_mode, word_mode));
2107 tmpmode = GET_MODE (result_rtl);
2108 if (tmpmode == BLKmode)
2110 /* Find the smallest integer mode large enough to hold the
2111 entire structure and use that mode instead of BLKmode
2112 on the USE insn for the return register. */
2113 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2114 tmpmode != VOIDmode;
2115 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
2116 /* Have we found a large enough mode? */
2117 if (GET_MODE_SIZE (tmpmode) >= bytes)
2120 /* No suitable mode found. */
2121 if (tmpmode == VOIDmode)
2124 PUT_MODE (result_rtl, tmpmode);
2127 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
2128 result_reg_mode = word_mode;
2130 result_reg_mode = tmpmode;
2131 result_reg = gen_reg_rtx (result_reg_mode);
2133 for (i = 0; i < n_regs; i++)
2134 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
2137 if (tmpmode != result_reg_mode)
2138 result_reg = gen_lowpart (tmpmode, result_reg);
2140 expand_value_return (result_reg);
2142 else if (retval_rhs != 0
2143 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
2144 && (REG_P (result_rtl)
2145 || (GET_CODE (result_rtl) == PARALLEL)))
2147 /* Calculate the return value into a temporary (usually a pseudo
2149 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
2150 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
2152 val = assign_temp (nt, 0, 0, 1);
2153 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
2154 val = force_not_mem (val);
2155 /* Return the calculated value. */
2156 expand_value_return (shift_return_value (val));
2160 /* No hard reg used; calculate value into hard return reg. */
2161 expand_expr (retval, const0_rtx, VOIDmode, 0);
2162 expand_value_return (result_rtl);
2166 /* Generate the RTL code for entering a binding contour.
2167 The variables are declared one by one, by calls to `expand_decl'.
2169 FLAGS is a bitwise or of the following flags:
2171 1 - Nonzero if this construct should be visible to
2174 2 - Nonzero if this contour does not require a
2175 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
2176 language-independent code should set this flag because they
2177 will not create corresponding BLOCK nodes. (There should be
2178 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
2179 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
2180 when expand_end_bindings is called.
2182 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
2183 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
2187 expand_start_bindings_and_block (int flags, tree block)
2189 struct nesting *thisblock = ALLOC_NESTING ();
2191 int exit_flag = ((flags & 1) != 0);
2192 int block_flag = ((flags & 2) == 0);
2194 /* If a BLOCK is supplied, then the caller should be requesting a
2195 NOTE_INSN_BLOCK_BEG note. */
2196 if (!block_flag && block)
2199 /* Create a note to mark the beginning of the block. */
2200 note = emit_note (NOTE_INSN_DELETED);
2202 /* Make an entry on block_stack for the block we are entering. */
2204 thisblock->desc = BLOCK_NESTING;
2205 thisblock->next = block_stack;
2206 thisblock->all = nesting_stack;
2207 thisblock->depth = ++nesting_depth;
2208 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
2210 thisblock->data.block.conditional_code = 0;
2211 thisblock->data.block.last_unconditional_cleanup = note;
2212 /* When we insert instructions after the last unconditional cleanup,
2213 we don't adjust last_insn. That means that a later add_insn will
2214 clobber the instructions we've just added. The easiest way to
2215 fix this is to just insert another instruction here, so that the
2216 instructions inserted after the last unconditional cleanup are
2217 never the last instruction. */
2218 emit_note (NOTE_INSN_DELETED);
2220 thisblock->data.block.first_insn = note;
2221 thisblock->data.block.block_start_count = ++current_block_start_count;
2222 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
2223 block_stack = thisblock;
2224 nesting_stack = thisblock;
2226 /* Make a new level for allocating stack slots. */
2230 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
2231 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
2232 expand_expr are made. After we end the region, we know that all
2233 space for all temporaries that were created by TARGET_EXPRs will be
2234 destroyed and their space freed for reuse. */
2237 expand_start_target_temps (void)
2239 /* This is so that even if the result is preserved, the space
2240 allocated will be freed, as we know that it is no longer in use. */
2243 /* Start a new binding layer that will keep track of all cleanup
2244 actions to be performed. */
2245 expand_start_bindings (2);
2247 target_temp_slot_level = temp_slot_level;
2251 expand_end_target_temps (void)
2253 expand_end_bindings (NULL_TREE, 0, 0);
2255 /* This is so that even if the result is preserved, the space
2256 allocated will be freed, as we know that it is no longer in use. */
2260 /* Given a pointer to a BLOCK node return nonzero if (and only if) the node
2261 in question represents the outermost pair of curly braces (i.e. the "body
2262 block") of a function or method.
2264 For any BLOCK node representing a "body block" of a function or method, the
2265 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
2266 represents the outermost (function) scope for the function or method (i.e.
2267 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
2268 *that* node in turn will point to the relevant FUNCTION_DECL node. */
2271 is_body_block (tree stmt)
2273 if (lang_hooks.no_body_blocks)
2276 if (TREE_CODE (stmt) == BLOCK)
2278 tree parent = BLOCK_SUPERCONTEXT (stmt);
2280 if (parent && TREE_CODE (parent) == BLOCK)
2282 tree grandparent = BLOCK_SUPERCONTEXT (parent);
2284 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
2292 /* Return an opaque pointer to the current nesting level, so frontend code
2293 can check its own sanity. */
2296 current_nesting_level (void)
2298 return cfun ? block_stack : 0;
2301 /* Emit code to restore vital registers at the beginning of a nonlocal goto
2304 expand_nl_goto_receiver (void)
2306 /* Clobber the FP when we get here, so we have to make sure it's
2307 marked as used by this function. */
2308 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
2310 /* Mark the static chain as clobbered here so life information
2311 doesn't get messed up for it. */
2312 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
2314 #ifdef HAVE_nonlocal_goto
2315 if (! HAVE_nonlocal_goto)
2317 /* First adjust our frame pointer to its actual value. It was
2318 previously set to the start of the virtual area corresponding to
2319 the stacked variables when we branched here and now needs to be
2320 adjusted to the actual hardware fp value.
2322 Assignments are to virtual registers are converted by
2323 instantiate_virtual_regs into the corresponding assignment
2324 to the underlying register (fp in this case) that makes
2325 the original assignment true.
2326 So the following insn will actually be
2327 decrementing fp by STARTING_FRAME_OFFSET. */
2328 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
2330 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2331 if (fixed_regs[ARG_POINTER_REGNUM])
2333 #ifdef ELIMINABLE_REGS
2334 /* If the argument pointer can be eliminated in favor of the
2335 frame pointer, we don't need to restore it. We assume here
2336 that if such an elimination is present, it can always be used.
2337 This is the case on all known machines; if we don't make this
2338 assumption, we do unnecessary saving on many machines. */
2339 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
2342 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
2343 if (elim_regs[i].from == ARG_POINTER_REGNUM
2344 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
2347 if (i == ARRAY_SIZE (elim_regs))
2350 /* Now restore our arg pointer from the address at which it
2351 was saved in our stack frame. */
2352 emit_move_insn (virtual_incoming_args_rtx,
2353 copy_to_reg (get_arg_pointer_save_area (cfun)));
2358 #ifdef HAVE_nonlocal_goto_receiver
2359 if (HAVE_nonlocal_goto_receiver)
2360 emit_insn (gen_nonlocal_goto_receiver ());
2363 /* @@@ This is a kludge. Not all machine descriptions define a blockage
2364 insn, but we must not allow the code we just generated to be reordered
2365 by scheduling. Specifically, the update of the frame pointer must
2366 happen immediately, not later. So emit an ASM_INPUT to act as blockage
2368 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
2371 /* Warn about any unused VARS (which may contain nodes other than
2372 VAR_DECLs, but such nodes are ignored). The nodes are connected
2373 via the TREE_CHAIN field. */
2376 warn_about_unused_variables (tree vars)
2380 if (warn_unused_variable)
2381 for (decl = vars; decl; decl = TREE_CHAIN (decl))
2382 if (TREE_CODE (decl) == VAR_DECL
2383 && ! TREE_USED (decl)
2384 && ! DECL_IN_SYSTEM_HEADER (decl)
2385 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
2386 warning ("%Junused variable '%D'", decl, decl);
2389 /* Generate RTL code to terminate a binding contour.
2391 VARS is the chain of VAR_DECL nodes for the variables bound in this
2392 contour. There may actually be other nodes in this chain, but any
2393 nodes other than VAR_DECLS are ignored.
2395 MARK_ENDS is nonzero if we should put a note at the beginning
2396 and end of this binding contour.
2398 DONT_JUMP_IN is positive if it is not valid to jump into this contour,
2399 zero if we can jump into this contour only if it does not have a saved
2400 stack level, and negative if we are not to check for invalid use of
2401 labels (because the front end does that). */
2404 expand_end_bindings (tree vars, int mark_ends ATTRIBUTE_UNUSED,
2405 int dont_jump_in ATTRIBUTE_UNUSED)
2407 struct nesting *thisblock = block_stack;
2409 /* If any of the variables in this scope were not used, warn the
2411 warn_about_unused_variables (vars);
2413 if (thisblock->exit_label)
2415 do_pending_stack_adjust ();
2416 emit_label (thisblock->exit_label);
2419 /* Mark the beginning and end of the scope if requested. */
2421 /* Get rid of the beginning-mark if we don't make an end-mark. */
2422 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
2424 /* Restore the temporary level of TARGET_EXPRs. */
2425 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
2427 /* Restore block_stack level for containing block. */
2429 POPSTACK (block_stack);
2431 /* Pop the stack slot nesting and free any slots at this level. */
2435 /* Generate RTL for the automatic variable declaration DECL.
2436 (Other kinds of declarations are simply ignored if seen here.) */
2439 expand_decl (tree decl)
2443 type = TREE_TYPE (decl);
2445 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
2446 type in case this node is used in a reference. */
2447 if (TREE_CODE (decl) == CONST_DECL)
2449 DECL_MODE (decl) = TYPE_MODE (type);
2450 DECL_ALIGN (decl) = TYPE_ALIGN (type);
2451 DECL_SIZE (decl) = TYPE_SIZE (type);
2452 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
2456 /* Otherwise, only automatic variables need any expansion done. Static and
2457 external variables, and external functions, will be handled by
2458 `assemble_variable' (called from finish_decl). TYPE_DECL requires
2459 nothing. PARM_DECLs are handled in `assign_parms'. */
2460 if (TREE_CODE (decl) != VAR_DECL)
2463 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
2466 /* Create the RTL representation for the variable. */
2468 if (type == error_mark_node)
2469 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
2471 else if (DECL_SIZE (decl) == 0)
2472 /* Variable with incomplete type. */
2475 if (DECL_INITIAL (decl) == 0)
2476 /* Error message was already done; now avoid a crash. */
2477 x = gen_rtx_MEM (BLKmode, const0_rtx);
2479 /* An initializer is going to decide the size of this array.
2480 Until we know the size, represent its address with a reg. */
2481 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
2483 set_mem_attributes (x, decl, 1);
2484 SET_DECL_RTL (decl, x);
2486 else if (use_register_for_decl (decl))
2488 /* Automatic variable that can go in a register. */
2489 int unsignedp = TYPE_UNSIGNED (type);
2490 enum machine_mode reg_mode
2491 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
2493 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
2495 /* Note if the object is a user variable. */
2496 if (!DECL_ARTIFICIAL (decl))
2498 mark_user_reg (DECL_RTL (decl));
2500 /* Trust user variables which have a pointer type to really
2501 be pointers. Do not trust compiler generated temporaries
2502 as our type system is totally busted as it relates to
2503 pointer arithmetic which translates into lots of compiler
2504 generated objects with pointer types, but which are not really
2506 if (POINTER_TYPE_P (type))
2507 mark_reg_pointer (DECL_RTL (decl),
2508 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
2511 maybe_set_unchanging (DECL_RTL (decl), decl);
2514 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
2515 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
2516 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
2517 STACK_CHECK_MAX_VAR_SIZE)))
2519 /* Variable of fixed size that goes on the stack. */
2524 /* If we previously made RTL for this decl, it must be an array
2525 whose size was determined by the initializer.
2526 The old address was a register; set that register now
2527 to the proper address. */
2528 if (DECL_RTL_SET_P (decl))
2530 if (!MEM_P (DECL_RTL (decl))
2531 || !REG_P (XEXP (DECL_RTL (decl), 0)))
2533 oldaddr = XEXP (DECL_RTL (decl), 0);
2536 /* Set alignment we actually gave this decl. */
2537 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
2538 : GET_MODE_BITSIZE (DECL_MODE (decl)));
2539 DECL_USER_ALIGN (decl) = 0;
2541 x = assign_temp (decl, 1, 1, 1);
2542 set_mem_attributes (x, decl, 1);
2543 SET_DECL_RTL (decl, x);
2547 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
2548 if (addr != oldaddr)
2549 emit_move_insn (oldaddr, addr);
2553 /* Dynamic-size object: must push space on the stack. */
2555 rtx address, size, x;
2557 /* Record the stack pointer on entry to block, if have
2558 not already done so. */
2559 do_pending_stack_adjust ();
2561 /* Compute the variable's size, in bytes. This will expand any
2562 needed SAVE_EXPRs for the first time. */
2563 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
2566 /* Allocate space on the stack for the variable. Note that
2567 DECL_ALIGN says how the variable is to be aligned and we
2568 cannot use it to conclude anything about the alignment of
2570 address = allocate_dynamic_stack_space (size, NULL_RTX,
2571 TYPE_ALIGN (TREE_TYPE (decl)));
2573 /* Reference the variable indirect through that rtx. */
2574 x = gen_rtx_MEM (DECL_MODE (decl), address);
2575 set_mem_attributes (x, decl, 1);
2576 SET_DECL_RTL (decl, x);
2579 /* Indicate the alignment we actually gave this variable. */
2580 #ifdef STACK_BOUNDARY
2581 DECL_ALIGN (decl) = STACK_BOUNDARY;
2583 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
2585 DECL_USER_ALIGN (decl) = 0;
2589 /* Emit code to allocate T_SIZE bytes of dynamic stack space for ALLOC. */
2591 expand_stack_alloc (tree alloc, tree t_size)
2593 rtx address, dest, size;
2596 if (TREE_CODE (alloc) != ADDR_EXPR)
2598 var = TREE_OPERAND (alloc, 0);
2599 if (TREE_CODE (var) != VAR_DECL)
2602 type = TREE_TYPE (var);
2604 /* Compute the variable's size, in bytes. */
2605 size = expand_expr (t_size, NULL_RTX, VOIDmode, 0);
2608 /* Allocate space on the stack for the variable. */
2609 address = XEXP (DECL_RTL (var), 0);
2610 dest = allocate_dynamic_stack_space (size, address, TYPE_ALIGN (type));
2611 if (dest != address)
2612 emit_move_insn (address, dest);
2614 /* Indicate the alignment we actually gave this variable. */
2615 #ifdef STACK_BOUNDARY
2616 DECL_ALIGN (var) = STACK_BOUNDARY;
2618 DECL_ALIGN (var) = BIGGEST_ALIGNMENT;
2620 DECL_USER_ALIGN (var) = 0;
2623 /* Emit code to save the current value of stack. */
2625 expand_stack_save (void)
2629 do_pending_stack_adjust ();
2630 emit_stack_save (SAVE_BLOCK, &ret, NULL_RTX);
2634 /* Emit code to restore the current value of stack. */
2636 expand_stack_restore (tree var)
2638 rtx sa = DECL_RTL (var);
2640 emit_stack_restore (SAVE_BLOCK, sa, NULL_RTX);
2643 /* Emit code to perform the initialization of a declaration DECL. */
2646 expand_decl_init (tree decl)
2648 int was_used = TREE_USED (decl);
2650 /* If this is a CONST_DECL, we don't have to generate any code. Likewise
2651 for static decls. */
2652 if (TREE_CODE (decl) == CONST_DECL
2653 || TREE_STATIC (decl))
2656 /* Compute and store the initial value now. */
2660 if (DECL_INITIAL (decl) == error_mark_node)
2662 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
2664 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
2665 || code == POINTER_TYPE || code == REFERENCE_TYPE)
2666 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
2669 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
2671 emit_line_note (DECL_SOURCE_LOCATION (decl));
2672 expand_assignment (decl, DECL_INITIAL (decl), 0);
2675 /* Don't let the initialization count as "using" the variable. */
2676 TREE_USED (decl) = was_used;
2678 /* Free any temporaries we made while initializing the decl. */
2679 preserve_temp_slots (NULL_RTX);
2685 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
2686 DECL_ELTS is the list of elements that belong to DECL's type.
2687 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
2690 expand_anon_union_decl (tree decl, tree cleanup ATTRIBUTE_UNUSED,
2696 /* If any of the elements are addressable, so is the entire union. */
2697 for (t = decl_elts; t; t = TREE_CHAIN (t))
2698 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
2700 TREE_ADDRESSABLE (decl) = 1;
2705 x = DECL_RTL (decl);
2707 /* Go through the elements, assigning RTL to each. */
2708 for (t = decl_elts; t; t = TREE_CHAIN (t))
2710 tree decl_elt = TREE_VALUE (t);
2711 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
2713 /* If any of the elements are addressable, so is the entire
2715 if (TREE_USED (decl_elt))
2716 TREE_USED (decl) = 1;
2718 /* Propagate the union's alignment to the elements. */
2719 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
2720 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
2722 /* If the element has BLKmode and the union doesn't, the union is
2723 aligned such that the element doesn't need to have BLKmode, so
2724 change the element's mode to the appropriate one for its size. */
2725 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
2726 DECL_MODE (decl_elt) = mode
2727 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
2729 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
2730 instead create a new MEM rtx with the proper mode. */
2733 if (mode == GET_MODE (x))
2734 SET_DECL_RTL (decl_elt, x);
2736 SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
2740 if (mode == GET_MODE (x))
2741 SET_DECL_RTL (decl_elt, x);
2743 SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
2750 /* Enter a case (Pascal) or switch (C) statement.
2751 Push a block onto case_stack and nesting_stack
2752 to accumulate the case-labels that are seen
2753 and to record the labels generated for the statement.
2755 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
2756 Otherwise, this construct is transparent for `exit_something'.
2758 EXPR is the index-expression to be dispatched on.
2759 TYPE is its nominal type. We could simply convert EXPR to this type,
2760 but instead we take short cuts. */
2763 expand_start_case (int exit_flag, tree expr, tree type,
2764 const char *printname)
2766 struct nesting *thiscase = ALLOC_NESTING ();
2768 /* Make an entry on case_stack for the case we are entering. */
2770 thiscase->desc = CASE_NESTING;
2771 thiscase->next = case_stack;
2772 thiscase->all = nesting_stack;
2773 thiscase->depth = ++nesting_depth;
2774 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
2775 thiscase->data.case_stmt.case_list = 0;
2776 thiscase->data.case_stmt.index_expr = expr;
2777 thiscase->data.case_stmt.nominal_type = type;
2778 thiscase->data.case_stmt.default_label = 0;
2779 thiscase->data.case_stmt.printname = printname;
2780 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
2781 case_stack = thiscase;
2782 nesting_stack = thiscase;
2784 do_pending_stack_adjust ();
2786 /* Make sure case_stmt.start points to something that won't
2787 need any transformation before expand_end_case. */
2788 if (!NOTE_P (get_last_insn ()))
2789 emit_note (NOTE_INSN_DELETED);
2791 thiscase->data.case_stmt.start = get_last_insn ();
2794 /* Accumulate one case or default label inside a case or switch statement.
2795 VALUE is the value of the case (a null pointer, for a default label).
2796 The function CONVERTER, when applied to arguments T and V,
2797 converts the value V to the type T.
2799 If not currently inside a case or switch statement, return 1 and do
2800 nothing. The caller will print a language-specific error message.
2801 If VALUE is a duplicate or overlaps, return 2 and do nothing
2802 except store the (first) duplicate node in *DUPLICATE.
2803 If VALUE is out of range, return 3 and do nothing.
2804 Return 0 on success.
2806 Extended to handle range statements. */
2809 pushcase (tree value, tree (*converter) (tree, tree), tree label,
2815 /* Fail if not inside a real case statement. */
2816 if (! (case_stack && case_stack->data.case_stmt.start))
2819 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
2820 nominal_type = case_stack->data.case_stmt.nominal_type;
2822 /* If the index is erroneous, avoid more problems: pretend to succeed. */
2823 if (index_type == error_mark_node)
2826 /* Convert VALUE to the type in which the comparisons are nominally done. */
2828 value = (*converter) (nominal_type, value);
2830 /* Fail if this value is out of range for the actual type of the index
2831 (which may be narrower than NOMINAL_TYPE). */
2833 && (TREE_CONSTANT_OVERFLOW (value)
2834 || ! int_fits_type_p (value, index_type)))
2837 return add_case_node (value, value, label, duplicate, false);
2840 /* Like pushcase but this case applies to all values between VALUE1 and
2841 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
2842 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
2843 starts at VALUE1 and ends at the highest value of the index type.
2844 If both are NULL, this case applies to all values.
2846 The return value is the same as that of pushcase but there is one
2847 additional error code: 4 means the specified range was empty. */
2850 pushcase_range (tree value1, tree value2, tree (*converter) (tree, tree),
2851 tree label, tree *duplicate)
2856 /* Fail if not inside a real case statement. */
2857 if (! (case_stack && case_stack->data.case_stmt.start))
2860 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
2861 nominal_type = case_stack->data.case_stmt.nominal_type;
2863 /* If the index is erroneous, avoid more problems: pretend to succeed. */
2864 if (index_type == error_mark_node)
2867 /* Convert VALUEs to type in which the comparisons are nominally done
2868 and replace any unspecified value with the corresponding bound. */
2870 value1 = TYPE_MIN_VALUE (index_type);
2872 value2 = TYPE_MAX_VALUE (index_type);
2874 /* Fail if the range is empty. Do this before any conversion since
2875 we want to allow out-of-range empty ranges. */
2876 if (value2 != 0 && tree_int_cst_lt (value2, value1))
2879 /* If the max was unbounded, use the max of the nominal_type we are
2880 converting to. Do this after the < check above to suppress false
2883 value2 = TYPE_MAX_VALUE (nominal_type);
2885 value1 = (*converter) (nominal_type, value1);
2886 value2 = (*converter) (nominal_type, value2);
2888 /* Fail if these values are out of range. */
2889 if (TREE_CONSTANT_OVERFLOW (value1)
2890 || ! int_fits_type_p (value1, index_type))
2893 if (TREE_CONSTANT_OVERFLOW (value2)
2894 || ! int_fits_type_p (value2, index_type))
2897 return add_case_node (value1, value2, label, duplicate, false);
2900 /* Do the actual insertion of a case label for pushcase and pushcase_range
2901 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
2902 slowdown for large switch statements. */
2905 add_case_node (tree low, tree high, tree label, tree *duplicate,
2906 bool dont_expand_label)
2908 struct case_node *p, **q, *r;
2910 /* If there's no HIGH value, then this is not a case range; it's
2911 just a simple case label. But that's just a degenerate case
2916 /* Handle default labels specially. */
2919 if (case_stack->data.case_stmt.default_label != 0)
2921 *duplicate = case_stack->data.case_stmt.default_label;
2924 case_stack->data.case_stmt.default_label = label;
2925 if (!dont_expand_label)
2926 expand_label (label);
2930 q = &case_stack->data.case_stmt.case_list;
2937 /* Keep going past elements distinctly greater than HIGH. */
2938 if (tree_int_cst_lt (high, p->low))
2941 /* or distinctly less than LOW. */
2942 else if (tree_int_cst_lt (p->high, low))
2947 /* We have an overlap; this is an error. */
2948 *duplicate = p->code_label;
2953 /* Add this label to the chain, and succeed. */
2955 r = ggc_alloc (sizeof (struct case_node));
2958 /* If the bounds are equal, turn this into the one-value case. */
2959 if (tree_int_cst_equal (low, high))
2964 r->code_label = label;
2965 if (!dont_expand_label)
2966 expand_label (label);
2976 struct case_node *s;
2982 if (! (b = p->balance))
2983 /* Growth propagation from left side. */
2990 if ((p->left = s = r->right))
2999 if ((r->parent = s))
3007 case_stack->data.case_stmt.case_list = r;
3010 /* r->balance == +1 */
3015 struct case_node *t = r->right;
3017 if ((p->left = s = t->right))
3021 if ((r->right = s = t->left))
3035 if ((t->parent = s))
3043 case_stack->data.case_stmt.case_list = t;
3050 /* p->balance == +1; growth of left side balances the node. */
3060 if (! (b = p->balance))
3061 /* Growth propagation from right side. */
3069 if ((p->right = s = r->left))
3077 if ((r->parent = s))
3086 case_stack->data.case_stmt.case_list = r;
3090 /* r->balance == -1 */
3094 struct case_node *t = r->left;
3096 if ((p->right = s = t->left))
3101 if ((r->left = s = t->right))
3115 if ((t->parent = s))
3124 case_stack->data.case_stmt.case_list = t;
3130 /* p->balance == -1; growth of right side balances the node. */
3143 /* Maximum number of case bit tests. */
3144 #define MAX_CASE_BIT_TESTS 3
3146 /* By default, enable case bit tests on targets with ashlsi3. */
3147 #ifndef CASE_USE_BIT_TESTS
3148 #define CASE_USE_BIT_TESTS (ashl_optab->handlers[word_mode].insn_code \
3149 != CODE_FOR_nothing)
3153 /* A case_bit_test represents a set of case nodes that may be
3154 selected from using a bit-wise comparison. HI and LO hold
3155 the integer to be tested against, LABEL contains the label
3156 to jump to upon success and BITS counts the number of case
3157 nodes handled by this test, typically the number of bits
3160 struct case_bit_test
3168 /* Determine whether "1 << x" is relatively cheap in word_mode. */
3171 bool lshift_cheap_p (void)
3173 static bool init = false;
3174 static bool cheap = true;
3178 rtx reg = gen_rtx_REG (word_mode, 10000);
3179 int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET);
3180 cheap = cost < COSTS_N_INSNS (3);
3187 /* Comparison function for qsort to order bit tests by decreasing
3188 number of case nodes, i.e. the node with the most cases gets
3192 case_bit_test_cmp (const void *p1, const void *p2)
3194 const struct case_bit_test *d1 = p1;
3195 const struct case_bit_test *d2 = p2;
3197 return d2->bits - d1->bits;
3200 /* Expand a switch statement by a short sequence of bit-wise
3201 comparisons. "switch(x)" is effectively converted into
3202 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
3205 INDEX_EXPR is the value being switched on, which is of
3206 type INDEX_TYPE. MINVAL is the lowest case value of in
3207 the case nodes, of INDEX_TYPE type, and RANGE is highest
3208 value minus MINVAL, also of type INDEX_TYPE. NODES is
3209 the set of case nodes, and DEFAULT_LABEL is the label to
3210 branch to should none of the cases match.
3212 There *MUST* be MAX_CASE_BIT_TESTS or less unique case
3216 emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
3217 tree range, case_node_ptr nodes, rtx default_label)
3219 struct case_bit_test test[MAX_CASE_BIT_TESTS];
3220 enum machine_mode mode;
3221 rtx expr, index, label;
3222 unsigned int i,j,lo,hi;
3223 struct case_node *n;
3227 for (n = nodes; n; n = n->right)
3229 label = label_rtx (n->code_label);
3230 for (i = 0; i < count; i++)
3231 if (same_case_target_p (label, test[i].label))
3236 if (count >= MAX_CASE_BIT_TESTS)
3240 test[i].label = label;
3247 lo = tree_low_cst (fold (build (MINUS_EXPR, index_type,
3248 n->low, minval)), 1);
3249 hi = tree_low_cst (fold (build (MINUS_EXPR, index_type,
3250 n->high, minval)), 1);
3251 for (j = lo; j <= hi; j++)
3252 if (j >= HOST_BITS_PER_WIDE_INT)
3253 test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_INT);
3255 test[i].lo |= (HOST_WIDE_INT) 1 << j;
3258 qsort (test, count, sizeof(*test), case_bit_test_cmp);
3260 index_expr = fold (build (MINUS_EXPR, index_type,
3261 convert (index_type, index_expr),
3262 convert (index_type, minval)));
3263 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
3264 do_pending_stack_adjust ();
3266 mode = TYPE_MODE (index_type);
3267 expr = expand_expr (range, NULL_RTX, VOIDmode, 0);
3268 emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
3271 index = convert_to_mode (word_mode, index, 0);
3272 index = expand_binop (word_mode, ashl_optab, const1_rtx,
3273 index, NULL_RTX, 1, OPTAB_WIDEN);
3275 for (i = 0; i < count; i++)
3277 expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
3278 expr = expand_binop (word_mode, and_optab, index, expr,
3279 NULL_RTX, 1, OPTAB_WIDEN);
3280 emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
3281 word_mode, 1, test[i].label);
3284 emit_jump (default_label);
3288 #define HAVE_casesi 0
3291 #ifndef HAVE_tablejump
3292 #define HAVE_tablejump 0
3295 /* Terminate a case (Pascal) or switch (C) statement
3296 in which ORIG_INDEX is the expression to be tested.
3297 If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
3298 type as given in the source before any compiler conversions.
3299 Generate the code to test it and jump to the right place. */
3302 expand_end_case_type (tree orig_index, tree orig_type)
3304 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
3305 rtx default_label = 0;
3306 struct case_node *n, *m;
3307 unsigned int count, uniq;
3313 rtx before_case, end, lab;
3314 struct nesting *thiscase = case_stack;
3315 tree index_expr, index_type;
3316 bool exit_done = false;
3319 /* Don't crash due to previous errors. */
3320 if (thiscase == NULL)
3323 index_expr = thiscase->data.case_stmt.index_expr;
3324 index_type = TREE_TYPE (index_expr);
3325 unsignedp = TYPE_UNSIGNED (index_type);
3326 if (orig_type == NULL)
3327 orig_type = TREE_TYPE (orig_index);
3329 do_pending_stack_adjust ();
3331 /* An ERROR_MARK occurs for various reasons including invalid data type. */
3332 if (index_type != error_mark_node)
3334 /* If we don't have a default-label, create one here,
3335 after the body of the switch. */
3336 if (thiscase->data.case_stmt.default_label == 0)
3338 thiscase->data.case_stmt.default_label
3339 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
3340 /* Share the exit label if possible. */
3341 if (thiscase->exit_label)
3343 SET_DECL_RTL (thiscase->data.case_stmt.default_label,
3344 thiscase->exit_label);
3347 expand_label (thiscase->data.case_stmt.default_label);
3349 default_label = label_rtx (thiscase->data.case_stmt.default_label);
3351 before_case = get_last_insn ();
3353 if (thiscase->data.case_stmt.case_list
3354 && thiscase->data.case_stmt.case_list->left)
3355 thiscase->data.case_stmt.case_list
3356 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
3358 /* Simplify the case-list before we count it. */
3359 group_case_nodes (thiscase->data.case_stmt.case_list);
3360 strip_default_case_nodes (&thiscase->data.case_stmt.case_list,
3363 /* Get upper and lower bounds of case values.
3364 Also convert all the case values to the index expr's data type. */
3368 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
3370 /* Check low and high label values are integers. */
3371 if (TREE_CODE (n->low) != INTEGER_CST)
3373 if (TREE_CODE (n->high) != INTEGER_CST)
3376 n->low = convert (index_type, n->low);
3377 n->high = convert (index_type, n->high);
3379 /* Count the elements and track the largest and smallest
3380 of them (treating them as signed even if they are not). */
3388 if (INT_CST_LT (n->low, minval))
3390 if (INT_CST_LT (maxval, n->high))
3393 /* A range counts double, since it requires two compares. */
3394 if (! tree_int_cst_equal (n->low, n->high))
3397 /* Count the number of unique case node targets. */
3399 lab = label_rtx (n->code_label);
3400 for (m = thiscase->data.case_stmt.case_list; m != n; m = m->right)
3401 if (same_case_target_p (label_rtx (m->code_label), lab))
3408 /* Compute span of values. */
3410 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
3414 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
3415 emit_jump (default_label);
3418 /* Try implementing this switch statement by a short sequence of
3419 bit-wise comparisons. However, we let the binary-tree case
3420 below handle constant index expressions. */
3421 else if (CASE_USE_BIT_TESTS
3422 && ! TREE_CONSTANT (index_expr)
3423 && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
3424 && compare_tree_int (range, 0) > 0
3425 && lshift_cheap_p ()
3426 && ((uniq == 1 && count >= 3)
3427 || (uniq == 2 && count >= 5)
3428 || (uniq == 3 && count >= 6)))
3430 /* Optimize the case where all the case values fit in a
3431 word without having to subtract MINVAL. In this case,
3432 we can optimize away the subtraction. */
3433 if (compare_tree_int (minval, 0) > 0
3434 && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
3436 minval = integer_zero_node;
3439 emit_case_bit_tests (index_type, index_expr, minval, range,
3440 thiscase->data.case_stmt.case_list,
3444 /* If range of values is much bigger than number of values,
3445 make a sequence of conditional branches instead of a dispatch.
3446 If the switch-index is a constant, do it this way
3447 because we can optimize it. */
3449 else if (count < case_values_threshold ()
3450 || compare_tree_int (range,
3451 (optimize_size ? 3 : 10) * count) > 0
3452 /* RANGE may be signed, and really large ranges will show up
3453 as negative numbers. */
3454 || compare_tree_int (range, 0) < 0
3455 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
3458 || TREE_CONSTANT (index_expr)
3459 /* If neither casesi or tablejump is available, we can
3460 only go this way. */
3461 || (!HAVE_casesi && !HAVE_tablejump))
3463 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
3465 /* If the index is a short or char that we do not have
3466 an insn to handle comparisons directly, convert it to
3467 a full integer now, rather than letting each comparison
3468 generate the conversion. */
3470 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
3471 && ! have_insn_for (COMPARE, GET_MODE (index)))
3473 enum machine_mode wider_mode;
3474 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
3475 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
3476 if (have_insn_for (COMPARE, wider_mode))
3478 index = convert_to_mode (wider_mode, index, unsignedp);
3483 do_pending_stack_adjust ();
3486 index = copy_to_reg (index);
3487 if (GET_CODE (index) == CONST_INT
3488 || TREE_CODE (index_expr) == INTEGER_CST)
3490 /* Make a tree node with the proper constant value
3491 if we don't already have one. */
3492 if (TREE_CODE (index_expr) != INTEGER_CST)
3495 = build_int_2 (INTVAL (index),
3496 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
3497 index_expr = convert (index_type, index_expr);
3500 /* For constant index expressions we need only
3501 issue an unconditional branch to the appropriate
3502 target code. The job of removing any unreachable
3503 code is left to the optimization phase if the
3504 "-O" option is specified. */
3505 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
3506 if (! tree_int_cst_lt (index_expr, n->low)
3507 && ! tree_int_cst_lt (n->high, index_expr))
3511 emit_jump (label_rtx (n->code_label));
3513 emit_jump (default_label);
3517 /* If the index expression is not constant we generate
3518 a binary decision tree to select the appropriate
3519 target code. This is done as follows:
3521 The list of cases is rearranged into a binary tree,
3522 nearly optimal assuming equal probability for each case.
3524 The tree is transformed into RTL, eliminating
3525 redundant test conditions at the same time.
3527 If program flow could reach the end of the
3528 decision tree an unconditional jump to the
3529 default code is emitted. */
3532 = (TREE_CODE (orig_type) != ENUMERAL_TYPE
3533 && estimate_case_costs (thiscase->data.case_stmt.case_list));
3534 balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
3535 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
3536 default_label, index_type);
3537 emit_jump_if_reachable (default_label);
3542 table_label = gen_label_rtx ();
3543 if (! try_casesi (index_type, index_expr, minval, range,
3544 table_label, default_label))
3546 index_type = thiscase->data.case_stmt.nominal_type;
3548 /* Index jumptables from zero for suitable values of
3549 minval to avoid a subtraction. */
3551 && compare_tree_int (minval, 0) > 0
3552 && compare_tree_int (minval, 3) < 0)
3554 minval = integer_zero_node;
3558 if (! try_tablejump (index_type, index_expr, minval, range,
3559 table_label, default_label))
3563 /* Get table of labels to jump to, in order of case index. */